code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
from distutils.errors import DistutilsOptionError
from unittest import TestLoader
import sys
from setuptools.extern import six
from setuptools.extern.six.moves import map
from pkg_resources import (resource_listdir, resource_exists, normalize_path,
working_set, _namespace_packages,
add_activation_listener, require, EntryPoint)
from setuptools import Command
from setuptools.py31compat import unittest_main
class ScanningLoader(TestLoader):
    def loadTestsFromModule(self, module, pattern=None):
        """Build a test suite from *module*.

        Packages are walked: every ``*.py`` file (except ``__init__.py``)
        and every sub-package discovered via ``resource_listdir``
        contributes its tests.  A module-level ``additional_tests``
        callable, when present, is invoked and its result appended too.
        """
        collected = [TestLoader.loadTestsFromModule(self, module)]
        if hasattr(module, "additional_tests"):
            collected.append(module.additional_tests())
        if hasattr(module, '__path__'):
            for entry in resource_listdir(module.__name__, ''):
                if entry.endswith('.py') and entry != '__init__.py':
                    child = module.__name__ + '.' + entry[:-3]
                elif resource_exists(module.__name__, entry + '/__init__.py'):
                    child = module.__name__ + '.' + entry
                else:
                    # Neither a module file nor a sub-package; skip it.
                    continue
                collected.append(self.loadTestsFromName(child))
        if len(collected) == 1:
            # Don't wrap a lone suite in another suite.
            return collected[0]
        return self.suiteClass(collected)
# adapted from jaraco.classes.properties:NonDataProperty
class NonDataProperty(object):
    """Descriptor that computes its value via *fget* on instance access.

    Unlike ``property`` it defines no ``__set__``, making it a
    "non-data" descriptor: an instance attribute of the same name can
    shadow it.
    """

    def __init__(self, fget):
        # Callable invoked with the owning instance on attribute access.
        self.fget = fget

    def __get__(self, obj, objtype=None):
        # Class-level access (obj is None) returns the descriptor itself.
        return self if obj is None else self.fget(obj)
class test(Command):
    """Command to run unit tests after an in-place build.

    Builds the project (optionally through 2to3 into a separate build
    directory), puts it on ``sys.path``, then runs the configured test
    suite via ``unittest``.
    """

    description = "run unit tests after in-place build"

    user_options = [
        ('test-module=', 'm', "Run 'test_suite' in specified module"),
        ('test-suite=', 's',
         "Test suite to run (e.g. 'some_module.test_suite')"),
        ('test-runner=', 'r', "Test runner to use"),
    ]

    def initialize_options(self):
        self.test_suite = None
        self.test_module = None
        self.test_loader = None
        self.test_runner = None

    def finalize_options(self):
        # --test-suite and --test-module are mutually exclusive.
        if self.test_suite and self.test_module:
            msg = "You may specify a module or a suite, but not both"
            raise DistutilsOptionError(msg)
        if self.test_suite is None:
            if self.test_module is None:
                self.test_suite = self.distribution.test_suite
            else:
                self.test_suite = self.test_module + ".test_suite"
        # Loader resolution order: explicit option, distribution setting,
        # then our package-walking ScanningLoader.
        if self.test_loader is None:
            self.test_loader = getattr(self.distribution, 'test_loader', None)
        if self.test_loader is None:
            self.test_loader = "setuptools.command.test:ScanningLoader"
        if self.test_runner is None:
            self.test_runner = getattr(self.distribution, 'test_runner', None)

    @NonDataProperty
    def test_args(self):
        # NonDataProperty: an instance attribute assignment can override
        # this computed value.
        return list(self._test_args())

    def _test_args(self):
        # Yield the unittest command-line arguments implied by options.
        if self.verbose:
            yield '--verbose'
        if self.test_suite:
            yield self.test_suite

    def with_project_on_sys_path(self, func):
        """Build the project, make it importable, then call *func*.

        ``sys.path``, ``sys.modules`` and the global working set are
        restored afterwards, even if *func* raises.
        """
        with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
        if with_2to3:
            # If we run 2to3 we can not do this inplace:
            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = normalize_path(bpy_cmd.build_lib)
            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')
            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')
            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')
        ei_cmd = self.get_finalized_command("egg_info")
        old_path = sys.path[:]
        old_modules = sys.modules.copy()
        try:
            sys.path.insert(0, normalize_path(ei_cmd.egg_base))
            working_set.__init__()
            add_activation_listener(lambda dist: dist.activate())
            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
            func()
        finally:
            # Undo every import-system mutation made above.
            sys.path[:] = old_path
            sys.modules.clear()
            sys.modules.update(old_modules)
            working_set.__init__()

    def run(self):
        # Fetch any eggs needed to install or test the project.
        if self.distribution.install_requires:
            self.distribution.fetch_build_eggs(
                self.distribution.install_requires)
        if self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)
        cmd = ' '.join(self._argv)
        if self.dry_run:
            self.announce('skipping "%s" (dry run)' % cmd)
        else:
            self.announce('running "%s"' % cmd)
            self.with_project_on_sys_path(self.run_tests)

    def run_tests(self):
        # Purge modules under test from sys.modules. The test loader will
        # re-import them from the build location. Required when 2to3 is used
        # with namespace packages.
        if six.PY3 and getattr(self.distribution, 'use_2to3', False):
            module = self.test_suite.split('.')[0]
            if module in _namespace_packages:
                del_modules = []
                if module in sys.modules:
                    del_modules.append(module)
                module += '.'
                for name in sys.modules:
                    if name.startswith(module):
                        del_modules.append(name)
                list(map(sys.modules.__delitem__, del_modules))
        unittest_main(
            None, None, self._argv,
            testLoader=self._resolve_as_ep(self.test_loader),
            testRunner=self._resolve_as_ep(self.test_runner),
        )

    @property
    def _argv(self):
        return ['unittest'] + self.test_args

    @staticmethod
    def _resolve_as_ep(val):
        """
        Load the indicated attribute value, called, as if it were
        specified as an entry point.
        """
        if val is None:
            return
        parsed = EntryPoint.parse("x=" + val)
        return parsed.resolve()()
from common import *
from solution import *
import copy
import sys
import datetime
# Number of test cases expected in the judge data file.
num_test = 110
# Lowercase aliases — presumably so pasted/generated data containing
# JSON-style literals evaluates cleanly; TODO confirm against data files.
true, false = True, False
in_0 = []      # test inputs (one row per case)
in_org_0 = []  # deep copy of the inputs, kept for failure reporting
out = []       # expected outputs (one row per case)
def test_ret(arr, answer_len):
if len(arr) != answer_len: return False
sum = 0
for i in range(len(arr)):
sum += arr[i]
return sum == 0
def load_test():
    """Load judge inputs and expected outputs from the test data file.

    Populates the module-level globals ``in_0`` (inputs), ``in_org_0``
    (a deep copy of the inputs, kept for failure reporting) and ``out``
    (expected outputs).
    """
    global in_0, in_org_0, out
    # Bug fix: the original called ``f.close`` without parentheses, so the
    # file handle was never closed; ``with`` guarantees closure.
    with open('judge/tests/longest-zero-subarray.txt', 'r') as f:
        in_0 = read_int_matrix(f)
        in_org_0 = copy.deepcopy(in_0)
        out = read_int_matrix(f)
def judge():
    """Run the solution against every test case and print a verdict.

    Prints ``Accepted;<runtime-ms>`` when all cases pass, or a single
    semicolon-separated failure line (case number / total, original
    input, actual answer, expected answer) for the first failing case.
    (Also strips a dataset artifact that was fused onto the final line.)
    """
    load_test()
    # Suppress per-case progress output unless a failure must be reported.
    capture_stdout()
    start_time = datetime.datetime.now()
    for i in range(num_test):
        print ('Testing case #' + str(i+1))
        answer = longest_subarray(in_0[i])
        if (not test_ret(answer, len(out[i]))):
            release_stdout()
            # Report against the pristine copy, in case the solution
            # mutated its input.
            out_str = str(i+1) + " / " + str(num_test) + ";"
            out_str += str(in_org_0[i])
            out_str += ";"
            out_str += str(answer)
            out_str += ";"
            out_str += str(out[i])
            print(out_str)
            return
    release_stdout()
    delta = datetime.datetime.now() - start_time
    runtime = str(int(delta.total_seconds() * 1000))
    print('Accepted;' + runtime)
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"links": [],
"panels": [
{
"datasource": {
"type": "grafana-testdata-datasource"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": 0
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 22,
"w": 24,
"x": 0,
"y": 0
},
"id": 5,
"options": {
"infinitePan": true,
"inlineEditing": false,
"panZoom": true,
"root": {
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"elements": [
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "text"
},
"size": 20,
"text": {
"fixed": "Library"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 39",
"placement": {
"height": 118,
"left": 1584,
"top": 259,
"width": 204
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-blue"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "light-blue"
},
"size": 20,
"text": {
"fixed": "Viewport"
},
"valign": "bottom"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -0.8229007633587786,
"y": 0.27741935483870966
},
"sourceOriginal": {
"x": 1223.8335877862596,
"y": 300
},
"target": {
"x": 1.0112359550561798,
"y": -0.012987012987012988
},
"targetName": "Element 20",
"targetOriginal": {
"x": 990,
"y": 474
},
"vertices": [
{
"x": -0.18181818181818182,
"y": 0
},
{
"x": -0.18181818181818182,
"y": 1
}
]
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"radius": {
"fixed": 25,
"max": 200,
"min": 0
},
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -0.3068702290076336,
"y": -0.47419354838709676
},
"sourceOriginal": {
"x": 1401.090076335878,
"y": 533
},
"target": {
"x": 1.0112359550561798,
"y": -0.4805194805194805
},
"targetName": "Element 20",
"targetOriginal": {
"x": 990,
"y": 492
},
"vertices": [
{
"x": 0.415,
"y": 0
},
{
"x": 0.4775,
"y": 1
}
]
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"direction": "reverse",
"path": "straight",
"radius": {
"fixed": 0,
"max": 200,
"min": 0
},
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0.7903930131004366,
"y": -0.36935483870967745
},
"sourceOriginal": {
"x": 1778,
"y": 500.5
},
"target": {
"x": 1.0120481927710843,
"y": 0.024691358024691357
},
"targetName": "Element 35",
"targetOriginal": {
"x": 1523,
"y": 177
},
"vertices": [
{
"x": -0.1607843137254902,
"y": 0
},
{
"x": -0.1607843137254902,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 32",
"placement": {
"height": 620,
"left": 1163,
"top": 76,
"width": 687
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "#464646"
},
"radius": 0,
"width": 2
},
"config": {
"align": "center",
"color": {
"fixed": "#505050"
},
"size": 20,
"text": {
"fixed": "Equipment State"
},
"valign": "bottom"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 36",
"placement": {
"height": 229,
"left": 1340,
"top": 386,
"width": 438
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-orange"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Subscriptions"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 28",
"placement": {
"height": 77,
"left": 831,
"top": 339,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-orange"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Subscriptions"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 27",
"placement": {
"height": 77,
"left": 821,
"top": 329,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Mutation"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 26",
"placement": {
"height": 77,
"left": 583,
"top": 455,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Mutation"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 25",
"placement": {
"height": 77,
"left": 573,
"top": 445,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#d9d9d9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Subscription"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 24",
"placement": {
"height": 77,
"left": 583,
"top": 338,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Subscription"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0.8876404494382022,
"y": 0.2597402597402597
},
"sourceOriginal": {
"x": 741,
"y": 356.5
},
"target": {
"x": -0.9775280898876404,
"y": -0.012987012987012988
},
"targetName": "Element 19",
"targetOriginal": {
"x": 813,
"y": 358
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 23",
"placement": {
"height": 77,
"left": 573,
"top": 328,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Query"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 22",
"placement": {
"height": 77,
"left": 583,
"top": 221,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Query"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0.8876404494382022,
"y": 0.2597402597402597
},
"sourceOriginal": {
"x": 741,
"y": 239.5
},
"target": {
"x": -0.9887640449438202,
"y": -0.06493506493506493
},
"targetName": "Element 18",
"targetOriginal": {
"x": 812,
"y": 242
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 21",
"placement": {
"height": 77,
"left": 573,
"top": 211,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Controllers"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -0.2247191011235955,
"y": -0.4805194805194805
},
"sourceOriginal": {
"x": 342,
"y": 395
},
"target": {
"x": 0.011235955056179775,
"y": 0.922077922077922
},
"targetName": "Element 12",
"targetOriginal": {
"x": 343,
"y": 458
}
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0.7752808988764045,
"y": 0.5194805194805194
},
"sourceOriginal": {
"x": 431,
"y": 356.5
},
"target": {
"x": -1.0833333333333333,
"y": 0.020618556701030927
},
"targetName": "Element 15",
"targetOriginal": {
"x": 526,
"y": 356
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 11",
"placement": {
"height": 77,
"left": 273,
"top": 338,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Controllers"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 10",
"placement": {
"height": 77,
"left": 263,
"top": 328,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "light-orange"
},
"size": "contain"
},
"border": {
"color": {
"fixed": "transparent"
},
"radius": 0,
"width": 5
},
"config": {
"fill": {
"fixed": "#D9D9D9"
},
"path": {
"field": "",
"fixed": "https://upload.wikimedia.org/wikipedia/commons/c/c8/Forme_ligne.svg",
"mode": "fixed"
}
},
"connections": [],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 2",
"placement": {
"height": 96,
"left": 1390,
"top": 461,
"width": 100
},
"type": "icon"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": ""
},
"valign": "middle"
},
"connections": [],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 3",
"placement": {
"height": 30,
"left": 1390,
"top": 423,
"width": 30
},
"type": "ellipse"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": ""
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 4",
"placement": {
"height": 30,
"left": 1460,
"top": 423,
"width": 30
},
"type": "ellipse"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Controllers"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "green"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0.5
},
"sourceOriginal": {
"x": 431,
"y": 337.25
},
"target": {
"x": -1.0112359550561798,
"y": -0.012987012987012988
},
"targetName": "Element 16",
"targetOriginal": {
"x": 562,
"y": 240
},
"vertices": [
{
"x": 0.42748091603053434,
"y": 0
},
{
"x": 0.42748091603053434,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 6",
"placement": {
"height": 77,
"left": 253,
"top": 318,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-purple"
},
"width": 4
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "DB"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"direction": "both",
"lineStyle": "dashed",
"path": "straight",
"radius": {
"fixed": 50,
"max": 200,
"min": 0
},
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0,
"y": -1
},
"sourceOriginal": {
"x": 150,
"y": 223
},
"target": {
"x": -0.5168539325842697,
"y": 1
},
"targetName": "Element 6",
"targetOriginal": {
"x": 296,
"y": 318
},
"vertices": [
{
"x": 0,
"y": 0.5263157894736842
}
]
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"direction": "reverse",
"lineStyle": "dashed",
"path": "straight",
"radius": {
"fixed": 15,
"max": 200,
"min": 0
},
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0,
"y": 1
},
"sourceOriginal": {
"x": 150,
"y": 148
},
"target": {
"x": -1.0112359550561798,
"y": -0.012987012987012988
},
"targetName": "Element 30",
"targetOriginal": {
"x": 563,
"y": 82
},
"vertices": [
{
"x": 0,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 9",
"placement": {
"height": 75,
"left": 61,
"top": 148,
"width": 178
},
"type": "parallelogram"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Recipes"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"direction": "both",
"path": "straight",
"size": {
"fixed": 1,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 216,
"y": 356.5
},
"target": {
"x": -1.0112359550561798,
"y": -0.012987012987012988
},
"targetName": "Element 6",
"targetOriginal": {
"x": 252,
"y": 357
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 7",
"placement": {
"height": 77,
"left": 38,
"top": 318,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-red"
},
"width": 4
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Queue"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"direction": "both",
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0,
"y": -1
},
"sourceOriginal": {
"x": 342,
"y": 223
},
"target": {
"x": 0.011235955056179775,
"y": 1.025974025974026
},
"targetName": "Element 6",
"targetOriginal": {
"x": 343,
"y": 317
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 8",
"placement": {
"height": 75,
"left": 253,
"top": 148,
"width": 178
},
"type": "parallelogram"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "#5b5959"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Data Pipeline"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 12",
"placement": {
"height": 77,
"left": 253,
"top": 455,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-orange"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Equipment"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"direction": "both",
"path": "straight",
"size": {
"fixed": 1,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": 0
},
"sourceOriginal": {
"x": 38,
"y": 493.5
},
"target": {
"x": -1.0224719101123596,
"y": 0.013333333333333334
},
"targetName": "Element 9",
"targetOriginal": {
"x": 59,
"y": 185
},
"vertices": [
{
"x": -0.9523809523809523,
"y": 0
},
{
"x": -0.9523809523809523,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 13",
"placement": {
"height": 77,
"left": 38,
"top": 455,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Subscription"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 14",
"placement": {
"height": 77,
"left": 563,
"top": 318,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "super-light-blue"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Filter"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 15",
"placement": {
"height": 36,
"left": 497,
"rotation": 90,
"top": 339,
"width": 97
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Query"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 16",
"placement": {
"height": 77,
"left": 563,
"top": 201,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "light-blue"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Mutation"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "dark-orange"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": 0
},
"sourceOriginal": {
"x": 563,
"y": 473.5
},
"target": {
"x": 1.0224719101123596,
"y": -0.45454545454545453
},
"targetName": "Element 6",
"targetOriginal": {
"x": 433,
"y": 374
},
"vertices": [
{
"x": 0.5692307692307692,
"y": 0
},
{
"x": 0.5692307692307692,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 17",
"placement": {
"height": 77,
"left": 563,
"top": 435,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-orange"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Initial State"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 989,
"y": 239.5
},
"target": {
"x": -0.96,
"y": 0.4583333333333333
},
"targetName": "Element 2",
"targetOriginal": {
"x": 1392,
"y": 487
},
"vertices": [
{
"x": 0.3349875930521092,
"y": 0
},
{
"x": 0.3349875930521092,
"y": 0.12727272727272726
},
{
"x": 0.826302729528536,
"y": 0.13131313131313133
},
{
"x": 0.826302729528536,
"y": 1
}
]
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 1,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": -0.5
},
"sourceOriginal": {
"x": 989,
"y": 258.75
},
"target": {
"x": -0.9761904761904762,
"y": 0.5263157894736842
},
"targetName": "Element 34",
"targetOriginal": {
"x": 1138,
"y": 290
},
"vertices": [
{
"x": 0.5033557046979866,
"y": 0
},
{
"x": 0.5033557046979866,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 18",
"placement": {
"height": 77,
"left": 811,
"top": 201,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-orange"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Subscriptions"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 19",
"placement": {
"height": 77,
"left": 811,
"top": 319,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-orange"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Controls"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": 0
},
"sourceOriginal": {
"x": 811,
"y": 473.5
},
"target": {
"x": 1.0112359550561798,
"y": 0.012987012987012988
},
"targetName": "Element 17",
"targetOriginal": {
"x": 742,
"y": 473
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 20",
"placement": {
"height": 77,
"left": 811,
"top": 435,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "super-light-blue"
}
},
"border": {
"color": {
"fixed": "orange"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Filter"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": -0.5
},
"sourceOriginal": {
"x": 1025.5,
"y": 382.25
},
"target": {
"x": -1.0238095238095237,
"y": 0.10526315789473684
},
"targetName": "Element 33",
"targetOriginal": {
"x": 1136,
"y": 384
}
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 1,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0.5
},
"sourceOriginal": {
"x": 1025.5,
"y": 333.75
},
"target": {
"x": -1,
"y": 0.05263157894736842
},
"targetName": "Element 34",
"targetOriginal": {
"x": 1137,
"y": 299
},
"vertices": [
{
"x": 0.5246636771300448,
"y": 0
},
{
"x": 0.5246636771300448,
"y": 1
}
]
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 1025.5,
"y": 358
},
"target": {
"x": -0.98,
"y": -0.10416666666666667
},
"targetName": "Element 2",
"targetOriginal": {
"x": 1391,
"y": 514
},
"vertices": [
{
"x": 0.7564979480164159,
"y": 0
},
{
"x": 0.7564979480164159,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 29",
"placement": {
"height": 36,
"left": 959,
"rotation": 270,
"top": 340,
"width": 97
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "green"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "text"
},
"size": 20,
"text": {
"fixed": "CAD"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"lineStyle": "dashed",
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 742,
"y": 81.5
},
"target": {
"x": -1.0112359550561798,
"y": -0.012987012987012988
},
"targetName": "Element 31",
"targetOriginal": {
"x": 811,
"y": 82
}
},
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"lineStyle": "dashed",
"path": "straight",
"radius": {
"fixed": 15,
"max": 200,
"min": 0
},
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 0,
"y": 1
},
"sourceOriginal": {
"x": 653,
"y": 43
},
"target": {
"x": -0.029411764705882353,
"y": 1
},
"targetName": "Element 38",
"targetOriginal": {
"x": 1673,
"y": 249
},
"vertices": [
{
"x": 0,
"y": -0.08737864077669903
},
{
"x": 1,
"y": -0.08737864077669903
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 30",
"placement": {
"height": 77,
"left": 564,
"top": 43,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "green"
},
"radius": 28,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "text"
},
"size": 20,
"text": {
"fixed": "Layout"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"radius": {
"fixed": 15,
"max": 200,
"min": 0
},
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 990,
"y": 81.5
},
"target": {
"x": -0.9939759036144579,
"y": 0.5061728395061729
},
"targetName": "Element 35",
"targetOriginal": {
"x": 1190,
"y": 138
},
"vertices": [
{
"x": 0.48,
"y": 0
},
{
"x": 0.48,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 31",
"placement": {
"height": 77,
"left": 812,
"top": 43,
"width": 178
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "semi-dark-red"
}
},
"border": {
"color": {
"fixed": "dark-orange"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Alerts"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 1221,
"y": 386
},
"target": {
"x": 1.0337078651685394,
"y": 0.45454545454545453
},
"targetName": "Element 20",
"targetOriginal": {
"x": 992,
"y": 456
},
"vertices": [
{
"x": -0.05240174672489083,
"y": 0
},
{
"x": -0.05240174672489083,
"y": 1
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 33",
"placement": {
"height": 38,
"left": 1137,
"top": 367,
"width": 84
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "light-green"
}
},
"border": {
"color": {
"fixed": "dark-green"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"fixed": "Jobs"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 34",
"placement": {
"height": 38,
"left": 1137,
"top": 281,
"width": 84
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "light-blue"
},
"size": 20,
"text": {
"fixed": "Viewport State"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": 0
},
"sourceOriginal": {
"x": 1189,
"y": 179
},
"target": {
"x": 0.19444444444444445,
"y": 0.9896907216494846
},
"targetName": "Element 29",
"targetOriginal": {
"x": 1011,
"y": 310
},
"vertices": [
{
"x": 1,
"y": 0
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 35",
"placement": {
"height": 162,
"left": 1189,
"top": 98,
"width": 332
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "semi-dark-blue"
},
"size": "contain"
},
"border": {
"color": {
"fixed": "transparent"
},
"radius": 0,
"width": 5
},
"config": {
"fill": {
"fixed": "#D9D9D9"
},
"path": {
"field": "",
"fixed": "img/icons/iot/pump.svg",
"mode": "fixed"
}
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 37",
"placement": {
"height": 96,
"left": 1601,
"top": 461,
"width": 100
},
"type": "icon"
},
{
"background": {
"color": {
"fixed": "#222020"
}
},
"border": {
"color": {
"fixed": "dark-green"
},
"radius": 0,
"width": 3
},
"config": {
"align": "center",
"color": {
"fixed": "text"
},
"size": 20,
"text": {
"fixed": "Library"
},
"valign": "middle"
},
"connections": [
{
"color": {
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": 0
},
"sourceOriginal": {
"x": 1574,
"y": 308
},
"target": {
"x": -0.4977168949771689,
"y": 1.017467248908297
},
"targetName": "Element 36",
"targetOriginal": {
"x": 1450,
"y": 384
},
"vertices": [
{
"x": 1,
"y": 0
}
]
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 38",
"placement": {
"height": 118,
"left": 1574,
"top": 249,
"width": 204
},
"type": "rectangle"
}
],
"name": "Element 1711297729286",
"placement": {
"height": 100,
"left": 0,
"rotation": 0,
"top": 0,
"width": 100
},
"type": "frame"
},
"showAdvancedTypes": true
},
"pluginVersion": "12.1.0-pre",
"targets": [
{
"datasource": {
"type": "grafana-testdata-datasource"
},
"refId": "A",
"scenarioId": "random_walk",
"seriesCount": 1
}
],
"title": "Flowcharting",
"type": "canvas"
},
{
"datasource": {
"type": "grafana-testdata-datasource"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": 0
},
{
"color": "#6ED0E0",
"value": 30
},
{
"color": "#EAB839",
"value": 50
},
{
"color": "#EF843C",
"value": 80
}
]
},
"unit": "none"
},
"overrides": []
},
"gridPos": {
"h": 11,
"w": 15,
"x": 5,
"y": 36
},
"id": 4,
"options": {
"infinitePan": false,
"inlineEditing": true,
"panZoom": false,
"root": {
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"elements": [
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": "gateway"
},
"valign": "middle"
},
"connections": [
{
"color": {
"field": "gateway",
"fixed": "rgb(204, 204, 220)"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 216,
"y": 192
},
"target": {
"x": -1.02020202020202,
"y": 0.041666666666666664
},
"targetName": "Element 2",
"targetOriginal": {
"x": 326,
"y": 191
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 1",
"placement": {
"height": 48,
"left": 117,
"top": 168,
"width": 99
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": "productpage"
},
"valign": "middle"
},
"connections": [
{
"color": {
"field": "product-details",
"fixed": "white"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 1
},
"sourceOriginal": {
"x": 426,
"y": 168
},
"target": {
"x": -0.9797979797979798,
"y": 0
},
"targetName": "Element 3",
"targetOriginal": {
"x": 549,
"y": 93
}
},
{
"color": {
"field": "product-reviews",
"fixed": "white"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": -1
},
"sourceOriginal": {
"x": 426,
"y": 216
},
"target": {
"x": -1,
"y": 0.041666666666666664
},
"targetName": "Element 4",
"targetOriginal": {
"x": 549,
"y": 297
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 2",
"placement": {
"height": 48,
"left": 327,
"top": 168,
"width": 99
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": "details"
},
"valign": "middle"
},
"connections": [
{
"color": {
"field": "details-checkout",
"fixed": "white"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 647,
"y": 93
},
"target": {
"x": -0.9595959595959596,
"y": -0.041666666666666664
},
"targetName": "Element 11",
"targetOriginal": {
"x": 799,
"y": 94
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 3",
"placement": {
"height": 48,
"left": 548,
"top": 69,
"width": 99
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": "reviews"
},
"valign": "middle"
},
"connections": [
{
"color": {
"field": "reviews-ratings",
"fixed": "white"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 0
},
"sourceOriginal": {
"x": 648,
"y": 298
},
"target": {
"x": -0.9797979797979798,
"y": 0.041666666666666664
},
"targetName": "Element 5",
"targetOriginal": {
"x": 798,
"y": 299
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 4",
"placement": {
"height": 48,
"left": 549,
"top": 274,
"width": 99
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": "ratings"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 5",
"placement": {
"height": 48,
"left": 797,
"top": 275,
"width": 99
},
"type": "rectangle"
},
{
"background": {
"color": {
"field": "gateway",
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"field": "gateway",
"fixed": "",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 6",
"placement": {
"height": 36,
"left": 235,
"top": 148,
"width": 64
},
"type": "metric-value"
},
{
"background": {
"color": {
"field": "product-details",
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"field": "product-details",
"fixed": "",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 7",
"placement": {
"height": 36,
"left": 415,
"top": 89,
"width": 64
},
"type": "metric-value"
},
{
"background": {
"color": {
"field": "product-reviews",
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"field": "product-reviews",
"fixed": "",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 8",
"placement": {
"height": 36,
"left": 411,
"top": 258,
"width": 64
},
"type": "metric-value"
},
{
"background": {
"color": {
"field": "reviews-ratings",
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"field": "reviews-ratings",
"fixed": "",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 9",
"placement": {
"height": 36,
"left": 686,
"top": 250,
"width": 64
},
"type": "metric-value"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "rgb(204, 204, 220)"
},
"size": 16,
"text": {
"fixed": "# of requests"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 10",
"placement": {
"height": 50,
"left": 13,
"top": 309,
"width": 100
},
"type": "text"
},
{
"background": {
"color": {
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"text": {
"fixed": "checkout"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 11",
"placement": {
"height": 48,
"left": 797,
"top": 68,
"width": 99
},
"type": "rectangle"
},
{
"background": {
"color": {
"field": "details-checkout",
"fixed": "#D9D9D9"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"size": 20,
"text": {
"field": "details-checkout",
"fixed": "0",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 12",
"placement": {
"height": 36,
"left": 684,
"top": 47,
"width": 63.99609375
},
"type": "metric-value"
}
],
"name": "Element 1672955773575",
"placement": {
"height": 100,
"left": 0,
"rotation": 0,
"top": 0,
"width": 100
},
"type": "frame"
},
"showAdvancedTypes": false
},
"pluginVersion": "12.1.0-pre",
"targets": [
{
"csvContent": "gateway, product-details, product-reviews, reviews-ratings, details-checkout\n100, 56, 44, 22, 28",
"datasource": {
"type": "grafana-testdata-datasource"
},
"refId": "A",
"scenarioId": "csv_content"
}
],
"title": "Service Graph",
"type": "canvas"
},
{
"datasource": {
"type": "grafana-testdata-datasource"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"links": [
{
"targetBlank": true,
"title": "Show details",
"url": "https://ops.grafana-ops.net/d/fU-WBSqWz/synthetic-monitoring-summary?orgId=1"
}
],
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": 0
},
{
"color": "blue",
"value": 30
},
{
"color": "#EAB839",
"value": 50
},
{
"color": "red",
"value": 80
}
]
},
"unit": "percent"
},
"overrides": []
},
"gridPos": {
"h": 15,
"w": 15,
"x": 5,
"y": 47
},
"id": 2,
"options": {
"infinitePan": false,
"inlineEditing": false,
"panZoom": false,
"root": {
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"elements": [
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"blinkRate": {
"fixed": 2
},
"bulbColor": {
"field": "server_database"
},
"statusColor": {
"field": "server_region"
},
"type": "Single"
},
"connections": [
{
"color": {
"field": "server_database2",
"fixed": "white"
},
"direction": {
"field": "server_database2",
"mode": "field"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": -1
},
"sourceOriginal": {
"x": 468,
"y": 125
},
"target": {
"x": 0.030303030303030304,
"y": 0
},
"targetName": "Element 11",
"targetOriginal": {
"x": 285,
"y": 170
}
},
{
"color": {
"field": "server_database",
"fixed": "white"
},
"direction": {
"field": "server_database",
"mode": "field"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": -1
},
"sourceOriginal": {
"x": 468,
"y": 125
},
"target": {
"x": 0.09090909090909091,
"y": 0.16666666666666666
},
"targetName": "Element 12",
"targetOriginal": {
"x": 390,
"y": 264
}
},
{
"color": {
"field": "server_region",
"fixed": "white"
},
"direction": {
"field": "server_region",
"mode": "field"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": -1
},
"sourceOriginal": {
"x": 588.9921875,
"y": 125
},
"target": {
"x": -0.2727272727272727,
"y": 0.4166666666666667
},
"targetName": "Element 13",
"targetOriginal": {
"x": 658,
"y": 259
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "server 1",
"placement": {
"height": 114,
"left": 468,
"top": 11,
"width": 120.99609375
},
"type": "server"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"blinkRate": {
"fixed": 0.25
},
"bulbColor": {
"field": "database_server"
},
"statusColor": {
"field": "database_server"
},
"type": "Database"
},
"connections": [
{
"color": {
"field": "database_server",
"fixed": "white"
},
"direction": {
"field": "database_server",
"mode": "field"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 1
},
"sourceOriginal": {
"x": 328,
"y": 367
},
"target": {
"x": -0.15151515151515152,
"y": -0.16666666666666666
},
"targetName": "Element 12",
"targetOriginal": {
"x": 386,
"y": 268
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "database 1",
"placement": {
"height": 114,
"left": 207,
"top": 367,
"width": 121
},
"type": "server"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"blinkRate": {
"fixed": 0.5
},
"bulbColor": {
"field": "region_server"
},
"statusColor": {
"field": "region_server"
},
"type": "Terminal"
},
"connections": [
{
"color": {
"field": "region_server",
"fixed": "white"
},
"direction": {
"field": "region_server",
"mode": "field"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": -1,
"y": 1
},
"sourceOriginal": {
"x": 699,
"y": 365
},
"target": {
"x": -0.21212121212121213,
"y": 0.08333333333333333
},
"targetName": "Element 13",
"targetOriginal": {
"x": 659,
"y": 263
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "region 1",
"placement": {
"height": 116,
"left": 699,
"top": 365,
"width": 121
},
"type": "server"
},
{
"background": {
"color": {
"fixed": "transparent"
},
"image": {
"field": "",
"fixed": "",
"mode": "fixed"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"field": "database_server",
"fixed": "#ffffff"
},
"size": 20,
"text": {
"field": "database_server",
"fixed": "10%",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 4",
"placement": {
"height": 29,
"left": 318,
"top": 282,
"width": 46
},
"type": "text"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"blinkRate": {
"fixed": 0.5,
"mode": "mod"
},
"bulbColor": {
"field": "database2_server"
},
"statusColor": {
"field": "database2_server"
},
"type": "Database"
},
"connections": [
{
"color": {
"field": "database2_server",
"fixed": "white"
},
"direction": {
"field": "database2_server",
"mode": "field"
},
"path": "straight",
"size": {
"fixed": 2,
"max": 10,
"min": 1
},
"source": {
"x": 1,
"y": 1
},
"sourceOriginal": {
"x": 172,
"y": 191
},
"target": {
"x": -0.3333333333333333,
"y": 0
},
"targetName": "Element 11",
"targetOriginal": {
"x": 279,
"y": 170
}
}
],
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "database 2",
"placement": {
"height": 116,
"left": 55,
"top": 191,
"width": 117
},
"type": "server"
},
{
"background": {
"color": {
"fixed": "transparent"
},
"image": {
"field": "",
"fixed": "",
"mode": "fixed"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"field": "server_database",
"fixed": "#ffffff"
},
"size": 20,
"text": {
"field": "server_database",
"fixed": "10%",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 6",
"placement": {
"height": 29,
"left": 382,
"top": 162,
"width": 46
},
"type": "text"
},
{
"background": {
"color": {
"fixed": "transparent"
},
"image": {
"field": "",
"fixed": "",
"mode": "fixed"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"field": "database2_server",
"fixed": "#ffffff"
},
"size": 20,
"text": {
"field": "database2_server",
"fixed": "10%",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 7",
"placement": {
"height": 29,
"left": 206,
"top": 138,
"width": 46
},
"type": "text"
},
{
"background": {
"color": {
"fixed": "transparent"
},
"image": {
"field": "",
"fixed": "",
"mode": "fixed"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"field": "server_database2",
"fixed": "#ffffff"
},
"size": 20,
"text": {
"field": "server_database2",
"fixed": "10%",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 8",
"placement": {
"height": 29,
"left": 374,
"top": 103,
"width": 46
},
"type": "text"
},
{
"background": {
"color": {
"fixed": "transparent"
},
"image": {
"field": "",
"fixed": "",
"mode": "fixed"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"field": "server_region",
"fixed": "#ffffff"
},
"size": 20,
"text": {
"field": "server_region",
"fixed": "10%",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 9",
"placement": {
"height": 29,
"left": 625,
"top": 158,
"width": 46
},
"type": "text"
},
{
"background": {
"color": {
"fixed": "transparent"
},
"image": {
"field": "",
"fixed": "",
"mode": "fixed"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"field": "region_server",
"fixed": "#ffffff"
},
"size": 20,
"text": {
"field": "region_server",
"fixed": "10%",
"mode": "field"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 10",
"placement": {
"height": 29,
"left": 676,
"top": 282,
"width": 46
},
"type": "text"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 11",
"placement": {
"height": 24,
"left": 268,
"top": 158,
"width": 33
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 12",
"placement": {
"height": 24,
"left": 372,
"top": 254,
"width": 33
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"fixed": "#000000"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 13",
"placement": {
"height": 24,
"left": 646,
"top": 252,
"width": 33
},
"type": "rectangle"
},
{
"background": {
"color": {
"fixed": "transparent"
},
"image": {
"field": "",
"fixed": "",
"mode": "fixed"
}
},
"border": {
"color": {
"fixed": "dark-green"
}
},
"config": {
"align": "center",
"color": {
"field": "server_region",
"fixed": "#ffffff"
},
"size": 20,
"text": {
"field": "server_database2",
"fixed": "Server 1",
"mode": "fixed"
},
"valign": "middle"
},
"constraint": {
"horizontal": "left",
"vertical": "top"
},
"name": "Element 14",
"placement": {
"height": 29,
"left": 483,
"top": 138,
"width": 89
},
"type": "text"
}
],
"name": "Element 1672954602175",
"placement": {
"height": 100,
"left": 0,
"top": 0,
"width": 100
},
"type": "frame"
},
"showAdvancedTypes": false
},
"pluginVersion": "12.1.0-pre",
"targets": [
{
"csvContent": "database_server, server_database, server_region, region_server, database2_server, server_database2\n10, 53, 35, 12, 22, 81",
"datasource": {
"type": "grafana-testdata-datasource"
},
"refId": "A",
"scenarioId": "csv_content"
}
],
"title": "Network Weathermap",
"type": "canvas"
}
],
"preload": false,
"refresh": "",
"schemaVersion": 42,
"tags": [
"gdev",
"panel-tests",
"canvas"
],
"templating": {
"list": []
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Panel Tests - Canvas Connection Examples",
"uid": "Pu8lwQAVz",
"weekStart": ""
} | json | github | https://github.com/grafana/grafana | apps/dashboard/pkg/migration/conversion/testdata/input/migrated_dev_dashboards/panel-canvas/v1beta1.canvas-connection-examples.v42.json |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Rename ``is_active`` to ``is_stale`` column in ``dag`` table.
Revision ID: 959e216a3abb
Revises: 0e9519b56710
Create Date: 2025-04-09 17:11:08.379065
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
revision = "959e216a3abb"
down_revision = "0e9519b56710"
branch_labels = None
depends_on = None
airflow_version = "3.0.0"
def upgrade():
    """Rename is_active to is_stale column in DAG table."""
    # batch_alter_table performs the column rename via Alembic's batch mode,
    # which emulates ALTER COLUMN on backends (notably SQLite) that cannot
    # alter a column in place.
    with op.batch_alter_table("dag", schema=None) as batch_op:
        batch_op.alter_column("is_active", new_column_name="is_stale", type_=sa.Boolean)
    # The semantics invert with the rename: a DAG that was "active" is not
    # "stale", so every value must be flipped after the column is renamed.
    # This must run AFTER the batch block so the new column name exists.
    op.execute("UPDATE dag SET is_stale = NOT is_stale")
def downgrade():
    """Revert renaming of is_active to is_stale column in DAG table."""
    # Mirror of upgrade(): rename the column back first, then flip the
    # values to restore the original "active" semantics.
    with op.batch_alter_table("dag", schema=None) as batch_op:
        batch_op.alter_column("is_stale", new_column_name="is_active", type_=sa.Boolean)
    # Invert the boolean back; must run after the rename above.
    op.execute("UPDATE dag SET is_active = NOT is_active")
# A simple program that resembles the falling of stars or snow on a screen
# Coded in Python 2.7.10 with PyGame
# by Brett Burley-Inners :: 11/7/2015
import pygame, time, random, sys
pygame.init()
# Default dimensions of the game window, in pixels
display_width = 320
display_height = 240
# Create the surface (canvas) everything in the game is drawn onto
gameScreen = pygame.display.set_mode((display_width, display_height))
# Title shown in the game window's title bar
pygame.display.set_caption('Avoid the Falling Stuff')
# The player: a steerable square pinned near the bottom of the screen.
class Player:
    """A square the user slides left and right to dodge falling stars."""

    def __init__(self, playerSize, xPosition, yPosition, playerColor, display_width):
        self.xPosition = xPosition
        self.yPosition = yPosition
        self.playerSize = playerSize
        self.playerColor = playerColor
        self.display_width = display_width
        self._draw()

    def _draw(self):
        # Paint the square at its current coordinates.
        pygame.draw.rect(
            gameScreen,
            self.playerColor,
            [self.xPosition, self.yPosition, self.playerSize, self.playerSize],
        )

    def getPlayerSize(self):
        """Side length of the square, in pixels."""
        return self.playerSize

    def getPlayerX(self):
        """Current left edge (x) of the square."""
        return self.xPosition

    def getPlayerY(self):
        """Current top edge (y) of the square."""
        return self.yPosition

    def redrawPlayer(self, newXPosition):
        """Move horizontally to newXPosition and repaint there."""
        self.xPosition = newXPosition
        self._draw()

    def isOverLeftBound(self):
        """True once the square touches or passes the screen's left edge."""
        return self.xPosition <= 0

    def isOverRightBound(self):
        """True once the square touches or passes the screen's right edge."""
        return self.xPosition >= self.display_width - self.playerSize
# Class that creates a star object
class Star:
    """A falling square obstacle.

    Each frame, fall() moves the star down (and sideways by fallDirection),
    draws it, and — once it drops past the bottom of the screen — removes it
    from the module-level fallingStars list and banks one point.
    """

    def __init__(self, starSize, xCoordinate, yCoordinate, starColor, fallSpeed, fallDirection, score):
        self.starSize = starSize
        self.xCoordinate = xCoordinate
        self.yCoordinate = yCoordinate
        self.starColor = starColor
        self.fallSpeed = fallSpeed
        self.fallDirection = fallDirection
        # NOTE: the incoming ``score`` argument is deliberately ignored and
        # every star starts its own tally at zero; the parameter is kept
        # only for backward compatibility with existing callers.
        self.score = 0

    def fall(self):
        """Advance one frame: move, draw, and retire off-screen stars."""
        self.yCoordinate += self.fallSpeed
        self.xCoordinate += self.fallDirection
        pygame.draw.rect(gameScreen, self.starColor, [self.xCoordinate, self.yCoordinate, self.starSize, self.starSize])
        if self.yCoordinate > display_height:
            # The star survived to the bottom: drop it from the live list
            # and award the player one point (read back via returnScore()).
            fallingStars.remove(self)
            self.score += 1

    def returnScore(self):
        """Points this star has earned (0 until it falls off screen)."""
        return self.score

    def collideWithPlayer(self, objectX, objectY, objectSize):
        """Return True iff this star's box overlaps the square object at
        (objectX, objectY) with side objectSize.

        Bug fix: the original implementation fell off the end and returned
        None (instead of False) when the boxes overlapped vertically but
        missed horizontally; this version always returns a bool.
        """
        overlaps_vertically = (self.yCoordinate + self.starSize >= objectY
                               and self.yCoordinate <= objectY + objectSize)
        overlaps_horizontally = (self.xCoordinate <= objectX + objectSize
                                 and self.xCoordinate + self.starSize >= objectX)
        return overlaps_vertically and overlaps_horizontally
# Font for the on-screen score read-out.
font = pygame.font.SysFont(None, 25)

# Named RGB colors used throughout the game.
white = (255, 255, 255)
darkGray = (50, 50, 50)
darkerGray = (25, 25, 25)
darkestGray = (10, 10, 10)
lightGray = (150, 150, 150)
rLightGray = (200, 200, 200)
rrLightGray = (220, 220, 220)
black = (0, 0, 0)
red = (245, 0, 0)
darkRed = (150, 0, 0)
green = (0, 235, 0)
darkGreen = (0, 150, 0)
lightBlue = (55, 210, 225)
blue = (0, 0, 215)
darkBlue = (0, 0, 115)
pink = (225, 55, 135)

# Palette the falling stars are randomly drawn from.
colorList = [
    darkerGray,
    darkestGray,
    lightGray,
    rLightGray,
    rrLightGray,
    lightBlue,
]

# Game clock used to cap the frame rate.
clock = pygame.time.Clock()

# Live star objects currently on screen.
fallingStars = []
clockTickTimer = 0

# Flags driving the main loop and star spawning.
RUNNING = True
makeStars = True

score = 0
xChange = 0
xPosition = display_width / 2
size = 20

# Holding an arrow key repeats KEYDOWN events, so the player keeps sliding.
pygame.key.set_repeat(1, 5)

player = Player(30, xPosition, display_height - 50, pink, display_width)
# Edge flags are read inside the event handler below, but the original code
# only assigned them later in the frame — so the very first keypress raised
# a NameError.  Seed them from the player's starting position instead.
isOverLeftBound = player.isOverLeftBound()
isOverRightBound = player.isOverRightBound()

# Main loop to run the game
while RUNNING:
    # Cap the refresh rate at 30 frames per second.
    clock.tick(30)

    # Handle window close and arrow-key movement.
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            # Fix: the original set an undefined name (starFall); stop the
            # loop flag, shut pygame down cleanly, then exit.
            RUNNING = False
            pygame.quit()
            sys.exit()
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_LEFT and not isOverLeftBound:
                xChange -= 10
            if event.key == pygame.K_RIGHT and not isOverRightBound:
                xChange += 10

    # Background color, first thing drawn each frame.
    gameScreen.fill(darkGray)

    # Draw the current score in the top-left corner.
    font = pygame.font.SysFont("monospace", 25)
    message = font.render(str(score), True, lightGray)
    gameScreen.blit(message, (15, 15))

    clockTickTimer += 1

    # Apply this frame's movement, repaint the player, and refresh the
    # edge flags for the next frame's input handling.
    xPosition += xChange
    player.redrawPlayer(xPosition)
    isOverLeftBound = player.isOverLeftBound()
    isOverRightBound = player.isOverRightBound()
    xChange = 0

    # Spawn a new star roughly every 20 ticks.
    if makeStars and clockTickTimer > 20:
        fallingStars.append(Star(random.randrange(1, 20),
                                 random.randrange(1, display_width),
                                 -5,
                                 colorList[random.randrange(0, 6)],
                                 random.randrange(1, 2),
                                 random.randrange(-1, 2)/2,
                                 score))
        clockTickTimer = 0

    # Advance every star.  Iterate over a COPY of the list because
    # Star.fall() removes off-screen stars from fallingStars — mutating a
    # list while iterating it skips elements.
    for i in list(fallingStars):
        i.fall()
        score += i.returnScore()

        # Bound the list size for the computer's sake.
        if len(fallingStars) > 10000:
            del fallingStars[0]

        if i.collideWithPlayer(player.getPlayerX(), player.getPlayerY(), player.getPlayerSize()):
            # Player was hit: stop spawning and clear the remaining stars.
            makeStars = False
            del fallingStars[:]
            break

    # Push this frame to the display.
    pygame.display.update()
# That's all, folks!
from __future__ import with_statement
from docopt import (docopt, DocoptExit, DocoptLanguageError,
Option, Argument, Command, OptionsShortcut,
Required, Optional, Either, OneOrMore,
parse_argv, parse_pattern, parse_section,
parse_defaults, formal_usage, Tokens, transform
)
from pytest import raises
def test_pattern_flat():
    """flat() yields leaf patterns in order; with a type filter it returns
    only nodes of that type (the OptionsShortcut node itself, unexpanded)."""
    assert Required(OneOrMore(Argument('N')),
                    Option('-a'), Argument('M')).flat() == \
        [Argument('N'), Option('-a'), Argument('M')]
    assert Required(Optional(OptionsShortcut()),
                    Optional(Option('-a', None))).flat(OptionsShortcut) == \
        [OptionsShortcut()]
def test_option():
    """Option.parse() reads one option-description line: short and/or long
    form, an optional argument placeholder, and a case-insensitive
    [default: ...] trailer; surrounding description text is ignored."""
    # Short, long, and combined forms (comma-separated or space-separated).
    assert Option.parse('-h') == Option('-h', None)
    assert Option.parse('--help') == Option(None, '--help')
    assert Option.parse('-h --help') == Option('-h', '--help')
    assert Option.parse('-h, --help') == Option('-h', '--help')
    # A placeholder after the option marks it as taking one argument.
    assert Option.parse('-h TOPIC') == Option('-h', None, 1)
    assert Option.parse('--help TOPIC') == Option(None, '--help', 1)
    assert Option.parse('-h TOPIC --help TOPIC') == Option('-h', '--help', 1)
    assert Option.parse('-h TOPIC, --help TOPIC') == Option('-h', '--help', 1)
    assert Option.parse('-h TOPIC, --help=TOPIC') == Option('-h', '--help', 1)
    # Free-text description after the forms is ignored.
    assert Option.parse('-h Description...') == Option('-h', None)
    assert Option.parse('-h --help Description...') == Option('-h', '--help')
    assert Option.parse('-h TOPIC Description...') == Option('-h', None, 1)
    # Leading whitespace is tolerated.
    assert Option.parse(' -h') == Option('-h', None)
    # [default: ...] supplies the option's default value.
    assert Option.parse('-h TOPIC Descripton... [default: 2]') == \
        Option('-h', None, 1, '2')
    assert Option.parse('-h TOPIC Descripton... [default: topic-1]') == \
        Option('-h', None, 1, 'topic-1')
    assert Option.parse('--help=TOPIC ... [default: 3.14]') == \
        Option(None, '--help', 1, '3.14')
    assert Option.parse('-h, --help=DIR ... [default: ./]') == \
        Option('-h', '--help', 1, "./")
    # The [default: ...] keyword is matched case-insensitively.
    assert Option.parse('-h TOPIC Descripton... [dEfAuLt: 2]') == \
        Option('-h', None, 1, '2')
def test_option_name():
    """An option's canonical name prefers the long form when present."""
    assert Option('-h', None).name == '-h'
    assert Option(None, '--help').name == '--help'
    assert Option('-h', '--help').name == '--help'
def test_commands():
    """Commands parse to True when present on argv, False otherwise."""
    cases = [
        ('Usage: prog add', 'add', {'add': True}),
        ('Usage: prog [add]', '', {'add': False}),
        ('Usage: prog [add]', 'add', {'add': True}),
        ('Usage: prog (add|rm)', 'add', {'add': True, 'rm': False}),
        ('Usage: prog (add|rm)', 'rm', {'add': False, 'rm': True}),
        ('Usage: prog a b', 'a b', {'a': True, 'b': True}),
    ]
    for doc, argv, expected in cases:
        assert docopt(doc, argv) == expected
    # Commands are positional: supplying them out of order is an error.
    with raises(DocoptExit):
        docopt('Usage: prog a b', 'b a')
def test_formal_usage():
    """formal_usage() drops the leading 'Usage:' and program name from each
    usage line and joins the alternative patterns with '|'."""
    doc = """
    Usage: prog [-hv] ARG
           prog N M
    prog is a program."""
    usage, = parse_section('usage:', doc)
    assert usage == "Usage: prog [-hv] ARG\n           prog N M"
    assert formal_usage(usage) == "( [-hv] ARG ) | ( N M )"
def test_parse_argv():
o = [Option('-h'), Option('-v', '--verbose'), Option('-f', '--file', 1)]
TS = lambda s: Tokens(s, error=DocoptExit)
assert parse_argv(TS(''), options=o) == []
assert parse_argv(TS('-h'), options=o) == [Option('-h', None, 0, True)]
assert parse_argv(TS('-h --verbose'), options=o) == \
[Option('-h', None, 0, True), Option('-v', '--verbose', 0, True)]
assert parse_argv(TS('-h --file f.txt'), options=o) == \
[Option('-h', None, 0, True), Option('-f', '--file', 1, 'f.txt')]
assert parse_argv(TS('-h --file f.txt arg'), options=o) == \
[Option('-h', None, 0, True),
Option('-f', '--file', 1, 'f.txt'),
Argument(None, 'arg')]
assert parse_argv(TS('-h --file f.txt arg arg2'), options=o) == \
[Option('-h', None, 0, True),
Option('-f', '--file', 1, 'f.txt'),
Argument(None, 'arg'),
Argument(None, 'arg2')]
assert parse_argv(TS('-h arg -- -v'), options=o) == \
[Option('-h', None, 0, True),
Argument(None, 'arg'),
Argument(None, '--'),
Argument(None, '-v')]
def test_parse_pattern():
o = [Option('-h'), Option('-v', '--verbose'), Option('-f', '--file', 1)]
assert parse_pattern('[ -h ]', options=o) == \
Required(Optional(Option('-h')))
assert parse_pattern('[ ARG ... ]', options=o) == \
Required(Optional(OneOrMore(Argument('ARG'))))
assert parse_pattern('[ -h | -v ]', options=o) == \
Required(Optional(Either(Option('-h'),
Option('-v', '--verbose'))))
assert parse_pattern('( -h | -v [ --file <f> ] )', options=o) == \
Required(Required(
Either(Option('-h'),
Required(Option('-v', '--verbose'),
Optional(Option('-f', '--file', 1, None))))))
assert parse_pattern('(-h|-v[--file=<f>]N...)', options=o) == \
Required(Required(Either(Option('-h'),
Required(Option('-v', '--verbose'),
Optional(Option('-f', '--file', 1, None)),
OneOrMore(Argument('N'))))))
assert parse_pattern('(N [M | (K | L)] | O P)', options=[]) == \
Required(Required(Either(
Required(Argument('N'),
Optional(Either(Argument('M'),
Required(Either(Argument('K'),
Argument('L')))))),
Required(Argument('O'), Argument('P')))))
assert parse_pattern('[ -h ] [N]', options=o) == \
Required(Optional(Option('-h')),
Optional(Argument('N')))
assert parse_pattern('[options]', options=o) == \
Required(Optional(OptionsShortcut()))
assert parse_pattern('[options] A', options=o) == \
Required(Optional(OptionsShortcut()),
Argument('A'))
assert parse_pattern('-v [options]', options=o) == \
Required(Option('-v', '--verbose'),
Optional(OptionsShortcut()))
assert parse_pattern('ADD', options=o) == Required(Argument('ADD'))
assert parse_pattern('<add>', options=o) == Required(Argument('<add>'))
assert parse_pattern('add', options=o) == Required(Command('add'))
def test_option_match():
assert Option('-a').match([Option('-a', value=True)]) == \
(True, [], [Option('-a', value=True)])
assert Option('-a').match([Option('-x')]) == (False, [Option('-x')], [])
assert Option('-a').match([Argument('N')]) == (False, [Argument('N')], [])
assert Option('-a').match([Option('-x'), Option('-a'), Argument('N')]) == \
(True, [Option('-x'), Argument('N')], [Option('-a')])
assert Option('-a').match([Option('-a', value=True), Option('-a')]) == \
(True, [Option('-a')], [Option('-a', value=True)])
def test_argument_match():
assert Argument('N').match([Argument(None, 9)]) == \
(True, [], [Argument('N', 9)])
assert Argument('N').match([Option('-x')]) == (False, [Option('-x')], [])
assert Argument('N').match([Option('-x'),
Option('-a'),
Argument(None, 5)]) == \
(True, [Option('-x'), Option('-a')], [Argument('N', 5)])
assert Argument('N').match([Argument(None, 9), Argument(None, 0)]) == \
(True, [Argument(None, 0)], [Argument('N', 9)])
def test_command_match():
assert Command('c').match([Argument(None, 'c')]) == \
(True, [], [Command('c', True)])
assert Command('c').match([Option('-x')]) == (False, [Option('-x')], [])
assert Command('c').match([Option('-x'),
Option('-a'),
Argument(None, 'c')]) == \
(True, [Option('-x'), Option('-a')], [Command('c', True)])
assert Either(Command('add', False), Command('rm', False)).match(
[Argument(None, 'rm')]) == (True, [], [Command('rm', True)])
def test_optional_match():
assert Optional(Option('-a')).match([Option('-a')]) == \
(True, [], [Option('-a')])
assert Optional(Option('-a')).match([]) == (True, [], [])
assert Optional(Option('-a')).match([Option('-x')]) == \
(True, [Option('-x')], [])
assert Optional(Option('-a'), Option('-b')).match([Option('-a')]) == \
(True, [], [Option('-a')])
assert Optional(Option('-a'), Option('-b')).match([Option('-b')]) == \
(True, [], [Option('-b')])
assert Optional(Option('-a'), Option('-b')).match([Option('-x')]) == \
(True, [Option('-x')], [])
assert Optional(Argument('N')).match([Argument(None, 9)]) == \
(True, [], [Argument('N', 9)])
assert Optional(Option('-a'), Option('-b')).match(
[Option('-b'), Option('-x'), Option('-a')]) == \
(True, [Option('-x')], [Option('-a'), Option('-b')])
def test_required_match():
assert Required(Option('-a')).match([Option('-a')]) == \
(True, [], [Option('-a')])
assert Required(Option('-a')).match([]) == (False, [], [])
assert Required(Option('-a')).match([Option('-x')]) == \
(False, [Option('-x')], [])
assert Required(Option('-a'), Option('-b')).match([Option('-a')]) == \
(False, [Option('-a')], [])
def test_either_match():
assert Either(Option('-a'), Option('-b')).match(
[Option('-a')]) == (True, [], [Option('-a')])
assert Either(Option('-a'), Option('-b')).match(
[Option('-a'), Option('-b')]) == \
(True, [Option('-b')], [Option('-a')])
assert Either(Option('-a'), Option('-b')).match(
[Option('-x')]) == (False, [Option('-x')], [])
assert Either(Option('-a'), Option('-b'), Option('-c')).match(
[Option('-x'), Option('-b')]) == \
(True, [Option('-x')], [Option('-b')])
assert Either(Argument('M'),
Required(Argument('N'), Argument('M'))).match(
[Argument(None, 1), Argument(None, 2)]) == \
(True, [], [Argument('N', 1), Argument('M', 2)])
def test_one_or_more_match():
assert OneOrMore(Argument('N')).match([Argument(None, 9)]) == \
(True, [], [Argument('N', 9)])
assert OneOrMore(Argument('N')).match([]) == (False, [], [])
assert OneOrMore(Argument('N')).match([Option('-x')]) == \
(False, [Option('-x')], [])
assert OneOrMore(Argument('N')).match(
[Argument(None, 9), Argument(None, 8)]) == (
True, [], [Argument('N', 9), Argument('N', 8)])
assert OneOrMore(Argument('N')).match(
[Argument(None, 9), Option('-x'), Argument(None, 8)]) == (
True, [Option('-x')], [Argument('N', 9), Argument('N', 8)])
assert OneOrMore(Option('-a')).match(
[Option('-a'), Argument(None, 8), Option('-a')]) == \
(True, [Argument(None, 8)], [Option('-a'), Option('-a')])
assert OneOrMore(Option('-a')).match([Argument(None, 8),
Option('-x')]) == \
(False, [Argument(None, 8), Option('-x')], [])
assert OneOrMore(Required(Option('-a'), Argument('N'))).match(
[Option('-a'), Argument(None, 1), Option('-x'),
Option('-a'), Argument(None, 2)]) == \
(True, [Option('-x')],
[Option('-a'), Argument('N', 1), Option('-a'), Argument('N', 2)])
assert OneOrMore(Optional(Argument('N'))).match([Argument(None, 9)]) == \
(True, [], [Argument('N', 9)])
def test_list_argument_match():
assert Required(Argument('N'), Argument('N')).fix().match(
[Argument(None, '1'), Argument(None, '2')]) == \
(True, [], [Argument('N', ['1', '2'])])
assert OneOrMore(Argument('N')).fix().match(
[Argument(None, '1'), Argument(None, '2'), Argument(None, '3')]) == \
(True, [], [Argument('N', ['1', '2', '3'])])
assert Required(Argument('N'), OneOrMore(Argument('N'))).fix().match(
[Argument(None, '1'), Argument(None, '2'), Argument(None, '3')]) == \
(True, [], [Argument('N', ['1', '2', '3'])])
assert Required(Argument('N'), Required(Argument('N'))).fix().match(
[Argument(None, '1'), Argument(None, '2')]) == \
(True, [], [Argument('N', ['1', '2'])])
def test_basic_pattern_matching():
# ( -a N [ -x Z ] )
pattern = Required(Option('-a'), Argument('N'),
Optional(Option('-x'), Argument('Z')))
# -a N
assert pattern.match([Option('-a'), Argument(None, 9)]) == \
(True, [], [Option('-a'), Argument('N', 9)])
# -a -x N Z
assert pattern.match([Option('-a'), Option('-x'),
Argument(None, 9), Argument(None, 5)]) == \
(True, [], [Option('-a'), Argument('N', 9),
Option('-x'), Argument('Z', 5)])
# -x N Z # BZZ!
assert pattern.match([Option('-x'),
Argument(None, 9),
Argument(None, 5)]) == \
(False, [Option('-x'), Argument(None, 9), Argument(None, 5)], [])
def test_pattern_either():
assert transform(Option('-a')) == Either(Required(Option('-a')))
assert transform(Argument('A')) == Either(Required(Argument('A')))
assert transform(Required(Either(Option('-a'), Option('-b')),
Option('-c'))) == \
Either(Required(Option('-a'), Option('-c')),
Required(Option('-b'), Option('-c')))
assert transform(Optional(Option('-a'), Either(Option('-b'),
Option('-c')))) == \
Either(Required(Option('-b'), Option('-a')),
Required(Option('-c'), Option('-a')))
assert transform(Either(Option('-x'),
Either(Option('-y'), Option('-z')))) == \
Either(Required(Option('-x')),
Required(Option('-y')),
Required(Option('-z')))
assert transform(OneOrMore(Argument('N'), Argument('M'))) == \
Either(Required(Argument('N'), Argument('M'),
Argument('N'), Argument('M')))
def test_pattern_fix_repeating_arguments():
assert Option('-a').fix_repeating_arguments() == Option('-a')
assert Argument('N', None).fix_repeating_arguments() == Argument('N', None)
assert Required(Argument('N'),
Argument('N')).fix_repeating_arguments() == \
Required(Argument('N', []), Argument('N', []))
assert Either(Argument('N'),
OneOrMore(Argument('N'))).fix() == \
Either(Argument('N', []), OneOrMore(Argument('N', [])))
def test_set():
assert Argument('N') == Argument('N')
assert set([Argument('N'), Argument('N')]) == set([Argument('N')])
def test_pattern_fix_identities_1():
pattern = Required(Argument('N'), Argument('N'))
assert pattern.children[0] == pattern.children[1]
assert pattern.children[0] is not pattern.children[1]
pattern.fix_identities()
assert pattern.children[0] is pattern.children[1]
def test_pattern_fix_identities_2():
pattern = Required(Optional(Argument('X'), Argument('N')), Argument('N'))
assert pattern.children[0].children[1] == pattern.children[1]
assert pattern.children[0].children[1] is not pattern.children[1]
pattern.fix_identities()
assert pattern.children[0].children[1] is pattern.children[1]
def test_long_options_error_handling():
# with raises(DocoptLanguageError):
# docopt('Usage: prog --non-existent', '--non-existent')
# with raises(DocoptLanguageError):
# docopt('Usage: prog --non-existent')
with raises(DocoptExit):
docopt('Usage: prog', '--non-existent')
with raises(DocoptExit):
docopt('Usage: prog [--version --verbose]\n'
'Options: --version\n --verbose', '--ver')
with raises(DocoptLanguageError):
docopt('Usage: prog --long\nOptions: --long ARG')
with raises(DocoptExit):
docopt('Usage: prog --long ARG\nOptions: --long ARG', '--long')
with raises(DocoptLanguageError):
docopt('Usage: prog --long=ARG\nOptions: --long')
with raises(DocoptExit):
docopt('Usage: prog --long\nOptions: --long', '--long=ARG')
def test_short_options_error_handling():
with raises(DocoptLanguageError):
docopt('Usage: prog -x\nOptions: -x this\n -x that')
# with raises(DocoptLanguageError):
# docopt('Usage: prog -x')
with raises(DocoptExit):
docopt('Usage: prog', '-x')
with raises(DocoptLanguageError):
docopt('Usage: prog -o\nOptions: -o ARG')
with raises(DocoptExit):
docopt('Usage: prog -o ARG\nOptions: -o ARG', '-o')
def test_matching_paren():
with raises(DocoptLanguageError):
docopt('Usage: prog [a [b]')
with raises(DocoptLanguageError):
docopt('Usage: prog [a [b] ] c )')
def test_allow_double_dash():
    """Everything after a literal ``--`` in argv is treated as positional."""
    # Fixed 'kptions' -> 'options' (typo in the section header). The
    # expected results are unchanged: ``-o`` is already declared in the
    # usage pattern, so the options section only adds its description.
    assert docopt('usage: prog [-o] [--] <arg>\noptions: -o',
                  '-- -o') == {'-o': False, '<arg>': '-o', '--': True}
    assert docopt('usage: prog [-o] [--] <arg>\noptions: -o',
                  '-o 1') == {'-o': True, '<arg>': '1', '--': False}
    with raises(DocoptExit):  # "--" is not allowed; FIXME?
        docopt('usage: prog [-o] <arg>\noptions: -o', '-- -o')
def test_docopt():
doc = '''Usage: prog [-v] A
Options: -v Be verbose.'''
assert docopt(doc, 'arg') == {'-v': False, 'A': 'arg'}
assert docopt(doc, '-v arg') == {'-v': True, 'A': 'arg'}
doc = """Usage: prog [-vqr] [FILE]
prog INPUT OUTPUT
prog --help
Options:
-v print status messages
-q report only file names
-r show all occurrences of the same error
--help
"""
a = docopt(doc, '-v file.py')
assert a == {'-v': True, '-q': False, '-r': False, '--help': False,
'FILE': 'file.py', 'INPUT': None, 'OUTPUT': None}
a = docopt(doc, '-v')
assert a == {'-v': True, '-q': False, '-r': False, '--help': False,
'FILE': None, 'INPUT': None, 'OUTPUT': None}
with raises(DocoptExit): # does not match
docopt(doc, '-v input.py output.py')
with raises(DocoptExit):
docopt(doc, '--fake')
with raises(SystemExit):
docopt(doc, '--hel')
#with raises(SystemExit):
# docopt(doc, 'help') XXX Maybe help command?
def test_language_errors():
with raises(DocoptLanguageError):
docopt('no usage with colon here')
with raises(DocoptLanguageError):
docopt('usage: here \n\n and again usage: here')
def test_issue_40():
with raises(SystemExit): # i.e. shows help
docopt('usage: prog --help-commands | --help', '--help')
assert docopt('usage: prog --aabb | --aa', '--aa') == {'--aabb': False,
'--aa': True}
def test_issue34_unicode_strings():
try:
assert docopt(eval("u'usage: prog [-o <a>]'"), '') == \
{'-o': False, '<a>': None}
except SyntaxError:
pass # Python 3
def test_count_multiple_flags():
assert docopt('usage: prog [-v]', '-v') == {'-v': True}
assert docopt('usage: prog [-vv]', '') == {'-v': 0}
assert docopt('usage: prog [-vv]', '-v') == {'-v': 1}
assert docopt('usage: prog [-vv]', '-vv') == {'-v': 2}
with raises(DocoptExit):
docopt('usage: prog [-vv]', '-vvv')
assert docopt('usage: prog [-v | -vv | -vvv]', '-vvv') == {'-v': 3}
assert docopt('usage: prog -v...', '-vvvvvv') == {'-v': 6}
assert docopt('usage: prog [--ver --ver]', '--ver --ver') == {'--ver': 2}
def test_any_options_parameter():
with raises(DocoptExit):
docopt('usage: prog [options]', '-foo --bar --spam=eggs')
# assert docopt('usage: prog [options]', '-foo --bar --spam=eggs',
# any_options=True) == {'-f': True, '-o': 2,
# '--bar': True, '--spam': 'eggs'}
with raises(DocoptExit):
docopt('usage: prog [options]', '--foo --bar --bar')
# assert docopt('usage: prog [options]', '--foo --bar --bar',
# any_options=True) == {'--foo': True, '--bar': 2}
with raises(DocoptExit):
docopt('usage: prog [options]', '--bar --bar --bar -ffff')
# assert docopt('usage: prog [options]', '--bar --bar --bar -ffff',
# any_options=True) == {'--bar': 3, '-f': 4}
with raises(DocoptExit):
docopt('usage: prog [options]', '--long=arg --long=another')
# assert docopt('usage: prog [options]', '--long=arg --long=another',
# any_options=True) == {'--long': ['arg', 'another']}
#def test_options_shortcut_multiple_commands():
# # any_options is disabled
# assert docopt('usage: prog c1 [options] prog c2 [options]',
# 'c2 -o', any_options=True) == {'-o': True, 'c1': False, 'c2': True}
# assert docopt('usage: prog c1 [options] prog c2 [options]',
# 'c1 -o', any_options=True) == {'-o': True, 'c1': True, 'c2': False}
def test_default_value_for_positional_arguments():
doc = """Usage: prog [--data=<data>...]\n
Options:\n\t-d --data=<arg> Input data [default: x]
"""
a = docopt(doc, '')
assert a == {'--data': ['x']}
doc = """Usage: prog [--data=<data>...]\n
Options:\n\t-d --data=<arg> Input data [default: x y]
"""
a = docopt(doc, '')
assert a == {'--data': ['x', 'y']}
doc = """Usage: prog [--data=<data>...]\n
Options:\n\t-d --data=<arg> Input data [default: x y]
"""
a = docopt(doc, '--data=this')
assert a == {'--data': ['this']}
#def test_parse_defaults():
# assert parse_defaults("""usage: prog
# options:
# -o, --option <o>
# --another <a> description
# [default: x]
# <a>
# <another> description [default: y]""") == \
# ([Option('-o', '--option', 1, None),
# Option(None, '--another', 1, 'x')],
# [Argument('<a>', None),
# Argument('<another>', 'y')])
#
# doc = '''
# -h, --help Print help message.
# -o FILE Output file.
# --verbose Verbose mode.'''
# assert parse_defaults(doc)[0] == [Option('-h', '--help'),
# Option('-o', None, 1),
# Option(None, '--verbose')]
def test_issue_59():
assert docopt('usage: prog --long=<a>', '--long=') == {'--long': ''}
assert docopt('usage: prog -l <a>\n'
'options: -l <a>', ['-l', '']) == {'-l': ''}
def test_options_first():
assert docopt('usage: prog [--opt] [<args>...]',
'--opt this that') == {'--opt': True,
'<args>': ['this', 'that']}
assert docopt('usage: prog [--opt] [<args>...]',
'this that --opt') == {'--opt': True,
'<args>': ['this', 'that']}
assert docopt('usage: prog [--opt] [<args>...]',
'this that --opt',
options_first=True) == {'--opt': False,
'<args>': ['this', 'that', '--opt']}
def test_issue_68_options_shortcut_does_not_include_options_in_usage_pattern():
args = docopt('usage: prog [-ab] [options]\n'
'options: -x\n -y', '-ax')
# Need to use `is` (not `==`) since we want to make sure
# that they are not 1/0, but strictly True/False:
assert args['-a'] is True
assert args['-b'] is False
assert args['-x'] is True
assert args['-y'] is False
def test_issue_65_evaluate_argv_when_called_not_when_imported():
import sys
sys.argv = 'prog -a'.split()
assert docopt('usage: prog [-ab]') == {'-a': True, '-b': False}
sys.argv = 'prog -b'.split()
assert docopt('usage: prog [-ab]') == {'-a': False, '-b': True}
def test_issue_71_double_dash_is_not_a_valid_option_argument():
with raises(DocoptExit):
docopt('usage: prog [--log=LEVEL] [--] <args>...', '--log -- 1 2')
with raises(DocoptExit):
docopt('''usage: prog [-l LEVEL] [--] <args>...
options: -l LEVEL''', '-l -- 1 2')
usage = '''usage: this
usage:hai
usage: this that
usage: foo
bar
PROGRAM USAGE:
foo
bar
usage:
\ttoo
\ttar
Usage: eggs spam
BAZZ
usage: pit stop'''
def test_parse_section():
    # parse_section() extracts sections by a case-insensitive header and
    # keeps each section's (possibly multi-line) body intact.
    assert parse_section('usage:', 'foo bar fizz buzz') == []
    assert parse_section('usage:', 'usage: prog') == ['usage: prog']
    assert parse_section('usage:',
                         'usage: -x\n -y') == ['usage: -x\n -y']
    # The module-level ``usage`` fixture above exercises every header
    # spelling (case, embedded text, tab-indented bodies) at once.
    assert parse_section('usage:', usage) == [
        'usage: this',
        'usage:hai',
        'usage: this that',
        'usage: foo\n bar',
        'PROGRAM USAGE:\n foo\n bar',
        'usage:\n\ttoo\n\ttar',
        'Usage: eggs spam',
        'usage: pit stop',
    ]
def test_issue_126_defaults_not_parsed_correctly_when_tabs():
    """Regression test (docopt #126): ``[default: ...]`` must still be
    parsed when a tab separates the option from its description."""
    # Stray dataset-table residue removed from the final line.
    section = 'Options:\n\t--foo=<arg> [default: bar]'
    assert parse_defaults(section) == [Option(None, '--foo', 1, 'bar')]
set -o errexit
set -o verbose

# Load the shared Evergreen helpers and enter the build virtualenv.
source "$(dirname $(realpath ${BASH_SOURCE[0]}))"/prelude.sh
activate_venv

# Variants that produce no packages have nothing to do here.
# (The failing [[ ]] in an && list does not trip errexit.)
[[ "${has_packages}" != "true" ]] && exit 0

if [[ -z "${packager_script+x}" ]]; then
  echo "Error: packager run when packager_script is not set, please remove the package task from this variant (or variant task group) or set packager_script if this variant is intended to run the packager."
  exit 1
fi

# Run the packager from inside buildscripts; restore the previous working
# directory however this script exits.
pushd "src/buildscripts" >&/dev/null
trap 'popd >& /dev/null' EXIT

# NOTE(review): expansions below are intentionally left unquoted as in the
# original; ${packager_distro} may expand to several distro names - confirm
# before tightening quoting. (Stray dataset residue removed from last line.)
$python \
  ${packager_script} \
  --prefix $(pwd)/.. \
  --distros \
  ${packager_distro} \
  --crypt_spec \
  --tarball $(pwd)/../mongo_crypt_shared_v1-${version}.${ext:-tgz} \
  -s ${version} \
  -m HEAD \
  -a ${packager_arch}
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Davide Corio
# Copyright 2015 Agile Business Group <http://www.agilebg.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Italian Localization - FatturaPA - Emission',
    'version': '8.0.0.1.1',
    'category': 'Localization/Italy',
    'summary': 'Electronic invoices emission',
    'author': 'Davide Corio, Agile Business Group, Innoviu',
    'website': 'http://www.agilebg.com',
    'license': 'AGPL-3',
    # Builds on the base FatturaPA data model and split-payment support.
    "depends": [
        'l10n_it_fatturapa',
        'l10n_it_split_payment',
    ],
    # Data files are loaded in list order: wizard, views, then ACLs.
    "data": [
        'wizard/wizard_export_fatturapa_view.xml',
        'views/attachment_view.xml',
        'views/account_view.xml',
        'security/ir.model.access.csv',
    ],
    "test": [],
    "installable": True,
    # The 'unidecode' python package must be installed for this module.
    'external_dependencies': {
        'python': ['unidecode'],
    }
}
import unittest
import operator
import sys
import pickle
from test import support
class G:
    """Sequence exposing only the legacy ``__getitem__`` protocol.

    Iterable via the old-style protocol: ``iter()`` probes successive
    integer indices until the wrapped sequence raises ``IndexError``.
    """

    def __init__(self, seqn):
        self.seqn = seqn

    def __getitem__(self, index):
        return self.seqn[index]
class I:
    """Iterable implementing the iterator protocol explicitly.

    Yields each element of ``seqn`` exactly once, then raises
    ``StopIteration`` - just like ``iter(seqn)`` for a sequence.
    """

    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0

    def __iter__(self):
        # An iterator is its own iterable.
        return self

    def __next__(self):
        position = self.i
        if position >= len(self.seqn):
            raise StopIteration
        self.i = position + 1
        return self.seqn[position]
class Ig:
    'Sequence using iterator protocol defined with a generator'
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        # Returns a generator, which is itself a well-formed iterator.
        for val in self.seqn:
            yield val

class X:
    'Missing __getitem__ and __iter__'
    # Only has __next__: iter()/enumerate() must reject it with TypeError.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __next__(self):
        if self.i >= len(self.seqn): raise StopIteration
        v = self.seqn[self.i]
        self.i += 1
        return v

class E:
    'Test propagation of exceptions'
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
    def __next__(self):
        # Deliberately raises ZeroDivisionError on the first next() call.
        3 // 0

class N:
    'Iterator missing __next__()'
    # __iter__ returns self, but self is not an iterator: ill-formed.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
class PickleTest:
    # Helper to check picklability
    def check_pickle(self, itorg, seq):
        # Round-trip a fresh iterator and verify it still yields the
        # full expected sequence with its type preserved.
        d = pickle.dumps(itorg)
        it = pickle.loads(d)
        self.assertEqual(type(itorg), type(it))
        self.assertEqual(list(it), seq)

        # Advance a second copy by one item, pickle it mid-iteration, and
        # check that the unpickled iterator resumes where it left off.
        it = pickle.loads(d)
        try:
            next(it)
        except StopIteration:
            # Immediately exhausted: only valid when seq had <= 1 item.
            self.assertFalse(seq[1:])
            return
        d = pickle.dumps(it)
        it = pickle.loads(d)
        self.assertEqual(list(it), seq[1:])
class EnumerateTestCase(unittest.TestCase, PickleTest):
    # ``enum`` and the (seq, res) pair are overridden by subclasses below
    # to re-run every check against variants (subclass, empty, big, ...).
    enum = enumerate
    seq, res = 'abc', [(0,'a'), (1,'b'), (2,'c')]

    def test_basicfunction(self):
        self.assertEqual(type(self.enum(self.seq)), self.enum)
        e = self.enum(self.seq)
        self.assertEqual(iter(e), e)  # an enumerate object is its own iterator
        self.assertEqual(list(self.enum(self.seq)), self.res)
        self.enum.__doc__  # attribute access must not raise

    def test_pickle(self):
        self.check_pickle(self.enum(self.seq), self.res)

    def test_getitemseqn(self):
        # Works with the legacy __getitem__-only protocol.
        self.assertEqual(list(self.enum(G(self.seq))), self.res)
        e = self.enum(G(''))
        self.assertRaises(StopIteration, next, e)

    def test_iteratorseqn(self):
        self.assertEqual(list(self.enum(I(self.seq))), self.res)
        e = self.enum(I(''))
        self.assertRaises(StopIteration, next, e)

    def test_iteratorgenerator(self):
        self.assertEqual(list(self.enum(Ig(self.seq))), self.res)
        e = self.enum(Ig(''))
        self.assertRaises(StopIteration, next, e)

    def test_noniterable(self):
        # X has __next__ but neither __iter__ nor __getitem__.
        self.assertRaises(TypeError, self.enum, X(self.seq))

    def test_illformediterable(self):
        # N's __iter__ returns a non-iterator (no __next__).
        self.assertRaises(TypeError, self.enum, N(self.seq))

    def test_exception_propagation(self):
        self.assertRaises(ZeroDivisionError, list, self.enum(E(self.seq)))

    def test_argumentcheck(self):
        self.assertRaises(TypeError, self.enum)  # no arguments
        self.assertRaises(TypeError, self.enum, 1)  # wrong type (not iterable)
        self.assertRaises(TypeError, self.enum, 'abc', 'a')  # wrong type
        self.assertRaises(TypeError, self.enum, 'abc', 2, 3)  # too many arguments

    @support.cpython_only
    def test_tuple_reuse(self):
        # Tests an implementation detail where tuple is reused
        # whenever nothing else holds a reference to it
        self.assertEqual(len(set(map(id, list(enumerate(self.seq))))), len(self.seq))
        self.assertEqual(len(set(map(id, enumerate(self.seq)))), min(1,len(self.seq)))
class MyEnum(enumerate):
    # Subclassing enumerate must work; all checks are inherited.
    pass

class SubclassTestCase(EnumerateTestCase):
    enum = MyEnum

class TestEmpty(EnumerateTestCase):
    # Degenerate input: empty sequence.
    seq, res = '', []

class TestBig(EnumerateTestCase):
    # Large input, iterating well past the small-int cache range.
    seq = range(10,20000,2)
    res = list(zip(range(20000), seq))
class TestReversed(unittest.TestCase, PickleTest):

    def test_simple(self):
        class A:
            def __getitem__(self, i):
                if i < 5:
                    return str(i)
                raise StopIteration
            def __len__(self):
                return 5
        for data in 'abc', range(5), tuple(enumerate('abc')), A(), range(1,17,5):
            self.assertEqual(list(data)[::-1], list(reversed(data)))
        # Mappings are rejected even though dict has __len__/__getitem__.
        self.assertRaises(TypeError, reversed, {})
        # don't allow keyword arguments
        self.assertRaises(TypeError, reversed, [], a=1)

    def test_range_optimization(self):
        # reversed(range) returns the same specialized iterator type as
        # iter(range) in CPython.
        x = range(1)
        self.assertEqual(type(reversed(x)), type(iter(x)))

    def test_len(self):
        # length_hint() tracks remaining items, reaching 0 at exhaustion.
        for s in ('hello', tuple('hello'), list('hello'), range(5)):
            self.assertEqual(operator.length_hint(reversed(s)), len(s))
            r = reversed(s)
            list(r)
            self.assertEqual(operator.length_hint(r), 0)
        class SeqWithWeirdLen:
            called = False
            def __len__(self):
                # Valid on the first call (construction), raises afterwards.
                if not self.called:
                    self.called = True
                    return 10
                raise ZeroDivisionError
            def __getitem__(self, index):
                return index
        r = reversed(SeqWithWeirdLen())
        self.assertRaises(ZeroDivisionError, operator.length_hint, r)

    def test_gc(self):
        # Build a reference cycle through the reversed iterator; must be
        # collectable without crashing.
        class Seq:
            def __len__(self):
                return 10
            def __getitem__(self, index):
                return index
        s = Seq()
        r = reversed(s)
        s.r = r

    def test_args(self):
        self.assertRaises(TypeError, reversed)
        self.assertRaises(TypeError, reversed, [], 'extra')

    @unittest.skipUnless(hasattr(sys, 'getrefcount'), 'test needs sys.getrefcount()')
    def test_bug1229429(self):
        # this bug was never in reversed, it was in
        # PyObject_CallMethod, and reversed_new calls that sometimes.
        def f():
            pass
        r = f.__reversed__ = object()
        rc = sys.getrefcount(r)
        for i in range(10):
            try:
                reversed(f)
            except TypeError:
                pass
            else:
                self.fail("non-callable __reversed__ didn't raise!")
        # Ten failed calls must not leak references to r.
        self.assertEqual(rc, sys.getrefcount(r))

    def test_objmethods(self):
        # Objects must have __len__() and __getitem__() implemented.
        class NoLen(object):
            def __getitem__(self): return 1
        nl = NoLen()
        self.assertRaises(TypeError, reversed, nl)

        class NoGetItem(object):
            def __len__(self): return 2
        ngi = NoGetItem()
        self.assertRaises(TypeError, reversed, ngi)

    def test_pickle(self):
        for data in 'abc', range(5), tuple(enumerate('abc')), range(1,17,5):
            self.check_pickle(reversed(data), list(data)[::-1])
class EnumerateStartTestCase(EnumerateTestCase):
    # Base for start= variants: only the generic behaviour check is
    # re-run (the inherited pickle/protocol tests still apply).

    def test_basicfunction(self):
        e = self.enum(self.seq)
        self.assertEqual(iter(e), e)
        self.assertEqual(list(self.enum(self.seq)), self.res)

class TestStart(EnumerateStartTestCase):
    # enumerate() with a small explicit start value.
    enum = lambda self, i: enumerate(i, start=11)
    seq, res = 'abc', [(11, 'a'), (12, 'b'), (13, 'c')]

class TestLongStart(EnumerateStartTestCase):
    # start beyond sys.maxsize exercises the non-C-long counter path.
    enum = lambda self, i: enumerate(i, start=sys.maxsize+1)
    seq, res = 'abc', [(sys.maxsize+1,'a'), (sys.maxsize+2,'b'),
                       (sys.maxsize+3,'c')]
def test_main(verbose=None):
    """Run this module's tests; when verbose on a debug build, re-run
    them several times and print total refcounts to surface leaks."""
    # Stray dataset-table residue removed from the final line.
    support.run_unittest(__name__)

    # verify reference counting
    if verbose and hasattr(sys, "gettotalrefcount"):
        counts = [None] * 5
        for i in range(len(counts)):
            support.run_unittest(__name__)
            counts[i] = sys.gettotalrefcount()
        print(counts)

if __name__ == "__main__":
    test_main(verbose=True)
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package terraform
import (
"github.com/hashicorp/terraform/internal/dag"
)
const rootNodeName = "root"
// RootTransformer is a GraphTransformer that adds a root to the graph.
type RootTransformer struct{}
func (t *RootTransformer) Transform(g *Graph) error {
addRootNodeToGraph(g)
return nil
}
// addRootNodeToGraph modifies the given graph in-place so that it has a root
// node if it didn't already have one and so that any other node which doesn't
// already depend on something will depend on that root node.
//
// After this function returns, the graph will have only one node that doesn't
// depend on any other nodes.
func addRootNodeToGraph(g *Graph) {
// We always add the root node. This is a singleton so if it's already
// in the graph this will do nothing and just retain the existing root node.
//
// Note that rootNode is intentionally added by value and not by pointer
// so that all root nodes will be equal to one another and therefore
// coalesce when two valid graphs get merged together into a single graph.
g.Add(rootNode)
// Everything that doesn't already depend on at least one other node will
// depend on the root node, except the root node itself.
for _, v := range g.Vertices() {
if v == dag.Vertex(rootNode) {
continue
}
if g.UpEdges(v).Len() == 0 {
g.Connect(dag.BasicEdge(rootNode, v))
}
}
}
type graphNodeRoot struct{}
// rootNode is the singleton value representing all root graph nodes.
//
// The root node for all graphs should be this value directly, and in particular
// _not_ a pointer to this value. Using the value directly here means that
// multiple root nodes will always coalesce together when subsuming one graph
// into another.
var rootNode graphNodeRoot
func (n graphNodeRoot) Name() string {
return rootNodeName
}
// CloseRootModuleTransformer is a GraphTransformer that adds a root to the graph.
type CloseRootModuleTransformer struct{}
func (t *CloseRootModuleTransformer) Transform(g *Graph) error {
// close the root module
closeRoot := &nodeCloseModule{}
g.Add(closeRoot)
// since this is closing the root module, make it depend on everything in
// the root module.
for _, v := range g.Vertices() {
if v == closeRoot {
continue
}
// since this is closing the root module, and must be last, we can
// connect to anything that doesn't have any up edges.
if g.UpEdges(v).Len() == 0 {
g.Connect(dag.BasicEdge(closeRoot, v))
}
}
return nil
} | go | github | https://github.com/hashicorp/terraform | internal/terraform/transform_root.go |
# import the necessary packages
import numpy as np
import cv2
class CropPreprocessor:
    """Ten-crop style preprocessor: the four corner crops plus a center
    crop of an image, optionally mirrored horizontally, each resized to
    a fixed ``width`` x ``height``.

    (Stray dataset-table residue removed from the final line.)
    """

    def __init__(self, width, height, horiz=True, inter=cv2.INTER_AREA):
        # store the target image width, height, whether or not
        # horizontal flips should be included, along with the
        # interpolation method used when resizing
        self.width = width
        self.height = height
        self.horiz = horiz
        self.inter = inter

    def preprocess(self, image):
        """Return a numpy array of 5 crops (10 when ``horiz`` is True).

        NOTE(review): assumes ``image`` is at least width x height;
        smaller inputs make the corner coordinates negative and yield
        wrong crops -- confirm callers resize/pad first.
        """
        # initialize the list of crops
        crops = []

        # grab the width and height of the image then use these
        # dimensions to define the corners of the image based
        (h, w) = image.shape[:2]
        coords = [
            [0, 0, self.width, self.height],
            [w - self.width, 0, w, self.height],
            [w - self.width, h - self.height, w, h],
            [0, h - self.height, self.width, h]]

        # compute the center crop of the image as well
        dW = int(0.5 * (w - self.width))
        dH = int(0.5 * (h - self.height))
        coords.append([dW, dH, w - dW, h - dH])

        # loop over the coordinates, extract each of the crops,
        # and resize each of them to a fixed size
        for (startX, startY, endX, endY) in coords:
            crop = image[startY:endY, startX:endX]
            crop = cv2.resize(crop, (self.width, self.height),
                              interpolation=self.inter)
            crops.append(crop)

        # check to see if the horizontal flips should be taken
        if self.horiz:
            # compute the horizontal mirror flips for each crop
            mirrors = [cv2.flip(c, 1) for c in crops]
            crops.extend(mirrors)

        # return the set of crops
        return np.array(crops)
/*
 * This file was automatically generated.
 * DO NOT MODIFY BY HAND.
 * Run `yarn fix:special` to update
 */
"use strict";function r(e,{instancePath:t="",parentData:o,parentDataProperty:a,rootData:n=e}={}){if(!e||"object"!=typeof e||Array.isArray(e))return r.errors=[{params:{type:"object"}}],!1;{const t=0;for(const t in e)if("esModule"!==t&&"exportsOnly"!==t)return r.errors=[{params:{additionalProperty:t}}],!1;if(0===t){if(void 0!==e.esModule){const t=0;if("boolean"!=typeof e.esModule)return r.errors=[{params:{type:"boolean"}}],!1;var s=0===t}else s=!0;if(s)if(void 0!==e.exportsOnly){const t=0;if("boolean"!=typeof e.exportsOnly)return r.errors=[{params:{type:"boolean"}}],!1;s=0===t}else s=!0}}return r.errors=null,!0}function e(t,{instancePath:o="",parentData:a,parentDataProperty:n,rootData:s=t}={}){let p=null,l=0;return r(t,{instancePath:o,parentData:a,parentDataProperty:n,rootData:s})||(p=null===p?r.errors:p.concat(r.errors),l=p.length),e.errors=p,0===l}module.exports=e,module.exports.default=e;
"""Fixer that addes parentheses where they are required
This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``."""
# By Taek Joo Kim and Benjamin Peterson
# Local imports
from .. import fixer_base
from ..fixer_util import LParen, RParen
# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2]
class FixParen(fixer_base.BaseFix):
    # Participates in lib2to3's "bottom matcher" fast-path dispatch.
    BM_compatible = True

    # Matches a list comprehension or generator expression whose ``for .. in``
    # target is a bare comma-separated testlist (legal only in Python 2),
    # e.g. ``[x for x in 1, 2]``.  The offending testlist is captured as
    # ``target``.
    PATTERN = """
        atom< ('[' | '(')
            (listmaker< any
                old_comp_for<
                    'for' NAME 'in'
                    target=testlist_safe< any (',' any)+ [',']
                    >
                    [any]
                >
            >
            |
            testlist_gexp< any
                old_comp_for<
                    'for' NAME 'in'
                    target=testlist_safe< any (',' any)+ [',']
                    >
                    [any]
                >
            >)
        (']' | ')') >
    """

    def transform(self, node, results):
        """Wrap the captured testlist in parentheses, in place."""
        target = results["target"]

        lparen = LParen()
        # Move any whitespace that preceded the testlist onto the new '('
        # so the surrounding spacing is preserved.
        lparen.prefix = target.prefix
        target.prefix = "" # Make it hug the parentheses
        target.insert_child(0, lparen)
        target.append_child(RParen())
# Create your views here.
from jsonrpc import *
from djangowanted.wanted.models import Item, Flag, FlagType, Page
from django.template import loader
from django.shortcuts import render_to_response
from django.template import RequestContext, Template
from django.http import HttpResponseRedirect, HttpResponse
import urllib
from copy import copy
from wanted.forms import ItemForm
formsservice = FormProcessor({'itemform': ItemForm})
service = JSONRPCService()
def index(request, path=None):
    """Serve a CMS-style page.

    The page name comes from the ``page`` query parameter; page bodies live
    in the ``Page`` table, and an optional site template is stored as an
    ``Item`` named 'index.html'.
    """
    # NOTE(review): the ``path`` URLconf argument is immediately shadowed by
    # the query parameter -- presumably intentional, but confirm.
    path = request.GET.get('page', None)
    if path == '':
        path = 'index'
    if path is None:
        # workaround in history tokens: must have a query
        return HttpResponseRedirect("./?page=#index")
    try:
        p = Page.objects.get(name=path)
    except Page.DoesNotExist:
        p = None
    if not p and path == 'index':
        # No stored front page: fall back to the static template.
        return render_to_response('index.html', {'title':'', 'noscript':''})
    # NOTE(review): if ``p`` is None and path != 'index', the attribute
    # access below raises AttributeError -- there is no 404 handling here.
    args = {'title': p.name,
            'noscript': p.text
            }
    context_instance=RequestContext(request)
    context_instance.autoescape=False
    try:
        template = Item.objects.get(name='index.html')
    except Item.DoesNotExist:
        template = None
    if not template:
        return render_to_response('index.html', args, context_instance)
    # Render the template string stored in the database with the page data.
    tpl = loader.get_template_from_string(template)
    context_instance.update(args)
    tpl = tpl.render(context_instance)
    return HttpResponse(tpl)
def _getItem (item):
    """Serialise ``item`` to a JSON-able dict, attaching one attribute per
    FlagType (the item's Flag of that type, or an unsaved empty Flag)."""
    fields = copy(item._meta.get_all_field_names())
    # The reverse 'flag' relation and the primary key are not serialised.
    del fields[fields.index('flag')]
    del fields[fields.index('id')]
    for f in FlagType.objects.all():
        fields.append(f.name)
        try:
            fg = Flag.objects.get(item=item.id, type=f.id)
        except Flag.DoesNotExist:
            # No flag of this type yet: expose an unsaved empty Flag.
            fg = Flag()
        setattr(item, f.name, fg)
    return json_convert([item], fields=fields)[0]
@jsonremote(service)
def getItem (request, num):
    """Return the serialised item with primary key ``num``, or None if it
    does not exist."""
    try:
        found = Item.objects.get(id=num)
    except Item.DoesNotExist:
        return None
    return _getItem(found)
@jsonremote(service)
def getItemsByName (request, name):
    """Return items whose name exactly matches ``name``."""
    # NOTE(review): unlike getItems() below, the queryset is wrapped in an
    # extra list here, so json_convert sees a one-element list containing a
    # QuerySet.  Verify this is what the RPC client expects.
    return json_convert([Item.objects.filter(name=name)])
@jsonremote(service)
def getItems (request):
    """Return every item in the store, JSON-serialised."""
    all_items = Item.objects.all()
    return json_convert(all_items)
@jsonremote(service)
def updateItem (request, item):
    """Overwrite the name/text of an existing item from the RPC payload and
    return the refreshed full item list."""
    record = Item.objects.get(id=item['id'])
    record.name = item['name']
    record.text = item['text']
    record.save()
    return getItems(request)
@jsonremote(service)
def addItem (request, item):
    """Create an Item from the RPC payload, create/update one Flag per
    FlagType from the payload, and return the serialised item."""
    t = Item()
    t.name = item['name']
    t.short_description = item['short_description']
    t.price = item['price']
    t.save()
    # Mirrors the field list built in _getItem(); keep the two in sync.
    fields = copy(t._meta.get_all_field_names())
    del fields[fields.index('flag')]
    del fields[fields.index('id')]
    for f in FlagType.objects.all():
        fields.append(f.name)
        fv = item[f.name]
        # NOTE(review): ``d`` is never read after this point -- apparently
        # dead code left over from an earlier implementation.
        d = {'item': t.id, 'type': f.id, 'value': fv}
        try:
            fg = Flag.objects.get(item=t.id, type=f.id)
        except Flag.DoesNotExist:
            fg = Flag()
        # Applied to both an existing flag and a freshly created one.
        fg.item = t
        fg.type = f
        fg.value = fv
        fg.save()
        setattr(t, f.name, fg)
    return json_convert([t], fields=fields)[0]
@jsonremote(service)
def deleteItem (request, num):
    """Delete the item with primary key ``num`` and echo the id back."""
    doomed = Item.objects.get(id=num)
    doomed.delete()
    return num
/*
* Copyright (C) 2013 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.hash;
import org.jspecify.annotations.NullUnmarked;
/**
 * An enum that contains all of the known hash functions.
 *
 * @author Kurt Alfred Kluever
 */
@NullUnmarked
enum HashFunctionEnum {
  // Checksum functions.
  ADLER32(Hashing.adler32()),
  CRC32(Hashing.crc32()),
  // goodFastHash variants at several bit lengths.
  GOOD_FAST_HASH_32(Hashing.goodFastHash(32)),
  GOOD_FAST_HASH_64(Hashing.goodFastHash(64)),
  GOOD_FAST_HASH_128(Hashing.goodFastHash(128)),
  GOOD_FAST_HASH_256(Hashing.goodFastHash(256)),
  MD5(Hashing.md5()),
  MURMUR3_128(Hashing.murmur3_128()),
  MURMUR3_32(Hashing.murmur3_32()),
  MURMUR3_32_FIXED(Hashing.murmur3_32_fixed()),
  SHA1(Hashing.sha1()),
  SHA256(Hashing.sha256()),
  SHA384(Hashing.sha384()),
  SHA512(Hashing.sha512()),
  SIP_HASH24(Hashing.sipHash24()),
  FARMHASH_FINGERPRINT_64(Hashing.farmHashFingerprint64()),

  // Hash functions found in //javatests for comparing against current implementation of CityHash.
  // These can probably be removed sooner or later.
  ;

  // The wrapped Guava HashFunction instance for this constant.
  private final HashFunction hashFunction;

  HashFunctionEnum(HashFunction hashFunction) {
    this.hashFunction = hashFunction;
  }

  /** Returns the {@link HashFunction} instance this constant wraps. */
  HashFunction getHashFunction() {
    return hashFunction;
  }
}
/* Timer API example -- Register and handle timer events
*
* -----------------------------------------------------------------------------
*
* Copyright (c) 2018-Present, Redis Ltd.
* All rights reserved.
*
* Licensed under your choice of (a) the Redis Source Available License 2.0
* (RSALv2); or (b) the Server Side Public License v1 (SSPLv1); or (c) the
* GNU Affero General Public License v3 (AGPLv3).
*/
#include "../redismodule.h"
#include <stdio.h>
#include <stdlib.h>
#include <ctype.h>
#include <string.h>
/* Timer callback: announce which timer fired, then release its payload
 * (allocated with RedisModule_Alloc by the command handler). */
void timerHandler(RedisModuleCtx *ctx, void *data) {
    char *label = data;
    REDISMODULE_NOT_USED(ctx);
    printf("Fired %s!\n", label);
    RedisModule_Free(label);
}
/* HELLOTIMER.TIMER*/
/* Arms ten one-shot timers with random delays under five seconds; each
 * timer owns a heap-allocated label that timerHandler() frees. */
int TimerCommand_RedisCommand(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) {
    int i;
    REDISMODULE_NOT_USED(argv);
    REDISMODULE_NOT_USED(argc);

    for (i = 0; i < 10; i++) {
        int delay = rand() % 5000;
        char *label = RedisModule_Alloc(256);
        RedisModuleTimerID tid;
        snprintf(label,256,"After %d", delay);
        tid = RedisModule_CreateTimer(ctx,delay,timerHandler,label);
        REDISMODULE_NOT_USED(tid);
    }
    return RedisModule_ReplyWithSimpleString(ctx, "OK");
}
/* This function must be present on each Redis module. It is used in order to
 * register the commands into the Redis server. */
int RedisModule_OnLoad(RedisModuleCtx *ctx, RedisModuleString **argv, int argc) {
    REDISMODULE_NOT_USED(argv);
    REDISMODULE_NOT_USED(argc);

    /* Register as module "hellotimer", version 1. */
    if (RedisModule_Init(ctx,"hellotimer",1,REDISMODULE_APIVER_1)
        == REDISMODULE_ERR) return REDISMODULE_ERR;

    /* The command touches no keys, hence the 0,0,0 key specification. */
    if (RedisModule_CreateCommand(ctx,"hellotimer.timer",
        TimerCommand_RedisCommand,"readonly",0,0,0) == REDISMODULE_ERR)
        return REDISMODULE_ERR;

    return REDISMODULE_OK;
}
"""Provides OAuth authorization. Main components are:
* OAuthClient - provides logic for 3-legged OAuth protocol,
* OAuthDanceHandler - wrapper for OAuthClient for handling OAuth requests,
* OAuthHandler - from this handler should inherit all other handlers that want
to be authenticated and have access to BloggerService. Be sure that you
added @requiredOAuth on top of your request method (i.e. post, get).
Request tokens are stored in OAuthRequestToken (explicite) and access tokens are
stored in TokenCollection (implicit) provided by gdata.alt.appengine.
Heavily used resources and ideas from:
* http://github.com/tav/tweetapp,
* Examples of OAuth from GData Python Client written by Eric Bidelman.
"""
__author__ = ('wiktorgworek (Wiktor Gworek), '
'e.bidelman (Eric Bidelman)')
import os
import gdata.auth
import gdata.client
import gdata.alt.appengine
import gdata.blogger.service
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
SETTINGS = {
'APP_NAME': 'YOUR_APPLICATION_NAME',
'CONSUMER_KEY': 'YOUR_CONSUMER_KEY',
'CONSUMER_SECRET': 'YOUR_CONSUMER_SECRET',
'SIG_METHOD': gdata.auth.OAuthSignatureMethod.HMAC_SHA1,
'SCOPES': gdata.service.CLIENT_LOGIN_SCOPES['blogger']
}
# ------------------------------------------------------------------------------
# Data store models.
# ------------------------------------------------------------------------------
class OAuthRequestToken(db.Model):
    """Stores OAuth request token."""
    # Request-token key/secret pair handed out by Google; the secret is
    # needed to rebuild the token when the user returns from approval.
    token_key = db.StringProperty(required=True)
    token_secret = db.StringProperty(required=True)
    # Creation timestamp, set automatically on first put().
    created = db.DateTimeProperty(auto_now_add=True)
# ------------------------------------------------------------------------------
# OAuth client.
# ------------------------------------------------------------------------------
class OAuthClient(object):
    """Runs the 3-legged OAuth dance against Google and exposes an
    authorized BloggerService.  The method names listed in ``__public__``
    are dispatched from URL actions by OAuthDanceHandler."""
    __public__ = ('request_token', 'callback', 'revoke_token')

    def __init__(self, handler):
        self.handler = handler
        self.blogger = gdata.blogger.service.BloggerService(
            source=SETTINGS['APP_NAME'])
        self.blogger.SetOAuthInputParameters(SETTINGS['SIG_METHOD'],
            SETTINGS['CONSUMER_KEY'], consumer_secret=SETTINGS['CONSUMER_SECRET'])
        # Adapts the gdata service to App Engine (datastore token store etc.).
        gdata.alt.appengine.run_on_appengine(self.blogger)

    def has_access_token(self):
        """Checks if there is an access token in token store."""
        access_token = self.blogger.token_store.find_token(
            '%20'.join(SETTINGS['SCOPES']))
        return isinstance(access_token, gdata.auth.OAuthToken)

    def request_token(self):
        """Fetches a request token and redirects the user to the approval page."""
        if users.get_current_user():
            # 1.) REQUEST TOKEN STEP. Provide the data scope(s) and the page we'll
            # be redirected back to after the user grants access on the approval page.
            req_token = self.blogger.FetchOAuthRequestToken(
                scopes=SETTINGS['SCOPES'],
                oauth_callback=self.handler.request.uri.replace(
                    'request_token', 'callback'))
            # When using HMAC, persist the token secret in order to re-create an
            # OAuthToken object coming back from the approval page.
            db_token = OAuthRequestToken(token_key = req_token.key,
                token_secret=req_token.secret)
            db_token.put()
            # 2.) APPROVAL STEP. Redirect to user to Google's OAuth approval page.
            self.handler.redirect(self.blogger.GenerateOAuthAuthorizationURL())

    def callback(self):
        """Invoked after we're redirected back from the approval page."""
        oauth_token = gdata.auth.OAuthTokenFromUrl(self.handler.request.uri)
        if oauth_token:
            # Find request token saved by put() method.
            db_token = OAuthRequestToken.all().filter(
                'token_key =', oauth_token.key).fetch(1)[0]
            oauth_token.secret = db_token.token_secret
            oauth_token.oauth_input_params = self.blogger.GetOAuthInputParameters()
            self.blogger.SetOAuthToken(oauth_token)
            # 3.) Exchange the authorized request token for an access token
            oauth_verifier = self.handler.request.get(
                'oauth_verifier', default_value='')
            access_token = self.blogger.UpgradeToOAuthAccessToken(
                oauth_verifier=oauth_verifier)
            # Remember the access token in the current user's token store
            if access_token and users.get_current_user():
                self.blogger.token_store.add_token(access_token)
            elif access_token:
                # No signed-in user: keep the token on this client only.
                self.blogger.current_token = access_token
                self.blogger.SetOAuthToken(access_token)
        self.handler.redirect('/')

    def revoke_token(self):
        """Revokes the current user's OAuth access token."""
        try:
            self.blogger.RevokeOAuthToken()
        except gdata.service.RevokingOAuthTokenFailed:
            pass
        except gdata.service.NonOAuthToken:
            pass
        # Clear cached tokens regardless of whether revocation succeeded.
        self.blogger.token_store.remove_all_tokens()
        self.handler.redirect('/')
# ------------------------------------------------------------------------------
# Request handlers.
# ------------------------------------------------------------------------------
class OAuthDanceHandler(webapp.RequestHandler):
    """Handler for the 3 legged OAuth dance.
    This handler is responsible for fetching an initial OAuth request token,
    redirecting the user to the approval page. When the user grants access, they
    will be redirected back to this GET handler and their authorized request token
    will be exchanged for a long-lived access token."""
    def __init__(self):
        super(OAuthDanceHandler, self).__init__()
        self.client = OAuthClient(self)

    def get(self, action=''):
        # Dispatch /oauth/<action> to the whitelisted OAuthClient methods;
        # any other action restarts the dance from the beginning.
        if action in self.client.__public__:
            self.response.out.write(getattr(self.client, action)())
        else:
            self.response.out.write(self.client.request_token())
class OAuthHandler(webapp.RequestHandler):
    """All handlers requiring OAuth should inherit from this class."""
    def __init__(self):
        super(OAuthHandler, self).__init__()
        # Shared client giving subclasses access to the BloggerService.
        self.client = OAuthClient(self)
def requiresOAuth(fun):
    """Decorator for request handlers to gain authentication via OAuth.
    Must be used in a handler that inherits from OAuthHandler."""
    def decorate(self, *args, **kwargs):
        if self.client.has_access_token():
            try:
                fun(self, *args, **kwargs)
            except gdata.service.RequestError, error:
                # Token was revoked or expired: restart the OAuth dance.
                if error.code in [401, 403]:
                    self.redirect('/oauth/request_token')
                else:
                    raise
        else:
            # No access token yet: send the user into the OAuth dance.
            self.redirect('/oauth/request_token')
    return decorate
// Copyright The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package timestamp
import (
"math"
"time"
)
// FromTime returns a new millisecond timestamp from a time.
func FromTime(t time.Time) int64 {
return t.Unix()*1000 + int64(t.Nanosecond())/int64(time.Millisecond)
}
// Time returns a new time.Time object from a millisecond timestamp.
func Time(ts int64) time.Time {
return time.Unix(ts/1000, (ts%1000)*int64(time.Millisecond)).UTC()
}
// FromFloatSeconds returns a millisecond timestamp from float seconds.
func FromFloatSeconds(ts float64) int64 {
	// Round half away from zero, then truncate to an integer count.
	millis := math.Round(ts * 1000)
	return int64(millis)
}
"""
Component to interface with binary sensors.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/binary_sensor/
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity import Entity
from homeassistant.const import (STATE_ON, STATE_OFF)
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
DOMAIN = 'binary_sensor'
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + '.{}'
DEVICE_CLASSES = [
'battery', # On means low, Off means normal
'cold', # On means cold, Off means normal
'connectivity', # On means connected, Off means disconnected
'door', # On means open, Off means closed
'garage_door', # On means open, Off means closed
'gas', # On means gas detected, Off means no gas (clear)
'heat', # On means hot, Off means normal
'light', # On means light detected, Off means no light
'lock', # On means open (unlocked), Off means closed (locked)
'moisture', # On means wet, Off means dry
'motion', # On means motion detected, Off means no motion (clear)
'moving', # On means moving, Off means not moving (stopped)
'occupancy', # On means occupied, Off means not occupied (clear)
'opening', # On means open, Off means closed
'plug', # On means plugged in, Off means unplugged
'power', # On means power detected, Off means no power
'presence', # On means home, Off means away
'problem', # On means problem detected, Off means no problem (OK)
'safety', # On means unsafe, Off means safe
'smoke', # On means smoke detected, Off means no smoke (clear)
'sound', # On means sound detected, Off means no sound (clear)
'vibration', # On means vibration detected, Off means no vibration
'window', # On means open, Off means closed
]
DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))
async def async_setup(hass, config):
    """Track states and offer events for binary sensors."""
    # One shared EntityComponent manages every binary_sensor entity; it is
    # stashed in hass.data so the config-entry hooks below can reach it.
    component = hass.data[DOMAIN] = EntityComponent(
        logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
    await component.async_setup(config)
    return True
async def async_setup_entry(hass, entry):
    """Set up a config entry."""
    component = hass.data[DOMAIN]
    return await component.async_setup_entry(entry)
async def async_unload_entry(hass, entry):
    """Unload a config entry."""
    component = hass.data[DOMAIN]
    return await component.async_unload_entry(entry)
class BinarySensorDevice(Entity):
    """Represent a binary sensor."""

    @property
    def is_on(self):
        """Return true if the binary sensor is on."""
        return None

    @property
    def state(self):
        """Return the state of the binary sensor."""
        if self.is_on:
            return STATE_ON
        return STATE_OFF

    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        return None
import re
from collections import OrderedDict
from lxml import etree
import os
from os.path import splitext
from tsg.config import RAW_DIR
import operator
import logging
def parse_link(doc_link):
    '''
    Map a crawled link to its local document filename, inferring the kind of
    document (author, conference, journal or other) from the URL path.
    '''
    # Grab the final two path components: <parent>/<leaf>.
    tail = re.search('([^/]*)/([^/]*)$', doc_link)
    link_parts = list(tail.groups())
    # Drop any fragment identifier from the leaf component.
    if "#" in link_parts[1]:
        link_parts[1], _, _ = link_parts[1].partition("#")
    if "/pers/" in doc_link:
        category = "author"
    elif "/conf/" in doc_link:
        category = "conference"
    elif "/journals/" in doc_link:
        category = "journal"
    else:
        category = "other"
    # Ensure the filename ends in .html exactly once.
    suffix = '' if link_parts[1][-5:] == '.html' else '.html'
    return '{}_{}_{}{}'.format(category,
                               link_parts[0],
                               link_parts[1],
                               suffix)
def get_page_outlinks(doc_path):
    """Extract the outgoing links of a crawled HTML page and normalise each
    one to a local document filename via parse_link()."""
    xpath_string = "//div[@class='data']//a/@href"
    parser = etree.HTMLParser()
    page_outfiles = []
    if os.path.exists(doc_path):
        with open(doc_path) as doc_f:
            tree = etree.parse(doc_f, parser)
            hrefs = tree.xpath(xpath_string)
        page_outfiles = [parse_link(href) for href in hrefs]
    return page_outfiles
def build_link_database(html_files_path=RAW_DIR):
    """Scan every .html file under ``html_files_path`` and build the link
    graph.

    Returns a pair ``(ordered_db, doc_outlinks)`` where ``ordered_db`` maps
    each document (extension stripped) to the sorted-by-key dict of its
    in-links, and ``doc_outlinks`` maps each document to its out-links.
    """
    logging.info('Start building link database')
    log_cnt = 0
    doc_dict = {}       # target doc -> list of docs linking to it (in-links)
    doc_outlinks = {}   # doc -> list of docs it links to (out-links)
    for doc_filename in os.listdir(html_files_path):
        log_cnt += 1
        if log_cnt % 100000 == 0:
            logging.info(
                'Building Link database. Now at file {}'.format(log_cnt))
        if doc_filename.endswith(".html"):
            doc_path = html_files_path + doc_filename
            doc_outlinks[doc_filename] = get_page_outlinks(doc_path)
            for target_doc in doc_outlinks[doc_filename]:
                try:
                    doc_dict[target_doc].append(doc_filename)
                except KeyError:
                    doc_dict[target_doc] = [doc_filename]
            # Make sure every scanned document has an (possibly empty)
            # in-link entry of its own.
            try:
                doc_dict[doc_filename]
            except KeyError:
                doc_dict[doc_filename] = []
    # Deduplicate in-link lists and strip the .html extension from all keys
    # and values.  (The original comment said "convert keys to uuid", but
    # splitext only removes the extension.)
    doc_dict = {splitext(doc)[0]: [splitext(d)[0] for d in (set(doc_dict[doc]))]
                for doc in doc_dict}
    doc_outlinks = {splitext(doc)[0]: [splitext(d)[0] for d in doc_outlinks[doc]]
                    for doc in doc_outlinks}
    # Sort alphabetically
    ordered_db = OrderedDict(sorted(doc_dict.items(), key=lambda t: t[0]))
    logging.info('Finished building link database')
    return ordered_db, doc_outlinks
def calc_page_rank(html_files_path=RAW_DIR):
    """Iteratively compute PageRank over the crawled link graph and return
    an OrderedDict of document -> rank, sorted by rank (descending)."""
    logging.info('Starting calc_page_rank')
    d = 0.85  # Damping in PageRank Algorithm
    threshold = 0.0001  # convergence tolerance (1e-4)
    iteration_flag = True  # Keep page rank iteration until threshold is met
    log_cnt = 0
    docs_links_db, doc_outlinks = build_link_database(html_files_path)
    # Every document starts with rank 1.0.
    pagerank_per_doc = {doc: 1.0 for doc in docs_links_db}
    while iteration_flag:
        log_cnt = 0
        logging.info('Starting new iteration in calculating the page rank')
        tmp_pagerank_per_doc = {}
        for doc, doc_inlinks in docs_links_db.items():
            tmp_pagerank_per_doc[doc] = (1 - d)
            for inlink in doc_inlinks:
                num_outlinks_per_inlink = 0
                if inlink in doc_outlinks:
                    num_outlinks_per_inlink = len(doc_outlinks[inlink])
                    tmp_pagerank_per_doc[doc] += \
                        d * (pagerank_per_doc[inlink] /
                             num_outlinks_per_inlink)
                else:
                    # NOTE(review): an in-link with no recorded out-links
                    # zeroes the *accumulated* rank of this document, which
                    # is not standard PageRank -- confirm this is intended.
                    tmp_pagerank_per_doc[doc] = 0
            log_cnt += 1
            if log_cnt % 100000 == 0:
                logging.info('at doc_link {}'.format(log_cnt))
        logging.info('Now investigating stop condition for caculating the'
                     'page rank')
        # Stop once no document's rank dropped by more than the threshold.
        iteration_flag = False
        for doc in tmp_pagerank_per_doc:
            if (pagerank_per_doc[doc] - tmp_pagerank_per_doc[doc] > threshold):
                iteration_flag = True
        pagerank_per_doc = tmp_pagerank_per_doc
    # Sort by rank (then name), highest first.
    sorted_pagerank_per_docs = OrderedDict(sorted(pagerank_per_doc.items(),
                                                  key=operator.itemgetter(1, 0),
                                                  reverse=True))
    return sorted_pagerank_per_docs
#!/usr/bin/env python
#
# Copyright 2003 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
import sys
import struct
# Reads interleaved 16-bit I/Q words from stdin and checks that the Q
# channel carries an incrementing 16-bit counter, reporting every position
# where the sequence breaks (Python 2 script).
fin = sys.stdin

count = 0            # total 16-bit words consumed
expected = 0         # next expected counter value on the Q channel
last_correction = 0  # word index of the previous sequence break

while 1:
    s = fin.read(2)
    if not s or len(s) != 2:
        break
    v, = struct.unpack ('H', s)
    iv = int(v) & 0xffff
    # print "%8d %6d 0x%04x" % (count, iv, iv)
    if count & 0x1:    # only counting on the Q channel
        if (expected & 0xffff) != iv:
            # Report index, distance since last break, and the bad value.
            print "%8d (%6d) %6d 0x%04x" % (count, count - last_correction, iv, iv)
            expected = iv  # reset expected sequence
            last_correction = count
        expected = (expected + 1) & 0xffff
    count += 1
"""
neural network stuff, intended to be used with Lasagne
"""
import numpy as np
import theano as th
import theano.tensor as T
import lasagne
from lasagne.layers import dnn
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
# T.nnet.relu has some stability issues, this is better
def relu(x):
    """Element-wise rectifier: max(x, 0)."""
    return T.maximum(0, x)
def lrelu(x, a=0.2):
    """Leaky rectifier: positives pass through, negatives are scaled by `a`."""
    leak = a * x
    return T.maximum(leak, x)
def centered_softplus(x):
    """Softplus shifted down by log(2) so that f(0) == 0."""
    offset = np.cast[th.config.floatX](np.log(2.))
    return T.nnet.softplus(x) - offset
def log_sum_exp(x, axis=1):
    """Numerically stable log(sum(exp(x))) along `axis` (max-subtraction trick)."""
    peak = T.max(x, axis=axis)
    centered = x - peak.dimshuffle(0, 'x')
    return peak + T.log(T.sum(T.exp(centered), axis=axis))
def adam_updates(params, cost, lr=0.001, mom1=0.9, mom2=0.999):
    """Build Adam (Kingma & Ba) update rules for `params` minimising `cost`.

    Returns a list of (shared_variable, new_expression) pairs suitable for
    theano.function(updates=...).
    """
    grads = T.grad(cost, params)
    # Shared step counter used for bias correction.
    t = th.shared(np.cast[th.config.floatX](1.))
    updates = []
    for p, g in zip(params, grads):
        first_moment = th.shared(np.cast[th.config.floatX](p.get_value() * 0.))
        second_moment = th.shared(np.cast[th.config.floatX](p.get_value() * 0.))
        # Exponential moving averages of the gradient and its square.
        new_first = mom1 * first_moment + (1. - mom1) * g
        new_second = mom2 * second_moment + (1. - mom2) * T.square(g)
        # Bias-corrected estimates.
        corrected_first = new_first / (1. - mom1 ** t)
        corrected_second = new_second / (1. - mom2 ** t)
        step = corrected_first / T.sqrt(corrected_second + 1e-8)
        updates.append((first_moment, new_first))
        updates.append((second_moment, new_second))
        updates.append((p, p - lr * step))
        updates.append((t, t + 1))
    return updates
class WeightNormLayer(lasagne.layers.Layer):
    """Weight normalisation (Salimans & Kingma, 2016): reparameterises the
    incoming layer's weights as g * W / ||W|| and applies the bias and
    nonlinearity here instead."""
    def __init__(self, incoming, b=lasagne.init.Constant(0.), g=lasagne.init.Constant(1.),
                 W=lasagne.init.Normal(0.05), train_g=False, init_stdv=1., nonlinearity=relu, **kwargs):
        super(WeightNormLayer, self).__init__(incoming, **kwargs)
        self.nonlinearity = nonlinearity
        self.init_stdv = init_stdv
        k = self.input_shape[1]  # number of feature channels/units
        if b is not None:
            self.b = self.add_param(b, (k,), name="b", regularizable=False)
        if g is not None:
            self.g = self.add_param(g, (k,), name="g", regularizable=False, trainable=train_g)
        # 4D (convolutional) inputs normalise per channel; 2D per unit.
        if len(self.input_shape)==4:
            self.axes_to_sum = (0,2,3)
            self.dimshuffle_args = ['x',0,'x','x']
        else:
            self.axes_to_sum = 0
            self.dimshuffle_args = ['x',0]
        # scale weights in layer below
        incoming.W_param = incoming.W
        #incoming.W_param.set_value(W.sample(incoming.W_param.get_value().shape))
        if incoming.W_param.ndim==4:
            # Deconv weights keep output channels on axis 1; conv on axis 0.
            if isinstance(incoming, Deconv2DLayer):
                W_axes_to_sum = (0,2,3)
                W_dimshuffle_args = ['x',0,'x','x']
            else:
                W_axes_to_sum = (1,2,3)
                W_dimshuffle_args = [0,'x','x','x']
        else:
            W_axes_to_sum = 0
            W_dimshuffle_args = ['x',0]
        # Replace the incoming layer's W expression with its normalised form
        # (mutates the incoming layer in place).
        if g is not None:
            incoming.W = incoming.W_param * (self.g/T.sqrt(1e-6 + T.sum(T.square(incoming.W_param),axis=W_axes_to_sum))).dimshuffle(*W_dimshuffle_args)
        else:
            incoming.W = incoming.W_param / T.sqrt(1e-6 + T.sum(T.square(incoming.W_param),axis=W_axes_to_sum,keepdims=True))

    def get_output_for(self, input, init=False, **kwargs):
        """With init=True, do data-dependent initialisation: rescale so the
        pre-activation has mean 0 / stdv `init_stdv`, recording the matching
        parameter updates in ``self.init_updates`` for the caller to apply."""
        if init:
            m = T.mean(input, self.axes_to_sum)
            input -= m.dimshuffle(*self.dimshuffle_args)
            inv_stdv = self.init_stdv/T.sqrt(T.mean(T.square(input), self.axes_to_sum))
            input *= inv_stdv.dimshuffle(*self.dimshuffle_args)
            self.init_updates = [(self.b, -m*inv_stdv), (self.g, self.g*inv_stdv)]
        elif hasattr(self,'b'):
            input += self.b.dimshuffle(*self.dimshuffle_args)
        return self.nonlinearity(input)
def weight_norm(layer, **kwargs):
    """Wrap `layer` in a WeightNormLayer, stripping its bias and
    nonlinearity (the wrapper re-applies both after normalisation)."""
    nonlinearity = getattr(layer, 'nonlinearity', None)
    if nonlinearity is not None:
        layer.nonlinearity = lasagne.nonlinearities.identity
    if hasattr(layer, 'b'):
        # Remove b from the wrapped layer's trainable parameters.
        del layer.params[layer.b]
        layer.b = None
    return WeightNormLayer(layer, nonlinearity=nonlinearity, **kwargs)
class Deconv2DLayer(lasagne.layers.Layer):
    """Transposed ('fractionally strided') 2D convolution that produces an
    output of exactly ``target_shape``, implemented via the gradient of
    conv2d with respect to its inputs."""
    def __init__(self, incoming, target_shape, filter_size, stride=(2, 2),
                 W=lasagne.init.Normal(0.05), b=lasagne.init.Constant(0.), nonlinearity=relu, **kwargs):
        super(Deconv2DLayer, self).__init__(incoming, **kwargs)
        self.target_shape = target_shape
        self.nonlinearity = (lasagne.nonlinearities.identity if nonlinearity is None else nonlinearity)
        self.filter_size = lasagne.layers.dnn.as_tuple(filter_size, 2)
        self.stride = lasagne.layers.dnn.as_tuple(stride, 2)
        self.target_shape = target_shape
        # Weight layout: (input_channels, output_channels, h, w).
        self.W_shape = (incoming.output_shape[1], target_shape[1], filter_size[0], filter_size[1])
        self.W = self.add_param(W, self.W_shape, name="W")
        if b is not None:
            self.b = self.add_param(b, (target_shape[1],), name="b")
        else:
            self.b = None

    def get_output_for(self, input, **kwargs):
        # Transposed convolution == gradient of conv2d w.r.t. its inputs.
        op = T.nnet.abstract_conv.AbstractConv2d_gradInputs(imshp=self.target_shape, kshp=self.W_shape, subsample=self.stride, border_mode='half')
        activation = op(self.W, input, self.target_shape[2:])
        if self.b is not None:
            activation += self.b.dimshuffle('x', 0, 'x', 'x')
        return self.nonlinearity(activation)

    def get_output_shape_for(self, input_shape):
        return self.target_shape
# minibatch discrimination layer
class MinibatchLayer(lasagne.layers.Layer):
    """Minibatch discrimination (Salimans et al., 2016): appends features
    measuring how close each sample is to the other samples in the batch."""
    def __init__(self, incoming, num_kernels, dim_per_kernel=5, theta=lasagne.init.Normal(0.05),
                 log_weight_scale=lasagne.init.Constant(0.), b=lasagne.init.Constant(-1.), **kwargs):
        super(MinibatchLayer, self).__init__(incoming, **kwargs)
        self.num_kernels = num_kernels
        num_inputs = int(np.prod(self.input_shape[1:]))
        self.theta = self.add_param(theta, (num_inputs, num_kernels, dim_per_kernel), name="theta")
        self.log_weight_scale = self.add_param(log_weight_scale, (num_kernels, dim_per_kernel), name="log_weight_scale")
        # Weight-normalised projection tensor.
        self.W = self.theta * (T.exp(self.log_weight_scale)/T.sqrt(T.sum(T.square(self.theta),axis=0))).dimshuffle('x',0,1)
        self.b = self.add_param(b, (num_kernels,), name="b")

    def get_output_shape_for(self, input_shape):
        return (input_shape[0], np.prod(input_shape[1:])+self.num_kernels)

    def get_output_for(self, input, init=False, **kwargs):
        if input.ndim > 2:
            # if the input has more than two dimensions, flatten it into a
            # batch of feature vectors.
            input = input.flatten(2)
        activation = T.tensordot(input, self.W, [[1], [0]])
        # Pairwise L1 distances between samples; the large diagonal term
        # excludes each sample's distance to itself.
        abs_dif = (T.sum(abs(activation.dimshuffle(0,1,2,'x') - activation.dimshuffle('x',1,2,0)),axis=2)
                   + 1e6 * T.eye(input.shape[0]).dimshuffle(0,'x',1))
        if init:
            # Data-dependent init: rescale distances and record the
            # matching weight-scale update in self.init_updates.
            mean_min_abs_dif = 0.5 * T.mean(T.min(abs_dif, axis=2),axis=0)
            abs_dif /= mean_min_abs_dif.dimshuffle('x',0,'x')
            self.init_updates = [(self.log_weight_scale, self.log_weight_scale-T.log(mean_min_abs_dif).dimshuffle(0,'x'))]
        f = T.sum(T.exp(-abs_dif),axis=2)
        if init:
            # Centre the new features and record the bias update.
            mf = T.mean(f,axis=0)
            f -= mf.dimshuffle('x',0)
            self.init_updates.append((self.b, -mf))
        else:
            f += self.b.dimshuffle('x',0)
        return T.concatenate([input, f], axis=1)
class BatchNormLayer(lasagne.layers.Layer):
    """Batch normalisation; exponential-moving-average statistics are used
    when ``deterministic=True`` (test time)."""
    def __init__(self, incoming, b=lasagne.init.Constant(0.), g=lasagne.init.Constant(1.), nonlinearity=relu, **kwargs):
        super(BatchNormLayer, self).__init__(incoming, **kwargs)
        self.nonlinearity = nonlinearity
        k = self.input_shape[1]
        if b is not None:
            self.b = self.add_param(b, (k,), name="b", regularizable=False)
        if g is not None:
            self.g = self.add_param(g, (k,), name="g", regularizable=False)
        # Running statistics; refreshed via self.bn_updates during training.
        self.avg_batch_mean = self.add_param(lasagne.init.Constant(0.), (k,), name="avg_batch_mean", regularizable=False, trainable=False)
        self.avg_batch_var = self.add_param(lasagne.init.Constant(1.), (k,), name="avg_batch_var", regularizable=False, trainable=False)
        # 4D (convolutional) inputs normalise per channel; 2D per unit.
        if len(self.input_shape)==4:
            self.axes_to_sum = (0,2,3)
            self.dimshuffle_args = ['x',0,'x','x']
        else:
            self.axes_to_sum = 0
            self.dimshuffle_args = ['x',0]

    def get_output_for(self, input, deterministic=False, **kwargs):
        if deterministic:
            # Test time: normalise with the stored moving averages.
            norm_features = (input-self.avg_batch_mean.dimshuffle(*self.dimshuffle_args)) / T.sqrt(1e-6 + self.avg_batch_var).dimshuffle(*self.dimshuffle_args)
        else:
            batch_mean = T.mean(input,axis=self.axes_to_sum).flatten()
            centered_input = input-batch_mean.dimshuffle(*self.dimshuffle_args)
            batch_var = T.mean(T.square(centered_input),axis=self.axes_to_sum).flatten()
            batch_stdv = T.sqrt(1e-6 + batch_var)
            norm_features = centered_input / batch_stdv.dimshuffle(*self.dimshuffle_args)
            # BN updates
            # NOTE(review): the caller must apply self.bn_updates; they are
            # not registered with the graph automatically.
            new_m = 0.9*self.avg_batch_mean + 0.1*batch_mean
            new_v = 0.9*self.avg_batch_var + T.cast((0.1*input.shape[0])/(input.shape[0]-1),th.config.floatX)*batch_var
            self.bn_updates = [(self.avg_batch_mean, new_m), (self.avg_batch_var, new_v)]
        if hasattr(self, 'g'):
            activation = norm_features*self.g.dimshuffle(*self.dimshuffle_args)
        else:
            activation = norm_features
        if hasattr(self, 'b'):
            activation += self.b.dimshuffle(*self.dimshuffle_args)
        return self.nonlinearity(activation)
def batch_norm(layer, b=lasagne.init.Constant(0.), g=lasagne.init.Constant(1.), **kwargs):
    """
    adapted from https://gist.github.com/f0k/f1a6bd3c8585c400c190

    Wraps `layer` in a BatchNormLayer, moving the layer's bias and
    nonlinearity into the wrapper so they are applied after normalisation.
    """
    nonlinearity = getattr(layer, 'nonlinearity', None)
    if nonlinearity is not None:
        layer.nonlinearity = lasagne.nonlinearities.identity
    else:
        nonlinearity = lasagne.nonlinearities.identity
    if hasattr(layer, 'b'):
        # Remove b from the wrapped layer's trainable parameters.
        del layer.params[layer.b]
        layer.b = None
    return BatchNormLayer(layer, b, g, nonlinearity=nonlinearity, **kwargs)
class GaussianNoiseLayer(lasagne.layers.Layer):
    """Additively corrupts its input with zero-mean Gaussian noise of
    standard deviation `sigma` (training time only)."""
    def __init__(self, incoming, sigma=0.1, **kwargs):
        super(GaussianNoiseLayer, self).__init__(incoming, **kwargs)
        self._srng = RandomStreams(lasagne.random.get_rng().randint(1, 2147462579))
        self.sigma = sigma

    def get_output_for(self, input, deterministic=False, use_last_noise=False, **kwargs):
        # Pass through untouched at test time or when noise is disabled.
        if deterministic or self.sigma == 0:
            return input
        # Optionally re-use the previously sampled noise tensor.
        if not use_last_noise:
            self.noise = self._srng.normal(input.shape, avg=0.0, std=self.sigma)
        return input + self.noise
# /////////// older code used for MNIST ////////////
# weight normalization
def l2normalize(layer, train_scale=True):
    """Reparameterise `layer`'s weights as W_scale * W / ||W||_2 per output
    unit (2D) or output channel (4D).  Mutates the layer in place and
    returns it."""
    W_param = layer.W
    s = W_param.get_value().shape
    if len(s)==4:
        # Convolutional weights: norm over (in, h, w) per output channel.
        axes_to_sum = (1,2,3)
        dimshuffle_args = [0,'x','x','x']
        k = s[0]
    else:
        axes_to_sum = 0
        dimshuffle_args = ['x',0]
        k = s[1]
    layer.W_scale = layer.add_param(lasagne.init.Constant(1.),
                                    (k,), name="W_scale", trainable=train_scale, regularizable=False)
    # Replace W with its scaled, L2-normalised expression.
    layer.W = W_param * (layer.W_scale/T.sqrt(1e-6 + T.sum(T.square(W_param),axis=axes_to_sum))).dimshuffle(*dimshuffle_args)
    return layer
# fully connected layer with weight normalization
class DenseLayer(lasagne.layers.Layer):
    """Weight-normalised fully connected layer with optional data-dependent
    initialisation (``init=True`` in get_output_for)."""
    def __init__(self, incoming, num_units, theta=lasagne.init.Normal(0.1), b=lasagne.init.Constant(0.),
                 weight_scale=lasagne.init.Constant(1.), train_scale=False, nonlinearity=relu, **kwargs):
        super(DenseLayer, self).__init__(incoming, **kwargs)
        self.nonlinearity = (lasagne.nonlinearities.identity if nonlinearity is None else nonlinearity)
        self.num_units = num_units
        num_inputs = int(np.prod(self.input_shape[1:]))
        self.theta = self.add_param(theta, (num_inputs, num_units), name="theta")
        self.weight_scale = self.add_param(weight_scale, (num_units,), name="weight_scale", trainable=train_scale)
        # Effective weights: scaled, L2-normalised columns of theta.
        self.W = self.theta * (self.weight_scale/T.sqrt(T.sum(T.square(self.theta),axis=0))).dimshuffle('x',0)
        self.b = self.add_param(b, (num_units,), name="b")

    def get_output_shape_for(self, input_shape):
        return (input_shape[0], self.num_units)

    def get_output_for(self, input, init=False, deterministic=False, **kwargs):
        if input.ndim > 2:
            # if the input has more than two dimensions, flatten it into a
            # batch of feature vectors.
            input = input.flatten(2)
        activation = T.dot(input, self.W)
        if init:
            # Data-dependent init: rescale so pre-activations are zero-mean,
            # unit-variance; the caller applies self.init_updates.
            ma = T.mean(activation, axis=0)
            activation -= ma.dimshuffle('x',0)
            stdv = T.sqrt(T.mean(T.square(activation),axis=0))
            activation /= stdv.dimshuffle('x',0)
            self.init_updates = [(self.weight_scale, self.weight_scale/stdv), (self.b, -ma/stdv)]
        else:
            activation += self.b.dimshuffle('x', 0)
        return self.nonlinearity(activation)
mod lang_tests_common;
fn main() {
lang_tests_common::main_inner(lang_tests_common::Profile::Debug);
} | rust | github | https://github.com/rust-lang/rust | compiler/rustc_codegen_gcc/tests/lang_tests_debug.rs |
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2011 Smartmode LTD (<http://www.smartmode.co.uk>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP module manifest for the UK accounting localisation.
# NOTE(review): 'installable' was previously the string 'True'; the manifest
# schema expects a boolean (the string only worked by being truthy).
{
    'name': 'UK - Accounting',
    'version': '1.0',
    'category': 'Localization/Account Charts',
    'description': """
This is the latest UK OpenERP localisation necessary to run OpenERP accounting for UK SME's with:
=================================================================================================
    - a CT600-ready chart of accounts
    - VAT100-ready tax structure
    - InfoLogic UK counties listing
    - a few other adaptations""",
    'author': 'SmartMode LTD',
    'website': 'http://www.smartmode.co.uk',
    'depends': ['base_iban', 'base_vat', 'account_chart', 'account_anglo_saxon'],
    # Data files loaded at module installation, in dependency order.
    'data': [
        'data/account.account.type.csv',
        'data/account.account.template.csv',
        'data/account.tax.code.template.csv',
        'data/account.chart.template.csv',
        'data/account.tax.template.csv',
        'data/res.country.state.csv',
        'l10n_uk_wizard.xml',
    ],
    'demo': ['demo/demo.xml'],
    'installable': True,
    'images': ['images/config_chart_l10n_uk.jpeg', 'images/l10n_uk_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | unknown | codeparrot/codeparrot-clean | ||
# coding: utf-8
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import io
from units.compat import unittest
from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.module_utils.six import PY2
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
from units.mock.yaml_helper import YamlTestUtils
from units.mock.vault_helper import TextVaultSecret
class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
    """Round-trip tests for AnsibleDumper: vault-encrypted and unsafe values
    must survive a dump/load cycle through the Ansible YAML machinery."""

    def setUp(self):
        self.vault_password = "hunter42"
        vault_secret = TextVaultSecret(self.vault_password)
        self.vault_secrets = [('vault_secret', vault_secret)]
        self.good_vault = vault.VaultLib(self.vault_secrets)
        self.vault = self.good_vault
        self.stream = self._build_stream()
        self.dumper = dumper.AnsibleDumper

    def _build_stream(self, yaml_text=None):
        return io.StringIO(yaml_text or u'')

    def _loader(self, stream):
        return AnsibleLoader(stream, vault_secrets=self.vault.secrets)

    def _roundtrip(self, obj):
        """Dump `obj` with AnsibleDumper, reload it, and return the result."""
        yaml_out = self._dump_string(obj, dumper=self.dumper)
        loader = self._loader(self._build_stream(yaml_out))
        return loader.get_single_data()

    def test_ansible_vault_encrypted_unicode(self):
        plaintext = 'This is a string we are going to encrypt.'
        secret = vault.match_secrets(self.vault_secrets, ['vault_secret'])[0][1]
        avu = objects.AnsibleVaultEncryptedUnicode.from_plaintext(
            plaintext, vault=self.vault, secret=secret)
        self.assertEqual(plaintext, self._roundtrip(avu).data)

    def test_bytes(self):
        b_text = u'tréma'.encode('utf-8')
        data_from_yaml = self._roundtrip(AnsibleUnsafeBytes(b_text))
        expected = b_text
        if PY2:
            # https://pyyaml.org/wiki/PyYAMLDocumentation#string-conversion-python-2-only
            # pyyaml on Python 2 can return either unicode or bytes when given byte strings.
            # We normalize that to always return unicode on Python2 as that's right most of the
            # time. However, this means byte strings can round trip through yaml on Python3 but
            # not on Python2. To make this code work the same on Python2 and Python3 (we want
            # the Python3 behaviour) we need to change the methods in Ansible to:
            # (1) Let byte strings pass through yaml without being converted on Python2
            # (2) Convert byte strings to text strings before being given to pyyaml (Without this,
            #     strings would end up as byte strings most of the time which would mostly be wrong)
            # In practice, we mostly read bytes in from files and then pass that to pyyaml, for which
            # the present behavior is correct.
            # This is a workaround for the current behavior.
            expected = u'tr\xe9ma'
        self.assertEqual(expected, data_from_yaml)

    def test_unicode(self):
        u_text = u'nöel'
        self.assertEqual(u_text, self._roundtrip(AnsibleUnsafeText(u_text)))
"""Helper class for TLSConnection."""
from __future__ import generators
from utils.compat import *
from utils.cryptomath import *
from utils.cipherfactory import createAES, createRC4, createTripleDES
from utils.codec import *
from errors import *
from messages import *
from mathtls import *
from constants import *
from utils.cryptomath import getRandomBytes
from utils import hmac
from FileObject import FileObject
# The sha module is deprecated in Python 2.6
try:
import sha
except ImportError:
from hashlib import sha1 as sha
# The md5 module is deprecated in Python 2.6
try:
import md5
except ImportError:
from hashlib import md5
import socket
import errno
import traceback
class _ConnectionState:
    """Per-direction record-layer state: MAC context, cipher context, and the
    monotonically increasing record sequence number."""

    def __init__(self):
        self.macContext = None
        self.encContext = None
        self.seqnum = 0

    def getSeqNumStr(self):
        """Return the current sequence number as an 8-byte big-endian string,
        then advance the counter."""
        writer = Writer(8)
        writer.add(self.seqnum, 8)
        self.seqnum = self.seqnum + 1
        return bytesToString(writer.bytes)
class TLSRecordLayer:
"""
This class handles data transmission for a TLS connection.
Its only subclass is L{tlslite.TLSConnection.TLSConnection}. We've
separated the code in this class from TLSConnection to make things
more readable.
@type sock: socket.socket
@ivar sock: The underlying socket object.
@type session: L{tlslite.Session.Session}
@ivar session: The session corresponding to this connection.
Due to TLS session resumption, multiple connections can correspond
to the same underlying session.
@type version: tuple
@ivar version: The TLS version being used for this connection.
(3,0) means SSL 3.0, and (3,1) means TLS 1.0.
@type closed: bool
@ivar closed: If this connection is closed.
@type resumed: bool
@ivar resumed: If this connection is based on a resumed session.
@type allegedSharedKeyUsername: str or None
@ivar allegedSharedKeyUsername: This is set to the shared-key
username asserted by the client, whether the handshake succeeded or
not. If the handshake fails, this can be inspected to
determine if a guessing attack is in progress against a particular
user account.
@type allegedSrpUsername: str or None
@ivar allegedSrpUsername: This is set to the SRP username
asserted by the client, whether the handshake succeeded or not.
If the handshake fails, this can be inspected to determine
if a guessing attack is in progress against a particular user
account.
@type closeSocket: bool
@ivar closeSocket: If the socket should be closed when the
connection is closed (writable).
If you set this to True, TLS Lite will assume the responsibility of
closing the socket when the TLS Connection is shutdown (either
through an error or through the user calling close()). The default
is False.
@type ignoreAbruptClose: bool
@ivar ignoreAbruptClose: If an abrupt close of the socket should
raise an error (writable).
If you set this to True, TLS Lite will not raise a
L{tlslite.errors.TLSAbruptCloseError} exception if the underlying
socket is unexpectedly closed. Such an unexpected closure could be
caused by an attacker. However, it also occurs with some incorrect
TLS implementations.
You should set this to True only if you're not worried about an
attacker truncating the connection, and only if necessary to avoid
spurious errors. The default is False.
@sort: __init__, read, readAsync, write, writeAsync, close, closeAsync,
getCipherImplementation, getCipherName
"""
    def __init__(self, sock):
        """Wrap *sock* in a TLS record layer with empty (null) cipher state.

        @type sock: socket.socket
        @param sock: The underlying socket to read/write records on.
        """
        self.sock = sock
        #My session object (Session instance; read-only)
        self.session = None
        #Am I a client or server?
        self._client = None
        #Buffers for processing messages
        self._handshakeBuffer = []
        self._readBuffer = ""
        #Handshake digests
        # NOTE(review): md5.md5()/sha.sha() assume the Python 2 `md5`/`sha`
        # modules; the hashlib fallback imports at the top of this file bind
        # constructor functions, not modules, so this line would fail there
        # -- confirm the intended Python version.
        self._handshake_md5 = md5.md5()
        self._handshake_sha = sha.sha()
        #TLS Protocol Version
        self.version = (0,0) #read-only
        self._versionCheck = False #Once we choose a version, this is True
        #Current and Pending connection states
        self._writeState = _ConnectionState()
        self._readState = _ConnectionState()
        self._pendingWriteState = _ConnectionState()
        self._pendingReadState = _ConnectionState()
        #Is the connection open?
        self.closed = True #read-only
        self._refCount = 0 #Used to trigger closure
        #Is this a resumed (or shared-key) session?
        self.resumed = False #read-only
        #What username did the client claim in his handshake?
        self.allegedSharedKeyUsername = None
        self.allegedSrpUsername = None
        #On a call to close(), do we close the socket? (writeable)
        self.closeSocket = False
        #If the socket is abruptly closed, do we ignore it
        #and pretend the connection was shut down properly? (writeable)
        self.ignoreAbruptClose = False
        #Fault we will induce, for testing purposes
        self.fault = None
#*********************************************************
# Public Functions START
#*********************************************************
def read(self, max=None, min=1):
"""Read some data from the TLS connection.
This function will block until at least 'min' bytes are
available (or the connection is closed).
If an exception is raised, the connection will have been
automatically closed.
@type max: int
@param max: The maximum number of bytes to return.
@type min: int
@param min: The minimum number of bytes to return
@rtype: str
@return: A string of no more than 'max' bytes, and no fewer
than 'min' (unless the connection has been closed, in which
case fewer than 'min' bytes may be returned).
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
"""
for result in self.readAsync(max, min):
pass
return result
    def readAsync(self, max=None, min=1):
        """Start a read operation on the TLS connection.

        This function returns a generator which behaves similarly to
        read().  Successive invocations of the generator will return 0
        if it is waiting to read from the socket, 1 if it is waiting
        to write to the socket, or a string if the read operation has
        completed.

        @rtype: iterable
        @return: A generator; see above for details.
        """
        try:
            #Accumulate application data until we have `min` bytes buffered
            #or the connection closes.
            while len(self._readBuffer)<min and not self.closed:
                try:
                    for result in self._getMsg(ContentType.application_data):
                        if result in (0,1):
                            yield result
                    applicationData = result
                    self._readBuffer += bytesToString(applicationData.write())
                except TLSRemoteAlert, alert:
                    #A close_notify alert is normal termination, not an error.
                    if alert.description != AlertDescription.close_notify:
                        raise
                except TLSAbruptCloseError:
                    if not self.ignoreAbruptClose:
                        raise
                    else:
                        self._shutdown(True)
            #No max means "everything currently buffered".
            if max == None:
                max = len(self._readBuffer)
            returnStr = self._readBuffer[:max]
            self._readBuffer = self._readBuffer[max:]
            yield returnStr
        except:
            self._shutdown(False)
            raise
def write(self, s):
"""Write some data to the TLS connection.
This function will block until all the data has been sent.
If an exception is raised, the connection will have been
automatically closed.
@type s: str
@param s: The data to transmit to the other party.
@raise socket.error: If a socket error occurs.
"""
for result in self.writeAsync(s):
pass
def writeAsync(self, s):
"""Start a write operation on the TLS connection.
This function returns a generator which behaves similarly to
write(). Successive invocations of the generator will return
1 if it is waiting to write to the socket, or will raise
StopIteration if the write operation has completed.
@rtype: iterable
@return: A generator; see above for details.
"""
try:
if self.closed:
raise ValueError()
index = 0
blockSize = 16384
skipEmptyFrag = False
while 1:
startIndex = index * blockSize
endIndex = startIndex + blockSize
if startIndex >= len(s):
break
if endIndex > len(s):
endIndex = len(s)
block = stringToBytes(s[startIndex : endIndex])
applicationData = ApplicationData().create(block)
for result in self._sendMsg(applicationData, skipEmptyFrag):
yield result
skipEmptyFrag = True #only send an empy fragment on 1st message
index += 1
except:
self._shutdown(False)
raise
def close(self):
"""Close the TLS connection.
This function will block until it has exchanged close_notify
alerts with the other party. After doing so, it will shut down the
TLS connection. Further attempts to read through this connection
will return "". Further attempts to write through this connection
will raise ValueError.
If makefile() has been called on this connection, the connection
will be not be closed until the connection object and all file
objects have been closed.
Even if an exception is raised, the connection will have been
closed.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
"""
if not self.closed:
for result in self._decrefAsync():
pass
def closeAsync(self):
"""Start a close operation on the TLS connection.
This function returns a generator which behaves similarly to
close(). Successive invocations of the generator will return 0
if it is waiting to read from the socket, 1 if it is waiting
to write to the socket, or will raise StopIteration if the
close operation has completed.
@rtype: iterable
@return: A generator; see above for details.
"""
if not self.closed:
for result in self._decrefAsync():
yield result
    def _decrefAsync(self):
        """Decrement the file-object refcount; when it reaches zero, perform
        the close_notify exchange and shut the connection down.

        Generator: yields 0/1 while waiting on the socket.
        """
        self._refCount -= 1
        if self._refCount == 0 and not self.closed:
            try:
                for result in self._sendMsg(Alert().create(\
                        AlertDescription.close_notify, AlertLevel.warning)):
                    yield result
                alert = None
                # Forcing a shutdown as WinHTTP does not seem to be
                # responsive to the close notify.
                prevCloseSocket = self.closeSocket
                self.closeSocket = True
                self._shutdown(True)
                self.closeSocket = prevCloseSocket
                #Wait for the peer's close_notify (or a fatal alert).
                while not alert:
                    for result in self._getMsg((ContentType.alert, \
                                              ContentType.application_data)):
                        if result in (0,1):
                            yield result
                    if result.contentType == ContentType.alert:
                        alert = result
                if alert.description == AlertDescription.close_notify:
                    self._shutdown(True)
                else:
                    raise TLSRemoteAlert(alert)
            except (socket.error, TLSAbruptCloseError):
                #If the other side closes the socket, that's okay
                self._shutdown(True)
            except:
                self._shutdown(False)
                raise
def getCipherName(self):
"""Get the name of the cipher used with this connection.
@rtype: str
@return: The name of the cipher used with this connection.
Either 'aes128', 'aes256', 'rc4', or '3des'.
"""
if not self._writeState.encContext:
return None
return self._writeState.encContext.name
def getCipherImplementation(self):
"""Get the name of the cipher implementation used with
this connection.
@rtype: str
@return: The name of the cipher implementation used with
this connection. Either 'python', 'cryptlib', 'openssl',
or 'pycrypto'.
"""
if not self._writeState.encContext:
return None
return self._writeState.encContext.implementation
#Emulate a socket, somewhat -
def send(self, s):
"""Send data to the TLS connection (socket emulation).
@raise socket.error: If a socket error occurs.
"""
self.write(s)
return len(s)
def sendall(self, s):
"""Send data to the TLS connection (socket emulation).
@raise socket.error: If a socket error occurs.
"""
self.write(s)
def recv(self, bufsize):
"""Get some data from the TLS connection (socket emulation).
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
"""
return self.read(bufsize)
def makefile(self, mode='r', bufsize=-1):
"""Create a file object for the TLS connection (socket emulation).
@rtype: L{tlslite.FileObject.FileObject}
"""
self._refCount += 1
return FileObject(self, mode, bufsize)
    def getsockname(self):
        """Return the socket's own address, delegating to the underlying
        socket (socket emulation)."""
        return self.sock.getsockname()
    def getpeername(self):
        """Return the remote address to which the underlying socket is
        connected (socket emulation)."""
        return self.sock.getpeername()
    def settimeout(self, value):
        """Set a timeout on blocking operations of the underlying socket
        (socket emulation)."""
        return self.sock.settimeout(value)
    def gettimeout(self):
        """Return the timeout associated with operations of the underlying
        socket (socket emulation)."""
        return self.sock.gettimeout()
    def setsockopt(self, level, optname, value):
        """Set the value of the given option on the underlying socket
        (socket emulation)."""
        return self.sock.setsockopt(level, optname, value)
#*********************************************************
# Public Functions END
#*********************************************************
def _shutdown(self, resumable):
self._writeState = _ConnectionState()
self._readState = _ConnectionState()
#Don't do this: self._readBuffer = ""
self.version = (0,0)
self._versionCheck = False
self.closed = True
if self.closeSocket:
self.sock.close()
#Even if resumable is False, we'll never toggle this on
if not resumable and self.session:
self.session.resumable = False
def _sendError(self, alertDescription, errorStr=None):
alert = Alert().create(alertDescription, AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
self._shutdown(False)
raise TLSLocalAlert(alert, errorStr)
def _sendMsgs(self, msgs):
skipEmptyFrag = False
for msg in msgs:
for result in self._sendMsg(msg, skipEmptyFrag):
yield result
skipEmptyFrag = True
    def _sendMsg(self, msg, skipEmptyFrag=False):
        """Serialize, MAC, encrypt, frame, and transmit one record.

        Generator: yields 1 while the socket would block on write, then
        returns once the record has been fully sent.
        """
        bytes = msg.write()
        contentType = msg.contentType
        #Whenever we're connected and asked to send a message,
        #we first send an empty Application Data message. This prevents
        #an attacker from launching a chosen-plaintext attack based on
        #knowing the next IV.
        if not self.closed and not skipEmptyFrag and self.version == (3,1):
            if self._writeState.encContext:
                if self._writeState.encContext.isBlockCipher:
                    for result in self._sendMsg(ApplicationData(),
                                               skipEmptyFrag=True):
                        yield result
        #Update handshake hashes
        if contentType == ContentType.handshake:
            bytesStr = bytesToString(bytes)
            self._handshake_md5.update(bytesStr)
            self._handshake_sha.update(bytesStr)
        #Calculate MAC
        if self._writeState.macContext:
            seqnumStr = self._writeState.getSeqNumStr()
            bytesStr = bytesToString(bytes)
            mac = self._writeState.macContext.copy()
            #MAC input: seqnum | type | (version) | length | fragment
            mac.update(seqnumStr)
            mac.update(chr(contentType))
            if self.version == (3,0):
                mac.update( chr( int(len(bytes)/256) ) )
                mac.update( chr( int(len(bytes)%256) ) )
            elif self.version in ((3,1), (3,2)):
                mac.update(chr(self.version[0]))
                mac.update(chr(self.version[1]))
                mac.update( chr( int(len(bytes)/256) ) )
                mac.update( chr( int(len(bytes)%256) ) )
            else:
                raise AssertionError()
            mac.update(bytesStr)
            macString = mac.digest()
            macBytes = stringToBytes(macString)
            if self.fault == Fault.badMAC:
                macBytes[0] = (macBytes[0]+1) % 256
        #Encrypt for Block or Stream Cipher
        if self._writeState.encContext:
            #Add padding and encrypt (for Block Cipher):
            if self._writeState.encContext.isBlockCipher:
                #Add TLS 1.1 fixed block
                if self.version == (3,2):
                    bytes = self.fixedIVBlock + bytes
                #Add padding: bytes = bytes + (macBytes + paddingBytes)
                currentLength = len(bytes) + len(macBytes) + 1
                blockLength = self._writeState.encContext.block_size
                paddingLength = blockLength-(currentLength % blockLength)
                paddingBytes = createByteArraySequence([paddingLength] * \
                                                      (paddingLength+1))
                if self.fault == Fault.badPadding:
                    paddingBytes[0] = (paddingBytes[0]+1) % 256
                endBytes = concatArrays(macBytes, paddingBytes)
                bytes = concatArrays(bytes, endBytes)
                #Encrypt
                plaintext = stringToBytes(bytes)
                ciphertext = self._writeState.encContext.encrypt(plaintext)
                bytes = stringToBytes(ciphertext)
            #Encrypt (for Stream Cipher)
            else:
                bytes = concatArrays(bytes, macBytes)
                plaintext = bytesToString(bytes)
                ciphertext = self._writeState.encContext.encrypt(plaintext)
                bytes = stringToBytes(ciphertext)
        #Add record header and send
        r = RecordHeader3().create(self.version, contentType, len(bytes))
        s = bytesToString(concatArrays(r.write(), bytes))
        while 1:
            try:
                bytesSent = self.sock.send(s) #Might raise socket.error
            except socket.error, why:
                if why[0] == errno.EWOULDBLOCK:
                    yield 1
                    continue
                else:
                    raise
            if bytesSent == len(s):
                return
            #Partial send: keep sending the remainder.
            s = s[bytesSent:]
            yield 1
    def _getMsg(self, expectedType, secondaryType=None, constructorType=None):
        """Read and parse the next message of one of the `expectedType`
        content types (handshake subtypes filtered by `secondaryType`).

        Generator: yields 0/1 while waiting on the socket, then yields the
        parsed message object.  Unexpected records trigger alerts; alerts
        and renegotiation attempts from the peer are handled here.
        """
        try:
            if not isinstance(expectedType, tuple):
                expectedType = (expectedType,)
            #Spin in a loop, until we've got a non-empty record of a type we
            #expect. The loop will be repeated if:
            #  - we receive a renegotiation attempt; we send no_renegotiation,
            #    then try again
            #  - we receive an empty application-data fragment; we try again
            while 1:
                for result in self._getNextRecord():
                    if result in (0,1):
                        yield result
                recordHeader, p = result
                #If this is an empty application-data fragment, try again
                if recordHeader.type == ContentType.application_data:
                    if p.index == len(p.bytes):
                        continue
                #If we received an unexpected record type...
                if recordHeader.type not in expectedType:
                    #If we received an alert...
                    if recordHeader.type == ContentType.alert:
                        alert = Alert().parse(p)
                        #We either received a fatal error, a warning, or a
                        #close_notify. In any case, we're going to close the
                        #connection. In the latter two cases we respond with
                        #a close_notify, but ignore any socket errors, since
                        #the other side might have already closed the socket.
                        if alert.level == AlertLevel.warning or \
                           alert.description == AlertDescription.close_notify:
                            #If the sendMsg() call fails because the socket has
                            #already been closed, we will be forgiving and not
                            #report the error nor invalidate the "resumability"
                            #of the session.
                            try:
                                alertMsg = Alert()
                                alertMsg.create(AlertDescription.close_notify,
                                                AlertLevel.warning)
                                for result in self._sendMsg(alertMsg):
                                    yield result
                            except socket.error:
                                pass
                            if alert.description == \
                                   AlertDescription.close_notify:
                                self._shutdown(True)
                            elif alert.level == AlertLevel.warning:
                                self._shutdown(False)
                        else: #Fatal alert:
                            self._shutdown(False)
                        #Raise the alert as an exception
                        raise TLSRemoteAlert(alert)
                    #If we received a renegotiation attempt...
                    if recordHeader.type == ContentType.handshake:
                        subType = p.get(1)
                        reneg = False
                        if self._client:
                            if subType == HandshakeType.hello_request:
                                reneg = True
                        else:
                            if subType == HandshakeType.client_hello:
                                reneg = True
                        #Send no_renegotiation, then try again
                        if reneg:
                            alertMsg = Alert()
                            alertMsg.create(AlertDescription.no_renegotiation,
                                            AlertLevel.warning)
                            for result in self._sendMsg(alertMsg):
                                yield result
                            continue
                    #Otherwise: this is an unexpected record, but neither an
                    #alert nor renegotiation
                    for result in self._sendError(\
                            AlertDescription.unexpected_message,
                            "received type=%d" % recordHeader.type):
                        yield result
                break
            #Parse based on content_type
            if recordHeader.type == ContentType.change_cipher_spec:
                yield ChangeCipherSpec().parse(p)
            elif recordHeader.type == ContentType.alert:
                yield Alert().parse(p)
            elif recordHeader.type == ContentType.application_data:
                yield ApplicationData().parse(p)
            elif recordHeader.type == ContentType.handshake:
                #Convert secondaryType to tuple, if it isn't already
                if not isinstance(secondaryType, tuple):
                    secondaryType = (secondaryType,)
                #If it's a handshake message, check handshake header
                if recordHeader.ssl2:
                    subType = p.get(1)
                    if subType != HandshakeType.client_hello:
                        for result in self._sendError(\
                                AlertDescription.unexpected_message,
                                "Can only handle SSLv2 ClientHello messages"):
                            yield result
                    if HandshakeType.client_hello not in secondaryType:
                        for result in self._sendError(\
                                AlertDescription.unexpected_message):
                            yield result
                    subType = HandshakeType.client_hello
                else:
                    subType = p.get(1)
                    if subType not in secondaryType:
                        for result in self._sendError(\
                                AlertDescription.unexpected_message,
                                "Expecting %s, got %s" % (str(secondaryType), subType)):
                            yield result
                #Update handshake hashes
                sToHash = bytesToString(p.bytes)
                self._handshake_md5.update(sToHash)
                self._handshake_sha.update(sToHash)
                #Parse based on handshake type
                if subType == HandshakeType.client_hello:
                    yield ClientHello(recordHeader.ssl2).parse(p)
                elif subType == HandshakeType.server_hello:
                    yield ServerHello().parse(p)
                elif subType == HandshakeType.certificate:
                    yield Certificate(constructorType).parse(p)
                elif subType == HandshakeType.certificate_request:
                    yield CertificateRequest().parse(p)
                elif subType == HandshakeType.certificate_verify:
                    yield CertificateVerify().parse(p)
                elif subType == HandshakeType.server_key_exchange:
                    yield ServerKeyExchange(constructorType).parse(p)
                elif subType == HandshakeType.server_hello_done:
                    yield ServerHelloDone().parse(p)
                elif subType == HandshakeType.client_key_exchange:
                    yield ClientKeyExchange(constructorType, \
                                            self.version).parse(p)
                elif subType == HandshakeType.finished:
                    yield Finished(self.version).parse(p)
                else:
                    raise AssertionError()
        #If an exception was raised by a Parser or Message instance:
        except SyntaxError, e:
            for result in self._sendError(AlertDescription.decode_error,
                                         formatExceptionTrace(e)):
                yield result
#Returns next record or next handshake message
    def _getNextRecord(self):
        """Read the next record (or buffered handshake message) from the
        socket, decrypt/verify it, and yield a (RecordHeader, Parser) pair.

        Generator: yields 0 while the socket would block on read.
        Handshake records may contain several messages; extra ones are
        queued in self._handshakeBuffer and returned one at a time.
        """
        #If there's a handshake message waiting, return it
        if self._handshakeBuffer:
            recordHeader, bytes = self._handshakeBuffer[0]
            self._handshakeBuffer = self._handshakeBuffer[1:]
            yield (recordHeader, Parser(bytes))
            return
        #Otherwise...
        #Read the next record header
        bytes = createByteArraySequence([])
        recordHeaderLength = 1
        ssl2 = False
        while 1:
            try:
                s = self.sock.recv(recordHeaderLength-len(bytes))
            except socket.error, why:
                if why[0] == errno.EWOULDBLOCK:
                    yield 0
                    continue
                else:
                    raise
            #If the connection was abruptly closed, raise an error
            if len(s)==0:
                raise TLSAbruptCloseError()
            bytes += stringToBytes(s)
            #First byte disambiguates SSLv2 (high bit set -> 128) from
            #SSLv3/TLS record headers.
            if len(bytes)==1:
                if bytes[0] in ContentType.all:
                    ssl2 = False
                    recordHeaderLength = 5
                elif bytes[0] == 128:
                    ssl2 = True
                    recordHeaderLength = 2
                else:
                    raise SyntaxError()
            if len(bytes) == recordHeaderLength:
                break
        #Parse the record header
        if ssl2:
            r = RecordHeader2().parse(Parser(bytes))
        else:
            r = RecordHeader3().parse(Parser(bytes))
        #Check the record header fields
        if r.length > 18432:
            for result in self._sendError(AlertDescription.record_overflow):
                yield result
        #Read the record contents
        bytes = createByteArraySequence([])
        while 1:
            try:
                s = self.sock.recv(r.length - len(bytes))
            except socket.error, why:
                if why[0] == errno.EWOULDBLOCK:
                    yield 0
                    continue
                else:
                    raise
            #If the connection is closed, raise a socket error
            if len(s)==0:
                raise TLSAbruptCloseError()
            bytes += stringToBytes(s)
            if len(bytes) == r.length:
                break
        #Check the record header fields (2)
        #We do this after reading the contents from the socket, so that
        #if there's an error, we at least don't leave extra bytes in the
        #socket..
        #
        # THIS CHECK HAS NO SECURITY RELEVANCE (?), BUT COULD HURT INTEROP.
        # SO WE LEAVE IT OUT FOR NOW.
        #
        #if self._versionCheck and r.version != self.version:
        #    for result in self._sendError(AlertDescription.protocol_version,
        #            "Version in header field: %s, should be %s" % (str(r.version),
        #                                       str(self.version))):
        #        yield result
        #Decrypt the record
        for result in self._decryptRecord(r.type, bytes):
            if result in (0,1):
                yield result
            else:
                break
        bytes = result
        p = Parser(bytes)
        #If it doesn't contain handshake messages, we can just return it
        if r.type != ContentType.handshake:
            yield (r, p)
        #If it's an SSLv2 ClientHello, we can return it as well
        elif r.ssl2:
            yield (r, p)
        else:
            #Otherwise, we loop through and add the handshake messages to the
            #handshake buffer
            while 1:
                if p.index == len(bytes): #If we're at the end
                    if not self._handshakeBuffer:
                        for result in self._sendError(\
                                AlertDescription.decode_error, \
                                "Received empty handshake record"):
                            yield result
                    break
                #There needs to be at least 4 bytes to get a header
                if p.index+4 > len(bytes):
                    for result in self._sendError(\
                            AlertDescription.decode_error,
                            "A record has a partial handshake message (1)"):
                        yield result
                p.get(1) # skip handshake type
                msgLength = p.get(3)
                if p.index+msgLength > len(bytes):
                    for result in self._sendError(\
                            AlertDescription.decode_error,
                            "A record has a partial handshake message (2)"):
                        yield result
                handshakePair = (r, bytes[p.index-4 : p.index+msgLength])
                self._handshakeBuffer.append(handshakePair)
                p.index += msgLength
            #We've moved at least one handshake message into the
            #handshakeBuffer, return the first one
            recordHeader, bytes = self._handshakeBuffer[0]
            self._handshakeBuffer = self._handshakeBuffer[1:]
            yield (recordHeader, Parser(bytes))
    def _decryptRecord(self, recordType, bytes):
        """Decrypt a record payload and verify its padding and MAC.

        Generator: yields 0/1 while an alert is being sent, then yields the
        plaintext bytes with MAC/padding stripped.  Padding and MAC failures
        are reported with a single bad_record_mac alert.
        """
        if self._readState.encContext:
            #Decrypt if it's a block cipher
            if self._readState.encContext.isBlockCipher:
                blockLength = self._readState.encContext.block_size
                if len(bytes) % blockLength != 0:
                    for result in self._sendError(\
                            AlertDescription.decryption_failed,
                            "Encrypted data not a multiple of blocksize"):
                        yield result
                ciphertext = bytesToString(bytes)
                plaintext = self._readState.encContext.decrypt(ciphertext)
                if self.version == (3,2): #For TLS 1.1, remove explicit IV
                    plaintext = plaintext[self._readState.encContext.block_size : ]
                bytes = stringToBytes(plaintext)
                #Check padding
                paddingGood = True
                paddingLength = bytes[-1]
                if (paddingLength+1) > len(bytes):
                    paddingGood=False
                    totalPaddingLength = 0
                else:
                    if self.version == (3,0):
                        totalPaddingLength = paddingLength+1
                    elif self.version in ((3,1), (3,2)):
                        totalPaddingLength = paddingLength+1
                        #TLS requires every padding byte to equal the length.
                        paddingBytes = bytes[-totalPaddingLength:-1]
                        for byte in paddingBytes:
                            if byte != paddingLength:
                                paddingGood = False
                                totalPaddingLength = 0
                    else:
                        raise AssertionError()
            #Decrypt if it's a stream cipher
            else:
                paddingGood = True
                ciphertext = bytesToString(bytes)
                plaintext = self._readState.encContext.decrypt(ciphertext)
                bytes = stringToBytes(plaintext)
                totalPaddingLength = 0
            #Check MAC
            macGood = True
            macLength = self._readState.macContext.digest_size
            endLength = macLength + totalPaddingLength
            if endLength > len(bytes):
                macGood = False
            else:
                #Read MAC
                startIndex = len(bytes) - endLength
                endIndex = startIndex + macLength
                checkBytes = bytes[startIndex : endIndex]
                #Calculate MAC
                seqnumStr = self._readState.getSeqNumStr()
                bytes = bytes[:-endLength]
                bytesStr = bytesToString(bytes)
                mac = self._readState.macContext.copy()
                mac.update(seqnumStr)
                mac.update(chr(recordType))
                if self.version == (3,0):
                    mac.update( chr( int(len(bytes)/256) ) )
                    mac.update( chr( int(len(bytes)%256) ) )
                elif self.version in ((3,1), (3,2)):
                    mac.update(chr(self.version[0]))
                    mac.update(chr(self.version[1]))
                    mac.update( chr( int(len(bytes)/256) ) )
                    mac.update( chr( int(len(bytes)%256) ) )
                else:
                    raise AssertionError()
                mac.update(bytesStr)
                macString = mac.digest()
                macBytes = stringToBytes(macString)
                #Compare MACs
                if macBytes != checkBytes:
                    macGood = False
            if not (paddingGood and macGood):
                for result in self._sendError(AlertDescription.bad_record_mac,
                                          "MAC failure (or padding failure)"):
                    yield result
        yield bytes
    def _handshakeStart(self, client):
        """Reset per-handshake state.

        :param client: True when this side acts as the TLS client.

        Fresh MD5 and SHA-1 running hashes are created for the handshake
        transcript (used later by _calcFinished / _calcSSLHandshakeHash).
        Uses the Python 2 `md5`/`sha` modules.
        """
        self._client = client
        self._handshake_md5 = md5.md5()
        self._handshake_sha = sha.sha()
        self._handshakeBuffer = []
        self.allegedSharedKeyUsername = None
        self.allegedSrpUsername = None
        self._refCount = 1
    def _handshakeDone(self, resumed):
        """Mark the handshake complete and the connection open.

        :param resumed: True when the session was resumed rather than fully
            negotiated.
        """
        self.resumed = resumed
        self.closed = False
    def _calcPendingStates(self, clientRandom, serverRandom, implementations):
        """Derive the pending read/write connection states from the master secret.

        Expands the session master secret into MAC keys, cipher keys and IVs
        (key block layout: client MAC, server MAC, client key, server key,
        client IV, server IV), builds the cipher/MAC contexts, and stores
        them as self._pendingReadState / self._pendingWriteState. They
        become active only when _changeReadState/_changeWriteState run.

        :param clientRandom: client random from the hello exchange.
        :param serverRandom: server random from the hello exchange.
        :param implementations: crypto backends passed to the cipher factories.
        """
        # macLength is 20 in every branch: all supported suites use SHA-1
        # (see digestmod=sha below).
        if self.session.cipherSuite in CipherSuite.aes128Suites:
            macLength = 20
            keyLength = 16
            ivLength = 16
            createCipherFunc = createAES
        elif self.session.cipherSuite in CipherSuite.aes256Suites:
            macLength = 20
            keyLength = 32
            ivLength = 16
            createCipherFunc = createAES
        elif self.session.cipherSuite in CipherSuite.rc4Suites:
            macLength = 20
            keyLength = 16
            ivLength = 0
            createCipherFunc = createRC4
        elif self.session.cipherSuite in CipherSuite.tripleDESSuites:
            macLength = 20
            keyLength = 24
            ivLength = 8
            createCipherFunc = createTripleDES
        else:
            raise AssertionError()

        # SSL 3.0 uses its own MAC; TLS 1.0/1.1 use HMAC.
        if self.version == (3,0):
            createMACFunc = MAC_SSL
        elif self.version in ((3,1), (3,2)):
            createMACFunc = hmac.HMAC

        outputLength = (macLength*2) + (keyLength*2) + (ivLength*2)

        #Calculate Keying Material from Master Secret
        # NOTE: server random precedes client random here, per the key
        # expansion seed ordering.
        if self.version == (3,0):
            keyBlock = PRF_SSL(self.session.masterSecret,
                               concatArrays(serverRandom, clientRandom),
                               outputLength)
        elif self.version in ((3,1), (3,2)):
            keyBlock = PRF(self.session.masterSecret,
                           "key expansion",
                           concatArrays(serverRandom,clientRandom),
                           outputLength)
        else:
            raise AssertionError()

        #Slice up Keying Material
        clientPendingState = _ConnectionState()
        serverPendingState = _ConnectionState()
        p = Parser(keyBlock)
        clientMACBlock = bytesToString(p.getFixBytes(macLength))
        serverMACBlock = bytesToString(p.getFixBytes(macLength))
        clientKeyBlock = bytesToString(p.getFixBytes(keyLength))
        serverKeyBlock = bytesToString(p.getFixBytes(keyLength))
        clientIVBlock = bytesToString(p.getFixBytes(ivLength))
        serverIVBlock = bytesToString(p.getFixBytes(ivLength))
        clientPendingState.macContext = createMACFunc(clientMACBlock,
                                                      digestmod=sha)
        serverPendingState.macContext = createMACFunc(serverMACBlock,
                                                      digestmod=sha)
        clientPendingState.encContext = createCipherFunc(clientKeyBlock,
                                                         clientIVBlock,
                                                         implementations)
        serverPendingState.encContext = createCipherFunc(serverKeyBlock,
                                                         serverIVBlock,
                                                         implementations)

        #Assign new connection states to pending states
        # "client" keys are this side's write keys only when we are the
        # client; otherwise they are our read keys.
        if self._client:
            self._pendingWriteState = clientPendingState
            self._pendingReadState = serverPendingState
        else:
            self._pendingWriteState = serverPendingState
            self._pendingReadState = clientPendingState

        if self.version == (3,2) and ivLength:
            #Choose fixedIVBlock for TLS 1.1 (this is encrypted with the CBC
            #residue to create the IV for each sent block)
            self.fixedIVBlock = getRandomBytes(ivLength)
def _changeWriteState(self):
self._writeState = self._pendingWriteState
self._pendingWriteState = _ConnectionState()
def _changeReadState(self):
self._readState = self._pendingReadState
self._pendingReadState = _ConnectionState()
    def _sendFinished(self):
        """Send ChangeCipherSpec then a Finished message (generator).

        Switches to the pending write state between the two messages, so the
        Finished message goes out under the newly negotiated keys. Passes
        through whatever self._sendMsg() yields (non-blocking send progress).
        """
        #Send ChangeCipherSpec
        for result in self._sendMsg(ChangeCipherSpec()):
            yield result

        #Switch to pending write state
        self._changeWriteState()

        #Calculate verification data
        verifyData = self._calcFinished(True)
        # Test hook: deliberately corrupt the first verify byte.
        if self.fault == Fault.badFinished:
            verifyData[0] = (verifyData[0]+1)%256

        #Send Finished message under new state
        finished = Finished(self.version).create(verifyData)
        for result in self._sendMsg(finished):
            yield result
    def _getFinished(self):
        """Receive and verify the peer's ChangeCipherSpec and Finished (generator).

        Yields 0/1 pass-through values from self._getMsg() while waiting for
        data; after each loop the local name `result` holds the final parsed
        message (the class-wide generator idiom). Sends an alert via
        self._sendError() on a bad ChangeCipherSpec type or a Finished
        verify_data mismatch.
        """
        #Get and check ChangeCipherSpec
        for result in self._getMsg(ContentType.change_cipher_spec):
            if result in (0,1):
                yield result
        changeCipherSpec = result
        if changeCipherSpec.type != 1:
            for result in self._sendError(AlertDescription.illegal_parameter,
                                          "ChangeCipherSpec type incorrect"):
                yield result

        #Switch to pending read state
        self._changeReadState()

        #Calculate verification data
        verifyData = self._calcFinished(False)

        #Get and check Finished message under new state
        for result in self._getMsg(ContentType.handshake,
                                   HandshakeType.finished):
            if result in (0,1):
                yield result
        finished = result
        # NOTE(review): plain != comparison of verify data (not
        # constant-time) -- acceptable here? confirm.
        if finished.verify_data != verifyData:
            for result in self._sendError(AlertDescription.decrypt_error,
                                          "Finished message is incorrect"):
                yield result
    def _calcFinished(self, send=True):
        """Compute Finished verify data for this connection.

        :param send: True for the locally-sent Finished, False for the one
            expected from the peer (the sender role flips accordingly).
        :returns: verify-data byte array (SSL 3.0: MD5+SHA1 handshake hash;
            TLS 1.0/1.1: 12 bytes of PRF output).
        """
        if self.version == (3,0):
            # Sender labels are the ASCII strings "CLNT" / "SRVR".
            if (self._client and send) or (not self._client and not send):
                senderStr = "\x43\x4C\x4E\x54"
            else:
                senderStr = "\x53\x52\x56\x52"
            verifyData = self._calcSSLHandshakeHash(self.session.masterSecret,
                                                    senderStr)
            return verifyData
        elif self.version in ((3,1), (3,2)):
            if (self._client and send) or (not self._client and not send):
                label = "client finished"
            else:
                label = "server finished"
            handshakeHashes = stringToBytes(self._handshake_md5.digest() + \
                                            self._handshake_sha.digest())
            verifyData = PRF(self.session.masterSecret, label, handshakeHashes,
                             12)
            return verifyData
        else:
            raise AssertionError()
    #Used for Finished messages and CertificateVerify messages in SSL v3
    def _calcSSLHandshakeHash(self, masterSecret, label):
        """SSL 3.0 handshake hash: nested MD5/SHA-1 over the running
        handshake transcript, keyed with the master secret.

        Inner pads are 0x36 repeated (48 bytes for MD5, 40 for SHA-1),
        outer pads 0x5c, mirroring the SSL 3.0 MAC construction.

        :param masterSecret: session master secret (byte array).
        :param label: sender label string (e.g. "CLNT"/"SRVR").
        :returns: 36-byte array (16-byte MD5 part + 20-byte SHA-1 part).
        """
        masterSecretStr = bytesToString(masterSecret)

        # Copy the running hashes so the live transcript is not disturbed.
        imac_md5 = self._handshake_md5.copy()
        imac_sha = self._handshake_sha.copy()

        imac_md5.update(label + masterSecretStr + '\x36'*48)
        imac_sha.update(label + masterSecretStr + '\x36'*40)

        md5Str = md5.md5(masterSecretStr + ('\x5c'*48) + \
                         imac_md5.digest()).digest()
        shaStr = sha.sha(masterSecretStr + ('\x5c'*40) + \
                         imac_sha.digest()).digest()

        return stringToBytes(md5Str + shaStr)
#![warn(rust_2018_idioms)]
#![cfg(all(feature = "full", not(target_os = "wasi"), not(miri)))] // Wasi doesn't support bind
// No socket on `miri`.
use std::io::Read;
use std::io::Result;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::TcpListener;
use tokio::net::TcpStream;
#[tokio::test]
async fn tcp_into_std() -> Result<()> {
    // Round-trips a connection: tokio::TcpStream -> std::net::TcpStream
    // (via into_std) -> tokio::TcpStream (via from_std), exchanging data
    // in each direction of the conversion.
    let mut data = [0u8; 12];
    let listener = TcpListener::bind("127.0.0.1:0").await?;
    let addr = listener.local_addr().unwrap().to_string();

    // Connect from a background task so the accept() below can complete.
    let handle = tokio::spawn(async {
        let stream: TcpStream = TcpStream::connect(addr).await.unwrap();
        stream
    });

    let (tokio_tcp_stream, _) = listener.accept().await?;
    let mut std_tcp_stream = tokio_tcp_stream.into_std()?;
    // Tokio keeps its sockets non-blocking; flip to blocking mode so the
    // plain std read_exact below doesn't return WouldBlock.
    std_tcp_stream
        .set_nonblocking(false)
        .expect("set_nonblocking call failed");

    let mut client = handle.await.expect("The task being joined has panicked");
    client.write_all(b"Hello world!").await?;

    std_tcp_stream
        .read_exact(&mut data)
        .expect("std TcpStream read failed!");
    assert_eq!(b"Hello world!", &data);

    // test back to tokio stream
    // from_std requires the socket to be in non-blocking mode again.
    std_tcp_stream
        .set_nonblocking(true)
        .expect("set_nonblocking call failed");
    let mut tokio_tcp_stream = TcpStream::from_std(std_tcp_stream)?;

    client.write_all(b"Hello tokio!").await?;

    let _size = tokio_tcp_stream.read_exact(&mut data).await?;
    assert_eq!(b"Hello tokio!", &data);

    Ok(())
}
{
"values": {
"id": {
"string": "F40F2AB4-100C-4AE8-BFD0-BF332A158415"
},
"map": {
"map": {
"key_one": {
"object": {
"id": {
"string": "3BFC1A84-023F-44FA-A8EE-EFD88E18B8F7"
}
}
},
"key_three": {
"object": {
"id": {
"string": "4B7178A8-AB9D-4FF4-8B3D-48B754DE537B"
}
}
},
"key_two": {
"object": {
"id": {
"string": "56C7E07F-B9DF-4799-AF62-E703D1167A51"
}
}
}
}
}
}
} | json | github | https://github.com/hashicorp/terraform | testing/equivalence-tests/tests/replace_within_map/terraform.resource/F40F2AB4-100C-4AE8-BFD0-BF332A158415.json |
class BaseDatabaseValidation:
    """Encapsulate backend-specific validation."""

    def __init__(self, connection):
        self.connection = connection

    def __del__(self):
        del self.connection

    def check(self, **kwargs):
        # No backend-independent checks exist.
        return []

    def check_field(self, field, **kwargs):
        # Per-field checks exist only on backends that define
        # check_field_type(); related fields are never inspected.
        if not hasattr(self, "check_field_type") or getattr(field, "remote_field", None):
            return []
        # Skip fields whose model requires database features this
        # connection does not support.
        features = self.connection.features
        if not all(
            getattr(features, feature, False)
            for feature in field.model._meta.required_db_features
        ):
            return []
        # Non-concrete fields have no database type to validate.
        field_type = field.db_type(self.connection)
        if field_type is None:
            return []
        return list(self.check_field_type(field, field_type))
from pyotp.otp import OTP
import datetime
import time
import urllib
class TOTP(OTP):
    """Time-based one-time password: the moving factor is the number of
    `interval`-second steps since the Unix epoch (Python 2 code)."""

    def __init__(self, *args, **kwargs):
        """Accept an optional `interval` keyword (seconds per OTP step,
        default 30, which is the standard); everything else is passed to
        the OTP base class."""
        self.interval = kwargs.pop('interval', 30)
        super(TOTP, self).__init__(*args, **kwargs)

    def at(self, for_time):
        """Generate the OTP for `for_time`, which may be a datetime or a
        Unix timestamp (anything int() accepts)."""
        when = for_time
        if not isinstance(when, datetime.datetime):
            when = datetime.datetime.fromtimestamp(int(when))
        return self.generate_otp(self.timecode(when))

    def now(self):
        """Generate the OTP for the current time."""
        return self.generate_otp(self.timecode(datetime.datetime.now()))

    def verify(self, otp, for_time=None):
        """Check `otp` against the expected OTP for `for_time` (defaults
        to now).

        NOTE(review): plain == comparison, not constant-time.
        """
        when = datetime.datetime.now() if for_time is None else for_time
        return otp == self.at(when)

    def provisioning_uri(self, name):
        """Return the otpauth:// provisioning URI for `name`, suitable for
        encoding in a QR code for authenticator apps."""
        params = {
            'name': urllib.quote(name, safe='@'),
            'secret': self.secret,
        }
        return 'otpauth://totp/%(name)s?secret=%(secret)s' % params

    def timecode(self, for_time):
        """Map a datetime onto its integer OTP time step."""
        epoch_seconds = time.mktime(for_time.timetuple())
        return int(epoch_seconds / self.interval)
DROP EXTENSION pg_stat_statements; | sql | github | https://github.com/postgres/postgres | contrib/pg_stat_statements/sql/cleanup.sql |
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
import math
import collections
import numpy as np
import qcelemental as qcel
def BFS(geom, elem, seed_atoms=None, bond_threshold=1.20):
    """Detect fragments among real atoms through a breadth-first search (BFS) algorithm.

    Parameters
    ----------
    geom : ndarray of float
        (nat x 3) Cartesian coordinates [a0] of real atoms.
    elem : ndarray of str or int
        (nat) Either element symbols or atomic numbers corresponding to `geom`.
        Used for selecting van der Waals radius.
    seed_atoms : list (optional)
        List of lists of atoms (0-indexed) belonging to independent fragments.
        Useful to prompt algorithm or to define intramolecular fragments through
        border atoms. Example: `[[1, 0], [2]]`.
        NOTE(review): this list is aliased and extended in place by the
        function (see `allfragments = seed_atoms` below) -- callers should
        not reuse it.
    bond_threshold : float (optional)
        Factor beyond average of covalent radii to determine bond cutoff.

    Returns
    -------
    list of lists
        Array of atom indices (0-indexed) of detected fragments. See example
        below for how to transform inputs.

    Notes
    -----
    Relies upon van der Waals radii and so faulty for close (especially
    hydrogen-bonded) fragments. `seed_atoms` can help.

    Authors
    -------
    Original code from Michael S. Marshall, linear-scaling algorithm from
    Trent M. Parker, revamped by Lori A. Burns

    Usage
    -----
    >>> # [1] BFS on large array of jumbled coordinates `geom` and element
    >>> # symbols `elem`. Use the output `fragments` to form list of small
    >>> # per-fragment arrays.
    >>> fragments = BFS(geom, elem)
    >>> frag_geoms = [geom[fr] for fr in fragments]
    >>> frag_elems = [elem[fr] for fr in fragments]
    """
    radii = _get_covalent_radii(elem)
    max_covalent_radius = np.max(radii)
    # Spatial hashing block edge: large enough that bonded atoms can only
    # be in the same or a neighboring block.
    blocksize = int(math.ceil(2.0 * bond_threshold * max_covalent_radius))
    allblocks = _get_blocks(geom, blocksize)
    bond_tree = _get_bond_tree(radii, geom, allblocks, blocksize, bond_threshold)

    if seed_atoms is None:
        seed_atoms = []
    allfragments = seed_atoms

    # bare queues
    # new_list: per-fragment BFS frontiers; break_list: seed atoms that must
    # not be crossed; unfound_list: atoms not yet assigned to any fragment.
    new_list = []
    break_list = []
    unfound_list = list(range(geom.shape[0]))

    # seed queues from intrafrag atom hints
    for ifr, fr in enumerate(allfragments):
        new_list.append([])
        for at in fr:
            new_list[ifr].append(at)
            break_list.append(at)
            unfound_list.remove(at)

    # perform BFS
    while len(unfound_list) > 0:
        for ifr, fr in enumerate(new_list):
            while len(fr) > 0:
                # Iterate a reversed view while removing processed atoms;
                # order of removal is what keeps this safe -- do not reorder.
                for at1 in reversed(fr):
                    for at2 in bond_tree[at1]:
                        if at2 in unfound_list and at2 not in break_list:
                            allfragments[ifr].append(at2)
                            new_list[ifr].append(at2)
                            unfound_list.remove(at2)
                    new_list[ifr].remove(at1)
        # Any atom still unassigned starts a brand-new fragment.
        if len(unfound_list) > 0:
            at_new = unfound_list[0]
            allfragments.append([at_new])
            new_list.append([at_new])
            unfound_list.remove(at_new)

    for fr in range(len(allfragments)):
        allfragments[fr] = sorted(allfragments[fr])
    return allfragments
def _get_covalent_radii(elem):
    """Return covalent radii [a0] for all atoms.

    Look-up values for covalent (or ionic) radii by atomic element [A] from
    "Inorganic Chemistry" 3rd ed, Housecroft, Appendix 6, pgs 1013-1014.

    Parameters
    ----------
    elem : ndarray of str or int
        Element symbols (any capitalization) or atomic numbers.

    Returns
    -------
    ndarray of float
        Covalent radii in Bohr, one per atom.

    Raises
    ------
    KeyError
        For elements missing from the lookup table.
    """
    covalent_radii_lookup = {
        'H' : 0.37, 'He': 0.30,
        'Li': 1.02, 'Be': 0.27, 'B' : 0.88, 'C' : 0.77, 'O' : 0.73, 'N' : 0.75, 'F' : 0.71, 'Ne': 0.84,
        'Na': 1.02, 'Mg': 0.72, 'Al': 1.30, 'Si': 1.18, 'P' : 1.10, 'S' : 1.03, 'Cl': 0.99, 'Ar': 1.00,
        'K' : 1.38, 'Ca': 1.00,
        'Sc': 0.75, 'Ti': 0.86, 'V' : 0.79, 'Cr': 0.73, 'Mn': 0.67,
        'Fe': 0.61, 'Co': 0.64, 'Ni': 0.55, 'Cu': 0.46, 'Zn': 0.60,
        'Ga': 1.22, 'Ge': 1.22, 'As': 1.22, 'Se': 1.17, 'Br': 1.14, 'Kr': 1.03,
        'I' : 1.33,
        'X' : 0.00}  # yapf: disable
    #'RN': 2.40 / 1.5, # extrapolation
    #'H': 1.06 / 1.5, # Bondi JPC 68 441 (1964)
    #'SN': 2.16 / 1.5, # Bondi JPC 68 441 (1964)
    #'SB': 2.12 / 1.5, # Bondi JPC 68 441 (1964)
    #'TE': 2.08 / 1.5, # Bondi JPC 68 441 (1964)
    #'XE': 2.05 / 1.5} # Bondi JPC 68 441 (1964)

    nat = elem.shape[0]
    try:
        # Element symbols: normalize capitalization for the lookup.
        caps = [el.capitalize() for el in elem]
    except AttributeError:
        # Atomic numbers: translate to element symbols.
        caps = [qcel.periodictable.to_E(z) for z in elem]
    # BUGFIX: dtype was `np.float`, a deprecated alias removed in
    # NumPy 1.24; the builtin `float` is the documented replacement.
    covrad = np.fromiter((covalent_radii_lookup[caps[at]] for at in range(nat)), dtype=float, count=nat)
    # Convert Angstrom table values to Bohr.
    return np.divide(covrad, qcel.constants.bohr2angstroms)
def _get_key(x, y, z, b):
"""Return key string from point values and block resolution"""
return """{},{},{}""".format(x - x % b, y - y % b, z - z % b)
def _distance2(v, u):
"""Compute the square distance between points defined by vectors *v* and *u*."""
return sum(((v[i] - u[i]) * (v[i] - u[i]) for i in range(len(v))))
def _get_blocks(geom, blocksize):
    """Partition atoms into spatial blocks of edge *blocksize*, keyed by
    the block corner string from _get_key."""
    blocks = collections.defaultdict(list)
    natoms = geom.shape[0]
    for atom in range(natoms):
        corner = [int(math.floor(geom[atom][axis])) for axis in range(3)]
        blocks[_get_key(corner[0], corner[1], corner[2], blocksize)].append(atom)
    return blocks
def _get_bond_tree(radii, geom, allblocks, blocksize, bond_threshold):
    """Create bond tree from atomic coordinates.

    Returns a per-atom adjacency list: bond_tree[i] holds the atoms bonded
    to atom i. Only pairs within neighboring spatial blocks are tested, so
    the overall cost scales with system size rather than nat**2.
    """
    bond_tree = [[] for at in range(geom.shape[0])]
    for blk in allblocks:
        # Candidate partners: every atom in this block's 3x3x3 neighborhood.
        atom_list = _get_atoms_from_blocks(_get_neighbor_blocks(blk, blocksize, allblocks), allblocks)
        for at1 in allblocks[blk]:
            for at2 in atom_list:
                r2_ij = _distance2(geom[at1], geom[at2])
                # NOTE(review): threshold multiplies the *squared* radius
                # sum, i.e. cutoff distance = sqrt(bond_threshold)*(r1+r2),
                # not bond_threshold*(r1+r2). Confirm this is intentional.
                r2_thresh = bond_threshold * (radii[at1] + radii[at2])**2
                if at1 != at2 and r2_ij <= r2_thresh:
                    # Record the bond symmetrically, avoiding duplicates
                    # (each pair is visited from both blocks).
                    if at2 not in bond_tree[at1]:
                        bond_tree[at1].append(at2)
                    if at1 not in bond_tree[at2]:
                        bond_tree[at2].append(at1)
    return bond_tree
def _get_neighbor_blocks(block, blocksize, allblocks):
    """Return the keys of occupied blocks in the 3x3x3 neighborhood of
    *block*, including *block* itself."""
    x, y, z = (int(coord) for coord in block.split(','))
    offsets = (-blocksize, 0, blocksize)
    candidates = [_get_key(x + dx, y + dy, z + dz, blocksize)
                  for dx in offsets
                  for dy in offsets
                  for dz in offsets]
    # Keep only neighborhoods that actually contain atoms.
    return list(set(candidates) & set(allblocks))
def _get_atoms_from_blocks(blocks, master_blocks):
"""Get list of atoms in a set of blocks"""
atoms_nested = [master_blocks[blk] for blk in blocks]
atoms = [at for sublist in atoms_nested for at in sublist]
return atoms | unknown | codeparrot/codeparrot-clean | ||
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2012-2013 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
import sigrokdecode as srd
# ...
# Decoded meanings of the 4-bit LAD[3:0] nibble, keyed by LPC protocol field
# name; inner dicts are indexed by the raw nibble value.
fields = {
    # START field (indicates start or stop of a transaction)
    'START': {
        0b0000: 'Start of cycle for a target',
        0b0001: 'Reserved',
        0b0010: 'Grant for bus master 0',
        0b0011: 'Grant for bus master 1',
        0b0100: 'Reserved',
        0b0101: 'Reserved',
        0b0110: 'Reserved',
        0b0111: 'Reserved',
        0b1000: 'Reserved',
        0b1001: 'Reserved',
        0b1010: 'Reserved',
        0b1011: 'Reserved',
        0b1100: 'Reserved',
        0b1101: 'Start of cycle for a Firmware Memory Read cycle',
        0b1110: 'Start of cycle for a Firmware Memory Write cycle',
        0b1111: 'Stop/abort (end of a cycle for a target)',
    },
    # Cycle type / direction field
    # Bit 0 (LAD[0]) is unused, should always be 0.
    # Neither host nor peripheral are allowed to drive 0b11x0.
    'CT_DR': {
        0b0000: 'I/O read',
        0b0010: 'I/O write',
        0b0100: 'Memory read',
        0b0110: 'Memory write',
        0b1000: 'DMA read',
        0b1010: 'DMA write',
        0b1100: 'Reserved / not allowed',
        0b1110: 'Reserved / not allowed',
    },
    # SIZE field (determines how many bytes are to be transferred)
    # Bits[3:2] are reserved, must be driven to 0b00.
    # Neither host nor peripheral are allowed to drive 0b0010.
    'SIZE': {
        0b0000: '8 bits (1 byte)',
        0b0001: '16 bits (2 bytes)',
        0b0010: 'Reserved / not allowed',
        0b0011: '32 bits (4 bytes)',
    },
    # CHANNEL field (bits[2:0] contain the DMA channel number)
    'CHANNEL': {
        0b0000: '0',
        0b0001: '1',
        0b0010: '2',
        0b0011: '3',
        0b0100: '4',
        0b0101: '5',
        0b0110: '6',
        0b0111: '7',
    },
    # SYNC field (used to add wait states)
    'SYNC': {
        0b0000: 'Ready',
        0b0001: 'Reserved',
        0b0010: 'Reserved',
        0b0011: 'Reserved',
        0b0100: 'Reserved',
        0b0101: 'Short wait',
        0b0110: 'Long wait',
        0b0111: 'Reserved',
        0b1000: 'Reserved',
        0b1001: 'Ready more (DMA only)',
        0b1010: 'Error',
        0b1011: 'Reserved',
        0b1100: 'Reserved',
        0b1101: 'Reserved',
        0b1110: 'Reserved',
        0b1111: 'Reserved',
    },
}
class Decoder(srd.Decoder):
    """libsigrokdecode protocol decoder for the Intel Low-Pin-Count (LPC)
    bus: a per-rising-LCLK-edge state machine over the LAD[3:0] nibble
    lines, framed by LFRAME#."""
    api_version = 2
    id = 'lpc'
    name = 'LPC'
    longname = 'Low-Pin-Count'
    desc = 'Protocol for low-bandwidth devices on PC mainboards.'
    license = 'gplv2+'
    inputs = ['logic']
    outputs = ['lpc']
    channels = (
        {'id': 'lframe', 'name': 'LFRAME#', 'desc': 'Frame'},
        {'id': 'lclk', 'name': 'LCLK', 'desc': 'Clock'},
        {'id': 'lad0', 'name': 'LAD[0]', 'desc': 'Addr/control/data 0'},
        {'id': 'lad1', 'name': 'LAD[1]', 'desc': 'Addr/control/data 1'},
        {'id': 'lad2', 'name': 'LAD[2]', 'desc': 'Addr/control/data 2'},
        {'id': 'lad3', 'name': 'LAD[3]', 'desc': 'Addr/control/data 3'},
    )
    optional_channels = (
        {'id': 'lreset', 'name': 'LRESET#', 'desc': 'Reset'},
        {'id': 'ldrq', 'name': 'LDRQ#', 'desc': 'Encoded DMA / bus master request'},
        {'id': 'serirq', 'name': 'SERIRQ', 'desc': 'Serialized IRQ'},
        {'id': 'clkrun', 'name': 'CLKRUN#', 'desc': 'Clock run'},
        {'id': 'lpme', 'name': 'LPME#', 'desc': 'LPC power management event'},
        {'id': 'lpcpd', 'name': 'LPCPD#', 'desc': 'Power down'},
        {'id': 'lsmi', 'name': 'LSMI#', 'desc': 'System Management Interrupt'},
    )
    annotations = (
        ('warnings', 'Warnings'),
        ('start', 'Start'),
        ('cycle-type', 'Cycle-type/direction'),
        ('addr', 'Address'),
        ('tar1', 'Turn-around cycle 1'),
        ('sync', 'Sync'),
        ('data', 'Data'),
        ('tar2', 'Turn-around cycle 2'),
    )
    annotation_rows = (
        ('data', 'Data', (1, 2, 3, 4, 5, 6, 7)),
        ('warnings', 'Warnings', (0,)),
    )

    def __init__(self, **kwargs):
        # State machine state plus per-transaction scratch fields.
        self.state = 'IDLE'
        self.oldlclk = -1
        self.samplenum = 0
        self.clocknum = 0
        self.lad = -1           # Last LAD nibble seen while LFRAME# asserted.
        self.addr = 0
        self.cur_nibble = 0
        self.cycle_type = -1
        self.databyte = 0
        self.tarcount = 0
        self.synccount = 0
        self.oldpins = None
        self.ss_block = self.es_block = None

    def start(self):
        self.out_ann = self.register(srd.OUTPUT_ANN)

    def putb(self, data):
        # Emit an annotation spanning the current [ss_block, es_block] range.
        self.put(self.ss_block, self.es_block, self.out_ann, data)

    def handle_get_start(self, lad, lad_bits, lframe):
        # LAD[3:0]: START field (1 clock cycle).

        # The last value of LAD[3:0] before LFRAME# gets de-asserted is what
        # the peripherals must use. However, the host can keep LFRAME# asserted
        # multiple clocks, and we output all START fields that occur, even
        # though the peripherals are supposed to ignore all but the last one.
        self.es_block = self.samplenum
        self.putb([1, [fields['START'][lad], 'START', 'St', 'S']])
        self.ss_block = self.samplenum

        # Output a warning if LAD[3:0] changes while LFRAME# is low.
        # BUGFIX: self.lad was never updated here, so the comparison below
        # always saw -1 (warning could never fire) and start_field was
        # always -1. Remember the nibble each asserted cycle.
        if self.lad != -1 and self.lad != lad:
            self.putb([0, ['LAD[3:0] changed while LFRAME# was asserted']])
        self.lad = lad

        # LFRAME# is asserted (low). Wait until it gets de-asserted again
        # (the host is allowed to keep it asserted multiple clocks).
        if lframe != 1:
            return

        self.start_field = self.lad
        self.state = 'GET CT/DR'

    def handle_get_ct_dr(self, lad, lad_bits):
        # LAD[3:0]: Cycle type / direction field (1 clock cycle).
        self.cycle_type = fields['CT_DR'][lad]

        # TODO: Warning/error on invalid cycle types.
        # NOTE(review): CT_DR values are 'Reserved / not allowed', so this
        # equality check never matches; kept as-is pending the TODO.
        if self.cycle_type == 'Reserved':
            self.putb([0, ['Invalid cycle type (%s)' % lad_bits]])

        self.es_block = self.samplenum
        self.putb([2, ['Cycle type: %s' % self.cycle_type]])
        self.ss_block = self.samplenum

        self.state = 'GET ADDR'
        self.addr = 0
        self.cur_nibble = 0

    def handle_get_addr(self, lad, lad_bits):
        # LAD[3:0]: ADDR field (4/8/0 clock cycles).

        # I/O cycles: 4 ADDR clocks. Memory cycles: 8 ADDR clocks.
        # DMA cycles: no ADDR clocks at all.
        if self.cycle_type in ('I/O read', 'I/O write'):
            addr_nibbles = 4  # Address is 16bits.
        elif self.cycle_type in ('Memory read', 'Memory write'):
            addr_nibbles = 8  # Address is 32bits.
        else:
            addr_nibbles = 0  # TODO: How to handle later on?

        # Addresses are driven MSN-first.
        offset = ((addr_nibbles - 1) - self.cur_nibble) * 4
        self.addr |= (lad << offset)

        # Continue if we haven't seen all ADDR cycles, yet.
        if self.cur_nibble < addr_nibbles - 1:
            self.cur_nibble += 1
            return

        self.es_block = self.samplenum
        s = 'Address: 0x%%0%dx' % addr_nibbles
        self.putb([3, [s % self.addr]])
        self.ss_block = self.samplenum

        self.state = 'GET TAR'
        # BUGFIX: this assigned 'self.tar_count', a dead attribute that is
        # never read; the actual counter used by handle_get_tar() is
        # 'self.tarcount'.
        self.tarcount = 0

    def handle_get_tar(self, lad, lad_bits):
        # LAD[3:0]: First TAR (turn-around) field (2 clock cycles).
        self.es_block = self.samplenum
        self.putb([4, ['TAR, cycle %d: %s' % (self.tarcount, lad_bits)]])
        self.ss_block = self.samplenum

        # On the first TAR clock cycle LAD[3:0] is driven to 1111 by
        # either the host or peripheral. On the second clock cycle,
        # the host or peripheral tri-states LAD[3:0], but its value
        # should still be 1111, due to pull-ups on the LAD lines.
        if lad_bits != '1111':
            self.putb([0, ['TAR, cycle %d: %s (expected 1111)' % \
                           (self.tarcount, lad_bits)]])

        if self.tarcount != 1:
            self.tarcount += 1
            return

        self.tarcount = 0
        self.state = 'GET SYNC'

    def handle_get_sync(self, lad, lad_bits):
        # LAD[3:0]: SYNC field (1-n clock cycles).
        self.sync_val = lad_bits
        # BUGFIX: the SYNC meaning was stored into self.cycle_type,
        # clobbering the transaction's cycle type; use a local instead.
        sync_type = fields['SYNC'][lad]

        # TODO: Warnings if reserved value are seen?
        if sync_type == 'Reserved':
            self.putb([0, ['SYNC, cycle %d: %s (reserved value)' % \
                           (self.synccount, self.sync_val)]])

        self.es_block = self.samplenum
        self.putb([5, ['SYNC, cycle %d: %s' % (self.synccount, self.sync_val)]])
        self.ss_block = self.samplenum

        # TODO
        self.cycle_count = 0
        self.state = 'GET DATA'

    def handle_get_data(self, lad, lad_bits):
        # LAD[3:0]: DATA field (2 clock cycles).

        # Data is driven LSN-first.
        if self.cycle_count == 0:
            self.databyte = lad
        elif self.cycle_count == 1:
            self.databyte |= (lad << 4)
        else:
            raise Exception('Invalid cycle_count: %d' % self.cycle_count)

        if self.cycle_count != 1:
            self.cycle_count += 1
            return

        self.es_block = self.samplenum
        self.putb([6, ['DATA: 0x%02x' % self.databyte]])
        self.ss_block = self.samplenum

        self.cycle_count = 0
        self.state = 'GET TAR2'

    def handle_get_tar2(self, lad, lad_bits):
        # LAD[3:0]: Second TAR field (2 clock cycles).
        self.es_block = self.samplenum
        self.putb([7, ['TAR, cycle %d: %s' % (self.tarcount, lad_bits)]])
        self.ss_block = self.samplenum

        # On the first TAR clock cycle LAD[3:0] is driven to 1111 by
        # either the host or peripheral. On the second clock cycle,
        # the host or peripheral tri-states LAD[3:0], but its value
        # should still be 1111, due to pull-ups on the LAD lines.
        if lad_bits != '1111':
            self.putb([0, ['Warning: TAR, cycle %d: %s (expected 1111)'
                           % (self.tarcount, lad_bits)]])

        if self.tarcount != 1:
            self.tarcount += 1
            return

        self.tarcount = 0
        self.state = 'IDLE'

    def decode(self, ss, es, data):
        for (self.samplenum, pins) in data:

            # If none of the pins changed, there's nothing to do.
            if self.oldpins == pins:
                continue

            # Store current pin values for the next round.
            self.oldpins = pins

            # Get individual pin values into local variables.
            (lframe, lclk, lad0, lad1, lad2, lad3) = pins[:6]
            (lreset, ldrq, serirq, clkrun, lpme, lpcpd, lsmi) = pins[6:]

            # Only look at the signals upon rising LCLK edges. The LPC clock
            # is the same as the PCI clock (which is sampled at rising edges).
            if not (self.oldlclk == 0 and lclk == 1):
                self.oldlclk = lclk
                continue

            # Store LAD[3:0] bit values (one nibble) in local variables.
            # Most (but not all) states need this.
            if self.state != 'IDLE':
                lad = (lad3 << 3) | (lad2 << 2) | (lad1 << 1) | lad0
                lad_bits = bin(lad)[2:].zfill(4)
                # self.putb([0, ['LAD: %s' % lad_bits]])

            # TODO: Only memory read/write is currently supported/tested.

            # State machine
            if self.state == 'IDLE':
                # A valid LPC cycle starts with LFRAME# being asserted (low).
                if lframe != 0:
                    continue
                self.ss_block = self.samplenum
                self.state = 'GET START'
                self.lad = -1
                # self.clocknum = 0
            elif self.state == 'GET START':
                self.handle_get_start(lad, lad_bits, lframe)
            elif self.state == 'GET CT/DR':
                self.handle_get_ct_dr(lad, lad_bits)
            elif self.state == 'GET ADDR':
                self.handle_get_addr(lad, lad_bits)
            elif self.state == 'GET TAR':
                self.handle_get_tar(lad, lad_bits)
            elif self.state == 'GET SYNC':
                self.handle_get_sync(lad, lad_bits)
            elif self.state == 'GET DATA':
                self.handle_get_data(lad, lad_bits)
            elif self.state == 'GET TAR2':
                self.handle_get_tar2(lad, lad_bits)
"""Utility for testing certificate display.
This command will create a fake certificate for a user
in a course. The certificate will display on the student's
dashboard, but no PDF will be generated.
Example usage:
$ ./manage.py lms create_fake_cert test_user edX/DemoX/Demo_Course --mode honor --grade 0.89
"""
import logging
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from opaque_keys.edx.keys import CourseKey
from certificates.models import CertificateStatuses, GeneratedCertificate
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
    """Create a fake certificate for a user in a course. """

    USAGE = u'Usage: create_fake_cert <USERNAME> <COURSE_KEY> --mode <MODE> --status <STATUS> --grade <GRADE>'

    option_list = BaseCommand.option_list + (
        make_option(
            '-m', '--mode',
            metavar='CERT_MODE',
            dest='cert_mode',
            default='honor',
            help='The course mode of the certificate (e.g. "honor", "verified", or "professional")'
        ),
        make_option(
            '-s', '--status',
            metavar='CERT_STATUS',
            dest='status',
            default=CertificateStatuses.downloadable,
            help='The status of the certificate'
        ),
        make_option(
            '-g', '--grade',
            metavar='CERT_GRADE',
            dest='grade',
            default='',
            help='The grade for the course, as a decimal (e.g. "0.89" for 89%)'
        ),
    )

    def handle(self, *args, **options):
        """Create or update a fake certificate for one user/course pair.

        Positional args are the username and the course key string; mode,
        status and grade come from the parsed options.

        Raises:
            CommandError: if fewer than two positional args were given.
        """
        if len(args) < 2:
            raise CommandError(self.USAGE)

        user = User.objects.get(username=args[0])
        course_key = CourseKey.from_string(args[1])
        cert_mode = options.get('cert_mode', 'honor')
        status = options.get('status', CertificateStatuses.downloadable)
        grade = options.get('grade', '')

        cert, created = GeneratedCertificate.eligible_certificates.get_or_create(
            user=user,
            course_id=course_key
        )
        cert.mode = cert_mode
        cert.status = status
        cert.grade = grade
        # Downloadable certs get placeholder download/verify identifiers so
        # the dashboard link renders; no PDF is ever generated.
        if status == CertificateStatuses.downloadable:
            cert.download_uuid = 'test'
            cert.verify_uuid = 'test'
            cert.download_url = 'http://www.example.com'
        cert.save()

        if created:
            template = (
                u"Created certificate for user %s in course %s "
                u"with mode %s, status %s, "
                u"and grade %s"
            )
        else:
            template = (
                u"Updated certificate for user %s in course %s "
                u"with mode %s, status %s, "
                u"and grade %s"
            )
        LOGGER.info(template, user.id, unicode(course_key), cert_mode, status, grade)
import {arrayPush, mutate} from 'shared-runtime';
// NOTE(review): this file is a React Compiler test fixture; its code (and
// possibly its comments) may be part of expected snapshot output -- confirm
// before modifying anything here.
function useFoo({value}) {
  let items = null;
  try {
    // Mutable range of `items` begins here, but its reactive scope block
    // should be aligned to above the try-block
    items = [];
    arrayPush(items, value);
  } catch {
    // ignore
  }
  mutate(items);
  return items;
}
// Harness metadata: renders twice with equal props then once with changed
// props to exercise memoization of `items` across re-renders.
export const FIXTURE_ENTRYPOINT = {
  fn: useFoo,
  params: [{value: 2}],
  sequentialRenders: [{value: 2}, {value: 2}, {value: 3}],
};
# -*- coding: utf-8 -*-
"""
***************************************************************************
customwidgets.py
---------------------
Date : May 2014
Copyright : (C) 2014 by Denis Rouzaud
Email : denis.rouzaud@gmail.com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
"""
This file is used by pyuic to redirect includes
in custom widgets to the correct QGIS python packages.
It is copied on installation in /pythonX/dist-packages/PyQt5/uic/widget-plugins/
"""
# solution with CW_FILTER not fully working due to include of other files
# (e.g. for flags defined in other source files)
# pluginType = CW_FILTER
# def getFilter():
# import qgis.gui
#
# QGIS_widgets = {}
# for pyClass in dir(qgis.gui):
# QGIS_widgets[pyClass] = 'qgis.gui'
#
# def _QGISfilter(widgetname, baseclassname, module):
# print widgetname, baseclassname, module
# if widgetname in QGIS_widgets:
# return (MATCH, (widgetname, baseclassname, QGIS_widgets[widgetname]))
# else:
# return (NO_MATCH, None)
#
# return _QGISfilter
# `MODULE` is injected into this file's namespace by pyuic's widget-plugin
# loader before execution, so it is undefined at lint time (hence noqa).
pluginType = MODULE # noqa
def moduleInformation():
    """Tell pyuic which module provides QGIS custom widgets.

    Returns a ``(module_name, widget_names)`` pair: ``("qgis.gui", [...])``
    when qgis is importable, otherwise ``("", [])`` so pyuic falls back to
    its default behaviour.
    """
    try:
        import qgis.gui
    except ImportError:
        # QGIS python bindings are not available in this environment.
        return "", []
    names = dir(qgis.gui)
    # QgsScrollArea is deliberately excluded from the redirected widgets.
    names.remove('QgsScrollArea')
    return "qgis.gui", names
# -*- coding: utf-8 -*-
"""
Newspaper uses a lot of python-goose's extraction code. View their
license here: https://github.com/codelucas/newspaper/blob/master/GOOSE-LICENSE.txt
Keep all html page extraction code within this file. PLEASE abstract any
lxml or soup parsing mechanisms in the parsers.py file!
"""
__title__ = 'newspaper'
__author__ = 'Lucas Ou-Yang'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014, Lucas Ou-Yang'
import re
import copy
import urlparse
from collections import defaultdict
from .packages.tldextract import tldextract
from .utils import (
StringSplitter, StringReplacement, ReplaceSequence)
from .urls import (
get_path, get_domain, get_scheme, prepare_url)
# String-cleanup helpers applied when normalizing extracted titles and urls.
MOTLEY_REPLACEMENT = StringReplacement("�", "")
ESCAPED_FRAGMENT_REPLACEMENT = StringReplacement(u"#!", u"?_escaped_fragment_=")
TITLE_REPLACEMENTS = ReplaceSequence().create(u"»").append(u"»")
# Splitters for the delimiters commonly used in <title> text.
PIPE_SPLITTER = StringSplitter("\\|")
DASH_SPLITTER = StringSplitter(" - ")
ARROWS_SPLITTER = StringSplitter("»")
COLON_SPLITTER = StringSplitter(":")
SPACE_SPLITTER = StringSplitter(' ')
# Shared empty result for tag extraction when nothing is found.
NO_STRINGS = set()
# CSS selectors used to locate tag/topic anchors in a page.
A_REL_TAG_SELECTOR = "a[rel=tag]"
A_HREF_TAG_SELECTOR = "a[href*='/tag/'], a[href*='/tags/'], a[href*='/topic/'], a[href*='?keyword=']"
# Two-letter ISO-639-1 language code.
RE_LANG = r'^[A-Za-z]{2}$'
# URL path chunks that suggest article-like content.
good_paths = ['story', 'article', 'feature', 'featured', 'slides',
              'slideshow', 'gallery', 'news', 'video', 'media',
              'v', 'radio', 'press']
# URL path chunks that suggest non-article pages.
bad_chunks = ['careers', 'contact', 'about', 'faq', 'terms', 'privacy',
              'advert', 'preferences', 'feedback', 'info', 'browse', 'howto',
              'account', 'subscribe', 'donate', 'shop', 'admin']
# Domains that never host scrapeable article content.
bad_domains = ['amazon', 'doubleclick', 'twitter']
class ContentExtractor(object):
    """Extracts article metadata (title, authors, language, meta tags,
    category/feed urls) and scores DOM nodes to find the main content node.

    NOTE(review): Python 2 only (print statements, ``basestring``,
    ``unicode``, ``urlparse``).
    """
    def __init__(self, config):
        # Parser implementation, language and stopword counter all come
        # from the shared config object.
        self.config = config
        self.parser = self.config.get_parser()
        self.language = config.language
        self.stopwords_class = config.stopwords_class
    def get_language(self, article):
        """
        Returns the language is by the article or
        the configuration language.
        """
        # Prefer the language detected in the article's meta tags when the
        # config allows it; otherwise fall back to the configured language.
        if self.config.use_meta_language == True:
            if article.meta_lang:
                return article.meta_lang
        return self.config.language
    def get_authors(self, article):
        """
        Fetch the authors of the article, return as a list
        Only works for english articles.
        """
        _digits = re.compile('\d')
        def contains_digits(d):
            # True if the token contains any digit (rejects dates, ids, etc).
            return bool(_digits.search(d))
        def parse_byline(search_str):
            """
            Takes a candidate line of html or text and
            extracts out the name(s) in list form
            >>> parse_byline('<div>By: <strong>Lucas Ou-Yang</strong>, '
            ...              '<strong>Alex Smith</strong></div>')
            ['Lucas Ou-Yang', 'Alex Smith']
            """
            # Remove HTML boilerplate
            search_str = re.sub('<[^<]+?>', '', search_str)
            # Remove original By statement
            search_str = re.sub('[bB][yY][\:\s]|[fF]rom[\:\s]', '', search_str)
            search_str = search_str.strip()
            # Chunk the line by non alphanumeric tokens (few name exceptions)
            # >>> re.split("[^\w\'\-]", "Lucas Ou-Yang, Dean O'Brian and Ronald")
            # ['Lucas Ou-Yang', '', 'Dean O'Brian', 'and', 'Ronald']
            name_tokens = re.split("[^\w\'\-]", search_str)
            name_tokens = [s.strip() for s in name_tokens]
            _authors = []
            curname = [] # List of first, last name tokens
            DELIM = ['and', '']
            for token in name_tokens:
                if token in DELIM:
                    # should we allow middle names?
                    valid_name = (len(curname) == 2)
                    if valid_name:
                        _authors.append(' '.join(curname))
                        curname = []
                elif not contains_digits(token):
                    curname.append(token)
            # One last check at end
            valid_name = (len(curname) >= 2)
            if valid_name:
                _authors.append(' '.join(curname))
            return _authors
        # Try 1: Search popular author tags for authors
        ATTRS = ['name', 'rel', 'itemprop', 'class', 'id']
        VALS = ['author', 'byline']
        matches = []
        _authors, authors = [], []
        doc = article.doc
        html = article.html
        for attr in ATTRS:
            for val in VALS:
                # found = doc.xpath('//*[@%s="%s"]' % (attr, val))
                found = self.parser.getElementsByTag(doc, attr=attr, value=val)
                matches.extend(found)
        for match in matches:
            content = u''
            if match.tag == 'meta':
                mm = match.xpath('@content')
                if len(mm) > 0:
                    content = mm[0]
            else: # match.tag == <any other tag>
                content = match.text or u'' # text_content()
            if len(content) > 0:
                _authors.extend(parse_byline(content))
        # Deduplicate case-insensitively, then re-capitalize each word.
        uniq = list(set([s.lower() for s in _authors]))
        for name in uniq:
            names = [w.capitalize() for w in name.split(' ')]
            authors.append(' '.join(names))
        return authors or []
        # TODO Method 2: Search raw html for a by-line
        # match = re.search('By[\: ].*\\n|From[\: ].*\\n', html)
        # try:
        #     # Don't let zone be too long
        #     line = match.group(0)[:100]
        #     authors = parse_byline(line)
        # except:
        #     return [] # Failed to find anything
        # return authors
    def get_title(self, article):
        """
        Fetch the article title and analyze it.

        Tries each common delimiter (|, -, », :) in turn and keeps the
        longest piece, on the assumption that the site name is shorter
        than the headline.
        """
        title = ''
        doc = article.doc
        title_element = self.parser.getElementsByTag(doc, tag='title')
        # no title found
        if title_element is None or len(title_element) == 0:
            return title
        # title elem found
        title_text = self.parser.getText(title_element[0])
        used_delimeter = False
        # split title with |
        if '|' in title_text:
            title_text = self.split_title(title_text, PIPE_SPLITTER)
            used_delimeter = True
        # split title with -
        if not used_delimeter and '-' in title_text:
            title_text = self.split_title(title_text, DASH_SPLITTER)
            used_delimeter = True
        # split title with »
        if not used_delimeter and u'»' in title_text:
            title_text = self.split_title(title_text, ARROWS_SPLITTER)
            used_delimeter = True
        # split title with :
        if not used_delimeter and ':' in title_text:
            title_text = self.split_title(title_text, COLON_SPLITTER)
            used_delimeter = True
        title = MOTLEY_REPLACEMENT.replaceAll(title_text)
        return title
    def split_title(self, title, splitter):
        """
        Split the title to best part possible.

        Keeps the longest piece produced by ``splitter``.
        """
        large_text_length = 0
        large_text_index = 0
        title_pieces = splitter.split(title)
        # find the largest title piece
        for i in range(len(title_pieces)):
            current = title_pieces[i]
            if len(current) > large_text_length:
                large_text_length = len(current)
                large_text_index = i
        # replace content
        title = title_pieces[large_text_index]
        return TITLE_REPLACEMENTS.replaceAll(title).strip()
    def get_favicon(self, article):
        """
        Extract the favicon from a website
        http://en.wikipedia.org/wiki/Favicon
        <link rel="shortcut icon" type="image/png" href="favicon.png" />
        <link rel="icon" type="image/png" href="favicon.png" />
        """
        kwargs = {'tag': 'link', 'attr': 'rel', 'value': 'icon'}
        meta = self.parser.getElementsByTag(article.doc, **kwargs)
        if meta:
            favicon = self.parser.getAttribute(meta[0], 'href')
            return favicon
        return ''
    def get_meta_lang(self, article):
        """
        Extract content language from meta.

        Checks the <html lang=...> attribute first, then the
        Content-Language / lang meta tags; returns a lowercased two-letter
        code or None.
        """
        # we have a lang attribute in html
        attr = self.parser.getAttribute(article.doc, attr='lang')
        if attr is None:
            # look up for a Content-Language in meta
            items = [
                {'tag': 'meta', 'attr': 'http-equiv', 'value': 'content-language'},
                {'tag': 'meta', 'attr': 'name', 'value': 'lang'}
            ]
            for item in items:
                meta = self.parser.getElementsByTag(article.doc, **item)
                if meta:
                    attr = self.parser.getAttribute(meta[0], attr='content')
                    break
        if attr:
            # Only the first two characters are used (e.g. "en-US" -> "en").
            value = attr[:2]
            if re.search(RE_LANG, value):
                return value.lower()
        return None
    def get_meta_content(self, doc, metaName):
        """
        Extract a given meta content form document.
        """
        meta = self.parser.css_select(doc, metaName)
        content = None
        if meta is not None and len(meta) > 0:
            content = self.parser.getAttribute(meta[0], 'content')
        if content:
            return content.strip()
        return ''
    def get_meta_description(self, article):
        """
        If the article has meta description set in the source, use that.
        """
        return self.get_meta_content(article.doc, "meta[name=description]")
    def get_meta_keywords(self, article):
        """
        If the article has meta keywords set in the source, use that.
        """
        return self.get_meta_content(article.doc, "meta[name=keywords]")
    def get_meta_data(self, article):
        """Parse every <meta> tag into a nested dict keyed by the
        colon-separated parts of its property/name attribute
        (e.g. og:image:width -> data['og']['image']['width'])."""
        data = defaultdict(dict)
        props = self.parser.css_select(article.doc, 'meta')
        for prop in props:
            key = prop.attrib.get('property')
            if not key:
                key = prop.attrib.get('name')
            if not key:
                continue
            key = key.split(':')
            value = prop.attrib.get('content')
            if not value:
                value = prop.attrib.get('value')
            if not value:
                continue
            value = value.strip()
            if value.isdigit():
                value = int(value)
            # Walk/create the nested dicts for each key part.
            ref = data[key.pop(0)]
            for idx, part in enumerate(key):
                if not key[idx:-1]: # no next values
                    ref[part] = value
                    break
                if not ref.get(part):
                    ref[part] = dict()
                else:
                    # A scalar already stored here becomes {'url': scalar}
                    # so deeper keys can still be attached.
                    if isinstance(ref.get(part), basestring):
                        ref[part] = {'url': ref[part]}
                ref = ref[part]
        return data
    def get_canonical_link(self, article):
        """
        If the article has meta canonical link set in the url.
        """
        kwargs = {'tag': 'link', 'attr': 'rel', 'value': 'canonical'}
        meta = self.parser.getElementsByTag(article.doc, **kwargs)
        if meta is not None and len(meta) > 0:
            href = self.parser.getAttribute(meta[0], 'href')
            if href:
                href = href.strip()
                o = urlparse.urlparse(href)
                # Relative canonical link: resolve against the article url.
                if not o.hostname:
                    z = urlparse.urlparse(article.url)
                    domain = '%s://%s' % (z.scheme, z.hostname)
                    href = urlparse.urljoin(domain, href)
                return href
        return u''
    def get_img_urls(self, article):
        """
        Return all of the images on an html page, lxml root.
        """
        doc = article.raw_doc
        urls = self.parser.get_img_urls(doc)
        img_links = set([ urlparse.urljoin(article.url, url) for url in urls ])
        return img_links
    def get_top_img_url(self, article):
        """Return the most likely "top image" url for the article page.
        """
        # !important, we must use raw_doc because at this point doc has been cleaned
        doc = article.raw_doc
        return self.parser.get_top_img_url(doc)
    def get_category_urls(self, source, source_url=None, page_urls=None):
        """
        Requires: source lxml root and source url takes a domain and finds all of the
        top level urls, we are assuming that these are the category urls.
        cnn.com --> [cnn.com/latest, world.cnn.com, cnn.com/asia]
        """
        source_url = source.url if not source_url else source_url
        page_urls = self.parser.get_urls(source.doc) if not page_urls else page_urls
        valid_categories = []
        # Pass 1: keep urls that look like subdomains of the source or
        # single-segment paths on the source domain.
        for p_url in page_urls:
            scheme = get_scheme(p_url, allow_fragments=False)
            domain = get_domain(p_url, allow_fragments=False)
            path = get_path(p_url, allow_fragments=False)
            if not domain and not path:
                if source.config.verbose:
                    print 'elim category url %s for no domain and path' % p_url
                continue
            if path and path.startswith('#'):
                if source.config.verbose:
                    print 'elim category url %s path starts with #' % p_url
                continue
            if scheme and (scheme!='http' and scheme!='https'):
                if source.config.verbose:
                    print 'elim category url %s for bad scheme, not http nor https' % p_url
                continue
            if domain:
                child_tld = tldextract.extract(p_url)
                domain_tld = tldextract.extract(source_url)
                child_subdomain_parts = child_tld.subdomain.split('.')
                subdomain_contains = False
                for part in child_subdomain_parts:
                    if part == domain_tld.domain:
                        if source.config.verbose:
                            print 'subdomain contains at %s and %s' % (str(part), str(domain_tld.domain))
                        subdomain_contains = True
                        break
                # microsoft.com is definitely not related to espn.com, but espn.go.com is probably
                # related to espn.com
                if not subdomain_contains and (child_tld.domain != domain_tld.domain):
                    if source.config.verbose:
                        print 'elim category url %s for domain mismatch' % p_url
                    continue
                elif child_tld.subdomain in ['m', 'i']:
                    if source.config.verbose:
                        print 'elim category url %s for mobile subdomain' % p_url
                    continue
                else:
                    valid_categories.append(scheme+'://'+domain)
                    # TODO account for case where category is in form http://subdomain.domain.tld/category/ <-- it's still legal!
            else:
                # we want a path with just one subdir
                # cnn.com/world and cnn.com/world/ are both valid_categories
                path_chunks = [ x for x in path.split('/') if len(x) > 0 ]
                if 'index.html' in path_chunks:
                    path_chunks.remove('index.html')
                if len(path_chunks) == 1 and len(path_chunks[0]) < 14:
                    valid_categories.append(domain+path)
                else:
                    if source.config.verbose:
                        print 'elim category url %s for >1 path chunks or size path chunks' % p_url
        # Pass 2: drop candidates containing any of these non-category words.
        stopwords = [
            'about', 'help', 'privacy', 'legal', 'feedback', 'sitemap',
            'profile', 'account', 'mobile', 'sitemap', 'facebook', 'myspace',
            'twitter', 'linkedin', 'bebo', 'friendster', 'stumbleupon', 'youtube',
            'vimeo', 'store', 'mail', 'preferences', 'maps', 'password', 'imgur',
            'flickr', 'search', 'subscription', 'itunes', 'siteindex', 'events',
            'stop', 'jobs', 'careers', 'newsletter', 'subscribe', 'academy',
            'shopping', 'purchase', 'site-map', 'shop', 'donate', 'newsletter',
            'product', 'advert', 'info', 'tickets', 'coupons', 'forum', 'board',
            'archive', 'browse', 'howto', 'how to', 'faq', 'terms', 'charts',
            'services', 'contact', 'plus', 'admin', 'login', 'signup', 'register',
            'developer', 'proxy']
        _valid_categories = []
        # TODO Stop spamming urlparse and tldextract calls...
        for p_url in valid_categories:
            path = get_path(p_url)
            subdomain = tldextract.extract(p_url).subdomain
            conjunction = path + ' ' + subdomain
            bad = False
            for badword in stopwords:
                if badword.lower() in conjunction.lower():
                    if source.config.verbose:
                        print 'elim category url %s for subdomain contain stopword!' % p_url
                    bad=True
                    break
            if not bad:
                _valid_categories.append(p_url)
        _valid_categories.append('/') # add the root!
        # Pass 3: normalize scheme-relative urls and strip trailing slashes.
        for i, p_url in enumerate(_valid_categories):
            if p_url.startswith('://') :
                p_url = 'http' + p_url
                _valid_categories[i] = p_url
            elif p_url.startswith('//'):
                p_url = 'http:' + p_url
                _valid_categories[i] = p_url
            if p_url.endswith('/'):
                p_url = p_url[:-1]
                _valid_categories[i] = p_url
        _valid_categories = list(set(_valid_categories))
        category_urls = [prepare_url(p_url, source_url) for p_url in _valid_categories]
        category_urls = [c for c in category_urls if c is not None]
        return category_urls
    def get_feed_urls(self, source):
        """
        Requires: List of category lxml roots, two types of anchors: categories
        and feeds (rss). we extract category urls first and then feeds.
        """
        feed_urls = []
        for category in source.categories:
            root = category.doc
            feed_urls.extend(self.parser.get_feed_urls(root))
        # Cap at 50 feeds before dedup to bound downstream work.
        feed_urls = feed_urls[:50]
        feed_urls = [ prepare_url(f, source.url) for f in feed_urls ]
        feeds = list(set(feed_urls))
        return feeds
    def extract_tags(self, article):
        """Return the set of tag strings found via rel=tag anchors, falling
        back to href-pattern anchors; NO_STRINGS (empty set) if none."""
        node = article.doc
        # node doesn't have chidren
        if len(list(node)) == 0:
            return NO_STRINGS
        elements = self.parser.css_select(node, A_REL_TAG_SELECTOR)
        if not elements:
            elements = self.parser.css_select(node, A_HREF_TAG_SELECTOR)
            if not elements:
                return NO_STRINGS
        tags = []
        for el in elements:
            tag = self.parser.getText(el)
            if tag:
                tags.append(tag)
        return set(tags)
    def calculate_best_node(self, article):
        """Score candidate text nodes (p/pre/td) by stopword density and
        boosts, accumulate scores onto their parents, and return the
        highest-scoring parent node as the main content node."""
        doc = article.doc
        top_node = None
        nodes_to_check = self.nodes_to_check(doc)
        starting_boost = float(1.0)
        cnt = 0
        i = 0
        parent_nodes = []
        nodes_with_text = []
        # Keep only nodes with some stopwords and low link density.
        for node in nodes_to_check:
            text_node = self.parser.getText(node)
            word_stats = self.stopwords_class(language=self.language).get_stopword_count(text_node)
            high_link_density = self.is_highlink_density(node)
            if word_stats.get_stopword_count() > 2 and not high_link_density:
                nodes_with_text.append(node)
        nodes_number = len(nodes_with_text)
        negative_scoring = 0
        # The bottom 25% of nodes get penalized (boilerplate tends to be
        # at the end of the document).
        bottom_negativescore_nodes = float(nodes_number) * 0.25
        for node in nodes_with_text:
            boost_score = float(0)
            # boost
            if(self.is_boostable(node)):
                if cnt >= 0:
                    boost_score = float((1.0 / starting_boost) * 50)
                    starting_boost += 1
            # nodes_number
            if nodes_number > 15:
                if (nodes_number - i) <= bottom_negativescore_nodes:
                    booster = float(bottom_negativescore_nodes - (nodes_number - i))
                    boost_score = float(-pow(booster, float(2)))
                    negscore = -abs(boost_score) + negative_scoring
                    if negscore > 40:
                        boost_score = float(5)
            text_node = self.parser.getText(node)
            word_stats = self.stopwords_class(language=self.language).get_stopword_count(text_node)
            upscore = int(word_stats.get_stopword_count() + boost_score)
            # parent node
            parent_node = self.parser.getParent(node)
            self.update_score(parent_node, upscore)
            self.update_node_count(parent_node, 1)
            if parent_node not in parent_nodes:
                parent_nodes.append(parent_node)
            # parent of parent node gets half the score
            parent_parent_node = self.parser.getParent(parent_node)
            if parent_parent_node is not None:
                self.update_node_count(parent_parent_node, 1)
                self.update_score(parent_parent_node, upscore / 2)
                if parent_parent_node not in parent_nodes:
                    parent_nodes.append(parent_parent_node)
            cnt += 1
            i += 1
        top_node_score = 0
        for e in parent_nodes:
            score = self.get_score(e)
            if score > top_node_score:
                top_node = e
                top_node_score = score
            if top_node is None:
                top_node = e
        return top_node
    def is_boostable(self, node):
        """
        Alot of times the first paragraph might be the caption under an image
        so we'll want to make sure if we're going to boost a parent node that
        it should be connected to other paragraphs,
        at least for the first n paragraphs so we'll want to make sure that
        the next sibling is a paragraph and has at
        least some substantial weight to it.
        """
        para = "p"
        steps_away = 0
        minimum_stopword_count = 5
        max_stepsaway_from_node = 3
        nodes = self.walk_siblings(node)
        for current_node in nodes:
            # p
            current_node_tag = self.parser.getTag(current_node)
            if current_node_tag == para:
                if steps_away >= max_stepsaway_from_node:
                    return False
                paraText = self.parser.getText(current_node)
                word_stats = self.stopwords_class(language=self.language).get_stopword_count(paraText)
                if word_stats.get_stopword_count() > minimum_stopword_count:
                    return True
                steps_away += 1
        return False
    def walk_siblings(self, node):
        """Return the preceding siblings of ``node``, nearest first."""
        current_sibling = self.parser.previousSibling(node)
        b = []
        while current_sibling is not None:
            b.append(current_sibling)
            previousSibling = self.parser.previousSibling(current_sibling)
            current_sibling = None if previousSibling is None else previousSibling
        return b
    def add_siblings(self, top_node):
        """Prepend qualifying content from the top node's preceding siblings
        onto the top node itself."""
        baselinescore_siblings_para = self.get_siblings_score(top_node)
        results = self.walk_siblings(top_node)
        for current_node in results:
            ps = self.get_siblings_content(current_node, baselinescore_siblings_para)
            for p in ps:
                top_node.insert(0, p)
        return top_node
    def get_siblings_content(self, current_sibling, baselinescore_siblings_para):
        """
        Adds any siblings that may have a decent score to this node.
        """
        if current_sibling.tag == 'p' and len(self.parser.getText(current_sibling)) > 0:
            e0 = current_sibling
            # Detach tail text so it isn't duplicated when re-inserted.
            if e0.tail:
                e0 = copy.deepcopy(e0)
                e0.tail = ''
            return [e0]
        else:
            potential_paragraphs = self.parser.getElementsByTag(current_sibling, tag='p')
            if potential_paragraphs is None:
                # NOTE(review): caller iterates the result, so a None return
                # would raise; presumably getElementsByTag never returns None.
                return None
            else:
                ps = []
                for first_paragraph in potential_paragraphs:
                    text = self.parser.getText(first_paragraph)
                    if len(text) > 0:
                        word_stats = self.stopwords_class(language=self.language).get_stopword_count(text)
                        paragraph_score = word_stats.get_stopword_count()
                        sibling_baseline_score = float(.30)
                        high_link_density = self.is_highlink_density(first_paragraph)
                        score = float(baselinescore_siblings_para * sibling_baseline_score)
                        if score < paragraph_score and not high_link_density:
                            p = self.parser.createElement(tag='p', text=text, tail=None)
                            ps.append(p)
                return ps
    def get_siblings_score(self, top_node):
        """
        We could have long articles that have tons of paragraphs
        so if we tried to calculate the base score against
        the total text score of those paragraphs it would be unfair.
        So we need to normalize the score based on the average scoring
        of the paragraphs within the top node.
        For example if our total score of 10 paragraphs was 1000
        but each had an average value of 100 then 100 should be our base.
        """
        base = 100000
        paragraphs_number = 0
        paragraphs_score = 0
        nodes_to_check = self.parser.getElementsByTag(top_node, tag='p')
        for node in nodes_to_check:
            text_node = self.parser.getText(node)
            word_stats = self.stopwords_class(language=self.language).get_stopword_count(text_node)
            high_link_density = self.is_highlink_density(node)
            if word_stats.get_stopword_count() > 2 and not high_link_density:
                paragraphs_number += 1
                paragraphs_score += word_stats.get_stopword_count()
        if paragraphs_number > 0:
            base = paragraphs_score / paragraphs_number
        return base
    def update_score(self, node, addToScore):
        """
        Adds a score to the gravityScore Attribute we put on divs
        we'll get the current score then add the score
        we're passing in to the current.
        """
        current_score = 0
        score_string = self.parser.getAttribute(node, 'gravityScore')
        if score_string:
            current_score = int(score_string)
        new_score = current_score + addToScore
        self.parser.setAttribute(node, "gravityScore", str(new_score))
    def update_node_count(self, node, add_to_count):
        """
        Stores how many decent nodes are under a parent node.
        """
        current_score = 0
        count_string = self.parser.getAttribute(node, 'gravityNodes')
        if count_string:
            current_score = int(count_string)
        new_score = current_score + add_to_count
        self.parser.setAttribute(node, "gravityNodes", str(new_score))
    def is_highlink_density(self, e):
        """
        Checks the density of links within a node,
        is there not much text and most of it contains linky shit?
        if so it's no good.
        """
        links = self.parser.getElementsByTag(e, tag='a')
        if links is None or len(links) == 0:
            return False
        text = self.parser.getText(e)
        words = text.split(' ')
        words_number = float(len(words))
        sb = []
        for link in links:
            sb.append(self.parser.getText(link))
        linkText = ''.join(sb)
        linkWords = linkText.split(' ')
        numberOfLinkWords = float(len(linkWords))
        numberOfLinks = float(len(links))
        linkDivisor = float(numberOfLinkWords / words_number)
        score = float(linkDivisor * numberOfLinks)
        if score >= 1.0:
            return True
        return False
        # return True if score > 1.0 else False
    def get_score(self, node):
        """
        Returns the gravityScore as an integer from this node.
        """
        return self.get_node_gravity_score(node) or 0
    def get_node_gravity_score(self, node):
        # Returns None when the node has never been scored.
        grvScoreString = self.parser.getAttribute(node, 'gravityScore')
        if not grvScoreString:
            return None
        return int(grvScoreString)
    def nodes_to_check(self, doc):
        """
        Returns a list of nodes we want to search
        on like paragraphs and tables.
        """
        nodes_to_check = []
        for tag in ['p', 'pre', 'td']:
            items = self.parser.getElementsByTag(doc, tag=tag)
            nodes_to_check += items
        return nodes_to_check
    def is_table_and_no_para_exist(self, e):
        """True if ``e`` contains no substantial (>=25 char) paragraphs and
        is not itself a table cell. Also removes the short paragraphs."""
        subParagraphs = self.parser.getElementsByTag(e, tag='p')
        for p in subParagraphs:
            txt = self.parser.getText(p)
            if len(txt) < 25:
                self.parser.remove(p)
        subParagraphs2 = self.parser.getElementsByTag(e, tag='p')
        # NOTE(review): `is not "td"` compares object identity and only works
        # because CPython interns short string literals; should be != "td".
        if len(subParagraphs2) == 0 and e.tag is not "td":
            return True
        return False
    def is_nodescore_threshold_met(self, node, e):
        """True when ``e``'s score is at least 8% of the top node's score
        (table cells always pass)."""
        top_node_score = self.get_score(node)
        current_nodeScore = self.get_score(e)
        thresholdScore = float(top_node_score * .08)
        if (current_nodeScore < thresholdScore) and e.tag != 'td':
            return False
        return True
    def post_cleanup(self, targetNode):
        """
        Remove any divs that looks like non-content,
        clusters of links, or paras with no gusto.
        """
        node = self.add_siblings(targetNode)
        for e in self.parser.getChildren(node):
            e_tag = self.parser.getTag(e)
            if e_tag != 'p':
                if self.is_highlink_density(e) \
                    or self.is_table_and_no_para_exist(e) \
                    or not self.is_nodescore_threshold_met(node, e):
                    self.parser.remove(e)
        return node
class StandardContentExtractor(ContentExtractor):
    """Default extractor: inherits all behavior from ContentExtractor."""
    pass
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.configurationsample.lombok;
import java.util.ArrayList;
import java.util.List;
import lombok.Value;
import org.springframework.boot.configurationsample.TestConfigurationProperties;
/**
 * Configuration properties using Lombok {@code @Value}.
 *
 * <p>Annotation-processor test sample: exercises metadata generation for
 * immutable (final, no-setter) properties, a deprecated property, and
 * constant fields that must be ignored.
 *
 * @author Mark Jeffrey
 */
@Value
@TestConfigurationProperties("value")
@SuppressWarnings("unused")
public class LombokSimpleValueProperties {
	// Constant with an initializer: exposed with its default value.
	private final String id = "super-id";
	/**
	 * Name description.
	 */
	private String name;
	private String description;
	private Integer counter;
	@Deprecated
	private Integer number = 0;
	private final List<String> items = new ArrayList<>();
	// Non-property constant: should not appear in the generated metadata.
	private final String ignored = "foo";
}
// RUN: json_to_flatbuffer %p/test_schema.fbs %s | flatbuffer_translate --tflite-flatbuffer-to-mlir -o - | FileCheck %s
// This test is to test that if the flatbuffer omits the last optional input `bias` of tfl.conv_2d op, the flatbuffer_importer will automatically adds `none` value to tfl.conv_2d.
// CHECK: %[[CST:.*]] = "tfl.no_value"() <{value}> : () -> none
// CHECK: %[[RES0:.*]] = "tfl.conv_2d"(%arg0, %arg1, %[[CST]]) <{dilation_h_factor = 1 : i32, dilation_w_factor = 1 : i32, fused_activation_function = "NONE", padding = "SAME", stride_h = 1 : i32, stride_w = 1 : i32}> : (tensor<256x32x32x3xf32>, tensor<16x3x3x3xf32>, none) -> tensor<256x32x32x16xf32>
// CHECK: return %[[RES0]] : tensor<256x32x32x16xf32>
{
"version": 3,
"operator_codes": [
{
"builtin_code": "CONV_2D"
}
],
"subgraphs": [
{
"tensors": [
{
"shape": [
256,
32,
32,
3
],
"name": "arg0",
"quantization": {
}
},
{
"shape": [
16,
3,
3,
3
],
"name": "arg1",
"quantization": {
}
},
{
"shape": [
256,
32,
32,
16
],
"name": "output",
"quantization": {
}
}
],
"inputs": [
0,
1
],
"outputs": [
2
],
"operators": [
{
"inputs": [
0,
1
],
"outputs": [
2
],
"builtin_options_type": "Conv2DOptions",
"builtin_options": {
"stride_w": 1,
"stride_h": 1
}
}
],
"name": "main"
}
],
"description": "MLIR Converted."
} | json | github | https://github.com/tensorflow/tensorflow | tensorflow/compiler/mlir/lite/tests/flatbuffer2mlir/optional_input.json |
# Copyright 2010 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from __future__ import division
from whoosh.compat import xrange
from whoosh.matching import mcore
class WrappingMatcher(mcore.Matcher):
    """Base class for matchers that wrap sub-matchers.

    By default every part of the Matcher protocol is delegated to the
    wrapped ``child``; scoring/quality methods are scaled by ``boost``.
    Subclasses override only the behavior they change.
    """
    def __init__(self, child, boost=1.0):
        # child: the wrapped Matcher; boost: score multiplier.
        self.child = child
        self.boost = boost
    def __repr__(self):
        return "%s(%r, boost=%s)" % (self.__class__.__name__, self.child,
                                     self.boost)
    def copy(self):
        kwargs = {}
        if hasattr(self, "boost"):
            kwargs["boost"] = self.boost
        return self.__class__(self.child.copy(), **kwargs)
    def depth(self):
        return 1 + self.child.depth()
    def _replacement(self, newchild):
        # Hook for subclasses that need extra constructor arguments.
        return self.__class__(newchild, boost=self.boost)
    def replace(self, minquality=0):
        # Replace the child matcher
        r = self.child.replace(minquality)
        if r is not self.child:
            # If the child changed, return a new wrapper on the new child
            return self._replacement(r)
        else:
            return self
    def id(self):
        return self.child.id()
    def all_ids(self):
        return self.child.all_ids()
    def is_active(self):
        return self.child.is_active()
    def reset(self):
        self.child.reset()
    def children(self):
        return [self.child]
    def supports(self, astype):
        return self.child.supports(astype)
    def value(self):
        return self.child.value()
    def value_as(self, astype):
        return self.child.value_as(astype)
    def spans(self):
        return self.child.spans()
    def skip_to(self, id):
        return self.child.skip_to(id)
    def next(self):
        self.child.next()
    def supports_block_quality(self):
        return self.child.supports_block_quality()
    def skip_to_quality(self, minquality):
        # Divide rather than multiply: the child's quality threshold is in
        # un-boosted units.
        return self.child.skip_to_quality(minquality / self.boost)
    def max_quality(self):
        return self.child.max_quality() * self.boost
    def block_quality(self):
        return self.child.block_quality() * self.boost
    def weight(self):
        return self.child.weight() * self.boost
    def score(self):
        return self.child.score() * self.boost
class MultiMatcher(mcore.Matcher):
    """Serializes the results of a list of sub-matchers.

    The sub-matchers are consumed one after another; each sub-matcher's
    document IDs are translated into the combined ID space by adding the
    corresponding offset from ``idoffsets``.
    """
    def __init__(self, matchers, idoffsets, scorer=None, current=0):
        """
        :param matchers: a list of Matcher objects.
        :param idoffsets: a list of offsets corresponding to items in the
            ``matchers`` list.
        :param scorer: optional scoring object consulted by :meth:`score`.
        :param current: index into ``matchers`` of the sub-matcher to start
            from (used by :meth:`copy` and :meth:`replace`).
        """
        self.matchers = matchers
        self.offsets = idoffsets
        self.scorer = scorer
        self.current = current
        # Skip over any sub-matchers that are already exhausted
        self._next_matcher()
    def __repr__(self):
        return "%s(%r, %r, current=%s)" % (self.__class__.__name__,
                                           self.matchers, self.offsets,
                                           self.current)
    def is_active(self):
        # Active while there is at least one unconsumed sub-matcher
        return self.current < len(self.matchers)
    def reset(self):
        for mr in self.matchers:
            mr.reset()
        self.current = 0
    def children(self):
        return [self.matchers[self.current]]
    def _next_matcher(self):
        # Advance self.current past any inactive sub-matchers
        matchers = self.matchers
        while (self.current < len(matchers)
               and not matchers[self.current].is_active()):
            self.current += 1
    def copy(self):
        return self.__class__([mr.copy() for mr in self.matchers],
                              self.offsets, current=self.current)
    def depth(self):
        if self.is_active():
            return 1 + max(mr.depth() for mr in self.matchers[self.current:])
        else:
            return 0
    def replace(self, minquality=0):
        m = self
        if minquality:
            # Skip sub-matchers that don't have a high enough max quality to
            # contribute
            while (m.is_active()
                   and m.matchers[m.current].max_quality() < minquality):
                m = self.__class__(self.matchers, self.offsets, self.scorer,
                                   m.current + 1)
                m._next_matcher()
        if not m.is_active():
            return mcore.NullMatcher()
        # TODO: Possible optimization: if the last matcher is current, replace
        # this with the last matcher, but wrap it with a matcher that adds the
        # offset. Have to check whether that's actually faster, though.
        return m
    def id(self):
        # Translate the current sub-matcher's ID into the combined ID space
        current = self.current
        return self.matchers[current].id() + self.offsets[current]
    def all_ids(self):
        offsets = self.offsets
        for i, mr in enumerate(self.matchers):
            for id in mr.all_ids():
                yield id + offsets[i]
    def spans(self):
        return self.matchers[self.current].spans()
    def supports(self, astype):
        return self.matchers[self.current].supports(astype)
    def value(self):
        return self.matchers[self.current].value()
    def value_as(self, astype):
        return self.matchers[self.current].value_as(astype)
    def next(self):
        if not self.is_active():
            raise mcore.ReadTooFar
        self.matchers[self.current].next()
        # Move on to the next sub-matcher if this one is now exhausted
        if not self.matchers[self.current].is_active():
            self._next_matcher()
    def skip_to(self, id):
        if not self.is_active():
            raise mcore.ReadTooFar
        if id <= self.id():
            return
        matchers = self.matchers
        offsets = self.offsets
        r = False
        # Skip within each sub-matcher (in its local ID space) until one of
        # them reaches the target combined ID
        while self.current < len(matchers) and id > self.id():
            mr = matchers[self.current]
            sr = mr.skip_to(id - offsets[self.current])
            r = sr or r
            if mr.is_active():
                break
            self._next_matcher()
        return r
    def supports_block_quality(self):
        return all(mr.supports_block_quality() for mr
                   in self.matchers[self.current:])
    def max_quality(self):
        # NOTE(review): includes already-exhausted sub-matchers after
        # self.current, if any — confirm that inactive matchers report a
        # harmless max_quality
        return max(m.max_quality() for m in self.matchers[self.current:])
    def block_quality(self):
        return self.matchers[self.current].block_quality()
    def weight(self):
        return self.matchers[self.current].weight()
    def score(self):
        # Scoring is delegated to the scorer supplied at construction time
        return self.scorer.score(self)
def ExcludeMatcher(child, excluded, boost=1.0):
    """Convenience factory: returns a :class:`FilterMatcher` configured to
    exclude the given IDs from the child's postings.

    :param child: the child matcher.
    :param excluded: a set of IDs to exclude.
    :param boost: score multiplier passed through to the FilterMatcher.
    """
    return FilterMatcher(child, excluded, exclude=True, boost=boost)
class FilterMatcher(WrappingMatcher):
    """Filters the postings from the wrapped based on whether the IDs are
    present in or absent from a set.
    """
    def __init__(self, child, ids, exclude=False, boost=1.0):
        """
        :param child: the child matcher.
        :param ids: a set of IDs to filter by.
        :param exclude: by default, only IDs from the wrapped matcher that are
            **in** the set are used. If this argument is True, only IDs from
            the wrapped matcher that are **not in** the set are used.
        :param boost: score multiplier applied by the WrappingMatcher base.
        """
        super(FilterMatcher, self).__init__(child)
        self._ids = ids
        self._exclude = exclude
        self.boost = boost
        # Position the child on the first posting that passes the filter
        self._find_next()
    def __repr__(self):
        return "%s(%r, %r, %r, boost=%s)" % (self.__class__.__name__,
                                             self.child, self._ids,
                                             self._exclude, self.boost)
    def reset(self):
        self.child.reset()
        self._find_next()
    def copy(self):
        return self.__class__(self.child.copy(), self._ids, self._exclude,
                              boost=self.boost)
    def _replacement(self, newchild):
        return self.__class__(newchild, self._ids, exclude=self._exclude,
                              boost=self.boost)
    def _find_next(self):
        # Advance the child until its current ID passes the filter or the
        # child becomes inactive. r OR-accumulates the truthiness of the
        # child's next() results — its meaning follows the Matcher.next()
        # contract (presumably a "was replaced" flag; confirm upstream).
        child = self.child
        ids = self._ids
        r = False
        if self._exclude:
            while child.is_active() and child.id() in ids:
                r = child.next() or r
        else:
            while child.is_active() and child.id() not in ids:
                r = child.next() or r
        return r
    def next(self):
        self.child.next()
        self._find_next()
    def skip_to(self, id):
        self.child.skip_to(id)
        self._find_next()
    def all_ids(self):
        ids = self._ids
        if self._exclude:
            return (id for id in self.child.all_ids() if id not in ids)
        else:
            return (id for id in self.child.all_ids() if id in ids)
    def all_items(self):
        ids = self._ids
        if self._exclude:
            return (item for item in self.child.all_items()
                    if item[0] not in ids)
        else:
            return (item for item in self.child.all_items() if item[0] in ids)
class InverseMatcher(WrappingMatcher):
    """Synthetic matcher, generates postings that are NOT present in the
    wrapped matcher.
    """
    def __init__(self, child, limit, missing=None, weight=1.0, id=0):
        """
        :param child: the matcher whose postings should be inverted.
        :param limit: one past the highest document number to generate.
        :param missing: optional callable taking a document number and
            returning True if that document should be skipped as well
            (defaults to "nothing is missing").
        :param weight: the weight/score reported for every generated posting.
        :param id: the document number to start from.
        """
        super(InverseMatcher, self).__init__(child)
        self.limit = limit
        self._weight = weight
        self.missing = missing or (lambda id: False)
        self._id = id
        self._find_next()
    def copy(self):
        return self.__class__(self.child.copy(), self.limit,
                              weight=self._weight, missing=self.missing,
                              id=self._id)
    def _replacement(self, newchild):
        return self.__class__(newchild, self.limit, missing=self.missing,
                              weight=self._weight, id=self._id)
    def is_active(self):
        # Active until the synthetic document counter reaches the limit
        return self._id < self.limit
    def reset(self):
        self.child.reset()
        self._id = 0
        self._find_next()
    def supports_block_quality(self):
        return False
    def _find_next(self):
        # Advance self._id to the next document number that is neither
        # "missing" nor present in the child matcher
        child = self.child
        missing = self.missing
        # If the current docnum isn't missing and the child matcher is
        # exhausted (so we don't have to worry about skipping its matches), we
        # don't have to do anything
        if not child.is_active() and not missing(self._id):
            return
        # Skip missing documents
        while self._id < self.limit and missing(self._id):
            self._id += 1
        # Catch the child matcher up to where this matcher is
        if child.is_active() and child.id() < self._id:
            child.skip_to(self._id)
        # While self._id is missing or is in the child matcher, increase it
        while child.is_active() and self._id < self.limit:
            if missing(self._id):
                self._id += 1
                continue
            if self._id == child.id():
                self._id += 1
                child.next()
                continue
            break
    def id(self):
        return self._id
    def all_ids(self):
        # Fall back to the generic implementation (repeated next() calls)
        return mcore.Matcher.all_ids(self)
    def next(self):
        if self._id >= self.limit:
            raise mcore.ReadTooFar
        self._id += 1
        self._find_next()
    def skip_to(self, id):
        if self._id >= self.limit:
            raise mcore.ReadTooFar
        if id < self._id:
            return
        self._id = id
        self._find_next()
    def weight(self):
        # Every synthetic posting carries the same fixed weight/score
        return self._weight
    def score(self):
        return self._weight
class RequireMatcher(WrappingMatcher):
    """Matches postings that are in both sub-matchers, but only uses scores
    from the first.
    """
    def __init__(self, a, b):
        """
        :param a: the matcher whose scores/values are used.
        :param b: the matcher whose document IDs are required to also match.
        """
        from whoosh.matching.binary import IntersectionMatcher
        self.a = a
        self.b = b
        # The wrapped child enforces the intersection of a and b
        WrappingMatcher.__init__(self, IntersectionMatcher(a, b))
    def copy(self):
        return self.__class__(self.a.copy(), self.b.copy())
    def supports_block_quality(self):
        # Quality is driven entirely by the scoring matcher
        return self.a.supports_block_quality()
    def replace(self, minquality=0):
        if not self.child.is_active():
            # If one of the sub-matchers is inactive, go inactive
            return mcore.NullMatcher()
        elif minquality and self.a.max_quality() < minquality:
            # If the required matcher doesn't have a high enough max quality
            # to possibly contribute, return an inactive matcher
            return mcore.NullMatcher()
        new_a = self.a.replace(minquality)
        new_b = self.b.replace()
        if not new_a.is_active():
            return mcore.NullMatcher()
        elif new_a is not self.a or new_b is not self.b:
            # If one of the sub-matchers changed, return a new Require
            # NOTE(review): this passes self.b rather than new_b, so the
            # replacement computed for b is discarded — confirm whether that
            # is intentional
            return self.__class__(new_a, self.b)
        else:
            return self
    def max_quality(self):
        return self.a.max_quality()
    def block_quality(self):
        return self.a.block_quality()
    def skip_to_quality(self, minquality):
        # Quality skipping is driven by the scoring matcher; afterwards
        # re-sync the intersection wrapper with its new position
        skipped = self.a.skip_to_quality(minquality)
        self.child._find_next()
        return skipped
    def weight(self):
        return self.a.weight()
    def score(self):
        return self.a.score()
    def supports(self, astype):
        return self.a.supports(astype)
    def value(self):
        return self.a.value()
    def value_as(self, astype):
        return self.a.value_as(astype)
class ConstantScoreWrapperMatcher(WrappingMatcher):
    """Wraps a matcher and reports a fixed score (and quality) for every
    matching document instead of the wrapped matcher's own values.
    """

    def __init__(self, child, score=1.0):
        """
        :param child: the matcher to wrap.
        :param score: the constant score to report for every match.
        """

        super(ConstantScoreWrapperMatcher, self).__init__(child)
        self._score = score

    def copy(self):
        # Duplicate the wrapped matcher while keeping the same constant score
        return self.__class__(self.child.copy(), score=self._score)

    def _replacement(self, replacement_child):
        # Re-wrap a replacement child with the same constant score
        return self.__class__(replacement_child, score=self._score)

    def max_quality(self):
        # The score never varies, so every quality measure equals it
        return self._score

    def block_quality(self):
        return self._score

    def score(self):
        return self._score
class SingleTermMatcher(WrappingMatcher):
    """Presents an arbitrary matcher tree as if it were a matcher for one
    single term, for the purposes of "what terms are matching?" questions.
    """

    def __init__(self, child, term):
        """
        :param child: the wrapped matcher tree.
        :param term: the term this tree stands for.
        """

        super(SingleTermMatcher, self).__init__(child)
        self._term = term

    def term(self):
        # Report the synthetic term instead of delegating to the child
        return self._term

    def replace(self, minquality=0):
        # Refuse replacement: optimizing this wrapper away would lose the
        # associated term information
        return self
class CoordMatcher(WrappingMatcher):
    """Modifies the computed score to penalize documents that don't match all
    terms in the matcher tree.
    Because this matcher modifies the score, it may give unexpected results
    when compared to another matcher returning the unmodified score.
    """
    def __init__(self, child, scale=1.0):
        """
        :param child: the matcher tree to wrap.
        :param scale: scaling factor used in the SQR formula.
        """
        WrappingMatcher.__init__(self, child)
        # Number of term matchers in the wrapped tree, captured eagerly at
        # construction time
        self._termcount = len(list(child.term_matchers()))
        self._maxqual = child.max_quality()
        self._scale = scale
    def _replacement(self, newchild):
        return self.__class__(newchild, scale=self._scale)
    def _sqr(self, score, matching):
        # This is the "SQR" (Short Query Ranking) function used by Apple's old
        # V-twin search library, described in the paper "V-Twin: A Lightweight
        # Engine for Interactive Use".
        #
        # http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.56.1916
        # score - document score using the current weighting function
        # matching - number of matching terms in the current document
        #
        # NOTE(review): raises ZeroDivisionError when termcount == scale
        # (e.g. a single-term tree with the default scale=1.0) or when
        # termcount == 0 — confirm callers guarantee termcount > scale.
        # Under Python 2, (termcount - 1) / termcount floor-divides unless
        # the module enables true division — TODO confirm at file top.
        termcount = self._termcount  # Number of terms in this tree
        scale = self._scale  # Scaling factor
        sqr = ((score + ((matching - 1) / (termcount - scale) ** 2))
               * ((termcount - 1) / termcount))
        return sqr
    def max_quality(self):
        # Best case: all terms match
        return self._sqr(self.child.max_quality(), self._termcount)
    def block_quality(self):
        return self._sqr(self.child.block_quality(), self._termcount)
    def score(self):
        # Count how many terms actually match the current document and
        # penalize the raw score accordingly
        child = self.child
        score = child.score()
        matching = 0
        for _ in child.matching_terms(child.id()):
            matching += 1
        return self._sqr(score, matching)
/* Copyright 2017 - 2025 R. Thomas
* Copyright 2017 - 2025 Quarkslab
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LIEF_PE_LOAD_CONFIGURATION_DYNAMIC_RELOCATION_FUNC_OVERRIDE_INFO_H
#define LIEF_PE_LOAD_CONFIGURATION_DYNAMIC_RELOCATION_FUNC_OVERRIDE_INFO_H
#include <cstdint>
#include <memory>
#include <ostream>
#include <string>
#include <vector>

#include "LIEF/visibility.h"
#include "LIEF/iterators.hpp"
namespace LIEF {
class SpanStream;
namespace PE {
class Parser;
class Relocation;
/// Metadata describing one function-override entry of a dynamic relocation,
/// i.e. a function whose implementation can be overridden, together with the
/// RVAs of the overriding functions and their associated base relocations.
class LIEF_API FunctionOverrideInfo {
  public:
  /// Owning container for the base relocations attached to this entry
  using relocations_t = std::vector<std::unique_ptr<Relocation>>;
  using it_relocations = ref_iterator<relocations_t&, Relocation*>;
  using it_const_relocations = const_ref_iterator<const relocations_t&, const Relocation*>;

  FunctionOverrideInfo() = default;
  FunctionOverrideInfo(uint32_t original_rva, uint32_t bdd_offset,
                       uint32_t base_reloc_size);

  // Copy/move operations are user-declared because relocations_ holds
  // unique_ptr elements (deep copy required)
  FunctionOverrideInfo(const FunctionOverrideInfo&);
  FunctionOverrideInfo& operator=(const FunctionOverrideInfo&);
  FunctionOverrideInfo(FunctionOverrideInfo&&);
  FunctionOverrideInfo& operator=(FunctionOverrideInfo&&);

  /// Human-readable representation of this entry
  std::string to_string() const;

  /// RVA of the original function
  uint32_t original_rva() const {
    return original_rva_;
  }
  /// Offset into the BDD region
  uint32_t bdd_offset() const {
    return bdd_offset_;
  }
  /// Size in bytes taken by RVAs
  /// (note: size_t-to-uint32_t narrowing; assumes the RVA count stays small)
  uint32_t rva_size() const {
    return rvas_.size() * sizeof(uint32_t);
  }
  /// Size in bytes taken by BaseRelocs
  uint32_t base_reloc_size() const {
    return base_relocsz_;
  }

  /// RVAs of the overriding functions
  const std::vector<uint32_t>& functions_rva() const {
    return rvas_;
  }

  /// Iterator over the base relocations associated with this entry
  it_relocations relocations() {
    return relocations_;
  }
  it_const_relocations relocations() const {
    return relocations_;
  }

  // Fluent setters: each returns *this so calls can be chained
  FunctionOverrideInfo& original_rva(uint32_t value) {
    original_rva_ = value;
    return *this;
  }
  FunctionOverrideInfo& bdd_offset(uint32_t value) {
    bdd_offset_ = value;
    return *this;
  }
  FunctionOverrideInfo& base_reloc_size(uint32_t value) {
    base_relocsz_ = value;
    return *this;
  }
  FunctionOverrideInfo& overriding_funcs(std::vector<uint32_t> funcs) {
    rvas_ = std::move(funcs);
    return *this;
  }

  friend LIEF_API
  std::ostream& operator<<(std::ostream& os, const FunctionOverrideInfo& info)
  {
    os << info.to_string();
    return os;
  }

  ~FunctionOverrideInfo();

  /// \private
  LIEF_LOCAL static
  std::unique_ptr<FunctionOverrideInfo> parse(Parser& ctx, SpanStream& strm);

  private:
  uint32_t original_rva_ = 0;
  uint32_t bdd_offset_ = 0;
  uint32_t base_relocsz_ = 0;
  std::vector<uint32_t> rvas_;
  relocations_t relocations_;
};
}
}
#endif | unknown | github | https://github.com/nodejs/node | deps/LIEF/include/LIEF/PE/LoadConfigurations/DynamicRelocation/FunctionOverrideInfo.hpp |
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jndi;
import javax.naming.Context;
import javax.naming.NamingException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.testfixture.beans.DerivedTestBean;
import org.springframework.beans.testfixture.beans.ITestBean;
import org.springframework.beans.testfixture.beans.TestBean;
import org.springframework.context.testfixture.jndi.ExpectedLookupTemplate;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* @author Rod Johnson
* @author Juergen Hoeller
* @author Chris Beams
*/
class JndiObjectFactoryBeanTests {

	// --- Required-property validation ---

	@Test
	void testNoJndiName() {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		assertThatIllegalArgumentException().isThrownBy(jof::afterPropertiesSet);
	}

	// --- Lookup name resolution: full / scheme / short names combined with
	// the resourceRef flag (which prepends "java:comp/env/" to short names) ---

	@Test
	void testLookupWithFullNameAndResourceRefTrue() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		Object o = new Object();
		jof.setJndiTemplate(new ExpectedLookupTemplate("java:comp/env/foo", o));
		jof.setJndiName("java:comp/env/foo");
		jof.setResourceRef(true);
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isSameAs(o);
	}

	@Test
	void testLookupWithFullNameAndResourceRefFalse() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		Object o = new Object();
		jof.setJndiTemplate(new ExpectedLookupTemplate("java:comp/env/foo", o));
		jof.setJndiName("java:comp/env/foo");
		jof.setResourceRef(false);
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isSameAs(o);
	}

	@Test
	void testLookupWithSchemeNameAndResourceRefTrue() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		Object o = new Object();
		jof.setJndiTemplate(new ExpectedLookupTemplate("java:foo", o));
		jof.setJndiName("java:foo");
		jof.setResourceRef(true);
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isSameAs(o);
	}

	@Test
	void testLookupWithSchemeNameAndResourceRefFalse() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		Object o = new Object();
		jof.setJndiTemplate(new ExpectedLookupTemplate("java:foo", o));
		jof.setJndiName("java:foo");
		jof.setResourceRef(false);
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isSameAs(o);
	}

	@Test
	void testLookupWithShortNameAndResourceRefTrue() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		Object o = new Object();
		jof.setJndiTemplate(new ExpectedLookupTemplate("java:comp/env/foo", o));
		jof.setJndiName("foo");
		jof.setResourceRef(true);
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isSameAs(o);
	}

	@Test
	void testLookupWithShortNameAndResourceRefFalse() {
		// Without resourceRef, a short name is not expanded and so is not found
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		Object o = new Object();
		jof.setJndiTemplate(new ExpectedLookupTemplate("java:comp/env/foo", o));
		jof.setJndiName("foo");
		jof.setResourceRef(false);
		assertThatExceptionOfType(NamingException.class).isThrownBy(jof::afterPropertiesSet);
	}

	@Test
	void testLookupWithArbitraryNameAndResourceRefFalse() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		Object o = new Object();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", o));
		jof.setJndiName("foo");
		jof.setResourceRef(false);
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isSameAs(o);
	}

	// --- expectedType checking of the looked-up object ---

	@Test
	void testLookupWithExpectedTypeAndMatch() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		String s = "";
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", s));
		jof.setJndiName("foo");
		jof.setExpectedType(String.class);
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isSameAs(s);
	}

	@Test
	void testLookupWithExpectedTypeAndNoMatch() {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", new Object()));
		jof.setJndiName("foo");
		jof.setExpectedType(String.class);
		assertThatExceptionOfType(NamingException.class).isThrownBy(
				jof::afterPropertiesSet)
			.withMessageContaining("java.lang.String");
	}

	// --- defaultObject fallback when the JNDI name is not found ---

	@Test
	void testLookupWithDefaultObject() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", ""));
		jof.setJndiName("myFoo");
		jof.setExpectedType(String.class);
		jof.setDefaultObject("myString");
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isEqualTo("myString");
	}

	@Test
	void testLookupWithDefaultObjectAndExpectedType() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", ""));
		jof.setJndiName("myFoo");
		jof.setExpectedType(String.class);
		jof.setDefaultObject("myString");
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isEqualTo("myString");
	}

	@Test
	void testLookupWithDefaultObjectAndExpectedTypeConversion() throws Exception {
		// The default "5" (String) is converted to the expected Integer type
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", ""));
		jof.setJndiName("myFoo");
		jof.setExpectedType(Integer.class);
		jof.setDefaultObject("5");
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isEqualTo(5);
	}

	@Test
	void testLookupWithDefaultObjectAndExpectedTypeConversionViaBeanFactory() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", ""));
		jof.setJndiName("myFoo");
		jof.setExpectedType(Integer.class);
		jof.setDefaultObject("5");
		jof.setBeanFactory(new DefaultListableBeanFactory());
		jof.afterPropertiesSet();
		assertThat(jof.getObject()).isEqualTo(5);
	}

	@Test
	void testLookupWithDefaultObjectAndExpectedTypeNoMatch() {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", ""));
		jof.setJndiName("myFoo");
		jof.setExpectedType(Boolean.class);
		jof.setDefaultObject("5");
		assertThatIllegalArgumentException().isThrownBy(jof::afterPropertiesSet);
	}

	// --- Proxying through an interface, lazy lookup and cache behavior ---

	@Test
	void testLookupWithProxyInterface() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		TestBean tb = new TestBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", tb));
		jof.setJndiName("foo");
		jof.setProxyInterface(ITestBean.class);
		jof.afterPropertiesSet();
		boolean condition = jof.getObject() instanceof ITestBean;
		assertThat(condition).isTrue();
		ITestBean proxy = (ITestBean) jof.getObject();
		assertThat(tb.getAge()).isEqualTo(0);
		proxy.setAge(99);
		assertThat(tb.getAge()).isEqualTo(99);
	}

	@Test
	void testLookupWithProxyInterfaceAndDefaultObject() {
		// defaultObject is incompatible with proxyInterface
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		TestBean tb = new TestBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", tb));
		jof.setJndiName("myFoo");
		jof.setProxyInterface(ITestBean.class);
		jof.setDefaultObject(Boolean.TRUE);
		assertThatIllegalArgumentException().isThrownBy(jof::afterPropertiesSet);
	}

	@Test
	void testLookupWithProxyInterfaceAndLazyLookup() throws Exception {
		// With lookupOnStartup=false the JNDI lookup happens on first proxy call
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		final TestBean tb = new TestBean();
		jof.setJndiTemplate(new JndiTemplate() {
			@Override
			public Object lookup(String name) {
				if ("foo".equals(name)) {
					tb.setName("tb");
					return tb;
				}
				return null;
			}
		});
		jof.setJndiName("foo");
		jof.setProxyInterface(ITestBean.class);
		jof.setLookupOnStartup(false);
		jof.afterPropertiesSet();
		boolean condition = jof.getObject() instanceof ITestBean;
		assertThat(condition).isTrue();
		ITestBean proxy = (ITestBean) jof.getObject();
		assertThat(tb.getName()).isNull();
		assertThat(tb.getAge()).isEqualTo(0);
		proxy.setAge(99);
		assertThat(tb.getName()).isEqualTo("tb");
		assertThat(tb.getAge()).isEqualTo(99);
	}

	@Test
	void testLookupWithProxyInterfaceWithNotCache() throws Exception {
		// With cache=false every proxy invocation performs a fresh lookup
		// (the stub increments tb's age on each lookup to make that visible)
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		final TestBean tb = new TestBean();
		jof.setJndiTemplate(new JndiTemplate() {
			@Override
			public Object lookup(String name) {
				if ("foo".equals(name)) {
					tb.setName("tb");
					tb.setAge(tb.getAge() + 1);
					return tb;
				}
				return null;
			}
		});
		jof.setJndiName("foo");
		jof.setProxyInterface(ITestBean.class);
		jof.setCache(false);
		jof.afterPropertiesSet();
		boolean condition = jof.getObject() instanceof ITestBean;
		assertThat(condition).isTrue();
		ITestBean proxy = (ITestBean) jof.getObject();
		assertThat(tb.getName()).isEqualTo("tb");
		assertThat(tb.getAge()).isEqualTo(1);
		proxy.returnsThis();
		assertThat(tb.getAge()).isEqualTo(2);
		proxy.haveBirthday();
		assertThat(tb.getAge()).isEqualTo(4);
	}

	@Test
	void testLookupWithProxyInterfaceWithLazyLookupAndNotCache() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		final TestBean tb = new TestBean();
		jof.setJndiTemplate(new JndiTemplate() {
			@Override
			public Object lookup(String name) {
				if ("foo".equals(name)) {
					tb.setName("tb");
					tb.setAge(tb.getAge() + 1);
					return tb;
				}
				return null;
			}
		});
		jof.setJndiName("foo");
		jof.setProxyInterface(ITestBean.class);
		jof.setLookupOnStartup(false);
		jof.setCache(false);
		jof.afterPropertiesSet();
		boolean condition = jof.getObject() instanceof ITestBean;
		assertThat(condition).isTrue();
		ITestBean proxy = (ITestBean) jof.getObject();
		assertThat(tb.getName()).isNull();
		assertThat(tb.getAge()).isEqualTo(0);
		proxy.returnsThis();
		assertThat(tb.getName()).isEqualTo("tb");
		assertThat(tb.getAge()).isEqualTo(1);
		proxy.returnsThis();
		assertThat(tb.getAge()).isEqualTo(2);
		proxy.haveBirthday();
		assertThat(tb.getAge()).isEqualTo(4);
	}

	// --- Lazy lookup / no-cache require a proxy interface ---

	@Test
	void testLazyLookupWithoutProxyInterface() {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiName("foo");
		jof.setLookupOnStartup(false);
		assertThatIllegalStateException().isThrownBy(jof::afterPropertiesSet);
	}

	@Test
	void testNotCacheWithoutProxyInterface() {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		jof.setJndiName("foo");
		jof.setCache(false);
		jof.setLookupOnStartup(false);
		assertThatIllegalStateException().isThrownBy(jof::afterPropertiesSet);
	}

	@Test
	void testLookupWithProxyInterfaceAndExpectedTypeAndMatch() throws Exception {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		TestBean tb = new TestBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", tb));
		jof.setJndiName("foo");
		jof.setExpectedType(TestBean.class);
		jof.setProxyInterface(ITestBean.class);
		jof.afterPropertiesSet();
		boolean condition = jof.getObject() instanceof ITestBean;
		assertThat(condition).isTrue();
		ITestBean proxy = (ITestBean) jof.getObject();
		assertThat(tb.getAge()).isEqualTo(0);
		proxy.setAge(99);
		assertThat(tb.getAge()).isEqualTo(99);
	}

	@Test
	void testLookupWithProxyInterfaceAndExpectedTypeAndNoMatch() {
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		TestBean tb = new TestBean();
		jof.setJndiTemplate(new ExpectedLookupTemplate("foo", tb));
		jof.setJndiName("foo");
		jof.setExpectedType(DerivedTestBean.class);
		jof.setProxyInterface(ITestBean.class);
		assertThatExceptionOfType(NamingException.class).isThrownBy(
				jof::afterPropertiesSet)
			.withMessageContaining("org.springframework.beans.testfixture.beans.DerivedTestBean");
	}

	@Test
	void testLookupWithExposeAccessContext() throws Exception {
		// exposeAccessContext=true should open/close the JNDI context around
		// every proxy invocation, including equals/hashCode/toString
		JndiObjectFactoryBean jof = new JndiObjectFactoryBean();
		TestBean tb = new TestBean();
		final Context mockCtx = mock();
		given(mockCtx.lookup("foo")).willReturn(tb);
		jof.setJndiTemplate(new JndiTemplate() {
			@Override
			protected Context createInitialContext() {
				return mockCtx;
			}
		});
		jof.setJndiName("foo");
		jof.setProxyInterface(ITestBean.class);
		jof.setExposeAccessContext(true);
		jof.afterPropertiesSet();
		boolean condition = jof.getObject() instanceof ITestBean;
		assertThat(condition).isTrue();
		ITestBean proxy = (ITestBean) jof.getObject();
		assertThat(tb.getAge()).isEqualTo(0);
		proxy.setAge(99);
		assertThat(tb.getAge()).isEqualTo(99);
		proxy.equals(proxy);
		proxy.hashCode();
		proxy.toString();
		verify(mockCtx, times(2)).close();
	}

}
from django.test import TestCase
from models import GlobalRegion, Country, StateProvince, RegionDistrict, Locality
from exceptions import IllegalMove, SameLevelMove, WrongLevelMove
class MoveTest(TestCase):
    """Exercises moving and merging nodes in the location hierarchy.

    The fixture defines the hierarchy GlobalRegion > Country > StateProvince
    > RegionDistrict > Locality, with museum objects linked at each level.
    """
    fixtures = ['testlocations.json']

    def setUp(self):
        # Common fixture objects shared by the move/merge scenarios below
        self.asia = GlobalRegion.objects.get(name='Asia')
        self.gr_australia = GlobalRegion.objects.get(name='Australia')
        self.country_australia = Country.objects.get(name='Australia', parent=self.gr_australia)
        self.country_thailand = Country.objects.get(name='Thailand', parent=self.asia)
        self.country_no_children = Country.objects.get(name='NoChildren', parent=self.gr_australia)
        self.stateprovince_1 = StateProvince.objects.get(name='Queensland',
                parent=self.country_australia)
        self.regiondistrict_1 = RegionDistrict.objects.get(name='SEQ',
                parent=self.stateprovince_1)
        self.locality_1 = Locality.objects.get(name='Brisbane',
                parent=self.regiondistrict_1)

    def test_cannot_move_globalregions(self):
        """
        Try moving a global region inside another global region
        """
        with self.assertRaises(IllegalMove):
            self.gr_australia.moveto_parent(self.asia)

    def test_cannot_move_to_same_level(self):
        """
        Try moving a country into another country
        """
        with self.assertRaises(SameLevelMove):
            self.country_australia.moveto_parent(self.country_thailand)

    def test_cannot_move_to_wrong_level(self):
        """
        Try (and fail) moving a StateProvince into a GlobalRegion
        """
        with self.assertRaises(WrongLevelMove):
            self.stateprovince_1.moveto_parent(self.asia)

    def test_move_locality(self):
        """
        Move a locality to another region/district and verify that the
        linked museum-object counts move with it.
        """
        maleny = Locality.objects.get(name='Maleny')
        orig_parent = maleny.parent
        northern = RegionDistrict.objects.get(name='Northern')
        num_in_child = maleny.museumobject_set.count()
        num_in_new_parent = northern.museumobject_set.count()
        num_in_orig_parent = maleny.parent.museumobject_set.count()
        child = maleny.moveto_parent(northern)
        # Check counts
        self.assertEqual(num_in_child,
                child.museumobject_set.count())
        self.assertEqual(num_in_new_parent + num_in_child,
                northern.museumobject_set.count())
        self.assertEqual(num_in_orig_parent - num_in_child,
                orig_parent.museumobject_set.count())

    def test_simple_country_move(self):
        """
        Move a country to another global region

        The country has linked museumobjects, but no children
        and no merge required.
        """
        child = Country.objects.get(name='NoChildren', parent=self.gr_australia)
        num_in_child = child.museumobject_set.count()
        num_in_new_parent = self.asia.museumobject_set.count()
        num_in_orig_parent = self.gr_australia.museumobject_set.count()
        # Perform the move
        child = child.moveto_parent(self.asia)
        # Check parent field updated
        self.assertEqual(self.asia, child.parent)
        # Check counts
        self.assertEqual(num_in_child,
                child.museumobject_set.count())
        self.assertEqual(num_in_new_parent + num_in_child,
                self.asia.museumobject_set.count())
        self.assertEqual(num_in_orig_parent - num_in_child,
                self.gr_australia.museumobject_set.count())
        self.assertTreeCounts(self.gr_australia)
        self.assertTreeCounts(self.asia)

    def test_simple_country_merge(self):
        """
        Move a country to another global region

        A country with the same name exists already, and both countries
        have some linked museum objects, so must be merged.
        """
        child = self.country_no_children
        old_parent = child.parent
        new_parent = GlobalRegion.objects.get(name='Europe')
        merge_target = Country.objects.get(name='NoChildren', parent=new_parent)
        num_in_child = child.museumobject_set.count()
        num_in_old_parent = old_parent.museumobject_set.count()
        num_in_new_parent = new_parent.museumobject_set.count()
        num_in_merge_target = merge_target.museumobject_set.count()
        # Perform the move
        child = child.moveto_parent(new_parent)
        # Check parent field updated
        self.assertEqual(new_parent, child.parent)
        # Check counts
        self.assertEqual(num_in_old_parent - num_in_child,
                old_parent.museumobject_set.count())
        self.assertEqual(num_in_new_parent + num_in_child,
                new_parent.museumobject_set.count())
        self.assertEqual(num_in_child + num_in_merge_target,
                merge_target.museumobject_set.count())
        self.assertTreeCounts(new_parent)
        self.assertTreeCounts(old_parent)

    def test_complex_country_merge(self):
        """
        Move a country to another global region

        A country with the same name exists already, and both countries
        have both linked museum objects and conflicting children, so must be merged.
        """
        child = Country.objects.get(name='Australia', parent=self.asia)
        old_parent = child.parent
        new_parent = GlobalRegion.objects.get(name='Australia')
        merge_target = Country.objects.get(name='Australia', parent=self.gr_australia)
        num_in_child = child.museumobject_set.count()
        num_in_old_parent = old_parent.museumobject_set.count()
        num_in_new_parent = new_parent.museumobject_set.count()
        num_in_merge_target = merge_target.museumobject_set.count()
        # Perform the move
        child = child.moveto_parent(new_parent)
        # Check parent field updated
        self.assertEqual(new_parent, child.parent)
        # Check counts
        self.assertEqual(num_in_old_parent - num_in_child,
                old_parent.museumobject_set.count())
        self.assertEqual(num_in_new_parent + num_in_child,
                new_parent.museumobject_set.count())
        self.assertEqual(num_in_child + num_in_merge_target,
                merge_target.museumobject_set.count())
        self.assertTreeCounts(new_parent)
        self.assertTreeCounts(old_parent)

    def move_country_with_children(self):
        """
        Move a country that countains children (and linked MOs)

        There is however no merging required
        """
        # TODO: implement. Note this method is currently inert: it lacks the
        # "test_" prefix, so the test runner never collects it.
        pass

    def assertTreeCounts(self, location):
        """
        Check that a there are at least as many MOs linked
        to a parent location as to it's children.
        """
        count = location.museumobject_set.count()
        if hasattr(location, 'children') and location.children.exists():
            child_count = 0
            for child in location.children.all():
                child_count += child.museumobject_set.count()
                self.assertTreeCounts(child)
            # assertGreaterEqual gives a diagnostic message with both values
            # on failure, unlike assertTrue(count >= child_count)
            self.assertGreaterEqual(count, child_count)
import pytest
import lcs.agents.acs as acs
import lcs.agents.acs2 as acs2
from lcs.strategies.action_selection import EpsilonGreedy
class TestEpsilonGreedy:
    """Tests for the EpsilonGreedy action-selection strategy with the ACS
    and ACS2 classifier families."""
    @pytest.fixture
    def acs_cfg(self):
        # Minimal ACS configuration: 4-bit conditions, 4 possible actions
        return acs.Configuration(
            classifier_length=4,
            number_of_possible_actions=4,
            epsilon=0.5)
    @pytest.fixture
    def acs2_cfg(self):
        # Equivalent configuration for the ACS2 variant
        return acs2.Configuration(
            classifier_length=4,
            number_of_possible_actions=4,
            epsilon=0.5)
    def test_should_raise_error_when_epsilon_is_missing(self):
        # epsilon is a required keyword argument
        with pytest.raises(KeyError):
            EpsilonGreedy(4)
    def test_should_assign_custom_epsilon(self):
        strategy = EpsilonGreedy(4, epsilon=0.9)
        assert strategy.epsilon == 0.9
    def test_should_work_with_acs(self, acs_cfg):
        # given
        c1 = acs.Classifier(
            condition='1##1', action=0, effect='0###',
            quality=0.571313, reward=7.67011,
            cfg=acs_cfg
        )
        # NOTE(review): both classifiers here use action=0 (unlike the ACS2
        # test below, where the better classifier has action=1), so the
        # assertion passes trivially — confirm c2 was not meant to use a
        # different action
        c2 = acs.Classifier(
            condition='1##1', action=0, effect='0###',
            quality=0.571313, reward=6.67011,
            cfg=acs_cfg
        )
        population = acs.ClassifiersList(*[c1, c2])
        # when: epsilon=0.0 forces pure exploitation (greedy choice)
        eg = EpsilonGreedy(acs_cfg.number_of_possible_actions,
                           epsilon=0.0)
        best_action = eg(population)
        # then
        assert best_action == 0
    def test_should_work_with_acs2(self, acs2_cfg):
        # given
        c1 = acs2.Classifier(
            condition='1##1', action=0, effect='0###',
            quality=0.571313, reward=7.67011,
            cfg=acs2_cfg
        )
        c2 = acs2.Classifier(
            condition='1##1', action=1, effect='0###',
            quality=0.581313, reward=7.67011,
            cfg=acs2_cfg
        )
        population = acs2.ClassifiersList(*[c1, c2])
        # when: epsilon=0.0 forces pure exploitation (greedy choice)
        eg = EpsilonGreedy(acs2_cfg.number_of_possible_actions,
                           epsilon=0.0)
        best_action = eg(population)
        # then: c2 has the higher quality, so its action wins
        assert best_action == 1
# Dependency Review Action
#
# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging.
#
# Source repository: https://github.com/actions/dependency-review-action
# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
name: 'Dependency Review'
on: [pull_request]
# Least-privilege token: the scan only needs to read repository contents.
permissions:
  contents: read
jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      # Both actions are pinned to full commit SHAs (the trailing comment
      # records the tag) so a moved tag cannot change what actually runs.
      - name: 'Checkout Repository'
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          # Do not leave the workflow token in the checkout's git config.
          persist-credentials: false
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
        with:
          allow-ghsas: GHSA-cx63-2mw6-8hw5
import traceback
import model.downloadVotes
# Completes the member-vote merge.
def prepVotes(voteQuery, person):
    """
    Complete the member-vote merge: for every rollcall in voteQuery,
    attach this member's own vote ("myVote"), an optional probability
    ("myProb"), and their party's vote tallies ("partyVote", "pVSum",
    "yea"/"nay"/"abs", "partyLabelVote").

    voteQuery -- dict; used when it has a "rollcalls" list and no
                 "errorMessage" key. Each rollcall is assumed to carry
                 an "id" -- TODO confirm against caller.
    person    -- dict read for "icpsr" and "party_code".

    Returns the (mutated in place) rollcall list, or [] when the query
    failed or the download returned nothing.
    """
    # Only proceed when the query succeeded and actually has rollcalls.
    if not "errorMessage" in voteQuery and "rollcalls" in voteQuery:
        votes = voteQuery["rollcalls"]
        idSet = [v["id"] for v in votes]
        print idSet
        # Fetch the full rollcall records (individual votes included)
        # for this person from the download API.
        rollcallsFinal = model.downloadVotes.downloadAPI(idSet,"Web_Person", person["icpsr"])
        if "rollcalls" in rollcallsFinal and len(rollcallsFinal["rollcalls"])>0:
            for i in xrange(0, len(idSet)):
                # Isolate votes from the rollcall
                try:
                    # Match the downloaded rollcall back to this row by id.
                    iV = next((r for r in rollcallsFinal["rollcalls"] if r["id"]==votes[i]["id"]), None)
                    # NOTE(review): 'not iV' already covers None, so the
                    # 'iV is None' test is redundant.
                    if not iV or iV is None:
                        print "Error finding the rollcall data based on vote id."
                        votes[i]["myVote"] = "Abs"
                        votes[i]["partyLabelVote"] = "N/A"
                        votes[i]["pVSum"] = 0
                    else:
                        myVote = next((v["vote"] for v in iV["votes"] if v["icpsr"]==person["icpsr"]), None)
                        if myVote is not None:
                            votes[i]["myVote"] = myVote
                        else:
                            # No individual vote found: skip the probability
                            # and party steps for this rollcall entirely.
                            print "Error matching individual vote to vote."
                            continue
                except:
                    # NOTE(review): bare except -- also hides programming
                    # errors, not just bad API payloads.
                    print traceback.format_exc()
                    votes[i]["myVote"] = "Abs"
                    votes[i]["partyLabelVote"] = "N/A"
                    votes[i]["pVSum"] = 0
                    continue
                # Isolate my probability from the rollcall, if it's there.
                try:
                    probResult = next((v["prob"] for v in iV["votes"] if v["icpsr"]==person["icpsr"]), None)
                    if probResult is not None:
                        votes[i]["myProb"] = probResult
                except:
                    # Probability is optional; a failure here is non-fatal.
                    print "Error calculating probability of vote."
                    pass
                # Now isolate the party vote info.
                try:
                    # party_vote_counts is keyed by party code as a string.
                    votes[i]["partyVote"] = next((v for k, v in iV["party_vote_counts"].iteritems() if k==str(person["party_code"])),None)
                    if votes[i]["partyVote"] is not None:
                        # Vote codes: <=3 counted as Yea (+1), 4..6 as Nay
                        # (-1), >6 as absent/other (0) -- presumably the
                        # standard rollcall coding; TODO confirm.
                        votes[i]["pVSum"] = sum([1*v if int(k)<=3 else -1*v if int(k)<=6 else 0 for k, v in votes[i]["partyVote"].iteritems()])
                        votes[i]["yea"] = sum([1*v if int(k)<=3 else 0 for k, v in votes[i]["partyVote"].iteritems()])
                        votes[i]["nay"] = sum([1*v if int(k)>3 and int(k)<=6 else 0 for k, v in votes[i]["partyVote"].iteritems()])
                        votes[i]["abs"] = sum([1*v if int(k)>6 else 0 for k, v in votes[i]["partyVote"].iteritems()])
                        votes[i]["partyLabelVote"] = "Yea" if votes[i]["pVSum"]>0 else "Nay" if votes[i]["pVSum"]<0 else "Tie"
                    else:
                        votes[i]["partyLabelVote"] = "N/A"
                        votes[i]["pVSum"] = 0
                except:
                    print "Error calculating party vote."
                    votes[i]["partyLabelVote"] = "N/A"
                    votes[i]["pVSum"] = 0
        else:
            votes = []
    else:
        votes = []
    return votes
def fixVoteProb(prob):
    """
    Format a vote probability (a percentage) for display.

    Returns the string ">99" when the value rounds to exactly 100, "<1"
    when it rounds below 1, and otherwise the rounded int itself.
    """
    # Round once instead of recomputing int(round(prob)) in every branch.
    rounded = int(round(prob))
    if rounded == 100:
        return ">99"
    elif rounded < 1:
        return "<1"
    else:
        return rounded
def fixPunc(text):
    """
    Ensure *text* ends with sentence punctuation.

    Text already ending in "." or ". " is returned unchanged; otherwise
    ". " (dot plus trailing space) is appended.
    """
    # str.endswith accepts a tuple of suffixes: one call replaces the
    # original endswith(".") or endswith(". ") pair.
    if text.endswith((".", ". ")):
        return text
    else:
        return text + ". "
# Copyright (c) 2016-2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import requests
from utils import tls_opts_with_client_certs
class Uploader(object):
    """Chunked blob uploader for the testfs namespace of a server."""

    def __init__(self, addr):
        # Host address every request is sent to.
        self.addr = addr

    def _start(self, name):
        # Open an upload session; the server answers with its location.
        endpoint = 'https://{addr}/namespace/testfs/blobs/sha256:{name}/uploads'.format(
            addr=self.addr, name=name)
        response = requests.post(endpoint, **tls_opts_with_client_certs())
        response.raise_for_status()
        return response.headers['Location']

    def _patch(self, name, uid, start, stop, chunk):
        # Send one byte range of the blob to the open upload session.
        endpoint = 'https://{addr}/namespace/testfs/blobs/sha256:{name}/uploads/{uid}'.format(
            addr=self.addr, name=name, uid=uid)
        headers = {'Content-Range': '%d-%d' % (start, stop)}
        response = requests.patch(
            endpoint, headers=headers, data=chunk, **tls_opts_with_client_certs())
        response.raise_for_status()

    def _commit(self, name, uid):
        # Finalize the upload session on the server side.
        endpoint = 'https://{addr}/namespace/testfs/blobs/sha256:{name}/uploads/{uid}'.format(
            addr=self.addr, name=name, uid=uid)
        response = requests.put(endpoint, **tls_opts_with_client_certs())
        response.raise_for_status()

    def upload(self, name, blob):
        """Upload *blob* under *name* as one start/patch/commit cycle."""
        session = self._start(name)
        self._patch(name, session, 0, len(blob), blob)
        self._commit(name, session)
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    # Deleting field 'User.last_name' (the inverse of backwards() below).
    db.delete_column('auth_user', 'last_name')
def backwards(self, orm):
    # Adding field 'User.last_name'
    # Restores the column dropped by forwards(); the old values are not
    # recoverable, so it comes back as an empty, optional CharField.
    db.add_column('auth_user', 'last_name',
                  self.gf('django.db.models.fields.CharField')(default='', max_length=30, blank=True),
                  keep_default=False)
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 12, 15, 0, 0)', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project', 'ident'), ('project', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']", 'null': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.fileblob': {
'Meta': {'object_name': 'FileBlob'},
'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
},
'sentry.group': {
'Meta': {'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupresolution': {
'Meta': {'object_name': 'GroupResolution'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.groupsnooze': {
'Meta': {'object_name': 'GroupSnooze'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.helppage': {
'Meta': {'object_name': 'HelpPage'},
'content': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True'}),
'priority': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'counter': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.release': {
'Meta': {'unique_together': "(('project', 'version'),)", 'object_name': 'Release'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {'object_name': 'UserReport', 'index_together': "(('project', 'event_id'),)"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
}
}
complete_apps = ['sentry']
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-03 15:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the custom ``User`` table."""

    # First migration of this app, so Django can fake-apply it on
    # databases that already contain the table.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('username', models.CharField(db_index=True, max_length=10, unique=True, verbose_name='username')),
                ('email', models.EmailField(max_length=254, unique=True, verbose_name='email address')),
                ('joined', models.DateTimeField(auto_now_add=True)),
                ('is_active', models.BooleanField(default=True)),
                ('is_admin', models.BooleanField(default=False)),
                ('following', models.IntegerField(default=0)),
                ('followers', models.IntegerField(default=0)),
                # NOTE(review): auto_now_add on a *birth* date stamps it with
                # the row-creation date, which looks like a model bug -- the
                # migration only mirrors the model; confirm against models.py.
                ('birth_date', models.DateField(auto_now_add=True)),
                ('blog_name', models.CharField(max_length=20, unique=True)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
"""Client for ldap
SYNOPSIS:
ldap
DESCRIPTION:
> ldap connect <host> <login> <password>
Connect to service
> ldap list <dn>
List node
> ldap search <dn> <filter>
Find node based on filter
> ldap set
List env var
> ldap set <key> <val>
Set env var
EXAMPLES:
> ldap connect 10.0.0.100
- Connect anonymously
> ldap connect 10.0.0.100 "cn=admin,dc=example,dc=org" admin
- Connect to 10.0.0.100 as "admin" with password "admin"
> ldap list "dc=example,dc=org"
> ldap search "dc=example,dc=org" "userpassword=*"
> ldap set VERSION 3
- Set LDAP protocol V3
AUTHOR:
Shiney <http://goo.gl/D6g4wl>
"""
import sys
from api import plugin
from api import server
from api import environ
import objects
from ui.color import colorize
def print_node(node):
    """Pretty-print a single LDAP entry returned by the remote payload.

    `node` follows the php ldap_get_entries() layout: an optional 'dn'
    string plus attribute-name -> dict-of-values entries, where each
    value dict may carry a php 'count' bookkeeping key that must not
    be displayed.
    """
    if 'dn' in node:
        print(colorize('%BlueBold', node['dn']))
    else:
        print(colorize('%BlueBold', '----------------------'))
    for key, val in node.items():
        # only attribute entries are dicts; scalar entries such as the
        # 'dn' string are already handled above and are skipped here
        if isinstance(val, dict):
            # Filter out php's 'count' entry without mutating the
            # caller's data. The previous `del val['count']` raised
            # KeyError when the payload omitted it and clobbered `node`.
            values = [v for k, v in val.items() if k != 'count']
            line = " %s : %s" % (colorize('%Bold', "%20s" % key), ' | '.join(values))
            print(line)
    print()
# Dispatch on the first argument: connect / set / list / search.
if len(plugin.argv) < 2:
    sys.exit(plugin.help)

# Set env var
if plugin.argv[1].lower() == "set":
    # `ldap set` with no further args dumps the whole settings container
    if len(plugin.argv) < 3:
        print(environ['LDAP'])
        sys.exit(0)
    if len(plugin.argv) < 4:
        print('Missing parameter\n> ldap set VAR value')
        sys.exit(0)
    # only keys created by `ldap connect` (HOST/LOGIN/PASS/VERSION) may
    # be overridden; settings names are case-insensitive
    if plugin.argv[2].upper() in environ['LDAP']:
        environ['LDAP'][plugin.argv[2].upper()] = plugin.argv[3]
    else:
        sys.exit('This setting doesn\'t exist')
    sys.exit(0)

# Connecting to service
if plugin.argv[1].lower() == "connect":
    # host alone means an anonymous bind; a login without a password
    # (exactly 4 argv entries) is rejected
    if 3 < len(plugin.argv) < 5:
        sys.exit("Missing parameter")
    environ['LDAP'] = objects.VarContainer(title="LDAP settings")
    environ['LDAP']['HOST'] = plugin.argv[2]
    environ['LDAP']['LOGIN'] = plugin.argv[3] if len(plugin.argv) > 3 else " "
    environ['LDAP']['PASS'] = plugin.argv[4] if len(plugin.argv) > 4 else " "
    environ['LDAP']['VERSION'] = 3
    sys.exit(0)

# the remaining subcommands need an active LDAP session
# (the 'LDAP' environ entry is created by `ldap connect` above)
if "LDAP" not in environ:
    sys.exit("Not connected to any server, use `ldap connect` before")

# List node
if plugin.argv[1].lower() == "list":
    if len(plugin.argv) < 3:
        sys.exit("Missing parameter")
    # run the remote list.php payload with the session settings plus
    # the requested base DN
    payload = server.payload.Payload("list.php")
    payload.update(environ['LDAP'])
    payload['BASE_DN'] = plugin.argv[2]
    response = payload.send()
    # php ldap responses carry a top-level 'count' entry alongside rows
    if response['count'] == 0:
        print('No result')
        sys.exit(0)
    for k, row in response.items():
        if k == 'count':
            continue
        print_node(row)
    pass

# Search node
if plugin.argv[1].lower() == "search":
    if len(plugin.argv) < 4:
        sys.exit("Missing parameter")
    payload = server.payload.Payload("search.php")
    payload.update(environ['LDAP'])
    payload['BASE_DN'] = plugin.argv[2]
    payload['SEARCH'] = plugin.argv[3]
    response = payload.send()
    if response['count'] == 0:
        print('No result')
        sys.exit(0)
    for k, row in response.items():
        if k == 'count':
            continue
        print_node(row)
    pass
sys.exit(0)
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Dell Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Rajesh Mohan, Rajesh_Mohan3@Dell.com, DELL Inc.
from neutron.agent.linux import iptables_manager
from neutron.extensions import firewall as fw_ext
from neutron.openstack.common import log as logging
from neutron.services.firewall.drivers import fwaas_base
LOG = logging.getLogger(__name__)
FWAAS_DRIVER_NAME = 'Fwaas iptables driver'
FWAAS_DEFAULT_CHAIN = 'fwaas-default-policy'
INGRESS_DIRECTION = 'ingress'
EGRESS_DIRECTION = 'egress'
CHAIN_NAME_PREFIX = {INGRESS_DIRECTION: 'i',
EGRESS_DIRECTION: 'o'}
""" Firewall rules are applied on internal-interfaces of Neutron router.
The packets ingressing tenant's network will be on the output
direction on internal-interfaces.
"""
IPTABLES_DIR = {INGRESS_DIRECTION: '-o',
EGRESS_DIRECTION: '-i'}
IPV4 = 'ipv4'
IPV6 = 'ipv6'
IP_VER_TAG = {IPV4: 'v4',
IPV6: 'v6'}
class IptablesFwaasDriver(fwaas_base.FwaasDriverBase):
    """IPTables driver for Firewall As A Service.

    Installs per-firewall iptables chains on each router's internal
    ('qr-') interfaces and wires FORWARD into them, with a default
    DROP-all policy chain as the fallback.
    """

    def __init__(self):
        LOG.debug(_("Initializing fwaas iptables driver"))

    def create_firewall(self, apply_list, firewall):
        """Create the firewall on every router in apply_list.

        When the firewall is administratively down only the default
        DROP-all policy is installed.
        """
        LOG.debug(_('Creating firewall %(fw_id)s for tenant %(tid)s)'),
                  {'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
        try:
            if firewall['admin_state_up']:
                self._setup_firewall(apply_list, firewall)
            else:
                self.apply_default_policy(apply_list, firewall)
        except (LookupError, RuntimeError):
            # catch known library exceptions and raise Fwaas generic exception
            LOG.exception(_("Failed to create firewall: %s"), firewall['id'])
            raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)

    def delete_firewall(self, apply_list, firewall):
        """Remove this firewall's chains (and the default chain) everywhere."""
        LOG.debug(_('Deleting firewall %(fw_id)s for tenant %(tid)s)'),
                  {'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
        fwid = firewall['id']
        try:
            for router_info in apply_list:
                ipt_mgr = router_info.iptables_manager
                self._remove_chains(fwid, ipt_mgr)
                self._remove_default_chains(ipt_mgr)
                # apply the changes immediately (no defer in firewall path)
                ipt_mgr.defer_apply_off()
        except (LookupError, RuntimeError):
            # catch known library exceptions and raise Fwaas generic exception
            LOG.exception(_("Failed to delete firewall: %s"), fwid)
            raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)

    def update_firewall(self, apply_list, firewall):
        """Re-apply the firewall policy on every router in apply_list."""
        LOG.debug(_('Updating firewall %(fw_id)s for tenant %(tid)s)'),
                  {'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
        try:
            if firewall['admin_state_up']:
                self._setup_firewall(apply_list, firewall)
            else:
                self.apply_default_policy(apply_list, firewall)
        except (LookupError, RuntimeError):
            # catch known library exceptions and raise Fwaas generic exception
            LOG.exception(_("Failed to update firewall: %s"), firewall['id'])
            raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)

    def apply_default_policy(self, apply_list, firewall):
        """Replace the firewall's chains with the default DROP-all policy."""
        LOG.debug(_('Applying firewall %(fw_id)s for tenant %(tid)s)'),
                  {'fw_id': firewall['id'], 'tid': firewall['tenant_id']})
        fwid = firewall['id']
        try:
            for router_info in apply_list:
                ipt_mgr = router_info.iptables_manager

                # the following only updates local memory; no hole in FW
                self._remove_chains(fwid, ipt_mgr)
                self._remove_default_chains(ipt_mgr)

                # create default 'DROP ALL' policy chain
                self._add_default_policy_chain_v4v6(ipt_mgr)
                self._enable_policy_chain(fwid, ipt_mgr)

                # apply the changes immediately (no defer in firewall path)
                ipt_mgr.defer_apply_off()
        except (LookupError, RuntimeError):
            # catch known library exceptions and raise Fwaas generic exception
            LOG.exception(_("Failed to apply default policy on firewall: %s"),
                          fwid)
            raise fw_ext.FirewallInternalDriverError(driver=FWAAS_DRIVER_NAME)

    def _setup_firewall(self, apply_list, firewall):
        # Rebuild and enable the per-firewall chains on each router.
        fwid = firewall['id']
        for router_info in apply_list:
            ipt_mgr = router_info.iptables_manager

            # the following only updates local memory; no hole in FW
            self._remove_chains(fwid, ipt_mgr)
            self._remove_default_chains(ipt_mgr)

            # create default 'DROP ALL' policy chain
            self._add_default_policy_chain_v4v6(ipt_mgr)
            # create chain based on configured policy
            self._setup_chains(firewall, ipt_mgr)

            # apply the changes immediately (no defer in firewall path)
            ipt_mgr.defer_apply_off()

    def _get_chain_name(self, fwid, ver, direction):
        # e.g. 'iv4<fwid>' for the IPv4 ingress chain of firewall <fwid>.
        return '%s%s%s' % (CHAIN_NAME_PREFIX[direction],
                           IP_VER_TAG[ver],
                           fwid)

    def _setup_chains(self, firewall, ipt_mgr):
        """Create Fwaas chain using the rules in the policy."""
        fw_rules_list = firewall['firewall_rule_list']
        fwid = firewall['id']

        # default rules for invalid packets and established sessions
        invalid_rule = self._drop_invalid_packets_rule()
        est_rule = self._allow_established_rule()

        # Create the ingress/egress chains for both IP versions and seed
        # them with the two default rules.
        for ver in [IPV4, IPV6]:
            if ver == IPV4:
                table = ipt_mgr.ipv4['filter']
            else:
                table = ipt_mgr.ipv6['filter']
            ichain_name = self._get_chain_name(fwid, ver, INGRESS_DIRECTION)
            ochain_name = self._get_chain_name(fwid, ver, EGRESS_DIRECTION)
            for name in [ichain_name, ochain_name]:
                table.add_chain(name)
                table.add_rule(name, invalid_rule)
                table.add_rule(name, est_rule)

        # Policy rules are appended in order, so rule priority is preserved.
        for rule in fw_rules_list:
            if not rule['enabled']:
                continue
            iptbl_rule = self._convert_fwaas_to_iptables_rule(rule)
            if rule['ip_version'] == 4:
                ver = IPV4
                table = ipt_mgr.ipv4['filter']
            else:
                ver = IPV6
                table = ipt_mgr.ipv6['filter']
            ichain_name = self._get_chain_name(fwid, ver, INGRESS_DIRECTION)
            ochain_name = self._get_chain_name(fwid, ver, EGRESS_DIRECTION)
            table.add_rule(ichain_name, iptbl_rule)
            table.add_rule(ochain_name, iptbl_rule)
        self._enable_policy_chain(fwid, ipt_mgr)

    def _remove_default_chains(self, ipt_mgr):
        """Remove fwaas default policy chain.

        Note: parameter renamed from the misleading `nsid` -- callers
        always pass an IptablesManager, never a namespace id.
        """
        self._remove_chain_by_name(IPV4, FWAAS_DEFAULT_CHAIN, ipt_mgr)
        self._remove_chain_by_name(IPV6, FWAAS_DEFAULT_CHAIN, ipt_mgr)

    def _remove_chains(self, fwid, ipt_mgr):
        """Remove fwaas policy chain."""
        for ver in [IPV4, IPV6]:
            for direction in [INGRESS_DIRECTION, EGRESS_DIRECTION]:
                chain_name = self._get_chain_name(fwid, ver, direction)
                self._remove_chain_by_name(ver, chain_name, ipt_mgr)

    def _add_default_policy_chain_v4v6(self, ipt_mgr):
        # Catch-all chains that drop everything not matched earlier.
        ipt_mgr.ipv4['filter'].add_chain(FWAAS_DEFAULT_CHAIN)
        ipt_mgr.ipv4['filter'].add_rule(FWAAS_DEFAULT_CHAIN, '-j DROP')
        ipt_mgr.ipv6['filter'].add_chain(FWAAS_DEFAULT_CHAIN)
        ipt_mgr.ipv6['filter'].add_rule(FWAAS_DEFAULT_CHAIN, '-j DROP')

    def _remove_chain_by_name(self, ver, chain_name, ipt_mgr):
        if ver == IPV4:
            ipt_mgr.ipv4['filter'].ensure_remove_chain(chain_name)
        else:
            ipt_mgr.ipv6['filter'].ensure_remove_chain(chain_name)

    def _add_rules_to_chain(self, ipt_mgr, ver, chain_name, rules):
        if ver == IPV4:
            table = ipt_mgr.ipv4['filter']
        else:
            table = ipt_mgr.ipv6['filter']
        for rule in rules:
            table.add_rule(chain_name, rule)

    def _enable_policy_chain(self, fwid, ipt_mgr):
        """Wire FORWARD into the firewall chains, then into DROP-all.

        Traffic on the router's internal ('qr-') interfaces is first sent
        through the per-firewall chains; anything left unmatched falls
        through to the default policy chain and is dropped.
        """
        bname = iptables_manager.binary_name

        for (ver, tbl) in [(IPV4, ipt_mgr.ipv4['filter']),
                           (IPV6, ipt_mgr.ipv6['filter'])]:
            for direction in [INGRESS_DIRECTION, EGRESS_DIRECTION]:
                chain_name = self._get_chain_name(fwid, ver, direction)
                chain_name = iptables_manager.get_chain_name(chain_name)
                if chain_name in tbl.chains:
                    jump_rule = ['%s qr-+ -j %s-%s' % (
                        IPTABLES_DIR[direction], bname, chain_name)]
                    self._add_rules_to_chain(ipt_mgr, ver, 'FORWARD',
                                             jump_rule)

        # jump to DROP_ALL policy (traffic leaving towards the tenant net)
        chain_name = iptables_manager.get_chain_name(FWAAS_DEFAULT_CHAIN)
        jump_rule = ['-o qr-+ -j %s-%s' % (bname, chain_name)]
        self._add_rules_to_chain(ipt_mgr, IPV4, 'FORWARD', jump_rule)
        self._add_rules_to_chain(ipt_mgr, IPV6, 'FORWARD', jump_rule)

        # jump to DROP_ALL policy (traffic arriving from the tenant net)
        chain_name = iptables_manager.get_chain_name(FWAAS_DEFAULT_CHAIN)
        jump_rule = ['-i qr-+ -j %s-%s' % (bname, chain_name)]
        self._add_rules_to_chain(ipt_mgr, IPV4, 'FORWARD', jump_rule)
        self._add_rules_to_chain(ipt_mgr, IPV6, 'FORWARD', jump_rule)

    def _convert_fwaas_to_iptables_rule(self, rule):
        """Translate one FWaaS rule dict into an iptables argument string."""
        # conditional expression instead of the fragile `x and a or b` idiom
        action = 'ACCEPT' if rule.get('action') == 'allow' else 'DROP'
        args = [self._protocol_arg(rule.get('protocol')),
                self._port_arg('dport',
                               rule.get('protocol'),
                               rule.get('destination_port')),
                self._port_arg('sport',
                               rule.get('protocol'),
                               rule.get('source_port')),
                self._ip_prefix_arg('s', rule.get('source_ip_address')),
                self._ip_prefix_arg('d', rule.get('destination_ip_address')),
                self._action_arg(action)]

        iptables_rule = ' '.join(args)
        return iptables_rule

    def _drop_invalid_packets_rule(self):
        return '-m state --state INVALID -j DROP'

    def _allow_established_rule(self):
        return '-m state --state ESTABLISHED,RELATED -j ACCEPT'

    def _action_arg(self, action):
        if action:
            return '-j %s' % action
        return ''

    def _protocol_arg(self, protocol):
        if protocol:
            return '-p %s' % protocol
        return ''

    def _port_arg(self, direction, protocol, port):
        # ports only make sense for TCP/UDP
        if not (protocol in ['udp', 'tcp'] and port):
            return ''
        return '--%s %s' % (direction, port)
def _ip_prefix_arg(self, direction, ip_prefix):
if ip_prefix:
return '-%s %s' % (direction, ip_prefix)
        return ''
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.talent_v4.types import tenant
from google.cloud.talent_v4.types import tenant as gct_tenant
from google.cloud.talent_v4.types import tenant_service
from google.protobuf import empty_pb2 # type: ignore
# Advertise the installed library version in the user-agent; fall back to
# a bare ClientInfo when google-cloud-talent is not installed as a
# distribution (e.g. when run from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-talent",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

# Probe the installed google-auth version; used by _get_scopes_kwargs to
# decide which scope kwargs google-auth understands.
try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:  # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None
class TenantServiceTransport(abc.ABC):
    """Abstract transport class for TenantService.

    Handles the credential/scope plumbing shared by all transports;
    concrete subclasses (gRPC, gRPC-asyncio) implement the RPC
    properties declared at the bottom of this class.
    """

    # OAuth scopes required by every TenantService RPC.
    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/jobs",
    )

    DEFAULT_HOST: str = "jobs.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )

        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )

        # If the credentials is service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

    # TODO(busunkim): This method is in the base transport
    # to avoid duplicating code across the transport classes. These functions
    # should be deleted once the minimum required versions of google-auth is increased.

    # TODO: Remove this function once google-auth >= 1.25.0 is required
    @classmethod
    def _get_scopes_kwargs(
        cls, host: str, scopes: Optional[Sequence[str]]
    ) -> Dict[str, Optional[Sequence[str]]]:
        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""

        scopes_kwargs = {}

        # google-auth >= 1.25.0 accepts `default_scopes` in addition to
        # `scopes`; older versions only accept `scopes`.
        if _GOOGLE_AUTH_VERSION and (
            packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        ):
            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
        else:
            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}

        return scopes_kwargs

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Retries below cover transient DeadlineExceeded/ServiceUnavailable
        # errors for the idempotent calls (get/delete/list); create and
        # update only get a timeout, no retry.
        self._wrapped_methods = {
            self.create_tenant: gapic_v1.method.wrap_method(
                self.create_tenant, default_timeout=30.0, client_info=client_info,
            ),
            self.get_tenant: gapic_v1.method.wrap_method(
                self.get_tenant,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=30.0,
                ),
                default_timeout=30.0,
                client_info=client_info,
            ),
            self.update_tenant: gapic_v1.method.wrap_method(
                self.update_tenant, default_timeout=30.0, client_info=client_info,
            ),
            self.delete_tenant: gapic_v1.method.wrap_method(
                self.delete_tenant,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=30.0,
                ),
                default_timeout=30.0,
                client_info=client_info,
            ),
            self.list_tenants: gapic_v1.method.wrap_method(
                self.list_tenants,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=30.0,
                ),
                default_timeout=30.0,
                client_info=client_info,
            ),
        }

    # Each RPC is exposed as a property returning the sync or async
    # callable; concrete transports must override these.
    @property
    def create_tenant(
        self,
    ) -> Callable[
        [tenant_service.CreateTenantRequest],
        Union[gct_tenant.Tenant, Awaitable[gct_tenant.Tenant]],
    ]:
        raise NotImplementedError()

    @property
    def get_tenant(
        self,
    ) -> Callable[
        [tenant_service.GetTenantRequest],
        Union[tenant.Tenant, Awaitable[tenant.Tenant]],
    ]:
        raise NotImplementedError()

    @property
    def update_tenant(
        self,
    ) -> Callable[
        [tenant_service.UpdateTenantRequest],
        Union[gct_tenant.Tenant, Awaitable[gct_tenant.Tenant]],
    ]:
        raise NotImplementedError()

    @property
    def delete_tenant(
        self,
    ) -> Callable[
        [tenant_service.DeleteTenantRequest],
        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
    ]:
        raise NotImplementedError()

    @property
    def list_tenants(
        self,
    ) -> Callable[
        [tenant_service.ListTenantsRequest],
        Union[
            tenant_service.ListTenantsResponse,
            Awaitable[tenant_service.ListTenantsResponse],
        ],
    ]:
        raise NotImplementedError()
__all__ = ("TenantServiceTransport",)
# -*- coding: utf-8 -*-
# This copy of shlex.py is distributed with argcomplete.
# It incorporates changes proposed in http://bugs.python.org/issue1521950 and changes to allow it to match Unicode
# word characters.
"""A lexical analyzer class for simple shell-like syntaxes."""
# Module and documentation by Eric S. Raymond, 21 Dec 1998
# Input stacking and error message cleanup added by ESR, March 2000
# push_source() and pop_source() made explicit by ESR, January 2001.
# Posix compliance, split(), string arguments, and
# iterator interface by Gustavo Niemeyer, April 2003.
# changes to tokenize more like Posix shells by Vinay Sajip, January 2012.
import os.path, sys, re
from collections import deque
# Note: cStringIO is not compatible with Unicode
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
basestring
except NameError:
basestring = str
__all__ = ["shlex", "split"]
class UnicodeWordchars:
    '''A replacement for shlex.wordchars that also matches (__contains__) any
    Unicode wordchars.
    '''

    def __init__(self, wordchars):
        self.wordchars = wordchars
        # Raw string: '\w' is an invalid string escape (DeprecationWarning on
        # Python 3.6+, SyntaxError on 3.12+); r'\w' is what was intended.
        self.uw_regex = re.compile(r'\w', flags=re.UNICODE)

    def __contains__(self, c):
        # Return a real bool instead of a Match object so direct calls to
        # __contains__ behave like the `in` operator (which coerces anyway).
        return c in self.wordchars or self.uw_regex.match(c) is not None
class shlex:
    "A lexical analyzer class for simple shell-like syntaxes."

    def __init__(self, instream=None, infile=None, posix=False, punctuation_chars=False):
        # Accept either a string or a file-like object; fall back to stdin.
        if isinstance(instream, basestring):
            instream = StringIO(instream)
        if instream is not None:
            self.instream = instream
            self.infile = infile
        else:
            self.instream = sys.stdin
            self.infile = None
        self.posix = posix
        # POSIX mode uses None for EOF so it can be told apart from an empty
        # token; non-POSIX mode uses the empty string.
        if posix:
            self.eof = None
        else:
            self.eof = ''
        self.commenters = '#'
        # NOTE: 'e' and 'f' are transposed below (historical stdlib quirk);
        # all 26 letters are still present, so behaviour is unaffected.
        self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')
        self.whitespace = ' \t\r\n'
        self.whitespace_split = False
        self.quotes = '\'"'
        self.escape = '\\'
        self.escapedquotes = '"'
        # Current state of the read_token() state machine; ' ' means
        # "between tokens".
        self.state = ' '
        self.pushback = deque()
        self.lineno = 1
        self.debug = 0
        self.token = ''
        self.filestack = deque()
        self.source = None
        if not punctuation_chars:
            punctuation_chars = ''
        elif punctuation_chars is True:
            punctuation_chars = '();<>|&'
        self.punctuation_chars = punctuation_chars
        if punctuation_chars:
            # _pushback_chars is a push back queue used by lookahead logic
            self._pushback_chars = deque()
            # these chars added because allowed in file names, args, wildcards
            self.wordchars += '~-./*?=:@'
            #remove any punctuation chars from wordchars
            self.wordchars = ''.join(c for c in self.wordchars if c not in
                                     self.punctuation_chars)
            # NOTE(review): this loop is dead code -- the join above already
            # removed every punctuation char from wordchars, and str has no
            # .remove() method, so the body would raise AttributeError if it
            # were ever reached.
            for c in punctuation_chars:
                if c in self.wordchars:
                    self.wordchars.remove(c)
            if self.posix:
                self.wordchars = UnicodeWordchars(self.wordchars)
        # Index of the first ':' in the current token (argcomplete extension).
        self.first_colon_pos = None

    def push_token(self, tok):
        "Push a token onto the stack popped by the get_token method"
        self.pushback.appendleft(tok)

    def push_source(self, newstream, newfile=None):
        "Push an input source onto the lexer's input source stack."
        if isinstance(newstream, basestring):
            newstream = StringIO(newstream)
        self.filestack.appendleft((self.infile, self.instream, self.lineno))
        self.infile = newfile
        self.instream = newstream
        self.lineno = 1

    def pop_source(self):
        "Pop the input source stack."
        self.instream.close()
        (self.infile, self.instream, self.lineno) = self.filestack.popleft()
        self.state = ' '

    def get_token(self):
        "Get a token from the input stream (or from stack if it's nonempty)"
        if self.pushback:
            tok = self.pushback.popleft()
            return tok
        # No pushback. Get a token.
        raw = self.read_token()
        # Handle inclusions
        if self.source is not None:
            while raw == self.source:
                spec = self.sourcehook(self.read_token())
                if spec:
                    (newfile, newstream) = spec
                    self.push_source(newstream, newfile)
                raw = self.get_token()
        # Maybe we got EOF instead?
        while raw == self.eof:
            if not self.filestack:
                return self.eof
            else:
                self.pop_source()
                raw = self.get_token()
        # Neither inclusion nor EOF
        return raw

    def read_token(self):
        # State machine states:
        #   ' '          -- scanning whitespace between tokens
        #   'a'          -- accumulating an ordinary word
        #   'c'          -- accumulating a run of punctuation chars
        #   a quote char -- inside a quoted string
        #   the escape char -- just consumed a backslash
        #   None         -- past end of file
        quoted = False
        escapedstate = ' '
        while True:
            if self.punctuation_chars and self._pushback_chars:
                nextchar = self._pushback_chars.pop()
            else:
                nextchar = self.instream.read(1)
            if nextchar == '\n':
                self.lineno += 1
            if self.state is None:
                self.token = '' # past end of file
                break
            elif self.state == ' ':
                if not nextchar:
                    self.state = None # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.token or (self.posix and quoted):
                        break # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno += 1
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif nextchar in self.wordchars:
                    self.token = nextchar
                    self.state = 'a'
                elif nextchar in self.punctuation_chars:
                    self.token = nextchar
                    self.state = 'c'
                elif nextchar in self.quotes:
                    if not self.posix:
                        self.token = nextchar
                    self.state = nextchar
                elif self.whitespace_split:
                    self.token = nextchar
                    self.state = 'a'
                else:
                    self.token = nextchar
                    if self.token or (self.posix and quoted):
                        break # emit current token
                    else:
                        continue
            elif self.state in self.quotes:
                quoted = True
                if not nextchar: # end of file
                    # XXX what error should be raised here?
                    raise ValueError("No closing quotation")
                if nextchar == self.state:
                    if not self.posix:
                        self.token += nextchar
                        self.state = ' '
                        break
                    else:
                        self.state = 'a'
                elif (self.posix and nextchar in self.escape and self.state
                      in self.escapedquotes):
                    escapedstate = self.state
                    self.state = nextchar
                else:
                    self.token += nextchar
            elif self.state in self.escape:
                if not nextchar: # end of file
                    # XXX what error should be raised here?
                    raise ValueError("No escaped character")
                # In posix shells, only the quote itself or the escape
                # character may be escaped within quotes.
                if (escapedstate in self.quotes and
                    nextchar != self.state and nextchar != escapedstate):
                    self.token += self.state
                self.token += nextchar
                self.state = escapedstate
            elif self.state in ('a', 'c'):
                if not nextchar:
                    self.state = None # end of file
                    break
                elif nextchar in self.whitespace:
                    self.state = ' '
                    if self.token or (self.posix and quoted):
                        break # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno += 1
                    if self.posix:
                        self.state = ' '
                        if self.token or (self.posix and quoted):
                            break # emit current token
                        else:
                            continue
                elif self.posix and nextchar in self.quotes:
                    self.state = nextchar
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif self.state == 'c':
                    # A punctuation run ends at the first non-punctuation
                    # char, which is pushed back for the next token.
                    if nextchar in self.punctuation_chars:
                        self.token += nextchar
                    else:
                        if nextchar not in self.whitespace:
                            self._pushback_chars.append(nextchar)
                        self.state = ' '
                        break
                elif (nextchar in self.wordchars or nextchar in self.quotes
                      or self.whitespace_split):
                    self.token += nextchar
                    # argcomplete extension: remember where the first ':'
                    # of the token is.
                    if nextchar == ':':
                        self.first_colon_pos = len(self.token)-1
                else:
                    if self.punctuation_chars:
                        self._pushback_chars.append(nextchar)
                    else:
                        self.pushback.appendleft(nextchar)
                    self.state = ' '
                    if self.token:
                        break # emit current token
                    else:
                        continue
        result = self.token
        self.token = ''
        # In POSIX mode an empty unquoted result means EOF, reported as None.
        if self.posix and not quoted and result == '':
            result = None
        return result

    def sourcehook(self, newfile):
        "Hook called on a filename to be sourced."
        if newfile[0] == '"':
            newfile = newfile[1:-1]
        # This implements cpp-like semantics for relative-path inclusion.
        if isinstance(self.infile, basestring) and not os.path.isabs(newfile):
            newfile = os.path.join(os.path.dirname(self.infile), newfile)
        return (newfile, open(newfile, "r"))

    def error_leader(self, infile=None, lineno=None):
        "Emit a C-compiler-like, Emacs-friendly error-message leader."
        if infile is None:
            infile = self.infile
        if lineno is None:
            lineno = self.lineno
        return "\"%s\", line %d: " % (infile, lineno)

    def __iter__(self):
        return self

    # NOTE(review): this is the Python 2 iterator protocol; on Python 3 the
    # method would need to be named __next__ for `for tok in lexer` to work.
    def next(self):
        token = self.get_token()
        if token == self.eof:
            raise StopIteration
        return token
def split(s, comments=False, posix=True, punctuation_chars=False):
    """Split the string *s* using shell-like syntax.

    Comments are only honoured when *comments* is true; *posix* and
    *punctuation_chars* are forwarded to the shlex constructor.
    """
    lexer = shlex(s, posix=posix, punctuation_chars=punctuation_chars)
    lexer.whitespace_split = True
    if not comments:
        lexer.commenters = ''
    return [token for token in lexer]
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.six.moves import queue
from ansible.compat.six import iteritems, text_type
from ansible.vars import strip_internal_keys
import multiprocessing
import time
import traceback
# TODO: not needed if we use the cryptography library with its default RNG
# engine
HAS_ATFORK=True
try:
from Crypto.Random import atfork
except ImportError:
HAS_ATFORK=False
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['ResultProcess']
class ResultProcess(multiprocessing.Process):
    '''
    The result worker thread, which reads results from the results
    queue and fires off callbacks/etc. as necessary.
    '''

    def __init__(self, final_q, workers):
        # takes a task queue manager as the sole param:
        self._final_q = final_q
        self._workers = workers
        self._cur_worker = 0
        self._terminated = False
        super(ResultProcess, self).__init__()

    def _send_result(self, result):
        # Forward a (callback-name, payload...) tuple onto the final queue
        # consumed by the task queue manager.
        display.debug(u"sending result: %s" % ([text_type(x) for x in result],))
        self._final_q.put(result)
        display.debug("done sending result")

    def _read_worker_result(self):
        # Round-robin over the per-worker result queues, starting where the
        # previous call left off; return the first result found, or None
        # after a full pass with no data.
        result = None
        starting_point = self._cur_worker
        while True:
            (worker_prc, rslt_q) = self._workers[self._cur_worker]
            self._cur_worker += 1
            if self._cur_worker >= len(self._workers):
                self._cur_worker = 0
            try:
                # NOTE(review): _cur_worker was already advanced above, so
                # the worker index in these debug messages is off by one
                # from the queue actually being read.
                if not rslt_q.empty():
                    display.debug("worker %d has data to read" % self._cur_worker)
                    result = rslt_q.get()
                    display.debug("got a result from worker %d: %s" % (self._cur_worker, result))
                    break
            except queue.Empty:
                pass
            if self._cur_worker == starting_point:
                break
        return result

    def terminate(self):
        # Remember we were asked to stop before delegating to
        # multiprocessing.Process.terminate().
        self._terminated = True
        super(ResultProcess, self).terminate()

    def run(self):
        '''
        The main thread execution, which reads from the results queue
        indefinitely and sends callbacks/etc. when results are received.
        '''
        if HAS_ATFORK:
            atfork()
        while True:
            try:
                result = self._read_worker_result()
                if result is None:
                    # No pending results anywhere; brief sleep to avoid a
                    # busy-wait spin.
                    time.sleep(0.0001)
                    continue
                # send callbacks for 'non final' results
                if '_ansible_retry' in result._result:
                    self._send_result(('v2_runner_retry', result))
                    continue
                elif '_ansible_item_result' in result._result:
                    if result.is_failed() or result.is_unreachable():
                        self._send_result(('v2_runner_item_on_failed', result))
                    elif result.is_skipped():
                        self._send_result(('v2_runner_item_on_skipped', result))
                    else:
                        self._send_result(('v2_runner_item_on_ok', result))
                        if 'diff' in result._result:
                            self._send_result(('v2_on_file_diff', result))
                    continue
                clean_copy = strip_internal_keys(result._result)
                if 'invocation' in clean_copy:
                    del clean_copy['invocation']
                # if this task is registering a result, do it now
                if result._task.register:
                    self._send_result(('register_host_var', result._host, result._task, clean_copy))
                # send callbacks, execute other options based on the result status
                # TODO: this should all be cleaned up and probably moved to a sub-function.
                #       the fact that this sometimes sends a TaskResult and other times
                #       sends a raw dictionary back may be confusing, but the result vs.
                #       results implementation for tasks with loops should be cleaned up
                #       better than this
                if result.is_unreachable():
                    self._send_result(('host_unreachable', result))
                elif result.is_failed():
                    self._send_result(('host_task_failed', result))
                elif result.is_skipped():
                    self._send_result(('host_task_skipped', result))
                else:
                    if result._task.loop:
                        # this task had a loop, and has more than one result, so
                        # loop over all of them instead of a single result
                        result_items = result._result.get('results', [])
                    else:
                        result_items = [ result._result ]
                    for result_item in result_items:
                        # if this task is notifying a handler, do it now
                        if '_ansible_notify' in result_item:
                            if result.is_changed():
                                # The shared dictionary for notified handlers is a proxy, which
                                # does not detect when sub-objects within the proxy are modified.
                                # So, per the docs, we reassign the list so the proxy picks up and
                                # notifies all other threads
                                for notify in result_item['_ansible_notify']:
                                    self._send_result(('notify_handler', result, notify))
                        if 'add_host' in result_item:
                            # this task added a new host (add_host module)
                            self._send_result(('add_host', result_item))
                        elif 'add_group' in result_item:
                            # this task added a new group (group_by module)
                            self._send_result(('add_group', result._host, result_item))
                        elif 'ansible_facts' in result_item:
                            # if this task is registering facts, do that now
                            loop_var = 'item'
                            if result._task.loop_control:
                                loop_var = result._task.loop_control.get('loop_var') or 'item'
                            item = result_item.get(loop_var, None)
                            if result._task.action == 'include_vars':
                                for (key, value) in iteritems(result_item['ansible_facts']):
                                    self._send_result(('set_host_var', result._host, result._task, item, key, value))
                            else:
                                self._send_result(('set_host_facts', result._host, result._task, item, result_item['ansible_facts']))
                # finally, send the ok for this task
                self._send_result(('host_task_ok', result))
            except queue.Empty:
                pass
            except (KeyboardInterrupt, SystemExit, IOError, EOFError):
                break
            except:
                # TODO: we should probably send a proper callback here instead of
                # simply dumping a stack trace on the screen
                traceback.print_exc()
                break
import numbers
import math
import ply.lex as lex
# Global interpreter state: variable bindings, user-defined functions
# (name -> (argument tokens, RPN body tokens)) and their per-argument-tuple
# base cases (name -> {args tuple: body}).
variables = {}
usr_funcs = {}
usr_basecases = {}
# Toggled by the 'debug' builtin in eval_rpn.
debug = False
# Number of operands each builtin operator/function pops off the stack;
# user-defined functions are added here as they are defined.
argcounts = {
    "+": 2,
    "/": 2,
    "-": 2,
    "*": 2,
    "**": 2,
    ">>": 2,
    "<<": 2,
    "=": 2,
    "==": 2,
    ">=": 2,
    "<=": 2,
    ">": 2,
    "<": 2,
    "!=": 2,
    "if": 2,
    "sin": 1,
    "cos": 1,
    "asin": 1,
    "acos": 1,
    "debug": 1,
}
def varr(x, lvars):
    """Resolve *x* to a concrete value.

    None and plain numbers pass through unchanged; a VAR token is looked up
    in *lvars* (falling through to the token itself when unbound); a NUMBER
    token yields its value; any other token is returned as-is.
    """
    if x is None:
        return None
    if isinstance(x, numbers.Real):
        return x
    if x.type == "VAR" and x.value in lvars:
        return lvars[x.value]
    if x.type == "NUMBER":
        return x.value
    return x
def eval_rpn(intoks, lvars=None):
    """Evaluate a sequence of lexer tokens in reverse Polish notation.

    *intoks* is a list of ply LexTokens (and/or plain numbers); *lvars*
    maps variable names to values and defaults to the module-level
    ``variables`` dict.  Returns the single value left on the stack, or
    None when the expression only had side effects (assignment, function
    definition, a false ``if``).
    """
    if lvars is None:
        lvars = variables
    values = [x for x in intoks]
    stack = []
    while len(values) > 0:
        t = values.pop(0)
        # Operands (variables, literals, plain numbers) are pushed; anything
        # else is an operator that pops its arguments below.
        if (isinstance(t, lex.LexToken) and t.type in ("VAR", "NUMBER")) or \
           (isinstance(t, numbers.Real)):
            stack.append(t)
        else:
            # An unknown FUNC name consumes the whole stack (its tokens
            # become the parameter list of a new function definition).
            if t.type == "FUNC" and t.value not in usr_funcs:
                argcount = len(stack)
            elif t.type == "THEN":
                argcount = 1
            else:
                if t.value in argcounts:
                    argcount = argcounts[t.value]
                else:
                    raise NameError("function {} not defined".format(t.value))
            args = []
            while len(args) < argcount:
                try:
                    args.append(stack.pop())
                except IndexError:
                    raise IndexError("Not enough arguments")
            args = args[::-1]
            # Need to reverse it.
            if t.type == "PLUS":
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                res = x1 + x2
                stack.append(res)
            elif t.type == "MINUS":
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                res = x1 - x2
                stack.append(res)
            elif t.type == "TIMES":
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                res = x1 * x2
                stack.append(res)
            elif t.type == "DIVIDE":
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                res = x1 / x2
                stack.append(res)
            elif t.type == "LSHIFT":
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                res = x1 << x2
                stack.append(res)
            elif t.type == "RSHIFT":
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                res = x1 >> x2
                stack.append(res)
            elif t.type == "EXP":
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                res = x1 ** x2
                stack.append(res)
            elif t.type == "COMPARE":
                # Comparisons yield 0/1 ints so they compose with 'if'.
                x1 = varr(args[0], lvars)
                x2 = varr(args[1], lvars)
                if t.value == "==":
                    res = int(x1 == x2)
                if t.value == ">=":
                    res = int(x1 >= x2)
                if t.value == "<=":
                    res = int(x1 <= x2)
                if t.value == ">":
                    res = int(x1 > x2)
                if t.value == "<":
                    res = int(x1 < x2)
                if t.value == "!=":
                    res = int(x1 != x2)
                stack.append(res)
            elif t.type == "ASSIGN":
                # args[0] is the target VAR token, args[1] the value.
                lvars[args[0].value] = varr(args[1], {})
            elif t.type == "THEN":
                # 'if': evaluate the rest of the token stream only when the
                # condition is positive; otherwise abort with no value.
                x1 = varr(args[0], lvars)
                if x1 > 0:
                    stack.append(eval_rpn(values, lvars))
                else:
                    return
            elif t.type == "FUNC":
                if t.value == "sin":
                    res = math.sin(varr(args[0], lvars))
                    stack.append(res)
                elif t.value == "cos":
                    res = math.cos(varr(args[0], lvars))
                    stack.append(res)
                elif t.value == "asin":
                    res = math.asin(varr(args[0], lvars))
                    stack.append(res)
                elif t.value == "acos":
                    res = math.acos(varr(args[0], lvars))
                    stack.append(res)
                elif t.value == "debug":
                    global debug
                    debug = not(debug)
                elif t.value in usr_funcs:
                    # Call a user-defined function: bind its parameter
                    # tokens to the evaluated arguments, short-circuiting
                    # through any recorded base case.
                    func = usr_funcs[t.value]
                    fargs = {}
                    for index, item in enumerate(func[0]):
                        fargs[item.value] = varr(args[index], lvars)
                    if (t.value in usr_basecases) and \
                       (tuple(varr(x, {}) for x in args) in usr_basecases[t.value]):
                        r = usr_basecases[t.value][tuple(varr(x, {}) for x in args)]
                        stack.append(r[0])
                    else:
                        stack.append(eval_rpn(func[1], fargs))
                else:
                    # Define a new function: the remaining tokens are its
                    # body and the previously collected args its parameters.
                    newfunc = t
                    funccode = []
                    while len(values) > 0:
                        t = values.pop(0)
                        funccode.append(varr(t, {}))
                    if len(funccode) == 0:
                        raise ValueError("No function body given")
                    if funccode[-1].type == "ASSIGN":
                        funccode.pop()
                    usr_funcs[newfunc.value] = (args, funccode)
                    argcounts[newfunc.value] = len(args)
                    return
            elif t.type == "FUNC_BASE":
                # '@name' records a base case: the remaining tokens are the
                # value for this particular argument tuple.
                funcname = t.value.replace("@", "")
                funccode = []
                while len(values) > 0:
                    t = values.pop(0)
                    funccode.append(varr(t, {}))
                if len(funccode) == 0:
                    raise ValueError("No function body given")
                if funccode[-1].type == "ASSIGN":
                    funccode.pop()
                if funcname in usr_basecases:
                    usr_basecases[funcname][tuple([varr(x, {}) for x in args])] = funccode
                else:
                    usr_basecases[funcname] = {}
                    usr_basecases[funcname][tuple([varr(x, {}) for x in args])] = funccode
            else:
                raise TypeError("Unknown token")
    if len(stack) == 1:
        return varr(stack[0], lvars)
    elif len(stack) > 1:
        raise IndexError("Too many values!")
// Copyright IBM Corp. 2016, 2025
// SPDX-License-Identifier: BUSL-1.1
package cache
import (
"context"
"fmt"
gohttp "net/http"
"sync"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-retryablehttp"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/helper/namespace"
"github.com/hashicorp/vault/http"
)
//go:generate enumer -type=EnforceConsistency -trimprefix=EnforceConsistency

// EnforceConsistency controls whether the API proxy records Vault
// replication index states from responses and requires them on
// subsequent requests.
type EnforceConsistency int

const (
	EnforceConsistencyNever EnforceConsistency = iota
	EnforceConsistencyAlways
)

//go:generate enumer -type=WhenInconsistentAction -trimprefix=WhenInconsistent

// WhenInconsistentAction selects how the proxy reacts when Vault cannot
// satisfy the required consistency state: surface the failure to the
// caller, retry internally, or set the forward header on the request.
type WhenInconsistentAction int

const (
	WhenInconsistentFail WhenInconsistentAction = iota
	WhenInconsistentRetry
	WhenInconsistentForward
)

// APIProxy is an implementation of the proxier interface that is used to
// forward the request to Vault and get the response.
type APIProxy struct {
	client                 *api.Client
	logger                 hclog.Logger
	enforceConsistency     EnforceConsistency
	whenInconsistentAction WhenInconsistentAction
	// l guards lastIndexStates.
	l               sync.RWMutex
	lastIndexStates []string
	// userAgentString is used when the proxied request carries no
	// User-Agent of its own; userAgentStringFunction transforms one that
	// does.
	userAgentString         string
	userAgentStringFunction func(string) string
	// clientNamespace is a one-time set representation of the namespace of the client
	// (i.e. client.Namespace()) to avoid repeated calls and lock usage.
	clientNamespace            string
	prependConfiguredNamespace bool
}

var _ Proxier = &APIProxy{}

type APIProxyConfig struct {
	Client                 *api.Client
	Logger                 hclog.Logger
	EnforceConsistency     EnforceConsistency
	WhenInconsistentAction WhenInconsistentAction
	// UserAgentString is used as the User Agent when the proxied client
	// does not have a user agent of its own.
	UserAgentString string
	// UserAgentStringFunction is the function to transform the proxied client's
	// user agent into one that includes Vault-specific information.
	UserAgentStringFunction func(string) string
	// PrependConfiguredNamespace configures whether the client's namespace
	// should be prepended to proxied requests
	PrependConfiguredNamespace bool
}
// NewAPIProxy builds a Proxier that forwards requests to Vault using the
// API client held in config. It fails when no client is supplied.
func NewAPIProxy(config *APIProxyConfig) (Proxier, error) {
	if config.Client == nil {
		return nil, fmt.Errorf("nil API client")
	}
	proxy := &APIProxy{
		client:                     config.Client,
		clientNamespace:            namespace.Canonicalize(config.Client.Namespace()),
		logger:                     config.Logger,
		enforceConsistency:         config.EnforceConsistency,
		whenInconsistentAction:     config.WhenInconsistentAction,
		userAgentString:            config.UserAgentString,
		userAgentStringFunction:    config.UserAgentStringFunction,
		prependConfiguredNamespace: config.PrependConfiguredNamespace,
	}
	return proxy, nil
}
// Send clones the proxy's API client, applies the request's token, headers,
// User-Agent and (optionally) the configured namespace prefix, forwards the
// request to Vault and returns the response. When consistency enforcement is
// enabled it also records and requires replication index states across
// requests.
func (ap *APIProxy) Send(ctx context.Context, req *SendRequest) (*SendResponse, error) {
	client, err := ap.client.Clone()
	if err != nil {
		return nil, err
	}
	client.SetToken(req.Token)

	// Derive and set a logger for the client
	clientLogger := ap.logger.Named("client")
	client.SetLogger(clientLogger)

	// http.Transport will transparently request gzip and decompress the response, but only if
	// the client doesn't manually set the header. Removing any Accept-Encoding header allows the
	// transparent compression to occur.
	// NOTE(review): Del on a nil Header is a no-op (map delete), so calling
	// it before the nil check below is safe, if a little surprising.
	req.Request.Header.Del("Accept-Encoding")

	if req.Request.Header == nil {
		req.Request.Header = make(gohttp.Header)
	}

	// Set our User-Agent to be one indicating we are Vault Agent's API proxy.
	// If the sending client had one, preserve it.
	if req.Request.Header.Get("User-Agent") != "" {
		initialUserAgent := req.Request.Header.Get("User-Agent")
		req.Request.Header.Set("User-Agent", ap.userAgentStringFunction(initialUserAgent))
	} else {
		req.Request.Header.Set("User-Agent", ap.userAgentString)
	}

	client.SetHeaders(req.Request.Header)

	if ap.prependConfiguredNamespace && ap.clientNamespace != "" {
		currentNamespace := namespace.Canonicalize(client.Namespace())
		newNamespace := namespace.Canonicalize(ap.clientNamespace + currentNamespace)
		client.SetNamespace(newNamespace)
	}

	fwReq := client.NewRequest(req.Request.Method, req.Request.URL.Path)
	fwReq.BodyBytes = req.RequestBody

	query := req.Request.URL.Query()
	if len(query) != 0 {
		fwReq.Params = query
	}

	var newState string
	// Only manage consistency state when enforcement is enabled and the
	// caller has not supplied any consistency-related header itself.
	manageState := ap.enforceConsistency == EnforceConsistencyAlways &&
		req.Request.Header.Get(http.VaultIndexHeaderName) == "" &&
		req.Request.Header.Get(http.VaultForwardHeaderName) == "" &&
		req.Request.Header.Get(http.VaultInconsistentHeaderName) == ""

	if manageState {
		client = client.WithResponseCallbacks(api.RecordState(&newState))
		ap.l.RLock()
		lastStates := ap.lastIndexStates
		ap.l.RUnlock()
		if len(lastStates) != 0 {
			client = client.WithRequestCallbacks(api.RequireState(lastStates...))
			switch ap.whenInconsistentAction {
			case WhenInconsistentFail:
				// In this mode we want to delegate handling of inconsistency
				// failures to the external client talking to Agent.
				client.SetCheckRetry(retryablehttp.DefaultRetryPolicy)
			case WhenInconsistentRetry:
				// In this mode we want to handle retries due to inconsistency
				// internally. This is the default api.Client behaviour so
				// we needn't do anything.
			case WhenInconsistentForward:
				fwReq.Headers.Set(http.VaultInconsistentHeaderName, http.VaultInconsistentForward)
			}
		}
	}

	// Make the request to Vault and get the response
	ap.logger.Info("forwarding request to Vault", "method", req.Request.Method, "path", req.Request.URL.Path)

	resp, err := client.RawRequestWithContext(ctx, fwReq)
	if resp == nil && err != nil {
		// We don't want to cache nil responses, so we simply return the error
		return nil, err
	}

	if newState != "" {
		ap.l.Lock()
		// We want to be using the "newest" states seen, but newer isn't well
		// defined here. There can be two states S1 and S2 which aren't strictly ordered:
		// S1 could have a newer localindex and S2 could have a newer replicatedindex. So
		// we need to merge them. But we can't merge them because we wouldn't be able to
		// "sign" the resulting header because we don't have access to the HMAC key that
		// Vault uses to do so. So instead we compare any of the 0-2 saved states
		// we have to the new header, keeping the newest 1-2 of these, and sending
		// them to Vault to evaluate.
		ap.lastIndexStates = api.MergeReplicationStates(ap.lastIndexStates, newState)
		ap.l.Unlock()
	}

	// Before error checking from the request call, we'd want to initialize a SendResponse to
	// potentially return
	sendResponse, newErr := NewSendResponse(resp, nil)
	if newErr != nil {
		return nil, newErr
	}

	// Bubble back the api.Response as well for error checking/handling at the handler layer.
	return sendResponse, err
}
# -*- Mode: Python; fill-column: 80 -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
"""recent connections"""
import datetime
import fnmatch
import os
from xml.dom import minidom, Node
from flumotion.common import log, common, xdg
from flumotion.common.connection import PBConnectionInfo, parsePBConnectionInfo
from flumotion.common.errors import OptionError
from flumotion.configure import configure
from flumotion.twisted.pb import Authenticator
__version__ = "$Rev$"
class ConnectionInfo(object):
    """
    I wrap the information contained in a single connection file entry.

    All of my fields are plain strings because some of them may contain
    shell globs; use L{asPBConnectionInfo} to turn me into a concrete
    L{PBConnectionInfo}.
    """

    def __init__(self, host, port, use_insecure, user, passwd, manager):
        self.host = host
        self.port = port
        self.use_insecure = use_insecure
        self.user = user
        self.passwd = passwd
        self.manager = manager

    def asPBConnectionInfo(self):
        """
        Return a L{PBConnectionInfo} object constructed from my state.
        If my state contains shell globs, I raise a ValueError.
        """
        has_glob = ('*' in self.host) or (self.use_insecure not in ('0', '1'))
        if has_glob:
            raise ValueError("Shell glob in connection info")
        authenticator = Authenticator(username=self.user,
                                      password=self.passwd)
        use_ssl = (self.use_insecure == '0')
        return PBConnectionInfo(self.host, int(self.port), use_ssl,
                                authenticator)

    def __str__(self):
        return '%s@%s:%s' % (self.user, self.host, self.port)
class RecentConnection(object):
    """
    I am an object representing a recent connection.
    You can access some of my state and update the timestamp
    (eg, when I was last connected to) by calling L{updateTimestamp}.

    @ivar name: name of the recent connection usually host:port
    @type name: string
    @ivar host: hostname
    @type host: string
    @ivar filename: filename of the connection
    @type filename: string
    @ivar info: connection info
    @type info: L{PBConnectionInfo}
    @ivar timestamp: timestamp
    @type timestamp: datetime.datetime
    """

    def __init__(self, host, filename, info):
        self.name = str(info)
        self.host = host
        self.filename = filename
        self.info = info.asPBConnectionInfo()
        self.manager = info.manager
        # The connection file's change time doubles as the "last used"
        # timestamp; updateTimestamp() refreshes it via os.utime().
        self.timestamp = datetime.datetime.fromtimestamp(
            os.stat(filename).st_ctime)

    def updateTimestamp(self):
        """Mark this connection as used now by touching its file."""
        os.utime(self.filename, None)

    def asConnectionInfo(self):
        """
        Return a L{ConnectionInfo} object constructed from my state.
        """
        info = self.info
        return ConnectionInfo(info.host, str(info.port),
                              info.use_ssl and '0' or '1',
                              info.authenticator.username,
                              info.authenticator.password, '')
def _getRecentFilenames():
    # DSU, or as perl folks call it, a Schwartz Transform
    # Yield the absolute path of every '*.connection' file in the registry
    # directory (created on demand), in directory-listing order.
    common.ensureDir(configure.registrydir, "registry dir")
    for filename in os.listdir(configure.registrydir):
        filename = os.path.join(configure.registrydir, filename)
        if filename.endswith('.connection'):
            yield filename
def hasRecentConnections():
    """
    Returns if we have at least one recent connection

    @returns: if we have a recent connection
    @rtype: bool
    """
    gen = _getRecentFilenames()
    try:
        # Use the next() builtin (available since Python 2.6) rather than
        # the Python-2-only generator .next() method, so this also works
        # on Python 3.
        next(gen)
    except StopIteration:
        return False
    return True
def _parseConnection(element):
    # Collect the text content of every child element (skipping whitespace
    # text nodes and comments), keyed by tag name: <host>, <port>,
    # <use_insecure>, <user>, <passwd>, <manager>.
    state = {}
    for childNode in element.childNodes:
        if (childNode.nodeType != Node.TEXT_NODE and
            childNode.nodeType != Node.COMMENT_NODE):
            state[childNode.nodeName] = childNode.childNodes[0].wholeText
    return ConnectionInfo(state['host'], state['port'], state['use_insecure'],
                          state['user'], state['passwd'], state['manager'])
def _parseSingleConnectionFile(filename):
    """Parse one connection XML file into a L{ConnectionInfo}."""
    document = minidom.parse(filename)
    return _parseConnection(document.documentElement)
def _parseMultipleConnectionsFile(filename):
    """Parse a file holding several <connection> entries.

    Yields one L{ConnectionInfo} per <connection> element.
    """
    document = minidom.parse(filename)
    elements = document.getElementsByTagName('connection')
    return map(_parseConnection, elements)
def getRecentConnections():
    """
    Fetches a list of recently used connections
    @returns: recently used connections
    @rtype: list of L{RecentConnection}
    """
    recentFilenames = _getRecentFilenames()
    recentConnections = []
    # Sorted by filename in reverse order; files that fail to parse are
    # skipped with a warning instead of aborting the whole listing.
    for filename in sorted(recentFilenames, reverse=True):
        try:
            state = _parseSingleConnectionFile(filename)
            recentConnections.append(
                RecentConnection(str(state),
                                 filename=filename,
                                 info=state))
        except Exception, e:
            log.warning('connections', 'Error parsing %s: %r', filename, e)
    return recentConnections
def getDefaultConnections():
    """
    Fetches a list of default connections.
    @returns: default connections
    @rtype: list of L{ConnectionInfo}
    """
    # The defaults live in an XDG config file named 'connections'; a missing
    # or unparseable file simply yields no defaults.
    filename = xdg.config_read_path('connections')
    if not filename:
        return []
    try:
        return _parseMultipleConnectionsFile(filename)
    except Exception, e:
        log.warning('connections', 'Error parsing %s: %r', filename, e)
        return []
def updateFromConnectionList(info, connections, match_glob=False):
    """
    Fill in a missing username and/or password on info from the first
    compatible entry in the given list of connections.

    @param info: connection info
    @type info: L{PBConnectionInfo}
    @param connections: recent or default connections
    @type: a list of L{ConnectionInfo}
    @param match_glob: if values of host, port, etc. to be matched between
                       info and the recent or default connections should be
                       treated as shell globs
    @type: boolean
    @returns: the updated info object
    """
    if match_glob:
        # candidate values may be shell globs
        matches = fnmatch.fnmatch
    else:
        matches = lambda value, candidate: value == candidate

    def _compatible(candidate):
        if not matches(info.host, candidate.host):
            return False
        if not matches(str(info.port), candidate.port):
            return False
        insecure = info.use_ssl and '0' or '1'
        if not matches(insecure, candidate.use_insecure):
            return False
        username = info.authenticator.username
        if username and not matches(username, candidate.user):
            return False
        # doesn't make sense to match the password; if everything before
        # matched, we won't fill in anything
        return True

    for candidate in connections:
        if _compatible(candidate):
            # compatible: fill in whatever credentials are still missing
            if not info.authenticator.username:
                info.authenticator.username = candidate.user
            if not info.authenticator.password:
                info.authenticator.password = candidate.passwd
            break
    return info
def parsePBConnectionInfoRecent(managerString, use_ssl=True,
                                defaultPort=configure.defaultSSLManagerPort):
    """The same as L{flumotion.common.connection.parsePBConnectionInfo},
    but fills in missing information from the recent connections cache or
    from the default user and password definitions file if possible.
    @param managerString: manager string we should connect to
    @type managerString: string
    @param use_ssl: True if we should use ssl
    @type use_ssl: bool
    @param defaultPort: default port to use
    @type defaultPort: int
    @returns: connection info
    @rtype: a L{PBConnectionInfo}
    """
    recent = getRecentConnections()
    # With no manager string at all, fall back to the most recent connection.
    if not managerString:
        if recent:
            return recent[0].info
        else:
            raise OptionError('No string given and no recent '
                              'connections to use')
    info = parsePBConnectionInfo(managerString, username=None,
                                 password=None,
                                 port=defaultPort,
                                 use_ssl=use_ssl)
    # Credential fill-in order: exact matches from the recent-connections
    # cache first, then glob matches from the defaults file; give up with
    # a helpful error if credentials are still missing.
    if not (info.authenticator.username and info.authenticator.password):
        recent_infos = [r.asConnectionInfo() for r in recent]
        updateFromConnectionList(info, recent_infos, match_glob=False)
    if not (info.authenticator.username and info.authenticator.password):
        defaults = getDefaultConnections()
        updateFromConnectionList(info, defaults, match_glob=True)
    if not (info.authenticator.username and info.authenticator.password):
        raise OptionError('You are connecting to %s for the '
                          'first time; please specify a user and '
                          'password (e.g. user:test@%s).'
                          % (managerString, managerString))
    else:
        return info
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_C_EXPERIMENTAL_OPS_GEN_CPP_VIEWS_OP_VIEW_H_
#define TENSORFLOW_C_EXPERIMENTAL_OPS_GEN_CPP_VIEWS_OP_VIEW_H_
#include <vector>
#include "tensorflow/c/experimental/ops/gen/cpp/views/arg_view.h"
#include "tensorflow/c/experimental/ops/gen/cpp/views/attr_view.h"
#include "tensorflow/c/experimental/ops/gen/cpp/views/op_argument_view.h"
#include "tensorflow/c/experimental/ops/gen/model/op_spec.h"
#include "tensorflow/core/platform/types.h"
namespace tensorflow {
namespace generator {
namespace cpp {
// Read-only view over an OpSpec, used when rendering the C++ API wrapper for
// a single op. Accessors expose the op's arguments, attributes and doc text;
// definitions are not visible here (presumably in op_view.cc -- confirm).
class OpView {
public:
explicit OpView(OpSpec op);
// Tensor arguments of the op, as parsed from the OpSpec.
const std::vector<ArgView> &Inputs() const;
const std::vector<ArgView> &Outputs() const;
const std::vector<AttrView> &Attributes() const;
// All arguments combined (inputs, outputs, attributes) for the wrapper
// signature; exact ordering is determined by the constructor (not shown).
const std::vector<OpArgumentView> &AllArguments() const;
int NumInputs() const;
int NumOutputs() const;
// NOTE(review): presumably only meaningful when the op has exactly one
// input/output -- confirm against the implementation.
ArgView OnlyInput() const;
ArgView OnlyOutput() const;
// Naming and documentation helpers for the generated wrapper code.
std::string FunctionName() const;
std::string VariableName() const;
std::string OpNameString() const;
std::string Summary() const;
std::vector<std::string> Description() const;
bool IsListOp() const;
private:
// The wrapped spec plus per-category views derived from it; populated by
// the constructor (presumably -- definition not visible here).
OpSpec op_;
std::vector<ArgView> input_args_;
std::vector<ArgView> output_args_;
std::vector<AttrView> argument_attrs_;
std::vector<OpArgumentView> all_arguments_;
};
} // namespace cpp
} // namespace generator
} // namespace tensorflow
#endif // TENSORFLOW_C_EXPERIMENTAL_OPS_GEN_CPP_VIEWS_OP_VIEW_H_ | c | github | https://github.com/tensorflow/tensorflow | tensorflow/c/experimental/ops/gen/cpp/views/op_view.h |
"""
=========================================
Image denoising using dictionary learning
=========================================
An example comparing the effect of reconstructing noisy fragments
of a raccoon face image using firstly online :ref:`DictionaryLearning` and
various transform methods.
The dictionary is fitted on the distorted left half of the image, and
subsequently used to reconstruct the right half. Note that even better
performance could be achieved by fitting to an undistorted (i.e.
noiseless) image, but here we start from the assumption that it is not
available.
A common practice for evaluating the results of image denoising is by looking
at the difference between the reconstruction and the original image. If the
reconstruction is perfect this will look like Gaussian noise.
It can be seen from the plots that the result of :ref:`omp` with two
non-zero coefficients is a bit less biased than when keeping only one
(the edges look less prominent). It is in addition closer to the ground
truth in Frobenius norm.
The result of :ref:`least_angle_regression` is much more strongly biased: the
difference is reminiscent of the local intensity value of the original image.
Thresholding is clearly not useful for denoising, but it is here to show that
it can produce a suggestive output with very high speed, and thus be useful
for other tasks such as object classification, where performance is not
necessarily related to visualisation.
"""
print(__doc__)
from time import time
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp
from sklearn.decomposition import MiniBatchDictionaryLearning
from sklearn.feature_extraction.image import extract_patches_2d
from sklearn.feature_extraction.image import reconstruct_from_patches_2d
from sklearn.utils.testing import SkipTest
from sklearn.utils.fixes import sp_version
# scipy.misc.face() first appeared in SciPy 0.12; skip cleanly on older
# releases instead of failing with an AttributeError later.
if sp_version < (0, 12):
    raise SkipTest("Skipping because SciPy version earlier than 0.12.0 and "
                   "thus does not include the scipy.misc.face() image.")
###############################################################################
# Load the raccoon face test image in grayscale.
try:
    from scipy import misc
    face = misc.face(gray=True)
except AttributeError:
    # Old versions of scipy have face in the top level package
    face = sp.face(gray=True)
# Convert from uint8 representation with values between 0 and 255 to
# a floating point representation with values between 0 and 1.
# BUGFIX: divide by the float literal 255.0, not the int 255. Under
# Python 2 (no true division), dividing the uint8 array by an integer
# floor-divides and zeroes out the image; 255.0 forces float division
# on both Python versions.
face = face / 255.0
# Downsample by 2x in each direction (mean of 2x2 blocks) for higher speed.
# The sums are performed in float, so no uint8 overflow can occur here.
face = face[::2, ::2] + face[1::2, ::2] + face[::2, 1::2] + face[1::2, 1::2]
face /= 4.0
height, width = face.shape
# Distort the right half of the image with additive Gaussian noise.
print('Distorting image...')
distorted = face.copy()
distorted[:, width // 2:] += 0.075 * np.random.randn(height, width // 2)
# Extract all reference patches from the (noise-free) left half of the image
# and normalize them to zero mean / unit variance per pixel position.
print('Extracting reference patches...')
t0 = time()
patch_size = (7, 7)
data = extract_patches_2d(distorted[:, :width // 2], patch_size)
data = data.reshape(data.shape[0], -1)
data -= np.mean(data, axis=0)
data /= np.std(data, axis=0)
print('done in %.2fs.' % (time() - t0))
###############################################################################
# Learn the dictionary from the reference patches.
print('Learning the dictionary...')
t0 = time()
dico = MiniBatchDictionaryLearning(n_components=100, alpha=1, n_iter=500)
V = dico.fit(data).components_
dt = time() - t0
print('done in %.2fs.' % dt)
# Display the 100 learned dictionary atoms in a 10x10 grid.
plt.figure(figsize=(4.2, 4))
for i, comp in enumerate(V[:100]):
    plt.subplot(10, 10, i + 1)
    plt.imshow(comp.reshape(patch_size), cmap=plt.cm.gray_r,
               interpolation='nearest')
    plt.xticks(())
    plt.yticks(())
plt.suptitle('Dictionary learned from face patches\n' +
             'Train time %.1fs on %d patches' % (dt, len(data)),
             fontsize=16)
plt.subplots_adjust(0.08, 0.02, 0.92, 0.85, 0.08, 0.23)
###############################################################################
# Display the distorted image
def show_with_diff(image, reference, title):
    """Helper function to display denoising.

    Opens a new figure showing ``image`` on the left and its pixel-wise
    difference from ``reference`` on the right, annotated with the Frobenius
    norm of the difference. Purely a visualisation side effect; returns None.
    """
    plt.figure(figsize=(5, 3.3))
    plt.subplot(1, 2, 1)
    plt.title('Image')
    plt.imshow(image, vmin=0, vmax=1, cmap=plt.cm.gray,
               interpolation='nearest')
    # Hide axis ticks on both panels -- only the pixels matter here.
    plt.xticks(())
    plt.yticks(())
    plt.subplot(1, 2, 2)
    difference = image - reference
    plt.title('Difference (norm: %.2f)' % np.sqrt(np.sum(difference ** 2)))
    # Diverging colormap centred on zero so positive/negative errors differ.
    plt.imshow(difference, vmin=-0.5, vmax=0.5, cmap=plt.cm.PuOr,
               interpolation='nearest')
    plt.xticks(())
    plt.yticks(())
    plt.suptitle(title, size=16)
    plt.subplots_adjust(0.02, 0.02, 0.98, 0.79, 0.02, 0.2)
show_with_diff(distorted, face, 'Distorted image')
###############################################################################
# Extract noisy patches and reconstruct them using the dictionary
print('Extracting noisy patches... ')
t0 = time()
data = extract_patches_2d(distorted[:, width // 2:], patch_size)
data = data.reshape(data.shape[0], -1)
# Center the patches; the mean is added back after sparse coding.
intercept = np.mean(data, axis=0)
data -= intercept
print('done in %.2fs.' % (time() - t0))
# (plot title, transform_algorithm name, extra estimator params) triples.
transform_algorithms = [
    ('Orthogonal Matching Pursuit\n1 atom', 'omp',
     {'transform_n_nonzero_coefs': 1}),
    ('Orthogonal Matching Pursuit\n2 atoms', 'omp',
     {'transform_n_nonzero_coefs': 2}),
    ('Least-angle regression\n5 atoms', 'lars',
     {'transform_n_nonzero_coefs': 5}),
    ('Thresholding\n alpha=0.1', 'threshold', {'transform_alpha': .1})]
reconstructions = {}
for title, transform_algorithm, kwargs in transform_algorithms:
    print(title + '...')
    reconstructions[title] = face.copy()
    t0 = time()
    dico.set_params(transform_algorithm=transform_algorithm, **kwargs)
    # Sparse-code the noisy patches against the learned dictionary V.
    code = dico.transform(data)
    patches = np.dot(code, V)
    patches += intercept
    patches = patches.reshape(len(data), *patch_size)
    if transform_algorithm == 'threshold':
        # Thresholding does not preserve scale; rescale patches to [0, 1].
        patches -= patches.min()
        patches /= patches.max()
    # Stitch the overlapping patches back into the right half of the image.
    reconstructions[title][:, width // 2:] = reconstruct_from_patches_2d(
        patches, (height, width // 2))
    dt = time() - t0
    print('done in %.2fs.' % dt)
    show_with_diff(reconstructions[title], face,
                   title + ' (time: %.1fs)' % dt)
plt.show()
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...utils import _LazyModule
from ...utils.import_utils import define_import_structure
if TYPE_CHECKING:
    # Static type checkers import eagerly; deprecated submodules would be
    # re-exported here if any existed.
    pass
    # Add models to deprecate like:
    # from .XXX import *
else:
    import sys
    _file = globals()["__file__"]
    # At runtime, replace this package module with a lazy proxy so that
    # deprecated model submodules are only imported on first attribute access.
    sys.modules[__name__] = _LazyModule(__name__, _file, define_import_structure(_file), module_spec=__spec__)
from __future__ import unicode_literals
import copy
import inspect
import warnings
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned,
ObjectDoesNotExist, ValidationError,
)
from django.db import (
DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connections,
router, transaction,
)
from django.db.models import signals
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, Collector
from django.db.models.fields import AutoField
from django.db.models.fields.related import (
ForeignObjectRel, ManyToOneRel, OneToOneField, lazy_related_operation,
resolve_relation,
)
from django.db.models.manager import ensure_default_manager
from django.db.models.options import Options
from django.db.models.query import Q
from django.db.models.query_utils import (
DeferredAttribute, deferred_class_factory,
)
from django.db.models.utils import make_model_tuple
from django.utils import six
from django.utils.encoding import force_str, force_text
from django.utils.functional import curry
from django.utils.six.moves import zip
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext_lazy as _
from django.utils.version import get_version
def subclass_exception(name, parents, module, attached_to=None):
"""
Create exception subclass. Used by ModelBase below.
If 'attached_to' is supplied, the exception will be created in a way that
allows it to be pickled, assuming the returned exception class will be added
as an attribute to the 'attached_to' class.
"""
class_dict = {'__module__': module}
if attached_to is not None:
def __reduce__(self):
# Exceptions are special - they've got state that isn't
# in self.__dict__. We assume it is all in self.args.
return (unpickle_inner_exception, (attached_to, name), self.args)
def __setstate__(self, args):
self.args = args
class_dict['__reduce__'] = __reduce__
class_dict['__setstate__'] = __setstate__
return type(name, parents, class_dict)
class ModelBase(type):
    """
    Metaclass for all models.
    """
    def __new__(cls, name, bases, attrs):
        """Build a model class: attach _meta, exceptions, fields, managers,
        and wire up inheritance (abstract, proxy and multi-table)."""
        super_new = super(ModelBase, cls).__new__
        # Also ensure initialization is only performed for subclasses of Model
        # (excluding Model class itself).
        parents = [b for b in bases if isinstance(b, ModelBase)]
        if not parents:
            return super_new(cls, name, bases, attrs)
        # Create the class.
        module = attrs.pop('__module__')
        new_class = super_new(cls, name, bases, {'__module__': module})
        attr_meta = attrs.pop('Meta', None)
        abstract = getattr(attr_meta, 'abstract', False)
        # Fall back to an inherited Meta when the class declares none itself.
        if not attr_meta:
            meta = getattr(new_class, 'Meta', None)
        else:
            meta = attr_meta
        base_meta = getattr(new_class, '_meta', None)
        app_label = None
        # Look for an application configuration to attach the model to.
        app_config = apps.get_containing_app_config(module)
        if getattr(meta, 'app_label', None) is None:
            if app_config is None:
                if not abstract:
                    raise RuntimeError(
                        "Model class %s.%s doesn't declare an explicit "
                        "app_label and isn't in an application in "
                        "INSTALLED_APPS." % (module, name)
                    )
            else:
                app_label = app_config.label
        new_class.add_to_class('_meta', Options(meta, app_label))
        if not abstract:
            # Concrete models get picklable DoesNotExist/MultipleObjectsReturned
            # exceptions that inherit from the corresponding parent exceptions.
            new_class.add_to_class(
                'DoesNotExist',
                subclass_exception(
                    str('DoesNotExist'),
                    tuple(
                        x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract
                    ) or (ObjectDoesNotExist,),
                    module,
                    attached_to=new_class))
            new_class.add_to_class(
                'MultipleObjectsReturned',
                subclass_exception(
                    str('MultipleObjectsReturned'),
                    tuple(
                        x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract
                    ) or (MultipleObjectsReturned,),
                    module,
                    attached_to=new_class))
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, 'ordering'):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, 'get_latest_by'):
                    new_class._meta.get_latest_by = base_meta.get_latest_by
        is_proxy = new_class._meta.proxy
        # If the model is a proxy, ensure that the base class
        # hasn't been swapped out.
        if is_proxy and base_meta and base_meta.swapped:
            raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped))
        if getattr(new_class, '_default_manager', None):
            if not is_proxy:
                # Multi-table inheritance doesn't inherit default manager from
                # parents.
                new_class._default_manager = None
                new_class._base_manager = None
            else:
                # Proxy classes do inherit parent's default manager, if none is
                # set explicitly.
                new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
                new_class._base_manager = new_class._base_manager._copy_to_model(new_class)
        # Add all attributes to the class.
        for obj_name, obj in attrs.items():
            new_class.add_to_class(obj_name, obj)
        # All the fields of any type declared on this model
        new_fields = chain(
            new_class._meta.local_fields,
            new_class._meta.local_many_to_many,
            new_class._meta.virtual_fields
        )
        field_names = {f.name for f in new_fields}
        # Basic setup for proxy models.
        if is_proxy:
            base = None
            for parent in [kls for kls in parents if hasattr(kls, '_meta')]:
                if parent._meta.abstract:
                    if parent._meta.fields:
                        raise TypeError(
                            "Abstract base class containing model fields not "
                            "permitted for proxy model '%s'." % name
                        )
                    else:
                        continue
                if base is not None:
                    raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
                else:
                    base = parent
            if base is None:
                raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
            new_class._meta.setup_proxy(base)
            new_class._meta.concrete_model = base._meta.concrete_model
            base._meta.concrete_model._meta.proxied_children.append(new_class._meta)
        else:
            new_class._meta.concrete_model = new_class
        # Collect the parent links for multi-table inheritance.
        parent_links = {}
        for base in reversed([new_class] + parents):
            # Conceptually equivalent to `if base is Model`.
            if not hasattr(base, '_meta'):
                continue
            # Skip concrete parent classes.
            if base != new_class and not base._meta.abstract:
                continue
            # Locate OneToOneField instances.
            for field in base._meta.local_fields:
                if isinstance(field, OneToOneField):
                    related = resolve_relation(new_class, field.remote_field.model)
                    parent_links[make_model_tuple(related)] = field
        # Do the appropriate setup for any model parents.
        for base in parents:
            original_base = base
            if not hasattr(base, '_meta'):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                continue
            parent_fields = base._meta.local_fields + base._meta.local_many_to_many
            # Check for clashes between locally declared fields and those
            # on the base classes (we cannot handle shadowed fields at the
            # moment).
            for field in parent_fields:
                if field.name in field_names:
                    raise FieldError(
                        'Local field %r in class %r clashes '
                        'with field of similar name from '
                        'base class %r' % (field.name, name, base.__name__)
                    )
            if not base._meta.abstract:
                # Concrete classes...
                base = base._meta.concrete_model
                base_key = make_model_tuple(base)
                if base_key in parent_links:
                    field = parent_links[base_key]
                elif not is_proxy:
                    attr_name = '%s_ptr' % base._meta.model_name
                    field = OneToOneField(
                        base,
                        on_delete=CASCADE,
                        name=attr_name,
                        auto_created=True,
                        parent_link=True,
                    )
                    # Only add the ptr field if it's not already present;
                    # e.g. migrations will already have it specified
                    if not hasattr(new_class, attr_name):
                        new_class.add_to_class(attr_name, field)
                else:
                    field = None
                new_class._meta.parents[base] = field
            else:
                # .. and abstract ones.
                for field in parent_fields:
                    new_field = copy.deepcopy(field)
                    new_class.add_to_class(field.name, new_field)
                # Pass any non-abstract parent classes onto child.
                new_class._meta.parents.update(base._meta.parents)
            # Inherit managers from the abstract base classes.
            new_class.copy_managers(base._meta.abstract_managers)
            # Proxy models inherit the non-abstract managers from their base,
            # unless they have redefined any of them.
            if is_proxy:
                new_class.copy_managers(original_base._meta.concrete_managers)
            # Inherit virtual fields (like GenericForeignKey) from the parent
            # class
            for field in base._meta.virtual_fields:
                if base._meta.abstract and field.name in field_names:
                    raise FieldError(
                        'Local field %r in class %r clashes '
                        'with field of similar name from '
                        'abstract base class %r' % (field.name, name, base.__name__)
                    )
                new_class.add_to_class(field.name, copy.deepcopy(field))
        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class
        new_class._prepare()
        new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
        return new_class
    def copy_managers(cls, base_managers):
        """Copy inherited managers onto this model class unless overridden."""
        # This is in-place sorting of an Options attribute, but that's fine.
        base_managers.sort()
        for _, mgr_name, manager in base_managers: # NOQA (redefinition of _)
            val = getattr(cls, mgr_name, None)
            if not val or val is manager:
                new_manager = manager._copy_to_model(cls)
                cls.add_to_class(mgr_name, new_manager)
    def add_to_class(cls, name, value):
        """Attach `value` to the class, honouring contribute_to_class hooks
        (fields, managers, Options) when present."""
        # We should call the contribute_to_class method only if it's bound
        if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)
    def _prepare(cls):
        """
        Creates some methods once self._meta has been populated.
        """
        opts = cls._meta
        opts._prepare(cls)
        if opts.order_with_respect_to:
            cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
            cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
            # Defer creating accessors on the foreign class until it has been
            # created and registered. If remote_field is None, we're ordering
            # with respect to a GenericForeignKey and don't know what the
            # foreign class is - we'll add those accessors later in
            # contribute_to_class().
            if opts.order_with_respect_to.remote_field:
                wrt = opts.order_with_respect_to
                remote = wrt.remote_field.model
                lazy_related_operation(make_foreign_order_accessors, cls, remote)
        # Give the class a docstring -- its definition.
        if cls.__doc__ is None:
            cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields))
        get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower)
        if get_absolute_url_override:
            setattr(cls, 'get_absolute_url', get_absolute_url_override)
        ensure_default_manager(cls)
        signals.class_prepared.send(sender=cls)
class ModelState(object):
    """
    Per-instance database state carrier attached to every model instance.
    """
    def __init__(self, db=None):
        # Alias of the database this instance was loaded from/saved to
        # (None until the instance has touched a database).
        self.db = db
        # While True, uniqueness validation treats the object as new and
        # as-yet-unsaved, which matters for models with explicit (non-auto)
        # primary keys. This impacts validation only, never the actual save.
        self.adding = True
class Model(six.with_metaclass(ModelBase)):
_deferred = False
    def __init__(self, *args, **kwargs):
        """Populate field attributes from positional and/or keyword args,
        emitting pre_init/post_init signals around the work."""
        signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
        # Set up the storage for instance state
        self._state = ModelState()
        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work
        # The reason for the kwargs check is that standard iterator passes in by
        # args, and instantiation for iteration is 33% faster.
        args_len = len(args)
        if args_len > len(self._meta.concrete_fields):
            # Daft, but matches old exception sans the err msg.
            raise IndexError("Number of args exceeds number of fields")
        if not kwargs:
            # Fast path: positional args only, paired with concrete fields.
            fields_iter = iter(self._meta.concrete_fields)
            # The ordering of the zip calls matter - zip throws StopIteration
            # when an iter throws it. So if the first iter throws it, the second
            # is *not* consumed. We rely on this, so don't change the order
            # without changing the logic.
            for val, field in zip(args, fields_iter):
                setattr(self, field.attname, val)
        else:
            # Slower, kwargs-ready version.
            fields_iter = iter(self._meta.fields)
            for val, field in zip(args, fields_iter):
                setattr(self, field.attname, val)
                kwargs.pop(field.name, None)
                # Maintain compatibility with existing calls.
                if isinstance(field.remote_field, ManyToOneRel):
                    kwargs.pop(field.attname, None)
        # Now we're left with the unprocessed fields that *must* come from
        # keywords, or default.
        for field in fields_iter:
            is_related_object = False
            # This slightly odd construct is so that we can access any
            # data-descriptor object (DeferredAttribute) without triggering its
            # __get__ method.
            if (field.attname not in kwargs and
                    (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
                     or field.column is None)):
                # This field will be populated on request.
                continue
            if kwargs:
                if isinstance(field.remote_field, ForeignObjectRel):
                    try:
                        # Assume object instance was passed in.
                        rel_obj = kwargs.pop(field.name)
                        is_related_object = True
                    except KeyError:
                        try:
                            # Object instance wasn't passed in -- must be an ID.
                            val = kwargs.pop(field.attname)
                        except KeyError:
                            val = field.get_default()
                    else:
                        # Object instance was passed in. Special case: You can
                        # pass in "None" for related objects if it's allowed.
                        if rel_obj is None and field.null:
                            val = None
                else:
                    try:
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        # This is done with an exception rather than the
                        # default argument on pop because we don't want
                        # get_default() to be evaluated, and then not used.
                        # Refs #12057.
                        val = field.get_default()
            else:
                val = field.get_default()
            if is_related_object:
                # If we are passed a related instance, set it using the
                # field.name instead of field.attname (e.g. "user" instead of
                # "user_id") so that the object gets properly cached (and type
                # checked) by the RelatedObjectDescriptor.
                setattr(self, field.name, rel_obj)
            else:
                setattr(self, field.attname, val)
        if kwargs:
            # Leftover kwargs may target properties; anything else is an error.
            for prop in list(kwargs):
                try:
                    if isinstance(getattr(self.__class__, prop), property):
                        setattr(self, prop, kwargs.pop(prop))
                except AttributeError:
                    pass
            if kwargs:
                raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
        super(Model, self).__init__()
        signals.post_init.send(sender=self.__class__, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
if cls._deferred:
new = cls(**dict(zip(field_names, values)))
else:
new = cls(*values)
new._state.adding = False
new._state.db = db
return new
def __repr__(self):
try:
u = six.text_type(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return force_str('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
if six.PY2 and hasattr(self, '__unicode__'):
return force_text(self).encode('utf-8')
return str('%s object' % self.__class__.__name__)
def __eq__(self, other):
if not isinstance(other, Model):
return False
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self._get_pk_val()
if my_pk is None:
return self is other
return my_pk == other._get_pk_val()
    def __ne__(self, other):
        # Python 2 does not derive != from __eq__, so define it explicitly.
        return not self.__eq__(other)
def __hash__(self):
if self._get_pk_val() is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self._get_pk_val())
    def __reduce__(self):
        """
        Provides pickling support. Normally, this just dispatches to Python's
        standard handling. However, for models with deferred field loading, we
        need to do things manually, as they're dynamically created classes and
        only module-level classes can be pickled by the default path.
        """
        data = self.__dict__
        # Stamp the current Django version; __setstate__ warns on mismatch.
        data[DJANGO_VERSION_PICKLE_KEY] = get_version()
        if not self._deferred:
            class_id = self._meta.app_label, self._meta.object_name
            # model_unpickle / simple_class_factory are presumably module-level
            # helpers (defined outside this chunk) -- confirm before editing.
            return model_unpickle, (class_id, [], simple_class_factory), data
        # Deferred case: record which field attnames are deferred so the
        # dynamic class can be rebuilt at unpickle time.
        defers = []
        for field in self._meta.fields:
            if isinstance(self.__class__.__dict__.get(field.attname),
                          DeferredAttribute):
                defers.append(field.attname)
        model = self._meta.proxy_for_model
        class_id = model._meta.app_label, model._meta.object_name
        return (model_unpickle, (class_id, defers, deferred_class_factory), data)
def __setstate__(self, state):
msg = None
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
current_version = get_version()
if current_version != pickled_version:
msg = ("Pickled model instance's Django version %s does"
" not match the current version %s."
% (pickled_version, current_version))
else:
msg = "Pickled model instance's Django version is not specified."
if msg:
warnings.warn(msg, RuntimeWarning, stacklevel=2)
self.__dict__.update(state)
def _get_pk_val(self, meta=None):
if not meta:
meta = self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def get_deferred_fields(self):
"""
Returns a set containing names of deferred fields for this instance.
"""
return {
f.attname for f in self._meta.concrete_fields
if isinstance(self.__class__.__dict__.get(f.attname), DeferredAttribute)
}
    def refresh_from_db(self, using=None, fields=None, **kwargs):
        """
        Reloads field values from the database.
        By default, the reloading happens from the database this instance was
        loaded from, or by the read router if this instance wasn't loaded from
        any database. The using parameter will override the default.
        Fields can be used to specify which fields to reload. The fields
        should be an iterable of field attnames. If fields is None, then
        all non-deferred fields are reloaded.
        When accessing deferred fields of an instance, the deferred loading
        of the field will call this method.
        """
        if fields is not None:
            if len(fields) == 0:
                # Nothing requested -- no database round trip.
                return
            if any(LOOKUP_SEP in f for f in fields):
                raise ValueError(
                    'Found "%s" in fields argument. Relations and transforms '
                    'are not allowed in fields.' % LOOKUP_SEP)
        db = using if using is not None else self._state.db
        # Query against the non-deferred class so all fields are available.
        if self._deferred:
            non_deferred_model = self._meta.proxy_for_model
        else:
            non_deferred_model = self.__class__
        db_instance_qs = non_deferred_model._default_manager.using(db).filter(pk=self.pk)
        # Use provided fields, if not set then reload all non-deferred fields.
        if fields is not None:
            fields = list(fields)
            db_instance_qs = db_instance_qs.only(*fields)
        elif self._deferred:
            deferred_fields = self.get_deferred_fields()
            fields = [f.attname for f in self._meta.concrete_fields
                      if f.attname not in deferred_fields]
            db_instance_qs = db_instance_qs.only(*fields)
        db_instance = db_instance_qs.get()
        non_loaded_fields = db_instance.get_deferred_fields()
        for field in self._meta.concrete_fields:
            if field.attname in non_loaded_fields:
                # This field wasn't refreshed - skip ahead.
                continue
            setattr(self, field.attname, getattr(db_instance, field.attname))
            # Throw away stale foreign key references.
            if field.is_relation and field.get_cache_name() in self.__dict__:
                rel_instance = getattr(self, field.get_cache_name())
                local_val = getattr(db_instance, field.attname)
                related_val = None if rel_instance is None else getattr(rel_instance, field.target_field.attname)
                if local_val != related_val or (local_val is None and related_val is None):
                    del self.__dict__[field.get_cache_name()]
        self._state.db = db_instance._state.db
def serializable_value(self, field_name):
"""
Returns the value of the field name for this instance. If the field is
a foreign key, returns the id value, instead of the object. If there's
no Field object with this name on the model, the model attribute's
value is returned directly.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
    def save(self, force_insert=False, force_update=False, using=None,
             update_fields=None):
        """
        Saves the current instance. Override this in a subclass if you want to
        control the saving process.
        The 'force_insert' and 'force_update' parameters can be used to insist
        that the "save" must be an SQL insert or update (or equivalent for
        non-SQL backends), respectively. Normally, they should not be set.
        """
        # Ensure that a model instance without a PK hasn't been assigned to
        # a ForeignKey or OneToOneField on this model. If the field is
        # nullable, allowing the save() would result in silent data loss.
        for field in self._meta.concrete_fields:
            if field.is_relation:
                # If the related field isn't cached, then an instance hasn't
                # been assigned and there's no need to worry about this check.
                try:
                    getattr(self, field.get_cache_name())
                except AttributeError:
                    continue
                obj = getattr(self, field.name, None)
                # A pk may have been assigned manually to a model instance not
                # saved to the database (or auto-generated in a case like
                # UUIDField), but we allow the save to proceed and rely on the
                # database to raise an IntegrityError if applicable. If
                # constraints aren't supported by the database, there's the
                # unavoidable risk of data corruption.
                if obj and obj.pk is None:
                    # Remove the object from a related instance cache.
                    if not field.remote_field.multiple:
                        delattr(obj, field.remote_field.get_cache_name())
                    raise ValueError(
                        "save() prohibited to prevent data loss due to "
                        "unsaved related object '%s'." % field.name
                    )
        using = using or router.db_for_write(self.__class__, instance=self)
        if force_insert and (force_update or update_fields):
            raise ValueError("Cannot force both insert and updating in model saving.")
        if update_fields is not None:
            # If update_fields is empty, skip the save. We do also check for
            # no-op saves later on for inheritance cases. This bailout is
            # still needed for skipping signal sending.
            if len(update_fields) == 0:
                return
            update_fields = frozenset(update_fields)
            # Validate that every requested field is an updatable local field
            # (accepting both field names and attnames, but never the pk).
            field_names = set()
            for field in self._meta.fields:
                if not field.primary_key:
                    field_names.add(field.name)
                    if field.name != field.attname:
                        field_names.add(field.attname)
            non_model_fields = update_fields.difference(field_names)
            if non_model_fields:
                raise ValueError("The following fields do not exist in this "
                                 "model or are m2m fields: %s"
                                 % ', '.join(non_model_fields))
        # If saving to the same database, and this model is deferred, then
        # automatically do a "update_fields" save on the loaded fields.
        elif not force_insert and self._deferred and using == self._state.db:
            field_names = set()
            for field in self._meta.concrete_fields:
                if not field.primary_key and not hasattr(field, 'through'):
                    field_names.add(field.attname)
            deferred_fields = [
                f.attname for f in self._meta.fields
                if (f.attname not in self.__dict__ and
                    isinstance(self.__class__.__dict__[f.attname], DeferredAttribute))
            ]
            loaded_fields = field_names.difference(deferred_fields)
            if loaded_fields:
                update_fields = frozenset(loaded_fields)
        self.save_base(using=using, force_insert=force_insert,
                       force_update=force_update, update_fields=update_fields)
    # Flag consumed by Django (e.g. templates) marking this as a mutator.
    save.alters_data = True
    def save_base(self, raw=False, force_insert=False,
                  force_update=False, using=None, update_fields=None):
        """
        Handles the parts of saving which should be done only once per save,
        yet need to be done in raw saves, too. This includes some sanity
        checks and signal sending.
        The 'raw' argument is telling save_base not to save any parent
        models and not to do any changes to the values before save. This
        is used by fixture loading.
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        assert not (force_insert and (force_update or update_fields))
        assert update_fields is None or len(update_fields) > 0
        cls = origin = self.__class__
        # Skip proxies, but keep the origin as the proxy model.
        if cls._meta.proxy:
            cls = cls._meta.concrete_model
        meta = cls._meta
        if not meta.auto_created:
            signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
                                  update_fields=update_fields)
        # Parent rows and this model's row are written in one atomic unit.
        with transaction.atomic(using=using, savepoint=False):
            if not raw:
                self._save_parents(cls, using, update_fields)
            updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
        # Store the database on which the object was saved
        self._state.db = using
        # Once saved, this is no longer a to-be-added instance.
        self._state.adding = False
        # Signal that the save is complete
        if not meta.auto_created:
            signals.post_save.send(sender=origin, instance=self, created=(not updated),
                                   update_fields=update_fields, raw=raw, using=using)
    # Flag consumed by Django (e.g. templates) marking this as a mutator.
    save_base.alters_data = True
    def _save_parents(self, cls, using, update_fields):
        """
        Saves all the parents of cls using values from self.

        Walks the multi-table-inheritance chain: for each parent link,
        ancestors are saved recursively before the parent's own table row.
        """
        meta = cls._meta
        for parent, field in meta.parents.items():
            # Make sure the link fields are synced between parent and self.
            if (field and getattr(self, parent._meta.pk.attname) is None
                    and getattr(self, field.attname) is not None):
                setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
            # Grandparents first, then the parent's own row.
            self._save_parents(cls=parent, using=using, update_fields=update_fields)
            self._save_table(cls=parent, using=using, update_fields=update_fields)
            # Set the parent's PK value to self.
            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy set
                # attname directly, bypassing the descriptor. Invalidate
                # the related object cache, in case it's been accidentally
                # populated. A fresh instance will be re-built from the
                # database if necessary.
                cache_name = field.get_cache_name()
                if hasattr(self, cache_name):
                    delattr(self, cache_name)
    def _save_table(self, raw=False, cls=None, force_insert=False,
                    force_update=False, using=None, update_fields=None):
        """
        Does the heavy-lifting involved in saving. Updates or inserts the data
        for a single table.

        Returns True when an UPDATE modified an existing row, False when a
        new row was inserted instead.
        """
        meta = cls._meta
        non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
        if update_fields:
            # Restrict the write to the explicitly requested fields.
            non_pks = [f for f in non_pks
                       if f.name in update_fields or f.attname in update_fields]
        pk_val = self._get_pk_val(meta)
        if pk_val is None:
            # Allow the primary key field to supply a value before the insert.
            pk_val = meta.pk.get_pk_value_on_save(self)
            setattr(self, meta.pk.attname, pk_val)
        pk_set = pk_val is not None
        if not pk_set and (force_update or update_fields):
            raise ValueError("Cannot force an update in save() with no primary key.")
        updated = False
        # If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
        if pk_set and not force_insert:
            base_qs = cls._base_manager.using(using)
            # In raw mode the stored attribute values are used verbatim;
            # otherwise each field may transform its value via pre_save().
            values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False)))
                      for f in non_pks]
            forced_update = update_fields or force_update
            updated = self._do_update(base_qs, using, pk_val, values, update_fields,
                                      forced_update)
            if force_update and not updated:
                raise DatabaseError("Forced update did not affect any rows.")
            if update_fields and not updated:
                raise DatabaseError("Save with update_fields did not affect any rows.")
        if not updated:
            if meta.order_with_respect_to:
                # If this is a model with an order_with_respect_to
                # autopopulate the _order field
                field = meta.order_with_respect_to
                filter_args = field.get_filter_kwargs_for_object(self)
                order_value = cls._base_manager.using(using).filter(**filter_args).count()
                self._order = order_value
            fields = meta.local_concrete_fields
            if not pk_set:
                # Let the database assign the auto primary key on insert.
                fields = [f for f in fields if not isinstance(f, AutoField)]
            update_pk = bool(meta.has_auto_field and not pk_set)
            result = self._do_insert(cls._base_manager, using, fields, update_pk, raw)
            if update_pk:
                setattr(self, meta.pk.attname, result)
        return updated
    def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
        """
        This method will try to update the model. If the model was updated (in
        the sense that an update query was done and a matching row was found
        from the DB) the method will return True.

        ``forced_update`` disables the select_on_save existence pre-check.
        """
        filtered = base_qs.filter(pk=pk_val)
        if not values:
            # We can end up here when saving a model in inheritance chain where
            # update_fields doesn't target any field in current model. In that
            # case we just say the update succeeded. Another case ending up here
            # is a model with just PK - in that case check that the PK still
            # exists.
            return update_fields is not None or filtered.exists()
        if self._meta.select_on_save and not forced_update:
            if filtered.exists():
                # It may happen that the object is deleted from the DB right after
                # this check, causing the subsequent UPDATE to return zero matching
                # rows. The same result can occur in some rare cases when the
                # database returns zero despite the UPDATE being executed
                # successfully (a row is matched and updated). In order to
                # distinguish these two cases, the object's existence in the
                # database is again checked for if the UPDATE query returns 0.
                return filtered._update(values) > 0 or filtered.exists()
            else:
                return False
        return filtered._update(values) > 0
def _do_insert(self, manager, using, fields, update_pk, raw):
"""
Do an INSERT. If update_pk is defined then this method should return
the new pk for the model.
"""
return manager._insert([self], fields=fields, return_id=update_pk,
using=using, raw=raw)
def delete(self, using=None, keep_parents=False):
using = using or router.db_for_write(self.__class__, instance=self)
assert self._get_pk_val() is not None, (
"%s object can't be deleted because its %s attribute is set to None." %
(self._meta.object_name, self._meta.pk.attname)
)
collector = Collector(using=using)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_text(dict(field.flatchoices).get(value, value), strings_only=True)
    def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
        """
        Return the neighbouring instance ordered by ``field``.

        ``is_next`` selects the following (True) or preceding (False)
        object; ties on the field value are broken by primary key. Raises
        DoesNotExist when no such object exists.
        """
        if not self.pk:
            raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
        op = 'gt' if is_next else 'lt'
        order = '' if is_next else '-'
        param = force_text(getattr(self, field.attname))
        # Match a strictly greater/smaller field value, or an equal value
        # with a greater/smaller pk (the tie-breaker).
        q = Q(**{'%s__%s' % (field.name, op): param})
        q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
        qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by(
            '%s%s' % (order, field.name), '%spk' % order
        )
        try:
            return qs[0]
        except IndexError:
            raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
    def _get_next_or_previous_in_order(self, is_next):
        """
        Return the next/previous object in an order_with_respect_to
        sequence.

        The result is memoised on the instance under a per-direction
        attribute so repeated calls hit the database only once.
        """
        cachename = "__%s_order_cache" % is_next
        if not hasattr(self, cachename):
            op = 'gt' if is_next else 'lt'
            order = '_order' if is_next else '-_order'
            order_field = self._meta.order_with_respect_to
            filter_args = order_field.get_filter_kwargs_for_object(self)
            # Compare against this object's own _order value via a subquery
            # on the primary key.
            obj = self._default_manager.filter(**filter_args).filter(**{
                '_order__%s' % op: self._default_manager.values('_order').filter(**{
                    self._meta.pk.name: self.pk
                })
            }).order_by(order)[:1].get()
            setattr(self, cachename, obj)
        return getattr(self, cachename)
def prepare_database_save(self, field):
if self.pk is None:
raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
return getattr(self, field.remote_field.get_related_field().attname)
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
"""
pass
def validate_unique(self, exclude=None):
"""
Checks unique constraints on the model and raises ``ValidationError``
if any failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
    def _get_unique_checks(self, exclude=None):
        """
        Gather a list of checks to perform. Since validate_unique could be
        called from a ModelForm, some fields may have been excluded; we can't
        perform a unique check on a model that is missing fields involved
        in that check.

        Fields that did not validate should also be excluded, but they need
        to be passed in via the exclude argument.

        Returns a ``(unique_checks, date_checks)`` pair.
        """
        if exclude is None:
            exclude = []
        unique_checks = []
        unique_togethers = [(self.__class__, self._meta.unique_together)]
        for parent_class in self._meta.get_parent_list():
            if parent_class._meta.unique_together:
                unique_togethers.append((parent_class, parent_class._meta.unique_together))
        for model_class, unique_together in unique_togethers:
            for check in unique_together:
                for name in check:
                    # If this is an excluded field, don't add this check.
                    if name in exclude:
                        break
                else:
                    # for-else: only reached when the inner loop completed
                    # without a break, i.e. no field in the check is excluded.
                    unique_checks.append((model_class, tuple(check)))
        # These are checks for the unique_for_<date/year/month>.
        date_checks = []
        # Gather a list of checks for fields declared as unique and add them to
        # the list of checks.
        fields_with_class = [(self.__class__, self._meta.local_fields)]
        for parent_class in self._meta.get_parent_list():
            fields_with_class.append((parent_class, parent_class._meta.local_fields))
        for model_class, fields in fields_with_class:
            for f in fields:
                name = f.name
                if name in exclude:
                    continue
                if f.unique:
                    unique_checks.append((model_class, (name,)))
                if f.unique_for_date and f.unique_for_date not in exclude:
                    date_checks.append((model_class, 'date', name, f.unique_for_date))
                if f.unique_for_year and f.unique_for_year not in exclude:
                    date_checks.append((model_class, 'year', name, f.unique_for_year))
                if f.unique_for_month and f.unique_for_month not in exclude:
                    date_checks.append((model_class, 'month', name, f.unique_for_month))
        return unique_checks, date_checks
    def _perform_unique_checks(self, unique_checks):
        """
        Run the given uniqueness checks against the database and return a
        dict mapping field name (or NON_FIELD_ERRORS for multi-field
        checks) to a list of ValidationErrors.
        """
        errors = {}
        for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique field.
            lookup_kwargs = {}
            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)
                if lookup_value is None:
                    # no value, skip the lookup
                    continue
                if f.primary_key and not self._state.adding:
                    # no need to check for unique primary key when editing
                    continue
                lookup_kwargs[str(field_name)] = lookup_value
            # some fields were skipped, no reason to do the check
            if len(unique_check) != len(lookup_kwargs):
                continue
            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            # Note that we need to use the pk as defined by model_class, not
            # self.pk. These can be different fields because model inheritance
            # allows single model to have effectively multiple primary keys.
            # Refs #17615.
            model_class_pk = self._get_pk_val(model_class._meta)
            if not self._state.adding and model_class_pk is not None:
                qs = qs.exclude(pk=model_class_pk)
            if qs.exists():
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
        return errors
    def _perform_date_checks(self, date_checks):
        """
        Run unique_for_date/year/month checks and return a dict mapping
        the checked field name to a list of ValidationErrors.
        """
        errors = {}
        for model_class, lookup_type, field, unique_for in date_checks:
            lookup_kwargs = {}
            # there's a ticket to add a date lookup, we can remove this special
            # case if that makes it's way in
            date = getattr(self, unique_for)
            if date is None:
                # No date value set on the instance, nothing to compare against.
                continue
            if lookup_type == 'date':
                lookup_kwargs['%s__day' % unique_for] = date.day
                lookup_kwargs['%s__month' % unique_for] = date.month
                lookup_kwargs['%s__year' % unique_for] = date.year
            else:
                lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
            lookup_kwargs[field] = getattr(self, field)
            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not self._state.adding and self.pk is not None:
                qs = qs.exclude(pk=self.pk)
            if qs.exists():
                errors.setdefault(field, []).append(
                    self.date_error_message(lookup_type, field, unique_for)
                )
        return errors
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages['unique_for_date'],
code='unique_for_date',
params={
'model': self,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'lookup_type': lookup_type,
'field': field_name,
'field_label': six.text_type(capfirst(field.verbose_name)),
'date_field': unique_for,
'date_field_label': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
}
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
'model': self,
'model_class': model_class,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'unique_check': unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params['field_label'] = six.text_type(capfirst(field.verbose_name))
return ValidationError(
message=field.error_messages['unique'],
code='unique',
params=params,
)
# unique_together
else:
field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
params['field_labels'] = six.text_type(get_text_list(field_labels, _('and')))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code='unique_together',
params=params,
)
    def full_clean(self, exclude=None, validate_unique=True):
        """
        Calls clean_fields, clean, and validate_unique, on the model,
        and raises a ``ValidationError`` for any errors that occurred.

        Errors from every stage are accumulated into one dict so the
        caller sees all failures at once rather than just the first.
        """
        errors = {}
        if exclude is None:
            exclude = []
        else:
            # Copy so the caller's list is not mutated below.
            exclude = list(exclude)
        try:
            self.clean_fields(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)
        # Form.clean() is run even if other validation fails, so do the
        # same with Model.clean() for consistency.
        try:
            self.clean()
        except ValidationError as e:
            errors = e.update_error_dict(errors)
        # Run unique checks, but only for fields that passed validation.
        if validate_unique:
            for name in errors.keys():
                if name != NON_FIELD_ERRORS and name not in exclude:
                    exclude.append(name)
            try:
                self.validate_unique(exclude=exclude)
            except ValidationError as e:
                errors = e.update_error_dict(errors)
        if errors:
            raise ValidationError(errors)
    def clean_fields(self, exclude=None):
        """
        Cleans all fields and raises a ValidationError containing a dict
        of all validation errors if any occur.

        The error dict is keyed by field name; fields listed in
        ``exclude`` are skipped entirely.
        """
        if exclude is None:
            exclude = []
        errors = {}
        for f in self._meta.fields:
            if f.name in exclude:
                continue
            # Skip validation for empty fields with blank=True. The developer
            # is responsible for making sure they have a valid value.
            raw_value = getattr(self, f.attname)
            if f.blank and raw_value in f.empty_values:
                continue
            try:
                # clean() may normalise the value, so write it back.
                setattr(self, f.attname, f.clean(raw_value, self))
            except ValidationError as e:
                errors[f.name] = e.error_list
        if errors:
            raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = []
errors.extend(cls._check_swappable())
errors.extend(cls._check_model())
errors.extend(cls._check_managers(**kwargs))
if not cls._meta.swapped:
errors.extend(cls._check_fields(**kwargs))
errors.extend(cls._check_m2m_through_same_relationship())
errors.extend(cls._check_long_column_names())
clash_errors = cls._check_id_field() + cls._check_field_name_clashes()
errors.extend(clash_errors)
# If there are field name clashes, hide consequent column name
# clashes.
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors.extend(cls._check_index_together())
errors.extend(cls._check_unique_together())
errors.extend(cls._check_ordering())
return errors
    @classmethod
    def _check_swappable(cls):
        """ Check if the swapped model exists. """
        errors = []
        if cls._meta.swapped:
            try:
                apps.get_model(cls._meta.swapped)
            except ValueError:
                # The swappable setting is not a dotted "app_label.Model" path.
                errors.append(
                    checks.Error(
                        "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable,
                        hint=None,
                        obj=None,
                        id='models.E001',
                    )
                )
            except LookupError:
                # Well-formed reference, but the target model is missing.
                app_label, model_name = cls._meta.swapped.split('.')
                errors.append(
                    checks.Error(
                        "'%s' references '%s.%s', which has not been "
                        "installed, or is abstract." % (
                            cls._meta.swappable, app_label, model_name
                        ),
                        hint=None,
                        obj=None,
                        id='models.E002',
                    )
                )
        return errors
@classmethod
def _check_model(cls):
errors = []
if cls._meta.proxy:
if cls._meta.local_fields or cls._meta.local_many_to_many:
errors.append(
checks.Error(
"Proxy model '%s' contains model fields." % cls.__name__,
hint=None,
obj=None,
id='models.E017',
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
""" Perform all manager checks. """
errors = []
for __, manager, __ in cls._meta.managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
""" Perform all field checks. """
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
""" Check if no relationship model is used by more than one m2m field.
"""
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
# Skip when the target model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
for f in fields:
signature = (f.remote_field.model, cls, f.remote_field.through)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
"The model has two many-to-many relations through "
"the intermediate model '%s'." % f.remote_field.through._meta.label,
hint=None,
obj=cls,
id='models.E003',
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
""" Check if `id` field is a primary key. """
fields = list(f for f in cls._meta.local_fields
if f.name == 'id' and f != cls._meta.pk)
# fields is empty or consists of the invalid "id" field
if fields and not fields[0].primary_key and cls._meta.pk.name == 'id':
return [
checks.Error(
"'id' can only be used as a field name if the field also "
"sets 'primary_key=True'.",
hint=None,
obj=cls,
id='models.E004',
)
]
else:
return []
    @classmethod
    def _check_field_name_clashes(cls):
        """ Ref #17673. Detect field names shadowed across the inheritance
        chain (models.E005) or clashing with inherited fields (models.E006).
        """
        errors = []
        used_fields = {}  # name or attname -> field
        # Check that multi-inheritance doesn't cause field name shadowing.
        for parent in cls._meta.get_parent_list():
            for f in parent._meta.local_fields:
                clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
                if clash:
                    errors.append(
                        checks.Error(
                            "The field '%s' from parent model "
                            "'%s' clashes with the field '%s' "
                            "from parent model '%s'." % (
                                clash.name, clash.model._meta,
                                f.name, f.model._meta
                            ),
                            hint=None,
                            obj=cls,
                            id='models.E005',
                        )
                    )
                used_fields[f.name] = f
                used_fields[f.attname] = f
        # Check that fields defined in the model don't clash with fields from
        # parents, including auto-generated fields like multi-table inheritance
        # child accessors.
        for parent in cls._meta.get_parent_list():
            for f in parent._meta.get_fields():
                if f not in used_fields:
                    used_fields[f.name] = f
        for f in cls._meta.local_fields:
            clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
            # Note that we may detect clash between user-defined non-unique
            # field "id" and automatically added unique field "id", both
            # defined at the same model. This special case is considered in
            # _check_id_field and here we ignore it.
            id_conflict = (f.name == "id" and
                           clash and clash.name == "id" and clash.model == cls)
            if clash and not id_conflict:
                errors.append(
                    checks.Error(
                        "The field '%s' clashes with the field '%s' "
                        "from model '%s'." % (
                            f.name, clash.name, clash.model._meta
                        ),
                        hint=None,
                        obj=f,
                        id='models.E006',
                    )
                )
            used_fields[f.name] = f
            used_fields[f.attname] = f
        return errors
@classmethod
def _check_column_name_clashes(cls):
# Store a list of column names which have already been used by other fields.
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Ensure the column name is not already in use.
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
"Field '%s' has column name '%s' that is used by "
"another field." % (f.name, column_name),
hint="Specify a 'db_column' for the field.",
obj=cls,
id='models.E007'
)
)
else:
used_column_names.append(column_name)
return errors
    @classmethod
    def _check_index_together(cls):
        """ Check the value of "index_together" option.

        Validates the container shape (models.E008/E009), then defers
        per-field validation to _check_local_fields.
        """
        if not isinstance(cls._meta.index_together, (tuple, list)):
            return [
                checks.Error(
                    "'index_together' must be a list or tuple.",
                    hint=None,
                    obj=cls,
                    id='models.E008',
                )
            ]
        elif any(not isinstance(fields, (tuple, list))
                 for fields in cls._meta.index_together):
            return [
                checks.Error(
                    "All 'index_together' elements must be lists or tuples.",
                    hint=None,
                    obj=cls,
                    id='models.E009',
                )
            ]
        else:
            errors = []
            for fields in cls._meta.index_together:
                errors.extend(cls._check_local_fields(fields, "index_together"))
            return errors
    @classmethod
    def _check_unique_together(cls):
        """ Check the value of "unique_together" option.

        Validates the container shape (models.E010/E011), then defers
        per-field validation to _check_local_fields.
        """
        if not isinstance(cls._meta.unique_together, (tuple, list)):
            return [
                checks.Error(
                    "'unique_together' must be a list or tuple.",
                    hint=None,
                    obj=cls,
                    id='models.E010',
                )
            ]
        elif any(not isinstance(fields, (tuple, list))
                 for fields in cls._meta.unique_together):
            return [
                checks.Error(
                    "All 'unique_together' elements must be lists or tuples.",
                    hint=None,
                    obj=cls,
                    id='models.E011',
                )
            ]
        else:
            errors = []
            for fields in cls._meta.unique_together:
                errors.extend(cls._check_local_fields(fields, "unique_together"))
            return errors
    @classmethod
    def _check_local_fields(cls, fields, option):
        """
        Validate that every name in ``fields`` is an existing, local,
        non-m2m field; ``option`` names the Meta option being checked and
        is used in the error messages (models.E012/E013/E016).
        """
        from django.db import models

        # In order to avoid hitting the relation tree prematurely, we use our
        # own fields_map instead of using get_field()
        forward_fields_map = {
            field.name: field for field in cls._meta._get_fields(reverse=False)
        }

        errors = []
        for field_name in fields:
            try:
                field = forward_fields_map[field_name]
            except KeyError:
                errors.append(
                    checks.Error(
                        "'%s' refers to the non-existent field '%s'." % (
                            option, field_name,
                        ),
                        hint=None,
                        obj=cls,
                        id='models.E012',
                    )
                )
            else:
                if isinstance(field.remote_field, models.ManyToManyRel):
                    errors.append(
                        checks.Error(
                            "'%s' refers to a ManyToManyField '%s', but "
                            "ManyToManyFields are not permitted in '%s'." % (
                                option, field_name, option,
                            ),
                            hint=None,
                            obj=cls,
                            id='models.E013',
                        )
                    )
                elif field not in cls._meta.local_fields:
                    errors.append(
                        checks.Error(
                            ("'%s' refers to field '%s' which is not local "
                             "to model '%s'.") % (
                                option, field_name, cls._meta.object_name,
                            ),
                            hint=("This issue may be caused by multi-table "
                                  "inheritance."),
                            obj=cls,
                            id='models.E016',
                        )
                    )
        return errors
    @classmethod
    def _check_ordering(cls):
        """ Check "ordering" option -- is it a list of strings and do all fields
        exist?

        Emits models.E021 (clash with order_with_respect_to), E014 (wrong
        type) or E015 (unknown field names).
        """
        if cls._meta._ordering_clash:
            return [
                checks.Error(
                    "'ordering' and 'order_with_respect_to' cannot be used together.",
                    hint=None,
                    obj=cls,
                    id='models.E021',
                ),
            ]
        if cls._meta.order_with_respect_to or not cls._meta.ordering:
            return []
        if not isinstance(cls._meta.ordering, (list, tuple)):
            return [
                checks.Error(
                    ("'ordering' must be a tuple or list "
                     "(even if you want to order by only one field)."),
                    hint=None,
                    obj=cls,
                    id='models.E014',
                )
            ]
        errors = []
        # Progressively narrow the ordering entries to plain candidate field
        # names via a lazy generator pipeline.
        fields = cls._meta.ordering
        # Skip '?' fields.
        fields = (f for f in fields if f != '?')
        # Convert "-field" to "field".
        fields = ((f[1:] if f.startswith('-') else f) for f in fields)
        # Skip ordering in the format field1__field2 (FIXME: checking
        # this format would be nice, but it's a little fiddly).
        fields = (f for f in fields if '__' not in f)
        # Skip ordering on pk. This is always a valid order_by field
        # but is an alias and therefore won't be found by opts.get_field.
        fields = {f for f in fields if f != 'pk'}
        # Check for invalid or non-existent fields in ordering.
        invalid_fields = []
        # Any field name that is not present in field_names does not exist.
        # Also, ordering by m2m fields is not allowed.
        opts = cls._meta
        valid_fields = set(chain.from_iterable(
            (f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),)
            for f in chain(opts.fields, opts.related_objects)
        ))
        invalid_fields.extend(fields - valid_fields)
        for invalid_field in invalid_fields:
            errors.append(
                checks.Error(
                    "'ordering' refers to the non-existent field '%s'." % invalid_field,
                    hint=None,
                    obj=cls,
                    id='models.E015',
                )
            )
        return errors
    @classmethod
    def _check_long_column_names(cls):
        """
        Check that any auto-generated column names are shorter than the limits
        for each database in which the model will be created.

        Emits models.E018 for local fields and models.E019 for columns of
        implicit M2M through tables.
        """
        errors = []
        allowed_len = None
        db_alias = None

        # Find the minimum max allowed length among all specified db_aliases.
        for db in settings.DATABASES.keys():
            # skip databases where the model won't be created
            if not router.allow_migrate_model(db, cls):
                continue
            connection = connections[db]
            max_name_length = connection.ops.max_name_length()
            # Backends that silently truncate names (or report no limit)
            # cannot fail on long names, so they don't constrain the check.
            if max_name_length is None or connection.features.truncates_names:
                continue
            else:
                if allowed_len is None:
                    allowed_len = max_name_length
                    db_alias = db
                elif max_name_length < allowed_len:
                    allowed_len = max_name_length
                    db_alias = db

        if allowed_len is None:
            # No constraining database: nothing to check.
            return errors

        for f in cls._meta.local_fields:
            _, column_name = f.get_attname_column()

            # Check if auto-generated name for the field is too long
            # for the database.
            if (f.db_column is None and column_name is not None
                    and len(column_name) > allowed_len):
                errors.append(
                    checks.Error(
                        'Autogenerated column name too long for field "%s". '
                        'Maximum length is "%s" for database "%s".'
                        % (column_name, allowed_len, db_alias),
                        hint="Set the column name manually using 'db_column'.",
                        obj=cls,
                        id='models.E018',
                    )
                )

        for f in cls._meta.local_many_to_many:
            # Check if auto-generated name for the M2M field is too long
            # for the database.
            for m2m in f.remote_field.through._meta.local_fields:
                _, rel_name = m2m.get_attname_column()
                if (m2m.db_column is None and rel_name is not None
                        and len(rel_name) > allowed_len):
                    errors.append(
                        checks.Error(
                            'Autogenerated column name too long for M2M field '
                            '"%s". Maximum length is "%s" for database "%s".'
                            % (rel_name, allowed_len, db_alias),
                            hint=("Use 'through' to create a separate model "
                                  "for M2M and then set column_name using "
                                  "'db_column'."),
                            obj=cls,
                            id='models.E019',
                        )
                    )

        return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(ordered_obj, self, id_list, using=None):
    """
    Persist the ordering given by ``id_list`` into each related row's
    ``_order`` column, one UPDATE per object inside a single transaction.
    """
    if using is None:
        using = DEFAULT_DB_ALIAS
    order_wrt = ordered_obj._meta.order_with_respect_to
    filter_args = order_wrt.get_forward_related_filter(self)
    # FIXME: It would be nice if there was an "update many" version of update
    # for situations like this.
    with transaction.atomic(using=using, savepoint=False):
        for position, obj_pk in enumerate(id_list):
            ordered_obj.objects.filter(pk=obj_pk, **filter_args).update(_order=position)
def method_get_order(ordered_obj, self):
    """Return the related objects' primary keys in their stored order."""
    order_wrt = ordered_obj._meta.order_with_respect_to
    lookup = order_wrt.get_forward_related_filter(self)
    return ordered_obj.objects.filter(**lookup).values_list(
        ordered_obj._meta.pk.name, flat=True)
def make_foreign_order_accessors(model, related_model):
    """
    Attach get_<model>_order / set_<model>_order helpers to
    ``related_model``, each pre-bound to ``model`` via curry.
    """
    lowered = model.__name__.lower()
    accessors = (
        ('get_%s_order' % lowered, method_get_order),
        ('set_%s_order' % lowered, method_set_order),
    )
    for accessor_name, func in accessors:
        setattr(related_model, accessor_name, curry(func, model))
########
# MISC #
########
def simple_class_factory(model, attrs):
    """
    Needed for dynamic classes.

    Ignores ``attrs`` and hands the model class back unchanged.
    """
    return model
def model_unpickle(model_id, attrs, factory):
    """
    Used to unpickle Model subclasses with deferred fields.

    ``model_id`` is either an (app_label, model_name) tuple or a model
    class; ``factory`` rebuilds the (possibly deferred) class from it.
    """
    if isinstance(model_id, tuple):
        # The app registry may not be populated yet when unpickling happens
        # in a fresh process; populate it on demand.
        if not apps.ready:
            apps.populate(settings.INSTALLED_APPS)
        model = apps.get_model(*model_id)
    else:
        # Backwards compat - the model was cached directly in earlier versions.
        model = model_id
    cls = factory(model, attrs)
    # Allocate without running __init__; pickle restores state afterwards.
    return cls.__new__(cls)

model_unpickle.__safe_for_unpickle__ = True
def unpickle_inner_exception(klass, exception_name):
    """
    Recreate an instance of the exception class named
    ``exception_name`` that lives on ``klass``.
    """
    # Look the exception class up on its owner, then allocate an instance
    # without running __init__ (matching pickle's reconstruction protocol).
    exception_class = getattr(klass, exception_name)
    return exception_class.__new__(exception_class)
# Copyright 2013 Nicira, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import mock
from neutronclient.common import exceptions as n_exc
from neutronclient.neutron import v2_0 as neutronv20
from oslo_config import cfg
from oslo_serialization import jsonutils
import webob
from nova.api.openstack.compute.contrib import security_groups
from nova import compute
from nova import context
import nova.db
from nova import exception
from nova.network import model
from nova.network.neutronv2 import api as neutron_api
from nova.network.security_group import neutron_driver
from nova.objects import instance as instance_obj
from nova import test
from nova.tests.unit.api.openstack.compute.contrib import test_security_groups
from nova.tests.unit.api.openstack import fakes
class TestNeutronSecurityGroupsTestCase(test.TestCase):
    """Base test case that routes the security group API through Neutron."""

    def setUp(self):
        super(TestNeutronSecurityGroupsTestCase, self).setUp()
        cfg.CONF.set_override('security_group_api', 'neutron')
        # Swap in the fake Neutron client for the duration of each test;
        # restored in tearDown.
        self.original_client = neutron_api.get_client
        neutron_api.get_client = get_client

    def tearDown(self):
        neutron_api.get_client = self.original_client
        # Clear any state the fake client accumulated during the test.
        get_client()._reset()
        super(TestNeutronSecurityGroupsTestCase, self).tearDown()
class TestNeutronSecurityGroupsV21(
test_security_groups.TestSecurityGroupsV21,
TestNeutronSecurityGroupsTestCase):
def _create_sg_template(self, **kwargs):
sg = test_security_groups.security_group_template(**kwargs)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
return self.controller.create(req, {'security_group': sg})
def _create_network(self):
body = {'network': {'name': 'net1'}}
neutron = get_client()
net = neutron.create_network(body)
body = {'subnet': {'network_id': net['network']['id'],
'cidr': '10.0.0.0/24'}}
neutron.create_subnet(body)
return net
def _create_port(self, **kwargs):
body = {'port': {'binding:vnic_type': model.VNIC_TYPE_NORMAL}}
fields = ['security_groups', 'device_id', 'network_id',
'port_security_enabled']
for field in fields:
if field in kwargs:
body['port'][field] = kwargs[field]
neutron = get_client()
return neutron.create_port(body)
def _create_security_group(self, **kwargs):
body = {'security_group': {}}
fields = ['name', 'description']
for field in fields:
if field in kwargs:
body['security_group'][field] = kwargs[field]
neutron = get_client()
return neutron.create_security_group(body)
    # The parent-class versions of the following tests assert nova-network
    # validation behaviour (required/unique names and descriptions, quota
    # enforcement) that Neutron handles differently or not at all, so each
    # is overridden as a no-op.
    def test_create_security_group_with_no_description(self):
        # Neutron's security group description field is optional.
        pass

    def test_create_security_group_with_empty_description(self):
        # Neutron's security group description field is optional.
        pass

    def test_create_security_group_with_blank_name(self):
        # Neutron's security group name field is optional.
        pass

    def test_create_security_group_with_whitespace_name(self):
        # Neutron allows security group name to be whitespace.
        pass

    def test_create_security_group_with_blank_description(self):
        # Neutron's security group description field is optional.
        pass

    def test_create_security_group_with_whitespace_description(self):
        # Neutron allows description to be whitespace.
        pass

    def test_create_security_group_with_duplicate_name(self):
        # Neutron allows duplicate names for security groups.
        pass

    def test_create_security_group_non_string_name(self):
        # Neutron allows security group name to be non string.
        pass

    def test_create_security_group_non_string_description(self):
        # Neutron allows non string description.
        pass

    def test_create_security_group_quota_limit(self):
        # Enforced by Neutron server.
        pass

    def test_update_security_group(self):
        # Enforced by Neutron server.
        pass
def test_get_security_group_list(self):
self._create_sg_template().get('security_group')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
list_dict = self.controller.index(req)
self.assertEqual(len(list_dict['security_groups']), 2)
def test_get_security_group_list_all_tenants(self):
pass
def test_get_security_group_by_instance(self):
sg = self._create_sg_template().get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg['id']],
device_id=test_security_groups.FAKE_UUID1)
expected = [{'rules': [], 'tenant_id': 'fake', 'id': sg['id'],
'name': 'test', 'description': 'test-description'}]
self.stubs.Set(nova.db, 'instance_get_by_uuid',
test_security_groups.return_server_by_uuid)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s/os-security-groups'
% test_security_groups.FAKE_UUID1)
res_dict = self.server_controller.index(
req, test_security_groups.FAKE_UUID1)['security_groups']
self.assertEqual(expected, res_dict)
def test_get_security_group_by_id(self):
sg = self._create_sg_template().get('security_group')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s'
% sg['id'])
res_dict = self.controller.show(req, sg['id'])
expected = {'security_group': sg}
self.assertEqual(res_dict, expected)
def test_delete_security_group_by_id(self):
sg = self._create_sg_template().get('security_group')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s' %
sg['id'])
self.controller.delete(req, sg['id'])
def test_delete_security_group_by_admin(self):
sg = self._create_sg_template().get('security_group')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s' %
sg['id'], use_admin_context=True)
self.controller.delete(req, sg['id'])
def test_delete_security_group_in_use(self):
sg = self._create_sg_template().get('security_group')
self._create_network()
db_inst = fakes.stub_instance(id=1, nw_cache=[], security_groups=[])
_context = context.get_admin_context()
instance = instance_obj.Instance._from_db_object(
_context, instance_obj.Instance(), db_inst,
expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS)
neutron = neutron_api.API()
with mock.patch.object(nova.db, 'instance_get_by_uuid',
return_value=db_inst):
neutron.allocate_for_instance(_context, instance,
security_groups=[sg['id']])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s'
% sg['id'])
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, sg['id'])
def test_associate_non_running_instance(self):
# Neutron does not care if the instance is running or not. When the
# instances is detected by nuetron it will push down the security
# group policy to it.
pass
def test_associate_already_associated_security_group_to_instance(self):
# Neutron security groups does not raise an error if you update a
# port adding a security group to it that was already associated
# to the port. This is because PUT semantics are used.
pass
def test_associate(self):
sg = self._create_sg_template().get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg['id']],
device_id=test_security_groups.FAKE_UUID1)
self.stubs.Set(nova.db, 'instance_get',
test_security_groups.return_server)
body = dict(addSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.manager._addSecurityGroup(req, '1', body)
def test_associate_duplicate_names(self):
sg1 = self._create_security_group(name='sg1',
description='sg1')['security_group']
self._create_security_group(name='sg1',
description='sg1')['security_group']
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg1['id']],
device_id=test_security_groups.FAKE_UUID1)
self.stubs.Set(nova.db, 'instance_get',
test_security_groups.return_server)
body = dict(addSecurityGroup=dict(name="sg1"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPConflict,
self.manager._addSecurityGroup, req, '1', body)
def test_associate_port_security_enabled_true(self):
sg = self._create_sg_template().get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg['id']],
port_security_enabled=True,
device_id=test_security_groups.FAKE_UUID1)
self.stubs.Set(nova.db, 'instance_get',
test_security_groups.return_server)
body = dict(addSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.manager._addSecurityGroup(req, '1', body)
def test_associate_port_security_enabled_false(self):
self._create_sg_template().get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], port_security_enabled=False,
device_id=test_security_groups.FAKE_UUID1)
self.stubs.Set(nova.db, 'instance_get',
test_security_groups.return_server)
body = dict(addSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup,
req, '1', body)
def test_disassociate_by_non_existing_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get',
test_security_groups.return_server)
body = dict(removeSecurityGroup=dict(name='non-existing'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._removeSecurityGroup, req, '1', body)
def test_disassociate_non_running_instance(self):
# Neutron does not care if the instance is running or not. When the
# instances is detected by neutron it will push down the security
# group policy to it.
pass
def test_disassociate_already_associated_security_group_to_instance(self):
# Neutron security groups does not raise an error if you update a
# port adding a security group to it that was already associated
# to the port. This is because PUT semantics are used.
pass
def test_disassociate(self):
sg = self._create_sg_template().get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg['id']],
device_id=test_security_groups.FAKE_UUID1)
self.stubs.Set(nova.db, 'instance_get',
test_security_groups.return_server)
body = dict(removeSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.manager._removeSecurityGroup(req, '1', body)
def test_get_raises_no_unique_match_error(self):
def fake_find_resourceid_by_name_or_id(client, param, name,
project_id=None):
raise n_exc.NeutronClientNoUniqueMatch()
self.stubs.Set(neutronv20, 'find_resourceid_by_name_or_id',
fake_find_resourceid_by_name_or_id)
security_group_api = self.controller.security_group_api
self.assertRaises(exception.NoUniqueMatch, security_group_api.get,
context.get_admin_context(), 'foobar')
def test_get_instances_security_groups_bindings(self):
servers = [{'id': test_security_groups.FAKE_UUID1},
{'id': test_security_groups.FAKE_UUID2}]
sg1 = self._create_sg_template(name='test1').get('security_group')
sg2 = self._create_sg_template(name='test2').get('security_group')
# test name='' is replaced with id
sg3 = self._create_sg_template(name='').get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg1['id'],
sg2['id']],
device_id=test_security_groups.FAKE_UUID1)
self._create_port(
network_id=net['network']['id'], security_groups=[sg2['id'],
sg3['id']],
device_id=test_security_groups.FAKE_UUID2)
expected = {test_security_groups.FAKE_UUID1: [{'name': sg1['name']},
{'name': sg2['name']}],
test_security_groups.FAKE_UUID2: [{'name': sg2['name']},
{'name': sg3['id']}]}
security_group_api = self.controller.security_group_api
bindings = (
security_group_api.get_instances_security_groups_bindings(
context.get_admin_context(), servers))
self.assertEqual(bindings, expected)
def test_get_instance_security_groups(self):
sg1 = self._create_sg_template(name='test1').get('security_group')
sg2 = self._create_sg_template(name='test2').get('security_group')
# test name='' is replaced with id
sg3 = self._create_sg_template(name='').get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg1['id'],
sg2['id'],
sg3['id']],
device_id=test_security_groups.FAKE_UUID1)
expected = [{'name': sg1['name']}, {'name': sg2['name']},
{'name': sg3['id']}]
security_group_api = self.controller.security_group_api
sgs = security_group_api.get_instance_security_groups(
context.get_admin_context(), test_security_groups.FAKE_UUID1)
self.assertEqual(sgs, expected)
@mock.patch('nova.network.security_group.neutron_driver.SecurityGroupAPI.'
'get_instances_security_groups_bindings')
def test_get_security_group_empty_for_instance(self, neutron_sg_bind_mock):
servers = [{'id': test_security_groups.FAKE_UUID1}]
neutron_sg_bind_mock.return_value = {}
security_group_api = self.controller.security_group_api
ctx = context.get_admin_context()
sgs = security_group_api.get_instance_security_groups(ctx,
test_security_groups.FAKE_UUID1)
neutron_sg_bind_mock.assert_called_once_with(ctx, servers, False)
self.assertEqual([], sgs)
def test_create_port_with_sg_and_port_security_enabled_true(self):
sg1 = self._create_sg_template(name='test1').get('security_group')
net = self._create_network()
self._create_port(
network_id=net['network']['id'], security_groups=[sg1['id']],
port_security_enabled=True,
device_id=test_security_groups.FAKE_UUID1)
security_group_api = self.controller.security_group_api
sgs = security_group_api.get_instance_security_groups(
context.get_admin_context(), test_security_groups.FAKE_UUID1)
self.assertEqual(sgs, [{'name': 'test1'}])
def test_create_port_with_sg_and_port_security_enabled_false(self):
sg1 = self._create_sg_template(name='test1').get('security_group')
net = self._create_network()
self.assertRaises(exception.SecurityGroupCannotBeApplied,
self._create_port,
network_id=net['network']['id'],
security_groups=[sg1['id']],
port_security_enabled=False,
device_id=test_security_groups.FAKE_UUID1)
class TestNeutronSecurityGroupsV2(TestNeutronSecurityGroupsV21):
    """Same suite as V21 but wired to the legacy v2 contrib controllers."""
    # Controller classes consumed by the inherited setUp machinery.
    controller_cls = security_groups.SecurityGroupController
    server_secgrp_ctl_cls = security_groups.ServerSecurityGroupController
    secgrp_act_ctl_cls = security_groups.SecurityGroupActionController
class TestNeutronSecurityGroupRulesTestCase(TestNeutronSecurityGroupsTestCase):
    """Base test case for security group *rule* tests against Neutron.

    Pre-seeds the fake neutron client with two empty security groups
    (fixed UUIDs) so rule tests have known parent groups to attach to.
    """

    def setUp(self):
        super(TestNeutronSecurityGroupRulesTestCase, self).setUp()
        id1 = '11111111-1111-1111-1111-111111111111'
        sg_template1 = test_security_groups.security_group_template(
            security_group_rules=[], id=id1)
        id2 = '22222222-2222-2222-2222-222222222222'
        sg_template2 = test_security_groups.security_group_template(
            security_group_rules=[], id=id2)
        self.controller_sg = security_groups.SecurityGroupController()
        neutron = get_client()
        # Inject the templates straight into MockClient's shared store.
        neutron._fake_security_groups[id1] = sg_template1
        neutron._fake_security_groups[id2] = sg_template2

    def tearDown(self):
        neutron_api.get_client = self.original_client
        get_client()._reset()
        # Bug fix: this previously called
        # super(TestNeutronSecurityGroupsTestCase, self).tearDown(), i.e.
        # it named the *parent* class, which skips the parent's own
        # tearDown in the MRO. Naming this class runs the full chain; the
        # parent's restore/reset statements are idempotent, so repeating
        # them is harmless.
        super(TestNeutronSecurityGroupRulesTestCase, self).tearDown()
class _TestNeutronSecurityGroupRulesBase(object):
    """Mixin with rule tests shared by the v2 and v2.1 combiner classes.

    Relies on attributes supplied by the sibling base classes it is mixed
    with: ``self.controller`` (rules controller), ``self.controller_sg``,
    and the pre-created groups ``self.sg1``/``self.sg2`` — presumably set
    up by test_security_groups' setUp; verify against those bases.
    """

    def test_create_add_existing_rules_by_cidr(self):
        # Creating the exact same CIDR rule twice must fail the second time.
        sg = test_security_groups.security_group_template()
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.controller_sg.create(req, {'security_group': sg})
        rule = test_security_groups.security_group_rule_template(
            cidr='15.0.0.0/8', parent_group_id=self.sg2['id'])
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        self.controller.create(req, {'security_group_rule': rule})
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group_rule': rule})

    def test_create_add_existing_rules_by_group_id(self):
        # Same duplicate check, but for a remote-group rule.
        sg = test_security_groups.security_group_template()
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.controller_sg.create(req, {'security_group': sg})
        rule = test_security_groups.security_group_rule_template(
            group=self.sg1['id'], parent_group_id=self.sg2['id'])
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        self.controller.create(req, {'security_group_rule': rule})
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group_rule': rule})

    def test_delete(self):
        rule = test_security_groups.security_group_rule_template(
            parent_group_id=self.sg2['id'])
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        res_dict = self.controller.create(req, {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules/%s'
                                      % security_group_rule['id'])
        self.controller.delete(req, security_group_rule['id'])

    def test_create_rule_quota_limit(self):
        # Enforced by neutron
        pass
class TestNeutronSecurityGroupRulesV2(
        _TestNeutronSecurityGroupRulesBase,
        test_security_groups.TestSecurityGroupRulesV2,
        TestNeutronSecurityGroupRulesTestCase):
    """Combiner: runs the shared rule tests against the v2 API."""
    pass
class TestNeutronSecurityGroupRulesV21(
        _TestNeutronSecurityGroupRulesBase,
        test_security_groups.TestSecurityGroupRulesV21,
        TestNeutronSecurityGroupRulesTestCase):
    """Combiner: runs the shared rule tests against the v2.1 API."""
    pass
class TestNeutronSecurityGroupsOutputTest(TestNeutronSecurityGroupsTestCase):
    """End-to-end tests that security groups appear in server API responses.

    Drives the full WSGI app (servers endpoint) rather than calling the
    controllers directly, and checks the security_groups attribute in the
    JSON bodies of create/show/detail responses.
    """

    content_type = 'application/json'

    def setUp(self):
        super(TestNeutronSecurityGroupsOutputTest, self).setUp()
        fakes.stub_out_nw_api(self.stubs)
        self.controller = security_groups.SecurityGroupController()
        # Stub compute API calls so no real instances are involved.
        self.stubs.Set(compute.api.API, 'get',
                       test_security_groups.fake_compute_get)
        self.stubs.Set(compute.api.API, 'get_all',
                       test_security_groups.fake_compute_get_all)
        self.stubs.Set(compute.api.API, 'create',
                       test_security_groups.fake_compute_create)
        self.stubs.Set(neutron_driver.SecurityGroupAPI,
                       'get_instances_security_groups_bindings',
                       (test_security_groups.
                        fake_get_instances_security_groups_bindings))
        # Load only the Security_groups extension into the API.
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Security_groups'])

    def _make_request(self, url, body=None):
        # Issue a request against the full WSGI app; POST when a body is
        # given, GET otherwise.
        req = webob.Request.blank(url)
        if body:
            req.method = 'POST'
            req.body = self._encode_body(body)
        req.content_type = self.content_type
        req.headers['Accept'] = self.content_type
        res = req.get_response(fakes.wsgi_app(init_only=('servers',)))
        return res

    def _encode_body(self, body):
        return jsonutils.dumps(body)

    def _get_server(self, body):
        return jsonutils.loads(body).get('server')

    def _get_servers(self, body):
        return jsonutils.loads(body).get('servers')

    def _get_groups(self, server):
        return server.get('security_groups')

    def test_create(self):
        url = '/v2/fake/servers'
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        security_groups = [{'name': 'fake-2-0'}, {'name': 'fake-2-1'}]
        for security_group in security_groups:
            sg = test_security_groups.security_group_template(
                name=security_group['name'])
            self.controller.create(req, {'security_group': sg})
        server = dict(name='server_test', imageRef=image_uuid, flavorRef=2,
                      security_groups=security_groups)
        res = self._make_request(url, {'server': server})
        self.assertEqual(res.status_int, 202)
        server = self._get_server(res.body)
        for i, group in enumerate(self._get_groups(server)):
            name = 'fake-2-%s' % i
            self.assertEqual(group.get('name'), name)

    def test_create_server_get_default_security_group(self):
        # A server created with no groups specified should report the
        # 'default' group.
        url = '/v2/fake/servers'
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
        res = self._make_request(url, {'server': server})
        self.assertEqual(res.status_int, 202)
        server = self._get_server(res.body)
        group = self._get_groups(server)[0]
        self.assertEqual(group.get('name'), 'default')

    def test_show(self):
        def fake_get_instance_security_groups(inst, context, id):
            return [{'name': 'fake-2-0'}, {'name': 'fake-2-1'}]

        self.stubs.Set(neutron_driver.SecurityGroupAPI,
                       'get_instance_security_groups',
                       fake_get_instance_security_groups)

        url = '/v2/fake/servers'
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        security_groups = [{'name': 'fake-2-0'}, {'name': 'fake-2-1'}]
        for security_group in security_groups:
            sg = test_security_groups.security_group_template(
                name=security_group['name'])
            self.controller.create(req, {'security_group': sg})
        server = dict(name='server_test', imageRef=image_uuid, flavorRef=2,
                      security_groups=security_groups)
        res = self._make_request(url, {'server': server})
        self.assertEqual(res.status_int, 202)
        server = self._get_server(res.body)
        for i, group in enumerate(self._get_groups(server)):
            name = 'fake-2-%s' % i
            self.assertEqual(group.get('name'), name)
        # Test that show (GET) returns the same information as create (POST)
        url = '/v2/fake/servers/' + test_security_groups.UUID3
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        server = self._get_server(res.body)
        for i, group in enumerate(self._get_groups(server)):
            name = 'fake-2-%s' % i
            self.assertEqual(group.get('name'), name)

    def test_detail(self):
        url = '/v2/fake/servers/detail'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        for i, server in enumerate(self._get_servers(res.body)):
            for j, group in enumerate(self._get_groups(server)):
                name = 'fake-%s-%s' % (i, j)
                self.assertEqual(group.get('name'), name)

    def test_no_instance_passthrough_404(self):
        # An unknown instance uuid must surface as a 404 from the app.
        def fake_compute_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stubs.Set(compute.api.API, 'get', fake_compute_get)
        url = '/v2/fake/servers/70f6db34-de8d-4fbd-aafb-4065bdfa6115'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 404)
def get_client(context=None, admin=False):
    """Drop-in replacement for ``neutron_api.get_client``.

    Ignores *context* and *admin* and always hands back a fresh
    :class:`MockClient` (whose backing stores are class-level, so every
    instance sees the same fake state).
    """
    return MockClient()
class MockClient(object):
    """In-memory stand-in for the neutron client used by these tests.

    Stores networks/subnets/ports/security-groups/rules in class-level
    dicts so state survives repeated ``get_client()`` calls; tests must
    call :meth:`_reset` between runs.
    """

    # Needs to be global to survive multiple calls to get_client.
    _fake_security_groups = {}
    _fake_ports = {}
    _fake_networks = {}
    _fake_subnets = {}
    _fake_security_group_rules = {}

    def __init__(self):
        # add default security group
        if not len(self._fake_security_groups):
            ret = {'name': 'default', 'description': 'default',
                   'tenant_id': 'fake_tenant', 'security_group_rules': [],
                   'id': str(uuid.uuid4())}
            self._fake_security_groups[ret['id']] = ret

    def _reset(self):
        """Clear all shared fake state (call between tests)."""
        self._fake_security_groups.clear()
        self._fake_ports.clear()
        self._fake_networks.clear()
        self._fake_subnets.clear()
        self._fake_security_group_rules.clear()

    def create_security_group(self, body=None):
        s = body.get('security_group')
        # Mirror neutron's length limit on name/description.
        if len(s.get('name')) > 255 or len(s.get('description')) > 255:
            msg = 'Security Group name greater than 255'
            raise n_exc.NeutronClientException(message=msg, status_code=401)
        ret = {'name': s.get('name'), 'description': s.get('description'),
               'tenant_id': 'fake', 'security_group_rules': [],
               'id': str(uuid.uuid4())}
        self._fake_security_groups[ret['id']] = ret
        return {'security_group': ret}

    def create_network(self, body):
        n = body.get('network')
        ret = {'status': 'ACTIVE', 'subnets': [], 'name': n.get('name'),
               'admin_state_up': n.get('admin_state_up', True),
               'tenant_id': 'fake_tenant',
               'id': str(uuid.uuid4())}
        if 'port_security_enabled' in n:
            ret['port_security_enabled'] = n['port_security_enabled']
        self._fake_networks[ret['id']] = ret
        return {'network': ret}

    def create_subnet(self, body):
        s = body.get('subnet')
        try:
            net = self._fake_networks[s.get('network_id')]
        except KeyError:
            msg = 'Network %s not found' % s.get('network_id')
            raise n_exc.NeutronClientException(message=msg, status_code=404)
        ret = {'name': s.get('name'), 'network_id': s.get('network_id'),
               'tenant_id': 'fake_tenant', 'cidr': s.get('cidr'),
               'id': str(uuid.uuid4()), 'gateway_ip': '10.0.0.1'}
        net['subnets'].append(ret['id'])
        self._fake_networks[net['id']] = net
        self._fake_subnets[ret['id']] = ret
        return {'subnet': ret}

    def create_port(self, body):
        p = body.get('port')
        ret = {'status': 'ACTIVE', 'id': str(uuid.uuid4()),
               'mac_address': p.get('mac_address', 'fa:16:3e:b8:f5:fb'),
               'device_id': p.get('device_id', str(uuid.uuid4())),
               'admin_state_up': p.get('admin_state_up', True),
               'security_groups': p.get('security_groups', []),
               'network_id': p.get('network_id'),
               'binding:vnic_type':
                   p.get('binding:vnic_type') or model.VNIC_TYPE_NORMAL}
        network = self._fake_networks[p['network_id']]
        # Port-level setting wins over the network-level default.
        if 'port_security_enabled' in p:
            ret['port_security_enabled'] = p['port_security_enabled']
        elif 'port_security_enabled' in network:
            ret['port_security_enabled'] = network['port_security_enabled']
        port_security = ret.get('port_security_enabled', True)
        # port_security must be True if security groups are present
        if not port_security and ret['security_groups']:
            raise exception.SecurityGroupCannotBeApplied()
        if network['subnets']:
            ret['fixed_ips'] = [{'subnet_id': network['subnets'][0],
                                 'ip_address': '10.0.0.1'}]
        # With port security on and no explicit groups, neutron attaches
        # the tenant's 'default' group.
        if not ret['security_groups'] and (port_security is None or
                                           port_security is True):
            for security_group in self._fake_security_groups.values():
                if security_group['name'] == 'default':
                    ret['security_groups'] = [security_group['id']]
                    break
        self._fake_ports[ret['id']] = ret
        return {'port': ret}

    def create_security_group_rule(self, body):
        # does not handle bulk case so just picks rule[0]
        r = body.get('security_group_rules')[0]
        fields = ['direction', 'protocol', 'port_range_min', 'port_range_max',
                  'ethertype', 'remote_ip_prefix', 'tenant_id',
                  'security_group_id', 'remote_group_id']
        ret = {}
        for field in fields:
            ret[field] = r.get(field)
        ret['id'] = str(uuid.uuid4())
        self._fake_security_group_rules[ret['id']] = ret
        return {'security_group_rules': [ret]}

    def show_security_group(self, security_group, **_params):
        try:
            sg = self._fake_security_groups[security_group]
        except KeyError:
            msg = 'Security Group %s not found' % security_group
            raise n_exc.NeutronClientException(message=msg, status_code=404)
        for security_group_rule in self._fake_security_group_rules.values():
            if security_group_rule['security_group_id'] == sg['id']:
                sg['security_group_rules'].append(security_group_rule)
        return {'security_group': sg}

    def show_security_group_rule(self, security_group_rule, **_params):
        try:
            return {'security_group_rule':
                    self._fake_security_group_rules[security_group_rule]}
        except KeyError:
            msg = 'Security Group rule %s not found' % security_group_rule
            raise n_exc.NeutronClientException(message=msg, status_code=404)

    def show_network(self, network, **_params):
        try:
            return {'network':
                    self._fake_networks[network]}
        except KeyError:
            msg = 'Network %s not found' % network
            raise n_exc.NeutronClientException(message=msg, status_code=404)

    def show_port(self, port, **_params):
        try:
            return {'port':
                    self._fake_ports[port]}
        except KeyError:
            msg = 'Port %s not found' % port
            raise n_exc.NeutronClientException(message=msg, status_code=404)

    def show_subnet(self, subnet, **_params):
        try:
            return {'subnet':
                    self._fake_subnets[subnet]}
        except KeyError:
            # Bug fix: the message previously said 'Port %s not found'.
            msg = 'Subnet %s not found' % subnet
            raise n_exc.NeutronClientException(message=msg, status_code=404)

    def list_security_groups(self, **_params):
        ret = []
        for security_group in self._fake_security_groups.values():
            names = _params.get('name')
            if names:
                if not isinstance(names, list):
                    names = [names]
                for name in names:
                    if security_group.get('name') == name:
                        ret.append(security_group)
            ids = _params.get('id')
            if ids:
                if not isinstance(ids, list):
                    ids = [ids]
                for id in ids:
                    if security_group.get('id') == id:
                        ret.append(security_group)
            elif not (names or ids):
                ret.append(security_group)
        return {'security_groups': ret}

    def list_networks(self, **_params):
        # neutronv2/api.py _get_available_networks calls this assuming
        # search_opts filter "shared" is implemented and not ignored
        shared = _params.get("shared", None)
        if shared:
            return {'networks': []}
        else:
            return {'networks': list(self._fake_networks.values())}

    def list_ports(self, **_params):
        ret = []
        device_id = _params.get('device_id')
        for port in self._fake_ports.values():
            if device_id:
                if port['device_id'] in device_id:
                    ret.append(port)
            else:
                ret.append(port)
        return {'ports': ret}

    def list_subnets(self, **_params):
        return {'subnets': list(self._fake_subnets.values())}

    def list_floatingips(self, **_params):
        return {'floatingips': []}

    def delete_security_group(self, security_group):
        self.show_security_group(security_group)
        ports = self.list_ports()
        for port in ports.get('ports'):
            for sg_port in port['security_groups']:
                if sg_port == security_group:
                    msg = ('Unable to delete Security group %s in use'
                           % security_group)
                    raise n_exc.NeutronClientException(message=msg,
                                                       status_code=409)
        del self._fake_security_groups[security_group]

    def delete_security_group_rule(self, security_group_rule):
        self.show_security_group_rule(security_group_rule)
        del self._fake_security_group_rules[security_group_rule]

    def delete_network(self, network):
        self.show_network(network)
        self._check_ports_on_network(network)
        # Bug fix: snapshot values() first -- deleting from the dict while
        # iterating it raises RuntimeError on Python 3.
        for subnet in list(self._fake_subnets.values()):
            if subnet['network_id'] == network:
                del self._fake_subnets[subnet['id']]
        del self._fake_networks[network]

    def delete_subnet(self, subnet):
        subnet = self.show_subnet(subnet).get('subnet')
        self._check_ports_on_network(subnet['network_id'])
        # Bug fix: previously `del self._fake_subnet[subnet]` -- the
        # attribute is `_fake_subnets` (plural) and `subnet` has been
        # rebound to the subnet dict, so the key must be its id.
        del self._fake_subnets[subnet['id']]

    def delete_port(self, port):
        self.show_port(port)
        del self._fake_ports[port]

    def update_port(self, port, body=None):
        self.show_port(port)
        self._fake_ports[port].update(body['port'])
        return {'port': self._fake_ports[port]}

    def list_extensions(self, **_parms):
        return {'extensions': []}

    def _check_ports_on_network(self, network):
        # Raise 409 if any port is still attached to the network.
        # Bug fix: previously iterated the response dict itself (yielding
        # the key 'ports') instead of the list under 'ports'.
        ports = self.list_ports()
        for port in ports.get('ports'):
            if port['network_id'] == network:
                msg = ('Unable to complete operation on network %s. There is '
                       'one or more ports still in use on the network'
                       % network)
                raise n_exc.NeutronClientException(message=msg,
                                                   status_code=409)
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
# Import the relevant PTS classes and modules
from pts.core.basics.configuration import ConfigurationDefinition
from pts.core.remote.host import find_host_ids
from pts.core.launch.analyser import all_steps
from pts.core.config.analyse_simulation import definition as analysis_definition
# -----------------------------------------------------------------
# IDs of all configured remote hosts; used both as the default and the
# allowed choices for the 'remotes' option below.
host_ids = find_host_ids()

# -----------------------------------------------------------------

# Create the configuration definition
definition = ConfigurationDefinition()

# Assignment file
definition.add_positional_optional("assignment", "file_path", "path of assignment file")

# Status file
definition.add_optional("status", "file_path", "path of status file")

# Remotes for which to find corresponding simulations
definition.add_optional("remotes", "string_list", "remote hosts for which to look for matching simulations (not necessary when assignment is specified)", default=host_ids, choices=host_ids)

# To specify the simulations
definition.add_optional("simulation_names", "string_list", "names of the simulations to look for")
definition.add_optional("simulation_ids", "integer_list", "IDs of the simulations (only if one remote host is specified)")
definition.add_flag("from_directories", "use directory names as simulation names")

# Timing and memory table
definition.add_optional("timing", "file_path", "timing table path")
definition.add_optional("memory", "file_path", "memory table path")

# Commands to be run
definition.add_optional("commands", "string_list", "commands to be run in interactive mode")

# Interactive mode (default=None means: decide automatically elsewhere)
definition.add_flag("interactive", "use interactive mode", default=None)

# Offline?
definition.add_flag("offline", "offline mode")

# Dry: don't actually launch any simulation
definition.add_flag("dry", "dry mode")

# Fix success flags
definition.add_flag("fix_success", "fix success flags in assignment table if necessary")

# Backup
definition.add_flag("backup_simulations", "backup simulation files")
definition.add_flag("backup_assignment", "backup assignment table")
definition.add_optional("backup_path", "directory_path", "backup directory")
definition.add_optional("backup_dir_path", "directory_path", "directory to contain the backup directory")
definition.add_optional("backup_dirname", "string", "name of the backup directory")

# -----------------------------------------------------------------

# Flags
definition.add_flag("local", "treat simulations without a match as local simulations", False)
definition.add_flag("warn_local", "give a warning for each simulation that didn't have a match for any host ID")
definition.add_flag("success", "success flag to fill in into the assignment table for all simulations", True)

# -----------------------------------------------------------------

# Move simulations
definition.add_flag("move", "move simulations", False)
definition.add_flag("move_running", "move running simulations", False)
definition.add_optional("move_simulations", "string_list", "simulation names for moving")
definition.add_optional("move_remotes", "string_list", "host IDs for moving simulations from")
definition.add_flag("prompt_simulations_move", "prompt before moving a particular simulation", None)
definition.add_optional("move_to_host", "host", "move simulations to this remote host")

# -----------------------------------------------------------------

# Showing
definition.add_flag("show", "showing", True)
definition.add_flag("show_assignment", "show the assignment scheme")
definition.add_flag("show_status", "show the simulation status")
definition.add_flag("show_runtimes", "show runtimes")
definition.add_flag("show_memory", "show memory")

# -----------------------------------------------------------------

# Simulation phases for which runtimes/memory usage can be plotted
runtimes_phases = ["total", "setup", "stellar", "spectra", "dust", "writing", "waiting", "communication", "intermediate"]
memory_phases = ["total", "setup", "stellar", "spectra", "dust", "writing"]

# Plotting
definition.add_flag("plot", "plotting", True)
definition.add_flag("plot_runtimes", "plot runtimes")
definition.add_optional("plot_runtimes_phases", "string_list", "simulation phases for which to plot the runtimes", runtimes_phases, choices=runtimes_phases)
definition.add_flag("plot_memory", "plot memory usage")
definition.add_optional("plot_memory_phases", "string_list", "simulation phases for which to plot the memory usage", memory_phases, choices=memory_phases)

# Observed SEDs as reference for plotting simulation SEDs
definition.add_optional("reference_seds", "string_filepath_dictionary", "file paths of SEDs to use as reference for plotting simulated SEDs")

# -----------------------------------------------------------------

# Caching simulation output
definition.add_optional("cache_path", "directory_path", "path to be used for caching")
definition.add_optional("cache_root", "directory_path", "path of directory up in the hierarchy with respect to the simulation directories that should be set equivalent to the 'cache_path' and the intermediate directory structure should be created if necessary")
definition.add_flag("cache_output", "cache the output of retrieved simulations")
definition.add_flag("cache_datacubes", "cache the datacubes of retrieved simulations")
definition.add_flag("cache_misc", "cache the misc output of analysed simulations")
definition.add_flag("cache_images", "cache the images output of analysed simulations")
definition.add_flag("cache_after_analysis", "cache immediately after a simulation has been analysed")
# -----------------------------------------------------------------
# Writing
definition.add_flag("write", "writing", True)
# Help-text typo fixed: "assignent" -> "assignment"
definition.add_flag("write_assignment", "write the assignment scheme", None)
definition.add_flag("write_status", "write the status table", False)
definition.add_flag("write_moved", "write the moved simulations table", False)
definition.add_flag("write_relaunched", "write the relaunched simulations table", False)
definition.add_flag("write_commands", "write the commands", False)
# -----------------------------------------------------------------

# Retrieve?
definition.add_flag("retrieve", "retrieve finished simulations", False)

# -----------------------------------------------------------------

# Analyse?
definition.add_flag("analyse", "run analysis", False)

# -----------------------------------------------------------------

# (Re)analyse only certain simulations
definition.add_optional("analyse_simulations", "string_list", "simulation names for analysis")
definition.add_optional("reanalyse_simulations", "string_list", "simulation names for re-analysis")

# (Re)analyse only for certain host IDs
definition.add_optional("analyse_remotes", "string_list", "host IDs for analysis")
definition.add_optional("reanalyse_remotes", "string_list", "host IDs for re-analysis")

# Prompt
definition.add_flag("prompt_simulations_analysis", "prompt before analysing a particular simulation (by default, all retrieved simulations are analysed without prompt)", False)
# Help-text typos fixed: "promted" -> "prompted" and "renalaysis_simulations"
# -> "reanalyse_simulations" (the actual option name defined above)
definition.add_flag("prompt_simulations_reanalysis", "prompt before re-analysing a particular simulation (by default, will be prompted if reanalyse_simulations are not defined)", None)

# Add section for analysis options (also for re-analysis)
definition.import_section("analysis", "analyser options", analysis_definition)

# Re-analysis options
# steps: extraction, plotting, misc, batch, scaling
# features:
#  - extraction: progress, timeline, memory
#  - plotting: = progress, timeline, memory, seds, grids
#  - misc: = rgb, animations, fluxes, fluxes_from_images, images
definition.add_optional("reanalyse", "string_list", "apply re-analysis of these steps", choices=all_steps)
definition.add_optional("features_reanalysis", "string_list", "re-analyse only certain features (if a single re-analysis step is defined)")

# -----------------------------------------------------------------

definition.add_optional("info_tables", "filepath_list", "tables with additional information for the simulations")

# -----------------------------------------------------------------

definition.add_flag("info_scientific", "use scientific notation for formatting info values (default is automatic)", None)
definition.add_optional("info_ndecimal_places", "positive_integer", "number of decimal places for formatting info values", 3)

# -----------------------------------------------------------------

# Shared input?
definition.add_flag("shared_input", "whether the different simulations share their input files", None)

# -----------------------------------------------------------------

# List of local screen script paths
definition.add_optional("screen_scripts", "filepath_list", "filepaths of local screen scripts associated with the simulations")

# -----------------------------------------------------------------
#!/usr/bin/env python
__doc__ = '''art.py -- Approximate Randomization Test
This script carries out a significance test on the output of an
instance-based machine learner based on the theory of
approximate randomization tests:
Eric W. Noreen, Computer-intensive Methods for Testing Hypotheses: An Introduction, John Wiley & Sons, New York, NY, USA, 1989.
No assumptions are made on the distribution of the variables. The only assumption made is that there are
no inter-instance dependencies, i.e. knowing the class label of 1 instance should not help
knowing the class label of another instance. This assumption is violated in the output from the MBT (memory-based tagger).
A nice example of why no inter-instance dependencies should be present is in:
Alexander Yeh, More accurate tests for the statistical significance of result differences,
in: Proceedings of the 18th International Conference on Computational Linguistics, Volume 2,
pages 947-953, 2000.
TEST STATISTICS
At the moment, the test statitics tested are differences in macro-recall, macro-precision, macro-f-score, micro-f-score, and accuracy.
This can be changed by changing the getscores() function.
DEPENDENCIES
This script depends on confusionmatrix.py and combinations.py (www.clips.ua.ac.be/~vincent/software.html)
and optionally scipy (www.scipy.org).
Copyright (c) 2013 CLiPS. All rights reserved.
# License: GNU General Public License, see http://www.clips.ua.ac.be/~vincent/scripts/LICENSE.txt
'''
__author__="Vincent Van Asch"
__date__="September 2013"
__version__="3.0.3"
__url__ = 'http://www.clips.ua.ac.be/~vincent/software.html'
import sys, os, time
import random
import getopt
from math import pow
try:
from scipy.stats import binom_test
except ImportError:
print >>sys.stderr, 'INFO: Could not import scipy (www.scipy.org): signtest is not available.'
try:
import confusionmatrix
except ImportError:
raise ImportError('''This script depends on confusionmatrix.py (www.clips.ua.ac.be/~vincent/software.html).
Place the script in the same folder as the art.py script.''')
try:
import combinations
except ImportError:
raise ImportError('''This script depends on combinations.py (www.clips.ua.ac.be/~vincent/software.html).
Place the script in the same folder as the art.py script.''')
def loginfo(s):
    '''Write message *s* to stderr, prefixed with a dd/mm/YYYY HH:MM:SS timestamp.'''
    stamp = time.strftime('%d/%m/%Y %H:%M:%S')
    sys.stderr.write('%s: %s\n' % (stamp, s))
def fread(fname, index=None, sep=None, encoding='utf8'):
    '''Reads in files as lists.

    fname: path of the file to read (~ is expanded)
    sep: feature separator passed to str.split (None = any whitespace)
    encoding: codec used to decode every line (Python 2 bytes -> unicode)
    index: if None, the elements of the output list are the full lines
           if int, the elements of the output list are string at position index
           if tuple, the elements of the output list slices from the full lines (as lists)

    Empty lines are skipped. Raises ValueError for a list/tuple index whose
    length is not 2 and TypeError for any other index type.
    '''
    output=[]
    with open(os.path.abspath(os.path.expanduser(fname)), 'rU') as f:
        for l in f:
            line = l.strip()
            if line:
                # Python 2 idiom: decode the byte string to unicode
                # (str has no .decode on Python 3 -- this module is Python 2).
                line = line.decode(encoding)
                if index is None:
                    output.append(line)
                else:
                    line = line.split(sep)
                    if isinstance(index, int):
                        output.append(line[index])
                    elif isinstance(index, (list, tuple)):
                        if len(index) != 2: raise ValueError('index should have length 2 not %d' %len(index))
                        output.append(line[index[0]:index[1]])
                    else:
                        raise TypeError('index should be None, int or tuple')
    return output
def strata_read(fname, sep=None, encoding='utf8'):
    '''Read a strata file into a nested dictionary.

    Each non-empty line has the form ``stratum group value value ...``.

    fname: path of the strata file (~ is expanded)
    sep: field separator passed to str.split (None = any whitespace)
    encoding: codec used to decode every line (Python 2 bytes -> unicode)

    Returns {stratum: {group: [float, ...]}}.
    '''
    out={}
    with open(os.path.abspath(os.path.expanduser(fname)), 'rU') as f:
        for l in f:
            line = l.strip().decode(encoding)
            if line:
                parts = line.split(sep)
                stratum = parts[0]
                group = parts[1]
                data = [float(x) for x in parts[2:]]
                # setdefault replaces the original's `stratum in out.keys()`
                # test, which built a fresh key list (O(n)) for every line
                # on Python 2; behavior is identical.
                out.setdefault(stratum, {})[group] = data
    return out
# Control character used to join the token labels of one sentence into a
# single string (chosen because it will not occur in real label text).
MBTSEP = '\x13'
def mbtread(fname, sep="<utt>"):
    '''Reads in the sentences from an mbt format file.

    fname: path of the MBT file (~ is expanded)
    sep: sentence separator (empty lines are also considered as sentence
    boundaries)

    Returns a list of strings.
    Each string is the concatenation (joined on MBTSEP) of the token
    labels of one sentence.
    '''
    output=[]
    sentence=[]
    with open(os.path.abspath(os.path.expanduser(fname)), 'rU') as f:
        for l in f:
            line = l.strip()
            if line and line != sep:
                # the last whitespace-separated field on a line is the label
                sentence.append(line.split()[-1])
            else:
                # sentence boundary: flush the labels collected so far
                if sentence: output.append(MBTSEP.join(sentence))
                sentence=[]
    # flush a trailing sentence that is not followed by a separator
    if sentence: output.append(MBTSEP.join(sentence))
    return output
def readtraining(fname, index=-1, sep=None):
    '''Read a training file and return the class-label distribution.

    fname: path of the training file (read with fread)
    index: position of the label field on each line (default: last field)
    sep: field separator passed through to fread

    Returns {label: number of occurrences}.
    '''
    counts = {}
    for label in fread(fname, sep=sep, index=index):
        counts[label] = counts.get(label, 0) + 1
    return counts
def signtest(gold, system1, system2):
    '''Sign test for labeling accuracy.

    Counts the discordant pairs: instances where the two systems disagree
    and one of them matches the gold label. Ties (s1 == s2) carry no
    information for a sign test and are skipped.

    BUGFIX: the original incremented s1correct whenever g == s1, so ties
    where BOTH systems were correct were wrongly credited to system1
    (contradicting its own comment). Callers in this module pass
    pre-reduced lists with s1 != s2 everywhere, for which the result is
    unchanged.

    Returns the two-sided binomial probability. Raises NameError when
    scipy's binom_test could not be imported at module load.
    '''
    assert len(gold) == len(system1) == len(system2)
    s1correct=0
    s2correct=0
    wrong=0
    for g, s1, s2 in zip(gold, system1, system2):
        if s1 == s2:
            # tie: both correct or both wrong -- not a discordant pair
            continue
        if g==s1:
            s1correct+=1
        elif g==s2:
            s2correct+=1
        else:
            wrong+=1
    # The total number of predictions that are only correctly predicted
    # by 1 system
    total = s1correct+s2correct
    # make sure we test the smallest because of
    # bug with unequal N in binom_test
    correct = min([s1correct, s2correct])
    try:
        p = binom_test(correct, total)
    except NameError:
        raise NameError('Module scipy (www.scipy.org) was not imported.')
    return p
def termsigntest(gold, system1, system2):
    '''Sign test for term extraction recall.

    gold/system1/system2: term collections; membership is tested with `in`.

    Counts, per gold term, whether only system1 retrieved it, whether
    system2 retrieved it, or whether it was missed. Returns the two-sided
    binomial probability over the counts. Requires scipy's binom_test
    (imported at module level).

    NOTE: the author explicitly flags this function as not validated;
    unlike signtest() it does not take min() of the two counts.
    '''
    print >>sys.stderr, 'WARNING: this function has not been validated'
    # True positives for only 1 system
    s1correct=0
    s2correct=0
    fn=0
    for t in gold:
        if t in system1:
            if t not in system2:
                s1correct+=1
        elif t in system2:
            s2correct+=1
        else:
            # missed by both systems (false negative)
            fn +=1
    # The total number of predictions that are only correctly predicted
    # by 1 system
    total = s1correct+s2correct
    try:
        p = binom_test(s1correct, total)
    except NameError:
        raise NameError('Module scipy (www.scipy.org) was not imported.')
    return p
def getscores(gold, system, training=None):
    '''
    Takes a gold and system list and returns a dictionary with
    macro/micro/lfb-micro averaged recall, precision and f-score, plus
    accuracy.

    If training is given it uses the class label counts from training to
    compute the scores.

    gold: a list of class labels
    system: a list of class labels (in the same order as gold)
    training: a dictionary:
        key: class label
        value: number of occurrences

    Returns a dictionary:
        key: performance measure name
        value: performance score
    '''
    assert len(gold) == len(system)
    # light mode for speed
    cm = confusionmatrix.ConfusionMatrix(light=True)
    # Feed the training label distribution, one pseudo-instance per count
    if training:
        for label, count in training.items():
            for _ in range(count):
                cm.add_training([label])
    # Feed the gold/system label pairs
    for g, s in zip(gold, system):
        cm.single_add(g, s)
    use_training = bool(training)
    levels = [('macro', confusionmatrix.MACRO),
              ('micro', confusionmatrix.MICRO),
              ('lfb-micro', confusionmatrix.MICROt)]
    measures = [('recall', confusionmatrix.RECALL),
                ('precision', confusionmatrix.PRECISION),
                ('f-score', confusionmatrix.FSCORE)]
    output = {'accuracy': cm.accuracy()}
    # Same nine 'level-av. measure' keys as before, built systematically
    for level_name, level in levels:
        for measure_name, measure in measures:
            key = '%s-av. %s' % (level_name, measure_name)
            output[key] = cm.averaged(level=level, score=measure, training=use_training)
    return output
def getscores2(gold, system, training=None):
    '''Set-style precision/recall/F1 of *system* terms against *gold*.

    training is accepted for scoring-interface compatibility and ignored.
    Returns {'recall': R, 'precision': P, 'f1-score': F}.
    '''
    hits = sum(1 for term in system if term in gold)
    precision = float(hits) / len(system)
    recall = float(hits) / len(gold)
    if precision and recall:
        fscore = 2 * precision * recall / (precision + recall)
    else:
        fscore = 0.0
    return {'recall': recall, 'precision': precision, 'f1-score': fscore}
def getscoresmbt(gold, system, training=None):
    '''Token-level accuracy for MBT-style sentence strings.

    gold/system: lists of sentences, each a MBTSEP-joined string of token
    labels (see mbtread). training is ignored (interface compatibility).
    Returns {'accuracy': fraction of matching tokens}.
    '''
    correct = 0
    total = 0
    for gold_sent, sys_sent in zip(gold, system):
        gold_labels = gold_sent.split(MBTSEP)
        sys_labels = sys_sent.split(MBTSEP)
        assert len(gold_labels) == len(sys_labels)
        total += len(gold_labels)
        correct += sum(1 for a, b in zip(gold_labels, sys_labels) if a == b)
    return {'accuracy': correct / float(total)}
def getscoresmbtmulti(gold, system, training=None, sep='_'):
    '''Per-label and micro-averaged F-scores for multi-label MBT output.

    gold/system: MBTSEP-joined sentence strings; each token carries one or
    more labels joined by *sep*. training is ignored.
    Returns {label: f-score, ..., 'micro-fscore': micro-averaged f-score}.
    '''
    cm = confusionmatrix.ConfusionMatrix(compute_none=True)
    for gold_sent, sys_sent in zip(gold, system):
        gold_labels = gold_sent.split(MBTSEP)
        sys_labels = sys_sent.split(MBTSEP)
        assert len(gold_labels) == len(sys_labels)
        for gold_tok, sys_tok in zip(gold_labels, sys_labels):
            gold_set = set(gold_tok.split(sep))
            # NOTE(review): the system side splits on the literal '_'
            # rather than *sep* -- preserved from the original; confirm
            # whether this asymmetry is intended.
            sys_set = set(sys_tok.split('_'))
            cm.add(list(gold_set), list(sys_set))
    scores = {}
    for label in cm.labels:
        scores[label] = cm.fscore(label)
    scores['micro-fscore'] = cm.averaged(level=confusionmatrix.MICRO, score=confusionmatrix.FSCORE)
    return scores
def average(dumy, values, training=None):
    '''Return {'mean': arithmetic mean of *values*}.

    dumy and training are ignored; they exist so this function matches the
    scoring-function interface (gold, system, training=...).
    '''
    total = sum(values)
    return {'mean': total / float(len(values))}
def teststatistic(gold, system1, system2, training=None, scoring=getscores, absolute=True):
    '''Per-measure difference between the two systems' scores.

    scoring: the function that calculates the performances
    absolute: if True : the absolute difference of system1 performance and system2 performance
              if False: system1 performance minus system2 performance

    Returns {measure name: difference}; a measure missing from one side
    contributes 0 for that side.
    '''
    scores1 = scoring(gold, system1, training=training)
    scores2 = scoring(gold, system2, training=training)
    diffs = {}
    # union of the measure names reported by either scoring call
    for name in set(scores1) | set(scores2):
        delta = scores1.get(name, 0) - scores2.get(name, 0)
        diffs[name] = abs(delta) if absolute else delta
    return diffs
def distribute(s):
    '''Distribute the elements of *s* randomly over two lists.

    Returns (batch1, batch2). Elements are taken from the back of *s* and
    each is appended to a randomly chosen batch (one random.choice call
    per element, preserving the original RNG consumption).
    '''
    left, right = [], []
    for item in reversed(s):
        random.choice([left, right]).append(item)
    return left, right
def getprobabilities(ngecounts, N, add=1, verbose=False):
    '''Calculates the probabilities from the ngecounts.

    The probabilities are calculated as:
        (nge + add)/(N + add)

    Returns a dictionary:
        keys: performance name
        value: probability

    ngecounts: a dictionary:
        keys: performance name
        value: nge (times the pseudo statistic reached the observed one)
    N: number of trials
    add: smoothing constant (1 for approximate randomization, 0 for exact)
    verbose: also pretty-print the probabilities (header to stderr,
        values to stdout)
    '''
    # Calculate probabilities
    probs={}
    for k, nge in ngecounts.items():
        prob = (nge + add)/float(N + add)
        probs[k] = prob
    if verbose:
        print >>sys.stderr, 'Probabilities for accepting H0:'
        # Python 2 idiom: keys() returns a sortable list
        names = probs.keys()
        names.sort()
        for name in names:
            print ' %-23s: %.5g' %(name, probs[name])
    return probs
def get_alternatives(l):
    '''Enumerate all 2**l binary assignment vectors of length *l*.

    Row j, bit i equals 1 - (j // 2**i) % 2: column i alternates in runs
    of 2**i, starting with 1. Used to drive the exact randomization.
    Returns a list of 2**l lists of 0/1 ints.
    '''
    nrows = 2 ** l
    rows = []
    for j in range(nrows):
        pattern = []
        for i in range(l):
            period = 2 ** i
            pattern.append(1 - (j // period) % 2)
        rows.append(pattern)
    return rows
def exactlabelingsignificance(gold, system1, system2, verbose=False, training=None, scoring=getscores, common=[], common_gold=[]):
    '''Carries out exact randomization over all 2**len(gold) label swaps.

    gold/system1/system2: only the instances on which the two systems
        disagree (see getdifference)
    common/common_gold: the shared predictions and their gold labels,
        appended unshuffled so the scores cover the full data
    training: class-label counts from the training file (or None)
    scoring: function(gold, system, training=...) -> {name: score}

    Returns {score name: probability of accepting H0}, unsmoothed (add=0).

    NOTE: the mutable default arguments common=[]/common_gold=[] are only
    safe because they are never mutated here.
    '''
    # number of permutations
    N = pow(2, len(gold))
    if verbose: loginfo('%d permutations' %N)
    if N > 5000000: raise ValueError('The number of permutations is too big. Aborting.')
    # the reference test statistics
    refdiffs = teststatistic(gold+common_gold, system1+common, system2+common, training=training, scoring=scoring)
    # Get all combinations
    size = len(gold)
    count=0
    systems = [system1, system2]
    ngecounts = {}
    # progress is reported roughly every 10% of the permutations
    if N >= 10:
        nom = int(N/10.0)
    else:
        nom=1
    alternatives = get_alternatives(size)
    while alternatives:
        alt = alternatives.pop()
        count+=1
        # alt[j] selects which system supplies prediction j for shuffle1;
        # shuffle2 gets the complementary prediction
        shuffle1 = [systems[k][j] for j,k in enumerate(alt)]
        shuffle2 = [systems[1-k][j] for j,k in enumerate(alt)]
        # the test statistics
        diffs = teststatistic(gold+common_gold, shuffle1+common, shuffle2+common, training=training, scoring=scoring)
        if verbose and not (count%nom): loginfo('Calculated permutation %d/%d' %(count, N))
        # count how often the pseudo statistic reaches the observed one
        for k in refdiffs.keys():
            pseudo = diffs[k]
            actual = refdiffs[k]
            if pseudo >= actual:
                ngecounts[k] = ngecounts.get(k, 0) + 1
            elif k not in ngecounts.keys():
                ngecounts[k]=0
    assert count == N
    assert set(ngecounts.keys()) == set(refdiffs.keys())
    # Calculate probabilities
    probs=getprobabilities(ngecounts, N, add=0, verbose=True)
    return probs
def labelingsignificance(gold, system1, system2, N=1000, verbose=False, training=None, scoring=getscores, show_probs=True, common=[], common_gold=[]):
    '''Calculate approximate randomization test for class labeling experiments

    Returns the probabilities for accepting H0 for
    macro-recall, macro-precision, macro-fscore, micro-fscore, accuracy

    training: the counts of the class labels in the training file
    N: number of iterations
    common/common_gold: predictions shared by both systems (and their gold
        labels); appended unshuffled so scores cover the full data.
        These mutable defaults are safe: they are never mutated here.
    '''
    # the reference test statistics
    refdiffs = teststatistic(gold+common_gold, system1+common, system2+common, training=training, scoring=scoring)
    # start shuffling: pair up the two predictions per instance
    source = [[s1,s2] for s1,s2 in zip(system1, system2)]
    # progress is reported roughly every 10% of the shuffles
    if N >= 10:
        nom = int(N/10.0)
    else:
        nom=1
    ngecounts={}
    for i in range(N):
        shuffle1=[]
        shuffle2=[]
        for preds in source:
            # randomly swap (or keep) the two system predictions in place
            random.shuffle(preds)
            shuffle1.append(preds[0])
            shuffle2.append(preds[1])
        # the test statistics
        diffs = teststatistic(gold+common_gold, shuffle1+common, shuffle2+common, training=training, scoring=scoring)
        # see whether the shuffled system performs better than the originals
        for k in refdiffs.keys():
            pseudo = diffs[k]
            actual = refdiffs[k]
            if pseudo >= actual:
                ngecounts[k] = ngecounts.get(k, 0) + 1
            elif k not in ngecounts.keys():
                ngecounts[k]=0
        if verbose and not ((i+1)%nom):
            loginfo('Calculated shuffle %d/%d' %(i+1, N))
            #getprobabilities(ngecounts, i+1, add=1, verbose=True)
    # Sign test check (only for the default scoring function);
    # NOTE: func_name is Python 2 only (__name__ on Python 3)
    if scoring.func_name == 'getscores':
        try:
            s = signtest(gold, system1, system2)
            if verbose: loginfo('Sign-test probability: %.4g' %s)
        except NameError:
            # scipy not available: silently skip the sanity check
            pass
    assert set(ngecounts.keys()) == set(refdiffs.keys())
    # Calculate probabilities
    probs=getprobabilities(ngecounts, N, add=1, verbose=show_probs)
    return probs
def exacttermsignificance(gold, system1, system2, verbose=False, absolute=False):
    '''Compute exact term significance.

    Enumerates every split of the system-unique terms over two pseudo
    systems (terms retrieved by both stay fixed) and compares the
    getscores2 statistics of each split against the observed difference.

    gold/system1/system2: term lists
    absolute: use |difference| as test statistic if True

    Returns {score name: probability of accepting H0}, unsmoothed (add=0).
    '''
    # Take unique terms: 'source' holds terms retrieved by exactly one
    # system, 'doubles' those retrieved by both
    source = []
    doubles=[]
    for t in list(set(system1+system2)):
        if t in system1 and t not in system2:
            source.append(t)
        elif t not in system1 and t in system2:
            source.append(t)
        else:
            doubles.append(t)
    # The number of combinations
    N=1
    for i in range(len(source)+1):
        N+=combinations.ncombinations(len(source), i)
    if verbose: loginfo('%d combinations' %N)
    if N > 5000000: raise ValueError('The number of permutations is too big. Aborting.')
    # the reference test statistics
    refdiffs = teststatistic(gold, system1, system2, scoring=getscores2, absolute=absolute)
    # progress is reported roughly every 10% of the combinations
    if N >= 10:
        nom = int(N/10.0)
    else:
        nom=1
    count=0
    ngecounts={}
    for i in range(len(source)+1):
        for subset in combinations.subsets(source, i):
            count+=1
            # subset goes to pseudo-system 1, the remainder to pseudo-system 2
            shuffle1 = list(subset)
            shuffle2 = []
            for x in source:
                if x not in shuffle1:
                    shuffle2.append(x)
            #print shuffle1, shuffle2, doubles
            # the test statistics
            diffs = teststatistic(gold, shuffle1+doubles, shuffle2+doubles, scoring=getscores2, absolute=absolute)
            # see whether the shuffled system performs better than the originals
            for k in refdiffs.keys():
                pseudo = diffs[k]
                actual = refdiffs[k]
                if pseudo >= actual:
                    ngecounts[k] = ngecounts.get(k, 0) + 1
                elif k not in ngecounts.keys():
                    ngecounts[k]=0
            if verbose and not ((count)%nom):
                loginfo('Calculated combination %d/%d' %(count, N))
                #getprobabilities(ngecounts, i+1, add=1, verbose=verbose)
    assert count == N
    assert set(ngecounts.keys()) == set(refdiffs.keys())
    # Calculate probabilities
    probs = getprobabilities(ngecounts, N, add=0, verbose=True)
    return probs
def termsignificance(gold, system1, system2, N=10000, verbose=False, absolute=False):
    '''Calculate randomized term significance.

    gold/system1/system2: term lists without duplicates (asserted)
    N: number of random redistributions of the system-unique terms
    absolute: use |difference| as the test statistic if True

    Returns {score name: probability of accepting H0}, add-1 smoothed.
    '''
    # Only uniques terms in a system
    assert len(set(gold)) == len(gold)
    assert len(set(system1)) == len(system1)
    assert len(set(system2)) == len(system2)
    # Get all terms that are unique for a system ('source'); terms
    # retrieved by both systems ('doubles') stay fixed during shuffling
    source = []
    doubles=[]
    news1=[]; news2=[]
    for t in list(set(system1+system2)):
        if t in system1 and t not in system2:
            source.append(t)
            news1.append(t)
        elif t not in system1 and t in system2:
            source.append(t)
            news2.append(t)
        else:
            doubles.append(t)
    # the reference test statistics
    refdiffs = teststatistic(gold, system1, system2, scoring=getscores2, absolute=absolute)
    # progress is reported roughly every 10% of the shuffles
    if N >= 10:
        nom = int(N/10.0)
    else:
        nom=1
    ngecounts={}
    for i in range(N):
        # randomly redistribute the system-unique terms over two pseudo systems
        shuffle1, shuffle2 = distribute(source)
        # the test statistics
        diffs = teststatistic(gold, shuffle1+doubles, shuffle2+doubles, scoring=getscores2, absolute=absolute)
        # see whether the shuffled system performs better than the originals
        for k in refdiffs.keys():
            pseudo = diffs[k]
            actual = refdiffs[k]
            if pseudo >= actual:
                ngecounts[k] = ngecounts.get(k, 0) + 1
            elif k not in ngecounts.keys():
                ngecounts[k]=0
        if verbose and not ((i+1)%nom):
            loginfo('Calculated shuffle %d/%d' %(i+1, N))
            #getprobabilities(ngecounts, i+1, add=1, verbose=verbose)
    assert set(ngecounts.keys()) == set(refdiffs.keys())
    # Calculate probabilities
    probs = getprobabilities(ngecounts, N, add=1, verbose=True)
    return probs
def getdifference(system1, system2, gold=None):
    '''
    Takes lists of labels and returns lists with only those
    entries for which s1!=s2.
    If the list gold is given, it also returns only the gold labels
    for those elements.

    Returns (new_system1, new_system2, new_gold, rest, common_gold):
        new_system1/new_system2: predictions where the systems differ
        new_gold: matching gold labels ([] when gold was not given)
        rest: the shared predictions (identical for both systems)
        common_gold: gold labels of the shared predictions

    Raises ValueError when the input lists differ in length.
    '''
    new_system1=[]
    new_system2=[]
    new_gold=[]
    rest1=[]
    rest2=[]
    common_gold=[]
    G = gold is not None
    if not G:
        # no gold given: any equal-length stand-in works, the gold values
        # are only copied into common_gold / (discarded) new_gold
        gold = system1[:]
    # BUGFIX: the original tested `len(system1) != len(system1) != len(gold)`,
    # comparing system1's length with itself, so unequal system lengths
    # were never detected.
    if not (len(system1) == len(system2) == len(gold)):
        raise ValueError('Input lists should have the same length')
    for g, s1, s2 in zip(gold, system1, system2):
        if s1!=s2:
            new_system1.append(s1)
            new_system2.append(s2)
            if G:
                new_gold.append(g)
        else:
            rest1.append(s1)
            rest2.append(s2)
            common_gold.append(g)
    if not G: new_gold=[]
    assert rest1 == rest2
    return new_system1, new_system2, new_gold, rest1, common_gold
def main(gold, system1, system2, verbose=False, N=10000, exact_threshold=20, training=None, scoring=getscores):
    '''Run the labeling significance test (exact or approximate).

    gold/system1/system2: equal-length label lists
    N: number of shuffles for the approximate test
    exact_threshold: the maximum number of instance to calculate exact randomization instead of approximate
    training: class-label counts from the training file (or None)
    scoring: function(gold, system, training=...) -> {name: score}

    Returns {score name: probability of accepting H0}.
    '''
    # Check
    if not (len(gold) == len(system1) == len(system2)):
        raise ValueError('There should be an equal number of non-empty lines in each input file.')
    # Shuffle only those instances that have a different class label
    news1, news2, newgold, common, common_gold = getdifference(system1, system2, gold)
    if verbose:
        # report the per-system scores on the full (unreduced) data
        for i,s in enumerate([system1, system2]):
            scores = scoring(gold, s, training=training)
            lines=['Scores for system%d:' %(i+1)]
            # Python 2 idiom: keys() returns a sortable list
            keys = scores.keys()
            keys.sort()
            for k in keys:
                lines.append(' %-23s : %.4f' %(k, scores[k]))
            print >>sys.stderr, '\n'.join(lines)
            print >>sys.stderr
        loginfo('-'*50)
    # only shuffle difference: quicker and same probability results
    gold = newgold
    system1 = news1
    system2 = news2
    total_uniq = len(gold)
    # The number of instances with different predictions
    if verbose: loginfo('Found %d predictions that are different for the 2 systems' %(total_uniq))
    # number of permutations (2**n overflows C longs for large n on Python 2)
    try:
        np = pow(2, len(gold))
    except OverflowError:
        np = 1000000001
    if np > 1000000000:
        loginfo('Number of permutations: more than 1,000,000,000')
    else:
        loginfo('Number of permutations: %d' %np)
    if np <= N and total_uniq > exact_threshold:
        loginfo('NOTE:')
        loginfo('The number of permutations is lower than the number of shuffles.')
        loginfo('You may want to calculate exact randomization. To do this')
        loginfo('set option -t higher than %d.' %total_uniq)
    if total_uniq <= exact_threshold:
        if verbose: loginfo('This is equal or less than the %d predictions threshold: calculating exact randomization' %(exact_threshold))
        probs = exactlabelingsignificance(gold, system1, system2, verbose=verbose, training=training, scoring=scoring, common=common, common_gold=common_gold)
    else:
        probs = labelingsignificance(gold, system1, system2, N=N, verbose=verbose, training=training, scoring=scoring, common=common, common_gold=common_gold)
    if verbose: loginfo('Done')
    return probs
def main2(gold, system1, system2, verbose=False, N=1048576, absolute=True, exact_threshold=10):
    '''The main for term extraction.

    gold/system1/system2: term lists (deduplicated here)
    N: number of shuffles for the approximate test
    absolute: use |difference| as the test statistic if True
    exact_threshold: below this many system-unique terms the exact
        (exhaustive) randomization is computed instead

    Returns {score name: probability of accepting H0}.
    '''
    # No doubles
    news1 = list(set(system1))
    news2 = list(set(system2))
    newgold = list(set(gold))
    gold = newgold
    system1 = news1
    system2 = news2
    if verbose:
        print >>sys.stderr
        for i,s in enumerate([system1, system2]):
            # BUGFIX: the original called getscores2(gold, s, training=training),
            # but no name 'training' exists in this scope, so verbose mode
            # raised NameError; getscores2 ignores its training argument anyway.
            scores = getscores2(gold, s)
            lines=['Scores for system%d:' %(i+1)]
            keys = scores.keys()
            keys.sort()
            for k in keys:
                lines.append(' %-23s : %.4f' %(k, scores[k]))
            print >>sys.stderr, '\n'.join(lines)
        print >>sys.stderr
        loginfo('-'*50)
    # the number of terms that occur only in s1 or in s2
    union=set(system1+system2)
    intersect = set(system1).intersection(set(system2))
    total_uniq = len(union) - len(intersect)
    if verbose: loginfo('Found %d predictions that are different for the 2 systems' %(total_uniq))
    if total_uniq < exact_threshold:
        # (message typo fixed: "equal of less" -> "equal or less")
        if verbose: loginfo('This is equal or less than the %d terms threshold: calculating exact randomization' %(exact_threshold))
        probs = exacttermsignificance(gold, system1, system2, verbose=verbose, absolute=absolute)
    else:
        probs= termsignificance(gold, system1, system2, N=N, verbose=verbose, absolute=absolute)
    if verbose: loginfo('Done')
    return probs
def main3(data, verbose=False, N=1048576, absolute=True):
    '''For stratified shuffling.

    data: {stratum: {group: [float, ...]}} with exactly two groups per
        stratum (as produced by strata_read)
    N: number of shuffles
    absolute: test |difference of means| if True

    Shuffles group labels within each stratum and compares the difference
    of group means against the observed one.
    Returns {score name: probability of accepting H0}, add-1 smoothed.
    '''
    # The groups
    scoring_func=average
    # NOTE: indexing/sorting dict.keys() below is Python 2 only
    groups = data[data.keys()[0]].keys()
    groups.sort()
    assert len(groups) == 2
    if verbose:
        strata = data.keys()
        strata.sort()
        # "stratum" vs "strata" in the log message
        stext = 'a'
        if len(strata) == 1: stext='um'
        loginfo('Found %d strat%s: %s' %(len(data), stext, ', '.join(strata)))
        loginfo('')
        loginfo('Computing %d shuffles' %N)
        loginfo('H0: there is no absolute difference between the means of %s and %s' %tuple(groups))
        loginfo(' Commonly, you reject H0 if the probability drops below')
        loginfo(' a predefined significance level, e.g 0.05.')
        loginfo('-'*50)
    # pool the values of each group over all strata
    systems={groups[0]:[], groups[1]:[]}
    for stratum, d in data.items():
        for g in groups:
            systems[g] += d[g]
    if verbose:
        print >>sys.stderr
        for g in groups:
            s = systems[g]
            scores = scoring_func(None, s)
            lines=['Scores for group %s:' %(g)]
            keys = scores.keys()
            keys.sort()
            for k in keys:
                lines.append(' %-23s : %.4f' %(k, scores[k]))
            print >>sys.stderr, '\n'.join(lines)
        print >>sys.stderr
        loginfo('-'*50)
    # Reference
    refdiffs = teststatistic(None, systems[groups[0]], systems[groups[1]], training=None, scoring=average, absolute=absolute)
    # progress is reported roughly every 10% of the shuffles
    if N >= 10:
        nom = int(N/10.0)
    else:
        nom=1
    # Start shuffling
    ngecounts={}
    for i in range(N):
        shuffled={}
        for stratum, d in data.items():
            values = d[groups[0]] + d[groups[1]]
            n1 = len(d[groups[0]])
            n2 = len(d[groups[1]])
            labels = [groups[0]]*n1+ [groups[1]]*n2
            # reassign the group labels at random within this stratum
            random.shuffle(labels)
            for l, v in zip(labels, values):
                shuffled[l] = shuffled.get(l ,[]) + [v]
        # the test statistics
        diffs = teststatistic(None, shuffled[groups[0]], shuffled[groups[1]], scoring=scoring_func, absolute=absolute)
        # see whether the shuffled system performs better than the originals
        for k in refdiffs.keys():
            pseudo = diffs[k]
            actual = refdiffs[k]
            if pseudo >= actual:
                ngecounts[k] = ngecounts.get(k, 0) + 1
            elif k not in ngecounts.keys():
                ngecounts[k] = 0
        if verbose and not ((i+1)%nom):
            loginfo('Calculated shuffle %d/%d' %(i+1, N))
    assert set(ngecounts.keys()) == set(refdiffs.keys())
    # Calculate probabilities
    probs = getprobabilities(ngecounts, N, add=1, verbose=True)
    return probs
# ========================================================================================================================
# TESTING
# ========================================================================================================================
def Yeh():
'''Creates 3 synthetic files to reproduce the results from Section3.3 of
Alexander Yeh, More accurate tests for the statistical significance of result differences,
in: Proceedings of the 18th International Conference on Computational Linguistics, Volume 2,
pages 947-953, 2000.
The filenames are yeh.gold, yeh.s1 and yeh.s2
Running the following command reproduces the reported results:
$ python art.py -c yeh.gold -n1048576 -v -r -a yeh.s1 yeh.s2
Probabilities for accepting H0:
f1-score : 0.014643
precision : 0.97995
recall : 0.00010204
Note that the test statistic is system1-system2, so for precision the
probability from Yeh is 1 - 0.97995 = 0.02005
'''
gold = 'yeh.gold'
s1 = 'yeh.s1'
s2 = 'yeh.s2'
# The gold standard
with open(gold, 'w') as f:
for i in range(103):
f.write('%d\n' %i)
# System 1: R45.6 P49.5 F47.5
with open(s1, 'w') as f:
for i in range(19+28):
f.write('%d\n' %i) # retrieved by both and system1
for i in range(5):
f.write('B%d\n' %(i)) # spurious retrieved by both
for i in range(43):
f.write('one%d\n' %(i)) # spurious retrieved by system1
# System 2: R24.3 P64.1 F35.2
with open(s2, 'w') as f:
for i in range(19+6):
if i < 19:
f.write('%d\n' %i) # retrieved by both
else:
f.write('%d\n' %(i+28)) # retrieved by system2
for i in range(5):
f.write('B%d\n' %(i)) # spurious retrieved by both
for i in range(9):
f.write('two%d\n' %(i)) # spurious retrieved by system1
print 'Written:', gold, s1, s2
# ==================================================================================================================
if __name__ == '__main__':
    def _usage():
        # Print the command-line help text to stderr.  The whole help is one
        # Python-2 print statement; the trailing %s placeholders are filled
        # with the module's version/author/date metadata.
        print >>sys.stderr, '''Approximate Randomization testing (version %s)
This script can be used to assess the significance for differences in recall, precision,
f-score, and accuracy for two machine learner outputs.
The H0 hypothesis tested is:
   There is no difference between SYSTEM1 and SYSTEM2 for a given score.
This hypothesis is tested for: macro-av. recall, macro-av. precision, macro-av. f-score, micro-av. f-score, and accuracy.
The output is a set of probabilities for accepting H0. If this probability is lower
than a predefined level (e.g. 0.05) then H0 is rejected.
USAGE
    ./art.py [-m] [-n int] [-c <gold-standard>] [-s sep] [-t int] [-T training] [-r] [-a] [-h] [-H] [-v] <output_a> <output_b>
OPTIONS
    -n : Number of shuffles (default: 10000)
    -c : Change the expected format for the input files, see FORMAT below
    -s : Feature separator (default: whitespace)
    -t : Define the maximal number of instances that can be in the input files
         for exact randomization. The lower this value, the quicker approximate
         randomization is carried out. If set to 0, approximation is always
         carried out. Note that exact randomization for input files with
         only 10 instances can already take a long time. (default: 10)
    -T : Path to the training file used by both systems, see TRAINING below
    -r : term extraction significance testing instead of labeling significance
         testing, see TERM EXTRACTION below. -c is mandatory; -T is ignored
    -a : use the actual difference instead of the absolute difference when
         calculating test extraction significance
    -m : test for MBT experiments, see MBT below. -c is obligatory.
    -h : Print help
    -H : Print more background information
    -v : Verbose processing
FORMAT
    Per default, the script expects 2 instance files tagged with
    different classifiers.
    - Each instance should be on a new line.
    - All features and class labels should be separated with the feature
      separator. This can be set with the -s option.
    - An instance is a list of features; followed by the gold standard class label;
      followed by the class label as predicted by the classifier (=standard Timbl output)
    If option -c is set, an extra input file with the gold-standard class labels
    should be provided. The format of all input files should be:
    - one class label per new line (and nothing else)
    - class labels belonging to the same instance should
      be on the same line in all 3 input files.
VALIDITY
    If scipy (www.scipy.org) is available and -v is set, the sign-test probability is also reported when
    carrying out approximate randomization. This probability can be compared with the reported probability
    for "accuracy" to check the validity of the randomization method. Both probabilities should be similar
    or should at least lead to similar conclusions; otherwise you might consider increasing the number of
    shuffles with option -n. Another validity check is rerunning the randomization test and comparing the
    results.
    The test carried out by the two-sided paired sign-test is:
        H0: The number of correct predictions from SYSTEM1 that are incorrectly predicted by SYSTEM2
            equals the number of correct predictions from SYSTEM2 that are incorrectly predicted by
            SYSTEM1. (Predictions that are correct or incorrect for both systems are ignored.)
    H0 is rejected if the reported sign-test probability is lower than a predefined level.
TRAINING
    Macro- and micro-averaging is carried out by taking the class counts from the input files. If not every class
    from the original training file occurs in the input files to the same extend, then the reported averaged scores
    may differ from the scores from Timbl.
    This averaging difference can be solved by supplying the training file with the -T option. The same training file
    should be used by both systems.
    When the -c option is set, the format of supplied file should be the same as the input files (only class labels);
    if -c is not set, the supplied training file should contain instances but without predicted class labels, only
    the gold standards labels.
    Because setting and not setting the -T option influences the way the performance scores are computed, this also
    influences the reported probabilities.
    See also from confusionmatrix.py: $ python confusionmatrix.py -V
TERM EXTRACTION
    The default setup is to compute the significance for Timbl style output. Is is possible to use this script
    to calculate significance for term extraction. The -r option should be set. In this mode, the script
    expects 3 files: gold_standard, system1, system2. All files should contain terms; each term on a new line.
    It is not required that the number of extracted terms is the same for both systems, nor should it be
    the same as the number of gold standard terms.
    By default, the test statistic is the absolute difference of the performance from system1 and system2.
    If the -a option is set, the test statistic is the signed difference.
    The -ar mode is identical of the system described by Yeh, 2000, Section3.3. To reproduce the results:
    To create the files:
        $ python art.py --yeh
    To run the randomization:
        $ python art.py -ar -v -n1048576 -c yeh.gold yeh.s1 yeh.s2
    For precision, the probability is (1 - reported_probability) because system2 has a higher precision than
    system1.
MBT
    It is also possible to process files in the MBT format. An MBT command looks like this:
        $ Mbt -s training1.settings -T testfile > output1
        $ Mbt -s training2.settings -T testfile > output2
    If is now possible to test the significance of the accuracy:
        $ python art.py -m -c testfile output1 output2
    The probability computation is carried out in the same way as with the basic command for instance files
    except that the "instances" in the case of Mbt are complete sentences -- there is no shuffling at the
    token level because there are interdependencies between the token labels.
STRATIFIED SHUFFLING
    It is also possible to reproduce the stratified shuffling example of Noreen 1989 (Section 2.7):
        $ ./art.py -v -n 999 transfer.data
    In which the format of transfer.data is 'stratum group values', like:
        A transfer 2.0 3.0 2.2 2.1 2.2
        A non-transfer 3.2 2.9 2.0 2.2 2.1 1.4
        ...
    This option can also be used for the example in Section 2.1. Using ony one stratum.
NOTE
    No assumptions are made on the distribution of the performance scores. The only assumption that is made is
    that there are no inter-instance dependencies, i.e. knowing the class label of 1 instance should not help
    knowing the class label of another instance. This assumption is violated in the output from the memory-based
    tagger (MBT). This is the reason why the -m option shuffles at sentence level instead of token level.
DEPENDENCIES
    This script depends on confusionmatrix.py and combinations.py (www.clips.ua.ac.be/~vincent/software.html)
    and optionally scipy (www.scipy.org).
REFERENCES
    Eric W. Noreen, Computer-intensive Methods for Testing Hypotheses: An Introduction, John Wiley & Sons, New York, NY, USA, 1989.
    Alexander Yeh, More accurate tests for the statistical significance of result differences, in: Proceedings of the 18th International Conference on Computational Linguistics, Volume 2, pages 947-953, 2000.
%s, %s
''' %(__version__, __author__, __date__)
try:
opts,args=getopt.getopt(sys.argv[1:],'hHc:s:vn:t:T:ram', ['help', 'yeh'])
except getopt.GetoptError:
# print help information and exit:
_usage()
sys.exit(2)
sep=None
gold = None
verbose=False
N=10000
exact_threshold=10
trainingfile = None
training=None
terms=False
absolute=True
mbt=False
for o, a in opts:
if o in ('-h', '--help'):
_usage()
sys.exit()
if o in ('-H',):
print >>sys.stderr, __doc__
sys.exit(2)
if o in ('-s',):
sep = a
if sep == '\\t': sep='\t'
if o in ('-c',):
gold = a
if o in ('-v',):
verbose = True
if o in ('-n',):
N = int(a)
if o in ('-t',):
exact_threshold = int(a)
if o in ('-T',):
trainingfile = a
if o in ('-r',):
terms = True
if o in ('-a',):
absolute = False
if o in ('-m',):
mbt = True
if o in ('--yeh',):
Yeh()
sys.exit(0)
if len(args) == 1:
data = strata_read(args[0], sep=sep)
loginfo('-'*50)
loginfo('Datafile: %s' %os.path.basename(args[0]))
main3(data, verbose=verbose, N=N)
sys.exit(0)
elif len(args) != 2:
_usage()
sys.exit(1)
# The files with the systems
output1, output2 = args
if terms and not gold:
print >>sys.stderr, 'ERROR 2: when doing term significance testing a gold standard is needed (-c option)'
sys.exit(1)
if mbt and not gold:
print >>sys.stderr, 'ERROR 3: when doing MBT significance testing a gold standard is needed (-c option)'
sys.exit(1)
# Reading in the class labels
if gold:
if mbt:
goldlabels = mbtread(gold)
system1 = mbtread(output1)
system2 = mbtread(output2)
else:
if trainingfile: training = readtraining(trainingfile, sep=sep, index=None)
goldlabels = fread(gold, index=None)
system1 = fread(output1, index=None)
system2 = fread(output2, index=None)
else:
if trainingfile: training = readtraining(trainingfile, sep=sep, index=-1)
try:
goldlabels = fread(output1, index=-2, sep=sep)
except IndexError:
print >>sys.stderr, 'ERROR 4: Is the feature separator set correctly? (option -s is currently "%s")' %str(sep)
sys.exit(1)
check = fread(output2, index=-2, sep=sep)
if check != goldlabels:
print check, goldlabels
print >>sys.stderr, 'ERROR 5: File %s and %s should have the same gold reference labels.' %(output1, output2)
sys.exit(1)
del check
check1 = fread(output1, index=(0,-1), sep=sep)
check2 = fread(output2, index=(0,-1), sep=sep)
if check1 != check2:
print >>sys.stderr, 'ERROR 5: File %s and %s should be exactly the same up until the predicted class label.' %(output1, output2)
sys.exit(1)
del check1, check2
system1=fread(output1, index=-1, sep=sep)
system2=fread(output2, index=-1, sep=sep)
# Info
if verbose:
loginfo('-'*50)
loginfo('SYSTEM1 :%s' %output1)
loginfo('SYSTEM2 :%s' %output2)
if mbt:
loginfo('GOLD :%s' %gold)
loginfo('MBT style formatted files')
loginfo('%d sentences in input files' %len(system1))
else:
if gold:
loginfo('GOLD :%s' %gold)
if not terms: loginfo('Considering entire lines as class labels')
else:
loginfo('Considering the last field as the predicted class label')
loginfo('Considering the one but last field as the gold standard class label')
if sep is not None: loginfo('Using "%s" as feature separator' %sep)
if not terms: loginfo('%d instances in input files' %len(system1))
labels=set(goldlabels)
labels = labels.union(set(system1))
labels = labels.union(set(system2))
nlabels = len(labels)
labels=list(labels)
labels.sort()
if not mbt: loginfo('Found %d different labels/terms' %nlabels)
if nlabels < 10: loginfo(' %s' %(', '.join(labels)))
if trainingfile: loginfo('Computing averaged scores using class label counts from: %s' %trainingfile)
loginfo('')
loginfo('Computing %d shuffles' %N)
loginfo('H0: there is no difference between SYSTEM1 and SYSTEM2')
if terms and not absolute: loginfo('H1: SYSTEM1 performs better than SYSTEM2')
loginfo(' Commonly, you reject H0 if the probability drops below')
loginfo(' a predefined significance level, e.g 0.05.')
loginfo('-'*50)
# Run
try:
if gold and mbt:
probs = main(goldlabels, system1, system2, verbose=verbose, N=N, exact_threshold=exact_threshold, training=None, scoring=getscoresmbt)
#probs = main(goldlabels, system1, system2, verbose=verbose, N=N, exact_threshold=exact_threshold, training=None, scoring=getscoresmbtmulti)
elif gold and terms:
probs = main2(goldlabels, system1, system2, N=N, verbose=verbose, absolute=absolute, exact_threshold=exact_threshold)
else:
probs = main(goldlabels, system1, system2, verbose=verbose, N=N, exact_threshold=exact_threshold, training=training) #, scoring=getscoresmbtmulti)
except Exception, e:
raise
print >>sys.stderr, 'ERROR 1: %s' %(e.message)
sys.exit(1) | unknown | codeparrot/codeparrot-clean | ||
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP addon manifest for the Survey module: pure data consumed by the
# framework at module discovery/installation time.
{
    'name': 'Survey',
    'version': '2.0',
    'category': 'Marketing',
    'description': """
Create beautiful web surveys and visualize answers
==================================================

It depends on the answers or reviews of some questions by different users. A
survey may have multiple pages. Each page may contain multiple questions and
each question may have multiple answers. Different users may give different
answers of question and according to that survey is done. Partners are also
sent mails with personal token for the invitation of the survey.
    """,
    'summary': 'Create surveys, collect answers and print statistics',
    'author': 'OpenERP SA',
    'website': 'https://www.odoo.com/page/survey',
    # Modules that must be installed before this one.
    'depends': ['email_template', 'mail', 'website', 'marketing'],
    # XML/CSV files loaded at install/update time (order matters: security
    # rules first, then views, wizards and cron/stage data).
    'data': [
        'security/survey_security.xml',
        'security/ir.model.access.csv',
        'views/survey_views.xml',
        'views/survey_templates.xml',
        'views/survey_result.xml',
        'wizard/survey_email_compose_message.xml',
        'data/survey_stages.xml',
        'data/survey_cron.xml'
    ],
    # Demonstration data, only loaded in databases created with demo data.
    'demo': ['data/survey_demo_user.xml',
             'data/survey_demo_feedback.xml',
             'data/survey.user_input.csv',
             'data/survey.user_input_line.csv'],
    'installable': True,
    'auto_install': False,
    # Shown as a standalone application in the apps menu.
    'application': True,
    'sequence': 10,
}
import asyncio
import secrets
import aiohttp
import discord
from lxml import html
from .mech.utils import scramble
# Channel IDs with a guessing round currently in progress; used to prevent
# two simultaneous games in the same channel.
ongoing_list = []
async def animechargame(cmd, message, args):
    """Anime-character guessing game command.

    Scrapes a random entry from MyAnimeList's top-anime ranking, picks a
    random character from that show, posts the character's image together
    with a scrambled version of the name, and rewards the first user in the
    channel to guess the name within 30 seconds.  Only one round may run per
    channel at a time (tracked in the module-level ``ongoing_list``).
    """
    if message.channel.id not in ongoing_list:
        try:
            # Lock this channel for the duration of the round.
            ongoing_list.append(message.channel.id)
            mal_icon = 'https://myanimelist.cdn-dena.com/img/sp/icon/apple-touch-icon-256.png'
            wait_embed = discord.Embed(color=0x1d439b)
            wait_embed.set_author(name='Hunting for a good specimen...', icon_url=mal_icon)
            working_response = await message.channel.send(embed=wait_embed)
            # "hint" argument halves the reward; note the scrambled name is
            # shown either way — the flag only affects the payout below.
            if args:
                if args[0].lower() == 'hint':
                    hint = True
                else:
                    hint = False
            else:
                hint = False
            # Pick one of the first three 50-entry pages of the top-anime list.
            ani_order = secrets.randbelow(3) * 50
            if ani_order:
                ani_top_list_url = f'https://myanimelist.net/topanime.php?limit={ani_order}'
            else:
                ani_top_list_url = 'https://myanimelist.net/topanime.php'
            # Scrape a random anime entry from the ranking page.
            async with aiohttp.ClientSession() as session:
                async with session.get(ani_top_list_url) as ani_top_list_session:
                    ani_top_list_html = await ani_top_list_session.text()
                    ani_top_list_data = html.fromstring(ani_top_list_html)
                    ani_list_objects = ani_top_list_data.cssselect('.ranking-list')
                    ani_choice = secrets.choice(ani_list_objects)
                    # NOTE(review): index path assumes MAL's ranking-row markup
                    # (second child holds the title link) — breaks if the page
                    # layout changes.
                    ani_url = ani_choice[1][0].attrib['href']
            # Scrape the anime's character list page.
            async with aiohttp.ClientSession() as session:
                async with session.get(f'{ani_url}/characters') as ani_page_session:
                    ani_page_html = await ani_page_session.text()
                    ani_page_data = html.fromstring(ani_page_html)
                    cover_object = ani_page_data.cssselect('.ac')[0]
                    anime_cover = cover_object.attrib['src']
                    anime_title = cover_object.attrib['alt'].strip()
                    character_object_list = ani_page_data.cssselect('.borderClass')
                    # Keep only cells that actually link to a character page.
                    character_list = []
                    for char_obj in character_object_list:
                        if 'href' in char_obj[0].attrib:
                            if '/character/' in char_obj[0].attrib['href']:
                                character_list.append(char_obj)
                    char_choice = secrets.choice(character_list)
                    char_url = char_choice[0].attrib['href']
            # Scrape the character page for the portrait and canonical name.
            async with aiohttp.ClientSession() as session:
                async with session.get(char_url) as char_page_session:
                    char_page_html = await char_page_session.text()
                    char_page_data = html.fromstring(char_page_html)
                    char_img_obj = char_page_data.cssselect('.borderClass')[0][0][0][0]
                    char_img = char_img_obj.attrib['src']
                    # MAL lists names as "Family, Given"; join the parts.
                    char_name = ' '.join(char_img_obj.attrib['alt'].strip().split(', '))
            await working_response.delete()
            question_embed = discord.Embed(color=0x1d439b)
            question_embed.set_image(url=char_img)
            question_embed.set_footer(text='You have 30 seconds to guess it.')
            question_embed.set_author(name=anime_title, icon_url=anime_cover, url=char_img)
            # Reward equals the length of the shortest word in the name,
            # halved (integer division) when hint mode is on.
            kud_reward = None
            name_split = char_name.split()
            for name_piece in name_split:
                if kud_reward is None:
                    kud_reward = len(name_piece)
                else:
                    if kud_reward >= len(name_piece):
                        kud_reward = len(name_piece)
            if hint:
                kud_reward = kud_reward // 2
            scrambled_name = scramble(char_name)
            question_embed.description = f'Name: {scrambled_name}'
            await message.channel.send(embed=question_embed)

            def check_answer(msg):
                # Accept any single word of the name or the full name,
                # case-insensitively, from the same channel only.
                if message.channel.id == msg.channel.id:
                    if msg.content.lower() in char_name.lower().split():
                        correct = True
                    elif msg.content.lower() == char_name.lower():
                        correct = True
                    else:
                        correct = False
                else:
                    correct = False
                return correct

            try:
                answer_message = await cmd.bot.wait_for('message', check=check_answer, timeout=30)
                cmd.db.add_currency(answer_message.author, message.guild, kud_reward)
                author = answer_message.author.display_name
                currency = cmd.bot.cfg.pref.currency
                win_title = f'🎉 Correct, {author}, it was {char_name}. You won {kud_reward} {currency}!'
                win_embed = discord.Embed(color=0x77B255, title=win_title)
                await message.channel.send(embed=win_embed)
            except asyncio.TimeoutError:
                timeout_title = f'🕙 Time\'s up! It was {char_name} from {anime_title}...'
                timeout_embed = discord.Embed(color=0x696969, title=timeout_title)
                await message.channel.send(embed=timeout_embed)
        except IndexError:
            # Any of the cssselect()[...] lookups can fail if the page layout
            # differs; report it instead of crashing the bot.
            grab_error = discord.Embed(color=0xBE1931, title='❗ I failed to grab a character, try again.')
            await message.channel.send(embed=grab_error)
        # Release the channel lock regardless of outcome.
        if message.channel.id in ongoing_list:
            ongoing_list.remove(message.channel.id)
    else:
        ongoing_error = discord.Embed(color=0xBE1931, title='❗ There is one already ongoing.')
        await message.channel.send(embed=ongoing_error)
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
import os
import re
import xml.etree.cElementTree as ET
from numpy import array, dtype, empty
from opus_core.opus_error import OpusError
from opus_core.store.storage import Storage
class fixed_field_storage(Storage):
    """
    A storage object that saves table and value data into a directory,
    giving each table its own file in the directory. Fields are written
    with fixed width and no delimiters. The format info optionally is
    written in a commented header or a separate file. Format info is XML:

        <fixed_field>
            <field name="column_name_1" format="5.2f" />
            <field name="column_name_2" format="10s" />
            ...
        </fixed_field>

    The 'format' attribute is a printf-style format string:
        http://docs.python.org/lib/typesseq-strings.html

    When invoking fixed field storage creation from Python, the format
    info may also be supplied as a list of tuples, of the form:
        [["name1", "5.2f"], ["name2", "10s"], ... ]
    """

    #
    # Static members
    #

    _root_element = 'fixed_field'
    _field_element = 'field'

    class Location:
        # Where the XML format description is stored.
        NONE = 0    # nowhere: callers must always pass `format` explicitly
        HEADER = 1  # as a commented first line of the data file itself
        FILE = 2    # in a sibling <table>.<format_file_extension> file

    # Parses a printf-style field format such as '6.2f' or ' 05i':
    # 'size' captures the total field width, 'type' the conversion character.
    _format_re = re.compile("^[-#0 +]*(?P<size>[1-9]+[0-9]*)(?:\.[0-9]*)?(?P<type>[diouxXeEfFgGcrs])$")

    #
    # Constructors
    #

    def __init__(self,
                 storage_location,
                 file_extension = 'dat',
                 format_location = Location.HEADER,
                 format_file_extension = 'fmt',
                 format_prefix = '# ',
                 data_prefix = '',
                 line_terminator = '\n'
                 ):
        """
        storage_location -- directory receiving one data file per table
        format_location -- a fixed_field_storage.Location value
        format_prefix -- prefix marking the in-file format header as a comment
        data_prefix -- prefix prepended to every data line
        Raises ValueError on an empty file extension or a bad location value.
        """
        if not file_extension:
            raise ValueError('File extension must be a non-empty string.')
        if format_location not in (self.Location.NONE,
                                   self.Location.HEADER,
                                   self.Location.FILE):
            raise ValueError('format_location must be a fixed_field_storage.Location value.')
        self._output_directory = storage_location
        self._file_extension = file_extension
        self._format_location = format_location
        self._format_file_extension = format_file_extension
        self._format_prefix = format_prefix
        self._data_prefix = data_prefix
        self._line_terminator = line_terminator

    #
    # Storage interface implementation
    #

    def get_storage_location(self):
        return self._output_directory

    def write_table(self,
                    table_name,
                    table_data,
                    mode = Storage.OVERWRITE,
                    format = None,
                    ):
        """Write `table_data` (column name -> value sequence) as fixed-width
        text.  `format` is mandatory (XML string or [[name, fmt], ...] list);
        columns without a format entry are silently omitted.  With
        mode=Storage.APPEND the new columns are merged into the stored table
        before rewriting.  Raises ValueError if a formatted value exceeds the
        declared total line width."""
        if not format:
            raise ValueError('No format supplied.')
        # Append columns to the existing stored table if requested
        if mode == Storage.APPEND:
            old_data = self.load_table(table_name = table_name)
            old_data.update(table_data)
            table_data = old_data
        column_size, column_names = self._get_column_size_and_names(table_data)
        format_et = self._parse_format_to_et(format)
        try:
            python_format, target_size = self._convert_et_format_to_python_format_and_get_target_size(format_et)
        except Exception:
            # BUG FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.
            raise Exception('Could not parse the format string for the fixed field indicator: %s' % format)
        # Normalize the format XML string to a single line
        format = ET.tostring(format_et).replace('\n', '').replace('\r', '')
        output = open(self._get_file_path_for_table(table_name), 'wb')
        try:
            # Write out the format XML, either to the data header, or to a separate file
            if self._format_location == self.Location.HEADER:
                output.write(self._format_prefix
                             + format.replace(self._line_terminator, '')
                             + self._line_terminator)
            elif self._format_location == self.Location.FILE:
                format_output = open(self._get_format_file_path_for_table(table_name), 'wb')
                try:
                    format_output.write(format)
                finally:
                    format_output.close()
            # Write out the data, one fixed-width line per row
            for row_index in range(column_size):
                row = {}
                for column_name, column_values in table_data.iteritems():
                    row[column_name] = column_values[row_index]
                formatted_row = python_format % row
                if len(formatted_row) != target_size:
                    raise ValueError('Input data went over fixed field size.')
                output.write(self._data_prefix
                             + formatted_row
                             + self._line_terminator)
        finally:
            # BUG FIX: guarantee the handle is closed even when a row fails
            # to format.
            output.close()

    def load_table(self,
                   table_name,
                   column_names = Storage.ALL_COLUMNS,
                   lowercase = True,
                   format = None,
                   strip = True
                   ):
        """Read the stored table back as a dict of numpy arrays, converting
        each column according to its format's conversion character.  With
        strip=True, string columns have surrounding whitespace removed."""
        # Get the data file and the format element tree
        data_file, format_et = self._get_file_and_format_et_for_table(table_name, format)
        try:
            # Initialize intermediate storage for table data
            table_data_lists = {}
            for field_et in format_et:
                if lowercase:
                    field_et.set('name', field_et.get('name').lower())
                if column_names == Storage.ALL_COLUMNS or field_et.get('name') in column_names:
                    table_data_lists[field_et.get('name')] = []
            # Split each data line into fixed-width fields; record field types
            field_types = {}
            for line in data_file:
                location = len(self._data_prefix)
                for field_et in format_et:
                    next_location = location + self._get_field_size(field_et)
                    field_text = line[location:next_location]
                    location = next_location
                    name = field_et.get('name')
                    if column_names == Storage.ALL_COLUMNS or name in column_names:
                        table_data_lists[name].append(field_text)
                        field_types[name] = self._format_re.match(field_et.get('format')).group('type')
        finally:
            # BUG FIX: the data file handle was previously leaked.
            data_file.close()
        # Convert the textual data into its proper type and load into numpy arrays
        table_data = {}
        for name in table_data_lists.keys():
            rows = len(table_data_lists[name])
            if rows == 0:
                # BUG FIX: an empty data file used to raise KeyError here
                # (no line ever populated field_types); return empty columns.
                table_data[name] = empty(0, dtype='S1')
                continue
            type_char = field_types[name]
            if type_char in ['d', 'i', 'u']:
                table_data[name] = empty(rows, dtype=int)
                for x in range(rows):
                    table_data[name][x] = int(table_data_lists[name][x])
            elif type_char in ['o']:
                table_data[name] = empty(rows, dtype=int)
                for x in range(rows):
                    table_data[name][x] = int(table_data_lists[name][x], 8)
            elif type_char in ['x', 'X']:
                table_data[name] = empty(rows, dtype=int)
                for x in range(rows):
                    table_data[name][x] = int(table_data_lists[name][x], 16)
            elif type_char in ['e', 'E', 'f', 'F', 'g', 'G']:
                table_data[name] = empty(rows, dtype=float)
                for x in range(rows):
                    table_data[name][x] = float(table_data_lists[name][x])
            else:
                # String-typed fields ('s', 'r', 'c').  BUG FIX: this branch
                # used to read `elif strip:`, so string columns were silently
                # DROPPED from the result whenever strip=False.
                table_data[name] = empty(rows, dtype='S' + str(len(table_data_lists[name][0])))
                for x in range(rows):
                    value = table_data_lists[name][x]
                    table_data[name][x] = value.strip() if strip else value
        # Done
        return table_data

    def get_column_names(self,
                         table_name,
                         lowercase = True,
                         format = None
                         ):
        """Return the column names declared in the table's format spec, in
        declaration order (lowercased unless lowercase=False)."""
        data_file, format_et = self._get_file_and_format_et_for_table(table_name, format)
        # BUG FIX: only the format tree is needed here; the data file handle
        # was previously leaked.
        data_file.close()
        column_names = []
        for field_et in format_et:
            name = field_et.get('name')
            column_names.append(name.lower() if lowercase else name)
        return column_names

    #
    # fixed_field_storage interface
    #

    def get_file_extension(self):
        return self._file_extension

    #
    # Private utility methods
    #

    def _get_file_path_for_table(self, table_name):
        # <storage_location>/<table>.<file_extension>
        filename_with_extention = '%s.%s' % (table_name, self._file_extension)
        return os.path.join(self._output_directory, filename_with_extention)

    def _get_format_file_path_for_table(self, table_name):
        # <storage_location>/<table>.<format_file_extension>
        filename_with_extention = '%s.%s' % (table_name, self._format_file_extension)
        return os.path.join(self._output_directory, filename_with_extention)

    def _parse_format_to_et(self, format_xml_or_list):
        """Normalize either format representation (XML string or list of
        [name, format] pairs) into a fresh <fixed_field> element tree."""
        result = ET.Element(self._root_element)
        if isinstance(format_xml_or_list, list):
            for item in format_xml_or_list:
                ET.SubElement(result, self._field_element, name=item[0], format=item[1])
        else:
            format_et = ET.fromstring(format_xml_or_list)
            if format_et.tag != self._root_element:
                raise ValueError('Format root element is not "' + self._root_element + '".')
            for field_et in format_et:
                if field_et.tag == self._field_element:
                    ET.SubElement(result, self._field_element, name=field_et.get('name'), format=field_et.get('format'))
        return result

    def _get_file_and_format_et_for_table(self, table_name, format = None):
        """Open the table's data file and locate its format spec: the explicit
        `format` argument wins, then the sidecar format file, then the
        commented header line of the data file.  Returns (open file positioned
        at the first data line, format element tree).

        NOTE(review): if an explicit `format` is passed for a table written
        with a HEADER-located format, the header line is not skipped and will
        be read as data — confirm callers never combine the two."""
        data_file = open(self._get_file_path_for_table(table_name))
        if not format:
            try:
                format_file = open(self._get_format_file_path_for_table(table_name))
                try:
                    format = format_file.read()
                finally:
                    format_file.close()
            except IOError:
                # No sidecar format file: consume the comment prefix and take
                # the header line, leaving the file at the first data line.
                data_file.read(len(self._format_prefix))
                format = data_file.readline().strip()
        format_et = self._parse_format_to_et(format)
        return data_file, format_et

    def _get_field_size(self, field_et):
        # Total character width of one field, from its printf format.
        return int(self._format_re.match(field_et.get('format')).group('size'))

    def _convert_et_format_to_python_format_and_get_target_size(self, format_et):
        """Concatenate all field formats into one named %-format string and
        return it together with the total fixed line width."""
        python_format = ""
        target_size = 0
        for field_et in format_et:
            python_format += "%%(%(name)s)%(format)s" % field_et.attrib
            target_size += self._get_field_size(field_et)
        return python_format, target_size
################################################################################
# Unit Tests
#
from opus_core.tests import opus_unittest
from opus_core.store.storage import TestStorageInterface
import os
import xml.etree.cElementTree as ET
from shutil import rmtree
from tempfile import mkdtemp
class TestFixedFieldStorageBase(object):
    # Shared fixtures and checks for both format-location variants.
    # Field layout: strs is 5 chars ('%5s'), flts 6 ('%6.2f'), ints 5
    # ('% 05i') -- 16-character data lines in total.
    format_xml = '''
        <fixed_field>
            <field name="strs" format="5s" />
            <field name="flts" format="6.2f" />
            <field name="ints" format=" 05i"/>
        </fixed_field>
        '''
    format_list = [['strs','5s'], ['flts','6.2f'], ['ints',' 05i']]
    # Input includes a 'misc' column with no format entry; it is dropped on write.
    data_in = {
        'ints': array([1,2202,-303]),
        'strs': array(['one', 'two', 'three']),
        'flts': array([1.11,22.2,3.3333]),
        'misc': array([10,20,30])
    }
    # Expected round-trip result: floats rounded to 2 decimals, 'misc' gone.
    data_out = {
        'ints': array([1,2202,-303]),
        'strs': array(['one', 'two', 'three']),
        'flts': array([1.11,22.2,3.33])
    }
    # Exact on-disk bytes implied by the field widths above.
    data_text = '  one  1.11 0001\n  two 22.20 2202\nthree  3.33-0303\n'

    def setUp(self):
        # Fresh scratch directory per test.
        self.temp_dir = mkdtemp(prefix='opus_core_test_fixed_field_storage')

    def tearDown(self):
        if os.path.exists(self.temp_dir):
            rmtree(self.temp_dir)

    def fixed_field_read_setup(self):
        # Overridden by subclasses to lay the fixture files on disk.
        pass

    def fixed_field_read(self):
        actual = self.storage.load_table(table_name = 'foo')
        self.assertDictsEqual(actual, self.data_out)

    def fixed_field_get_column_names(self):
        actual = self.storage.get_column_names(table_name = 'foo')
        self.assertEqual(actual, ['strs','flts','ints'])

    def test_fixed_field_read(self):
        self.fixed_field_read_setup()
        self.fixed_field_read()

    def test_fixed_field_get_column_names(self):
        self.fixed_field_read_setup()
        self.fixed_field_get_column_names()
class TestFixedFieldStorageWithFormatFile(TestStorageInterface,TestFixedFieldStorageBase):
    # Variant where the format spec lives in a sidecar .fmt file.

    def setUp(self):
        TestFixedFieldStorageBase.setUp(self)
        self.storage = fixed_field_storage(
            storage_location=self.temp_dir,
            file_extension='dat',
            format_location=fixed_field_storage.Location.FILE)

    def tearDown(self):
        TestFixedFieldStorageBase.tearDown(self)

    def test_fixed_field_write(self):
        self.storage.write_table(
            table_name = 'foo',
            table_data = self.data_in,
            format = self.format_list)
        file = open(self.temp_dir+'/foo.dat', 'r')
        self.assertEqual(file.read(), self.data_text)
        file.close()
        # Do I need to test XML equality...? Or is that covered by testing if the file can be read back in?
        # For now, only assert the sidecar format file exists/opens.
        file = open(self.temp_dir+'/foo.fmt', 'r')
        file.close()

    def fixed_field_read_setup(self):
        # Lay both the sidecar format file and the data file on disk.
        format_file = open(self.temp_dir+'/foo.fmt', 'wb')
        format_file.write(self.format_xml)
        format_file.close()
        data_file = open(self.temp_dir+'/foo.dat', 'wb')
        data_file.write(self.data_text)
        data_file.close()
class TestFixedFieldStorageWithFormatHeader(TestStorageInterface,TestFixedFieldStorageBase):
    # Variant where the format spec is a commented first line of the data file.

    def setUp(self):
        TestFixedFieldStorageBase.setUp(self)
        self.storage = fixed_field_storage(
            storage_location=self.temp_dir,
            file_extension='dat',
            format_location=fixed_field_storage.Location.HEADER)

    def tearDown(self):
        TestFixedFieldStorageBase.tearDown(self)

    def test_fixed_field_write(self):
        self.storage.write_table(
            table_name = 'foo',
            table_data = self.data_out,
            format = self.format_list)
        file = open(self.temp_dir+'/foo.dat', 'r')
        # First line must be the commented format header, then the data.
        self.assertEqual(file.read(2), '# ')
        file.readline()
        self.assertEqual(file.read(), self.data_text)
        file.close()

    def fixed_field_read_setup(self):
        # Write the format XML (collapsed to one commented line) plus the data.
        data_file = open(self.temp_dir+'/foo.dat', 'wb')
        data_file.write('# ' + self.format_xml.replace('\n','') + '\n')
        data_file.write(self.data_text)
        data_file.close()
if __name__ == '__main__':
    # Run this module's tests when executed as a script.
    opus_unittest.main()
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: ["master"]
pull_request:
# The branches below must be a subset of the branches above
branches: ["master"]
schedule:
- cron: "0 0 * * 1"
workflow_dispatch:
permissions:
contents: read
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [javascript, actions]
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0
with:
languages: ${{ matrix.language }}
config: |
paths-ignore:
- test
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
# - name: Autobuild
# uses: github/codeql-action/autobuild@3ab4101902695724f9365a384f86c1074d94e18c # v3.24.7
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
#    Modify them (or add more) to build your code. If your project requires a custom build, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0 | unknown | github | https://github.com/expressjs/express | .github/workflows/codeql.yml |
#!/usr/bin/env python
#
# (c) 2017 Apstra Inc, <community@apstra.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
Apstra AOS external inventory script
====================================
Ansible has a feature where instead of reading from /etc/ansible/hosts
as a text file, it can query external programs to obtain the list
of hosts, groups the hosts are in, and even variables to assign to each host.
To use this:
- copy this file over /etc/ansible/hosts and chmod +x the file.
- Copy both files (.py and .ini) in your preferred directory
More information about Ansible Dynamic Inventory here
http://unix.stackexchange.com/questions/205479/in-ansible-dynamic-inventory-json-can-i-render-hostvars-based-on-the-hostname
2 modes are currently supported: **device based** or **blueprint based**:
- For **Device based**, the list of device is taken from the global device list
the serial ID will be used as the inventory_hostname
- For **Blueprint based**, the list of device is taken from the given blueprint
the Node name will be used as the inventory_hostname
Input parameters can be provided either with the ini file or by using Environment Variables:
The following list of Environment Variables are supported: AOS_SERVER, AOS_PORT, AOS_USERNAME, AOS_PASSWORD, AOS_BLUEPRINT
The config file takes precedence over the Environment Variables
Tested with Apstra AOS 1.1
This script has been inspired by the cobbler.py inventory. thanks
Author: Damien Garros (@dgarros)
Version: 0.2.0
"""
import json
import os
import re
import sys
try:
import argparse
HAS_ARGPARSE = True
except ImportError:
HAS_ARGPARSE = False
try:
from apstra.aosom.session import Session
HAS_AOS_PYEZ = True
except ImportError:
HAS_AOS_PYEZ = False
from ansible.module_utils.six.moves import configparser
"""
##
Expected output format in Device mode
{
"Cumulus": {
"hosts": [
"52540073956E",
"52540022211A"
],
"vars": {}
},
"EOS": {
"hosts": [
"5254001CAFD8",
"525400DDDF72"
],
"vars": {}
},
"Generic Model": {
"hosts": [
"525400E5486D"
],
"vars": {}
},
"Ubuntu GNU/Linux": {
"hosts": [
"525400E5486D"
],
"vars": {}
},
"VX": {
"hosts": [
"52540073956E",
"52540022211A"
],
"vars": {}
},
"_meta": {
"hostvars": {
"5254001CAFD8": {
"agent_start_time": "2017-02-03T00:49:16.000000Z",
"ansible_ssh_host": "172.20.52.6",
"aos_hcl_model": "Arista_vEOS",
"aos_server": "",
"aos_version": "AOS_1.1.1_OB.5",
"comm_state": "on",
"device_start_time": "2017-02-03T00:47:58.454480Z",
"domain_name": "",
"error_message": "",
"fqdn": "localhost",
"hostname": "localhost",
"hw_model": "vEOS",
"hw_version": "",
"is_acknowledged": false,
"mgmt_ifname": "Management1",
"mgmt_ipaddr": "172.20.52.6",
"mgmt_macaddr": "52:54:00:1C:AF:D8",
"os_arch": "x86_64",
"os_family": "EOS",
"os_version": "4.16.6M",
"os_version_info": {
"build": "6M",
"major": "4",
"minor": "16"
},
"serial_number": "5254001CAFD8",
"state": "OOS-QUARANTINED",
"vendor": "Arista"
},
"52540022211A": {
"agent_start_time": "2017-02-03T00:45:22.000000Z",
"ansible_ssh_host": "172.20.52.7",
"aos_hcl_model": "Cumulus_VX",
"aos_server": "172.20.52.3",
"aos_version": "AOS_1.1.1_OB.5",
"comm_state": "on",
"device_start_time": "2017-02-03T00:45:11.019189Z",
"domain_name": "",
"error_message": "",
"fqdn": "cumulus",
"hostname": "cumulus",
"hw_model": "VX",
"hw_version": "",
"is_acknowledged": false,
"mgmt_ifname": "eth0",
"mgmt_ipaddr": "172.20.52.7",
"mgmt_macaddr": "52:54:00:22:21:1a",
"os_arch": "x86_64",
"os_family": "Cumulus",
"os_version": "3.1.1",
"os_version_info": {
"build": "1",
"major": "3",
"minor": "1"
},
"serial_number": "52540022211A",
"state": "OOS-QUARANTINED",
"vendor": "Cumulus"
},
"52540073956E": {
"agent_start_time": "2017-02-03T00:45:19.000000Z",
"ansible_ssh_host": "172.20.52.8",
"aos_hcl_model": "Cumulus_VX",
"aos_server": "172.20.52.3",
"aos_version": "AOS_1.1.1_OB.5",
"comm_state": "on",
"device_start_time": "2017-02-03T00:45:11.030113Z",
"domain_name": "",
"error_message": "",
"fqdn": "cumulus",
"hostname": "cumulus",
"hw_model": "VX",
"hw_version": "",
"is_acknowledged": false,
"mgmt_ifname": "eth0",
"mgmt_ipaddr": "172.20.52.8",
"mgmt_macaddr": "52:54:00:73:95:6e",
"os_arch": "x86_64",
"os_family": "Cumulus",
"os_version": "3.1.1",
"os_version_info": {
"build": "1",
"major": "3",
"minor": "1"
},
"serial_number": "52540073956E",
"state": "OOS-QUARANTINED",
"vendor": "Cumulus"
},
"525400DDDF72": {
"agent_start_time": "2017-02-03T00:49:07.000000Z",
"ansible_ssh_host": "172.20.52.5",
"aos_hcl_model": "Arista_vEOS",
"aos_server": "",
"aos_version": "AOS_1.1.1_OB.5",
"comm_state": "on",
"device_start_time": "2017-02-03T00:47:46.929921Z",
"domain_name": "",
"error_message": "",
"fqdn": "localhost",
"hostname": "localhost",
"hw_model": "vEOS",
"hw_version": "",
"is_acknowledged": false,
"mgmt_ifname": "Management1",
"mgmt_ipaddr": "172.20.52.5",
"mgmt_macaddr": "52:54:00:DD:DF:72",
"os_arch": "x86_64",
"os_family": "EOS",
"os_version": "4.16.6M",
"os_version_info": {
"build": "6M",
"major": "4",
"minor": "16"
},
"serial_number": "525400DDDF72",
"state": "OOS-QUARANTINED",
"vendor": "Arista"
},
"525400E5486D": {
"agent_start_time": "2017-02-02T18:44:42.000000Z",
"ansible_ssh_host": "172.20.52.4",
"aos_hcl_model": "Generic_Server_1RU_1x10G",
"aos_server": "172.20.52.3",
"aos_version": "AOS_1.1.1_OB.5",
"comm_state": "on",
"device_start_time": "2017-02-02T21:11:25.188734Z",
"domain_name": "",
"error_message": "",
"fqdn": "localhost",
"hostname": "localhost",
"hw_model": "Generic Model",
"hw_version": "pc-i440fx-trusty",
"is_acknowledged": false,
"mgmt_ifname": "eth0",
"mgmt_ipaddr": "172.20.52.4",
"mgmt_macaddr": "52:54:00:e5:48:6d",
"os_arch": "x86_64",
"os_family": "Ubuntu GNU/Linux",
"os_version": "14.04 LTS",
"os_version_info": {
"build": "",
"major": "14",
"minor": "04"
},
"serial_number": "525400E5486D",
"state": "OOS-QUARANTINED",
"vendor": "Generic Manufacturer"
}
}
},
"all": {
"hosts": [
"5254001CAFD8",
"52540073956E",
"525400DDDF72",
"525400E5486D",
"52540022211A"
],
"vars": {}
},
"vEOS": {
"hosts": [
"5254001CAFD8",
"525400DDDF72"
],
"vars": {}
}
}
"""
def fail(msg):
    """Write *msg* to stderr and terminate the script with exit status 1."""
    sys.stderr.write("{0}\n".format(msg))
    sys.exit(1)
class AosInventory(object):
    """Build an Ansible dynamic inventory from an Apstra AOS server.

    The inventory is accumulated in ``self.inventory`` (the standard
    dynamic-inventory dict, including ``_meta.hostvars``) and printed as
    JSON.  Two modes are supported:

    * Device mode (default): every device known to AOS, keyed by serial ID.
    * Blueprint mode (``blueprint`` option set): the devices of a single
      blueprint, keyed by node name.
    """

    def __init__(self):
        """ Main execution path """

        if not HAS_AOS_PYEZ:
            raise Exception('aos-pyez is not installed. Please see details here: https://github.com/Apstra/aos-pyez')
        if not HAS_ARGPARSE:
            raise Exception('argparse is not installed. Please install the argparse library or upgrade to python-2.7')

        # Inventory grouped by group name; '_meta.hostvars' holds per-host vars.
        self.inventory = dict()
        self.inventory['_meta'] = dict()
        self.inventory['_meta']['hostvars'] = dict()

        # Read settings and parse CLI arguments
        self.read_settings()
        self.parse_cli_args()

        # ----------------------------------------------------
        # Open session to AOS
        # ----------------------------------------------------
        aos = Session(server=self.aos_server,
                      port=self.aos_server_port,
                      user=self.aos_username,
                      passwd=self.aos_password)
        aos.login()

        # Save session information in variables of group all
        self.add_var_to_group('all', 'aos_session', aos.session)

        # Add the AOS server itself in the inventory
        self.add_host_to_group("all", 'aos')
        self.add_var_to_host("aos", "ansible_ssh_host", self.aos_server)
        self.add_var_to_host("aos", "ansible_ssh_pass", self.aos_password)
        self.add_var_to_host("aos", "ansible_ssh_user", self.aos_username)

        # ----------------------------------------------------
        # Build the inventory
        #  2 modes are supported: device based or blueprint based
        #  - For device based, the list of devices is taken from the global
        #    device list; the serial ID is used as the inventory_hostname
        #  - For Blueprint based, the list of devices is taken from the given
        #    blueprint; the Node name is used as the inventory_hostname
        # ----------------------------------------------------
        if self.aos_blueprint:
            self._build_from_blueprint(aos)
        else:
            self._build_from_devices(aos)

        # ----------------------------------------------------
        # Convert the inventory and return a JSON String
        # ----------------------------------------------------
        print(self.json_format_dict(self.inventory, True))

    def _build_from_blueprint(self, aos):
        """Populate the inventory from a single blueprint (node names as hostnames)."""
        bp = aos.Blueprints[self.aos_blueprint]
        if bp.exists is False:
            fail("Unable to find the Blueprint: %s" % self.aos_blueprint)

        for dev_name, dev_id in bp.params['devices'].value.items():

            self.add_host_to_group('all', dev_name)
            device = aos.Devices.find(uid=dev_id)

            if 'facts' in device.value:
                self.add_device_facts_to_var(dev_name, device)

            # Define admin State and Status
            if 'user_config' in device.value:
                if 'admin_state' in device.value['user_config']:
                    self.add_var_to_host(dev_name, 'admin_state',
                                         device.value['user_config']['admin_state'])

            self.add_device_status_to_var(dev_name, device)

            # Go over the contents data structure to find this node's role
            # and optional attributes.
            for node in bp.contents['system']['nodes']:
                if node['display_name'] == dev_name:
                    self.add_host_to_group(node['role'], dev_name)

                    # Check for additional attributes to import
                    attributes_to_import = [
                        'loopback_ip',
                        'asn',
                        'role',
                        'position',
                    ]
                    for attr in attributes_to_import:
                        if attr in node:
                            self.add_var_to_host(dev_name, attr, node[attr])

            # If blueprint_interface is enabled in the configuration,
            # collect links information.
            if self.aos_blueprint_int:
                self.add_var_to_host(dev_name, 'interfaces',
                                     self._collect_interfaces(bp, dev_name))

    def _collect_interfaces(self, bp, dev_name):
        """Return {interface_name: {...}} for *dev_name* from the blueprint links."""
        interfaces = dict()

        for link in bp.contents['system']['links']:
            # Each link has 2 endpoints [0, 1], and it's unknown which one
            # matches this device: assume side 0 matches and 1 is the peer,
            # and swap if the first side did not match.
            peer_id = 1
            for side in link['endpoints']:
                if side['display_name'] == dev_name:
                    # Import local information first
                    int_name = side['interface']
                    interfaces[int_name] = dict()
                    if 'ip' in side:
                        interfaces[int_name]['ip'] = side['ip']
                    if 'interface' in side:
                        interfaces[int_name]['name'] = side['interface']

                    peer = link['endpoints'][peer_id]
                    if 'display_name' in peer:
                        interfaces[int_name]['peer'] = peer['display_name']
                    if 'ip' in peer:
                        interfaces[int_name]['peer_ip'] = peer['ip']
                    if 'type' in peer:
                        interfaces[int_name]['peer_type'] = peer['type']
                else:
                    # First side did not match: the peer must be side 0 on
                    # the second loop iteration.
                    peer_id = 0

        return interfaces

    def _build_from_devices(self, aos):
        """Populate the inventory from the global device list (serial IDs as hostnames)."""
        for device in aos.Devices:
            # If not reachable, the device is created by key; if reachable,
            # by hostname (device.name reflects what AOS exposes).
            self.add_host_to_group('all', device.name)

            # Populate information for this host
            self.add_device_status_to_var(device.name, device)

            if 'user_config' in device.value:
                for key, value in device.value['user_config'].items():
                    self.add_var_to_host(device.name, key, value)

            # Based on device status online|offline, collect facts as well
            if device.value['status']['comm_state'] == 'on':
                if 'facts' in device.value:
                    self.add_device_facts_to_var(device.name, device)

            # Check if the device is associated with a blueprint;
            # if it is, create a new group for that blueprint.
            if 'blueprint_active' in device.value['status']:
                if 'blueprint_id' in device.value['status']:
                    bp = aos.Blueprints.find(uid=device.value['status']['blueprint_id'])
                    if bp:
                        self.add_host_to_group(bp.name, device.name)

    def read_settings(self):
        """ Reads the settings from the apstra_aos.ini file """

        config = configparser.ConfigParser()
        config.read(os.path.dirname(os.path.realpath(__file__)) + '/apstra_aos.ini')

        # Default Values
        self.aos_blueprint = False
        self.aos_blueprint_int = True
        self.aos_username = 'admin'
        self.aos_password = 'admin'
        self.aos_server_port = 8888

        # Try to read each parameter from the file; if absent, fall back to
        # the environment variable.  The config file takes precedence.
        # Only configparser lookup errors are caught (the old bare `except:`
        # silently hid every failure).
        def _setting(option, env_var, attr):
            try:
                setattr(self, attr, config.get('aos', option))
            except (configparser.NoSectionError, configparser.NoOptionError):
                if env_var in os.environ:
                    setattr(self, attr, os.environ[env_var])

        _setting('aos_server', 'AOS_SERVER', 'aos_server')
        _setting('port', 'AOS_PORT', 'aos_server_port')
        _setting('username', 'AOS_USERNAME', 'aos_username')
        _setting('password', 'AOS_PASSWORD', 'aos_password')
        _setting('blueprint', 'AOS_BLUEPRINT', 'aos_blueprint')

        try:
            if config.get('aos', 'blueprint_interface') in ['false', 'no']:
                self.aos_blueprint_int = False
        except (configparser.NoSectionError, configparser.NoOptionError):
            pass

    def parse_cli_args(self):
        """ Command line argument processing """

        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Apstra AOS')
        parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)')
        parser.add_argument('--host', action='store', help='Get all the variables about a specific instance')
        self.args = parser.parse_args()

    def json_format_dict(self, data, pretty=False):
        """ Converts a dict to a JSON object and dumps it as a formatted string """

        if pretty:
            return json.dumps(data, sort_keys=True, indent=2)
        else:
            return json.dumps(data)

    def add_host_to_group(self, group, host):
        """Append *host* to *group*, initializing the group on first use."""
        clean_group = self.cleanup_group_name(group)

        if clean_group not in self.inventory:
            self.inventory[clean_group] = {'hosts': [], 'vars': {}}

        self.inventory[clean_group]['hosts'].append(host)

    def add_var_to_host(self, host, var, value):
        """Set host variable *var* = *value*, initializing hostvars on first use."""
        if host not in self.inventory['_meta']['hostvars']:
            self.inventory['_meta']['hostvars'][host] = {}

        self.inventory['_meta']['hostvars'][host][var] = value

    def add_var_to_group(self, group, var, value):
        """Set group variable *var* = *value*, initializing the group on first use."""
        clean_group = self.cleanup_group_name(group)

        if clean_group not in self.inventory:
            self.inventory[clean_group] = {'hosts': [], 'vars': {}}

        self.inventory[clean_group]['vars'][var] = value

    def add_device_facts_to_var(self, device_name, device):
        """Copy AOS device facts into host variables and add the host to
        groups named after its os_family and hw_model facts."""
        self.add_var_to_host(device_name,
                             'ansible_ssh_host',
                             device.value['facts']['mgmt_ipaddr'])

        self.add_var_to_host(device_name, 'id', device.id)

        for key, value in device.value['facts'].items():
            self.add_var_to_host(device_name, key, value)

            if key in ('os_family', 'hw_model'):
                self.add_host_to_group(value, device_name)

    def cleanup_group_name(self, group_name):
        """
        Clean up group name by:
        - Replacing all non-alphanumeric characters by underscore
        - Converting to lowercase
        """
        # Raw string avoids the invalid-escape-sequence warning for \W.
        return re.sub(r'\W+', '_', group_name).lower()

    def add_device_status_to_var(self, device_name, device):
        """Copy the device 'status' sub-dict into host variables of *device_name*."""
        if 'status' in device.value:
            for key, value in device.value['status'].items():
                # Bug fix: vars must be attached to the inventory hostname
                # passed in (dev_name in blueprint mode), not device.name —
                # the two can differ in blueprint mode.
                self.add_var_to_host(device_name, key, value)
# Run the script: constructing AosInventory performs the whole inventory
# build and prints the resulting JSON to stdout.
if __name__ == '__main__':
    AosInventory()
/*
* Copyright 2010-2024 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.fir.test.cases.generated.cases.components.expressionTypeProvider;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.util.KtTestUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.analysis.api.fir.test.configurators.AnalysisApiFirTestConfiguratorFactory;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfiguratorFactoryData;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfigurator;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.TestModuleKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.FrontendKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisSessionMode;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiMode;
import org.jetbrains.kotlin.analysis.api.impl.base.test.cases.components.expressionTypeProvider.AbstractHLExpressionTypeTest;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.regex.Pattern;
/** This class is generated by {@link org.jetbrains.kotlin.generators.tests.analysis.api.GenerateAnalysisApiTestsKt}. DO NOT MODIFY MANUALLY */
@SuppressWarnings("all")
@TestMetadata("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType")
@TestDataPath("$PROJECT_ROOT")
public class FirIdeDependentAnalysisScriptSourceModuleHLExpressionTypeTestGenerated extends AbstractHLExpressionTypeTest {
    /** Configures the suite: FIR frontend, script-source module kind, dependent analysis session, IDE mode. */
    @NotNull
    @Override
    public AnalysisApiTestConfigurator getConfigurator() {
        return AnalysisApiFirTestConfiguratorFactory.INSTANCE.createConfigurator(
            new AnalysisApiTestConfiguratorFactoryData(
                FrontendKind.Fir,
                TestModuleKind.ScriptSource,
                AnalysisSessionMode.Dependent,
                AnalysisApiMode.Ide
            )
        );
    }

    /** Checks every *.kts file under the test-data directory has a matching generated test method. */
    @Test
    public void testAllFilesPresentInExpressionType() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType"), Pattern.compile("^(.+)\\.kts$"), null, true);
    }

    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType/approximatedLocalClasses")
    @TestDataPath("$PROJECT_ROOT")
    public class ApproximatedLocalClasses {
        @Test
        public void testAllFilesPresentInApproximatedLocalClasses() {
            KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType/approximatedLocalClasses"), Pattern.compile("^(.+)\\.kts$"), null, true);
        }
    }

    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType/assignment")
    @TestDataPath("$PROJECT_ROOT")
    public class Assignment {
        @Test
        public void testAllFilesPresentInAssignment() {
            KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType/assignment"), Pattern.compile("^(.+)\\.kts$"), null, true);
        }
    }

    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType/nameReference")
    @TestDataPath("$PROJECT_ROOT")
    public class NameReference {
        @Test
        public void testAllFilesPresentInNameReference() {
            KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/expressionTypeProvider/expressionType/nameReference"), Pattern.compile("^(.+)\\.kts$"), null, true);
        }
    }
}
# frozen_string_literal: true
require "action_view/rendering"
require "active_support/core_ext/module/redefine_method"
module ActionView
# = Action View \Layouts
#
# Layouts reverse the common pattern of including shared headers and footers in many templates to isolate changes in
# repeated setups. The inclusion pattern has pages that look like this:
#
# <%= render "application/header" %>
# Hello World
# <%= render "application/footer" %>
#
# This approach is a decent way of keeping common structures isolated from the changing content, but it's verbose
# and if you ever want to change the structure of these two includes, you'll have to change all the templates.
#
# With layouts, you can flip it around and have the common structure know where to insert changing content. This means
# that the header and footer are only mentioned in one place, like this:
#
# // The header part of this layout
# <%= yield %>
# // The footer part of this layout
#
# And then you have content pages that look like this:
#
# hello world
#
# At rendering time, the content page is computed and then inserted in the layout, like this:
#
# // The header part of this layout
# hello world
# // The footer part of this layout
#
# == Accessing shared variables
#
# Layouts have access to variables specified in the content pages and vice versa. This allows you to have layouts with
# references that won't materialize before rendering time:
#
# <h1><%= @page_title %></h1>
# <%= yield %>
#
# ...and content pages that fulfill these references _at_ rendering time:
#
# <% @page_title = "Welcome" %>
# Off-world colonies offers you a chance to start a new life
#
# The result after rendering is:
#
# <h1>Welcome</h1>
# Off-world colonies offers you a chance to start a new life
#
# == Layout assignment
#
# You can either specify a layout declaratively (using the #layout class method) or give
# it the same name as your controller, and place it in <tt>app/views/layouts</tt>.
# If a subclass does not have a layout specified, it inherits its layout using normal Ruby inheritance.
#
# For instance, if you have PostsController and a template named <tt>app/views/layouts/posts.html.erb</tt>,
# that template will be used for all actions in PostsController and controllers inheriting
# from PostsController.
#
# If you use a module, for instance Weblog::PostsController, you will need a template named
# <tt>app/views/layouts/weblog/posts.html.erb</tt>.
#
# Since all your controllers inherit from ApplicationController, they will use
# <tt>app/views/layouts/application.html.erb</tt> if no other layout is specified
# or provided.
#
# == Inheritance Examples
#
# class BankController < ActionController::Base
# # bank.html.erb exists
#
# class ExchangeController < BankController
# # exchange.html.erb exists
#
# class CurrencyController < BankController
#
# class InformationController < BankController
# layout "information"
#
# class TellerController < InformationController
# # teller.html.erb exists
#
# class EmployeeController < InformationController
# # employee.html.erb exists
# layout nil
#
# class VaultController < BankController
# layout :access_level_layout
#
# class TillController < BankController
# layout false
#
# In these examples, we have three implicit lookup scenarios:
# * The +BankController+ uses the "bank" layout.
# * The +ExchangeController+ uses the "exchange" layout.
# * The +CurrencyController+ inherits the layout from BankController.
#
# However, when a layout is explicitly set, the explicitly set layout wins:
# * The +InformationController+ uses the "information" layout, explicitly set.
# * The +TellerController+ also uses the "information" layout, because the parent explicitly set it.
# * The +EmployeeController+ uses the "employee" layout, because it set the layout to +nil+, resetting the parent configuration.
# * The +VaultController+ chooses a layout dynamically by calling the <tt>access_level_layout</tt> method.
# * The +TillController+ does not use a layout at all.
#
# == Types of layouts
#
# Layouts are basically just regular templates, but the name of this template needs not be specified statically. Sometimes
# you want to alternate layouts depending on runtime information, such as whether someone is logged in or not. This can
# be done either by specifying a method reference as a symbol or using an inline method (as a proc).
#
# The method reference is the preferred approach to variable layouts and is used like this:
#
# class WeblogController < ActionController::Base
# layout :writers_and_readers
#
# def index
# # fetching posts
# end
#
# private
# def writers_and_readers
# logged_in? ? "writer_layout" : "reader_layout"
# end
# end
#
# Now when a new request for the index action is processed, the layout will vary depending on whether the person accessing
# is logged in or not.
#
# If you want to use an inline method, such as a proc, do something like this:
#
# class WeblogController < ActionController::Base
# layout proc { |controller| controller.logged_in? ? "writer_layout" : "reader_layout" }
# end
#
# If an argument isn't given to the proc, it's evaluated in the context of
# the current controller anyway.
#
# class WeblogController < ActionController::Base
# layout proc { logged_in? ? "writer_layout" : "reader_layout" }
# end
#
# Of course, the most common way of specifying a layout is still just as a plain template name:
#
# class WeblogController < ActionController::Base
# layout "weblog_standard"
# end
#
  # The template will always be looked up in the <tt>app/views/layouts/</tt> folder, but you can also point
  # at the <tt>layouts</tt> folder directly: <tt>layout "layouts/demo"</tt> is the same as <tt>layout "demo"</tt>.
#
# Setting the layout to +nil+ forces it to be looked up in the filesystem and falls back to the parent behavior if none exists.
# Setting it to +nil+ is useful to re-enable template lookup overriding a previous configuration set in the parent:
#
# class ApplicationController < ActionController::Base
# layout "application"
# end
#
# class PostsController < ApplicationController
# # Will use "application" layout
# end
#
# class CommentsController < ApplicationController
# # Will search for "comments" layout and fall back to "application" layout
# layout nil
# end
#
# == Conditional layouts
#
# If you have a layout that by default is applied to all the actions of a controller, you still have the option of rendering
# a given action or set of actions without a layout, or restricting a layout to only a single action or a set of actions. The
# <tt>:only</tt> and <tt>:except</tt> options can be passed to the layout call. For example:
#
# class WeblogController < ActionController::Base
# layout "weblog_standard", except: :rss
#
# # ...
#
# end
#
# This will assign "weblog_standard" as the WeblogController's layout for all actions except for the +rss+ action, which will
# be rendered directly, without wrapping a layout around the rendered view.
#
# Both the <tt>:only</tt> and <tt>:except</tt> condition can accept an arbitrary number of method references, so
# <tt>except: [ :rss, :text_only ]</tt> is valid, as is <tt>except: :rss</tt>.
#
# == Using a different layout in the action render call
#
# If most of your actions use the same layout, it makes perfect sense to define a controller-wide layout as described above.
# Sometimes you'll have exceptions where one action wants to use a different layout than the rest of the controller.
# You can do this by passing a <tt>:layout</tt> option to the <tt>render</tt> call. For example:
#
# class WeblogController < ActionController::Base
# layout "weblog_standard"
#
# def help
# render action: "help", layout: "help"
# end
# end
#
# This will override the controller-wide "weblog_standard" layout, and will render the help action with the "help" layout instead.
module Layouts
extend ActiveSupport::Concern
include ActionView::Rendering
included do
  # Class-level layout configuration, inherited by subclasses.
  class_attribute :_layout, instance_accessor: false
  # :only/:except conditions restricting when the layout applies.
  class_attribute :_layout_conditions, instance_accessor: false, instance_reader: true, default: {}

  _write_layout_method
end
module ClassMethods
def inherited(klass) # :nodoc:
  super
  # Regenerate the subclass's _layout method so implicit layout lookup is
  # based on the subclass's own name.
  klass._write_layout_method
end
# This module is mixed in if layout conditions are provided. This means
# that if no layout conditions are used, this method is not used
module LayoutConditions # :nodoc:
private
# Determines whether the current action has a layout definition by
# checking the action name against the :only and :except conditions
# set by the <tt>layout</tt> method.
#
# ==== Returns
# * <tt>Boolean</tt> - True if the action has a layout definition, false otherwise.
def _conditional_layout?
return unless super
conditions = _layout_conditions
if only = conditions[:only]
only.include?(action_name)
elsif except = conditions[:except]
!except.include?(action_name)
else
true
end
end
end
# Specify the layout to use for this class.
#
# If the specified layout is a:
# String:: the String is the template name
# Symbol:: call the method specified by the symbol
# Proc:: call the passed Proc
# false:: There is no layout
# true:: raise an ArgumentError
# nil:: Force default layout behavior with inheritance
#
# Return value of +Proc+ and +Symbol+ arguments should be +String+, +false+, +true+, or +nil+
# with the same meaning as described above.
#
# ==== Parameters
#
# * <tt>layout</tt> - The layout to use.
#
# ==== Options (conditions)
#
# * +:only+ - A list of actions to apply this layout to.
# * +:except+ - Apply this layout to all actions but this one.
def layout(layout, conditions = {})
include LayoutConditions unless conditions.empty?
conditions.each { |k, v| conditions[k] = Array(v).map(&:to_s) }
self._layout_conditions = conditions
self._layout = layout
_write_layout_method
end
# Creates a _layout method to be called by _default_layout .
#
# If a layout is not explicitly mentioned then look for a layout with the controller's name.
# if nothing is found then try same procedure to find super class's layout.
def _write_layout_method # :nodoc:
silence_redefinition_of_method(:_layout)
prefixes = /\blayouts/.match?(_implied_layout_name) ? [] : ["layouts"]
default_behavior = "lookup_context.find_all('#{_implied_layout_name}', #{prefixes.inspect}, false, keys, { formats: formats }).first || super"
name_clause = if name
default_behavior
else
<<-RUBY
super
RUBY
end
layout_definition = \
case _layout
when String
_layout.inspect
when Symbol
<<-RUBY
#{_layout}.tap do |layout|
return #{default_behavior} if layout.nil?
unless layout.is_a?(String) || !layout
raise ArgumentError, "Your layout method :#{_layout} returned \#{layout}. It " \
"should have returned a String, false, or nil"
end
end
RUBY
when Proc
define_method :_layout_from_proc, &_layout
private :_layout_from_proc
<<-RUBY
result = _layout_from_proc(#{_layout.arity == 0 ? '' : 'self'})
return #{default_behavior} if result.nil?
result
RUBY
when false
nil
when true
raise ArgumentError, "Layouts must be specified as a String, Symbol, Proc, false, or nil"
when nil
name_clause
end
class_eval <<-RUBY, __FILE__, __LINE__ + 1
# frozen_string_literal: true
def _layout(lookup_context, formats, keys)
if _conditional_layout?
#{layout_definition}
else
#{name_clause}
end
end
private :_layout
RUBY
end
private
# If no layout is supplied, look for a template named the return
# value of this method.
#
# ==== Returns
# * <tt>String</tt> - A template name
def _implied_layout_name
controller_path
end
end
def _process_render_template_options(options) # :nodoc:
super
if _include_layout?(options)
layout = options.delete(:layout) { :default }
options[:layout] = _layout_for_option(layout)
end
end
attr_internal_writer :action_has_layout
def initialize(*) # :nodoc:
@_action_has_layout = true
super
end
# Controls whether an action should be rendered using a layout.
# If you want to disable any <tt>layout</tt> settings for the
# current action so that it is rendered without a layout then
# either override this method in your controller to return false
# for that action or set the <tt>action_has_layout</tt> attribute
# to false before rendering.
def action_has_layout?
@_action_has_layout
end
private
def _conditional_layout?
true
end
# This will be overwritten by _write_layout_method
def _layout(*); end
# Determine the layout for a given name, taking into account the name type.
#
# ==== Parameters
# * <tt>name</tt> - The name of the template
def _layout_for_option(name)
case name
when String then _normalize_layout(name)
when Proc then name
when true then Proc.new { |lookup_context, formats, keys| _default_layout(lookup_context, formats, keys, true) }
when :default then Proc.new { |lookup_context, formats, keys| _default_layout(lookup_context, formats, keys, false) }
when false, nil then nil
else
raise ArgumentError,
"String, Proc, :default, true, or false, expected for `layout'; you passed #{name.inspect}"
end
end
def _normalize_layout(value)
value.is_a?(String) && !value.match?(/\blayouts/) ? "layouts/#{value}" : value
end
# Returns the default layout for this controller.
# Optionally raises an exception if the layout could not be found.
#
# ==== Parameters
# * <tt>formats</tt> - The formats accepted to this layout
# * <tt>require_layout</tt> - If set to +true+ and layout is not found,
# an +ArgumentError+ exception is raised (defaults to +false+)
#
# ==== Returns
# * <tt>template</tt> - The template object for the default layout (or +nil+)
def _default_layout(lookup_context, formats, keys, require_layout = false)
begin
value = _layout(lookup_context, formats, keys) if action_has_layout?
rescue NameError => e
raise e, "Could not render layout: #{e.message}"
end
if require_layout && action_has_layout? && !value
raise ArgumentError,
"There was no default layout for #{self.class} in #{view_paths.inspect}"
end
_normalize_layout(value)
end
def _include_layout?(options)
!options.keys.intersect?([:body, :plain, :html, :inline, :partial]) || options.key?(:layout)
end
end
end | ruby | github | https://github.com/rails/rails | actionview/lib/action_view/layouts.rb |
# SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
%YAML 1.2
---
$id: http://devicetree.org/schemas/clock/qcom,spmi-clkdiv.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Qualcomm SPMI PMIC clock divider
maintainers:
- Bjorn Andersson <andersson@kernel.org>
- Stephen Boyd <sboyd@kernel.org>
description: |
Qualcomm SPMI PMIC clock divider configures the clock frequency of a set of
outputs on the PMIC. These clocks are typically wired through alternate
functions on GPIO pins.
properties:
compatible:
const: qcom,spmi-clkdiv
reg:
maxItems: 1
clocks:
items:
- description: Board XO source
clock-names:
items:
- const: xo
"#clock-cells":
const: 1
qcom,num-clkdivs:
$ref: /schemas/types.yaml#/definitions/uint32
description: Number of CLKDIV peripherals.
required:
- compatible
- reg
- clocks
- clock-names
- "#clock-cells"
- qcom,num-clkdivs
additionalProperties: false
examples:
- |
pmic {
#address-cells = <1>;
#size-cells = <0>;
clock-controller@5b00 {
compatible = "qcom,spmi-clkdiv";
reg = <0x5b00>;
clocks = <&xo_board>;
clock-names = "xo";
#clock-cells = <1>;
qcom,num-clkdivs = <3>;
assigned-clocks = <&pm8998_clk_divs 1>,
<&pm8998_clk_divs 2>,
<&pm8998_clk_divs 3>;
assigned-clock-rates = <9600000>,
<9600000>,
<9600000>;
};
}; | unknown | github | https://github.com/torvalds/linux | Documentation/devicetree/bindings/clock/qcom,spmi-clkdiv.yaml |
from concurrent.futures import Future
from unittest import mock
from taiga_ncurses.ui import signals, views
from taiga_ncurses import controllers
from taiga_ncurses.config import settings
from taiga_ncurses.executor import Executor
from taiga_ncurses.core import StateMachine
from tests import factories
def test_backlog_controller_show_the_help_popup():
    """Pressing the help key attaches a help popup to the backlog view."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    assert not hasattr(controller.view.backlog, "help_popup")
    controller.handle(settings.data.backlog.keys.help)
    assert hasattr(controller.view.backlog, "help_popup")
def test_backlog_controller_close_the_help_popup():
    """Clicking the close button removes the help popup from the view."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.help)
    assert hasattr(controller.view.backlog, "help_popup")

    signals.emit(controller.view.backlog.help_popup.close_button, "click")
    assert not hasattr(controller.view.backlog, "help_popup")
def test_backlog_controller_reload():
    """The reload key triggers exactly one stats and one user-stories fetch."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    executor = factories.patched_executor()
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    # Forget the calls made while the controller was constructed.
    executor.project_stats.reset_mock()
    executor.unassigned_user_stories.reset_mock()
    assert executor.project_stats.call_count == 0
    assert executor.unassigned_user_stories.call_count == 0

    controller.handle(settings.data.backlog.keys.reload)
    assert executor.project_stats.call_count == 1
    assert executor.unassigned_user_stories.call_count == 1
def test_backlog_controller_show_the_new_user_story_form():
    """The create key opens the new-user-story form."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    assert not hasattr(controller.view.backlog, "user_story_form")
    controller.handle(settings.data.backlog.keys.create)
    assert hasattr(controller.view.backlog, "user_story_form")
def test_backlog_controller_cancel_the_new_user_story_form():
    """Cancelling the new-user-story form removes it from the view."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.create)
    assert hasattr(controller.view.backlog, "user_story_form")

    signals.emit(controller.view.backlog.user_story_form.cancel_button, "click")
    assert not hasattr(controller.view.backlog, "user_story_form")
def test_backlog_controller_submit_new_user_story_form_with_errors():
    """Saving an empty creation form reports a validation error."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.create)
    signals.emit(controller.view.backlog.user_story_form.save_button, "click")

    assert view.backlog.notifier.error_msg.call_count == 1
def test_backlog_controller_submit_new_user_story_form_successfully():
    """A filled-in creation form creates the story and notifies success."""
    subject = "Create a new user story"
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor(create_user_story_response=factories.future(
        factories.successful_create_user_story_response(subject)))
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.create)
    form = controller.view.backlog.user_story_form
    view.backlog.notifier.reset_mock()
    form._subject_edit.set_edit_text(subject)
    signals.emit(form.save_button, "click")

    assert view.backlog.notifier.info_msg.call_count == 1
    assert executor.create_user_story.call_args.call_list()[0][0][0]["subject"] == subject
    assert executor.create_user_story.call_count == 1
    assert executor.create_user_story.return_value.result()["subject"] == subject
def test_backlog_controller_show_the_edit_user_story_form():
    """The edit key opens the form pre-bound to the focused user story."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    assert not hasattr(controller.view.backlog, "user_story_form")
    controller.handle(settings.data.backlog.keys.edit)
    assert hasattr(controller.view.backlog, "user_story_form")

    focused = controller.view.backlog.user_stories.widget.get_focus().user_story
    assert controller.view.backlog.user_story_form.user_story == focused
def test_backlog_controller_cancel_the_edit_user_story_form():
    """Cancelling the edit form removes it from the view."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.edit)
    assert hasattr(controller.view.backlog, "user_story_form")

    signals.emit(controller.view.backlog.user_story_form.cancel_button, "click")
    assert not hasattr(controller.view.backlog, "user_story_form")
def test_backlog_controller_submit_the_edit_user_story_form_with_errors():
    """Saving an edit form with a blank subject reports an error."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.edit)
    form = controller.view.backlog.user_story_form
    form._subject_edit.set_edit_text("")
    signals.emit(form.save_button, "click")

    assert view.backlog.notifier.error_msg.call_count == 1
def test_backlog_controller_submit_edit_user_story_form_successfully():
    """A valid edit form updates the right story and notifies success."""
    subject = "Update a user story"
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor(update_user_story_response=factories.future(
        factories.successful_update_user_story_response(subject)))
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.edit)
    form = controller.view.backlog.user_story_form
    view.backlog.notifier.reset_mock()
    form._subject_edit.set_edit_text(subject)
    signals.emit(form.save_button, "click")

    assert view.backlog.notifier.info_msg.call_count == 1
    assert (executor.update_user_story.call_args.call_list()[0][0][0]["id"] ==
            form.user_story["id"])
    assert executor.update_user_story.call_args.call_list()[0][0][1]["subject"] == subject
    assert executor.update_user_story.call_count == 1
    assert executor.update_user_story.return_value.result()["subject"] == subject
def test_backlog_controller_move_user_story_down():
    """Decreasing priority swaps the story with the one below it."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())
    view.backlog.notifier.reset_mock()

    first_before = controller.backlog.user_stories[0]
    second_before = controller.backlog.user_stories[1]
    controller.handle(settings.data.backlog.keys.decrease_priority)

    assert view.backlog.notifier.info_msg.call_count == 1
    # The two stories should have traded positions.
    assert controller.backlog.user_stories[0] == second_before
    assert controller.backlog.user_stories[1] == first_before
def test_backlog_controller_move_user_story_up():
    """Increasing priority swaps the story with the one above it."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    # Focus a story that has a neighbour above it.
    controller.view.backlog.user_stories.widget.contents.focus = 2
    view.backlog.notifier.reset_mock()

    first_before = controller.backlog.user_stories[0]
    second_before = controller.backlog.user_stories[1]
    controller.handle(settings.data.backlog.keys.increase_priority)

    assert view.backlog.notifier.info_msg.call_count == 1
    # The two stories should have traded positions.
    assert controller.backlog.user_stories[0] == second_before
    assert controller.backlog.user_stories[1] == first_before
def test_backlog_controller_update_user_stories_order_with_errors():
    """A failed order-update response surfaces as an error notification."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor(
        update_user_stories_order_response=factories.future(None))
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.update_order)
    assert view.backlog.notifier.error_msg.call_count == 1
def test_backlog_controller_update_user_stories_order_with_success():
    """A successful order-update surfaces as an info notification."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    view.backlog.notifier.reset_mock()
    controller.handle(settings.data.backlog.keys.update_order)
    assert view.backlog.notifier.info_msg.call_count == 1
def test_backlog_controller_delete_user_story_with_errors():
    """A failed delete reports an error but still targets the focused story."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor(
        delete_user_story_response=factories.future(None))
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.delete)

    assert view.backlog.notifier.error_msg.call_count == 1
    deleted_id = executor.delete_user_story.call_args.call_list()[0][0][0]["id"]
    assert deleted_id == controller.backlog.user_stories[0]["id"]
def test_backlog_controller_delete_user_story_with_success():
    """A successful delete notifies success for the focused story."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor()
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    view.backlog.notifier.reset_mock()
    controller.handle(settings.data.backlog.keys.delete)

    assert view.backlog.notifier.info_msg.call_count == 1
    deleted_id = executor.delete_user_story.call_args.call_list()[0][0][0]["id"]
    assert deleted_id == controller.backlog.user_stories[0]["id"]
def test_backlog_controller_show_the_milestone_selector_popup():
    """The move-to-milestone key opens the milestone selector popup."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    assert not hasattr(controller.view.backlog, "milestone_selector_popup")
    controller.handle(settings.data.backlog.keys.move_to_milestone)
    assert hasattr(controller.view.backlog, "milestone_selector_popup")
def test_backlog_controller_close_the_milestone_selector_popup():
    """Cancelling the milestone selector popup removes it from the view."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.move_to_milestone)
    assert hasattr(controller.view.backlog, "milestone_selector_popup")

    popup = controller.view.backlog.milestone_selector_popup
    signals.emit(popup.cancel_button, "click")
    assert not hasattr(controller.view.backlog, "milestone_selector_popup")
def test_backlog_controller_move_a_user_story_to_a_milestone():
    """Picking a milestone from the popup updates the story's milestone."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor()
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.move_to_milestone)
    popup = controller.view.backlog.milestone_selector_popup
    view.backlog.notifier.reset_mock()
    assert view.backlog.notifier.info_msg.call_count == 0
    assert executor.update_user_story.call_count == 0

    signals.emit(popup.options[2], "click")

    assert view.backlog.notifier.info_msg.call_count == 1
    assert executor.update_user_story.call_count == 1
    expected_id = popup.project["list_of_milestones"][-3]["id"]
    assert (executor.update_user_story.call_args.call_list()[0][0][1]["milestone"] ==
            expected_id)
def test_backlog_controller_change_user_story_status():
    """Changing the status combo triggers an update with the picked value."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor()
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.edit)
    view.backlog.notifier.reset_mock()

    row = controller.view.backlog.user_stories.widget.contents[1][0]
    combo = row.base_widget.widget.contents[5][0]  # column 5 holds the status combo
    item = combo.menu.get_item(0)  # item 0 is the "New" status
    combo.item_changed(item, True)

    assert view.backlog.notifier.info_msg.call_count == 1
    assert executor.update_user_story.call_args.call_list()[0][0][1]["status"] == item.value
    assert executor.update_user_story.call_count == 1
def test_backlog_controller_change_user_story_points():
    """Changing the points combo triggers an update with the picked value."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor()
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.edit)
    view.backlog.notifier.reset_mock()

    row = controller.view.backlog.user_stories.widget.contents[1][0]
    combo = row.base_widget.widget.contents[6][0]  # column 6 holds the points combo
    item = combo.menu.get_item(2)  # item 2 is the 1/2 value
    combo.item_changed(item, True)

    assert view.backlog.notifier.info_msg.call_count == 1
    sent_points = executor.update_user_story.call_args.call_list()[0][0][1]["points"]
    assert list(sent_points.values())[0] == item.value
    assert executor.update_user_story.call_count == 1
# BULK
def test_backlog_controller_show_the_new_user_stories_in_bulk_form():
    """The bulk-create key opens the bulk user-stories form."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    assert not hasattr(controller.view.backlog, "user_stories_in_bulk_form")
    controller.handle(settings.data.backlog.keys.create_in_bulk)
    assert hasattr(controller.view.backlog, "user_stories_in_bulk_form")
def test_backlog_controller_cancel_the_new_user_stories_in_bulk_form():
    """Cancelling the bulk form removes it from the view."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.create_in_bulk)
    assert hasattr(controller.view.backlog, "user_stories_in_bulk_form")

    form = controller.view.backlog.user_stories_in_bulk_form
    signals.emit(form.cancel_button, "click")
    assert not hasattr(controller.view.backlog, "user_stories_in_bulk_form")
def test_backlog_controller_submit_new_user_stories_in_bulk_form_with_errors():
    """Saving an empty bulk form reports a validation error."""
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    controller = controllers.projects.ProjectDetailController(
        view, factories.patched_executor(), mock.Mock())

    controller.handle(settings.data.backlog.keys.create_in_bulk)
    form = controller.view.backlog.user_stories_in_bulk_form
    signals.emit(form.save_button, "click")

    assert view.backlog.notifier.error_msg.call_count == 1
def test_backlog_controller_submit_new_user_stories_in_bulk_form_successfully():
    """A filled-in bulk form creates the stories and notifies success."""
    subjects = "Create a new user story 1\nCreate a new user story 2"
    proj = factories.project()
    view = views.projects.ProjectDetailView(proj)
    view.backlog.notifier = mock.Mock()
    executor = factories.patched_executor()
    controller = controllers.projects.ProjectDetailController(
        view, executor, mock.Mock())

    controller.handle(settings.data.backlog.keys.create_in_bulk)
    form = controller.view.backlog.user_stories_in_bulk_form
    view.backlog.notifier.reset_mock()
    form._subjects_edit.set_edit_text(subjects)
    signals.emit(form.save_button, "click")

    assert view.backlog.notifier.info_msg.call_count == 1
    assert (executor.create_user_stories_in_bulk.call_args.call_list()[0][0][0]["bulkStories"] ==
            subjects)
    assert executor.create_user_stories_in_bulk.call_count == 1
    assert executor.create_user_stories_in_bulk.return_value.result()
"""Create a plot of SMOS data for either 0 or 12z"""
import sys
import datetime
import numpy as np
from pandas.io.sql import read_sql
from pyiem.plot import get_cmap
from pyiem.plot.geoplot import MapPlot
from pyiem.util import get_dbconn, logger, utc
# Module-level logger shared by makeplot()/main().
LOG = logger()
def _plot_variable(df, ts, routes, column, clevs, cmap_name, title, units):
    """Render one SMOS variable for both the midwest and iowa sectors.

    Args:
        df (DataFrame): gridded observations with lon/lat and data columns.
        ts (datetime): nominal valid time, used for titles and filenames.
        routes (str): pqinsert routing flags.
        column (str): dataframe column (and filename token), "sm" or "od".
        clevs (ndarray): contour levels for the hexbin plot.
        cmap_name (str): matplotlib colormap name.
        units (str or None): units label for the colorbar; omitted when None.
    """
    for sector in ["midwest", "iowa"]:
        mp = MapPlot(
            sector=sector,
            axisbg="white",
            title=title,
            subtitle="Satelite passes around %s UTC"
            % (ts.strftime("%d %B %Y %H"),),
        )
        if sector == "iowa":
            mp.drawcounties()
        cmap = get_cmap(cmap_name)
        # Missing values are encoded as -1 by the SQL, so paint below-scale
        # values grey and anything above the scale black.
        cmap.set_under("#EEEEEE")
        cmap.set_over("k")
        kwargs = {"cmap": cmap}
        if units is not None:
            kwargs["units"] = units
        mp.hexbin(df["lon"].values, df["lat"].values, df[column], clevs, **kwargs)
        pqstr = "plot %s %s00 smos_%s_%s%s.png smos_%s_%s%s.png png" % (
            routes,
            ts.strftime("%Y%m%d%H"),
            sector,
            column,
            ts.strftime("%H"),
            sector,
            column,
            ts.strftime("%H"),
        )
        mp.postprocess(pqstr=pqstr)
        mp.close()


def makeplot(ts, routes="ac"):
    """Generate soil-moisture and optical-depth maps for a given time GMT.

    Observations within +/- 6 hours of `ts` are averaged per grid cell, then
    plotted (soil moisture and optical depth, midwest and iowa sectors each).

    Args:
        ts (datetime): valid time of interest.
        routes (str): pqinsert routing flags (default "ac").
    """
    pgconn = get_dbconn("smos", user="nobody")
    df = read_sql(
        """
    WITH obs as (
        SELECT grid_idx, avg(soil_moisture) * 100. as sm,
        avg(optical_depth) as od from data where valid BETWEEN %s and %s
        GROUP by grid_idx)

    SELECT ST_x(geom) as lon, ST_y(geom) as lat,
    CASE WHEN sm is Null THEN -1 ELSE sm END as sm,
    CASE WHEN od is Null THEN -1 ELSE od END as od
    from obs o JOIN grid g ON (o.grid_idx = g.idx)
    """,
        pgconn,
        params=(
            ts - datetime.timedelta(hours=6),
            ts + datetime.timedelta(hours=6),
        ),
        index_col=None,
    )
    if df.empty:
        LOG.info(
            "Did not find SMOS data for: %s-%s",
            ts - datetime.timedelta(hours=6),
            ts + datetime.timedelta(hours=6),
        )
        return
    _plot_variable(
        df, ts, routes, "sm", np.arange(0, 71, 5), "jet_r",
        "SMOS Satellite: Soil Moisture (0-5cm)", "%",
    )
    _plot_variable(
        df, ts, routes, "od", np.arange(0, 1.001, 0.05), "jet",
        "SMOS Satellite: Land Cover Optical Depth (microwave L-band)", None,
    )
def main(argv):
    """Go Main Go"""
    if len(argv) == 2:
        # Scheduled mode: argv[1] is the synoptic hour (0 or 12).
        hour = int(argv[1])
        if hour == 12:
            # 12z runs cover the previous UTC day.
            valid = utc() - datetime.timedelta(days=1)
            valid = valid.replace(hour=12, minute=0, second=0, microsecond=0)
        else:
            valid = utc().replace(hour=0, minute=0, second=0, microsecond=0)
        makeplot(valid)
        # Re-run for 1 and then 6 days back (cumulative offsets).
        for step in (1, 5):
            valid -= datetime.timedelta(days=step)
            makeplot(valid, "a")
    else:
        # Explicit mode: argv carries year, month, day, hour.
        valid = utc(int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4]))
        makeplot(valid, "a")


if __name__ == "__main__":
    main(sys.argv)
---
- hosts: testhost
gather_facts: False
vars:
data:
- one
- two
tasks:
- debug:
msg: '{{ lookup("list", data) }}'
- debug:
msg: '{{ item }}'
with_list: '{{ data }}' | unknown | github | https://github.com/ansible/ansible | test/integration/targets/plugin_filtering/lookup.yml |
#!/usr/bin/python
# coding=utf-8
from __future__ import print_function
from subprocess import check_output, call
import urllib2
import json
import sys
if __name__ == '__main__':
    """Run a bunch of boilerplate commands to sync your local clone to its
    parent github repo.
    """
    print("Starting sync...", "\n")

    # git commands used throughout the sync
    CURRENT_REPO_CMD = ['git', 'config', '--get', 'remote.origin.url']
    ADD_REMOTE_CMD = ['git', 'remote', 'add', 'upstream']
    CHECK_REMOTES_CMD = ['git', 'remote', '-v']
    FETCH_UPSTREAM_CMD = ['git', 'fetch', 'upstream']
    CHECKOUT_MASTER_CMD = ['git', 'checkout', 'master']
    MERGE_UPSTREAM_CMD = ['git', 'merge', 'upstream/master']

    try:
        repo_url = check_output(CURRENT_REPO_CMD)
        print("Getting repo's url...")
        print("Syncing repo:", repo_url)

        # origin may be an https URL (".../github.com/user/repo") or an
        # ssh one ("git@github.com:user/repo") -- split on whichever applies.
        if "github.com:" not in repo_url:
            url_segments = repo_url.split("github.com/")
        else:
            url_segments = repo_url.split("github.com:")
        path = url_segments[1]
        user, repo = path.split("/")
        repo = repo.split(".git")
        repo = repo[0]

        # Ask the GitHub API for the fork's parent repository.
        print("Checking the fork's parent url...", "\n")
        url = "https://api.github.com/repos/{}/{}".format(user, repo)
        req = urllib2.urlopen(url)
        res = json.load(req)
        parent_url = res['parent']['git_url']
        print("Will add remote to parent repo:", parent_url, "\n")

        ADD_REMOTE_CMD.append(parent_url)
        print(ADD_REMOTE_CMD)
        call(ADD_REMOTE_CMD)
        print("")

        print("Checking remotes...", "\n")
        call(CHECK_REMOTES_CMD)
        print("")

        print("Fetching upstream...", "\n")
        call(FETCH_UPSTREAM_CMD)
        print("")

        print("Merging upstream and master", "\n")
        check_output(CHECKOUT_MASTER_CMD)
        call(MERGE_UPSTREAM_CMD)
        print("Syncing done.")
    except Exception as e:
        # Map the common failure modes to friendlier messages.
        e_type = sys.exc_info()[0].__name__
        print("The following error happened:", e, "\n")
        if (e_type == 'CalledProcessError' and
                hasattr(e, 'cmd') and
                e.cmd == CURRENT_REPO_CMD):
            print("Are you sure you are on the git repo folder?", "\n")
        elif (e_type == 'IndexError' and
                e.message == 'list index out of range'):
            print("Sorry, couldn't get the user and repo names from the Git config.", "\n")
        elif (e_type == 'KeyError' and
                e.message == 'parent'):
            print("Are you sure the repo is a fork?")
        elif (e_type == 'CalledProcessError' and
                (e.cmd == MERGE_UPSTREAM_CMD or e.cmd == CHECKOUT_MASTER_CMD)):
            print("Didn't merge. Reason:", e.output)
        print("Game Over.")
#ifndef NPY_SIMD
#error "Not a standalone header"
#endif
#ifndef _NPY_SIMD_SSE_OPERATORS_H
#define _NPY_SIMD_SSE_OPERATORS_H
/***************************
* Shifting
***************************/
// left
// left
// Variable-count shifts: the _mm_sll_*/_mm_srl_*/_mm_sra_* intrinsics take
// the count in the low 64 bits of an XMM register, hence _mm_cvtsi32_si128.
// Left shift is the same operation for signed and unsigned lanes.
#define npyv_shl_u16(A, C) _mm_sll_epi16(A, _mm_cvtsi32_si128(C))
#define npyv_shl_s16(A, C) _mm_sll_epi16(A, _mm_cvtsi32_si128(C))
#define npyv_shl_u32(A, C) _mm_sll_epi32(A, _mm_cvtsi32_si128(C))
#define npyv_shl_s32(A, C) _mm_sll_epi32(A, _mm_cvtsi32_si128(C))
#define npyv_shl_u64(A, C) _mm_sll_epi64(A, _mm_cvtsi32_si128(C))
#define npyv_shl_s64(A, C) _mm_sll_epi64(A, _mm_cvtsi32_si128(C))
// left by an immediate constant
#define npyv_shli_u16 _mm_slli_epi16
#define npyv_shli_s16 _mm_slli_epi16
#define npyv_shli_u32 _mm_slli_epi32
#define npyv_shli_s32 _mm_slli_epi32
#define npyv_shli_u64 _mm_slli_epi64
#define npyv_shli_s64 _mm_slli_epi64
// right
// Unsigned lanes use a logical shift (srl), signed lanes an arithmetic
// shift (sra). There is no 64-bit sra intrinsic; see npyv_shr_s64 below.
#define npyv_shr_u16(A, C) _mm_srl_epi16(A, _mm_cvtsi32_si128(C))
#define npyv_shr_s16(A, C) _mm_sra_epi16(A, _mm_cvtsi32_si128(C))
#define npyv_shr_u32(A, C) _mm_srl_epi32(A, _mm_cvtsi32_si128(C))
#define npyv_shr_s32(A, C) _mm_sra_epi32(A, _mm_cvtsi32_si128(C))
#define npyv_shr_u64(A, C) _mm_srl_epi64(A, _mm_cvtsi32_si128(C))
// Arithmetic right shift for signed 64-bit lanes.
// SSE has no 64-bit arithmetic shift intrinsic, so it is emulated with a
// bias trick: adding 2^63 flips the sign bit into a plain data bit, a
// logical shift then moves it, and subtracting the shifted bias
// (2^63 >> c) reinstates the sign extension in the upper bits.
NPY_FINLINE __m128i npyv_shr_s64(__m128i a, int c)
{
    const __m128i sbit = npyv_setall_s64(0x8000000000000000);
    const __m128i cv = _mm_cvtsi32_si128(c);
    __m128i r = _mm_srl_epi64(_mm_add_epi64(a, sbit), cv);
    return _mm_sub_epi64(r, _mm_srl_epi64(sbit, cv));
}
// Right by an immediate constant
// (npyv_shri_s64 has no immediate form in SSE; it reuses the emulated
//  variable-count helper above)
#define npyv_shri_u16 _mm_srli_epi16
#define npyv_shri_s16 _mm_srai_epi16
#define npyv_shri_u32 _mm_srli_epi32
#define npyv_shri_s32 _mm_srai_epi32
#define npyv_shri_u64 _mm_srli_epi64
#define npyv_shri_s64 npyv_shr_s64
/***************************
* Logical
***************************/
// AND
#define npyv_and_u8 _mm_and_si128
#define npyv_and_s8 _mm_and_si128
#define npyv_and_u16 _mm_and_si128
#define npyv_and_s16 _mm_and_si128
#define npyv_and_u32 _mm_and_si128
#define npyv_and_s32 _mm_and_si128
#define npyv_and_u64 _mm_and_si128
#define npyv_and_s64 _mm_and_si128
#define npyv_and_f32 _mm_and_ps
#define npyv_and_f64 _mm_and_pd
#define npyv_and_b8 _mm_and_si128
#define npyv_and_b16 _mm_and_si128
#define npyv_and_b32 _mm_and_si128
#define npyv_and_b64 _mm_and_si128
// OR
#define npyv_or_u8 _mm_or_si128
#define npyv_or_s8 _mm_or_si128
#define npyv_or_u16 _mm_or_si128
#define npyv_or_s16 _mm_or_si128
#define npyv_or_u32 _mm_or_si128
#define npyv_or_s32 _mm_or_si128
#define npyv_or_u64 _mm_or_si128
#define npyv_or_s64 _mm_or_si128
#define npyv_or_f32 _mm_or_ps
#define npyv_or_f64 _mm_or_pd
#define npyv_or_b8 _mm_or_si128
#define npyv_or_b16 _mm_or_si128
#define npyv_or_b32 _mm_or_si128
#define npyv_or_b64 _mm_or_si128
// XOR
#define npyv_xor_u8 _mm_xor_si128
#define npyv_xor_s8 _mm_xor_si128
#define npyv_xor_u16 _mm_xor_si128
#define npyv_xor_s16 _mm_xor_si128
#define npyv_xor_u32 _mm_xor_si128
#define npyv_xor_s32 _mm_xor_si128
#define npyv_xor_u64 _mm_xor_si128
#define npyv_xor_s64 _mm_xor_si128
#define npyv_xor_f32 _mm_xor_ps
#define npyv_xor_f64 _mm_xor_pd
#define npyv_xor_b8 _mm_xor_si128
#define npyv_xor_b16 _mm_xor_si128
#define npyv_xor_b32 _mm_xor_si128
#define npyv_xor_b64 _mm_xor_si128
// NOT
#define npyv_not_u8(A) _mm_xor_si128(A, _mm_set1_epi32(-1))
#define npyv_not_s8 npyv_not_u8
#define npyv_not_u16 npyv_not_u8
#define npyv_not_s16 npyv_not_u8
#define npyv_not_u32 npyv_not_u8
#define npyv_not_s32 npyv_not_u8
#define npyv_not_u64 npyv_not_u8
#define npyv_not_s64 npyv_not_u8
#define npyv_not_f32(A) _mm_xor_ps(A, _mm_castsi128_ps(_mm_set1_epi32(-1)))
#define npyv_not_f64(A) _mm_xor_pd(A, _mm_castsi128_pd(_mm_set1_epi32(-1)))
#define npyv_not_b8 npyv_not_u8
#define npyv_not_b16 npyv_not_u8
#define npyv_not_b32 npyv_not_u8
#define npyv_not_b64 npyv_not_u8
// ANDC, ORC and XNOR
#define npyv_andc_u8(A, B) _mm_andnot_si128(B, A)
#define npyv_andc_b8(A, B) _mm_andnot_si128(B, A)
#define npyv_orc_b8(A, B) npyv_or_b8(npyv_not_b8(B), A)
#define npyv_xnor_b8 _mm_cmpeq_epi8
/***************************
 * Comparison
 ***************************/
// Int Equal
// Comparisons return full-lane masks (all-ones for true, zero for false).
#define npyv_cmpeq_u8 _mm_cmpeq_epi8
#define npyv_cmpeq_s8 _mm_cmpeq_epi8
#define npyv_cmpeq_u16 _mm_cmpeq_epi16
#define npyv_cmpeq_s16 _mm_cmpeq_epi16
#define npyv_cmpeq_u32 _mm_cmpeq_epi32
#define npyv_cmpeq_s32 _mm_cmpeq_epi32
// Defined before npyv_cmpeq_u64 on purpose: macro expansion happens at
// the use site, so the forward reference is fine.
#define npyv_cmpeq_s64 npyv_cmpeq_u64
#ifdef NPY_HAVE_SSE41
#define npyv_cmpeq_u64 _mm_cmpeq_epi64
#else
// SSE2 fallback for 64-bit equality: compare the 32-bit halves, AND each
// half's result with the other half's (shifted down), then broadcast the
// low 32-bit verdict to both halves of each 64-bit lane.
NPY_FINLINE __m128i npyv_cmpeq_u64(__m128i a, __m128i b)
{
    __m128i cmpeq = _mm_cmpeq_epi32(a, b);
    __m128i cmpeq_h = _mm_srli_epi64(cmpeq, 32);
    __m128i test = _mm_and_si128(cmpeq, cmpeq_h);
    return _mm_shuffle_epi32(test, _MM_SHUFFLE(2, 2, 0, 0));
}
#endif
// Int Not Equal
// XOP (AMD) has native not-equal comparisons; otherwise negate the
// equality mask.
#ifdef NPY_HAVE_XOP
#define npyv_cmpneq_u8 _mm_comneq_epi8
#define npyv_cmpneq_u16 _mm_comneq_epi16
#define npyv_cmpneq_u32 _mm_comneq_epi32
#define npyv_cmpneq_u64 _mm_comneq_epi64
#else
#define npyv_cmpneq_u8(A, B) npyv_not_u8(npyv_cmpeq_u8(A, B))
#define npyv_cmpneq_u16(A, B) npyv_not_u16(npyv_cmpeq_u16(A, B))
#define npyv_cmpneq_u32(A, B) npyv_not_u32(npyv_cmpeq_u32(A, B))
#define npyv_cmpneq_u64(A, B) npyv_not_u64(npyv_cmpeq_u64(A, B))
#endif
// (in)equality is sign-agnostic, so signed variants alias the unsigned ones.
#define npyv_cmpneq_s8 npyv_cmpneq_u8
#define npyv_cmpneq_s16 npyv_cmpneq_u16
#define npyv_cmpneq_s32 npyv_cmpneq_u32
#define npyv_cmpneq_s64 npyv_cmpneq_u64
// signed greater than
#define npyv_cmpgt_s8 _mm_cmpgt_epi8
#define npyv_cmpgt_s16 _mm_cmpgt_epi16
#define npyv_cmpgt_s32 _mm_cmpgt_epi32
#ifdef NPY_HAVE_SSE42
#define npyv_cmpgt_s64 _mm_cmpgt_epi64
#else
// Pre-SSE4.2 signed 64-bit greater-than.  a > b iff (b - a) is negative,
// but that subtraction can overflow when a and b have different sign
// bits; in that case the sign of b alone decides (a > b iff b < 0).
// `test` is a branchless bitwise select — x ^ ((x ^ y) & m) picks y
// where mask bits are set and x elsewhere — choosing b where the sign
// bits differ and (b - a) otherwise.  The final shuffle broadcasts each
// lane's 32-bit sign extension to the full 64-bit lane.
NPY_FINLINE __m128i npyv_cmpgt_s64(__m128i a, __m128i b)
{
    __m128i sub = _mm_sub_epi64(b, a);
    __m128i nsame_sbit = _mm_xor_si128(a, b);
    // nsame_sbit ? b : sub
    __m128i test = _mm_xor_si128(sub, _mm_and_si128(_mm_xor_si128(sub, b), nsame_sbit));
    __m128i extend_sbit = _mm_shuffle_epi32(_mm_srai_epi32(test, 31), _MM_SHUFFLE(3, 3, 1, 1));
    return extend_sbit;
}
#endif
// signed greater than or equal
#ifdef NPY_HAVE_XOP
#define npyv_cmpge_s8 _mm_comge_epi8
#define npyv_cmpge_s16 _mm_comge_epi16
#define npyv_cmpge_s32 _mm_comge_epi32
#define npyv_cmpge_s64 _mm_comge_epi64
#else
// a >= b  ==  !(b > a)
#define npyv_cmpge_s8(A, B) npyv_not_s8(_mm_cmpgt_epi8(B, A))
#define npyv_cmpge_s16(A, B) npyv_not_s16(_mm_cmpgt_epi16(B, A))
#define npyv_cmpge_s32(A, B) npyv_not_s32(_mm_cmpgt_epi32(B, A))
#define npyv_cmpge_s64(A, B) npyv_not_s64(npyv_cmpgt_s64(B, A))
#endif
// unsigned greater than
#ifdef NPY_HAVE_XOP
#define npyv_cmpgt_u8 _mm_comgt_epu8
#define npyv_cmpgt_u16 _mm_comgt_epu16
#define npyv_cmpgt_u32 _mm_comgt_epu32
#define npyv_cmpgt_u64 _mm_comgt_epu64
#else
// SSE only has signed compares.  Flipping the sign bit of both operands
// (XOR with 0x80...) maps unsigned ordering onto signed ordering, so the
// signed compare then gives the unsigned result.
#define NPYV_IMPL_SSE_UNSIGNED_GT(LEN, SIGN) \
    NPY_FINLINE __m128i npyv_cmpgt_u##LEN(__m128i a, __m128i b) \
    { \
        const __m128i sbit = _mm_set1_epi32(SIGN); \
        return _mm_cmpgt_epi##LEN( \
            _mm_xor_si128(a, sbit), _mm_xor_si128(b, sbit) \
        ); \
    }
NPYV_IMPL_SSE_UNSIGNED_GT(8, 0x80808080)
NPYV_IMPL_SSE_UNSIGNED_GT(16, 0x80008000)
NPYV_IMPL_SSE_UNSIGNED_GT(32, 0x80000000)
// Same sign-bit-flip trick, routed through the (possibly emulated)
// signed 64-bit greater-than above.
NPY_FINLINE __m128i npyv_cmpgt_u64(__m128i a, __m128i b)
{
    const __m128i sbit = npyv_setall_s64(0x8000000000000000);
    return npyv_cmpgt_s64(_mm_xor_si128(a, sbit), _mm_xor_si128(b, sbit));
}
#endif
// unsigned greater than or equal
#ifdef NPY_HAVE_XOP
#define npyv_cmpge_u8 _mm_comge_epu8
#define npyv_cmpge_u16 _mm_comge_epu16
#define npyv_cmpge_u32 _mm_comge_epu32
#define npyv_cmpge_u64 _mm_comge_epu64
#else
// a >= b  ==  (a == max(a, b)); unsigned byte max exists even in SSE2.
NPY_FINLINE __m128i npyv_cmpge_u8(__m128i a, __m128i b)
{ return _mm_cmpeq_epi8(a, _mm_max_epu8(a, b)); }
#ifdef NPY_HAVE_SSE41
NPY_FINLINE __m128i npyv_cmpge_u16(__m128i a, __m128i b)
{ return _mm_cmpeq_epi16(a, _mm_max_epu16(a, b)); }
NPY_FINLINE __m128i npyv_cmpge_u32(__m128i a, __m128i b)
{ return _mm_cmpeq_epi32(a, _mm_max_epu32(a, b)); }
#else
// Without SSE4.1 there is no 16/32-bit unsigned max: for u16, the
// saturating subtraction b - a is zero exactly when a >= b; for u32,
// fall back to !(b > a).
#define npyv_cmpge_u16(A, B) _mm_cmpeq_epi16(_mm_subs_epu16(B, A), _mm_setzero_si128())
#define npyv_cmpge_u32(A, B) npyv_not_u32(npyv_cmpgt_u32(B, A))
#endif
#define npyv_cmpge_u64(A, B) npyv_not_u64(npyv_cmpgt_u64(B, A))
#endif
// less than
// Derived by swapping the operands of the greater-than variants.
#define npyv_cmplt_u8(A, B) npyv_cmpgt_u8(B, A)
#define npyv_cmplt_s8(A, B) npyv_cmpgt_s8(B, A)
#define npyv_cmplt_u16(A, B) npyv_cmpgt_u16(B, A)
#define npyv_cmplt_s16(A, B) npyv_cmpgt_s16(B, A)
#define npyv_cmplt_u32(A, B) npyv_cmpgt_u32(B, A)
#define npyv_cmplt_s32(A, B) npyv_cmpgt_s32(B, A)
#define npyv_cmplt_u64(A, B) npyv_cmpgt_u64(B, A)
#define npyv_cmplt_s64(A, B) npyv_cmpgt_s64(B, A)
// less than or equal
// Derived by swapping the operands of the greater-or-equal variants.
#define npyv_cmple_u8(A, B) npyv_cmpge_u8(B, A)
#define npyv_cmple_s8(A, B) npyv_cmpge_s8(B, A)
#define npyv_cmple_u16(A, B) npyv_cmpge_u16(B, A)
#define npyv_cmple_s16(A, B) npyv_cmpge_s16(B, A)
#define npyv_cmple_u32(A, B) npyv_cmpge_u32(B, A)
#define npyv_cmple_s32(A, B) npyv_cmpge_s32(B, A)
#define npyv_cmple_u64(A, B) npyv_cmpge_u64(B, A)
#define npyv_cmple_s64(A, B) npyv_cmpge_s64(B, A)
// precision comparison
// Float comparisons produce float-typed masks; cast them to the integer
// domain so callers get the same boolean-mask type as the integer ops.
#define npyv_cmpeq_f32(a, b) _mm_castps_si128(_mm_cmpeq_ps(a, b))
#define npyv_cmpeq_f64(a, b) _mm_castpd_si128(_mm_cmpeq_pd(a, b))
#define npyv_cmpneq_f32(a, b) _mm_castps_si128(_mm_cmpneq_ps(a, b))
#define npyv_cmpneq_f64(a, b) _mm_castpd_si128(_mm_cmpneq_pd(a, b))
#define npyv_cmplt_f32(a, b) _mm_castps_si128(_mm_cmplt_ps(a, b))
#define npyv_cmplt_f64(a, b) _mm_castpd_si128(_mm_cmplt_pd(a, b))
#define npyv_cmple_f32(a, b) _mm_castps_si128(_mm_cmple_ps(a, b))
#define npyv_cmple_f64(a, b) _mm_castpd_si128(_mm_cmple_pd(a, b))
#define npyv_cmpgt_f32(a, b) _mm_castps_si128(_mm_cmpgt_ps(a, b))
#define npyv_cmpgt_f64(a, b) _mm_castpd_si128(_mm_cmpgt_pd(a, b))
#define npyv_cmpge_f32(a, b) _mm_castps_si128(_mm_cmpge_ps(a, b))
#define npyv_cmpge_f64(a, b) _mm_castpd_si128(_mm_cmpge_pd(a, b))
// check special cases
// "ordered" compare of a value with itself is true for every lane except
// NaN (NaN is unordered with everything, including itself).
NPY_FINLINE npyv_b32 npyv_notnan_f32(npyv_f32 a)
{ return _mm_castps_si128(_mm_cmpord_ps(a, a)); }
NPY_FINLINE npyv_b64 npyv_notnan_f64(npyv_f64 a)
{ return _mm_castpd_si128(_mm_cmpord_pd(a, a)); }
// Test cross all vector lanes
// any: returns true if any of the elements is not equal to zero
// all: returns true if all elements are not equal to zero
// Boolean vectors hold all-ones/all-zero lanes, so the 16-bit byte
// movemask alone answers any/all directly.
#define NPYV_IMPL_SSE_ANYALL(SFX) \
    NPY_FINLINE bool npyv_any_##SFX(npyv_##SFX a) \
    { return _mm_movemask_epi8(a) != 0; } \
    NPY_FINLINE bool npyv_all_##SFX(npyv_##SFX a) \
    { return _mm_movemask_epi8(a) == 0xffff; }
NPYV_IMPL_SSE_ANYALL(b8)
NPYV_IMPL_SSE_ANYALL(b16)
NPYV_IMPL_SSE_ANYALL(b32)
NPYV_IMPL_SSE_ANYALL(b64)
#undef NPYV_IMPL_SSE_ANYALL
// General (non-boolean) lanes: compare against zero first, then inspect
// the movemask of the equality result.
//   any: not every lane equals zero (mask != MASK, the all-set value);
//   all: no lane equals zero (mask == 0).
// MASK is the all-set movemask for the chosen movemask width.
#define NPYV_IMPL_SSE_ANYALL(SFX, MSFX, TSFX, MASK) \
    NPY_FINLINE bool npyv_any_##SFX(npyv_##SFX a) \
    { \
        return _mm_movemask_##MSFX( \
            _mm_cmpeq_##TSFX(a, npyv_zero_##SFX()) \
        ) != MASK; \
    } \
    NPY_FINLINE bool npyv_all_##SFX(npyv_##SFX a) \
    { \
        return _mm_movemask_##MSFX( \
            _mm_cmpeq_##TSFX(a, npyv_zero_##SFX()) \
        ) == 0; \
    }
NPYV_IMPL_SSE_ANYALL(u8, epi8, epi8, 0xffff)
NPYV_IMPL_SSE_ANYALL(s8, epi8, epi8, 0xffff)
NPYV_IMPL_SSE_ANYALL(u16, epi8, epi16, 0xffff)
NPYV_IMPL_SSE_ANYALL(s16, epi8, epi16, 0xffff)
NPYV_IMPL_SSE_ANYALL(u32, epi8, epi32, 0xffff)
NPYV_IMPL_SSE_ANYALL(s32, epi8, epi32, 0xffff)
#ifdef NPY_HAVE_SSE41
NPYV_IMPL_SSE_ANYALL(u64, epi8, epi64, 0xffff)
NPYV_IMPL_SSE_ANYALL(s64, epi8, epi64, 0xffff)
#else
// SSE2 has no 64-bit equality, so compare the 32-bit halves instead.
// any: a 64-bit lane is nonzero iff at least one of its halves is, so
// "all halves equal zero" (mask == 0xffff) is exactly "no lane nonzero".
NPY_FINLINE bool npyv_any_u64(npyv_u64 a)
{
    return _mm_movemask_epi8(
        _mm_cmpeq_epi32(a, npyv_zero_u64())
    ) != 0xffff;
}
// all: AND each half's zero-mask with its sibling half's (swapped via
// shuffle) so a byte stays set only when the whole 64-bit lane is zero;
// mask == 0 then means no lane is zero, i.e. all lanes are nonzero.
NPY_FINLINE bool npyv_all_u64(npyv_u64 a)
{
    a = _mm_cmpeq_epi32(a, npyv_zero_u64());
    a = _mm_and_si128(a, _mm_shuffle_epi32(a, _MM_SHUFFLE(2, 3, 0, 1)));
    return _mm_movemask_epi8(a) == 0;
}
#define npyv_any_s64 npyv_any_u64
#define npyv_all_s64 npyv_all_u64
#endif
// movemask_ps/pd yield 4- and 2-bit masks, hence the smaller MASK values.
NPYV_IMPL_SSE_ANYALL(f32, ps, ps, 0xf)
NPYV_IMPL_SSE_ANYALL(f64, pd, pd, 0x3)
#undef NPYV_IMPL_SSE_ANYALL
#endif // _NPY_SIMD_SSE_OPERATORS_H
#!/usr/bin/env python
#
# Copyright (C) 2015 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Tool for manipulating OVA
"""
import sys
import tarfile
def view(path):
    """
    Display the content of the OVA file for human
    """
    with tarfile.open(path) as tar:
        members = tar.getmembers()
        # First pass: list every archive entry.
        print("=> Files in .ova:")
        for entry in members:
            print("* " + entry.name)
        print("")
        # Second pass: dump the text of the descriptor (.ovf) and
        # manifest (.mf) files only.
        for entry in members:
            if not (entry.name.endswith(".ovf") or entry.name.endswith(".mf")):
                continue
            print("=> Content of " + entry.name + ":")
            print(tar.extractfile(entry).read().decode("utf-8"))
            print("")
def main():
    """Entry point: display the OVA file named on the command line.

    Prints a usage message and exits with status 1 when the file
    argument is missing, instead of dying with an IndexError traceback.
    """
    if len(sys.argv) < 2:
        print("usage: %s <file.ova>" % sys.argv[0])
        sys.exit(1)
    view(sys.argv[1])


if __name__ == '__main__':
    main()
import mock
import unittest
from django.db import models
from django.test import TestCase
from django_filters.exceptions import FieldLookupError
from django_filters.filters import (
BaseInFilter,
BaseRangeFilter,
BooleanFilter,
CharFilter,
ChoiceFilter,
DateRangeFilter,
Filter,
FilterMethod,
ModelChoiceFilter,
ModelMultipleChoiceFilter,
NumberFilter,
UUIDFilter
)
from django_filters.filterset import FILTER_FOR_DBFIELD_DEFAULTS, FilterSet
from django_filters.widgets import BooleanWidget
from .models import (
Account,
AdminUser,
Article,
BankAccount,
Book,
Business,
Comment,
DirectedNode,
NetworkSetting,
Node,
Profile,
Restaurant,
SubnetMaskField,
User,
UUIDTestModel,
Worker
)
from .utils import MockQuerySet
def checkItemsEqual(L1, L2):
"""
TestCase.assertItemsEqual() is not available in Python 2.6.
"""
return len(L1) == len(L2) and sorted(L1) == sorted(L2)
class HelperMethodsTests(TestCase):
@unittest.skip('todo')
def test_get_declared_filters(self):
pass
@unittest.skip('todo')
def test_filters_for_model(self):
pass
@unittest.skip('todo')
def test_filterset_factory(self):
pass
class DbFieldDefaultFiltersTests(TestCase):
def test_expected_db_fields_get_filters(self):
to_check = [
models.BooleanField,
models.CharField,
models.CommaSeparatedIntegerField,
models.DateField,
models.DateTimeField,
models.DecimalField,
models.EmailField,
models.FilePathField,
models.FloatField,
models.IntegerField,
models.GenericIPAddressField,
models.NullBooleanField,
models.PositiveIntegerField,
models.PositiveSmallIntegerField,
models.SlugField,
models.SmallIntegerField,
models.TextField,
models.TimeField,
models.DurationField,
models.URLField,
models.ForeignKey,
models.OneToOneField,
models.ManyToManyField,
models.UUIDField,
]
msg = "%s expected to be found in FILTER_FOR_DBFIELD_DEFAULTS"
for m in to_check:
self.assertIn(m, FILTER_FOR_DBFIELD_DEFAULTS, msg % m.__name__)
def test_expected_db_fields_do_not_get_filters(self):
to_check = [
models.Field,
models.BigIntegerField,
models.FileField,
models.ImageField,
]
msg = "%s expected to not be found in FILTER_FOR_DBFIELD_DEFAULTS"
for m in to_check:
self.assertNotIn(m, FILTER_FOR_DBFIELD_DEFAULTS, msg % m.__name__)
class FilterSetFilterForFieldTests(TestCase):
def test_filter_found_for_field(self):
f = User._meta.get_field('username')
result = FilterSet.filter_for_field(f, 'username')
self.assertIsInstance(result, CharFilter)
self.assertEqual(result.field_name, 'username')
def test_filter_found_for_uuidfield(self):
f = UUIDTestModel._meta.get_field('uuid')
result = FilterSet.filter_for_field(f, 'uuid')
self.assertIsInstance(result, UUIDFilter)
self.assertEqual(result.field_name, 'uuid')
def test_filter_found_for_autofield(self):
f = User._meta.get_field('id')
result = FilterSet.filter_for_field(f, 'id')
self.assertIsInstance(result, NumberFilter)
self.assertEqual(result.field_name, 'id')
def test_field_with_extras(self):
f = User._meta.get_field('favorite_books')
result = FilterSet.filter_for_field(f, 'favorite_books')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'favorite_books')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, Book)
def test_field_with_choices(self):
f = User._meta.get_field('status')
result = FilterSet.filter_for_field(f, 'status')
self.assertIsInstance(result, ChoiceFilter)
self.assertEqual(result.field_name, 'status')
self.assertTrue('choices' in result.extra)
self.assertIsNotNone(result.extra['choices'])
def test_field_that_is_subclassed(self):
f = User._meta.get_field('first_name')
result = FilterSet.filter_for_field(f, 'first_name')
self.assertIsInstance(result, CharFilter)
def test_unknown_field_type_error(self):
f = NetworkSetting._meta.get_field('mask')
with self.assertRaises(AssertionError) as excinfo:
FilterSet.filter_for_field(f, 'mask')
self.assertIn(
"FilterSet resolved field 'mask' with 'exact' lookup "
"to an unrecognized field type SubnetMaskField",
excinfo.exception.args[0])
def test_symmetrical_selfref_m2m_field(self):
f = Node._meta.get_field('adjacents')
result = FilterSet.filter_for_field(f, 'adjacents')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'adjacents')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, Node)
def test_non_symmetrical_selfref_m2m_field(self):
f = DirectedNode._meta.get_field('outbound_nodes')
result = FilterSet.filter_for_field(f, 'outbound_nodes')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'outbound_nodes')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, DirectedNode)
def test_m2m_field_with_through_model(self):
f = Business._meta.get_field('employees')
result = FilterSet.filter_for_field(f, 'employees')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'employees')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, Worker)
def test_transformed_lookup_expr(self):
f = Comment._meta.get_field('date')
result = FilterSet.filter_for_field(f, 'date', 'year__gte')
self.assertIsInstance(result, NumberFilter)
self.assertEqual(result.field_name, 'date')
@unittest.skip('todo')
def test_filter_overrides(self):
pass
class FilterSetFilterForLookupTests(TestCase):
def test_filter_for_ISNULL_lookup(self):
f = Article._meta.get_field('author')
result, params = FilterSet.filter_for_lookup(f, 'isnull')
self.assertEqual(result, BooleanFilter)
self.assertDictEqual(params, {})
def test_filter_for_IN_lookup(self):
f = Article._meta.get_field('author')
result, params = FilterSet.filter_for_lookup(f, 'in')
self.assertTrue(issubclass(result, ModelChoiceFilter))
self.assertTrue(issubclass(result, BaseInFilter))
self.assertEqual(params['to_field_name'], 'id')
def test_filter_for_RANGE_lookup(self):
f = Article._meta.get_field('author')
result, params = FilterSet.filter_for_lookup(f, 'range')
self.assertTrue(issubclass(result, ModelChoiceFilter))
self.assertTrue(issubclass(result, BaseRangeFilter))
self.assertEqual(params['to_field_name'], 'id')
def test_isnull_with_filter_overrides(self):
class OFilterSet(FilterSet):
class Meta:
filter_overrides = {
models.BooleanField: {
'filter_class': BooleanFilter,
'extra': lambda f: {
'widget': BooleanWidget,
},
},
}
f = Article._meta.get_field('author')
result, params = OFilterSet.filter_for_lookup(f, 'isnull')
self.assertEqual(result, BooleanFilter)
self.assertEqual(params['widget'], BooleanWidget)
class ReverseFilterSetFilterForFieldTests(TestCase):
# Test reverse relationships for `filter_for_field`
def test_reverse_o2o_relationship(self):
f = Account._meta.get_field('profile')
result = FilterSet.filter_for_field(f, 'profile')
self.assertIsInstance(result, ModelChoiceFilter)
self.assertEqual(result.field_name, 'profile')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, Profile)
def test_reverse_fk_relationship(self):
f = User._meta.get_field('comments')
result = FilterSet.filter_for_field(f, 'comments')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'comments')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, Comment)
def test_reverse_m2m_relationship(self):
f = Book._meta.get_field('lovers')
result = FilterSet.filter_for_field(f, 'lovers')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'lovers')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, User)
def test_reverse_non_symmetrical_selfref_m2m_field(self):
f = DirectedNode._meta.get_field('inbound_nodes')
result = FilterSet.filter_for_field(f, 'inbound_nodes')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'inbound_nodes')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, DirectedNode)
def test_reverse_m2m_field_with_through_model(self):
f = Worker._meta.get_field('employers')
result = FilterSet.filter_for_field(f, 'employers')
self.assertIsInstance(result, ModelMultipleChoiceFilter)
self.assertEqual(result.field_name, 'employers')
self.assertTrue('queryset' in result.extra)
self.assertIsNotNone(result.extra['queryset'])
self.assertEqual(result.extra['queryset'].model, Business)
def test_reverse_relationship_lookup_expr(self):
f = Book._meta.get_field('lovers')
result = FilterSet.filter_for_field(f, 'lovers', 'isnull')
self.assertIsInstance(result, BooleanFilter)
self.assertEqual(result.field_name, 'lovers')
self.assertEqual(result.lookup_expr, 'isnull')
class FilterSetFilterForReverseFieldTests(TestCase):
def test_method_raises_assertion(self):
msg = ("`F.filter_for_reverse_field` has been removed. "
"`F.filter_for_field` now generates filters for reverse fields.")
with self.assertRaisesMessage(AssertionError, msg):
class F(FilterSet):
@classmethod
def filter_for_reverse_field(cls, field, field_name):
pass
class FilterSetClassCreationTests(TestCase):
def test_no_filters(self):
class F(FilterSet):
pass
self.assertEqual(len(F.declared_filters), 0)
self.assertEqual(len(F.base_filters), 0)
def test_declaring_filter(self):
class F(FilterSet):
username = CharFilter()
self.assertEqual(len(F.declared_filters), 1)
self.assertListEqual(list(F.declared_filters), ['username'])
self.assertEqual(len(F.base_filters), 1)
self.assertListEqual(list(F.base_filters), ['username'])
def test_model_derived(self):
class F(FilterSet):
class Meta:
model = Book
fields = '__all__'
self.assertEqual(len(F.declared_filters), 0)
self.assertEqual(len(F.base_filters), 3)
self.assertListEqual(list(F.base_filters),
['title', 'price', 'average_rating'])
def test_model_no_fields_or_exclude(self):
with self.assertRaises(AssertionError) as excinfo:
class F(FilterSet):
class Meta:
model = Book
self.assertIn(
"Setting 'Meta.model' without either 'Meta.fields' or 'Meta.exclude'",
str(excinfo.exception)
)
def test_model_fields_empty(self):
class F(FilterSet):
class Meta:
model = Book
fields = []
self.assertEqual(len(F.declared_filters), 0)
self.assertEqual(len(F.base_filters), 0)
self.assertListEqual(list(F.base_filters), [])
def test_model_exclude_empty(self):
# equivalent to fields = '__all__'
class F(FilterSet):
class Meta:
model = Book
exclude = []
self.assertEqual(len(F.declared_filters), 0)
self.assertEqual(len(F.base_filters), 3)
self.assertListEqual(list(F.base_filters),
['title', 'price', 'average_rating'])
def test_declared_and_model_derived(self):
class F(FilterSet):
username = CharFilter()
class Meta:
model = Book
fields = '__all__'
self.assertEqual(len(F.declared_filters), 1)
self.assertEqual(len(F.base_filters), 4)
self.assertListEqual(list(F.base_filters),
['title', 'price', 'average_rating', 'username'])
def test_meta_fields_with_declared_and_model_derived(self):
class F(FilterSet):
username = CharFilter()
class Meta:
model = Book
fields = ('username', 'price')
self.assertEqual(len(F.declared_filters), 1)
self.assertEqual(len(F.base_filters), 2)
self.assertListEqual(list(F.base_filters), ['username', 'price'])
def test_meta_fields_dictionary_derived(self):
class F(FilterSet):
class Meta:
model = Book
fields = {'price': ['exact', 'gte', 'lte'], }
self.assertEqual(len(F.declared_filters), 0)
self.assertEqual(len(F.base_filters), 3)
expected_list = ['price', 'price__gte', 'price__lte', ]
self.assertTrue(checkItemsEqual(list(F.base_filters), expected_list))
def test_meta_fields_containing_autofield(self):
class F(FilterSet):
username = CharFilter()
class Meta:
model = Book
fields = ('id', 'username', 'price')
self.assertEqual(len(F.declared_filters), 1)
self.assertEqual(len(F.base_filters), 3)
self.assertListEqual(list(F.base_filters), ['id', 'username', 'price'])
def test_meta_fields_dictionary_autofield(self):
class F(FilterSet):
username = CharFilter()
class Meta:
model = Book
fields = {'id': ['exact'],
'username': ['exact'],
}
self.assertEqual(len(F.declared_filters), 1)
self.assertEqual(len(F.base_filters), 2)
expected_list = ['id', 'username']
self.assertTrue(checkItemsEqual(list(F.base_filters), expected_list))
def test_meta_fields_containing_unknown(self):
with self.assertRaises(TypeError) as excinfo:
class F(FilterSet):
username = CharFilter()
class Meta:
model = Book
fields = ('username', 'price', 'other', 'another')
self.assertEqual(
str(excinfo.exception),
"'Meta.fields' contains fields that are not defined on this FilterSet: "
"other, another"
)
def test_meta_fields_dictionary_containing_unknown(self):
with self.assertRaises(TypeError):
class F(FilterSet):
class Meta:
model = Book
fields = {'id': ['exact'],
'title': ['exact'],
'other': ['exact'],
}
def test_meta_fields_invalid_lookup(self):
# We want to ensure that non existent lookups (or just simple misspellings)
# throw a useful exception containg the field and lookup expr.
with self.assertRaises(FieldLookupError) as context:
class F(FilterSet):
class Meta:
model = User
fields = {'username': ['flub']}
exc = str(context.exception)
self.assertIn('tests.User.username', exc)
self.assertIn('flub', exc)
def test_meta_exlude_with_declared_and_declared_wins(self):
class F(FilterSet):
username = CharFilter()
class Meta:
model = Book
exclude = ('username', 'price')
self.assertEqual(len(F.declared_filters), 1)
self.assertEqual(len(F.base_filters), 3)
self.assertListEqual(list(F.base_filters),
['title', 'average_rating', 'username'])
def test_meta_fields_and_exlude_and_exclude_wins(self):
class F(FilterSet):
username = CharFilter()
class Meta:
model = Book
fields = ('username', 'title', 'price')
exclude = ('title',)
self.assertEqual(len(F.declared_filters), 1)
self.assertEqual(len(F.base_filters), 2)
self.assertListEqual(list(F.base_filters),
['username', 'price'])
def test_meta_exlude_with_no_fields(self):
class F(FilterSet):
class Meta:
model = Book
exclude = ('price', )
self.assertEqual(len(F.declared_filters), 0)
self.assertEqual(len(F.base_filters), 2)
self.assertListEqual(list(F.base_filters),
['title', 'average_rating'])
def test_filterset_class_inheritance(self):
class F(FilterSet):
class Meta:
model = Book
fields = '__all__'
class G(F):
pass
self.assertEqual(set(F.base_filters), set(G.base_filters))
class F(FilterSet):
other = CharFilter
class Meta:
model = Book
fields = '__all__'
class G(F):
pass
self.assertEqual(set(F.base_filters), set(G.base_filters))
def test_abstract_model_inheritance(self):
class F(FilterSet):
class Meta:
model = Restaurant
fields = '__all__'
self.assertEqual(set(F.base_filters), set(['name', 'serves_pizza']))
class F(FilterSet):
class Meta:
model = Restaurant
fields = ['name', 'serves_pizza']
self.assertEqual(set(F.base_filters), set(['name', 'serves_pizza']))
def test_custom_field_gets_filter_from_override(self):
class F(FilterSet):
class Meta:
model = NetworkSetting
fields = '__all__'
filter_overrides = {
SubnetMaskField: {'filter_class': CharFilter}
}
self.assertEqual(list(F.base_filters.keys()), ['ip', 'mask', 'cidr'])
def test_custom_declared_field_no_warning(self):
class F(FilterSet):
mask = CharFilter()
class Meta:
model = NetworkSetting
fields = ['mask']
self.assertEqual(list(F.base_filters.keys()), ['mask'])
def test_filterset_for_proxy_model(self):
class F(FilterSet):
class Meta:
model = User
fields = '__all__'
class ProxyF(FilterSet):
class Meta:
model = AdminUser
fields = '__all__'
self.assertEqual(list(F.base_filters), list(ProxyF.base_filters))
def test_filterset_for_mti_model(self):
class F(FilterSet):
class Meta:
model = Account
fields = '__all__'
class FtiF(FilterSet):
class Meta:
model = BankAccount
fields = '__all__'
# fails due to 'account_ptr' getting picked up
self.assertEqual(
list(F.base_filters) + ['amount_saved'],
list(FtiF.base_filters))
def test_declared_filter_disabling(self):
class Parent(FilterSet):
f1 = CharFilter()
f2 = CharFilter()
class Child(Parent):
f1 = None
class Grandchild(Child):
pass
self.assertEqual(len(Parent.base_filters), 2)
self.assertEqual(len(Child.base_filters), 1)
self.assertEqual(len(Grandchild.base_filters), 1)
class FilterSetInstantiationTests(TestCase):
class F(FilterSet):
class Meta:
model = User
fields = ['username']
def test_creating_instance(self):
f = self.F()
self.assertFalse(f.is_bound)
self.assertIsNotNone(f.queryset)
self.assertEqual(len(f.filters), len(self.F.base_filters))
for name, filter_ in f.filters.items():
self.assertEqual(
filter_.model,
User,
"%s does not have model set correctly" % name)
def test_creating_bound_instance(self):
f = self.F({'username': 'username'})
self.assertTrue(f.is_bound)
def test_creating_with_queryset(self):
m = mock.Mock()
f = self.F(queryset=m)
self.assertEqual(f.queryset, m)
def test_creating_with_request(self):
m = mock.Mock()
f = self.F(request=m)
self.assertEqual(f.request, m)
class FilterSetQuerysetTests(TestCase):
class F(FilterSet):
invalid = CharFilter(method=lambda *args: None)
class Meta:
model = User
fields = ['username', 'invalid']
def test_filter_queryset_called_once(self):
m = MockQuerySet()
f = self.F({'username': 'bob'}, queryset=m)
with mock.patch.object(f, 'filter_queryset',
wraps=f.filter_queryset) as fn:
f.qs
fn.assert_called_once_with(m.all())
f.qs
fn.assert_called_once_with(m.all())
def test_get_form_class_called_once(self):
f = self.F()
with mock.patch.object(f, 'get_form_class',
wraps=f.get_form_class) as fn:
f.form
fn.assert_called_once()
f.form
fn.assert_called_once()
def test_qs_caching(self):
m = mock.Mock()
f = self.F(queryset=m)
self.assertIs(f.qs, m.all())
self.assertIs(f.qs, f.qs)
def test_form_caching(self):
f = self.F()
self.assertIs(f.form, f.form)
def test_qs_triggers_form_validation(self):
m = MockQuerySet()
f = self.F({'username': 'bob'}, queryset=m)
with mock.patch.object(f.form, 'full_clean',
wraps=f.form.full_clean) as fn:
fn.assert_not_called()
f.qs
fn.assert_called()
def test_filters_must_return_queryset(self):
m = MockQuerySet()
f = self.F({'invalid': 'result'}, queryset=m)
msg = "Expected 'F.invalid' to return a QuerySet, but got a NoneType instead."
with self.assertRaisesMessage(AssertionError, msg):
f.qs
# test filter.method here, as it depends on its parent FilterSet
class FilterMethodTests(TestCase):
def test_none(self):
# use a mock to bypass bound/unbound method equality
class TestFilter(Filter):
filter = mock.Mock()
f = TestFilter(method=None)
self.assertIsNone(f.method)
# passing method=None should not modify filter function
self.assertIs(f.filter, TestFilter.filter)
def test_method_name(self):
class F(FilterSet):
f = Filter(method='filter_f')
def filter_f(self, qs, name, value):
pass
f = F({}, queryset=User.objects.all())
self.assertEqual(f.filters['f'].method, 'filter_f')
self.assertEqual(f.filters['f'].filter.method, f.filter_f)
self.assertIsInstance(f.filters['f'].filter, FilterMethod)
def test_method_callable(self):
def filter_f(qs, name, value):
pass
class F(FilterSet):
f = Filter(method=filter_f)
f = F({}, queryset=User.objects.all())
self.assertEqual(f.filters['f'].method, filter_f)
self.assertEqual(f.filters['f'].filter.method, filter_f)
self.assertIsInstance(f.filters['f'].filter, FilterMethod)
def test_request_available_during_method_called(self):
class F(FilterSet):
f = Filter(method='filter_f')
def filter_f(self, qs, name, value):
# call mock request object to prove self.request can be accessed
self.request()
m = mock.Mock()
f = F({}, queryset=User.objects.all(), request=m)
# call the filter
f.filters['f'].filter.method(User.objects.all(), 'f', '')
m.assert_called_once_with()
def test_method_with_overridden_filter(self):
# Some filter classes override the base filter() method. We need
# to ensure that passing a method argument still works correctly
class F(FilterSet):
f = DateRangeFilter(method='filter_f')
def filter_f(self, qs, name, value):
pass
f = F({}, queryset=User.objects.all())
self.assertEqual(f.filters['f'].method, 'filter_f')
self.assertEqual(f.filters['f'].filter.method, f.filter_f)
def test_parent_unresolvable(self):
f = Filter(method='filter_f')
with self.assertRaises(AssertionError) as w:
f.filter(User.objects.all(), 0)
self.assertIn("'None'", str(w.exception))
self.assertIn('parent', str(w.exception))
self.assertIn('filter_f', str(w.exception))
def test_method_self_is_parent(self):
    # Ensure the method isn't 're-parented' on the `FilterMethod` helper class.
    # Filter methods should have access to the filterset's properties.
    request = MockQuerySet()

    class F(FilterSet):
        f = CharFilter(method='filter_f')

        class Meta:
            model = User
            fields = []

        def filter_f(inner_self, qs, name, value):
            # `inner_self` must be the filterset instance itself; the
            # enclosing test's `self` is still reachable via the closure.
            self.assertIsInstance(inner_self, F)
            self.assertIs(inner_self.request, request)
            return qs

    # evaluating .qs triggers filtering, which runs filter_f above
    F({'f': 'foo'}, request=request, queryset=User.objects.all()).qs
def test_method_unresolvable(self):
    """A missing filter method raises with the filterset's qualified name."""

    class F(FilterSet):
        f = Filter(method='filter_f')

    filterset = F({}, queryset=User.objects.all())
    with self.assertRaises(AssertionError) as ctx:
        filterset.filters['f'].filter(User.objects.all(), 0)

    message = str(ctx.exception)
    self.assertIn('%s.%s' % (F.__module__, F.__name__), message)
    self.assertIn('.filter_f()', message)
def test_method_uncallable(self):
    """A non-callable attribute named by `method` raises the same error."""

    class F(FilterSet):
        f = Filter(method='filter_f')
        filter_f = 4

    filterset = F({}, queryset=User.objects.all())
    with self.assertRaises(AssertionError) as ctx:
        filterset.filters['f'].filter(User.objects.all(), 0)

    message = str(ctx.exception)
    self.assertIn('%s.%s' % (F.__module__, F.__name__), message)
    self.assertIn('.filter_f()', message)
def test_method_set_unset(self):
    """Setting then clearing `method` restores the original filter()."""
    # use a mock to bypass bound/unbound method equality
    class TestFilter(Filter):
        filter = mock.Mock()

    f = TestFilter(method='filter_f')
    self.assertEqual(f.method, 'filter_f')
    self.assertIsInstance(f.filter, FilterMethod)

    # setting None should revert to Filter.filter
    f.method = None
    self.assertIsNone(f.method)
    self.assertIs(f.filter, TestFilter.filter)
class MiscFilterSetTests(TestCase):
    """Guards against FilterSet growing container-protocol methods."""

    def test_no__getitem__(self):
        # The DTL processes variable lookups by the following rules:
        # https://docs.djangoproject.com/en/stable/ref/templates/language/#variables
        # A __getitem__ implementation precedes normal attribute access, and in
        # the case of #58, will force the queryset to evaluate when it should
        # not (eg, when rendering a blank form).
        self.assertFalse(hasattr(FilterSet, '__getitem__'))

    def test_no_qs_proxying(self):
        # The FilterSet should not proxy .qs methods - just access .qs directly
        for dunder in ('__len__', '__iter__'):
            self.assertFalse(hasattr(FilterSet, dunder))
"""Tests for distutils.command.config."""
import unittest
import os
import sys
from test.test_support import run_unittest
from distutils.command.config import dump_file, config
from distutils.tests import support
from distutils import log
class ConfigTestCase(support.LoggingSilencer,
                     support.TempdirManager,
                     unittest.TestCase):
    """Exercise the distutils `config` command helpers."""

    def _info(self, msg, *args):
        # capture every emitted line so tests can count log output
        self._logs.extend(msg.splitlines())

    def setUp(self):
        super(ConfigTestCase, self).setUp()
        self._logs = []
        self.old_log = log.info
        log.info = self._info

    def tearDown(self):
        log.info = self.old_log
        super(ConfigTestCase, self).tearDown()

    def test_dump_file(self):
        # dump_file logs one line per source line, plus the header
        this_file = os.path.splitext(__file__)[0] + '.py'
        f = open(this_file)
        try:
            numlines = len(f.readlines())
        finally:
            f.close()

        dump_file(this_file, 'I am the header')
        self.assertEqual(len(self._logs), numlines + 1)

    def test_search_cpp(self):
        if sys.platform == 'win32':
            return
        pkg_dir, dist = self.create_dist()
        cmd = config(dist)

        # simple pattern searches
        self.assertEqual(cmd.search_cpp(pattern='xxx', body='/* xxx */'), 0)
        self.assertEqual(cmd.search_cpp(pattern='_configtest', body='/* xxx */'), 1)

    def test_finalize_options(self):
        # finalize_options splits path-separated strings into lists
        pkg_dir, dist = self.create_dist()
        cmd = config(dist)
        cmd.include_dirs = 'one%stwo' % os.pathsep
        cmd.libraries = 'one'
        cmd.library_dirs = 'three%sfour' % os.pathsep
        cmd.ensure_finalized()

        self.assertEqual(cmd.include_dirs, ['one', 'two'])
        self.assertEqual(cmd.libraries, ['one'])
        self.assertEqual(cmd.library_dirs, ['three', 'four'])

    def test_clean(self):
        # _clean removes the files it is given
        tmp_dir = self.mkdtemp()
        paths = [os.path.join(tmp_dir, name) for name in ('one', 'two')]
        for path in paths:
            self.write_file(path, 'xxx')
            self.assertTrue(os.path.exists(path))

        pkg_dir, dist = self.create_dist()
        cmd = config(dist)
        cmd._clean(*paths)

        for path in paths:
            self.assertFalse(os.path.exists(path))
def test_suite():
    """Return a suite containing all tests from this module."""
    return unittest.makeSuite(ConfigTestCase)


if __name__ == "__main__":
    run_unittest(test_suite())
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: ios_linkagg
version_added: "2.5"
author: "Trishna Guha (@trishnaguha)"
short_description: Manage link aggregation groups on Cisco IOS network devices
description:
- This module provides declarative management of link aggregation groups
on Cisco IOS network devices.
notes:
- Tested against IOS 15.2
options:
group:
description:
- Channel-group number for the port-channel
Link aggregation group. Range 1-255.
mode:
description:
- Mode of the link aggregation group.
choices: ['active', 'on', 'passive', 'auto', 'desirable']
members:
description:
- List of members of the link aggregation group.
aggregate:
description: List of link aggregation definitions.
state:
description:
- State of the link aggregation group.
default: present
choices: ['present', 'absent']
purge:
description:
- Purge links not defined in the I(aggregate) parameter.
default: no
type: bool
extends_documentation_fragment: ios
"""
EXAMPLES = """
- name: create link aggregation group
ios_linkagg:
group: 10
state: present
- name: delete link aggregation group
ios_linkagg:
group: 10
state: absent
- name: set link aggregation group to members
ios_linkagg:
group: 200
mode: active
members:
- GigabitEthernet0/0
- GigabitEthernet0/1
- name: remove link aggregation group from GigabitEthernet0/0
ios_linkagg:
group: 200
mode: active
members:
- GigabitEthernet0/1
- name: Create aggregate of linkagg definitions
ios_linkagg:
aggregate:
- { group: 3, mode: on, members: [GigabitEthernet0/1] }
- { group: 100, mode: passive, members: [GigabitEthernet0/2] }
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- interface port-channel 30
- interface GigabitEthernet0/3
- channel-group 30 mode on
- no interface port-channel 30
"""
import re
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.config import CustomNetworkConfig
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.ios.ios import get_config, load_config
from ansible.module_utils.network.ios.ios import ios_argument_spec
def search_obj_in_list(group, lst):
    """Return the first dict in `lst` whose 'group' equals `group`, else None."""
    return next((item for item in lst if item['group'] == group), None)
def map_obj_to_commands(updates, module):
    """Translate (want, have) object lists into IOS configuration commands.

    `updates` is a (want, have) pair of dict lists as produced by
    map_params_to_obj / map_config_to_obj.  Returns the flat list of CLI
    commands needed to converge the device onto the desired state.
    """
    commands = list()
    want, have = updates
    purge = module.params['purge']

    for w in want:
        group = w['group']
        mode = w['mode']
        members = w.get('members') or []
        state = w['state']
        # NOTE: this mutates the `want` entries in place so that the purge
        # comparison below sees dicts without a 'state' key.
        del w['state']

        obj_in_have = search_obj_in_list(group, have)

        if state == 'absent':
            if obj_in_have:
                commands.append('no interface port-channel {0}'.format(group))

        elif state == 'present':
            # `cmd` creates the port-channel interface and exits config mode
            cmd = ['interface port-channel {0}'.format(group),
                   'end']
            if not obj_in_have:
                if not group:
                    module.fail_json(msg='group is a required option')
                commands.extend(cmd)

                if members:
                    for m in members:
                        commands.append('interface {0}'.format(m))
                        commands.append('channel-group {0} mode {1}'.format(group, mode))

            else:
                if members:
                    if 'members' not in obj_in_have.keys():
                        # group exists but has no members yet: add them all
                        for m in members:
                            commands.extend(cmd)
                            commands.append('interface {0}'.format(m))
                            commands.append('channel-group {0} mode {1}'.format(group, mode))

                    elif set(members) != set(obj_in_have['members']):
                        # add interfaces that are wanted but not configured
                        missing_members = list(set(members) - set(obj_in_have['members']))
                        for m in missing_members:
                            commands.extend(cmd)
                            commands.append('interface {0}'.format(m))
                            commands.append('channel-group {0} mode {1}'.format(group, mode))

                        # remove interfaces that are configured but not wanted
                        superfluous_members = list(set(obj_in_have['members']) - set(members))
                        for m in superfluous_members:
                            commands.extend(cmd)
                            commands.append('interface {0}'.format(m))
                            commands.append('no channel-group {0} mode {1}'.format(group, mode))

    if purge:
        # drop any configured port-channels that are not in `want` at all
        for h in have:
            obj_in_want = search_obj_in_list(h['group'], want)
            if not obj_in_want:
                commands.append('no interface port-channel {0}'.format(h['group']))

    return commands
def map_params_to_obj(module):
    """Normalize module parameters into a list of linkagg dicts."""
    aggregate = module.params.get('aggregate')
    if not aggregate:
        # single-item form: build one entry straight from the flat params
        return [{
            'group': str(module.params['group']),
            'mode': module.params['mode'],
            'members': module.params['members'],
            'state': module.params['state']
        }]

    objects = []
    for item in aggregate:
        # fall back to the top-level params for any unset per-item keys
        for key in item:
            if item.get(key) is None:
                item[key] = module.params[key]

        entry = item.copy()
        entry['group'] = str(entry['group'])
        objects.append(entry)

    return objects
def parse_mode(module, config, group, member):
    """Extract the channel-group mode configured on `member` for `group`.

    Returns the mode string (e.g. 'active') or None when the interface
    section does not bind this channel-group.  `module` is unused but kept
    for signature parity with the other parse_* helpers.
    """
    mode = None
    netcfg = CustomNetworkConfig(indent=1, contents=config)
    parents = ['interface {0}'.format(member)]
    body = netcfg.get_section(parents)

    # only look for the channel-group line if the interface section exists
    match_int = re.findall(r'interface {0}\n'.format(member), body, re.M)
    if match_int:
        match = re.search(r'channel-group {0} mode (\S+)'.format(group), body, re.M)
        if match:
            mode = match.group(1)

    return mode
def parse_members(module, config, group):
    """Return interfaces whose config binds them to channel-group `group`.

    `config` is the raw running-config text; interface sections are
    separated by '!'.  `module` is unused but kept for signature parity
    with the other parse_* helpers.

    Idiom cleanup vs. the original: `re.search` instead of `re.findall`
    for a boolean test, the pointless `re.M` flag on already-stripped
    single sections dropped, and the ambiguous name `l` removed.
    """
    members = []
    for section in config.strip().split('!'):
        section = section.strip()
        if not section.startswith('interface'):
            continue
        if re.search(r'channel-group {0} mode'.format(group), section):
            match = re.search(r'interface (\S+)', section)
            if match:
                members.append(match.group(1))
    return members
def get_channel(module, config, group):
    """Build the {'mode': ..., 'members': ...} dict for a port-channel group."""
    match = re.findall(r'^interface (\S+)', config, re.M)
    if not match:
        return {}

    channel = {}
    for item in set(match):
        member = item
        # NOTE(review): both keys are overwritten on every iteration, so only
        # the last interface's parse_mode() result survives, and parse_members
        # is re-run with identical arguments each time — confirm intended.
        channel['mode'] = parse_mode(module, config, group, member)
        channel['members'] = parse_members(module, config, group)

    return channel
def map_config_to_obj(module):
    """Parse the device's running config into a list of linkagg dicts."""
    results = list()
    config = get_config(module)

    for raw_line in config.split('\n'):
        stripped = raw_line.strip()
        match = re.search(r'interface Port-channel(\S+)', stripped, re.M)
        if not match:
            continue

        group = match.group(1)
        entry = {'group': group}
        entry.update(get_channel(module, config, group))
        results.append(entry)

    return results
def main():
    """ main entry point for module execution
    """
    # argument spec shared by the flat form and each aggregate entry
    element_spec = dict(
        group=dict(type='int'),
        mode=dict(choices=['active', 'on', 'passive', 'auto', 'desirable']),
        members=dict(type='list'),
        state=dict(default='present',
                   choices=['present', 'absent'])
    )

    aggregate_spec = deepcopy(element_spec)
    aggregate_spec['group'] = dict(required=True)

    required_one_of = [['group', 'aggregate']]
    required_together = [['members', 'mode']]
    mutually_exclusive = [['group', 'aggregate']]

    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)

    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec,
                       required_together=required_together),
        purge=dict(default=False, type='bool')
    )

    argument_spec.update(element_spec)
    argument_spec.update(ios_argument_spec)

    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=required_one_of,
                           required_together=required_together,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)

    # NOTE(review): `warnings` is never populated, so the branch below is
    # effectively dead code kept for Ansible-module convention.
    warnings = list()
    result = {'changed': False}
    if warnings:
        result['warnings'] = warnings

    want = map_params_to_obj(module)
    have = map_config_to_obj(module)

    commands = map_obj_to_commands((want, have), module)
    result['commands'] = commands

    # only push config (and report a change) when commands were generated
    if commands:
        if not module.check_mode:
            load_config(module, commands)
        result['changed'] = True

    module.exit_json(**result)


if __name__ == '__main__':
    main()
// Copyright IBM Corp. 2016, 2025
// SPDX-License-Identifier: MPL-2.0
package api
import (
"context"
"net/http"
)
// TokenAuth is used to perform token backend operations on Vault.
// It is obtained from Auth.Token and holds a reference to the owning Client.
type TokenAuth struct {
	c *Client
}
// Token is used to return the client for token-backend API calls.
func (a *Auth) Token() *TokenAuth {
	return &TokenAuth{c: a.c}
}
// Create wraps CreateWithContext using context.Background.
func (c *TokenAuth) Create(opts *TokenCreateRequest) (*Secret, error) {
	return c.CreateWithContext(context.Background(), opts)
}

// CreateWithContext creates a new token via POST /v1/auth/token/create,
// sending opts as the JSON request body, and returns the parsed secret.
func (c *TokenAuth) CreateWithContext(ctx context.Context, opts *TokenCreateRequest) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPost, "/v1/auth/token/create")
	if err := r.SetJSONBody(opts); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// CreateOrphan wraps CreateOrphanWithContext using context.Background.
func (c *TokenAuth) CreateOrphan(opts *TokenCreateRequest) (*Secret, error) {
	return c.CreateOrphanWithContext(context.Background(), opts)
}

// CreateOrphanWithContext creates a token via POST /v1/auth/token/create-orphan,
// sending opts as the JSON request body, and returns the parsed secret.
func (c *TokenAuth) CreateOrphanWithContext(ctx context.Context, opts *TokenCreateRequest) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPost, "/v1/auth/token/create-orphan")
	if err := r.SetJSONBody(opts); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// CreateWithRole wraps CreateWithRoleWithContext using context.Background.
func (c *TokenAuth) CreateWithRole(opts *TokenCreateRequest, roleName string) (*Secret, error) {
	return c.CreateWithRoleWithContext(context.Background(), opts, roleName)
}

// CreateWithRoleWithContext creates a token against the named role via
// POST /v1/auth/token/create/<roleName> and returns the parsed secret.
func (c *TokenAuth) CreateWithRoleWithContext(ctx context.Context, opts *TokenCreateRequest, roleName string) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPost, "/v1/auth/token/create/"+roleName)
	if err := r.SetJSONBody(opts); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// Lookup wraps LookupWithContext using context.Background.
func (c *TokenAuth) Lookup(token string) (*Secret, error) {
	return c.LookupWithContext(context.Background(), token)
}

// LookupWithContext looks up the given token via POST /v1/auth/token/lookup,
// passing the token in the JSON request body.
func (c *TokenAuth) LookupWithContext(ctx context.Context, token string) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPost, "/v1/auth/token/lookup")
	if err := r.SetJSONBody(map[string]interface{}{
		"token": token,
	}); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// LookupAccessor wraps LookupAccessorWithContext using context.Background.
func (c *TokenAuth) LookupAccessor(accessor string) (*Secret, error) {
	return c.LookupAccessorWithContext(context.Background(), accessor)
}

// LookupAccessorWithContext looks up a token by its accessor via
// POST /v1/auth/token/lookup-accessor.
func (c *TokenAuth) LookupAccessorWithContext(ctx context.Context, accessor string) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPost, "/v1/auth/token/lookup-accessor")
	if err := r.SetJSONBody(map[string]interface{}{
		"accessor": accessor,
	}); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// LookupSelf wraps LookupSelfWithContext using context.Background.
func (c *TokenAuth) LookupSelf() (*Secret, error) {
	return c.LookupSelfWithContext(context.Background())
}

// LookupSelfWithContext looks up the client's own token via
// GET /v1/auth/token/lookup-self (no request body).
func (c *TokenAuth) LookupSelfWithContext(ctx context.Context) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodGet, "/v1/auth/token/lookup-self")

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// RenewAccessor wraps RenewAccessorWithContext using context.Background.
func (c *TokenAuth) RenewAccessor(accessor string, increment int) (*Secret, error) {
	return c.RenewAccessorWithContext(context.Background(), accessor, increment)
}

// RenewAccessorWithContext renews a token identified by its accessor via
// POST /v1/auth/token/renew-accessor, passing the accessor and increment
// in the JSON request body.
func (c *TokenAuth) RenewAccessorWithContext(ctx context.Context, accessor string, increment int) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPost, "/v1/auth/token/renew-accessor")
	if err := r.SetJSONBody(map[string]interface{}{
		"accessor":  accessor,
		"increment": increment,
	}); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// Renew wraps RenewWithContext using context.Background.
func (c *TokenAuth) Renew(token string, increment int) (*Secret, error) {
	return c.RenewWithContext(context.Background(), token, increment)
}

// RenewWithContext renews the given token via PUT /v1/auth/token/renew,
// passing the token and increment in the JSON request body.
func (c *TokenAuth) RenewWithContext(ctx context.Context, token string, increment int) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPut, "/v1/auth/token/renew")
	if err := r.SetJSONBody(map[string]interface{}{
		"token":     token,
		"increment": increment,
	}); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// RenewSelf wraps RenewSelfWithContext using context.Background.
func (c *TokenAuth) RenewSelf(increment int) (*Secret, error) {
	return c.RenewSelfWithContext(context.Background(), increment)
}

// RenewSelfWithContext renews the client's own token via
// PUT /v1/auth/token/renew-self with the increment in the JSON body.
func (c *TokenAuth) RenewSelfWithContext(ctx context.Context, increment int) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPut, "/v1/auth/token/renew-self")

	body := map[string]interface{}{"increment": increment}
	if err := r.SetJSONBody(body); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// RenewTokenAsSelf wraps RenewTokenAsSelfWithContext using context.Background.
func (c *TokenAuth) RenewTokenAsSelf(token string, increment int) (*Secret, error) {
	return c.RenewTokenAsSelfWithContext(context.Background(), token, increment)
}

// RenewTokenAsSelfWithContext behaves like renew-self, but authenticates using a provided
// token instead of the token attached to the client (it overrides the
// request's ClientToken for this single call).
func (c *TokenAuth) RenewTokenAsSelfWithContext(ctx context.Context, token string, increment int) (*Secret, error) {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPut, "/v1/auth/token/renew-self")
	r.ClientToken = token

	body := map[string]interface{}{"increment": increment}
	if err := r.SetJSONBody(body); err != nil {
		return nil, err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return ParseSecret(resp.Body)
}
// RevokeAccessor wraps RevokeAccessorWithContext using context.Background.
func (c *TokenAuth) RevokeAccessor(accessor string) error {
	return c.RevokeAccessorWithContext(context.Background(), accessor)
}

// RevokeAccessorWithContext revokes a token associated with the given accessor
// along with all the child tokens, via POST /v1/auth/token/revoke-accessor.
func (c *TokenAuth) RevokeAccessorWithContext(ctx context.Context, accessor string) error {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPost, "/v1/auth/token/revoke-accessor")
	if err := r.SetJSONBody(map[string]interface{}{
		"accessor": accessor,
	}); err != nil {
		return err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	return nil
}
// RevokeOrphan wraps RevokeOrphanWithContext using context.Background.
func (c *TokenAuth) RevokeOrphan(token string) error {
	return c.RevokeOrphanWithContext(context.Background(), token)
}

// RevokeOrphanWithContext revokes a token without revoking the tree underneath it (so
// child tokens are orphaned rather than revoked), via PUT /v1/auth/token/revoke-orphan.
func (c *TokenAuth) RevokeOrphanWithContext(ctx context.Context, token string) error {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPut, "/v1/auth/token/revoke-orphan")
	if err := r.SetJSONBody(map[string]interface{}{
		"token": token,
	}); err != nil {
		return err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	return nil
}
// RevokeSelf wraps RevokeSelfWithContext using context.Background.
func (c *TokenAuth) RevokeSelf(token string) error {
	return c.RevokeSelfWithContext(context.Background(), token)
}

// RevokeSelfWithContext revokes the token making the call. The `token` parameter is kept
// for backwards compatibility but is ignored; only the client's set token has
// an effect (note the parameter is never used below).
func (c *TokenAuth) RevokeSelfWithContext(ctx context.Context, token string) error {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPut, "/v1/auth/token/revoke-self")

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	return nil
}
// RevokeTree wraps RevokeTreeWithContext using context.Background.
func (c *TokenAuth) RevokeTree(token string) error {
	return c.RevokeTreeWithContext(context.Background(), token)
}

// RevokeTreeWithContext is the "normal" revoke operation that revokes the given token and
// the entire tree underneath -- all of its child tokens, their child tokens,
// etc. It calls PUT /v1/auth/token/revoke with the token in the JSON body.
func (c *TokenAuth) RevokeTreeWithContext(ctx context.Context, token string) error {
	ctx, cancelFunc := c.c.withConfiguredTimeout(ctx)
	defer cancelFunc()

	r := c.c.NewRequest(http.MethodPut, "/v1/auth/token/revoke")
	if err := r.SetJSONBody(map[string]interface{}{
		"token": token,
	}); err != nil {
		return err
	}

	resp, err := c.c.rawRequestWithContext(ctx, r)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	return nil
}
// TokenCreateRequest is the options structure for creating a token.
// Fields tagged omitempty are left out of the request body when they
// hold their zero value.
type TokenCreateRequest struct {
	ID              string            `json:"id,omitempty"`
	Policies        []string          `json:"policies,omitempty"`
	Metadata        map[string]string `json:"meta,omitempty"`
	Lease           string            `json:"lease,omitempty"`
	TTL             string            `json:"ttl,omitempty"`
	ExplicitMaxTTL  string            `json:"explicit_max_ttl,omitempty"`
	Period          string            `json:"period,omitempty"`
	NoParent        bool              `json:"no_parent,omitempty"`
	NoDefaultPolicy bool              `json:"no_default_policy,omitempty"`
	DisplayName     string            `json:"display_name"`
	NumUses         int               `json:"num_uses"`
	Renewable       *bool             `json:"renewable,omitempty"`
	Type            string            `json:"type"`
	EntityAlias     string            `json:"entity_alias"`
}
// Copyright 2016 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package v3compactor implements automated policies for compacting etcd's mvcc storage.
package v3compactor | go | github | https://github.com/etcd-io/etcd | server/etcdserver/api/v3compactor/doc.go |
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.collect.TableCollectors.toImmutableTable;
import static com.google.common.collect.Tables.immutableCell;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.base.Equivalence;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Table.Cell;
import com.google.common.testing.CollectorTester;
import java.util.function.BiPredicate;
import java.util.function.BinaryOperator;
import java.util.stream.Collector;
import java.util.stream.Stream;
import junit.framework.TestCase;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
/** Unit tests for {@link TableCollectors}. */
@GwtCompatible
@NullMarked
public class TableCollectorsTest extends TestCase {
public void testToImmutableTable() {
  // Three distinct cells collect into the equivalent ImmutableTable.
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(Cell::getRowKey, Cell::getColumnKey, Cell::getValue);
  BiPredicate<ImmutableTable<String, String, Integer>, ImmutableTable<String, String, Integer>>
      equivalence = pairwiseOnResultOf(ImmutableTable::cellSet);
  CollectorTester.of(collector, equivalence)
      .expectCollects(
          new ImmutableTable.Builder<String, String, Integer>()
              .put("one", "uno", 1)
              .put("two", "dos", 2)
              .put("three", "tres", 3)
              .buildOrThrow(),
          immutableCell("one", "uno", 1),
          immutableCell("two", "dos", 2),
          immutableCell("three", "tres", 3));
}
public void testToImmutableTableConflict() {
  // Without a merge function, duplicate (row, column) pairs are rejected.
  Stream<Cell<String, String, Integer>> duplicates =
      Stream.of(immutableCell("one", "uno", 1), immutableCell("one", "uno", 2));
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(Cell::getRowKey, Cell::getColumnKey, Cell::getValue);
  assertThrows(IllegalArgumentException.class, () -> duplicates.collect(collector));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when rowFunction result (null) is unboxed
@J2ktIncompatible
public void testToImmutableTableNullRowKey() {
  // A null row key produced by rowFunction must raise NullPointerException.
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(t -> null, Cell::getColumnKey, Cell::getValue);
  assertThrows(
      NullPointerException.class,
      () -> Stream.of(immutableCell("one", "uno", 1)).collect(collector));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when columnFunction result (null) is
// unboxed
@J2ktIncompatible
public void testToImmutableTableNullColumnKey() {
  // A null column key produced by columnFunction must raise NullPointerException.
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(Cell::getRowKey, t -> null, Cell::getValue);
  assertThrows(
      NullPointerException.class,
      () -> Stream.of(immutableCell("one", "uno", 1)).collect(collector));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when getValue result (null) is unboxed
@J2ktIncompatible
public void testToImmutableTableNullValue() {
  {
    // Case 1: valueFunction itself returns null.
    Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>>
        collector = toImmutableTable(Cell::getRowKey, Cell::getColumnKey, t -> null);
    assertThrows(
        NullPointerException.class,
        () -> Stream.of(immutableCell("one", "uno", 1)).collect(collector));
  }
  {
    // Case 2: a later cell carries a null value.
    Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>>
        collector = toImmutableTable(Cell::getRowKey, Cell::getColumnKey, Cell::getValue);
    assertThrows(
        NullPointerException.class,
        () ->
            Stream.of(immutableCell("one", "uno", 1), immutableCell("one", "uno", (Integer) null))
                .collect(collector));
  }
}
public void testToImmutableTableMerging() {
  // Duplicate ("two", "dos") cells are merged with Integer::sum -> 2 + 4 = 6.
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(Cell::getRowKey, Cell::getColumnKey, Cell::getValue, Integer::sum);
  BiPredicate<ImmutableTable<String, String, Integer>, ImmutableTable<String, String, Integer>>
      equivalence = pairwiseOnResultOf(ImmutableTable::cellSet);
  CollectorTester.of(collector, equivalence)
      .expectCollects(
          new ImmutableTable.Builder<String, String, Integer>()
              .put("one", "uno", 1)
              .put("two", "dos", 6)
              .put("three", "tres", 3)
              .buildOrThrow(),
          immutableCell("one", "uno", 1),
          immutableCell("two", "dos", 2),
          immutableCell("three", "tres", 3),
          immutableCell("two", "dos", 4));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when rowFunction result (null) is unboxed
@J2ktIncompatible
public void testToImmutableTableMergingNullRowKey() {
  // Same null-row-key check as above, but for the merging overload.
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(t -> null, Cell::getColumnKey, Cell::getValue, Integer::sum);
  assertThrows(
      NullPointerException.class,
      () -> Stream.of(immutableCell("one", "uno", 1)).collect(collector));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when columnFunction result (null) is
// unboxed
@J2ktIncompatible
public void testToImmutableTableMergingNullColumnKey() {
  // Same null-column-key check as above, but for the merging overload.
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(Cell::getRowKey, t -> null, Cell::getValue, Integer::sum);
  assertThrows(
      NullPointerException.class,
      () -> Stream.of(immutableCell("one", "uno", 1)).collect(collector));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when valueFunction result (null) is
// unboxed
@J2ktIncompatible
public void testToImmutableTableMergingNullValue() {
  {
    // Case 1: valueFunction itself returns null.
    Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>>
        collector =
            toImmutableTable(Cell::getRowKey, Cell::getColumnKey, t -> null, Integer::sum);
    assertThrows(
        NullPointerException.class,
        () -> Stream.of(immutableCell("one", "uno", 1)).collect(collector));
  }
  {
    // Case 2: a later cell carries a null value; the null-tolerant merge
    // function never gets a chance to run.
    Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>>
        collector =
            toImmutableTable(
                Cell::getRowKey,
                Cell::getColumnKey,
                Cell::getValue,
                (i, j) -> MoreObjects.firstNonNull(i, 0) + MoreObjects.firstNonNull(j, 0));
    assertThrows(
        NullPointerException.class,
        () ->
            Stream.of(immutableCell("one", "uno", 1), immutableCell("one", "uno", (Integer) null))
                .collect(collector));
  }
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when mergeFunction result (null) is
// unboxed
@J2ktIncompatible
public void testToImmutableTableMergingNullMerge() {
  // A merge function returning null must raise NullPointerException here.
  Collector<Cell<String, String, Integer>, ?, ImmutableTable<String, String, Integer>> collector =
      toImmutableTable(Cell::getRowKey, Cell::getColumnKey, Cell::getValue, (v1, v2) -> null);
  assertThrows(
      NullPointerException.class,
      () ->
          Stream.of(immutableCell("one", "uno", 1), immutableCell("one", "uno", 2))
              .collect(collector));
}
public void testToTable() {
  // Mutable-table variant: collect into a HashBasedTable supplier.
  Collector<Cell<String, String, Integer>, ?, Table<String, String, Integer>> collector =
      TableCollectors.toTable(
          Cell::getRowKey, Cell::getColumnKey, Cell::getValue, HashBasedTable::create);
  BiPredicate<Table<String, String, Integer>, Table<String, String, Integer>> equivalence =
      pairwiseOnResultOf(Table::cellSet);
  CollectorTester.of(collector, equivalence)
      .expectCollects(
          new ImmutableTable.Builder<String, String, Integer>()
              .put("one", "uno", 1)
              .put("two", "dos", 2)
              .put("three", "tres", 3)
              .buildOrThrow(),
          immutableCell("one", "uno", 1),
          immutableCell("two", "dos", 2),
          immutableCell("three", "tres", 3));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when mergeFunction result (null) is
// unboxed
@J2ktIncompatible
public void testToTableNullMerge() {
  // TODO github.com/google/guava/issues/6824 - the null merge feature is not compatible with the
  // current nullness annotation of the mergeFunction parameter. Work around with casts.
  BinaryOperator<@Nullable Integer> mergeFunction = (v1, v2) -> null;
  Collector<Cell<String, String, Integer>, ?, Table<String, String, Integer>> collector =
      TableCollectors.toTable(
          Cell::getRowKey,
          Cell::getColumnKey,
          Cell::getValue,
          (BinaryOperator<Integer>) mergeFunction,
          HashBasedTable::create);
  BiPredicate<Table<String, String, Integer>, Table<String, String, Integer>> equivalence =
      pairwiseOnResultOf(Table::cellSet);
  // The expected result is the empty table (see expectCollects below): a null
  // merge result leaves no entry for the duplicated cell.
  CollectorTester.of(collector, equivalence)
      .expectCollects(
          ImmutableTable.of(), immutableCell("one", "uno", 1), immutableCell("one", "uno", 2));
}
// https://youtrack.jetbrains.com/issue/KT-58242/. Crash when getValue result (null) is unboxed
@J2ktIncompatible
public void testToTableNullValues() {
  // Even with an ArrayTable supplier (which permits null slots), collecting a
  // cell whose value is null must raise NullPointerException.
  Collector<Cell<String, String, Integer>, ?, Table<String, String, Integer>> collector =
      TableCollectors.toTable(
          Cell::getRowKey,
          Cell::getColumnKey,
          Cell::getValue,
          () -> {
            Table<String, String, @Nullable Integer> table =
                ArrayTable.create(ImmutableList.of("one"), ImmutableList.of("uno"));
            return (Table<String, String, Integer>) table;
          });
  Cell<String, String, @Nullable Integer> cell = immutableCell("one", "uno", null);
  assertThrows(
      NullPointerException.class,
      () -> Stream.of((Cell<String, String, Integer>) cell).collect(collector));
}
// Without a merge function, collecting two cells with the same (row, column)
// key must fail with IllegalStateException.
public void testToTableConflict() {
  Collector<Cell<String, String, Integer>, ?, Table<String, String, Integer>> collector =
      TableCollectors.toTable(
          Cell::getRowKey, Cell::getColumnKey, Cell::getValue, HashBasedTable::create);
  assertThrows(
      IllegalStateException.class,
      () ->
          Stream.of(immutableCell("one", "uno", 1), immutableCell("one", "uno", 2))
              .collect(collector));
}
// With Integer::sum as the merge function, the colliding ("two", "dos") cells
// (values 2 and 4) are combined into a single entry with value 6.
public void testToTableMerging() {
  Collector<Cell<String, String, Integer>, ?, Table<String, String, Integer>> collector =
      TableCollectors.toTable(
          Cell::getRowKey,
          Cell::getColumnKey,
          Cell::getValue,
          Integer::sum,
          HashBasedTable::create);
  BiPredicate<Table<String, String, Integer>, Table<String, String, Integer>> equivalence =
      pairwiseOnResultOf(Table::cellSet);
  CollectorTester.of(collector, equivalence)
      .expectCollects(
          new ImmutableTable.Builder<String, String, Integer>()
              .put("one", "uno", 1)
              .put("two", "dos", 6)
              .put("three", "tres", 3)
              .buildOrThrow(),
          immutableCell("one", "uno", 1),
          immutableCell("two", "dos", 2),
          immutableCell("three", "tres", 3),
          immutableCell("two", "dos", 4));
}
// This function specifically returns a BiPredicate, because Guava's Equivalence class does not
// actually implement BiPredicate, and CollectorTester expects a BiPredicate.
static <C, E extends @Nullable Object, R extends Iterable<E>>
    BiPredicate<C, C> pairwiseOnResultOf(Function<C, R> arg) {
  // Two values are equivalent when applying |arg| to each yields iterables
  // whose corresponding elements are pairwise equal.
  Equivalence<C> equivalence = Equivalence.equals().<E>pairwise().onResultOf(arg);
  return equivalence::equivalent;
}
} | java | github | https://github.com/google/guava | android/guava-tests/test/com/google/common/collect/TableCollectorsTest.java |
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import os
import sys
import re
from optparse import OptionParser
"""Extracts the list of resident symbols of a library loaded in a process.
This scripts combines the extended output of memdump for a given process
(obtained through memdump -x PID) and the symbol table of a .so loaded in that
process (obtained through nm -C lib-with-symbols.so), filtering out only those
symbols that, at the time of the snapshot, were resident in memory (that is,
the symbols whose start address belongs to a mapped page of the .so that was
resident at the time of the snapshot).
The aim is to perform a "code coverage"-like profiling of a binary, intersecting
run-time information (list of resident pages) and debug symbols.
"""
_PAGE_SIZE = 4096
def _TestBit(word, bit):
assert(bit >= 0 and bit < 8)
return not not ((word >> bit) & 1)
def _HexAddr(addr):
return hex(addr)[2:].zfill(8)
def _GetResidentPagesSet(memdump_contents, lib_name, verbose):
"""Parses the memdump output and extracts the resident page set for lib_name.
Args:
memdump_contents: Array of strings (lines) of a memdump output.
lib_name: A string containing the name of the library.so to be matched.
verbose: Print a verbose header for each mapping matched.
Returns:
A set of resident pages (the key is the page index) for all the
mappings matching .*lib_name.
"""
resident_pages = set()
MAP_RX = re.compile(
r'^([0-9a-f]+)-([0-9a-f]+) ([\w-]+) ([0-9a-f]+) .* "(.*)" \[(.*)\]$')
for line in memdump_contents:
line = line.rstrip('\r\n')
if line.startswith('[ PID'):
continue
r = MAP_RX.match(line)
if not r:
sys.stderr.write('Skipping %s from %s\n' % (line, memdump_file))
continue
map_start = int(r.group(1), 16)
map_end = int(r.group(2), 16)
prot = r.group(3)
offset = int(r.group(4), 16)
assert(offset % _PAGE_SIZE == 0)
lib = r.group(5)
enc_bitmap = r.group(6)
if not lib.endswith(lib_name):
continue
bitmap = base64.b64decode(enc_bitmap)
map_pages_count = (map_end - map_start + 1) / _PAGE_SIZE
bitmap_pages_count = len(bitmap) * 8
if verbose:
print 'Found %s: mapped %d pages in mode %s @ offset %s.' % (
lib, map_pages_count, prot, _HexAddr(offset))
print ' Map range in the process VA: [%s - %s]. Len: %s' % (
_HexAddr(map_start),
_HexAddr(map_end),
_HexAddr(map_pages_count * _PAGE_SIZE))
print ' Corresponding addresses in the binary: [%s - %s]. Len: %s' % (
_HexAddr(offset),
_HexAddr(offset + map_end - map_start),
_HexAddr(map_pages_count * _PAGE_SIZE))
print ' Bitmap: %d pages' % bitmap_pages_count
print ''
assert(bitmap_pages_count >= map_pages_count)
for i in xrange(map_pages_count):
bitmap_idx = i / 8
bitmap_off = i % 8
if (bitmap_idx < len(bitmap) and
_TestBit(ord(bitmap[bitmap_idx]), bitmap_off)):
resident_pages.add(offset / _PAGE_SIZE + i)
return resident_pages
def main(argv):
NM_RX = re.compile(r'^([0-9a-f]+)\s+.*$')
parser = OptionParser()
parser.add_option("-r", "--reverse",
action="store_true", dest="reverse", default=False,
help="Print out non present symbols")
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose", default=False,
help="Print out verbose debug information.")
(options, args) = parser.parse_args()
if len(args) != 3:
print 'Usage: %s [-v] memdump.file nm.file library.so' % (
os.path.basename(argv[0]))
return 1
memdump_file = args[0]
nm_file = args[1]
lib_name = args[2]
if memdump_file == '-':
memdump_contents = sys.stdin.readlines()
else:
memdump_contents = open(memdump_file, 'r').readlines()
resident_pages = _GetResidentPagesSet(memdump_contents,
lib_name,
options.verbose)
# Process the nm symbol table, filtering out the resident symbols.
nm_fh = open(nm_file, 'r')
for line in nm_fh:
line = line.rstrip('\r\n')
# Skip undefined symbols (lines with no address).
if line.startswith(' '):
continue
r = NM_RX.match(line)
if not r:
sys.stderr.write('Skipping %s from %s\n' % (line, nm_file))
continue
sym_addr = int(r.group(1), 16)
sym_page = sym_addr / _PAGE_SIZE
last_sym_matched = (sym_page in resident_pages)
if (sym_page in resident_pages) != options.reverse:
print line
return 0
# Standard script entry point: forward sys.argv and use main's return value
# as the process exit code.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
# Copyright (C) 2005 Martin v. Löwis
# Licensed to PSF under a Contributor Agreement.
from _msi import *
import os, string, re, sys
# Detect the bitness of the running Python build from sys.version; Win64
# selects the matching MSI template string and 64-bit component flag below.
AMD64 = "AMD64" in sys.version
Itanium = "Itanium" in sys.version
Win64 = AMD64 or Itanium

# Column-type bit flags for MSI table schemas.
# Partially taken from Wine
datasizemask = 0x00ff       # low byte: declared column size in bytes
type_valid = 0x0100
type_localizable = 0x0200
typemask = 0x0c00           # mask selecting one of the four base types below
type_long = 0x0000
type_short = 0x0400
type_string = 0x0c00
type_binary = 0x0800
type_nullable = 0x1000      # column accepts NULL
type_key = 0x2000           # column is part of the primary key
# XXX temporary, localizable?
# Bits Table.sql() understands; anything outside this mask is reported.
knownbits = datasizemask | type_valid | type_localizable | \
            typemask | type_nullable | type_key
class Table:
    """Schema description of one MSI table.

    Columns are collected via add_field() and the whole table can then be
    created in a database with create().
    """

    def __init__(self, name):
        self.name = name
        self.fields = []  # list of (1-based index, column name, type bitmask)

    def add_field(self, index, name, type):
        """Record column |name| at 1-based position |index| with the MSI type
        bitmask |type| (see the type_* constants above)."""
        self.fields.append((index, name, type))

    def sql(self):
        """Return the CREATE TABLE statement for this table.

        Unknown type bits and unknown integer sizes are reported on stdout but
        do not abort SQL generation.
        """
        keys = []
        self.fields.sort()
        # FIX: removed a dead "fields = []" assignment that was immediately
        # overwritten by the placeholder list below.
        fields = [None] * len(self.fields)
        for index, name, type in self.fields:
            index -= 1  # switch to 0-based for list assignment
            unk = type & ~knownbits
            if unk:
                print("%s.%s unknown bits %x" % (self.name, name, unk))
            size = type & datasizemask
            dtype = type & typemask
            if dtype == type_string:
                if size:
                    tname = "CHAR(%d)" % size
                else:
                    tname = "CHAR"
            elif dtype == type_short:
                assert size == 2
                tname = "SHORT"
            elif dtype == type_long:
                assert size == 4
                tname = "LONG"
            elif dtype == type_binary:
                assert size == 0
                tname = "OBJECT"
            else:
                tname = "unknown"
                # FIX: the message was missing the space between the column
                # name and the text ("%s.%sunknown integer type %d").
                print("%s.%s unknown integer type %d" % (self.name, name, size))
            if type & type_nullable:
                flags = ""
            else:
                flags = " NOT NULL"
            if type & type_localizable:
                flags += " LOCALIZABLE"
            fields[index] = "`%s` %s%s" % (name, tname, flags)
            if type & type_key:
                keys.append("`%s`" % name)
        fields = ", ".join(fields)
        keys = ", ".join(keys)
        return "CREATE TABLE %s (%s PRIMARY KEY %s)" % (self.name, fields, keys)

    def create(self, db):
        """Execute this table's CREATE TABLE statement against an open MSI
        database handle."""
        v = db.OpenView(self.sql())
        v.Execute(None)
        v.Close()
class _Unspecified:pass
def change_sequence(seq, action, seqno=_Unspecified, cond=_Unspecified):
    """Change the sequence number of an action in a sequence list.

    |seq| is mutated in place; unspecified arguments keep the existing
    condition/sequence number. Raises ValueError if |action| is absent.
    """
    for idx in range(len(seq)):
        if seq[idx][0] != action:
            continue
        if cond is _Unspecified:
            cond = seq[idx][1]
        if seqno is _Unspecified:
            seqno = seq[idx][2]
        seq[idx] = (action, cond, seqno)
        return
    raise ValueError("Action not found in sequence")
def add_data(db, table, values):
    """Insert rows into an MSI table.

    Args:
        db: open MSI database handle.
        table: table name (interpolated into the SELECT; must be trusted).
        values: sequence of row tuples; each entry may be int, str, None or
            Binary (stored as a stream). Row length must match the table's
            column count.

    Raises:
        MSIError: if a row cannot be inserted.
        TypeError: for unsupported field types.
    """
    v = db.OpenView("SELECT * FROM `%s`" % table)
    count = v.GetColumnInfo(MSICOLINFO_NAMES).GetFieldCount()
    r = CreateRecord(count)
    for value in values:
        assert len(value) == count, value
        for i in range(count):
            field = value[i]
            if isinstance(field, int):
                r.SetInteger(i+1, field)
            elif isinstance(field, str):
                r.SetString(i+1, field)
            elif field is None:
                pass  # leave the record field empty (SQL NULL)
            elif isinstance(field, Binary):
                r.SetStream(i+1, field.name)
            else:
                raise TypeError("Unsupported type %s" % field.__class__.__name__)
        try:
            v.Modify(MSIMODIFY_INSERT, r)
        except Exception as e:
            # FIX: chain the low-level _msi error so the root cause of the
            # failed insert is not discarded.
            raise MSIError("Could not insert "+repr(values)+" into "+table) from e
        r.ClearData()
    v.Close()
def add_stream(db, name, path):
    """Store the contents of the file at |path| in the _Streams table under
    stream name |name|.

    NOTE(review): |name| is interpolated into the SQL statement verbatim, so
    callers must pass trusted identifiers only.
    """
    view = db.OpenView("INSERT INTO _Streams (Name, Data) VALUES ('%s', ?)" % name)
    record = CreateRecord(1)
    record.SetStream(1, path)
    view.Execute(record)
    view.Close()
def init_database(name, schema,
                  ProductName, ProductCode, ProductVersion,
                  Manufacturer):
    """Create a new MSI database file and return the open database handle.

    Any existing file at |name| is removed first. The tables and validation
    records come from |schema|; the remaining arguments fill the summary
    information stream and the Property table.
    """
    try:
        os.unlink(name)
    except OSError:
        pass  # file did not exist; nothing to remove
    # MSI product codes are conventionally uppercase GUIDs.
    ProductCode = ProductCode.upper()
    # Create the database
    db = OpenDatabase(name, MSIDBOPEN_CREATE)
    # Create the tables
    for t in schema.tables:
        t.create(db)
    # Fill the validation table
    add_data(db, "_Validation", schema._Validation_records)
    # Initialize the summary information, allowing at most 20 properties
    si = db.GetSummaryInformation(20)
    si.SetProperty(PID_TITLE, "Installation Database")
    si.SetProperty(PID_SUBJECT, ProductName)
    si.SetProperty(PID_AUTHOR, Manufacturer)
    # The template property encodes target architecture and language id.
    if Itanium:
        si.SetProperty(PID_TEMPLATE, "Intel64;1033")
    elif AMD64:
        si.SetProperty(PID_TEMPLATE, "x64;1033")
    else:
        si.SetProperty(PID_TEMPLATE, "Intel;1033")
    # Each build gets a fresh package code.
    si.SetProperty(PID_REVNUMBER, gen_uuid())
    si.SetProperty(PID_WORDCOUNT, 2) # long file names, compressed, original media
    si.SetProperty(PID_PAGECOUNT, 200)
    si.SetProperty(PID_APPNAME, "Python MSI Library")
    # XXX more properties
    si.Persist()
    add_data(db, "Property", [
        ("ProductName", ProductName),
        ("ProductCode", ProductCode),
        ("ProductVersion", ProductVersion),
        ("Manufacturer", Manufacturer),
        ("ProductLanguage", "1033")])
    db.Commit()
    return db
def add_tables(db, module):
    """Populate |db| from a schema module: each name listed in module.tables
    is both a table name and a module attribute holding that table's rows."""
    for name in module.tables:
        add_data(db, name, getattr(module, name))
def make_id(str):
    """Turn an arbitrary string into a valid MSI identifier: disallowed
    characters become underscores, and a leading digit or dot is prefixed
    with an underscore."""
    allowed = string.ascii_letters + string.digits + "._"
    str = "".join(c if c in allowed else "_" for c in str)
    if str[0] in string.digits + ".":
        str = "_" + str
    assert re.match("^[A-Za-z_][A-Za-z0-9_.]*$", str), "FILE"+str
    return str
def gen_uuid():
    """Return a freshly generated uppercase GUID wrapped in braces, the form
    MSI expects for product and component codes."""
    return "{%s}" % UuidCreate().upper()
class CAB:
    """Collects files destined for one cabinet and writes the cabinet into
    the database (Media table plus a binary stream) on commit()."""

    def __init__(self, name):
        self.name = name
        self.files = []          # (full source path, logical name) pairs
        self.filenames = set()   # logical names already handed out
        self.index = 0           # sequence number of the last appended file

    def gen_id(self, file):
        """Return a unique logical identifier derived from |file|."""
        base = make_id(file)
        candidate = base
        suffix = 1
        while candidate in self.filenames:
            candidate = "%s.%d" % (base, suffix)
            suffix += 1
        self.filenames.add(candidate)
        return candidate

    def append(self, full, file, logical):
        """Schedule |full| for inclusion under |logical| (generated from
        |file| when empty). Returns (sequence, logical); directories are
        silently skipped and return None."""
        if os.path.isdir(full):
            return
        if not logical:
            logical = self.gen_id(file)
        self.index += 1
        self.files.append((full, logical))
        return self.index, logical

    def commit(self, db):
        """Build the cabinet file, register it in the Media table, store it
        as stream |self.name|, and commit the database."""
        from tempfile import mktemp
        cab_path = mktemp()
        FCICreate(cab_path, self.files)
        add_data(db, "Media",
                 [(1, self.index, None, "#"+self.name, None, None)])
        add_stream(db, self.name, cab_path)
        os.unlink(cab_path)
        db.Commit()
_directories = set()
class Directory:
    def __init__(self, db, cab, basedir, physical, _logical, default,
                 componentflags=None):
        """Create a new directory in the Directory table. There is a current
        component at each point in time for the directory, which is either
        explicitly created through start_component, or implicitly when files
        are added for the first time. Files are added into the current
        component, and into the cab file. To create a directory, a base
        directory object needs to be specified (can be None), the path to the
        physical directory, and a logical directory name. Default specifies
        the DefaultDir slot in the directory table. componentflags specifies
        the default flags that new components get."""
        index = 1
        _logical = make_id(_logical)
        logical = _logical
        # Disambiguate against every directory created so far (module-global
        # _directories set) by appending a counter.
        while logical in _directories:
            logical = "%s%d" % (_logical, index)
            index += 1
        _directories.add(logical)
        self.db = db
        self.cab = cab
        self.basedir = basedir
        self.physical = physical
        self.logical = logical
        self.component = None
        self.short_names = set()   # 8.3 names already used in this directory
        self.ids = set()           # logical file ids already used
        self.keyfiles = {}         # file name -> logical id of key files
        self.componentflags = componentflags
        if basedir:
            self.absolute = os.path.join(basedir.absolute, physical)
            blogical = basedir.logical
        else:
            self.absolute = physical
            blogical = None
        add_data(db, "Directory", [(logical, blogical, default)])

    def start_component(self, component=None, feature=None, flags=None,
                        keyfile=None, uuid=None):
        """Add an entry to the Component table, and make this component the
        current for this directory. If no component name is given, the
        directory name is used. If no feature is given, the current feature
        is used. If no flags are given, the directory's default flags are
        used. If no keyfile is given, the KeyPath is left null in the
        Component table."""
        if flags is None:
            flags = self.componentflags
        if uuid is None:
            uuid = gen_uuid()
        else:
            uuid = uuid.upper()
        if component is None:
            component = self.logical
        self.component = component
        if Win64:
            # msidbComponentAttributes64bit
            flags |= 256
        if keyfile:
            # BUG FIX: CAB.gen_id takes a single file-name argument; the old
            # call passed (self.absolute, keyfile) and raised TypeError.
            keyid = self.cab.gen_id(keyfile)
            self.keyfiles[keyfile] = keyid
        else:
            keyid = None
        add_data(self.db, "Component",
                 [(component, uuid, self.logical, flags, None, keyid)])
        if feature is None:
            feature = current_feature
        add_data(self.db, "FeatureComponents",
                 [(feature.id, component)])

    def make_short(self, file):
        """Derive a unique MSI 8.3 short name for |file| within this
        directory, generating PREFIX~N.EXT style names on collision."""
        oldfile = file
        file = file.replace('+', '_')
        file = ''.join(c for c in file if not c in ' "/\[]:;=,')
        parts = file.split(".")
        if len(parts) > 1:
            prefix = "".join(parts[:-1]).upper()
            suffix = parts[-1].upper()
            if not prefix:
                prefix = suffix
                suffix = None
        else:
            prefix = file.upper()
            suffix = None
        # Use the name verbatim only if it already fits the 8.3 convention
        # and required no character substitutions.
        if len(parts) < 3 and len(prefix) <= 8 and file == oldfile and (
                not suffix or len(suffix) <= 3):
            if suffix:
                file = prefix+"."+suffix
            else:
                file = prefix
        else:
            file = None
        if file is None or file in self.short_names:
            prefix = prefix[:6]
            if suffix:
                suffix = suffix[:3]
            pos = 1
            while 1:
                if suffix:
                    file = "%s~%d.%s" % (prefix, pos, suffix)
                else:
                    file = "%s~%d" % (prefix, pos)
                if file not in self.short_names: break
                pos += 1
                assert pos < 10000
                # Shrink the prefix as the counter gains digits so the name
                # stays within 8 characters.
                if pos in (10, 100, 1000):
                    prefix = prefix[:-1]
        self.short_names.add(file)
        assert not re.search(r'[\?|><:/*"+,;=\[\]]', file) # restrictions on short names
        return file

    def add_file(self, file, src=None, version=None, language=None):
        """Add a file to the current component of the directory, starting a
        new one if there is no current component. By default, the file name
        in the source and the file table will be identical. If the src file
        is specified, it is interpreted relative to the current directory.
        Optionally, a version and a language can be specified for the entry
        in the File table."""
        if not self.component:
            self.start_component(self.logical, current_feature, 0)
        if not src:
            # Allow relative paths for file if src is not specified
            src = file
            file = os.path.basename(file)
        absolute = os.path.join(self.absolute, src)
        assert not re.search(r'[\?|><:/*]"', file) # restrictions on long names
        if file in self.keyfiles:
            logical = self.keyfiles[file]
        else:
            logical = None
        sequence, logical = self.cab.append(absolute, file, logical)
        assert logical not in self.ids
        self.ids.add(logical)
        short = self.make_short(file)
        full = "%s|%s" % (short, file)
        filesize = os.stat(absolute).st_size
        # constants.msidbFileAttributesVital
        # Compressed omitted, since it is the database default
        # could add r/o, system, hidden
        attributes = 512
        add_data(self.db, "File",
                 [(logical, self.component, full, filesize, version,
                   language, attributes, sequence)])
        #if not version:
        #    # Add hash if the file is not versioned
        #    filehash = FileHash(absolute, 0)
        #    add_data(self.db, "MsiFileHash",
        #             [(logical, 0, filehash.IntegerData(1),
        #               filehash.IntegerData(2), filehash.IntegerData(3),
        #               filehash.IntegerData(4))])
        # Automatically remove .pyc files on uninstall (2)
        # XXX: adding so many RemoveFile entries makes installer unbelievably
        # slow. So instead, we have to use wildcard remove entries
        if file.endswith(".py"):
            add_data(self.db, "RemoveFile",
                     [(logical+"c", self.component, "%sC|%sc" % (short, file),
                       self.logical, 2),
                      (logical+"o", self.component, "%sO|%so" % (short, file),
                       self.logical, 2)])
        return logical

    def glob(self, pattern, exclude=None):
        """Add a list of files to the current component as specified in the
        glob pattern. Individual files can be excluded in the exclude list."""
        # BUG FIX: the glob module was never imported at module level, so this
        # method always raised NameError; import it locally instead.
        # NOTE(review): glob.glob1 is an undocumented helper (removed in
        # Python 3.13) -- consider fnmatch.filter(os.listdir(...)) instead.
        import glob as _glob
        files = _glob.glob1(self.absolute, pattern)
        for f in files:
            if exclude and f in exclude: continue
            self.add_file(f)
        return files

    def remove_pyc(self):
        "Remove .pyc files on uninstall"
        add_data(self.db, "RemoveFile",
                 [(self.component+"c", self.component, "*.pyc", self.logical, 2)])
class Binary:
    """Wraps the name of a file whose contents add_data() stores as a binary
    stream (via Record.SetStream)."""

    def __init__(self, fname):
        self.name = fname

    def __repr__(self):
        template = 'msilib.Binary(os.path.join(dirname,"%s"))'
        return template % self.name
class Feature:
    """Registers one row in the Feature table and remembers its id so that
    components can later be attached to it (see set_current)."""

    def __init__(self, db, id, title, desc, display, level=1,
                 parent=None, directory=None, attributes=0):
        self.id = id
        parent_id = parent.id if parent else parent
        add_data(db, "Feature",
                 [(id, parent_id, title, desc, display,
                   level, directory, attributes)])

    def set_current(self):
        # Subsequent Directory.start_component calls default to this feature.
        global current_feature
        current_feature = self
class Control:
    """Base class for dialog controls; writes its own event, event-mapping
    and condition rows into the corresponding MSI tables."""

    def __init__(self, dlg, name):
        self.dlg = dlg
        self.name = name

    def event(self, event, argument, condition="1", ordering=None):
        row = (self.dlg.name, self.name, event, argument, condition, ordering)
        add_data(self.dlg.db, "ControlEvent", [row])

    def mapping(self, event, attribute):
        row = (self.dlg.name, self.name, event, attribute)
        add_data(self.dlg.db, "EventMapping", [row])

    def condition(self, action, condition):
        row = (self.dlg.name, self.name, action, condition)
        add_data(self.dlg.db, "ControlCondition", [row])
class RadioButtonGroup(Control):
    """A radio button group control; add() appends the individual buttons as
    rows of the RadioButton table, numbered from 1."""

    def __init__(self, dlg, name, property):
        self.dlg = dlg
        self.name = name
        self.property = property
        self.index = 1

    def add(self, name, x, y, w, h, text, value=None):
        if value is None:
            value = name
        row = (self.property, self.index, value, x, y, w, h, text, None)
        add_data(self.dlg.db, "RadioButton", [row])
        self.index += 1
class Dialog:
    """Registers a row in the Dialog table and offers helpers that create the
    common control kinds on that dialog."""

    def __init__(self, db, name, x, y, w, h, attr, title, first, default, cancel):
        self.db = db
        self.name = name
        self.x, self.y, self.w, self.h = x,y,w,h
        add_data(db, "Dialog", [(name, x,y,w,h,attr,title,first,default,cancel)])

    def control(self, name, type, x, y, w, h, attr, prop, text, next, help):
        # Generic control factory; the typed helpers below delegate here.
        add_data(self.db, "Control",
                 [(self.name, name, type, x, y, w, h, attr, prop, text, next, help)])
        return Control(self, name)

    def text(self, name, x, y, w, h, attr, text):
        return self.control(name, "Text", x, y, w, h, attr, None,
                            text, None, None)

    def bitmap(self, name, x, y, w, h, text):
        # |text| names the Binary-table entry holding the image.
        return self.control(name, "Bitmap", x, y, w, h, 1, None, text, None, None)

    def line(self, name, x, y, w, h):
        return self.control(name, "Line", x, y, w, h, 1, None, None, None, None)

    def pushbutton(self, name, x, y, w, h, attr, text, next):
        return self.control(name, "PushButton", x, y, w, h, attr, None, text, next, None)

    def radiogroup(self, name, x, y, w, h, attr, prop, text, next):
        # Returns a RadioButtonGroup instead of a plain Control so callers
        # can append the individual buttons.
        add_data(self.db, "Control",
                 [(self.name, name, "RadioButtonGroup",
                   x, y, w, h, attr, prop, text, next, None)])
        return RadioButtonGroup(self, name, prop)

    def checkbox(self, name, x, y, w, h, attr, prop, text, next):
        return self.control(name, "CheckBox", x, y, w, h, attr, prop, text, next, None)
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package plans
import (
"fmt"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/genconfig"
"github.com/hashicorp/terraform/internal/lang/marks"
"github.com/hashicorp/terraform/internal/providers"
"github.com/hashicorp/terraform/internal/schemarepo"
"github.com/hashicorp/terraform/internal/states"
"github.com/hashicorp/terraform/internal/tfdiags"
)
// Changes describes various actions that Terraform will attempt to take if
// the corresponding plan is applied.
type Changes struct {
	// Resources tracks planned changes to resource instance objects.
	Resources []*ResourceInstanceChange

	// Queries records planned query instances (see QueryInstance).
	Queries []*QueryInstance

	// ActionInvocations tracks planned action invocations, which may have
	// embedded resource instance changes.
	ActionInvocations ActionInvocationInstances

	// Outputs tracks planned changes output values.
	//
	// Note that although an in-memory plan contains planned changes for
	// outputs throughout the configuration, a plan serialized
	// to disk retains only the root outputs because they are
	// externally-visible, while other outputs are implementation details and
	// can be easily re-calculated during the apply phase. Therefore only root
	// module outputs will survive a round-trip through a plan file.
	Outputs []*OutputChange
}
// NewChanges returns a valid Changes object that describes no changes.
func NewChanges() *Changes {
	ret := &Changes{}
	return ret
}
// Encode encodes all the stored resource and output changes into a new *ChangeSrc value
func (c *Changes) Encode(schemas *schemarepo.Schemas) (*ChangesSrc, error) {
	// a plan is always built even when there are errors, so make sure to return
	// a valid changesSrc.
	changesSrc := NewChangesSrc()

	for _, rc := range c.Resources {
		p, ok := schemas.Providers[rc.ProviderAddr.Provider]
		if !ok {
			return changesSrc, fmt.Errorf("Changes.Encode: missing provider %s for %s", rc.ProviderAddr, rc.Addr)
		}

		// Pick the schema matching the resource's mode; an unknown mode is a
		// programming error, hence the panic.
		var schema providers.Schema
		switch rc.Addr.Resource.Resource.Mode {
		case addrs.ManagedResourceMode:
			schema = p.ResourceTypes[rc.Addr.Resource.Resource.Type]
		case addrs.DataResourceMode:
			schema = p.DataSources[rc.Addr.Resource.Resource.Type]
		case addrs.ListResourceMode:
			schema = p.ListResourceTypes[rc.Addr.Resource.Resource.Type]
		default:
			panic(fmt.Sprintf("unexpected resource mode %s", rc.Addr.Resource.Resource.Mode))
		}
		if schema.Body == nil {
			return changesSrc, fmt.Errorf("Changes.Encode: missing schema for %s", rc.Addr)
		}
		rcs, err := rc.Encode(schema)
		if err != nil {
			return changesSrc, fmt.Errorf("Changes.Encode: %w", err)
		}
		changesSrc.Resources = append(changesSrc.Resources, rcs)
	}

	// Queries always refer to list resource types.
	for _, qi := range c.Queries {
		p, ok := schemas.Providers[qi.ProviderAddr.Provider]
		if !ok {
			return changesSrc, fmt.Errorf("Changes.Encode: missing provider %s for %s", qi.ProviderAddr, qi.Addr)
		}
		schema := p.ListResourceTypes[qi.Addr.Resource.Resource.Type]
		if schema.Body == nil {
			return changesSrc, fmt.Errorf("Changes.Encode: missing schema for %s", qi.Addr)
		}
		rcs, err := qi.Encode(schema)
		if err != nil {
			return changesSrc, fmt.Errorf("Changes.Encode: %w", err)
		}
		changesSrc.Queries = append(changesSrc.Queries, rcs)
	}

	for _, ai := range c.ActionInvocations {
		p, ok := schemas.Providers[ai.ProviderAddr.Provider]
		if !ok {
			return changesSrc, fmt.Errorf("Changes.Encode: missing provider %s for %s", ai.ProviderAddr, ai.Addr)
		}
		schema, ok := p.Actions[ai.Addr.Action.Action.Type]
		if !ok {
			return changesSrc, fmt.Errorf("Changes.Encode: missing schema for %s", ai.Addr.Action.Action.Type)
		}
		a, err := ai.Encode(&schema)
		if err != nil {
			return changesSrc, fmt.Errorf("Changes.Encode: %w", err)
		}
		changesSrc.ActionInvocations = append(changesSrc.ActionInvocations, a)
	}

	// Output encoding needs no schema; OutputChange serializes itself.
	for _, ocs := range c.Outputs {
		oc, err := ocs.Encode()
		if err != nil {
			return changesSrc, err
		}
		changesSrc.Outputs = append(changesSrc.Outputs, oc)
	}
	return changesSrc, nil
}
// ResourceInstance returns the planned change for the current object of the
// resource instance of the given address, if any. Returns nil if no change is
// planned.
func (c *Changes) ResourceInstance(addr addrs.AbsResourceInstance) *ResourceInstanceChange {
	for _, candidate := range c.Resources {
		if !candidate.Addr.Equal(addr) {
			continue
		}
		// Deposed objects are looked up via ResourceInstanceDeposed instead.
		if candidate.DeposedKey != states.NotDeposed {
			continue
		}
		return candidate
	}
	return nil
}
// InstancesForAbsResource returns the planned change for the current objects
// of the resource instances of the given address, if any. Returns nil if no
// changes are planned.
func (c *Changes) InstancesForAbsResource(addr addrs.AbsResource) []*ResourceInstanceChange {
	var matched []*ResourceInstanceChange
	for _, candidate := range c.Resources {
		if candidate.DeposedKey != states.NotDeposed {
			continue
		}
		if !candidate.Addr.ContainingResource().Equal(addr) {
			continue
		}
		matched = append(matched, candidate)
	}
	return matched
}
// QueriesForAbsResource returns the planned query instances belonging to the
// resource at the given address. Returns nil if there are none.
func (c *Changes) QueriesForAbsResource(addr addrs.AbsResource) []*QueryInstance {
	var matched []*QueryInstance
	for _, query := range c.Queries {
		if query.Addr.ContainingResource().Equal(addr) {
			matched = append(matched, query)
		}
	}
	return matched
}
// InstancesForConfigResource returns the planned change for the current objects
// of the resource instances of the given address, if any. Returns nil if no
// changes are planned.
func (c *Changes) InstancesForConfigResource(addr addrs.ConfigResource) []*ResourceInstanceChange {
	var matched []*ResourceInstanceChange
	for _, candidate := range c.Resources {
		if candidate.DeposedKey != states.NotDeposed {
			continue
		}
		// Compare at the configuration (not instance) level of the address.
		if !candidate.Addr.ContainingResource().Config().Equal(addr) {
			continue
		}
		matched = append(matched, candidate)
	}
	return matched
}
// ResourceInstanceDeposed returns the plan change of a deposed object of
// the resource instance of the given address, if any. Returns nil if no change
// is planned.
func (c *Changes) ResourceInstanceDeposed(addr addrs.AbsResourceInstance, key states.DeposedKey) *ResourceInstanceChange {
	for _, candidate := range c.Resources {
		if candidate.Addr.Equal(addr) && candidate.DeposedKey == key {
			return candidate
		}
	}
	return nil
}
// OutputValue returns the planned change for the output value with the
// given address, if any. Returns nil if no change is planned.
func (c *Changes) OutputValue(addr addrs.AbsOutputValue) *OutputChange {
	for _, candidate := range c.Outputs {
		if candidate.Addr.Equal(addr) {
			return candidate
		}
	}
	return nil
}
// RootOutputValues returns planned changes for all outputs of the root module.
func (c *Changes) RootOutputValues() []*OutputChange {
	var res []*OutputChange

	for _, oc := range c.Outputs {
		// FIX(comment): the old note here said "we can't evaluate root module
		// outputs", which described the opposite filter; this loop keeps only
		// outputs that belong to the root module instance.
		if !oc.Addr.Module.Equal(addrs.RootModuleInstance) {
			continue
		}

		res = append(res, oc)

	}
	return res
}
// OutputValues returns planned changes for all outputs for all module
// instances that reside in the parent path. Returns nil if no changes are
// planned.
func (c *Changes) OutputValues(parent addrs.ModuleInstance, module addrs.ModuleCall) []*OutputChange {
	var res []*OutputChange

	for _, oc := range c.Outputs {
		// Root module outputs never match a module call, so skip them early.
		// we can't evaluate root module outputs
		if oc.Addr.Module.Equal(addrs.RootModuleInstance) {
			continue
		}

		// Split the output's module path into its parent instance path and
		// the final call step.
		changeMod, changeCall := oc.Addr.Module.Call()
		// this does not reside on our parent instance path
		if !changeMod.Equal(parent) {
			continue
		}

		// this is not the module you're looking for
		if changeCall.Name != module.Name {
			continue
		}

		res = append(res, oc)

	}
	return res
}
// SyncWrapper returns a wrapper object around the receiver that can be used
// to make certain changes to the receiver in a concurrency-safe way, as long
// as all callers share the same wrapper object.
func (c *Changes) SyncWrapper() *ChangesSync {
	return &ChangesSync{
		changes: c,
	}
}
// ActionInstances returns planned action invocations for all module instances
// that reside in the parent path. Returns nil if no changes are planned.
// (FIX: the comment previously named the function "ActionInvocations".)
func (c *Changes) ActionInstances(parent addrs.ModuleInstance, module addrs.ModuleCall) []*ActionInvocationInstance {
	var ret []*ActionInvocationInstance
	for _, a := range c.ActionInvocations {
		// Split the action's module path into parent instance path and call.
		changeMod, changeCall := a.Addr.Module.Call()
		// this does not reside on our parent instance path
		if !changeMod.Equal(parent) {
			continue
		}

		// this is not the module you're looking for
		if changeCall.Name != module.Name {
			continue
		}

		ret = append(ret, a)
	}
	return ret
}
// QueryInstance describes the planned result of one query (list resource)
// instance.
type QueryInstance struct {
	// Addr is the absolute address of the query's resource instance.
	Addr         addrs.AbsResourceInstance
	// ProviderAddr is the provider configuration used to plan the query.
	ProviderAddr addrs.AbsProviderConfig

	Results QueryResults
}

// QueryResults carries the query's result value together with any generated
// import configuration.
type QueryResults struct {
	Value     cty.Value
	Generated genconfig.ImportGroup
}
// DeepCopy returns a copy of the receiver, or nil for a nil receiver.
// NOTE(review): this is a shallow struct copy — the contained cty.Value and
// ImportGroup are shared with the original, not duplicated.
func (qi *QueryInstance) DeepCopy() *QueryInstance {
	if qi == nil {
		return qi
	}
	ret := *qi
	return &ret
}
// Encode produces a variant of the receiver with its result value serialized
// against the given list-resource schema, suitable for writing to a plan
// file. (FIX: receiver renamed rc -> qi for consistency with the other
// QueryInstance methods.)
func (qi *QueryInstance) Encode(schema providers.Schema) (*QueryInstanceSrc, error) {
	results, err := NewDynamicValue(qi.Results.Value, schema.Body.ImpliedType())
	if err != nil {
		return nil, err
	}

	return &QueryInstanceSrc{
		Addr:         qi.Addr,
		Results:      results,
		ProviderAddr: qi.ProviderAddr,
		Generated:    qi.Results.Generated,
	}, nil
}
// ResourceInstanceChange describes a change to a particular resource instance
// object.
type ResourceInstanceChange struct {
	// Addr is the absolute address of the resource instance that the change
	// will apply to.
	Addr addrs.AbsResourceInstance

	// PrevRunAddr is the absolute address that this resource instance had at
	// the conclusion of a previous run.
	//
	// This will typically be the same as Addr, but can be different if the
	// previous resource instance was subject to a "moved" block that we
	// handled in the process of creating this plan.
	//
	// For the initial creation of a resource instance there isn't really any
	// meaningful "previous run address", but PrevRunAddr will still be set
	// equal to Addr in that case in order to simplify logic elsewhere which
	// aims to detect and react to the movement of instances between addresses.
	PrevRunAddr addrs.AbsResourceInstance

	// DeposedKey is the identifier for a deposed object associated with the
	// given instance, or states.NotDeposed if this change applies to the
	// current object.
	//
	// A Replace change for a resource with create_before_destroy set will
	// create a new DeposedKey temporarily during replacement. In that case,
	// DeposedKey in the plan is always states.NotDeposed, representing that
	// the current object is being replaced with the deposed.
	DeposedKey states.DeposedKey

	// Provider is the address of the provider configuration that was used
	// to plan this change, and thus the configuration that must also be
	// used to apply it.
	ProviderAddr addrs.AbsProviderConfig

	// Change is an embedded description of the change.
	Change

	// ActionReason is an optional extra indication of why we chose the
	// action recorded in Change.Action for this particular resource instance.
	//
	// This is an approximate mechanism only for the purpose of explaining the
	// plan to end-users in the UI and is not to be used for any
	// decision-making during the apply step; if apply behavior needs to vary
	// depending on the "action reason" then the information for that decision
	// must be recorded more precisely elsewhere for that purpose.
	//
	// Sometimes there might be more than one reason for choosing a particular
	// action. In that case, it's up to the codepath making that decision to
	// decide which value would provide the most relevant explanation to the
	// end-user and return that. It's not a goal of this field to represent
	// fine details about the planning process.
	ActionReason ResourceInstanceChangeActionReason

	// RequiredReplace is a set of paths that caused the change action to be
	// Replace rather than Update. Always nil if the change action is not
	// Replace.
	//
	// This is retained only for UI-plan-rendering purposes and so it does not
	// currently survive a round-trip through a saved plan file.
	RequiredReplace cty.PathSet

	// Private allows a provider to stash any extra data that is opaque to
	// Terraform that relates to this change. Terraform will save this
	// byte-for-byte and return it to the provider in the apply call.
	Private []byte
}
// Encode produces a variant of the receiver that has its change values
// serialized so it can be written to a plan file. Pass the implied type of the
// corresponding resource type schema for correct operation.
func (rc *ResourceInstanceChange) Encode(schema providers.Schema) (*ResourceInstanceChangeSrc, error) {
	cs, err := rc.Change.Encode(&schema)
	if err != nil {
		return nil, err
	}

	prevRunAddr := rc.PrevRunAddr
	if prevRunAddr.Resource.Resource.Type == "" {
		// Suggests an old caller that hasn't been properly updated to
		// populate this yet.
		prevRunAddr = rc.Addr
	}

	// FIX: the final return previously ended with ", err"; err is always nil
	// at this point (non-nil errors returned above), so return nil explicitly.
	return &ResourceInstanceChangeSrc{
		Addr:            rc.Addr,
		PrevRunAddr:     prevRunAddr,
		DeposedKey:      rc.DeposedKey,
		ProviderAddr:    rc.ProviderAddr,
		ChangeSrc:       *cs,
		ActionReason:    rc.ActionReason,
		RequiredReplace: rc.RequiredReplace,
		Private:         rc.Private,
	}, nil
}
// DeepCopy returns a copy of the receiver, or nil if the receiver is nil.
//
// NOTE(review): this is a shallow struct copy (`*rc`); nested values such as
// the embedded Change's cty.Values are shared with the original.
func (rc *ResourceInstanceChange) DeepCopy() *ResourceInstanceChange {
	if rc == nil {
		return nil
	}
	copied := *rc
	return &copied
}
// Moved reports whether this change records the instance having moved: that
// is, whether its previous-run address differs from its current address.
func (rc *ResourceInstanceChange) Moved() bool {
	unmoved := rc.Addr.Equal(rc.PrevRunAddr)
	return !unmoved
}
// Simplify will, where possible, produce a change with a simpler action than
// the receiver given a flag indicating whether the caller is dealing with
// a normal apply or a destroy. This flag deals with the fact that Terraform
// Core uses a specialized graph node type for destroying; only that
// specialized node should set "destroying" to true.
//
// The following table shows the simplification behavior:
//
//	Action    Destroying?   New Action
//	--------+-------------+-----------
//	Create    true          NoOp
//	Delete    false         NoOp
//	Replace   true          Delete
//	Replace   false         Create
//
// For any combination not in the above table, Simplify just returns the
// receiver as-is.
func (rc *ResourceInstanceChange) Simplify(destroying bool) *ResourceInstanceChange {
	// Every simplified variant shares the same address/provider/private data
	// and only varies in the action and the before/after values, so this
	// helper assembles one from those varying parts.
	simplified := func(action Action, before, after, beforeIdentity, afterIdentity cty.Value) *ResourceInstanceChange {
		return &ResourceInstanceChange{
			Addr:         rc.Addr,
			DeposedKey:   rc.DeposedKey,
			Private:      rc.Private,
			ProviderAddr: rc.ProviderAddr,
			Change: Change{
				Action:          action,
				Before:          before,
				BeforeIdentity:  beforeIdentity,
				After:           after,
				AfterIdentity:   afterIdentity,
				Importing:       rc.Importing,
				GeneratedConfig: rc.GeneratedConfig,
			},
		}
	}

	if destroying {
		switch rc.Action {
		case Delete:
			// Already a plain destroy; fall through and return rc verbatim.
		case CreateThenDelete, DeleteThenCreate:
			// A replace during destroy simplifies to just the delete half.
			return simplified(
				Delete,
				rc.Before, cty.NullVal(rc.Before.Type()),
				rc.BeforeIdentity, cty.NullVal(rc.BeforeIdentity.Type()),
			)
		default:
			// Anything else (e.g. Create) is a no-op from the destroy
			// walk's perspective.
			return simplified(
				NoOp,
				rc.Before, rc.Before,
				rc.BeforeIdentity, rc.BeforeIdentity,
			)
		}
	} else {
		switch rc.Action {
		case Delete:
			// Deletes are handled by the destroy walk, so a normal apply
			// treats them as no-ops.
			return simplified(
				NoOp,
				rc.Before, rc.Before,
				rc.BeforeIdentity, rc.BeforeIdentity,
			)
		case CreateThenDelete, DeleteThenCreate:
			// A replace during a normal apply simplifies to just the
			// create half.
			return simplified(
				Create,
				cty.NullVal(rc.After.Type()), rc.After,
				cty.NullVal(rc.AfterIdentity.Type()), rc.AfterIdentity,
			)
		}
	}

	// If we fall out here then our change is already simple enough.
	return rc
}
// ResourceInstanceChangeActionReason allows for some extra user-facing
// reasoning for why a particular change action was chosen for a particular
// resource instance.
//
// This only represents sufficient detail to give a suitable explanation to
// an end-user, and mustn't be used for any real decision-making during the
// apply step.
type ResourceInstanceChangeActionReason rune

// NOTE(review): each reason below is a mnemonic rune value; confirm whether
// these runes are persisted anywhere (e.g. saved plan files) before changing
// any of the assigned values.

//go:generate go tool golang.org/x/tools/cmd/stringer -type=ResourceInstanceChangeActionReason changes.go

const (
	// In most cases there's no special reason for choosing a particular
	// action, which is represented by ResourceInstanceChangeNoReason.
	ResourceInstanceChangeNoReason ResourceInstanceChangeActionReason = 0

	// ResourceInstanceReplaceBecauseTainted indicates that the resource
	// instance must be replaced because its existing current object is
	// marked as "tainted".
	ResourceInstanceReplaceBecauseTainted ResourceInstanceChangeActionReason = 'T'

	// ResourceInstanceReplaceByRequest indicates that the resource instance
	// is planned to be replaced because a caller specifically asked for it
	// to be using ReplaceAddrs. (On the command line, the -replace=...
	// planning option.)
	ResourceInstanceReplaceByRequest ResourceInstanceChangeActionReason = 'R'

	// ResourceInstanceReplaceByTriggers indicates that the resource instance
	// is planned to be replaced because of a corresponding change in a
	// replace_triggered_by reference.
	ResourceInstanceReplaceByTriggers ResourceInstanceChangeActionReason = 'D'

	// ResourceInstanceReplaceBecauseCannotUpdate indicates that the resource
	// instance is planned to be replaced because the provider has indicated
	// that a requested change cannot be applied as an update.
	//
	// In this case, the RequiredReplace field will typically be populated on
	// the ResourceInstanceChange object to give information about specifically
	// which arguments changed in a non-updatable way.
	ResourceInstanceReplaceBecauseCannotUpdate ResourceInstanceChangeActionReason = 'F'

	// ResourceInstanceDeleteBecauseNoResourceConfig indicates that the
	// resource instance is planned to be deleted because there's no
	// corresponding resource configuration block in the configuration.
	ResourceInstanceDeleteBecauseNoResourceConfig ResourceInstanceChangeActionReason = 'N'

	// ResourceInstanceDeleteBecauseWrongRepetition indicates that the
	// resource instance is planned to be deleted because the instance key
	// type isn't consistent with the repetition mode selected in the
	// resource configuration.
	ResourceInstanceDeleteBecauseWrongRepetition ResourceInstanceChangeActionReason = 'W'

	// ResourceInstanceDeleteBecauseCountIndex indicates that the resource
	// instance is planned to be deleted because its integer instance key
	// is out of range for the current configured resource "count" value.
	ResourceInstanceDeleteBecauseCountIndex ResourceInstanceChangeActionReason = 'C'

	// ResourceInstanceDeleteBecauseEachKey indicates that the resource
	// instance is planned to be deleted because its string instance key
	// isn't one of the keys included in the current configured resource
	// "for_each" value.
	ResourceInstanceDeleteBecauseEachKey ResourceInstanceChangeActionReason = 'E'

	// ResourceInstanceDeleteBecauseNoModule indicates that the resource
	// instance is planned to be deleted because it belongs to a module
	// instance that's no longer declared in the configuration.
	//
	// This is less specific than the reasons we return for the various ways
	// a resource instance itself can be no longer declared, including both
	// the total removal of a module block and changes to its count/for_each
	// arguments. This difference in detail is out of pragmatism, because
	// potentially multiple nested modules could all contribute conflicting
	// specific reasons for a particular instance to no longer be declared.
	ResourceInstanceDeleteBecauseNoModule ResourceInstanceChangeActionReason = 'M'

	// ResourceInstanceDeleteBecauseNoMoveTarget indicates that the resource
	// address appears as the target ("to") in a moved block, but no
	// configuration exists for that resource. According to our move rules,
	// this combination evaluates to a deletion of the "new" resource.
	ResourceInstanceDeleteBecauseNoMoveTarget ResourceInstanceChangeActionReason = 'A'

	// ResourceInstanceReadBecauseConfigUnknown indicates that the resource
	// must be read during apply (rather than during planning) because its
	// configuration contains unknown values. This reason applies only to
	// data resources.
	ResourceInstanceReadBecauseConfigUnknown ResourceInstanceChangeActionReason = '?'

	// ResourceInstanceReadBecauseDependencyPending indicates that the resource
	// must be read during apply (rather than during planning) because it
	// depends on a managed resource instance which has its own changes
	// pending.
	ResourceInstanceReadBecauseDependencyPending ResourceInstanceChangeActionReason = '!'

	// ResourceInstanceReadBecauseCheckNested indicates that the resource must
	// be read during apply (as well as during planning) because it is inside
	// a check block and when the check assertions execute we want them to use
	// the most up-to-date data.
	ResourceInstanceReadBecauseCheckNested ResourceInstanceChangeActionReason = '#'
)
// OutputChange describes a change to an output value.
type OutputChange struct {
	// Addr is the absolute address of the output value that the change
	// will apply to.
	Addr addrs.AbsOutputValue

	// Change is an embedded description of the change.
	//
	// For output value changes, the type constraint for the DynamicValue
	// instances is always cty.DynamicPseudoType.
	Change

	// Sensitive, if true, indicates that either the old or new value in the
	// change is sensitive and so a rendered version of the plan in the UI
	// should elide the actual values while still indicating the action of the
	// change.
	Sensitive bool
}
// Encode produces a variant of the receiver that has its change values
// serialized so it can be written to a plan file.
//
// Output values have no resource schema, so the embedded Change is encoded
// with a nil schema (i.e. with cty.DynamicPseudoType).
func (oc *OutputChange) Encode() (*OutputChangeSrc, error) {
	cs, err := oc.Change.Encode(nil)
	if err != nil {
		return nil, err
	}
	return &OutputChangeSrc{
		Addr:      oc.Addr,
		ChangeSrc: *cs,
		Sensitive: oc.Sensitive,
	}, nil // err is necessarily nil here; returning it verbatim was misleading
}
// Importing is the part of a ChangeSrc that describes the embedded import
// action.
//
// The fields in here are subject to change, so downstream consumers should be
// prepared for backwards compatibility in case the contents changes.
type Importing struct {
	// Target is the import identifier. The Encode method below treats an
	// object-typed value as a resource identity and any other known value
	// as a string ID.
	Target cty.Value
}
// Encode converts the Importing object into a form suitable for serialization
// to a plan file.
//
// A nil receiver encodes to nil. A not-wholly-known target encodes as
// Unknown; an object-typed target encodes as an identity value (or nil if it
// cannot be serialized); any other known target encodes as a string ID.
func (i *Importing) Encode(identityTy cty.Type) *ImportingSrc {
	if i == nil {
		return nil
	}
	if !i.Target.IsWhollyKnown() {
		return &ImportingSrc{
			Unknown: true,
		}
	}
	if !i.Target.Type().IsObjectType() {
		return &ImportingSrc{
			ID: i.Target.AsString(),
		}
	}
	identity, err := NewDynamicValue(i.Target, identityTy)
	if err != nil {
		// Matches the original behavior: a non-serializable identity is
		// silently dropped rather than surfaced as an error.
		return nil
	}
	return &ImportingSrc{
		Identity: identity,
	}
}
// Change describes a single change with a given action.
type Change struct {
	// Action defines what kind of change is being made.
	Action Action

	// Interpretation of Before and After depend on Action:
	//
	//     NoOp     Before and After are the same, unchanged value
	//     Create   Before is nil, and After is the expected value after create.
	//     Read     Before is any prior value (nil if no prior), and After is the
	//              value that was or will be read.
	//     Update   Before is the value prior to update, and After is the expected
	//              value after update.
	//     Replace  As with Update.
	//     Delete   Before is the value prior to delete, and After is always nil.
	//     Forget   As with Delete.
	//
	// Unknown values may appear anywhere within the Before and After values,
	// either as the values themselves or as nested elements within known
	// collections/structures.
	Before, After cty.Value

	// Keeping track of how the identity of the resource has changed.
	// Encode only serializes these when they are non-null.
	BeforeIdentity, AfterIdentity cty.Value

	// Importing is present if the resource is being imported as part of this
	// change.
	//
	// Use the simple presence of this field to detect if a ChangeSrc is to be
	// imported, the contents of this structure may be modified going forward.
	Importing *Importing

	// GeneratedConfig contains any HCL config generated for this resource
	// during planning, as a string. If GeneratedConfig is populated, Importing
	// should be true. However, not all Importing changes contain generated
	// config.
	GeneratedConfig string
}
// Encode produces a variant of the receiver that has its change values
// serialized so it can be written to a plan file. Pass the type constraint
// that the values are expected to conform to; to properly decode the values
// later an identical type constraint must be provided at that time.
//
// Where a Change is embedded in some other struct, it's generally better
// to call the corresponding Encode method of that struct rather than working
// directly with its embedded Change.
func (c *Change) Encode(schema *providers.Schema) (*ChangeSrc, error) {
	// We can't serialize value marks directly so we'll need to extract the
	// sensitive marks and store them in a separate field.
	// We ignore Deprecation marks.
	//
	// We don't accept any other marks here. The caller should have dealt
	// with those somehow and replaced them with unmarked placeholders before
	// writing the value into the state.
	unmarkedBefore, marksesBefore := c.Before.UnmarkDeepWithPaths()
	unmarkedAfter, marksesAfter := c.After.UnmarkDeepWithPaths()
	sensitiveAttrsBefore, unsupportedMarksesBefore := marks.PathsWithMark(marksesBefore, marks.Sensitive)
	sensitiveAttrsAfter, unsupportedMarksesAfter := marks.PathsWithMark(marksesAfter, marks.Sensitive)
	// Filter the (tolerated) deprecation marks out of the leftover sets so
	// they don't trigger the "unsupported mark" errors below.
	_, unsupportedMarksesBefore = marks.PathsWithMark(unsupportedMarksesBefore, marks.Deprecation)
	_, unsupportedMarksesAfter = marks.PathsWithMark(unsupportedMarksesAfter, marks.Deprecation)
	if len(unsupportedMarksesBefore) != 0 {
		return nil, fmt.Errorf(
			"prior value %s: can't serialize value marked with %#v (this is a bug in Terraform)",
			tfdiags.FormatCtyPath(unsupportedMarksesBefore[0].Path),
			unsupportedMarksesBefore[0].Marks,
		)
	}
	if len(unsupportedMarksesAfter) != 0 {
		return nil, fmt.Errorf(
			"new value %s: can't serialize value marked with %#v (this is a bug in Terraform)",
			tfdiags.FormatCtyPath(unsupportedMarksesAfter[0].Path),
			unsupportedMarksesAfter[0].Marks,
		)
	}

	// With no schema (e.g. output values) we fall back on a fully-dynamic
	// type constraint.
	ty := cty.DynamicPseudoType
	if schema != nil {
		ty = schema.Body.ImpliedType()
	}

	beforeDV, err := NewDynamicValue(unmarkedBefore, ty)
	if err != nil {
		return nil, err
	}
	afterDV, err := NewDynamicValue(unmarkedAfter, ty)
	if err != nil {
		return nil, err
	}

	// Identity values are only serialized when non-null; otherwise the
	// zero DynamicValue is left in place.
	var beforeIdentityDV DynamicValue
	var afterIdentityDV DynamicValue
	identityTy := cty.DynamicPseudoType
	if schema != nil {
		identityTy = schema.Identity.ImpliedType()
	}
	if !c.BeforeIdentity.IsNull() {
		beforeIdentityDV, err = NewDynamicValue(c.BeforeIdentity, identityTy)
		if err != nil {
			return nil, err
		}
	}
	if !c.AfterIdentity.IsNull() {
		afterIdentityDV, err = NewDynamicValue(c.AfterIdentity, identityTy)
		if err != nil {
			return nil, err
		}
	}

	return &ChangeSrc{
		Action:               c.Action,
		Before:               beforeDV,
		After:                afterDV,
		BeforeSensitivePaths: sensitiveAttrsBefore,
		AfterSensitivePaths:  sensitiveAttrsAfter,
		BeforeIdentity:       beforeIdentityDV,
		AfterIdentity:        afterIdentityDV,
		Importing:            c.Importing.Encode(identityTy),
		GeneratedConfig:      c.GeneratedConfig,
	}, nil
}
# Copyright (C) 2009-2010 Aren Olson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
# Prefer the C-accelerated ElementTree implementation when available, falling
# back to the pure-Python version. Catch ImportError specifically: the
# original bare `except:` would also have hidden unrelated errors (including
# KeyboardInterrupt) raised during import.
try:
    import xml.etree.cElementTree as ETree
except ImportError:
    import xml.etree.ElementTree as ETree
import urllib
from xl.dynamic import DynamicSource
from xl import providers, common
import logging
logger = logging.getLogger(__name__)
# Module-level singleton holding the registered LastfmSource provider.
# Populated by enable() and cleared by disable().
LFMS = None

# Last.fm API Key for Exaile
# if you reuse this code in a different application, please
# register your own key with last.fm
API_KEY = '3599c79a97fd61ce518b75922688bc38'
def enable(exaile):
    """Plugin entry point: create the source and register it as a
    dynamic-playlists provider."""
    global LFMS
    LFMS = LastfmSource()
    providers.register("dynamic_playlists", LFMS)
def disable(exaile):
    """Plugin exit point: unregister the source and drop the module-level
    reference so it can be garbage collected."""
    global LFMS
    providers.unregister("dynamic_playlists", LFMS)
    LFMS = None
class LastfmSource(DynamicSource):
    """Dynamic playlist source backed by Last.fm's artist.getSimilar API."""
    name = 'lastfm'

    def __init__(self):
        DynamicSource.__init__(self)

    def get_results(self, artist):
        """Fetch artists similar to *artist* from Last.fm.

        Returns a list of (match_score, artist_name) tuples, where
        match_score is a float; returns an empty list on network failure.
        """
        ar = urllib.quote_plus(artist.encode('utf-8'))
        url = 'http://ws.audioscrobbler.com/2.0/?method=artist.getsimilar&artist=%s&api_key=' + API_KEY
        try:
            handle = urllib.urlopen(url % ar)
            try:
                f = handle.read()
            finally:
                # Fix: the original never closed the connection, leaking it
                # on every call.
                handle.close()
        except IOError:
            logger.exception("Error retrieving results")
            return []
        retlist = []
        xml = ETree.fromstring(f)
        for e in xml.getiterator('artist'):
            retlist.append((float(e.find('match').text), e.find('name').text))
        return retlist
"""Access Ansible Core CI remote services."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import tempfile
import time
from .util import (
SubprocessError,
ApplicationError,
cmd_quote,
display,
ANSIBLE_TEST_DATA_ROOT,
)
from .util_common import (
intercept_command,
run_command,
)
from .core_ci import (
AnsibleCoreCI,
)
from .ansible_util import (
ansible_environment,
)
from .config import (
ShellConfig,
)
from .payload import (
create_payload,
)
class ManageWindowsCI:
    """Manage access to a Windows instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci
        self.ssh_args = ['-i', self.core_ci.ssh_key.key]

        ssh_options = dict(
            BatchMode='yes',
            StrictHostKeyChecking='no',
            UserKnownHostsFile='/dev/null',
            ServerAliveInterval=15,
            ServerAliveCountMax=4,
        )

        # Sorted so the generated command line is deterministic.
        for ssh_option in sorted(ssh_options):
            self.ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]

    def setup(self, python_version):
        """Used in delegate_remote to setup the host, no action is required for Windows.
        :type python_version: str
        """

    def wait(self):
        """Wait for instance to respond to ansible ping."""
        extra_vars = [
            'ansible_connection=winrm',
            'ansible_host=%s' % self.core_ci.connection.hostname,
            'ansible_user=%s' % self.core_ci.connection.username,
            'ansible_password=%s' % self.core_ci.connection.password,
            'ansible_port=%s' % self.core_ci.connection.port,
            'ansible_winrm_server_cert_validation=ignore',
        ]

        name = 'windows_%s' % self.core_ci.version

        env = ansible_environment(self.core_ci.args)
        cmd = ['ansible', '-m', 'win_ping', '-i', '%s,' % name, name, '-e', ' '.join(extra_vars)]

        # Retry for up to ~20 minutes (119 attempts, 10 second interval).
        for dummy in range(1, 120):
            try:
                intercept_command(self.core_ci.args, cmd, 'ping', env=env, disable_coverage=True)
                return
            except SubprocessError:
                time.sleep(10)

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))

    def download(self, remote, local):
        """Download a path from the remote host via scp.
        :type remote: str
        :type local: str
        """
        self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)

    def upload(self, local, remote):
        """Upload a path to the remote host via scp.
        :type local: str
        :type remote: str
        """
        self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))

    def ssh(self, command, options=None, force_pty=True):
        """Run a command on the remote host over ssh.
        :type command: str | list[str]
        :type options: list[str] | None
        :type force_pty: bool
        """
        # Copy the caller's list: the original appended '-tt' directly to the
        # argument, mutating the caller's options on every call.
        options = list(options) if options else []
        if force_pty:
            options.append('-tt')

        if isinstance(command, list):
            command = ' '.join(cmd_quote(c) for c in command)

        run_command(self.core_ci.args,
                    ['ssh', '-q'] + self.ssh_args +
                    options +
                    ['-p', '22',
                     '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
                    [command])

    def scp(self, src, dst):
        """Copy a path between local and remote, retrying on failure.
        :type src: str
        :type dst: str
        """
        for dummy in range(1, 10):
            try:
                run_command(self.core_ci.args,
                            ['scp'] + self.ssh_args +
                            ['-P', '22', '-q', '-r', src, dst])
                return
            except SubprocessError:
                time.sleep(10)

        raise ApplicationError('Failed transfer: %s -> %s' % (src, dst))
class ManageNetworkCI:
    """Manage access to a network instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci

    def wait(self):
        """Wait for instance to respond to ansible ping."""
        connection = self.core_ci.connection

        extra_vars = [
            'ansible_host=%s' % connection.hostname,
            'ansible_port=%s' % connection.port,
            'ansible_connection=local',
            'ansible_ssh_private_key_file=%s' % self.core_ci.ssh_key.key,
        ]

        name = '%s-%s' % (self.core_ci.platform, self.core_ci.version.replace('.', '-'))
        env = ansible_environment(self.core_ci.args)

        cmd = [
            'ansible',
            '-m', '%s_command' % self.core_ci.platform,
            '-a', 'commands=?',
            '-u', connection.username,
            '-i', '%s,' % name,
            '-e', ' '.join(extra_vars),
            name,
        ]

        # Poll with a 10 second interval until the instance answers or the
        # attempt budget (89 tries) is exhausted.
        attempts = 89
        while attempts:
            attempts -= 1
            try:
                intercept_command(self.core_ci.args, cmd, 'ping', env=env, disable_coverage=True)
                return
            except SubprocessError:
                time.sleep(10)

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
class ManagePosixCI:
    """Manage access to a POSIX instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci
        self.ssh_args = ['-i', self.core_ci.ssh_key.key]

        ssh_options = dict(
            BatchMode='yes',
            StrictHostKeyChecking='no',
            UserKnownHostsFile='/dev/null',
            ServerAliveInterval=15,
            ServerAliveCountMax=4,
        )

        # Sorted so the generated command line is deterministic.
        for ssh_option in sorted(ssh_options):
            self.ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]

        # Select a platform/provider specific mechanism for becoming root.
        # NOTE(review): platforms other than freebsd/osx/rhel leave
        # self.become unset, which would raise AttributeError in ssh();
        # confirm only these platforms reach this class.
        if self.core_ci.platform == 'freebsd':
            if self.core_ci.provider == 'aws':
                self.become = ['su', '-l', 'root', '-c']
            elif self.core_ci.provider == 'azure':
                self.become = ['sudo', '-in', 'sh', '-c']
            else:
                raise NotImplementedError('provider %s has not been implemented' % self.core_ci.provider)
        elif self.core_ci.platform == 'osx':
            self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH']
        elif self.core_ci.platform == 'rhel':
            self.become = ['sudo', '-in', 'bash', '-c']

    def setup(self, python_version):
        """Start instance and wait for it to become ready and respond to an ansible ping.
        :type python_version: str
        :rtype: str
        """
        pwd = self.wait()

        display.info('Remote working directory: %s' % pwd, verbosity=1)

        if isinstance(self.core_ci.args, ShellConfig):
            if self.core_ci.args.raw:
                # Raw shell mode skips provisioning entirely.
                return pwd

        self.configure(python_version)
        self.upload_source()

        return pwd

    def wait(self):  # type: () -> str
        """Wait for instance to respond to SSH."""
        for dummy in range(1, 90):
            try:
                stdout = self.ssh('pwd', capture=True)[0]

                if self.core_ci.args.explain:
                    return '/pwd'

                # Take the last line, since earlier lines may be login
                # banner/MOTD output from the forced pty.
                pwd = stdout.strip().splitlines()[-1]

                if not pwd.startswith('/'):
                    raise Exception('Unexpected current working directory "%s" from "pwd" command output:\n%s' % (pwd, stdout))

                return pwd
            except SubprocessError:
                time.sleep(10)

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))

    def configure(self, python_version):
        """Configure remote host for testing.
        :type python_version: str
        """
        self.upload(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'remote.sh'), '/tmp')
        self.ssh('chmod +x /tmp/remote.sh && /tmp/remote.sh %s %s' % (self.core_ci.platform, python_version))

    def upload_source(self):
        """Upload and extract source."""
        with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
            remote_source_dir = '/tmp'
            remote_source_path = os.path.join(remote_source_dir, os.path.basename(local_source_fd.name))

            create_payload(self.core_ci.args, local_source_fd.name)

            self.upload(local_source_fd.name, remote_source_dir)
            # Replace any previous checkout before extracting the payload.
            self.ssh('rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && tar oxzf %s' % remote_source_path)

    def download(self, remote, local):
        """Download a path from the remote host via scp.
        :type remote: str
        :type local: str
        """
        self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)

    def upload(self, local, remote):
        """Upload a path to the remote host via scp.
        :type local: str
        :type remote: str
        """
        self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))

    def ssh(self, command, options=None, capture=False):
        """Run a command on the remote host (via the become wrapper) over ssh.
        :type command: str | list[str]
        :type options: list[str] | None
        :type capture: bool
        :rtype: str | None, str | None
        """
        if not options:
            options = []

        if isinstance(command, list):
            command = ' '.join(cmd_quote(c) for c in command)

        return run_command(self.core_ci.args,
                           ['ssh', '-tt', '-q'] + self.ssh_args +
                           options +
                           ['-p', str(self.core_ci.connection.port),
                            '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
                           self.become + [cmd_quote(command)], capture=capture)

    def scp(self, src, dst):
        """Copy a path between local and remote, retrying on failure.
        :type src: str
        :type dst: str
        """
        for dummy in range(1, 10):
            try:
                run_command(self.core_ci.args,
                            ['scp'] + self.ssh_args +
                            ['-P', str(self.core_ci.connection.port), '-q', '-r', src, dst])
                return
            except SubprocessError:
                time.sleep(10)

        raise ApplicationError('Failed transfer: %s -> %s' % (src, dst))
/** @import { BlockStatement, ExpressionStatement } from 'estree' */
/** @import { AST } from '#compiler' */
/** @import { ComponentContext } from '../types' */

/**
 * Transforms a `<svelte:fragment>`: visits each `let:` directive on the
 * element, then inlines the transformed fragment body into the component's
 * init statements.
 * @param {AST.SvelteFragment} node
 * @param {ComponentContext} context
 */
export function SvelteFragment(node, context) {
	node.attributes
		.filter((attribute) => attribute.type === 'LetDirective')
		.forEach((attribute) => context.visit(attribute));

	const block = /** @type {BlockStatement} */ (context.visit(node.fragment));
	context.state.init.push(...block.body);
}
{
"resource": {
"test_object": {
"a": {
"count": 1,
"test_string": "new"
},
"b": {
"count": 1,
"lifecycle": {
"replace_triggered_by": [
{"test_object.a[count.index].test_string":"nope"}
]
}
}
}
}
} | json | github | https://github.com/hashicorp/terraform | internal/configs/testdata/invalid-files/resource-rtb.tf.json |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
tests/trigger_acceptance_test.py - Acceptance test suite that verifies trigger functionality
in very brief
"""
from trigger.netdevices import NetDevices
# A single shared NetDevices catalog for this script. The original built two
# independent catalogs and printed one at import time; that debug print and
# the duplicate construction have been removed. ``nd`` remains as an alias so
# any existing references keep working.
netdevices = NetDevices(with_acls=False)
nd = netdevices

__author__ = 'Murat Ezbiderli'
__maintainer__ = 'Salesforce'
__copyright__ = 'Copyright 2012-2013 Salesforce Inc.'
__version__ = '2.1'
import os
import unittest
from trigger.netdevices import NetDevices
class NetDevicesTest(unittest.TestCase):
    """Acceptance tests for basic NetDevices lookup functionality."""

    def setUp(self):
        # Fresh catalog per test; ACL support is not needed for lookups.
        # (A stray debug ``print`` of the device list was removed here.)
        self.nd = NetDevices(with_acls=False)
        self.nodename = self.nd.keys()[0]
        self.nodeobj = self.nd.values()[0]

    def testBasics(self):
        """Basic test of NetDevices functionality."""
        self.assertEqual(len(self.nd), 3)
        self.assertEqual(self.nodeobj.nodeName, self.nodename)
        self.assertEqual(self.nodeobj.manufacturer, 'JUNIPER')

    def testFind(self):
        """Test the find() method."""
        self.assertEqual(self.nd.find(self.nodename), self.nodeobj)

        # find() should also resolve the short (unqualified) hostname.
        nodebasename = self.nodename[:self.nodename.index('.')]
        self.assertEqual(self.nd.find(nodebasename), self.nodeobj)

        # An unknown (truncated) name must raise KeyError.
        self.assertRaises(KeyError, lambda: self.nd.find(self.nodename[0:3]))
if __name__ == "__main__":
    # Allow this acceptance suite to be executed directly as a script.
    unittest.main()
---
c: Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
SPDX-License-Identifier: curl
Long: doh-insecure
Help: Allow insecure DoH server connections
Added: 7.76.0
Category: dns tls
Protocols: DNS
Multi: boolean
See-also:
- doh-url
- insecure
- proxy-insecure
Example:
- --doh-insecure --doh-url https://doh.example $URL
---
# `--doh-insecure`
By default, every connection curl makes to a DoH server is verified to be
secure before the transfer takes place. This option tells curl to skip the
verification step and proceed without checking.
**WARNING**: using this option makes the DoH transfer and name resolution
insecure.
This option is equivalent to --insecure and --proxy-insecure but used for DoH
(DNS-over-HTTPS) only. | unknown | github | https://github.com/curl/curl | docs/cmdline-opts/doh-insecure.md |
"""
Tests stringify functions used in xmodule html
"""
from nose.tools import assert_equals # pylint: disable=no-name-in-module
from lxml import etree
from xmodule.stringify import stringify_children
def test_stringify():
    """stringify_children() should return the serialized content inside the
    root element, dropping the root tag and its attributes."""
    inner = 'Hi <div x="foo">there <span>Bruce</span><b>!</b></div>'
    markup = '<html a="b" foo="bar">{0}</html>'.format(inner)
    root = etree.fromstring(markup)
    assert_equals(stringify_children(root), inner)
def test_stringify_again():
    """Regression test tracking a bug where stringify_children() repeated
    content; the inner text must appear exactly once in the output.

    (The original assigned ``html`` twice — the first, larger markup was dead
    code immediately overwritten — and left debug prints in place; both have
    been removed.)
    """
    html = """<html>A voltage source is non-linear!
  <div align="center">
  </div>
  But it is <a href="http://mathworld.wolfram.com/AffineFunction.html">affine</a>,
  which means linear except for an offset.
  </html>
  """
    xml = etree.fromstring(html)
    out = stringify_children(xml)

    # Should appear once
    assert_equals(out.count("But it is "), 1)
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import VpcAccessServiceTransport
from .grpc import VpcAccessServiceGrpcTransport
from .grpc_asyncio import VpcAccessServiceGrpcAsyncIOTransport
# Compile a registry of transports: maps the transport's string label to its
# implementing class. Presumably consulted by the generated client factory to
# select a transport by name — confirm against the client module.
_transport_registry = OrderedDict()  # type: Dict[str, Type[VpcAccessServiceTransport]]
_transport_registry["grpc"] = VpcAccessServiceGrpcTransport
_transport_registry["grpc_asyncio"] = VpcAccessServiceGrpcAsyncIOTransport

# Public API of this subpackage: the abstract base plus both gRPC transports.
__all__ = (
    "VpcAccessServiceTransport",
    "VpcAccessServiceGrpcTransport",
    "VpcAccessServiceGrpcAsyncIOTransport",
)
###############################################################################
##
## Copyright (C) 2014, Tavendo GmbH and/or collaborators. All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## 1. Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
##
## 2. Redistributions in binary form must reproduce the above copyright notice,
## this list of conditions and the following disclaimer in the documentation
## and/or other materials provided with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
## ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
## LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
## SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
## INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
## CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
## POSSIBILITY OF SUCH DAMAGE.
##
###############################################################################
from setuptools import setup

# Packaging metadata for the 'hello' WAMP component.
# (Fix: stripped dataset-extraction residue that had been appended after the
# closing parenthesis of setup(), which made this script unparseable.)
setup(
    name='hello',
    version='0.0.1',
    description="'hello' WAMP Component",
    platforms=['Any'],
    packages=['hello'],
    include_package_data=True,
    zip_safe=False,
    # Register the component so autobahn's Twisted-based runner can
    # discover and start it as a WAMPlet.
    entry_points={
        'autobahn.twisted.wamplet': [
            'backend = hello.hello:AppSession'
        ],
    },
)
#!/usr/bin/env python
"""
Copyright 2015 Reverb Technologies, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class V1beta3_ResourceQuotaSpec(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    def __init__(self):
        """
        Attributes:
          swaggerTypes (dict): The key is attribute name and the value is
            attribute type.
          attributeMap (dict): The key is attribute name and the value is
            json key in definition.
        """
        # Swagger type of each model attribute.
        self.swaggerTypes = {
            'hard': 'dict'
        }
        # JSON key used for each attribute in the API definition.
        self.attributeMap = {
            'hard': 'hard'
        }
        # hard is the set of desired hard limits for each named resource.
        # (Fix: removed dataset-extraction residue that had been appended
        # to this trailing comment.)
        self.hard = None  # any
#include "test/jemalloc_test.h"
#include "jemalloc/internal/sec.h"
typedef struct pai_test_allocator_s pai_test_allocator_t;
/*
 * Mock page-allocator-interface (PAI) used as the SEC's fallback in these
 * tests.  Each hook just bumps a counter, so tests can assert exactly how
 * much traffic reached the fallback.
 */
struct pai_test_allocator_s {
	/* First member, so hooks can cast the pai_t * back to this struct. */
	pai_t pai;
	bool alloc_fail;		/* When true, alloc hooks fail. */
	size_t alloc_count;		/* Single allocations served. */
	size_t alloc_batch_count;	/* Extents served via batch alloc. */
	size_t dalloc_count;		/* Single deallocations seen. */
	size_t dalloc_batch_count;	/* Extents freed via batch dalloc. */
	/*
	 * We use a simple bump allocator as the implementation. This isn't
	 * *really* correct, since we may allow expansion into a subsequent
	 * allocation, but it's not like the SEC is really examining the
	 * pointers it gets back; this is mostly just helpful for debugging.
	 */
	uintptr_t next_ptr;
	size_t expand_count;		/* Expand requests observed. */
	bool expand_return_value;	/* Result the expand hook reports. */
	size_t shrink_count;		/* Shrink requests observed. */
	bool shrink_return_value;	/* Result the shrink hook reports. */
};
/*
 * Initialize *sec with a fresh base, the given fallback PAI, and the given
 * shard/size limits.  Options the tests don't care about get reasonable
 * defaults.
 */
static void
test_sec_init(sec_t *sec, pai_t *fallback, size_t nshards, size_t max_alloc,
    size_t max_bytes) {
	sec_opts_t opts;
	/*
	 * Bug fix: the nshards parameter used to be ignored (opts.nshards was
	 * hard-coded to 1).  All current callers pass 1, so behavior is
	 * unchanged for them.
	 */
	opts.nshards = nshards;
	opts.max_alloc = max_alloc;
	opts.max_bytes = max_bytes;
	/*
	 * Just choose reasonable defaults for these; most tests don't care so
	 * long as they're something reasonable.
	 */
	opts.bytes_after_flush = max_bytes / 2;
	opts.batch_fill_extra = 4;
	/*
	 * We end up leaking this base, but that's fine; this test is
	 * short-running, and SECs are arena-scoped in reality.
	 */
	base_t *base = base_new(TSDN_NULL, /* ind */ 123,
	    &ehooks_default_extent_hooks, /* metadata_use_hooks */ true);
	bool err = sec_init(TSDN_NULL, sec, base, fallback, &opts);
	assert_false(err, "Unexpected initialization failure");
	/* NOTE(review): npsizes is unsigned, so this >= 0 check is vacuous. */
	assert_u_ge(sec->npsizes, 0, "Zero size classes allowed for caching");
}
/*
 * Single-extent alloc hook: carve an aligned address out of the bump
 * allocator and wrap it in a freshly malloc'd edata.  Returns NULL when
 * ta->alloc_fail is set.
 */
static inline edata_t *
pai_test_allocator_alloc(tsdn_t *tsdn, pai_t *self, size_t size,
    size_t alignment, bool zero, bool guarded, bool frequent_reuse,
    bool *deferred_work_generated) {
	assert(!guarded);
	pai_test_allocator_t *ta = (pai_test_allocator_t *)self;
	if (ta->alloc_fail) {
		return NULL;
	}
	edata_t *edata = malloc(sizeof(edata_t));
	assert_ptr_not_null(edata, "");
	/* Round the bump pointer up so the returned address is aligned. */
	ta->next_ptr += alignment - 1;
	edata_init(edata, /* arena_ind */ 0,
	    (void *)(ta->next_ptr & ~(alignment - 1)), size,
	    /* slab */ false,
	    /* szind */ 0, /* sn */ 1, extent_state_active, /* zero */ zero,
	    /* committed */ true, /* ranged */ false, EXTENT_NOT_HEAD);
	ta->next_ptr += size;
	ta->alloc_count++;
	return edata;
}
/*
 * Batch alloc hook: bump-allocate nallocs extents of the given size and
 * append them to results.  Returns the number allocated (0 when
 * ta->alloc_fail is set).  Unlike the single-alloc hook, there is no
 * alignment parameter to honor here.
 */
static inline size_t
pai_test_allocator_alloc_batch(tsdn_t *tsdn, pai_t *self, size_t size,
    size_t nallocs, edata_list_active_t *results,
    bool *deferred_work_generated) {
	pai_test_allocator_t *ta = (pai_test_allocator_t *)self;
	if (ta->alloc_fail) {
		return 0;
	}
	for (size_t i = 0; i < nallocs; i++) {
		edata_t *edata = malloc(sizeof(edata_t));
		assert_ptr_not_null(edata, "");
		edata_init(edata, /* arena_ind */ 0,
		    (void *)ta->next_ptr, size,
		    /* slab */ false, /* szind */ 0, /* sn */ 1,
		    extent_state_active, /* zero */ false, /* committed */ true,
		    /* ranged */ false, EXTENT_NOT_HEAD);
		ta->next_ptr += size;
		ta->alloc_batch_count++;
		edata_list_active_append(results, edata);
	}
	return nallocs;
}
/* Expand hook: record the request and report the configured outcome. */
static bool
pai_test_allocator_expand(tsdn_t *tsdn, pai_t *self, edata_t *edata,
    size_t old_size, size_t new_size, bool zero,
    bool *deferred_work_generated) {
	pai_test_allocator_t *allocator = (pai_test_allocator_t *)self;
	allocator->expand_count += 1;
	return allocator->expand_return_value;
}
/* Shrink hook: record the request and report the configured outcome. */
static bool
pai_test_allocator_shrink(tsdn_t *tsdn, pai_t *self, edata_t *edata,
    size_t old_size, size_t new_size, bool *deferred_work_generated) {
	pai_test_allocator_t *allocator = (pai_test_allocator_t *)self;
	allocator->shrink_count += 1;
	return allocator->shrink_return_value;
}
/* Dalloc hook: count the deallocation, then release the test edata. */
static void
pai_test_allocator_dalloc(tsdn_t *tsdn, pai_t *self, edata_t *edata,
    bool *deferred_work_generated) {
	pai_test_allocator_t *allocator = (pai_test_allocator_t *)self;
	allocator->dalloc_count += 1;
	free(edata);
}
/* Batch dalloc hook: drain the list, counting and freeing each extent. */
static void
pai_test_allocator_dalloc_batch(tsdn_t *tsdn, pai_t *self,
    edata_list_active_t *list, bool *deferred_work_generated) {
	pai_test_allocator_t *allocator = (pai_test_allocator_t *)self;
	for (edata_t *cur = edata_list_active_first(list); cur != NULL;
	    cur = edata_list_active_first(list)) {
		edata_list_active_remove(list, cur);
		allocator->dalloc_batch_count += 1;
		free(cur);
	}
}
/*
 * Zero all counters/knobs on *ta and wire up its PAI vtable so it can be
 * installed as a SEC fallback.
 */
static inline void
pai_test_allocator_init(pai_test_allocator_t *ta) {
	ta->alloc_fail = false;
	ta->alloc_count = 0;
	ta->alloc_batch_count = 0;
	ta->dalloc_count = 0;
	ta->dalloc_batch_count = 0;
	/* Just don't start the edata at 0. */
	ta->next_ptr = 10 * PAGE;
	ta->expand_count = 0;
	ta->expand_return_value = false;
	ta->shrink_count = 0;
	ta->shrink_return_value = false;
	/* Hook table consumed through the embedded pai_t. */
	ta->pai.alloc = &pai_test_allocator_alloc;
	ta->pai.alloc_batch = &pai_test_allocator_alloc_batch;
	ta->pai.expand = &pai_test_allocator_expand;
	ta->pai.shrink = &pai_test_allocator_shrink;
	ta->pai.dalloc = &pai_test_allocator_dalloc;
	ta->pai.dalloc_batch = &pai_test_allocator_dalloc_batch;
}
/*
 * Verify that freed extents are cached per size class and handed back in
 * most-recently-freed-first order, with no new fallback traffic after the
 * initial fills.
 */
TEST_BEGIN(test_reuse) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/*
	 * We can't use the "real" tsd, since we malloc within the test
	 * allocator hooks; we'd get lock inversion crashes. Eventually, we
	 * should have a way to mock tsds, but for now just don't do any
	 * lock-order checking.
	 */
	tsdn_t *tsdn = TSDN_NULL;
	/*
	 * 11 allocs apiece of 1-PAGE and 2-PAGE objects means that we should be
	 * able to get to 33 pages in the cache before triggering a flush. We
	 * set the flush limit to twice this amount, to avoid accidentally
	 * triggering a flush caused by the batch-allocation down the cache fill
	 * pathway disrupting ordering.
	 */
	enum { NALLOCS = 11 };
	edata_t *one_page[NALLOCS];
	edata_t *two_page[NALLOCS];
	bool deferred_work_generated = false;
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, /* max_alloc */ 2 * PAGE,
	    /* max_bytes */ 2 * (NALLOCS * PAGE + NALLOCS * 2 * PAGE));
	for (int i = 0; i < NALLOCS; i++) {
		one_page[i] = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		expect_ptr_not_null(one_page[i], "Unexpected alloc failure");
		two_page[i] = pai_alloc(tsdn, &sec.pai, 2 * PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		/* Bug fix: this used to re-check one_page[i]. */
		expect_ptr_not_null(two_page[i], "Unexpected alloc failure");
	}
	expect_zu_eq(0, ta.alloc_count, "Should be using batch allocs");
	size_t max_allocs = ta.alloc_count + ta.alloc_batch_count;
	expect_zu_le(2 * NALLOCS, max_allocs,
	    "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of allocations");
	/*
	 * Free in a different order than we allocated, to make sure free-list
	 * separation works correctly.
	 */
	for (int i = NALLOCS - 1; i >= 0; i--) {
		pai_dalloc(tsdn, &sec.pai, one_page[i],
		    &deferred_work_generated);
	}
	for (int i = NALLOCS - 1; i >= 0; i--) {
		pai_dalloc(tsdn, &sec.pai, two_page[i],
		    &deferred_work_generated);
	}
	expect_zu_eq(max_allocs, ta.alloc_count + ta.alloc_batch_count,
	    "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of allocations");
	/*
	 * Check that the n'th most recent deallocated extent is returned for
	 * the n'th alloc request of a given size.
	 */
	for (int i = 0; i < NALLOCS; i++) {
		edata_t *alloc1 = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		edata_t *alloc2 = pai_alloc(tsdn, &sec.pai, 2 * PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		expect_ptr_eq(one_page[i], alloc1,
		    "Got unexpected allocation");
		expect_ptr_eq(two_page[i], alloc2,
		    "Got unexpected allocation");
	}
	expect_zu_eq(max_allocs, ta.alloc_count + ta.alloc_batch_count,
	    "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of allocations");
}
TEST_END
/*
 * Verify that overflowing max_bytes triggers an automatic flush that
 * empties the single active bin via batch deallocation.
 */
TEST_BEGIN(test_auto_flush) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	/*
	 * 10-allocs apiece of 1-PAGE and 2-PAGE objects means that we should be
	 * able to get to 30 pages in the cache before triggering a flush. The
	 * choice of NALLOCS here is chosen to match the batch allocation
	 * default (4 extra + 1 == 5; so 10 allocations leaves the cache exactly
	 * empty, even in the presence of batch allocation on fill).
	 * Eventually, once our allocation batching strategies become smarter,
	 * this should change.
	 */
	enum { NALLOCS = 10 };
	edata_t *extra_alloc;
	edata_t *allocs[NALLOCS];
	bool deferred_work_generated = false;
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, /* max_alloc */ PAGE,
	    /* max_bytes */ NALLOCS * PAGE);
	for (int i = 0; i < NALLOCS; i++) {
		allocs[i] = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		expect_ptr_not_null(allocs[i], "Unexpected alloc failure");
	}
	/* One extra extent; freeing it later will overflow the cache. */
	extra_alloc = pai_alloc(tsdn, &sec.pai, PAGE, PAGE, /* zero */ false,
	    /* guarded */ false, /* frequent_reuse */ false,
	    &deferred_work_generated);
	expect_ptr_not_null(extra_alloc, "Unexpected alloc failure");
	size_t max_allocs = ta.alloc_count + ta.alloc_batch_count;
	expect_zu_le(NALLOCS + 1, max_allocs,
	    "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of allocations");
	/* Free until the SEC is full, but should not have flushed yet. */
	for (int i = 0; i < NALLOCS; i++) {
		pai_dalloc(tsdn, &sec.pai, allocs[i], &deferred_work_generated);
	}
	expect_zu_le(NALLOCS + 1, max_allocs,
	    "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of allocations");
	/*
	 * Free the extra allocation; this should trigger a flush. The internal
	 * flushing logic is allowed to get complicated; for now, we rely on our
	 * whitebox knowledge of the fact that the SEC flushes bins in their
	 * entirety when it decides to do so, and it has only one bin active
	 * right now.
	 */
	pai_dalloc(tsdn, &sec.pai, extra_alloc, &deferred_work_generated);
	expect_zu_eq(max_allocs, ta.alloc_count + ta.alloc_batch_count,
	    "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of (non-batch) deallocations");
	expect_zu_eq(NALLOCS + 1, ta.dalloc_batch_count,
	    "Incorrect number of batch deallocations");
}
TEST_END
/*
 * A disable and a flush are *almost* equivalent; the only difference is what
 * happens afterwards; disabling disallows all future caching as well.
 */
static void
do_disable_flush_test(bool is_disable) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	enum { NALLOCS = 11 };
	edata_t *allocs[NALLOCS];
	bool deferred_work_generated = false;
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, /* max_alloc */ PAGE,
	    /* max_bytes */ NALLOCS * PAGE);
	for (int i = 0; i < NALLOCS; i++) {
		allocs[i] = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		expect_ptr_not_null(allocs[i], "Unexpected alloc failure");
	}
	/* Free all but the last alloc. */
	for (int i = 0; i < NALLOCS - 1; i++) {
		pai_dalloc(tsdn, &sec.pai, allocs[i], &deferred_work_generated);
	}
	size_t max_allocs = ta.alloc_count + ta.alloc_batch_count;
	expect_zu_le(NALLOCS, max_allocs, "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of allocations");
	/* Either operation should push the cached extents to the fallback. */
	if (is_disable) {
		sec_disable(tsdn, &sec);
	} else {
		sec_flush(tsdn, &sec);
	}
	expect_zu_eq(max_allocs, ta.alloc_count + ta.alloc_batch_count,
	    "Incorrect number of allocations");
	expect_zu_eq(0, ta.dalloc_count,
	    "Incorrect number of (non-batch) deallocations");
	expect_zu_le(NALLOCS - 1, ta.dalloc_batch_count,
	    "Incorrect number of batch deallocations");
	size_t old_dalloc_batch_count = ta.dalloc_batch_count;
	/*
	 * If we free into a disabled SEC, it should forward to the fallback.
	 * Otherwise, the SEC should accept the allocation.
	 */
	pai_dalloc(tsdn, &sec.pai, allocs[NALLOCS - 1],
	    &deferred_work_generated);
	expect_zu_eq(max_allocs, ta.alloc_count + ta.alloc_batch_count,
	    "Incorrect number of allocations");
	expect_zu_eq(is_disable ? 1 : 0, ta.dalloc_count,
	    "Incorrect number of (non-batch) deallocations");
	expect_zu_eq(old_dalloc_batch_count, ta.dalloc_batch_count,
	    "Incorrect number of batch deallocations");
}
/* Disabling must flush the cache and also forbid future caching. */
TEST_BEGIN(test_disable) {
	do_disable_flush_test(/* is_disable */ true);
}
TEST_END
/* Flushing must empty the cache while leaving it usable afterwards. */
TEST_BEGIN(test_flush) {
	do_disable_flush_test(/* is_disable */ false);
}
TEST_END
/*
 * Requests larger than max_alloc must bypass the cache: every alloc and
 * dalloc goes straight to the fallback, one for one.
 */
TEST_BEGIN(test_max_alloc_respected) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	size_t max_alloc = 2 * PAGE;
	size_t attempted_alloc = 3 * PAGE;	/* Deliberately oversized. */
	bool deferred_work_generated = false;
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, max_alloc,
	    /* max_bytes */ 1000 * PAGE);
	for (size_t i = 0; i < 100; i++) {
		expect_zu_eq(i, ta.alloc_count,
		    "Incorrect number of allocations");
		expect_zu_eq(i, ta.dalloc_count,
		    "Incorrect number of deallocations");
		edata_t *edata = pai_alloc(tsdn, &sec.pai, attempted_alloc,
		    PAGE, /* zero */ false, /* guarded */ false,
		    /* frequent_reuse */ false, &deferred_work_generated);
		expect_ptr_not_null(edata, "Unexpected alloc failure");
		expect_zu_eq(i + 1, ta.alloc_count,
		    "Incorrect number of allocations");
		expect_zu_eq(i, ta.dalloc_count,
		    "Incorrect number of deallocations");
		pai_dalloc(tsdn, &sec.pai, edata, &deferred_work_generated);
	}
}
TEST_END
TEST_BEGIN(test_expand_shrink_delegate) {
	/*
	 * Expand and shrink shouldn't affect sec state; they should just
	 * delegate to the fallback PAI.
	 */
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	bool deferred_work_generated = false;
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, /* max_alloc */ 10 * PAGE,
	    /* max_bytes */ 1000 * PAGE);
	edata_t *edata = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
	    /* zero */ false, /* guarded */ false, /* frequent_reuse */ false,
	    &deferred_work_generated);
	expect_ptr_not_null(edata, "Unexpected alloc failure");
	/* Success path: the fallback's configured result is passed through. */
	bool err = pai_expand(tsdn, &sec.pai, edata, PAGE, 4 * PAGE,
	    /* zero */ false, &deferred_work_generated);
	expect_false(err, "Unexpected expand failure");
	expect_zu_eq(1, ta.expand_count, "");
	/* Failure path: flipping the fallback's result must be visible. */
	ta.expand_return_value = true;
	err = pai_expand(tsdn, &sec.pai, edata, 4 * PAGE, 3 * PAGE,
	    /* zero */ false, &deferred_work_generated);
	expect_true(err, "Unexpected expand success");
	expect_zu_eq(2, ta.expand_count, "");
	err = pai_shrink(tsdn, &sec.pai, edata, 4 * PAGE, 2 * PAGE,
	    &deferred_work_generated);
	expect_false(err, "Unexpected shrink failure");
	expect_zu_eq(1, ta.shrink_count, "");
	ta.shrink_return_value = true;
	err = pai_shrink(tsdn, &sec.pai, edata, 2 * PAGE, PAGE,
	    &deferred_work_generated);
	expect_true(err, "Unexpected shrink success");
	expect_zu_eq(2, ta.shrink_count, "");
}
TEST_END
/*
 * With nshards == 0 the SEC must act as a pure passthrough: both alloc and
 * dalloc go directly to the fallback.
 */
TEST_BEGIN(test_nshards_0) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	base_t *base = base_new(TSDN_NULL, /* ind */ 123,
	    &ehooks_default_extent_hooks, /* metadata_use_hooks */ true);
	sec_opts_t opts = SEC_OPTS_DEFAULT;
	opts.nshards = 0;
	sec_init(TSDN_NULL, &sec, base, &ta.pai, &opts);
	bool deferred_work_generated = false;
	edata_t *edata = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
	    /* zero */ false, /* guarded */ false, /* frequent_reuse */ false,
	    &deferred_work_generated);
	pai_dalloc(tsdn, &sec.pai, edata, &deferred_work_generated);
	/* Both operations should have gone directly to the fallback. */
	expect_zu_eq(1, ta.alloc_count, "");
	expect_zu_eq(1, ta.dalloc_count, "");
}
TEST_END
/*
 * Assert that *sec reports at least npages of cached data.  The sentinel
 * seeded into stats.bytes also verifies that merging accumulates into the
 * caller's struct rather than overwriting it.
 */
static void
expect_stats_pages(tsdn_t *tsdn, sec_t *sec, size_t npages) {
	sec_stats_t stats;
	/*
	 * Check that the stats merging accumulates rather than overwrites by
	 * putting some (made up) data there to begin with.
	 */
	stats.bytes = 123;
	sec_stats_merge(tsdn, sec, &stats);
	assert_zu_le(npages * PAGE + 123, stats.bytes, "");
}
/*
 * Stats must track cached pages as extents repeatedly move in and out of
 * the SEC without ever triggering a flush.
 */
TEST_BEGIN(test_stats_simple) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	enum {
		NITERS = 100,
		FLUSH_PAGES = 20,
	};
	bool deferred_work_generated = false;
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, /* max_alloc */ PAGE,
	    /* max_bytes */ FLUSH_PAGES * PAGE);
	edata_t *allocs[FLUSH_PAGES];
	for (size_t i = 0; i < FLUSH_PAGES; i++) {
		allocs[i] = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		expect_stats_pages(tsdn, &sec, 0);
	}
	/* Increase and decrease, without flushing. */
	for (size_t i = 0; i < NITERS; i++) {
		for (size_t j = 0; j < FLUSH_PAGES / 2; j++) {
			pai_dalloc(tsdn, &sec.pai, allocs[j],
			    &deferred_work_generated);
			expect_stats_pages(tsdn, &sec, j + 1);
		}
		for (size_t j = 0; j < FLUSH_PAGES / 2; j++) {
			allocs[j] = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
			    /* zero */ false, /* guarded */ false,
			    /* frequent_reuse */ false,
			    &deferred_work_generated);
			expect_stats_pages(tsdn, &sec, FLUSH_PAGES / 2 - j - 1);
		}
	}
}
TEST_END
/* Stats must remain consistent across automatically-triggered flushes. */
TEST_BEGIN(test_stats_auto_flush) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	enum {
		FLUSH_PAGES = 10,
	};
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, /* max_alloc */ PAGE,
	    /* max_bytes */ FLUSH_PAGES * PAGE);
	edata_t *extra_alloc0;
	edata_t *extra_alloc1;
	edata_t *allocs[2 * FLUSH_PAGES];
	bool deferred_work_generated = false;
	extra_alloc0 = pai_alloc(tsdn, &sec.pai, PAGE, PAGE, /* zero */ false,
	    /* guarded */ false, /* frequent_reuse */ false,
	    &deferred_work_generated);
	extra_alloc1 = pai_alloc(tsdn, &sec.pai, PAGE, PAGE, /* zero */ false,
	    /* guarded */ false, /* frequent_reuse */ false,
	    &deferred_work_generated);
	for (size_t i = 0; i < 2 * FLUSH_PAGES; i++) {
		allocs[i] = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
	}
	/* Fill the cache, then overflow it to force a flush. */
	for (size_t i = 0; i < FLUSH_PAGES; i++) {
		pai_dalloc(tsdn, &sec.pai, allocs[i], &deferred_work_generated);
	}
	pai_dalloc(tsdn, &sec.pai, extra_alloc0, &deferred_work_generated);
	/* Flush the remaining pages; stats should still work. */
	for (size_t i = 0; i < FLUSH_PAGES; i++) {
		pai_dalloc(tsdn, &sec.pai, allocs[FLUSH_PAGES + i],
		    &deferred_work_generated);
	}
	pai_dalloc(tsdn, &sec.pai, extra_alloc1, &deferred_work_generated);
	/* Cached pages == everything allocated minus everything flushed. */
	expect_stats_pages(tsdn, &sec, ta.alloc_count + ta.alloc_batch_count
	    - ta.dalloc_count - ta.dalloc_batch_count);
}
TEST_END
/* Stats must drop to zero after explicit sec_flush / sec_disable calls. */
TEST_BEGIN(test_stats_manual_flush) {
	pai_test_allocator_t ta;
	pai_test_allocator_init(&ta);
	sec_t sec;
	/* See the note above -- we can't use the real tsd. */
	tsdn_t *tsdn = TSDN_NULL;
	enum {
		FLUSH_PAGES = 10,
	};
	test_sec_init(&sec, &ta.pai, /* nshards */ 1, /* max_alloc */ PAGE,
	    /* max_bytes */ FLUSH_PAGES * PAGE);
	bool deferred_work_generated = false;
	edata_t *allocs[FLUSH_PAGES];
	for (size_t i = 0; i < FLUSH_PAGES; i++) {
		allocs[i] = pai_alloc(tsdn, &sec.pai, PAGE, PAGE,
		    /* zero */ false, /* guarded */ false, /* frequent_reuse */
		    false, &deferred_work_generated);
		expect_stats_pages(tsdn, &sec, 0);
	}
	/* Dalloc the first half of the allocations. */
	for (size_t i = 0; i < FLUSH_PAGES / 2; i++) {
		pai_dalloc(tsdn, &sec.pai, allocs[i], &deferred_work_generated);
		expect_stats_pages(tsdn, &sec, i + 1);
	}
	sec_flush(tsdn, &sec);
	expect_stats_pages(tsdn, &sec, 0);
	/* Flush the remaining pages. */
	for (size_t i = 0; i < FLUSH_PAGES / 2; i++) {
		pai_dalloc(tsdn, &sec.pai, allocs[FLUSH_PAGES / 2 + i],
		    &deferred_work_generated);
		expect_stats_pages(tsdn, &sec, i + 1);
	}
	sec_disable(tsdn, &sec);
	expect_stats_pages(tsdn, &sec, 0);
}
TEST_END
/*
 * Test driver.  (Fix: removed dataset-extraction residue that had been
 * appended after the closing brace, which made the file uncompilable.)
 */
int
main(void) {
	return test(
	    test_reuse,
	    test_auto_flush,
	    test_disable,
	    test_flush,
	    test_max_alloc_respected,
	    test_expand_shrink_delegate,
	    test_nshards_0,
	    test_stats_simple,
	    test_stats_auto_flush,
	    test_stats_manual_flush);
}
/*
* Copyright (C) 2010 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.util.concurrent;
import static org.junit.Assert.assertThrows;
import com.google.common.testing.NullPointerTester;
import java.util.concurrent.atomic.AtomicReferenceArray;
import junit.framework.TestCase;
import org.jspecify.annotations.NullUnmarked;
/**
* Unit test for {@link Atomics}.
*
* @author Kurt Alfred Kluever
*/
@NullUnmarked
public class AtomicsTest extends TestCase {
private static final Object OBJECT = new Object();
public void testNewReference() throws Exception {
assertEquals(null, Atomics.newReference().get());
}
public void testNewReference_withInitialValue() throws Exception {
assertEquals(null, Atomics.newReference(null).get());
assertEquals(OBJECT, Atomics.newReference(OBJECT).get());
}
public void testNewReferenceArray_withLength() throws Exception {
int length = 42;
AtomicReferenceArray<String> refArray = Atomics.newReferenceArray(length);
for (int i = 0; i < length; ++i) {
assertEquals(null, refArray.get(i));
}
assertThrows(IndexOutOfBoundsException.class, () -> refArray.get(length));
}
public void testNewReferenceArray_withNegativeLength() throws Exception {
assertThrows(NegativeArraySizeException.class, () -> Atomics.newReferenceArray(-1));
}
public void testNewReferenceArray_withStringArray() throws Exception {
String[] array = {"foo", "bar", "baz"};
AtomicReferenceArray<String> refArray = Atomics.newReferenceArray(array);
for (int i = 0; i < array.length; ++i) {
assertEquals(array[i], refArray.get(i));
}
assertThrows(IndexOutOfBoundsException.class, () -> refArray.get(array.length));
}
public void testNewReferenceArray_withNullArray() throws Exception {
assertThrows(NullPointerException.class, () -> Atomics.newReferenceArray(null));
}
public void testNullPointers() {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicConstructors(Atomics.class); // there aren't any
tester.testAllPublicStaticMethods(Atomics.class);
}
} | java | github | https://github.com/google/guava | android/guava-tests/test/com/google/common/util/concurrent/AtomicsTest.java |
- name: Install all collections in a repo, one of which has a recursive dependency
command: 'ansible-galaxy collection install git+file://{{ scm_path }}/namespace_1/.git'
register: command
- assert:
that:
- command.stdout_lines | length == 12
- >-
'Starting galaxy collection install process'
in command.stdout_lines
- >-
'Starting collection install process'
in command.stdout_lines
- >-
"Installing 'namespace_1.collection_1:1.0.0' to '" +
install_path + "/namespace_1/collection_1'"
in command.stdout_lines
- >-
'Created collection for namespace_1.collection_1:1.0.0 at ' +
install_path + '/namespace_1/collection_1'
in command.stdout_lines
- >-
'namespace_1.collection_1:1.0.0 was installed successfully'
in command.stdout_lines
- >-
"Installing 'namespace_2.collection_2:1.0.0' to '" +
install_path + "/namespace_2/collection_2'"
in command.stdout_lines
- >-
'Created collection for namespace_2.collection_2:1.0.0 at ' +
install_path + '/namespace_2/collection_2'
in command.stdout_lines
- >-
'namespace_2.collection_2:1.0.0 was installed successfully'
in command.stdout_lines
- name: list installed collections
command: 'ansible-galaxy collection list'
register: installed_collections
- assert:
that:
- "'namespace_1.collection_1' in installed_collections.stdout"
- "'namespace_2.collection_2' in installed_collections.stdout"
- name: Install a specific collection in a repo with a recursive dependency
command: 'ansible-galaxy collection install git+file://{{ scm_path }}/namespace_1/.git#/collection_1/ --force-with-deps'
register: command
- assert:
that:
- command.stdout_lines | length == 12
- >-
'Starting galaxy collection install process'
in command.stdout_lines
- >-
'Starting collection install process'
in command.stdout_lines
- >-
"Installing 'namespace_1.collection_1:1.0.0' to '" +
install_path + "/namespace_1/collection_1'"
in command.stdout_lines
- >-
'Created collection for namespace_1.collection_1:1.0.0 at ' +
install_path + '/namespace_1/collection_1'
in command.stdout_lines
- >-
'namespace_1.collection_1:1.0.0 was installed successfully'
in command.stdout_lines
- >-
"Installing 'namespace_2.collection_2:1.0.0' to '" +
install_path + "/namespace_2/collection_2'"
in command.stdout_lines
- >-
'Created collection for namespace_2.collection_2:1.0.0 at ' +
install_path + '/namespace_2/collection_2'
in command.stdout_lines
- >-
'namespace_2.collection_2:1.0.0 was installed successfully'
in command.stdout_lines
- name: list installed collections
command: 'ansible-galaxy collection list'
register: installed_collections
- assert:
that:
- "'namespace_1.collection_1' in installed_collections.stdout"
- "'namespace_2.collection_2' in installed_collections.stdout"
- include_tasks: ./empty_installed_collections.yml
  when: cleanup
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.