function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def pdb_string_to_pdb_dict(filestring):
"""Takes a .pdb filestring and turns into a ``dict`` which represents its
record structure. Only lines which aren't empty are used.
The resultant dictionary has line types as the keys, which point to the
lines as its value. So ``{"TITLE": ["TITLE line 1", "TITLE ... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def pdb_dict_to_data_dict(pdb_dict):
"""Converts an .pdb dictionary into an atomium data dictionary, with the
same standard layout that the other file formats get converted into.
:param dict pdb_dict: the .pdb dictionary.
:rtype: ``dict``"""
data_dict = {
"description": {
"code": None, ... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def update_experiment_dict(pdb_dict, data_dict):
"""Creates the experiment component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_technique(pdb_dict, data_dict["expe... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def update_geometry_dict(pdb_dict, data_dict):
"""Creates the geometry component of a standard atomium data dictionary
from a .pdb dictionary.
:param dict pdb_dict: The .pdb dictionary to read.
:param dict data_dict: The data dictionary to update."""
extract_assembly_remark(pdb_dict, data_dict["ge... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def extract_header(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the HEADER
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("HEADER"):
line = pdb_dict["HEADER"][0]
... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def extract_keywords(pdb_dict, description_dict):
"""Takes a ``dict`` and adds header information to it by parsing the KEYWDS
line.
:param dict pdb_dict: the ``dict`` to read.
:param dict description_dict: the ``dict`` to update."""
if pdb_dict.get("KEYWDS"):
text = merge_lines(pdb_dict["K... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def extract_technique(pdb_dict, experiment_dict):
"""Takes a ``dict`` and adds technique information to it by parsing EXPDTA
lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict experiment_dict: the ``dict`` to update."""
if pdb_dict.get("EXPDTA"):
if pdb_dict["EXPDTA"][0].strip(... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def extract_missing_residues(pdb_dict, experiment_dict):
"""Takes a ``dict`` and adds missing residue information to it by parsing
REMARK 465 lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict experiment_dict: the ``dict`` to update."""
for line in pdb_dict.get("REMARK", {}).get("465",... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def extract_rvalue_remark(pdb_dict, quality_dict):
"""Takes a ``dict`` and adds resolution information to it by parsing REMARK
3 lines.
:param dict pdb_dict: the ``dict`` to read.
:param dict quality_dict: the ``dict`` to update."""
if pdb_dict.get("REMARK") and pdb_dict["REMARK"].get("3"):
... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def assembly_lines_to_assembly_dict(lines):
"""Takes the lines representing a single biological assembly and turns
them into an assembly dictionary.
:param list lines: The REMARK lines to read.
:rtype: ``dict``"""
assembly = {
"transformations": [], "software": None, "buried_surface_area": No... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def make_sequences(pdb_dict):
"""Creates a mapping of chain IDs to sequences, by parsing SEQRES records.
:param dict pdb_dict: the .pdb dictionary to read.
:rtype: ``dict``"""
seq = {}
if pdb_dict.get("SEQRES"):
for line in pdb_dict["SEQRES"]:
chain, residues = line[11], line[1... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def get_full_names(pdb_dict):
"""Creates a mapping of het names to full English names.
:param pdb_dict: the .pdb dict to read.
:rtype: ``dict``"""
full_names = {}
for line in pdb_dict.get("HETNAM", []):
try:
full_names[line[11:14].strip()] += line[15:].strip()
except: f... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def get_last_ter_line(model_lines):
"""Gets the index of the last TER record in a list of records. 0 will be
returned if there are none.
:param list model_lines: the lines to search.
:rtype: ``int``"""
last_ter = 0
for index, line in enumerate(model_lines[::-1]):
if line[:3] == "TER":
... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def add_atom_to_polymer(line, model, chain_id, res_id, aniso_dict, full_names):
"""Takes an .pdb ATOM or HETATM record, converts it, and adds it to a
polymer dictionary.
:param dict line: the line to read.
:param dict model: the model to update.
:param str chain_id: the chain ID to add to.
:par... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def atom_line_to_dict(line, aniso_dict):
"""Converts an ATOM or HETATM record to an atom dictionary.
:param str line: the record to convert.
:param dict aniso_dict: the anisotropy dictionary to use.
:rtype: ``dict``"""
a = {
"occupancy": 1, "bvalue": None, "charge": 0,
"anisotropy": anis... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def structure_to_pdb_string(structure):
"""Converts a :py:class:`.AtomStructure` to a .pdb filestring.
:param AtomStructure structure: the structure to convert.
:rtype: ``str``"""
lines = []
pack_sequences(structure, lines)
atoms = sorted(structure.atoms(), key=lambda a: a.id)
for i, atom ... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def atom_to_atom_line(a, lines):
"""Converts an :py:class:`.Atom` to an ATOM or HETATM record. ANISOU lines
will also be added where appropriate.
:param Atom a: The Atom to pack.
:param list lines: the string lines to update."""
line = "{:6}{:5} {:4} {:3} {:1}{:4}{:1} "
line += "{:>8}{:>8}{:... | samirelanduk/molecupy | [
97,
19,
97,
13,
1462320672
] |
def chdir(dirname=None):
    """Context-manager body: temporarily change the working directory.

    Changes into *dirname* (when given), yields, and always restores the
    directory that was current on entry — even if the body raises.

    :param dirname: directory to switch into, or None for a no-op switch.
    """
    saved_dir = os.getcwd()
    try:
        if dirname is not None:
            os.chdir(dirname)
        yield
    finally:
        # Restore unconditionally so callers never leak a cwd change.
        os.chdir(saved_dir)
12,
11,
12,
7,
1438202896
] |
def __init__(self, args):
    """Store the parsed command-line options for later use.

    :param args: parsed argument namespace (presumably from argparse —
        confirm against the caller).
    """
    self.args = args
12,
11,
12,
7,
1438202896
] |
def https_url_with_auth(self, base_url):
    """Return *base_url* with URL-encoded basic-auth credentials injected.

    :param str base_url: an ``https://`` URL without credentials.
    :return: the same URL with ``user:password@`` inserted after the scheme.
    """
    _, remainder = base_url.split('https://')
    credentials = '%s:%s' % (quote(self.args.username), quote(self.args.password))
    return 'https://%s@%s' % (credentials, remainder)
12,
11,
12,
7,
1438202896
] |
def get_pagination(raw_link_header):
    """Parse a GitHub ``Link`` header into a Pagination tuple.

    Each comma-separated entry looks like ``<url>; rel="next"``; the rel
    name becomes the key and the angle-bracket-stripped URL the value.
    Missing rels come through as None.

    :param str raw_link_header: the raw ``Link`` header value.
    :return: a Pagination namedtuple populated from the header.
    """
    link_map = {
        rel.split('=')[1].strip('"'): link.strip(' <>')
        for link, rel in (part.split(';') for part in raw_link_header.split(','))
    }
    return Pagination(*(link_map.get(f) for f in Pagination._fields))
12,
11,
12,
7,
1438202896
] |
def get_repos(org, repo_type, access_token=None, username=None, password=None, per_page=25):
"""
Paginates through all of the repositories using github's Link header.
https://developer.github.com/v3/#link-header
"""
url = API_BASE + 'orgs/%s/repos?' % org
qs_params = {'type': repo_type, 'per... | sproutsocial/botanist | [
12,
11,
12,
7,
1438202896
] |
def repocsv(string):
"""
>>> repocsv('org1/repo1, org2/repo2,org3/repo3 ,org4/repo4')
['org1/repo1', 'org2/repo2', 'org3/repo3', 'org4/repo4']
"""
try:
repos = [r.strip() for r in string.split(',')]
return set(repos)
except Exception as exc:
raise argparse.ArgumentTypeErr... | sproutsocial/botanist | [
12,
11,
12,
7,
1438202896
] |
def prepare(self):
    """Register this command's arguments: the dump file to restore."""
    self.add_arg('file', help='The json dump file to restore')
27,
3,
27,
6,
1360522969
] |
def __init__(self, size):
    """Store the element's fixed size.

    :param size: the size to report (presumably a Point — confirm
        against the layout tests that construct this element).
    """
    self.size = size
17,
1,
17,
1,
1308760437
] |
def render(self, rect, data):
    """Record the rectangle this element was laid out into.

    Only *rect* is stored; *data* is accepted for interface
    compatibility and ignored.
    """
    self.rect = rect
17,
1,
17,
1,
1308760437
] |
def test_center_minimum_size(self):
    """A box with only a center element reports that element's size."""
    b = BoxLM()
    b.center = DummyElement(Point(3,4))
    self.assertEqual(b.get_minimum_size(None), Point(3,4))
17,
1,
17,
1,
1308760437
] |
def test_horizontal_minimum_size(self):
    """Widths of left/center/right add up (2+3+1); height is the row max."""
    b = BoxLM()
    b.left = DummyElement(Point(2,4))
    b.center = DummyElement(Point(3,4))
    b.right = DummyElement(Point(1,5))
    self.assertEqual(b.get_minimum_size(None), Point(6,5))
17,
1,
17,
1,
1308760437
] |
def test_margin_minimum_size(self):
    """Heights of top/center/bottom stack plus margins.

    Width 5 is the widest child (bottom); height 9 is 2+4+1 plus a
    margin of 1 in each of the two gaps between elements.
    """
    b = BoxLM()
    b.top = DummyElement(Point(4,2))
    b.center = DummyElement(Point(3,4))
    b.bottom = DummyElement(Point(5,1))
    b.margin = 1
    self.assertEqual(b.get_minimum_size(None), Point(5,9))
17,
1,
17,
1,
1308760437
] |
def home():
    """Render the site's landing page."""
    return render_template('./home.html')
34,
9,
34,
3,
1356634216
] |
def generator():
    """Render the placeholder-image generator page."""
    return render_template('./generator.html')
34,
9,
34,
3,
1356634216
] |
def configurator():
    """Permanently redirect the old /configurator URL to /generator."""
    return redirect('/generator', 301)
34,
9,
34,
3,
1356634216
] |
def examples():
    """Render the examples gallery page."""
    return render_template('./examples.html')
34,
9,
34,
3,
1356634216
] |
def show_image_square(square):
    """Serve a square placeholder image with side length *square*."""
    return show_image_width_height(square, square)
34,
9,
34,
3,
1356634216
] |
def show_image_width_height(width, height):
    """Serve a *width* x *height* placeholder image.

    The caption comes from the optional ``text`` query parameter and
    defaults to an empty string.
    """
    return show_image_width_height_caption(
        width, height, request.args.get('text', ''))
34,
9,
34,
3,
1356634216
] |
def show_image_width_height_caption(width, height, caption):
width = min([width, 5000])
height = min([height, 5000])
bg_color_hex = request.args.get('bg_color', '#C7C7C7')
text_color = hex_to_rgb(request.args.get('text_color', '#8F8F8F'))
text_color_hex = request.args.get('text_color', '#8F8F8F')
return gen... | kylehayes/fpoimg | [
34,
9,
34,
3,
1356634216
] |
def hex_to_rgb(value):
'''
Algorithm provided by @Jeremy Cantrell on StackOverflow.com:
http://stackoverflow.com/questions/214359/converting-hex-color-to-rgb-and-vice-versa
'''
if len(value.strip()) != 0:
if value[0] == '#':
value = value[1:]
len_value = len(value)
if len_value not in [3... | kylehayes/fpoimg | [
34,
9,
34,
3,
1356634216
] |
def writeAndUploadCSV(data="", fieldnames=['name', 'category']):
new_csvfile = io.StringIO()
wr = csv.DictWriter(new_csvfile, fieldnames=fieldnames, quoting=csv.QUOTE_ALL)
wr.writeheader()
wr.writerow(data)
buffer = io.BytesIO(new_csvfile.getvalue().encode())
ts = datetime.datetime.now().timestamp()
now =... | kylehayes/fpoimg | [
34,
9,
34,
3,
1356634216
] |
def generate(width, height, caption="", bg_color=(100,100,100), text_color=(200,200,200)):
size = (width,height) # size of the image to create
im = Image.new('RGB', size, bg_color) # create the image
draw = ImageDraw.Draw(im) # create a drawing object
DEFAULT_DIM_SIZE = 50
DEFAULT_CAPTION_SIZE = 30
... | kylehayes/fpoimg | [
34,
9,
34,
3,
1356634216
] |
def __call__(self, ledgers, report, output):
for line in self.generate(ledgers, report):
output(line) | pcapriotti/pledger | [
14,
2,
14,
4,
1309686730
] |
def lpad(self, item, size, color=None):
    """Truncate *item* to *size* characters and right-pad with spaces.

    The color codes are applied to the text only, so padding never
    carries ANSI escapes.

    :param item: value to render; converted with str().
    :param int size: the fixed column width.
    :param color: optional color name passed to self.colored.
    :return: the colored text followed by padding spaces.
    """
    text = str(item)[:size]
    padding = " " * max(size - len(text), 0)
    return self.colored(color, text) + padding
14,
2,
14,
4,
1309686730
] |
def print_account(self, account, size=39):
    """Format an account name in the account color.

    With a numeric *size* the name is shortened and padded to that
    width; with ``size=None`` the full name is returned unpadded.

    :param account: the account to render.
    :param size: column width, or None for no truncation/padding.
    """
    if size is None:
        return self.colored(self.ACCOUNT_COLOR, account.name)
    shortened = account.shortened_name(size)
    return self.lpad(shortened, size, self.ACCOUNT_COLOR)
14,
2,
14,
4,
1309686730
] |
def colored(self, color, text):
    """Wrap *text* in ANSI escape codes for *color*.

    A falsy *color* returns the text unchanged.
    """
    if not color:
        return text
    return COLORS[color] + text + COLORS["nocolor"]
14,
2,
14,
4,
1309686730
] |
def generate(self, ledgers, report):
it = report.generate(ledgers)
# save total
total = next(it)
count = 0
for entry in it:
components = entry.amount.components()
for component in components[:-1]:
yield self.print_value(component)
... | pcapriotti/pledger | [
14,
2,
14,
4,
1309686730
] |
def generate(self, ledgers, report):
last_entry = None
for entry in report.generate(ledgers):
if last_entry and id(last_entry.transaction) == id(entry.transaction):
for line in self.print_secondary_entry(entry):
yield line
else:
... | pcapriotti/pledger | [
14,
2,
14,
4,
1309686730
] |
def print_secondary_entry(self, entry):
currencies = sorted(
set(entry.entry.amount.currencies()).union(entry.total.currencies()))
components = entry.entry.amount.components(currencies)
total_components = entry.total.components(currencies)
yield "%s %s %s %s" % (
... | pcapriotti/pledger | [
14,
2,
14,
4,
1309686730
] |
def percentChange(startPoint, currentPoint):
    """Return the percent change from *startPoint* to *currentPoint*.

    Preserves the original sentinel behaviour relied on by the pattern
    matcher: an exact zero change is reported as 0.000000001 and any
    arithmetic failure (zero or non-numeric *startPoint*) yields 0.0001,
    so downstream similarity maths never sees 0 or a crash.

    :param startPoint: the reference value.
    :param currentPoint: the new value.
    :return: percent change as a float (never exactly 0.0).
    """
    try:
        change = ((float(currentPoint) - startPoint) / abs(startPoint)) * 100.00
    # Narrowed from a bare ``except:`` — only the failures this
    # arithmetic can actually produce are swallowed now.
    except (ZeroDivisionError, TypeError, ValueError):
        return 0.0001
    return change if change != 0.0 else 0.000000001
199,
100,
199,
1,
1427336523
] |
def currentPattern():
    """Read the most recent point from the module-level ``avgLine``.

    NOTE(review): the visible body only captures the last point into a
    local; presumably more pattern-building follows in the original
    module — confirm before relying on this function doing anything.
    """
    mostRecentPoint = avgLine[-1]
199,
100,
199,
1,
1427336523
] |
def graphRawFX(): | PythonProgramming/Pattern-Recognition-for-Forex-Trading | [
199,
100,
199,
1,
1427336523
] |
def patternRecognition():
for eachPattern in patternAr:
sim1 = 100.00 - abs(percentChange(eachPattern[0], patForRec[0]))
sim2 = 100.00 - abs(percentChange(eachPattern[1], patForRec[1]))
sim3 = 100.00 - abs(percentChange(eachPattern[2], patForRec[2]))
sim4 = 100.00 - abs(percentChange... | PythonProgramming/Pattern-Recognition-for-Forex-Trading | [
199,
100,
199,
1,
1427336523
] |
def __init__(self, market_place=None, product_id=None): # noqa: E501
"""AppStoreProduct - a model defined in Swagger""" # noqa: E501
self._market_place = None
self._product_id = None
self.discriminator = None
if market_place is not None:
self.market_place = market... | docusign/docusign-python-client | [
77,
83,
77,
32,
1502240567
] |
def market_place(self):
    """Gets the market_place of this AppStoreProduct.

    :return: The market_place of this AppStoreProduct.
    :rtype: str
    """
    return self._market_place
77,
83,
77,
32,
1502240567
] |
def market_place(self, market_place):
    """Sets the market_place of this AppStoreProduct.

    :param market_place: The market_place of this AppStoreProduct.
    :type: str
    """
    self._market_place = market_place
77,
83,
77,
32,
1502240567
] |
def product_id(self):
    """Gets the product_id of this AppStoreProduct.

    The Product ID from the AppStore.

    :return: The product_id of this AppStoreProduct.
    :rtype: str
    """
    return self._product_id
77,
83,
77,
32,
1502240567
] |
def product_id(self, product_id):
    """Sets the product_id of this AppStoreProduct.

    The Product ID from the AppStore.

    :param product_id: The product_id of this AppStoreProduct.
    :type: str
    """
    self._product_id = product_id
77,
83,
77,
32,
1502240567
] |
def to_str(self):
    """Returns the pretty-printed string representation of the model."""
    return pprint.pformat(self.to_dict())
77,
83,
77,
32,
1502240567
] |
def __eq__(self, other):
    """Return True when *other* is an AppStoreProduct with equal state."""
    return isinstance(other, AppStoreProduct) and self.__dict__ == other.__dict__
77,
83,
77,
32,
1502240567
] |
def dov_proxy_no_xdov():
"""Fixture to start the DOV proxy and set PYDOV_BASE_URL to route
traffic through it.
The DOV proxy behaves as the XDOV server would be unavailable.
"""
process = Popen([sys.executable,
os.path.join(os.path.dirname(os.path.abspath(__file__)),
... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def reload_modules(dov_proxy_no_xdov):
"""Reload the boring and grondwaterfilter modules after setting
PYDOV_BASE_URL.
These need to be reloaded because they use the PYDOV_BASE_URL at import
time to set the location of XSD schemas.
Parameters
----------
dov_proxy_no_xdov : pytest.fixture
... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def reset_cache(dov_proxy_no_xdov):
"""Reset the cache to a temporary folder to remove influence from other
tests.
The cache needs to be reset after setting the PYDOV_BASE_URL variable
because at initialisation this URL is used to construct a regex for
determining the datatype of an XML request.
... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_hook_count():
    """PyTest fixture that swaps pydov's hooks for a single HookCounter.

    Default hooks are disabled for the duration of the test; the
    original hook set is restored when the test yields back.
    """
    orig_hooks = pydov.hooks
    pydov.hooks = Hooks(
        (HookCounter(),)
    )
    yield
    pydov.hooks = orig_hooks
30,
16,
30,
15,
1482487793
] |
def test_do_not_cache_error(self):
"""Test whether the 404 error page does not end up being cached."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2004-103984')))
assert not os.path.exi... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_do_not_overwrite_stale_cache(self):
"""Test whether a stale copy of the data which exists in the cache is
not overwritten by the 404 error page."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
testdata_path = os.path.join(
'tests', 'data', 'types', 'borin... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_stale_warning(self):
"""Test whether a stale version of the data from the cache is used in
case of a service error, and if a warning is issued to the user."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
testdata_path = os.path.join(
'tests', 'data', 'typ... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_stale_disabled(self):
"""Test whether no stale version of the data from the cache is used
when disabled, and if a warning is issued to the user."""
pydov.cache.stale_on_error = False
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
testdata_path = os.path.join(
... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_wfs_data_present(self):
"""Test whether data available in the WFS is present in the dataframe
in case of a service error in XDOV."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
df = bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/bo... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_nan_and_fetch_warning(self):
"""Test whether the XML data is set tot NaN in case of an error and
no stale cache is available. Also test if a warning is given to the
user."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
with pytest.warns(XmlFetchWarning):
... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_no_xsd_warning(self):
"""Test whether the metadata can still be retrieved, and that the
XSD values are unavailable. Also test if a warning is given to the
user."""
with pytest.warns(XsdFetchWarning):
gwf = GrondwaterFilterSearch(
objecttype=pydov.type... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_no_xsd_wfs_only(self):
"""Test whether the WFS data is available, even if XSD schemas cannot
be resolved."""
gwf = GrondwaterFilterSearch(
objecttype=pydov.types.grondwaterfilter.GrondwaterFilter)
df = gwf.search(max_features=1)
assert df.iloc[0].pkey_filter... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def test_hooks_fetch_error(self, test_hook_count):
"""Test if the correct hooks are fired when the XML fails to be
fetched from DOV.
Parameters
----------
test_hook_count : pytest.fixture
Fixture removing default hooks and installing HookCounter.
"""
... | DOV-Vlaanderen/pydov | [
30,
16,
30,
15,
1482487793
] |
def lazy_tag(tag, *args, **kwargs):
"""
Lazily loads a template tag after the page has loaded. Requires jQuery
(for now).
Usage:
{% load lazy_tags %}
{% lazy_tag 'tag_lib.tag_name' arg1 arg2 kw1='test' kw2='hello' %}
Args:
tag (str): the tag library and tag name separated ... | grantmcconnaughey/django-lazy-tags | [
24,
5,
24,
2,
1437441030
] |
def lazy_tags_javascript():
    """Outputs the necessary vanilla JavaScript to load tags over AJAX."""
    return _render_js('javascript')
24,
5,
24,
2,
1437441030
] |
def lazy_tags_jquery():
    """Outputs the necessary jQuery code to load tags over AJAX."""
    return _render_js('jquery')
24,
5,
24,
2,
1437441030
] |
def lazy_tags_prototype():
    """Outputs the necessary Prototype code to load tags over AJAX."""
    return _render_js('prototype')
24,
5,
24,
2,
1437441030
] |
def given_a_call_counter(self):
    """Arrange: reset the call counter and pin the expected call contract."""
    self.x = 0
    self.expected_args = (1, 4, "hello")
    self.expected_kwargs = {"blah": "bloh", "bleh": 5}
    self.expected_return_value = "some thing that was returned"
10,
2,
10,
2,
1438953343
] |
def it_should_forward_the_arguments(self):
    """Assert the positional arguments were forwarded unchanged."""
    assert self.args == self.expected_args
10,
2,
10,
2,
1438953343
] |
def it_should_call_it_once(self):
    """Assert the wrapped function ran exactly once."""
    assert self.x == 1
10,
2,
10,
2,
1438953343
] |
def function_to_break(self, *args, **kwargs):
    """Spy: count the call, capture its arguments, return the canned value."""
    self.x += 1
    self.args = args
    self.kwargs = kwargs
    return self.expected_return_value
10,
2,
10,
2,
1438953343
] |
def given_an_exception_to_throw(self):
self.x = 0
self.expected_exception = ValueError()
@circuitbreaker(ValueError, threshold=3, reset_timeout=1, on_error=self.on_error_callback)
def function_to_break():
self.x += 1
raise self.expected_exception
self.fun... | benjamin-hodgson/poll | [
10,
2,
10,
2,
1438953343
] |
def it_should_bubble_the_exception_out(self):
    """Assert the original exception object propagated out of the breaker."""
    assert self.exception is self.expected_exception
10,
2,
10,
2,
1438953343
] |
def it_should_call_the_on_error_callback(self):
    """Assert the on_error callback fired."""
    assert self.on_error_called
10,
2,
10,
2,
1438953343
] |
def given_an_exception_to_throw(self):
self.x = 0
self.expected_exception = ValueError()
@circuitbreaker(ValueError, threshold=3, reset_timeout=1, on_error=self.on_error_callback)
def function_to_break():
self.x += 1
raise self.expected_exception
self.fun... | benjamin-hodgson/poll | [
10,
2,
10,
2,
1438953343
] |
def it_should_bubble_the_exception_out(self):
    """Assert the original exception object propagated out of the breaker."""
    assert self.exception is self.expected_exception
10,
2,
10,
2,
1438953343
] |
def it_should_call_the_on_error_callback(self):
    """Assert on_error received the exception that was raised."""
    assert self.on_error_result is self.expected_exception
10,
2,
10,
2,
1438953343
] |
def given_the_function_has_failed_twice(self):
    """Arrange: drive two failures, staying below the breaker threshold."""
    self.expected_exception = ValueError()
    contexts.catch(self.function_to_break)
    contexts.catch(self.function_to_break)
10,
2,
10,
2,
1438953343
] |
def it_should_bubble_the_exception_out(self):
    """Assert the original exception object propagated out of the breaker."""
    assert self.exception is self.expected_exception
10,
2,
10,
2,
1438953343
] |
def function_to_break(self):
    """Always raise the pre-arranged exception."""
    raise self.expected_exception
10,
2,
10,
2,
1438953343
] |
def given_the_function_has_failed_three_times(self):
self.patch = mock.patch('time.perf_counter', return_value=0)
self.mock = self.patch.start()
self.x = 0
contexts.catch(self.function_to_break)
contexts.catch(self.function_to_break)
contexts.catch(self.function_to_break)... | benjamin-hodgson/poll | [
10,
2,
10,
2,
1438953343
] |
def it_should_throw_CircuitBrokenError(self):
    """Assert the open circuit rejects calls with CircuitBrokenError."""
    assert isinstance(self.exception, CircuitBrokenError)
10,
2,
10,
2,
1438953343
] |
def it_should_not_call_the_function(self):
    """Assert the wrapped function was never invoked while the circuit is open."""
    assert self.x == 0
10,
2,
10,
2,
1438953343
] |
def function_to_break(self):
    """Count the call, then always fail with ValueError."""
    self.x += 1
    raise ValueError
10,
2,
10,
2,
1438953343
] |
def given_the_circuit_was_about_to_be_broken(self):
self.patch = mock.patch('time.perf_counter', return_value=0)
self.mock = self.patch.start()
contexts.catch(self.function_to_break)
self.mock.return_value = 0.5
contexts.catch(self.function_to_break)
self.mock.return_valu... | benjamin-hodgson/poll | [
10,
2,
10,
2,
1438953343
] |
def it_should_have_decremented_the_failure_count(self):
    """Assert only the final failure tripped the circuit.

    The first two calls still bubble ValueError; presumably the elapsed
    time between them decremented the failure count, so the circuit
    only opens on the third — confirm against the breaker's timeout.
    """
    assert isinstance(self.exception1, ValueError)
    assert isinstance(self.exception2, ValueError)
    assert isinstance(self.exception3, CircuitBrokenError)
10,
2,
10,
2,
1438953343
] |
def function_to_break(self):
    """Always fail with ValueError."""
    raise ValueError
10,
2,
10,
2,
1438953343
] |
def given_the_circuit_was_broken_in_the_past(self):
self.x = 0
self.expected_return_value = "some thing that was returned"
self.patch = mock.patch('time.perf_counter', return_value=0)
self.mock = self.patch.start()
contexts.catch(self.function_to_break)
contexts.catch(sel... | benjamin-hodgson/poll | [
10,
2,
10,
2,
1438953343
] |
def it_should_call_the_function(self):
    """Assert calls reached the function again after the circuit reset
    (four invocations in total)."""
    assert self.x == 4
10,
2,
10,
2,
1438953343
] |
def cleanup_the_mock(self):
    """Cleanup: stop patching time.perf_counter."""
    self.patch.stop()
10,
2,
10,
2,
1438953343
] |
def function_to_break(self):
    """Fail on the first two invocations, then start succeeding.

    Increments the call counter each time; once it reaches three the
    canned return value is produced instead of ValueError.
    """
    self.x = self.x + 1
    if self.x >= 3:
        return self.expected_return_value
    raise ValueError
10,
2,
10,
2,
1438953343
] |
def given_the_circuit_was_broken_in_the_past(self):
self.x = 0
self.expected_exception = ValueError()
self.patch = mock.patch('time.perf_counter', return_value=0)
self.mock = self.patch.start()
contexts.catch(self.function_to_break)
contexts.catch(self.function_to_break)
... | benjamin-hodgson/poll | [
10,
2,
10,
2,
1438953343
] |
def it_should_call_the_function(self):
    """Assert calls reached the function again after the circuit reset
    (four invocations in total)."""
    assert self.x == 4
10,
2,
10,
2,
1438953343
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.