repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
st135yle/django-site | dbenv/lib/python3.4/site-packages/django/templatetags/i18n.py | 115 | 19351 | from __future__ import unicode_literals
import sys
from django.conf import settings
from django.template import Library, Node, TemplateSyntaxError, Variable
from django.template.base import TOKEN_TEXT, TOKEN_VAR, render_value_in_context
from django.template.defaulttags import token_kwargs
from django.utils import six, translation
from django.utils.safestring import SafeData, mark_safe
# Tag/filter registry for this template library. Django's template loader
# looks for a module-level ``register`` attribute when loading template tags.
register = Library()
class GetAvailableLanguagesNode(Node):
    """Template node that stores ``settings.LANGUAGES`` — as a list of
    ``(code, translated_name)`` pairs — in a context variable."""

    def __init__(self, variable):
        # Name of the context variable to populate.
        self.variable = variable

    def render(self, context):
        languages = []
        for code, name in settings.LANGUAGES:
            languages.append((code, translation.ugettext(name)))
        context[self.variable] = languages
        return ''
class GetLanguageInfoNode(Node):
    """Template node that resolves a language-code expression and stores
    the corresponding language-info dict in a context variable."""

    def __init__(self, lang_code, variable):
        # FilterExpression yielding the language code.
        self.lang_code = lang_code
        # Name of the context variable to populate.
        self.variable = variable

    def render(self, context):
        code = self.lang_code.resolve(context)
        info = translation.get_language_info(code)
        context[self.variable] = info
        return ''
class GetLanguageInfoListNode(Node):
    """Template node that maps a sequence of language codes (or
    settings.LANGUAGES-style ``(code, name)`` pairs) to their
    language-info dicts and stores the list in a context variable."""

    def __init__(self, languages, variable):
        self.languages = languages
        self.variable = variable

    def get_language_info(self, language):
        # ``language`` is either a language code string or a sequence
        # with the language code as its first item.
        if len(language[0]) > 1:
            # First element is a multi-character string, so ``language``
            # is a (code, name)-style pair: use its first item.
            return translation.get_language_info(language[0])
        # First element is a single character, so ``language`` itself is
        # the language code string.
        return translation.get_language_info(str(language))

    def render(self, context):
        codes = self.languages.resolve(context)
        infos = [self.get_language_info(code) for code in codes]
        context[self.variable] = infos
        return ''
class GetCurrentLanguageNode(Node):
    """Template node that stores the currently active language code in a
    context variable."""

    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        current = translation.get_language()
        context[self.variable] = current
        return ''
class GetCurrentLanguageBidiNode(Node):
    """Template node that stores whether the active language is written
    right-to-left (True) in a context variable."""

    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        bidi = translation.get_language_bidi()
        context[self.variable] = bidi
        return ''
class TranslateNode(Node):
    """Template node implementing ``{% trans %}``.

    Resolves ``filter_expression`` (translating it unless ``noop`` is set),
    optionally applying a gettext message context, and either returns the
    translated value or stores it in the ``asvar`` context variable.
    """

    def __init__(self, filter_expression, noop, asvar=None,
                 message_context=None):
        self.noop = noop
        self.asvar = asvar
        self.message_context = message_context
        self.filter_expression = filter_expression
        # A bare string was passed rather than a Variable; wrap it in quotes
        # so it resolves as a string literal instead of a context lookup.
        if isinstance(self.filter_expression.var, six.string_types):
            self.filter_expression.var = Variable("'%s'" %
                                                  self.filter_expression.var)

    def render(self, context):
        # Tell the Variable whether to run the string through gettext.
        self.filter_expression.var.translate = not self.noop
        if self.message_context:
            self.filter_expression.var.message_context = (
                self.message_context.resolve(context))
        output = self.filter_expression.resolve(context)
        value = render_value_in_context(output, context)
        # Restore percent signs. Percent signs in template text are doubled
        # so they are not interpreted as string format flags.
        is_safe = isinstance(value, SafeData)
        value = value.replace('%%', '%')
        # str.replace() returns a plain string; re-mark safe if it was before.
        value = mark_safe(value) if is_safe else value
        if self.asvar:
            context[self.asvar] = value
            return ''
        else:
            return value
class BlockTranslateNode(Node):
    """Template node implementing ``{% blocktrans %}``.

    Builds the gettext msgid from the singular (and, with a counter, the
    plural) token list, translates it, then interpolates the placeholder
    variables with their rendered values.
    """

    def __init__(self, extra_context, singular, plural=None, countervar=None,
                 counter=None, message_context=None, trimmed=False, asvar=None):
        self.extra_context = extra_context      # {name: FilterExpression} from "with"
        self.singular = singular                # token list for the singular form
        self.plural = plural                    # token list for the plural form
        self.countervar = countervar            # variable name given to "count"
        self.counter = counter                  # FilterExpression for the count value
        self.message_context = message_context  # FilterExpression from "context"
        self.trimmed = trimmed                  # whether "trimmed" was specified
        self.asvar = asvar                      # target variable name from "asvar"

    def render_token_list(self, tokens):
        """Build a gettext msgid from a token list.

        Returns ``(msg, vars)`` where ``msg`` contains ``%(name)s``
        placeholders and ``vars`` lists the placeholder names found.
        """
        result = []
        vars = []
        for token in tokens:
            if token.token_type == TOKEN_TEXT:
                # Double literal '%' so it survives the later '%'-formatting
                # pass in render().
                result.append(token.contents.replace('%', '%%'))
            elif token.token_type == TOKEN_VAR:
                result.append('%%(%s)s' % token.contents)
                vars.append(token.contents)
        msg = ''.join(result)
        if self.trimmed:
            msg = translation.trim_whitespace(msg)
        return msg, vars

    def render(self, context, nested=False):
        if self.message_context:
            message_context = self.message_context.resolve(context)
        else:
            message_context = None
        tmp_context = {}
        for var, val in self.extra_context.items():
            tmp_context[var] = val.resolve(context)
        # Update() works like a push(), so corresponding context.pop() is at
        # the end of function
        context.update(tmp_context)
        singular, vars = self.render_token_list(self.singular)
        if self.plural and self.countervar and self.counter:
            count = self.counter.resolve(context)
            # Expose the counter to the plural block under its given name.
            context[self.countervar] = count
            plural, plural_vars = self.render_token_list(self.plural)
            if message_context:
                result = translation.npgettext(message_context, singular,
                                               plural, count)
            else:
                result = translation.ungettext(singular, plural, count)
            vars.extend(plural_vars)
        else:
            if message_context:
                result = translation.pgettext(message_context, singular)
            else:
                result = translation.ugettext(singular)
        default_value = context.template.engine.string_if_invalid

        def render_value(key):
            # Missing placeholder variables fall back to the engine's
            # string_if_invalid setting (which may itself contain '%s').
            if key in context:
                val = context[key]
            else:
                val = default_value % key if '%s' in default_value else default_value
            return render_value_in_context(val, context)

        data = {v: render_value(v) for v in vars}
        context.pop()
        try:
            result = result % data
        except (KeyError, ValueError):
            if nested:
                # Either string is malformed, or it's a bug
                raise TemplateSyntaxError(
                    "'blocktrans' is unable to format string returned by gettext: %r using %r"
                    % (result, data)
                )
            # The translated string is malformed: retry once with translation
            # deactivated so the template still renders.
            with translation.override(None):
                result = self.render(context, nested=True)
        if self.asvar:
            context[self.asvar] = result
            return ''
        else:
            return result
class LanguageNode(Node):
    """Template node that renders its child nodelist with the given
    language activated for the duration of the block."""

    def __init__(self, nodelist, language):
        self.nodelist = nodelist
        self.language = language

    def render(self, context):
        lang = self.language.resolve(context)
        with translation.override(lang):
            return self.nodelist.render(context)
@register.tag("get_available_languages")
def do_get_available_languages(parser, token):
    """
    Store a list of available languages in the context.

    Usage::

        {% get_available_languages as languages %}
        {% for language in languages %}
        ...
        {% endfor %}

    This pulls the LANGUAGES setting from your settings file (or the
    default settings) and puts it into the named variable.
    """
    # token.split_contents() isn't useful here because this tag doesn't
    # accept variables as arguments.
    bits = token.contents.split()
    if len(bits) == 3 and bits[1] == 'as':
        return GetAvailableLanguagesNode(bits[2])
    raise TemplateSyntaxError("'get_available_languages' requires 'as variable' (got %r)" % bits)
@register.tag("get_language_info")
def do_get_language_info(parser, token):
    """
    Store the language information dictionary for the given language code
    in a context variable.

    Usage::

        {% get_language_info for LANGUAGE_CODE as l %}
        {{ l.code }}
        {{ l.name }}
        {{ l.name_translated }}
        {{ l.name_local }}
        {{ l.bidi|yesno:"bi-directional,uni-directional" }}
    """
    bits = token.split_contents()
    if len(bits) == 5 and bits[1] == 'for' and bits[3] == 'as':
        return GetLanguageInfoNode(parser.compile_filter(bits[2]), bits[4])
    raise TemplateSyntaxError("'%s' requires 'for string as variable' (got %r)" % (bits[0], bits[1:]))
@register.tag("get_language_info_list")
def do_get_language_info_list(parser, token):
    """
    Store a list of language information dictionaries for the given
    language codes in a context variable. The language codes can be
    specified either as a list of strings or a settings.LANGUAGES style
    list (or any sequence of sequences whose first items are language
    codes).

    Usage::

        {% get_language_info_list for LANGUAGES as langs %}
        {% for l in langs %}
          {{ l.code }}
          {{ l.name }}
          {{ l.name_translated }}
          {{ l.name_local }}
          {{ l.bidi|yesno:"bi-directional,uni-directional" }}
        {% endfor %}
    """
    bits = token.split_contents()
    if len(bits) == 5 and bits[1] == 'for' and bits[3] == 'as':
        return GetLanguageInfoListNode(parser.compile_filter(bits[2]), bits[4])
    raise TemplateSyntaxError("'%s' requires 'for sequence as variable' (got %r)" % (bits[0], bits[1:]))
@register.filter
def language_name(lang_code):
    """Return the English name of the language with the given code."""
    info = translation.get_language_info(lang_code)
    return info['name']
@register.filter
def language_name_translated(lang_code):
    """Return the language's English name translated into the currently
    active language."""
    info = translation.get_language_info(lang_code)
    return translation.ugettext(info['name'])
@register.filter
def language_name_local(lang_code):
    """Return the name of the language in the language itself."""
    info = translation.get_language_info(lang_code)
    return info['name_local']
@register.filter
def language_bidi(lang_code):
    """Return True if the language is written right-to-left."""
    info = translation.get_language_info(lang_code)
    return info['bidi']
@register.tag("get_current_language")
def do_get_current_language(parser, token):
    """
    Store the current language in the context.

    Usage::

        {% get_current_language as language %}

    Fetches the currently active language code and puts it into the
    ``language`` context variable.
    """
    # token.split_contents() isn't useful here because this tag doesn't
    # accept variables as arguments.
    bits = token.contents.split()
    if len(bits) == 3 and bits[1] == 'as':
        return GetCurrentLanguageNode(bits[2])
    raise TemplateSyntaxError("'get_current_language' requires 'as variable' (got %r)" % bits)
@register.tag("get_current_language_bidi")
def do_get_current_language_bidi(parser, token):
    """
    Store the current language layout in the context.

    Usage::

        {% get_current_language_bidi as bidi %}

    Fetches the currently active language's layout and puts it into the
    ``bidi`` context variable: True indicates right-to-left layout,
    otherwise left-to-right.
    """
    # token.split_contents() isn't useful here because this tag doesn't
    # accept variables as arguments.
    bits = token.contents.split()
    if len(bits) == 3 and bits[1] == 'as':
        return GetCurrentLanguageBidiNode(bits[2])
    raise TemplateSyntaxError("'get_current_language_bidi' requires 'as variable' (got %r)" % bits)
@register.tag("trans")
def do_translate(parser, token):
    """
    This will mark a string for translation and will
    translate the string for the current language.

    Usage::

        {% trans "this is a test" %}

    This will mark the string for translation so it will
    be pulled out by mark-messages.py into the .po files
    and will run the string through the translation engine.

    There is a second form::

        {% trans "this is a test" noop %}

    This will only mark for translation, but will return
    the string unchanged. Use it when you need to store
    values into forms that should be translated later on.

    You can use variables instead of constant strings
    to translate stuff you marked somewhere else::

        {% trans variable %}

    This will just try to translate the contents of
    the variable ``variable``. Make sure that the string
    in there is something that is in the .po file.

    It is possible to store the translated string into a variable::

        {% trans "this is a test" as var %}
        {{ var }}

    Contextual translations are also supported::

        {% trans "this is a test" context "greeting" %}

    This is equivalent to calling pgettext instead of (u)gettext.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    # Everything after the message is parsed as options below.
    remaining = bits[2:]

    noop = False
    asvar = None
    message_context = None
    # Options already consumed; used to reject duplicates.
    seen = set()
    # Words that cannot be the argument of 'context' (they are option
    # keywords themselves).
    invalid_context = {'as', 'noop'}

    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option,
            )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            try:
                value = remaining.pop(0)
            except IndexError:
                # Re-raise as a TemplateSyntaxError, preserving the traceback.
                msg = "No argument provided to the '%s' tag for the context option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]),
                )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the as option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", and 'as VAR'." % (
                    bits[0], option,
                )
            )
        seen.add(option)

    return TranslateNode(message_string, noop, asvar, message_context)
@register.tag("blocktrans")
def do_block_translate(parser, token):
    """
    This will translate a block of text with parameters.

    Usage::

        {% blocktrans with bar=foo|filter boo=baz|filter %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}

    Additionally, this supports pluralization::

        {% blocktrans count count=var|length %}
        There is {{ count }} object.
        {% plural %}
        There are {{ count }} objects.
        {% endblocktrans %}

    This is much like ngettext, only in template syntax.

    The "var as value" legacy format is still supported::

        {% blocktrans with foo|filter as bar and baz|filter as boo %}
        {% blocktrans count var|length as count %}

    The translated string can be stored in a variable using `asvar`::

        {% blocktrans with bar=foo|filter boo=baz|filter asvar var %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}
        {{ var }}

    Contextual translations are supported::

        {% blocktrans with bar=foo|filter context "greeting" %}
        This is {{ bar }}.
        {% endblocktrans %}

    This is equivalent to calling pgettext/npgettext instead of
    (u)gettext/(u)ngettext.
    """
    bits = token.split_contents()

    # Parse tag options: with / count / context / trimmed / asvar.
    options = {}
    remaining_bits = bits[1:]
    asvar = None
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more '
                                      'than once.' % option)
        if option == 'with':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least '
                                          'one keyword argument.' % bits[0])
        elif option == 'count':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if len(value) != 1:
                raise TemplateSyntaxError('"count" in %r tag expected exactly '
                                          'one keyword argument.' % bits[0])
        elif option == "context":
            try:
                value = remaining_bits.pop(0)
                value = parser.compile_filter(value)
            except Exception:
                # Re-raise as TemplateSyntaxError, preserving the traceback.
                msg = (
                    '"context" in %r tag expected '
                    'exactly one argument.') % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
        elif option == "trimmed":
            value = True
        elif option == "asvar":
            try:
                value = remaining_bits.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the asvar option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
                                      (bits[0], option))
        options[option] = value

    # 'count' maps exactly one variable name to a FilterExpression.
    if 'count' in options:
        countervar, counter = list(options['count'].items())[0]
    else:
        countervar, counter = None, None
    if 'context' in options:
        message_context = options['context']
    else:
        message_context = None
    extra_context = options.get('with', {})

    trimmed = options.get("trimmed", False)

    # Consume the tag body: text/var tokens up to {% plural %} (only valid
    # when a counter was given) and then {% endblocktrans %}.
    singular = []
    plural = []
    while parser.tokens:
        token = parser.next_token()
        if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
            singular.append(token)
        else:
            break
    if countervar and counter:
        if token.contents.strip() != 'plural':
            raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it")
        while parser.tokens:
            token = parser.next_token()
            if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
                plural.append(token)
            else:
                break
    if token.contents.strip() != 'endblocktrans':
        raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents)

    return BlockTranslateNode(extra_context, singular, plural, countervar,
                              counter, message_context, trimmed=trimmed,
                              asvar=asvar)
@register.tag
def language(parser, token):
    """
    Enable the given language just for this block.

    Usage::

        {% language "de" %}
            This is {{ bar }} and {{ boo }}.
        {% endlanguage %}
    """
    bits = token.split_contents()
    if len(bits) != 2:
        raise TemplateSyntaxError("'%s' takes one argument (language)" % bits[0])
    lang_expr = parser.compile_filter(bits[1])
    body = parser.parse(('endlanguage',))
    # Consume the {% endlanguage %} token itself.
    parser.delete_first_token()
    return LanguageNode(body, lang_expr)
| mit |
nest/nest-simulator | pynest/nest/tests/test_regression_issue-1034.py | 11 | 13081 | # -*- coding: utf-8 -*-
#
# test_regression_issue-1034.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
# Please see `doc/userdoc/model_details/test_post_trace.ipynb` for a version of this
# test that includes more documentation and plotting.
import nest
import numpy as np
import scipy as sp
import scipy.stats
import unittest
class PostTraceTester(object):
    '''Test that postsynaptic trace values returned from NEST are consistent
    with reference values generated in Python.

    For more information, please see the Jupyter notebook in
    `doc/userdoc/model_details/test_post_trace.ipynb`.
    '''

    def __init__(self, pre_spike_times, post_spike_times, delay, resolution,
                 tau_minus, trace_match_atol, trace_match_rtol):
        # Spike generator times [ms] for pre- and postsynaptic neurons.
        self.pre_spike_times_ = pre_spike_times
        self.post_spike_times_ = post_spike_times
        # Synaptic delay [ms]; the dendritic delay is set equal to it here.
        self.delay_ = delay
        self.dendritic_delay_ = delay
        self.resolution_ = resolution
        # Time constant of the postsynaptic trace [ms].
        self.tau_minus_ = tau_minus
        # Tolerances used when comparing NEST samples to the reference.
        self.trace_match_atol_ = trace_match_atol
        self.trace_match_rtol_ = trace_match_rtol
        self.max_t_sp_ = max(np.amax(self.pre_spike_times_),
                             np.amax(self.post_spike_times_))
        # Simulate a few delays past the last spike so all deliveries finish.
        self.sim_time_ = self.max_t_sp_ + 5 * self.delay_

    def run_post_trace_test_nest_(self,
                                  show_all_nest_trace_samples=False):
        """Run the NEST simulation and sample the postsynaptic trace.

        Returns ``(trace_nest_t, trace_nest)``: sample times and the
        corresponding 'post_trace' values read from the parrot neuron.
        """
        nest.set_verbosity("M_WARNING")
        nest.ResetKernel()
        nest.SetKernelStatus({'resolution': self.resolution_})
        wr = nest.Create('weight_recorder')
        nest.CopyModel("stdp_synapse", "stdp_synapse_rec",
                       {"weight_recorder": wr, "weight": 1.})
        # create spike_generators with these times
        pre_sg_ps = nest.Create("spike_generator",
                                params={"spike_times": self.pre_spike_times_,
                                        'precise_times': True})
        post_sg_ps = nest.Create("spike_generator",
                                 params={"spike_times": self.post_spike_times_,
                                         'precise_times': True})
        # create parrot neurons and connect spike_generators
        pre_parrot_ps = nest.Create("parrot_neuron_ps")
        post_parrot_ps = nest.Create("parrot_neuron_ps",
                                     params={"tau_minus": self.tau_minus_})
        nest.Connect(pre_sg_ps, pre_parrot_ps,
                     syn_spec={"delay": self.delay_})
        nest.Connect(post_sg_ps, post_parrot_ps,
                     syn_spec={"delay": self.delay_})
        # create spike recorder --- debugging only
        spikes = nest.Create("spike_recorder")
        nest.Connect(pre_parrot_ps + post_parrot_ps, spikes)
        # connect both parrot neurons with a stdp synapse onto port 1
        # thereby spikes transmitted through the stdp connection are
        # not repeated postsynaptically.
        nest.Connect(
            pre_parrot_ps, post_parrot_ps,
            syn_spec={'synapse_model': 'stdp_synapse_rec',
                      'receptor_type': 1,
                      'delay': self.delay_})
        # get STDP synapse
        syn_ps = nest.GetConnections(source=pre_parrot_ps,
                                     synapse_model="stdp_synapse_rec")
        print("[py] Total simulation time: " + str(self.sim_time_) + " ms")
        # Simulate in steps of one delay so the trace can be sampled.
        n_steps = int(np.ceil(self.sim_time_ / self.delay_))
        trace_nest = []
        trace_nest_t = []
        # Record the initial trace value before simulating.
        t = nest.GetKernelStatus("biological_time")
        trace_nest_t.append(t)
        post_tr = nest.GetStatus(post_parrot_ps)[0]['post_trace']
        trace_nest.append(post_tr)
        for step in range(n_steps):
            print("\n[py] simulating for " + str(self.delay_) + " ms")
            nest.Simulate(self.delay_)
            t = nest.GetKernelStatus("biological_time")
            # The trace is only updated on presynaptic spike arrival, so
            # keep only samples taken near a pre spike (unless asked to
            # keep every sample).
            nearby_pre_spike = np.any(
                np.abs(t - np.array(self.pre_spike_times_) - self.delay_) < self.resolution_ / 2.)
            if show_all_nest_trace_samples or nearby_pre_spike:
                trace_nest_t.append(t)
                post_tr = nest.GetStatus(post_parrot_ps)[0]['post_trace']
                trace_nest.append(post_tr)
                print("[py] Received NEST trace: " +
                      str(post_tr) + " at time t = " + str(t))
        return trace_nest_t, trace_nest

    def run_post_trace_test_python_reference_(self, debug=False):
        """
        compute Python known-good reference of postsynaptic trace
        """
        # Dense time grid: 1000 samples per millisecond of simulated time.
        n_timepoints = int(np.ceil(1000 * self.sim_time_))
        trace_python_ref = np.zeros(n_timepoints)
        for post_spike_time in self.post_spike_times_:
            # Effective trace-update time as seen by the synapse: spike
            # generator delay plus dendritic delay.
            t_sp = post_spike_time + self.delay_ + self.dendritic_delay_
            for i in range(n_timepoints):
                t = (i / float(n_timepoints - 1)) * self.sim_time_
                if t > t_sp:
                    trace_python_ref[i] += np.exp(-(t - t_sp) / self.tau_minus_)
        for pre_spike_time in self.pre_spike_times_:
            t_sp = pre_spike_time + self.delay_
            i = int(np.round(t_sp / self.sim_time_ * float(len(trace_python_ref) - 1)))
            if debug:
                print("* At t_sp = " + str(t_sp) + ", post_trace should be " + str(trace_python_ref[i]))
        return trace_python_ref

    def nest_trace_matches_ref_trace_(self, trace_nest_t, trace_nest,
                                      trace_python_ref, debug=True):
        """
        Trace values are returned from NEST at regular intervals, but only
        updated at presynaptic spike times.

        To match the NEST samples with the continuous reference trace, step
        backwards in time from the sampled value, to find the last time at
        which the trace value was updated, namely the time of occurrence of
        the last presynaptic spike.
        """
        # Skip the first sample: it is the initial (pre-simulation) value.
        for t, trace_nest_val in zip(trace_nest_t[1:], trace_nest[1:]):
            if debug:
                print("* Finding ref for NEST timepoint t = " + str(t) + ", trace = " + str(trace_nest_val))
            traces_match = False
            # Walk pre-spike arrival times backwards to find the most
            # recent trace update at or before sample time t.
            for i_search, t_search in enumerate(
                    reversed(np.array(self.pre_spike_times_) + self.delay_)):
                if t_search <= t:
                    # Reference value at that last update time.
                    _trace_at_t_search = trace_python_ref[int(np.round(
                        t_search / self.sim_time_ * float(len(trace_python_ref) - 1)))]
                    traces_match = np.allclose(
                        _trace_at_t_search,
                        trace_nest_val,
                        atol=self.trace_match_atol_,
                        rtol=self.trace_match_rtol_)
                    post_spike_occurred_at_t_search = np.any(
                        (t_search - (np.array(self.post_spike_times_) + self.delay_ + self.dendritic_delay_))**2 <
                        self.resolution_ / 2.)
                    if debug:
                        print("\t* Testing " + str(t_search) + "...")
                        print("\t traces_match = " + str(traces_match))
                        print("\t post_spike_occurred_at_t_search = " + str(post_spike_occurred_at_t_search))
                    # When a post spike coincides with the update time, the
                    # sampled value may be off by +1 or -1 depending on
                    # update ordering; accept either alternative.
                    if (not traces_match) and post_spike_occurred_at_t_search:
                        traces_match = np.allclose(
                            _trace_at_t_search + 1,
                            trace_nest_val,
                            atol=self.trace_match_atol_,
                            rtol=self.trace_match_rtol_)
                        if debug:
                            print("\t traces_match = " + str(traces_match) + " (nest trace = " +
                                  str(trace_nest_val) + ", ref trace = " + str(_trace_at_t_search + 1) + ")")
                        if traces_match:
                            _trace_at_t_search += 1.
                    if (not traces_match) and post_spike_occurred_at_t_search:
                        traces_match = np.allclose(
                            _trace_at_t_search - 1,
                            trace_nest_val,
                            atol=self.trace_match_atol_,
                            rtol=self.trace_match_rtol_)
                        if debug:
                            print("\t traces_match = " + str(traces_match) + " (nest trace = " +
                                  str(trace_nest_val) + ", ref trace = " + str(_trace_at_t_search - 1) + ")")
                        if traces_match:
                            _trace_at_t_search -= 1.
                    break
                if ((not traces_match) and i_search == len(self.pre_spike_times_) - 1):
                    if debug:
                        print("\tthe time before the first pre spike")
                    # the time before the first pre spike
                    traces_match = trace_nest_val == 0.
            if not traces_match:
                return False
        return True

    def nest_trace_matches_python_trace(self):
        """Run the NEST simulation and the Python reference and compare
        the resulting traces; return True on a full match."""
        trace_nest_t, trace_nest = self.run_post_trace_test_nest_()
        trace_python_ref = self.run_post_trace_test_python_reference_()
        return self.nest_trace_matches_ref_trace_(
            trace_nest_t,
            trace_nest,
            trace_python_ref)
class PostTraceTestCase(unittest.TestCase):
    # Regression test for NEST issue #1034: checks the postsynaptic trace
    # against an independently computed Python reference.

    def test_post_trace(self):
        """
        construct a network of the form:
        - pre_spike_gen connects via static_synapse to pre_parrot
        - pre_parrot connects via stdp_synapse to post_parrot
        - post_spike_gen connects via static_synapse to post_parrot

        The spike times of the spike generators are defined in
        `pre_spike_times` and `post_spike_times`. From the perspective of the
        STDP synapse, spikes arrive with the following delays (with respect to
        the values in these lists):

        - for the presynaptic neuron: one synaptic delay in the static synapse
        - for the postsynaptic neuron: one synaptic delay in the static synapse
        - for the synapse itself: one dendritic delay between the post_parrot
          node and the synapse itself (see the C++ variable `dendritic_delay`).
        """
        resolution = .1  # [ms]
        delays = np.array([1., 5.])  # [ms]

        # spike test pattern 1: minimal reproducing example of the original bug
        pre_spike_times1 = np.array([2., 3., 10.])
        post_spike_times1 = np.array([1., 2., 3.])

        # spike test pattern 2: generate some random integer spike times
        t_sp_min = 1.
        t_sp_max = 50
        n_spikes = 10
        pre_spike_times2 = np.sort(
            np.unique(
                np.ceil(
                    sp.stats.uniform.rvs(
                        t_sp_min, t_sp_max - t_sp_min, n_spikes))))
        n_spikes = 50
        post_spike_times2 = np.sort(
            np.unique(
                np.ceil(
                    sp.stats.uniform.rvs(
                        t_sp_min, t_sp_max - t_sp_min, n_spikes))))
        tau_minus = 2.  # [ms]

        # for each parameter set, run the test
        # spike test pattern 3 is a pre/post-reversed version of test pattern 2
        pre_spike_times = [pre_spike_times1,
                           pre_spike_times2,
                           post_spike_times2]
        post_spike_times = [post_spike_times1,
                            post_spike_times2,
                            pre_spike_times2]

        for pre_spike_time, post_spike_time in zip(pre_spike_times,
                                                   post_spike_times):
            print("Pre spike times: [" + ", ".join([str(t) for t in pre_spike_time]) + "]")
            print("Post spike times: [" + ", ".join([str(t) for t in post_spike_time]) + "]")

            for delay in delays:
                # Each tester resets the NEST kernel and builds its own
                # network, so iterations are independent.
                test = PostTraceTester(
                    pre_spike_times=pre_spike_time,
                    post_spike_times=post_spike_time,
                    delay=delay,
                    resolution=resolution,
                    tau_minus=tau_minus,
                    trace_match_atol=1E-3,
                    trace_match_rtol=1E-3)
                self.assertTrue(test.nest_trace_matches_python_trace())
def suite():
    """Build the unittest suite containing PostTraceTestCase."""
    loader = unittest.TestLoader()
    cases = loader.loadTestsFromTestCase(PostTraceTestCase)
    return unittest.TestSuite([cases])
if __name__ == "__main__":
    # Run the suite with verbose per-test output.
    unittest.TextTestRunner(verbosity=2).run(suite())
| gpl-2.0 |
thomasrogers03/phantomjs | src/breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/containers.py | 261 | 9573 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains container classes to represent different protocol buffer types.
This file defines container classes which represent categories of protocol
buffer field types which need extra maintenance. Currently these categories
are:
- Repeated scalar fields - These are all repeated fields which aren't
composite (e.g. they are of simple types like int32, string, etc).
- Repeated composite fields - Repeated fields which are composite. This
includes groups and nested messages.
"""
__author__ = 'petar@google.com (Petar Petrov)'
class BaseContainer(object):
    """Base container class for repeated protocol buffer fields.

    Wraps a plain list (``_values``) and holds the listener that concrete
    subclasses notify (via ``Modified()``) whenever the list changes.
    """

    # Minimizes memory usage and disallows assignment to other attributes.
    __slots__ = ['_message_listener', '_values']

    def __init__(self, message_listener):
        """
        Args:
          message_listener: A MessageListener implementation.
            The RepeatedScalarFieldContainer will call this object's
            Modified() method when it is modified.
        """
        self._message_listener = message_listener
        self._values = []

    def __getitem__(self, key):
        """Retrieves item by the specified key."""
        return self._values[key]

    def __len__(self):
        """Returns the number of elements in the container."""
        return len(self._values)

    def __ne__(self, other):
        """Checks if another instance isn't equal to this one."""
        # The concrete classes should define __eq__.
        return not self == other

    def __hash__(self):
        raise TypeError('unhashable object')

    def __repr__(self):
        return repr(self._values)

    def sort(self, sort_function=None):
        """Sorts the underlying values in place.

        Bug fix: the default used to be the builtin ``cmp``, which is
        evaluated at class-definition time and does not exist on Python 3,
        making this module unimportable there.  ``None`` preserves the old
        default behavior (plain comparison sort) on both Python 2 and 3,
        and an explicit comparison function is still forwarded positionally
        for Python 2 callers.
        """
        if sort_function is None:
            self._values.sort()
        else:
            self._values.sort(sort_function)
class RepeatedScalarFieldContainer(BaseContainer):
    """Simple, type-checked, list-like container for holding repeated scalars."""

    # Disallows assignment to other attributes.
    __slots__ = ['_type_checker']

    def __init__(self, message_listener, type_checker):
        """
        Args:
          message_listener: A MessageListener implementation.
            The RepeatedScalarFieldContainer will call this object's
            Modified() method when it is modified.
          type_checker: A type_checkers.ValueChecker instance to run on elements
            inserted into this container.
        """
        super(RepeatedScalarFieldContainer, self).__init__(message_listener)
        self._type_checker = type_checker

    def append(self, value):
        """Appends an item to the list. Similar to list.append()."""
        self._type_checker.CheckValue(value)
        self._values.append(value)
        # Only notify on the first modification; the listener stays dirty
        # until it is explicitly cleared.
        if not self._message_listener.dirty:
            self._message_listener.Modified()

    def insert(self, key, value):
        """Inserts the item at the specified position. Similar to list.insert()."""
        self._type_checker.CheckValue(value)
        self._values.insert(key, value)
        if not self._message_listener.dirty:
            self._message_listener.Modified()

    def extend(self, elem_seq):
        """Extends by appending the given sequence. Similar to list.extend()."""
        if not elem_seq:
            return
        # Type-check every element before mutating, so a failure leaves the
        # container unchanged.
        new_values = []
        for elem in elem_seq:
            self._type_checker.CheckValue(elem)
            new_values.append(elem)
        self._values.extend(new_values)
        self._message_listener.Modified()

    def MergeFrom(self, other):
        """Appends the contents of another repeated field of the same type to this
        one. We do not check the types of the individual fields.
        """
        self._values.extend(other._values)
        self._message_listener.Modified()

    def remove(self, elem):
        """Removes an item from the list. Similar to list.remove()."""
        self._values.remove(elem)
        self._message_listener.Modified()

    def __setitem__(self, key, value):
        """Sets the item on the specified position."""
        self._type_checker.CheckValue(value)
        self._values[key] = value
        self._message_listener.Modified()

    def __getslice__(self, start, stop):
        """Retrieves the subset of items from between the specified indices."""
        # NOTE: __getslice__/__setslice__/__delslice__ are the Python 2-only
        # slicing protocol; Python 3 routes slices through __getitem__ etc.
        return self._values[start:stop]

    def __setslice__(self, start, stop, values):
        """Sets the subset of items from between the specified indices."""
        new_values = []
        for value in values:
            self._type_checker.CheckValue(value)
            new_values.append(value)
        self._values[start:stop] = new_values
        self._message_listener.Modified()

    def __delitem__(self, key):
        """Deletes the item at the specified position."""
        del self._values[key]
        self._message_listener.Modified()

    def __delslice__(self, start, stop):
        """Deletes the subset of items from between the specified indices."""
        del self._values[start:stop]
        self._message_listener.Modified()

    def __eq__(self, other):
        """Compares the current instance with another one."""
        if self is other:
            return True
        # Special case for the same type which should be common and fast.
        if isinstance(other, self.__class__):
            return other._values == self._values
        # We are presumably comparing against some other sequence type.
        return other == self._values
class RepeatedCompositeFieldContainer(BaseContainer):

    """Simple, list-like container for holding repeated composite fields."""

    # Disallow assignment to any attribute other than the descriptor.
    __slots__ = ['_message_descriptor']

    def __init__(self, message_listener, message_descriptor):
        """Create an empty container.

        We take a descriptor rather than the generated class directly,
        because when this container is constructed the contained type may
        not have been initialized yet.

        Args:
          message_listener: A MessageListener implementation; its
            Modified() method is invoked whenever this container changes.
          message_descriptor: Descriptor for the protocol type held here;
            its _concrete_class field is used by add().
        """
        super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
        self._message_descriptor = message_descriptor

    def add(self, **kwargs):
        """Append a freshly constructed element and return it.  Keyword
        arguments initialize the new element's fields."""
        element = self._message_descriptor._concrete_class(**kwargs)
        element._SetListener(self._message_listener)
        self._values.append(element)
        if not self._message_listener.dirty:
            self._message_listener.Modified()
        return element

    def extend(self, elem_seq):
        """Append a copy of every message in *elem_seq*; each message is
        cloned via MergeFrom rather than shared."""
        message_class = self._message_descriptor._concrete_class
        for message in elem_seq:
            clone = message_class()
            clone._SetListener(self._message_listener)
            clone.MergeFrom(message)
            self._values.append(clone)
        self._message_listener.Modified()

    def MergeFrom(self, other):
        """Append copies of the contents of another repeated field of the
        same type."""
        self.extend(other._values)

    def __getslice__(self, start, stop):
        """Return the elements between *start* and *stop* (Python 2 slice
        protocol)."""
        return self._values[slice(start, stop)]

    def __delitem__(self, key):
        """Remove the element at *key* and notify the listener."""
        del self._values[key]
        self._message_listener.Modified()

    def __delslice__(self, start, stop):
        """Remove the elements between *start* and *stop* (Python 2 slice
        protocol)."""
        del self._values[slice(start, stop)]
        self._message_listener.Modified()

    def __eq__(self, other):
        """Compare with another container of the same type; comparison
        against anything else is an error."""
        if self is other:
            return True
        if not isinstance(other, self.__class__):
            raise TypeError('Can only compare repeated composite fields against '
                            'other repeated composite fields.')
        return self._values == other._values
| bsd-3-clause |
holytortoise/abwreservierung | src/reservierung/views.py | 1 | 19052 | from django.shortcuts import render
from django.views.generic import TemplateView, ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView, FormView
from django.views.generic.dates import WeekArchiveView
from django.urls import reverse_lazy, reverse
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from django import forms as d_forms
import datetime
from . import forms
from . import models
# Create your views here.
class ReservierungList(ListView):
    """Read-only list of all reservations, earliest first."""
    # Ordered by start date, then start time; exposed to the template as
    # ``reservierungen``.
    queryset = models.Reservierung.objects.order_by('anfangsDatum', 'anfangsZeit')
    context_object_name = 'reservierungen'
class ReservierungUpdate(LoginRequiredMixin, UpdateView):
    """Edit an existing reservation; login required."""
    login_url = 'account:login'
    redirect_field_name = 'redirect_to'
    model = models.Reservierung
    # Only these model fields are editable through the form.
    fields = ['reserviert_für', 'reservierterRaum', 'reservierungsGrund', 'anfangsDatum',
              'endDatum', 'anfangsZeit', 'endZeit']
class ReservierungDelete(LoginRequiredMixin, DeleteView):
    """Delete a reservation after confirmation; login required."""
    login_url = 'account:login'
    redirect_field_name = 'redirect_to'
    model = models.Reservierung
    # Back to the overview once the object is gone.
    success_url = reverse_lazy('reservierung:reservierung-list')
    template_name = 'reservierung/reservierung_delete.html'
class ReservierungDetail(DetailView):
    """Detail page for a single reservation."""
    model = models.Reservierung
    context_object_name = 'reservierung'
    template_name = 'reservierung/reservierung_detail.html'
# View für das Darstellen der Reservierungen für die aktuelle Woche
def index(request):
    """
    Render the reservation table for one ISO week on the index page and
    let the user page through the weeks.

    GET shows the current week; POST carries ``jahr``/``woche`` plus a
    ``next_week`` or ``last_week`` button press that steps one week
    forwards or backwards, rolling over year boundaries.
    """
    current_week = datetime.date.today().isocalendar()[1]
    current_year = datetime.date.today().isocalendar()[0]
    if request.method == 'POST':
        jahr = int(request.POST['jahr'])
        woche = int(request.POST['woche'])
        # Right-hand button: one week forward.  Dec 28 is always in the
        # last ISO week of its year, so it gives us the week count.
        if 'next_week' in request.POST:
            if woche == datetime.date(jahr, 12, 28).isocalendar()[1]:
                woche = 1
                jahr = jahr + 1
            else:
                woche = woche + 1
        # Left-hand button: one week back.
        if 'last_week' in request.POST:
            if woche == 1:
                jahr = jahr - 1
                woche = datetime.date(jahr, 12, 28).isocalendar()[1]
            else:
                woche = woche - 1
    else:
        jahr = datetime.date.today().isocalendar()[0]
        woche = datetime.date.today().isocalendar()[1]
    # True exactly when the displayed week is the real current week.
    is_week = (woche == current_week and jahr == current_year)
    # Monday/Sunday of the displayed week, formatted for the header.
    datum = str(jahr) + '-W' + str(woche)
    r = datetime.datetime.strptime(datum + '-0', "%Y-W%W-%w")
    start = r - datetime.timedelta(days=r.weekday())
    end = start + datetime.timedelta(days=6)
    start = start.strftime('%d.%m')
    end = end.strftime('%d.%m')
    rooms = models.Raum.objects.all()
    rooms_return = []
    # BUG FIX: 'reservierungen' used to be bound only inside the loop, so
    # an empty room table made the context dict below raise NameError.
    reservierungen = None
    for room in rooms:
        room_return = []
        reservierungen = models.Reservierung.objects.filter(
            reservierterRaum=room).order_by('anfangsDatum')
        for reservierung in reservierungen:
            # Reservation spans the whole displayed week ...
            if reservierung.anfangsDatum.isocalendar()[1] < woche and woche < reservierung.endDatum.isocalendar()[1]:
                room_return.append(reservierung)
            # ... or starts or ends inside it.
            if ((reservierung.anfangsDatum.isocalendar()[1] == woche and reservierung.anfangsDatum.isocalendar()[0] == jahr)
                    or (reservierung.endDatum.isocalendar()[1] == woche and reservierung.endDatum.isocalendar()[0] == jahr)):
                room_return.append(reservierung)
        if len(room_return) != 0:
            rooms_return.append(room_return)
    if len(rooms_return) == 0:
        rooms_return = None
    context_dict = {'rooms_return': rooms_return, 'reserv': reservierungen,
                    'woche': woche, 'jahr': jahr, 'current_week': current_week,
                    'current_year': current_year, 'is_week': is_week,
                    'start': start, 'end': end}
    return render(request, 'index.html', context_dict)
# View um Reservierungen zu erstellen
@login_required(login_url='account:login')
def reservierung_form(request):
    """
    Handle creation of new reservations.

    Checks whether the requested room is available for the requested
    period.  If so, a new reservation is stored and the user is
    redirected to the index page.  If not, the view suggests alternative
    rooms that are free at the requested time and shows the conflicting
    reservation.

    Naming used in the comments below: ``r.`` refers to an existing
    reservation, ``f.`` to the submitted form data.
    """
    nutzer = request.user
    free_rooms = None   # alternative rooms offered on conflict
    reserv = None       # the conflicting (or newly created) reservation
    moeglich = False    # "possible": True when the requested slot is free
    if request.method == 'POST':
        form = forms.ReservierungForm(data=request.POST)
        if form.is_valid():
            free_rooms = []
            reservierungen = models.Reservierung.objects.filter(
                reservierterRaum=form.cleaned_data.get("reservierterRaum"))
            if reservierungen.exists():
                for reservierung in reservierungen:
                    if reservierung.täglich:
                        # Does f.anfangsDatum fall inside this existing
                        # (daily) reservation's date span?
                        if reservierung.anfangsDatum < form.cleaned_data.get("anfangsDatum") and form.cleaned_data.get("anfangsDatum") < reservierung.endDatum:
                            # Is the requested reservation daily as well?
                            if form.cleaned_data.get("täglich"):
                                # Free if r.endZeit is before f.anfangsZeit
                                # or r.anfangsZeit is after f.endZeit.
                                if reservierung.endZeit <= form.cleaned_data.get("anfangsZeit") or reservierung.anfangsZeit >= form.cleaned_data.get("endZeit"):
                                    # No overlap: reservation possible.
                                    moeglich = True
                                else:
                                    moeglich = False
                                    reserv = reservierung
                                    break
                            else:
                                if reservierung.endZeit <= form.cleaned_data.get("anfangsZeit"):
                                    moeglich = True
                                elif reservierung.anfangsZeit >= form.cleaned_data.get("endZeit"):
                                    moeglich = True
                                else:
                                    # Requested slot is all-day:
                                    # not possible.
                                    moeglich = False
                                    reserv = reservierung
                                    break
                        else:
                            # Does f.anfangsDatum lie after r.endDatum?
                            if reservierung.endDatum < form.cleaned_data.get("anfangsDatum"):
                                moeglich = True
                            # r.endDatum and f.anfangsDatum fall on the
                            # same day.
                            elif reservierung.endDatum == form.cleaned_data.get("anfangsDatum"):
                                # r.endZeit before f.anfangsZeit?
                                if reservierung.endZeit <= form.cleaned_data.get("anfangsZeit"):
                                    # Reservation possible.
                                    moeglich = True
                                # r.anfangsZeit after f.endZeit?
                                elif reservierung.anfangsZeit >= form.cleaned_data.get("endZeit"):
                                    # Reservation possible.
                                    moeglich = True
                                else:
                                    # Reservation not possible.
                                    moeglich = False
                                    reserv = reservierung
                                    break
                            # r.anfangsDatum and f.endDatum on the same day.
                            elif reservierung.anfangsDatum == form.cleaned_data.get("endDatum"):
                                if reservierung.endZeit <= form.cleaned_data.get("anfangsZeit"):
                                    # Reservation possible.
                                    moeglich = True
                                elif reservierung.anfangsZeit >= form.cleaned_data.get("endZeit"):
                                    # Reservation possible.
                                    moeglich = True
                                else:
                                    moeglich = False
                                    reserv = reservierung
                                    break
                    else:
                        if reservierung.anfangsDatum < form.cleaned_data.get("anfangsDatum") and form.cleaned_data.get("anfangsDatum") < reservierung.endDatum:
                            # Conflict: show an error, the available rooms
                            # and the blocking reservation.
                            moeglich = False
                            reserv = reservierung
                            break
                        else:
                            # The existing reservation ends before the new
                            # one begins.
                            if reservierung.endDatum < form.cleaned_data.get("anfangsDatum"):
                                moeglich = True
                            # Existing end and new start on the same day.
                            elif reservierung.endDatum == form.cleaned_data.get("anfangsDatum"):
                                # Existing end time at or before the new
                                # start time.
                                if reservierung.endZeit <= form.cleaned_data.get("anfangsZeit"):
                                    moeglich = True
                                elif reservierung.anfangsZeit >= form.cleaned_data.get("endZeit"):
                                    moeglich = True
                                else:
                                    moeglich = False
                                    reserv = reservierung
                                    break
                            elif reservierung.anfangsDatum > form.cleaned_data.get("endDatum"):
                                moeglich = True
                            elif reservierung.anfangsDatum == form.cleaned_data.get("endDatum"):
                                if reservierung.anfangsZeit > form.cleaned_data.get("endZeit"):
                                    moeglich = True
                                else:
                                    moeglich = False
                                    reserv = reservierung
                                    break
            else:
                # No reservations at all for this room: always possible.
                moeglich = True
            if moeglich:
                reserv = models.Reservierung()
                reserv.reserviert_von = request.user
                if form.cleaned_data.get("reserviertFür") == "":
                    reserv.reserviert_für = request.user.last_name
                else:
                    reserv.reserviert_für = form.cleaned_data.get("reserviertFür")
                reserv.reservierterRaum = models.Raum.objects.get(
                    id=form.cleaned_data.get("reservierterRaum"))
                reserv.reservierungsGrund = form.cleaned_data.get(
                    "reservierungsGrund")
                reserv.anfangsDatum = form.cleaned_data.get("anfangsDatum")
                reserv.endDatum = form.cleaned_data.get("endDatum")
                reserv.anfangsZeit = form.cleaned_data.get("anfangsZeit")
                reserv.endZeit = form.cleaned_data.get("endZeit")
                reserv.täglich = form.cleaned_data.get("täglich")
                reserv.save()
                return HttpResponseRedirect(reverse('reservierung:index'))
            else:
                # Conflict: compute the free rooms by inspecting the
                # reservations of every other room.
                rooms = models.Raum.objects.exclude(
                    id=form.cleaned_data.get("reservierterRaum"))
                if rooms.exists():
                    for room in rooms:
                        room_reservs = models.Reservierung.objects.filter(
                            reservierterRaum=room)
                        # Are there reservations for this room?
                        if room_reservs.exists():
                            # Check every reservation of the room.
                            free_room = False
                            for room_reserv in room_reservs:
                                # Does the request fall inside an existing
                                # reservation?
                                if form.cleaned_data.get("täglich"):
                                    if room_reserv.anfangsDatum < form.cleaned_data.get("anfangsDatum") and form.cleaned_data.get("anfangsDatum") < room_reserv.endDatum:
                                        if room_reserv.täglich:
                                            if room_reserv.endZeit <= form.cleaned_data.get("anfangsZeit") or room_reserv.anfangsZeit > form.cleaned_data.get("endZeit"):
                                                free_room = True
                                            else:
                                                free_room = False
                                                break
                                        else:
                                            free_room = False
                                            break
                                    else:
                                        if room_reserv.endDatum < form.cleaned_data.get("anfangsDatum"):
                                            free_room = True
                                        elif room_reserv.endDatum == form.cleaned_data.get("anfangsDatum"):
                                            if room_reserv.endZeit <= form.cleaned_data.get("anfangsZeit"):
                                                free_room = True
                                            elif room_reserv.anfangsZeit >= form.cleaned_data.get("endZeit"):
                                                free_room = True
                                            else:
                                                free_room = False
                                                break
                                        elif room_reserv.anfangsDatum == form.cleaned_data.get("endDatum"):
                                            if room_reserv.endZeit <= form.cleaned_data.get("anfangsZeit"):
                                                free_room = True
                                            elif room_reserv.anfangsZeit >= form.cleaned_data.get("endZeit"):
                                                free_room = True
                                            else:
                                                free_room = False
                                                break
                                else:
                                    if room_reserv.anfangsDatum < form.cleaned_data.get("anfangsDatum") and form.cleaned_data.get("anfangsDatum") < room_reserv.endDatum:
                                        # Yes: the room is not free.
                                        free_room = False
                                        break
                                    else:
                                        # No: the room may be free; check
                                        # whether f.anfangsDatum is on or
                                        # after r.endDatum.
                                        if room_reserv.endDatum < form.cleaned_data.get("anfangsDatum"):
                                            # Room free.
                                            free_room = True
                                        elif room_reserv.endDatum == form.cleaned_data.get("anfangsDatum"):
                                            # Same day.
                                            if room_reserv.endZeit <= form.cleaned_data.get("anfangsZeit"):
                                                # Room free.
                                                free_room = True
                                            else:
                                                # Room is not free.
                                                free_room = False
                                                break
                                        elif room_reserv.anfangsDatum > form.cleaned_data.get("endDatum"):
                                            # Room free.
                                            free_room = True
                                        elif room_reserv.anfangsDatum == form.cleaned_data.get("endDatum"):
                                            if room_reserv.anfangsZeit > form.cleaned_data.get("endZeit"):
                                                # Room free.
                                                free_room = True
                                            else:
                                                # Room not free.
                                                free_room = False
                                                break
                            if free_room:
                                free_rooms.append(room)
                        else:
                            free_rooms.append(room)
                else:
                    free_rooms = models.Raum.objects.all()
    else:
        form = forms.ReservierungForm()
    return render(request, 'reservierung/reservierung_form.html', {'form': form, 'reserv': reserv, 'free_rooms': free_rooms, })
# View zum anzeigen aller Reservierungen des angemeldeten nutzers
@login_required(login_url='account:login')
def reservierung_user(request):
    """
    Show every reservation created by the logged-in user, grouped by room.

    The template receives ``rooms_return``: one list of reservations per
    room (possibly empty), ordered by start date.
    """
    user = request.user
    rooms_return = []
    for room in models.Raum.objects.all():
        # Filter by owner in the database instead of looping in Python:
        # same rows, same ordering, one query per room but far fewer rows.
        room_return = list(models.Reservierung.objects.filter(
            reservierterRaum=room,
            reserviert_von=user).order_by('anfangsDatum'))
        rooms_return.append(room_return)
    return render(request, 'reservierung/reservierung_user.html',
                  {'user': user, 'rooms_return': rooms_return, })
| mit |
alexhersh/calico | calico/common.py | 1 | 22270 | # -*- coding: utf-8 -*-
# Copyright (c) 2014, 2015 Metaswitch Networks
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
calico.common
~~~~~~~~~~~~
Calico common utilities.
"""
import errno
import logging
import logging.handlers
import os
import re
import sys
from types import StringTypes
import netaddr
import netaddr.core
from netaddr.strategy import eui48
_log = logging.getLogger(__name__)
AGENT_TYPE_CALICO = 'Calico agent'

# Log line layout for stream/file logging.
FORMAT_STRING = '%(asctime)s [%(levelname)s][%(process)s/%(thread)d] %(name)s %(lineno)d: %(message)s'
# Used "tid", which we swap for the greenlet ID, instead of "thread"
FORMAT_STRING_GEVENT = '%(asctime)s [%(levelname)s][%(process)s/%(tid)d] %(name)s %(lineno)d: %(message)s'

# This format string deliberately uses two different styles of format
# specifier. The %()s form is used by the logging module: the {} form is used
# by the code in this module. This allows us to dynamically generate the format
# string used by the logger.
SYSLOG_FORMAT_STRING = '{excname}[%(process)s]: %(module)s@%(lineno)d %(message)s'

# White-list for the --protocol match criteria.  We allow the guaranteed
# string shortcuts as well as int/string versions of the raw IDs.  We disallow
# 0 because the kernel cannot match on it directly.
KERNEL_PROTOCOLS = set(["tcp", "udp", "icmp", "icmpv6", "sctp", "udplite"])
KERNEL_PROTOCOLS.update(xrange(1, 256))
KERNEL_PROTOCOLS.update(intern(str(p)) for p in xrange(1, 256))

# Protocols that support a port match in iptables.  We allow the name and
# protocol number.
KERNEL_PORT_PROTOCOLS = set([
    "tcp", 6, "6",
    "udp", 17, "17",
    "udplite", 136, "136",
    "sctp", 132, "132",
    "dccp", 33, "33",
])

# Valid keys for a rule JSON dict.
KNOWN_RULE_KEYS = set([
    "action",
    "protocol",
    "src_net",
    "src_tag",
    "src_ports",
    "dst_net",
    "dst_tag",
    "dst_ports",
    "icmp_type",
    "icmp_code",
    "ip_version",
])

# Regex that matches only names with valid characters in them. The list of
# valid characters is the same for endpoints, profiles, and tags.
VALID_ID_RE = re.compile('^[a-zA-Z0-9_\.\-]+$')

# Linux interface names: 1-15 word characters.
VALID_LINUX_IFACE_NAME_RE = re.compile(r'^[a-zA-Z0-9_]{1,15}$')

# Not that thorough: we don't care if it's a valid CIDR, only that it doesn't
# have anything malicious in it.
VALID_IPAM_POOL_ID_RE = re.compile(r'^[0-9\.:a-fA-F\-]{1,43}$')
EXPECTED_IPAM_POOL_KEYS = set(["cidr", "masquerade"])
def validate_port(port):
    """Return True if *port* parses as an integer in the range 1-65535.

    Anything that int() rejects with a ValueError is treated as invalid.
    """
    try:
        value = int(port)
    except ValueError:
        return False
    return 0 < value <= 65535
def validate_ip_addr(addr, version=None):
    """Return True if *addr* is a valid IP address.

    :param version: 4, 6, or None to accept either address family.
    """
    if version == 4:
        return netaddr.valid_ipv4(addr)
    if version == 6:
        return netaddr.valid_ipv6(addr)
    return netaddr.valid_ipv4(addr) or netaddr.valid_ipv6(addr)
def canonicalise_ip(addr, version):
    """Return the canonical, interned string form of *addr*.

    None is passed through unchanged.
    """
    if addr is None:
        return None
    return intern(str(netaddr.IPAddress(addr, version=version)))
def validate_cidr(cidr, version):
    """
    Validates that a CIDR is valid. Returns true if valid, false if
    not. Version can be "4", "6", None for "IPv4", "IPv6", or "either"
    respectively.
    """
    try:
        # Parsing is the validation; the parsed network object itself is
        # not needed (previously it was bound to an unused local).
        netaddr.IPNetwork(cidr, version=version)
        return True
    except (netaddr.core.AddrFormatError, ValueError, TypeError):
        return False
def canonicalise_cidr(cidr, version):
    """Return the canonical, interned string form of *cidr*.

    None is passed through unchanged.
    """
    if cidr is None:
        return None
    return intern(str(netaddr.IPNetwork(cidr, version=version)))
def canonicalise_mac(mac):
    """Return *mac* normalised to the Unix colon-separated dialect.

    iptables expects ':' separators rather than '-', so we canonicalise
    with the mac_unix dialect.
    """
    return str(netaddr.EUI(mac, dialect=eui48.mac_unix))
def mkdir_p(path):
    """Create *path* and any missing parents; an existing directory is
    not an error.

    Based on http://stackoverflow.com/a/600612/190597 (tzot).
    """
    try:
        os.makedirs(path, exist_ok=True)  # Python >= 3.2
    except TypeError:
        # Older interpreters lack exist_ok: fall back and tolerate a
        # pre-existing directory explicitly.
        try:
            os.makedirs(path)
        except OSError as exc:  # Python > 2.5
            if exc.errno != errno.EEXIST or not os.path.isdir(path):
                raise
def default_logging(gevent_in_use=True, syslog_executable_name=None):
    """
    Sets up the Calico default logging, with default severities.

    Our default logging consists of:
    - setting the log level of the root logger to DEBUG (a safe initial value)
    - attaching a SysLog handler with no formatter (log to syslog), ERROR level
      only
    - attaching a StreamHandler with the Calico formatter, to log to stdout,
      with ERROR level

    This default explicitly excludes adding logging to file. This is because
    working out what file to log to requires reading the configuration file,
    and doing that may cause errors that we want to log! To add a file logger,
    call :meth:`complete_logging() <calico.common.complete_logging>` after
    this function has been called.

    :param gevent_in_use: when True, use the greenlet-aware format string
        and attach a GreenletFilter to the stream handler.
    :param syslog_executable_name: name to report in syslog lines; defaults
        to the basename of sys.argv[0].
    """
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    # Syslog lines are prefixed with the executable name so several Calico
    # processes can share one syslog.
    executable_name = syslog_executable_name or os.path.basename(sys.argv[0])
    syslog_format = SYSLOG_FORMAT_STRING.format(excname=executable_name)
    syslog_formatter = logging.Formatter(syslog_format)
    if os.path.exists("/dev/log"):
        syslog_handler = logging.handlers.SysLogHandler(address='/dev/log')
    else:
        # Probably unit tests running on windows.
        syslog_handler = logging.handlers.SysLogHandler()
    syslog_handler.setLevel(logging.ERROR)
    syslog_handler.setFormatter(syslog_formatter)
    root_logger.addHandler(syslog_handler)
    # Stdout gets the full Calico format (with thread or greenlet ID).
    format_string = FORMAT_STRING_GEVENT if gevent_in_use else FORMAT_STRING
    file_formatter = logging.Formatter(format_string)
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.ERROR)
    stream_handler.setFormatter(file_formatter)
    if gevent_in_use:
        from geventutils import GreenletFilter
        stream_handler.addFilter(GreenletFilter())
    root_logger.addHandler(stream_handler)
def complete_logging(logfile=None,
                     file_level=logging.DEBUG,
                     syslog_level=logging.ERROR,
                     stream_level=logging.ERROR,
                     gevent_in_use=True):
    """
    Updates the logging configuration based on learned configuration.

    The purpose of this function is to update the previously set logging
    configuration such that we can start logging to file. This is done in a
    separate step to the initial logging configuration in order to ensure that
    logging is available as early in execution as possible, i.e. before the
    config file has been parsed.

    This function must only be called once, after
    :meth:`default_logging() <calico.common.default_logging>`
    has been called.

    The xyz_level parameters may be a valid logging level DEBUG/INFO/... or
    None to disable that log entirely. Note: the config module supports
    using the string "none" in the configuration to disable logging.
    """
    root_logger = logging.getLogger()
    # If default_logging got called already, we'll have some loggers in place.
    # Update their levels.
    file_handler = None
    for handler in root_logger.handlers[:]:
        # BUG FIX: WatchedFileHandler is a StreamHandler subclass, so it
        # must be tested *before* the generic StreamHandler check.
        # Previously file handlers were caught by the StreamHandler branch,
        # got the stream level applied and were re-added below on every
        # call, duplicating file output.
        if isinstance(handler, logging.handlers.SysLogHandler):
            if syslog_level is None:
                root_logger.removeHandler(handler)
            else:
                handler.setLevel(syslog_level)
        elif isinstance(handler, logging.handlers.WatchedFileHandler):
            file_handler = handler
            if file_level is None:
                root_logger.removeHandler(handler)
            else:
                handler.setLevel(file_level)
        elif isinstance(handler, logging.StreamHandler):
            if stream_level is None:
                root_logger.removeHandler(handler)
            else:
                handler.setLevel(stream_level)
    # If we've been given a log file, log to file as well.
    if logfile and file_level is not None:
        if not file_handler:
            mkdir_p(os.path.dirname(logfile))
            format_string = (FORMAT_STRING_GEVENT if gevent_in_use
                             else FORMAT_STRING)
            formatter = logging.Formatter(format_string)
            file_handler = logging.handlers.WatchedFileHandler(logfile)
            if gevent_in_use:
                from geventutils import GreenletFilter
                file_handler.addFilter(GreenletFilter())
            file_handler.setLevel(file_level)
            file_handler.setFormatter(formatter)
            root_logger.addHandler(file_handler)
    # Optimization: disable all logging below the minimum level that we care
    # about.  The global "disable" setting is the first thing that gets
    # checked in the logging framework so it's the fastest way to disable
    # logging.
    levels = [file_level, syslog_level, stream_level]
    # Map None to something greater than the highest logging level.
    levels = [l if l is not None else logging.CRITICAL + 1 for l in levels]
    min_log_level = min(levels)
    logging.disable(min_log_level - 1)
    _log.info("Logging initialized")
class ValidationFailed(Exception):
    """Raised when input data fails validation."""
def validate_endpoint(config, combined_id, endpoint):
    """
    Ensures that the supplied endpoint is valid. Once this routine has returned
    successfully, we know that all required fields are present and have valid
    values.

    Has the side-effect of putting IP and MAC addresses in canonical form in
    the input dict, and of folding a legacy singular "profile_id" field into
    "profile_ids".

    :param config: configuration structure
    :param combined_id: EndpointId object
    :param endpoint: endpoint dictionary as read from etcd
    :raises ValidationFailed
    """
    # Problems are accumulated and reported in a single exception.
    issues = []
    if not isinstance(endpoint, dict):
        raise ValidationFailed("Expected endpoint to be a dict.")
    if not VALID_ID_RE.match(combined_id.endpoint):
        issues.append("Invalid endpoint ID '%r'." % combined_id.endpoint)
    if "state" not in endpoint:
        issues.append("Missing 'state' field.")
    elif endpoint["state"] not in ("active", "inactive"):
        issues.append("Expected 'state' to be one of active/inactive.")
    for field in ["name", "mac"]:
        if field not in endpoint:
            issues.append("Missing '%s' field." % field)
        elif not isinstance(endpoint[field], StringTypes):
            issues.append("Expected '%s' to be a string; got %r." %
                          (field, endpoint[field]))
        elif field == "mac":
            if not netaddr.valid_mac(endpoint.get("mac")):
                issues.append("Invalid MAC address")
            else:
                # Side-effect: store the canonical colon-separated form.
                endpoint["mac"] = canonicalise_mac(endpoint.get("mac"))
    # Legacy data may carry a singular "profile_id"; fold it into the
    # plural form before validating.
    if "profile_id" in endpoint:
        if "profile_ids" not in endpoint:
            endpoint["profile_ids"] = [endpoint["profile_id"]]
        del endpoint["profile_id"]
    if "profile_ids" not in endpoint:
        issues.append("Missing 'profile_id(s)' field.")
    else:
        for value in endpoint["profile_ids"]:
            if not isinstance(value, StringTypes):
                issues.append("Expected profile IDs to be strings.")
                break
            if not VALID_ID_RE.match(value):
                issues.append("Invalid profile ID '%r'." % value)
    if ("name" in endpoint and isinstance(endpoint['name'], StringTypes)
            and combined_id.host == config.HOSTNAME
            and not endpoint["name"].startswith(config.IFACE_PREFIX)):
        # Only test the interface for local endpoints - remote hosts may have
        # a different interface prefix.
        issues.append("Interface %r does not start with %r." %
                      (endpoint["name"], config.IFACE_PREFIX))
    for version in (4, 6):
        # Per-family network list: default to empty, otherwise validate and
        # canonicalise each CIDR in place.
        nets = "ipv%d_nets" % version
        if nets not in endpoint:
            endpoint[nets] = []
        else:
            canonical_nws = []
            nets_list = endpoint.get(nets, [])
            if not isinstance(nets_list, list):
                issues.append("%s should be a list" % nets)
            else:
                for ip in nets_list:
                    if not validate_cidr(ip, version):
                        issues.append("IP address %r is not a valid "
                                      "IPv%d CIDR." % (ip, version))
                        break
                    else:
                        canonical_nws.append(canonicalise_cidr(ip, version))
                endpoint[nets] = canonical_nws
        # Optional per-family gateway: validate and canonicalise if present.
        gw_key = "ipv%d_gateway" % version
        try:
            gw_str = endpoint[gw_key]
            if gw_str is not None and not validate_ip_addr(gw_str,
                                                           version):
                issues.append("%s is not a valid IPv%d gateway address." %
                              (gw_key, version))
            else:
                endpoint[gw_key] = canonicalise_ip(gw_str, version)
        except KeyError:
            pass
    if issues:
        raise ValidationFailed(" ".join(issues))
def validate_rules(profile_id, rules):
    """
    Ensures that the supplied rules are valid. Once this routine has returned
    successfully, we know that all required fields are present and have valid
    values.

    Has the side-effect of stripping None-valued keys from each rule and of
    canonicalising protocol and CIDR fields in place.

    :param profile_id: Profile ID from etcd
    :param rules: rules list as read from etcd
    :raises ValidationFailed
    """
    # Problems are accumulated and reported in a single exception.
    issues = []
    if not isinstance(rules, dict):
        raise ValidationFailed("Expected rules to be a dict.")
    if not VALID_ID_RE.match(profile_id):
        issues.append("Invalid profile_id '%r'." % profile_id)
    for dirn in ("inbound_rules", "outbound_rules"):
        if dirn not in rules:
            issues.append("No %s in rules." % dirn)
            continue
        if not isinstance(rules[dirn], list):
            issues.append("Expected rules[%s] to be a list." % dirn)
            continue
        for rule in rules[dirn]:
            if not isinstance(rule, dict):
                issues.append("Rules should be dicts.")
                break
            # Strip keys explicitly set to None.  NOTE(review): this file
            # targets Python 2 (xrange/intern above), where items() returns
            # a list, so deleting during this loop is safe.
            for key, value in rule.items():
                if value is None:
                    del rule[key]
            # Absolutely all fields are optional, but some have valid and
            # invalid values.
            protocol = rule.get('protocol')
            if protocol is not None and protocol not in KERNEL_PROTOCOLS:
                issues.append("Invalid protocol %s in rule %s" %
                              (protocol, rule))
            elif protocol is not None:
                # Canonicalise to an interned string form.
                protocol = intern(str(protocol))
                rule['protocol'] = str(protocol)
            ip_version = rule.get('ip_version')
            if ip_version is not None and ip_version not in (4, 6):
                # Bad IP version prevents further validation
                issues.append("Invalid ip_version in rule %s." % rule)
                continue
            # The protocol must match the IP family, if both are given.
            if ip_version == 4 and protocol == "icmpv6":
                issues.append("Using icmpv6 with IPv4 in rule %s." % rule)
            if ip_version == 6 and protocol == "icmp":
                issues.append("Using icmp with IPv6 in rule %s." % rule)
            for tag_type in ('src_tag', 'dst_tag'):
                tag = rule.get(tag_type)
                if tag is None:
                    continue
                if not VALID_ID_RE.match(tag):
                    issues.append("Invalid %s '%r'." % (tag_type, tag))
            for key in ("src_net", "dst_net"):
                network = rule.get(key)
                if (network is not None and
                        not validate_cidr(rule[key], ip_version)):
                    issues.append("Invalid CIDR (version %s) in rule %s." %
                                  (ip_version, rule))
                elif network is not None:
                    # Canonicalise valid CIDRs in place.
                    rule[key] = canonicalise_cidr(network, ip_version)
            for key in ("src_ports", "dst_ports"):
                ports = rule.get(key)
                if (ports is not None and
                        not isinstance(ports, list)):
                    issues.append("Expected ports to be a list in rule %s."
                                  % rule)
                    continue
                if ports is not None:
                    # Port matches only make sense for protocols that
                    # iptables can match ports on.
                    if protocol not in KERNEL_PORT_PROTOCOLS:
                        issues.append("%s is not allowed for protocol %s in "
                                      "rule %s" % (key, protocol, rule))
                    for port in ports:
                        error = validate_rule_port(port)
                        if error:
                            issues.append("Invalid port %s (%s) in rule %s." %
                                          (port, error, rule))
            action = rule.get('action')
            if (action is not None and
                    action not in ("allow", "deny")):
                issues.append("Invalid action in rule %s." % rule)
            icmp_type = rule.get('icmp_type')
            if icmp_type is not None:
                if not isinstance(icmp_type, int):
                    issues.append("ICMP type is not an integer in rule %s." %
                                  rule)
                elif not 0 <= icmp_type <= 255:
                    issues.append("ICMP type is out of range in rule %s." %
                                  rule)
            icmp_code = rule.get("icmp_code")
            if icmp_code is not None:
                if not isinstance(icmp_code, int):
                    issues.append("ICMP code is not an integer in rule %s." %
                                  rule)
                elif not 0 <= icmp_code <= 255:
                    issues.append("ICMP code is out of range.")
                if icmp_type is None:
                    # TODO: ICMP code without ICMP type not supported by iptables
                    # Firewall against that for now.
                    issues.append("ICMP code specified without ICMP type.")
            unknown_keys = set(rule.keys()) - KNOWN_RULE_KEYS
            if unknown_keys:
                issues.append("Rule contains unknown keys: %s." % unknown_keys)
    if issues:
        raise ValidationFailed(" ".join(issues))
def validate_rule_port(port):
    """Validate one entry from a rule's port list.

    Valid entries are an integer port or a "start:end" string range.

    :param port: the candidate port entry
    :returns: None when valid, otherwise a short error description
    """
    if isinstance(port, int):
        if 0 <= port <= 65535:
            return None
        return "integer out of range"
    # Not an integer: must be a "start:end" string range.
    try:
        fields = port.split(":")
    except AttributeError:
        return "neither integer nor string"
    if len(fields) != 2:
        return "range unparseable"
    try:
        start = int(fields[0])
        end = int(fields[1])
    except ValueError:
        return "range invalid"
    if start >= end or start < 0 or end > 65535:
        return "range invalid"
    return None
def validate_tags(profile_id, tags):
    """Validate a profile's tag list as read from etcd.

    :param profile_id: profile ID the tags belong to
    :param tags: tag list as read from etcd
    :raises ValidationFailed: if the profile ID or any tag is invalid
    """
    problems = []
    if not VALID_ID_RE.match(profile_id):
        problems.append("Invalid profile_id '%r'." % profile_id)
    if not isinstance(tags, list):
        problems.append("Expected tags to be a list.")
    else:
        for tag in tags:
            # A non-string tag stops further checks: the regex below
            # would choke on it.
            if not isinstance(tag, StringTypes):
                problems.append("Expected tag '%s' to be a string." % tag)
                break
            if not VALID_ID_RE.match(tag):
                problems.append("Invalid tag '%r'." % tag)
    if problems:
        raise ValidationFailed(" ".join(problems))
def validate_ipam_pool(pool_id, pool, ip_version):
    """
    Validates and canonicalises an IPAM pool dict.  Removes any fields that
    it doesn't know about.

    Modifies the dict in-place.

    :param pool_id: ID under which the pool is stored; must match
        VALID_IPAM_POOL_ID_RE.
    :param pool: the pool document (a dict); mutated in place.
    :param ip_version: IP version (4 or 6), passed through to the CIDR
        validation/canonicalisation helpers.
    :raises ValidationFailed: if the pool document or pool ID is malformed.
    """
    if not isinstance(pool, dict):
        raise ValidationFailed("Pool should be a dict")
    # Remove any keys that we're not expecting.  Stops unvalidated data from
    # slipping through.  We ignore other keys since this structure is used
    # by calicoctl for its own purposes too.
    keys_to_remove = set()
    for key in pool:
        if key not in EXPECTED_IPAM_POOL_KEYS:
            keys_to_remove.add(key)
    for key in keys_to_remove:
        pool.pop(key)
    issues = []
    if "cidr" not in pool:
        # CIDR is mandatory.
        issues.append("'cidr' field is missing")
    else:
        cidr = pool["cidr"]
        if cidr is None or not validate_cidr(cidr, ip_version):
            issues.append("Invalid CIDR: %r" % cidr)
        else:
            pool["cidr"] = canonicalise_cidr(cidr, ip_version)
    # "masquerade" is optional; when present it must be a boolean.  pool.get
    # only returns a non-bool here when the key is actually present, so the
    # pool["masquerade"] lookup below cannot raise.
    if not isinstance(pool.get("masquerade", False), bool):
        issues.append("Invalid 'masquerade' field: %r" % pool["masquerade"])
    if not VALID_IPAM_POOL_ID_RE.match(pool_id):
        # Bug fix: report the offending pool ID itself, not the whole pool
        # dict (the original formatted `pool` here by mistake).
        issues.append("Invalid pool ID: %r" % pool_id)
    if issues:
        raise ValidationFailed(','.join(issues))
| apache-2.0 |
PyGithub/PyGithub | github/Commit.py | 2 | 15054 | ############################ Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2013 martinqt <m.ki2@laposte.net> #
# Copyright 2014 Andy Casey <acasey@mso.anu.edu.au> #
# Copyright 2014 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2016 Jannis Gebauer <ja.geb@me.com> #
# Copyright 2016 John Eskew <jeskew@edx.org> #
# Copyright 2016 Peter Buckley <dx-pbuckley@users.noreply.github.com> #
# Copyright 2018 sfdye <tsfdye@gmail.com> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
import github.CheckRun
import github.CheckSuite
import github.CommitCombinedStatus
import github.CommitComment
import github.CommitStats
import github.CommitStatus
import github.File
import github.GitCommit
import github.GithubObject
import github.NamedUser
import github.PaginatedList
class Commit(github.GithubObject.CompletableGithubObject):
    """
    This class represents Commits. The reference can be found here http://docs.github.com/en/rest/reference/git#commits
    """
    # Attributes are populated lazily: each property calls
    # _completeIfNotSet(), which (per the CompletableGithubObject contract)
    # fetches the full commit from the API the first time an attribute that
    # was missing from the original payload is accessed.
    def __repr__(self):
        return self.get__repr__({"sha": self._sha.value})
    @property
    def author(self):
        """
        :type: :class:`github.NamedUser.NamedUser`
        """
        self._completeIfNotSet(self._author)
        return self._author.value
    @property
    def comments_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._comments_url)
        return self._comments_url.value
    @property
    def commit(self):
        """
        :type: :class:`github.GitCommit.GitCommit`
        """
        self._completeIfNotSet(self._commit)
        return self._commit.value
    @property
    def committer(self):
        """
        :type: :class:`github.NamedUser.NamedUser`
        """
        self._completeIfNotSet(self._committer)
        return self._committer.value
    @property
    def files(self):
        """
        :type: list of :class:`github.File.File`
        """
        self._completeIfNotSet(self._files)
        return self._files.value
    @property
    def html_url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._html_url)
        return self._html_url.value
    @property
    def parents(self):
        """
        :type: list of :class:`github.Commit.Commit`
        """
        self._completeIfNotSet(self._parents)
        return self._parents.value
    @property
    def sha(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._sha)
        return self._sha.value
    @property
    def stats(self):
        """
        :type: :class:`github.CommitStats.CommitStats`
        """
        self._completeIfNotSet(self._stats)
        return self._stats.value
    @property
    def url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._url)
        return self._url.value
    def create_comment(
        self,
        body,
        line=github.GithubObject.NotSet,
        path=github.GithubObject.NotSet,
        position=github.GithubObject.NotSet,
    ):
        """
        :calls: `POST /repos/{owner}/{repo}/commits/{sha}/comments <http://docs.github.com/en/rest/reference/repos#comments>`_
        :param body: string
        :param line: integer
        :param path: string
        :param position: integer
        :rtype: :class:`github.CommitComment.CommitComment`
        """
        assert isinstance(body, str), body
        assert line is github.GithubObject.NotSet or isinstance(line, int), line
        assert path is github.GithubObject.NotSet or isinstance(path, str), path
        assert position is github.GithubObject.NotSet or isinstance(
            position, int
        ), position
        # Only send optional parameters the caller actually supplied.
        post_parameters = {
            "body": body,
        }
        if line is not github.GithubObject.NotSet:
            post_parameters["line"] = line
        if path is not github.GithubObject.NotSet:
            post_parameters["path"] = path
        if position is not github.GithubObject.NotSet:
            post_parameters["position"] = position
        headers, data = self._requester.requestJsonAndCheck(
            "POST", f"{self.url}/comments", input=post_parameters
        )
        return github.CommitComment.CommitComment(
            self._requester, headers, data, completed=True
        )
    def create_status(
        self,
        state,
        target_url=github.GithubObject.NotSet,
        description=github.GithubObject.NotSet,
        context=github.GithubObject.NotSet,
    ):
        """
        :calls: `POST /repos/{owner}/{repo}/statuses/{sha} <http://docs.github.com/en/rest/reference/repos#statuses>`_
        :param state: string
        :param target_url: string
        :param description: string
        :param context: string
        :rtype: :class:`github.CommitStatus.CommitStatus`
        """
        assert isinstance(state, str), state
        assert target_url is github.GithubObject.NotSet or isinstance(
            target_url, str
        ), target_url
        assert description is github.GithubObject.NotSet or isinstance(
            description, str
        ), description
        assert context is github.GithubObject.NotSet or isinstance(
            context, str
        ), context
        post_parameters = {
            "state": state,
        }
        if target_url is not github.GithubObject.NotSet:
            post_parameters["target_url"] = target_url
        if description is not github.GithubObject.NotSet:
            post_parameters["description"] = description
        if context is not github.GithubObject.NotSet:
            post_parameters["context"] = context
        # The statuses endpoint hangs off the repository URL, not the commit
        # URL, hence climbing two path segments with _parentUrl.
        headers, data = self._requester.requestJsonAndCheck(
            "POST",
            f"{self._parentUrl(self._parentUrl(self.url))}/statuses/{self.sha}",
            input=post_parameters,
        )
        return github.CommitStatus.CommitStatus(
            self._requester, headers, data, completed=True
        )
    def get_comments(self):
        """
        :calls: `GET /repos/{owner}/{repo}/commits/{sha}/comments <http://docs.github.com/en/rest/reference/repos#comments>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.CommitComment.CommitComment`
        """
        return github.PaginatedList.PaginatedList(
            github.CommitComment.CommitComment,
            self._requester,
            f"{self.url}/comments",
            None,
        )
    def get_statuses(self):
        """
        :calls: `GET /repos/{owner}/{repo}/statuses/{ref} <http://docs.github.com/en/rest/reference/repos#statuses>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.CommitStatus.CommitStatus`
        """
        return github.PaginatedList.PaginatedList(
            github.CommitStatus.CommitStatus,
            self._requester,
            f"{self._parentUrl(self._parentUrl(self.url))}/statuses/{self.sha}",
            None,
        )
    def get_combined_status(self):
        """
        :calls: `GET /repos/{owner}/{repo}/commits/{ref}/status/ <http://docs.github.com/en/rest/reference/repos#statuses>`_
        :rtype: :class:`github.CommitCombinedStatus.CommitCombinedStatus`
        """
        headers, data = self._requester.requestJsonAndCheck("GET", f"{self.url}/status")
        return github.CommitCombinedStatus.CommitCombinedStatus(
            self._requester, headers, data, completed=True
        )
    def get_pulls(self):
        """
        :calls: `GET /repos/{owner}/{repo}/commits/{sha}/pulls <https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit>`_
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.PullRequest.PullRequest`
        """
        # NOTE(review): github.PullRequest is not imported at the top of this
        # module; this relies on the package having loaded that submodule
        # elsewhere before get_pulls() is called -- confirm.
        return github.PaginatedList.PaginatedList(
            github.PullRequest.PullRequest,
            self._requester,
            f"{self.url}/pulls",
            None,
            headers={"Accept": "application/vnd.github.groot-preview+json"},
        )
    def get_check_runs(
        self,
        check_name=github.GithubObject.NotSet,
        status=github.GithubObject.NotSet,
        filter=github.GithubObject.NotSet,
    ):
        """
        :calls: `GET /repos/{owner}/{repo}/commits/{sha}/check-runs <https://docs.github.com/en/rest/reference/checks#list-check-runs-for-a-git-reference>`_
        :param check_name: string
        :param status: string
        :param filter: string
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.CheckRun.CheckRun`
        """
        assert check_name is github.GithubObject.NotSet or isinstance(
            check_name, str
        ), check_name
        assert status is github.GithubObject.NotSet or isinstance(status, str), status
        assert filter is github.GithubObject.NotSet or isinstance(filter, str), filter
        url_parameters = dict()
        if check_name is not github.GithubObject.NotSet:
            url_parameters["check_name"] = check_name
        if status is not github.GithubObject.NotSet:
            url_parameters["status"] = status
        if filter is not github.GithubObject.NotSet:
            url_parameters["filter"] = filter
        return github.PaginatedList.PaginatedList(
            github.CheckRun.CheckRun,
            self._requester,
            f"{self.url}/check-runs",
            url_parameters,
            headers={"Accept": "application/vnd.github.v3+json"},
            list_item="check_runs",
        )
    def get_check_suites(
        self, app_id=github.GithubObject.NotSet, check_name=github.GithubObject.NotSet
    ):
        """
        :class: `GET /repos/{owner}/{repo}/commits/{ref}/check-suites <https://docs.github.com/en/rest/reference/checks#list-check-suites-for-a-git-reference>`_
        :param app_id: int
        :param check_name: string
        :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.CheckSuite.CheckSuite`
        """
        assert app_id is github.GithubObject.NotSet or isinstance(app_id, int), app_id
        assert check_name is github.GithubObject.NotSet or isinstance(
            check_name, str
        ), check_name
        parameters = dict()
        if app_id is not github.GithubObject.NotSet:
            parameters["app_id"] = app_id
        if check_name is not github.GithubObject.NotSet:
            parameters["check_name"] = check_name
        request_headers = {"Accept": "application/vnd.github.v3+json"}
        return github.PaginatedList.PaginatedList(
            github.CheckSuite.CheckSuite,
            self._requester,
            f"{self.url}/check-suites",
            parameters,
            headers=request_headers,
            list_item="check_suites",
        )
    @property
    def _identity(self):
        # Commits are identified by their SHA when building API URLs.
        return self.sha
    def _initAttributes(self):
        # Start every attribute as NotSet; _useAttributes fills in whatever
        # the API payload actually contained.
        self._author = github.GithubObject.NotSet
        self._comments_url = github.GithubObject.NotSet
        self._commit = github.GithubObject.NotSet
        self._committer = github.GithubObject.NotSet
        self._files = github.GithubObject.NotSet
        self._html_url = github.GithubObject.NotSet
        self._parents = github.GithubObject.NotSet
        self._sha = github.GithubObject.NotSet
        self._stats = github.GithubObject.NotSet
        self._url = github.GithubObject.NotSet
    def _useAttributes(self, attributes):
        # Copy known keys out of the raw API payload, wrapping each in the
        # appropriate typed attribute holder.
        if "author" in attributes:  # pragma no branch
            self._author = self._makeClassAttribute(
                github.NamedUser.NamedUser, attributes["author"]
            )
        if "comments_url" in attributes:  # pragma no branch
            self._comments_url = self._makeStringAttribute(attributes["comments_url"])
        if "commit" in attributes:  # pragma no branch
            self._commit = self._makeClassAttribute(
                github.GitCommit.GitCommit, attributes["commit"]
            )
        if "committer" in attributes:  # pragma no branch
            self._committer = self._makeClassAttribute(
                github.NamedUser.NamedUser, attributes["committer"]
            )
        if "files" in attributes:  # pragma no branch
            self._files = self._makeListOfClassesAttribute(
                github.File.File, attributes["files"]
            )
        if "html_url" in attributes:  # pragma no branch
            self._html_url = self._makeStringAttribute(attributes["html_url"])
        if "parents" in attributes:  # pragma no branch
            self._parents = self._makeListOfClassesAttribute(
                Commit, attributes["parents"]
            )
        if "sha" in attributes:  # pragma no branch
            self._sha = self._makeStringAttribute(attributes["sha"])
        if "stats" in attributes:  # pragma no branch
            self._stats = self._makeClassAttribute(
                github.CommitStats.CommitStats, attributes["stats"]
            )
        if "url" in attributes:  # pragma no branch
            self._url = self._makeStringAttribute(attributes["url"])
| lgpl-3.0 |
scalable-networks/gnuradio-3.7.0.1 | gr-blocks/python/blocks/qa_stream_mux.py | 10 | 6241 | #!/usr/bin/env python
#
# Copyright 2004,2005,2007,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
import os
class test_stream_mux (gr_unittest.TestCase):
    # QA tests for blocks.stream_mux: two float sources are interleaved in
    # alternating chunks whose sizes are given by stream_sizes.
    # NOTE(review): this file targets Python 2 (help_stream_ramp_2ff calls
    # .reverse() on the result of range(), which is a list only on Py2).
    def setUp (self):
        # Disable ControlPort so the test runs without its runtime deps.
        os.environ['GR_CONF_CONTROLPORT_ON'] = 'False'
        self.tb = gr.top_block ()
    def tearDown (self):
        self.tb = None
    def help_stream_2ff(self, N, stream_sizes):
        # Mux N ones with N twos using the given chunk sizes; return the
        # flattened output.
        v0 = blocks.vector_source_f(N*[1,], False)
        v1 = blocks.vector_source_f(N*[2,], False)
        mux = blocks.stream_mux(gr.sizeof_float, stream_sizes)
        dst = blocks.vector_sink_f ()
        self.tb.connect (v0, (mux,0))
        self.tb.connect (v1, (mux,1))
        self.tb.connect (mux, dst)
        self.tb.run ()
        return dst.data ()
    def help_stream_ramp_2ff(self, N, stream_sizes):
        # Same as help_stream_2ff but with an ascending ramp on input 0 and
        # a descending ramp on input 1, so ordering errors are visible.
        r1 = range(N)
        r2 = range(N)
        r2.reverse()
        v0 = blocks.vector_source_f(r1, False)
        v1 = blocks.vector_source_f(r2, False)
        mux = blocks.stream_mux(gr.sizeof_float, stream_sizes)
        dst = blocks.vector_sink_f ()
        self.tb.connect (v0, (mux,0))
        self.tb.connect (v1, (mux,1))
        self.tb.connect (mux, dst)
        self.tb.run ()
        return dst.data ()
    def test_stream_2NN_ff(self):
        # Equal chunk sizes: output alternates 10 ones / 10 twos.
        N = 40
        stream_sizes = [10, 10]
        result_data = self.help_stream_2ff(N, stream_sizes)
        exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0)
        self.assertEqual (exp_data, result_data)
    def test_stream_ramp_2NN_ff(self):
        # Ramp inputs: verifies chunk boundaries fall at the right samples.
        N = 40
        stream_sizes = [10, 10]
        result_data = self.help_stream_ramp_2ff(N, stream_sizes)
        exp_data = ( 0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0,
                    39.0, 38.0, 37.0, 36.0, 35.0, 34.0, 33.0, 32.0, 31.0, 30.0,
                    10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0,
                    29.0, 28.0, 27.0, 26.0, 25.0, 24.0, 23.0, 22.0, 21.0, 20.0,
                    20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0,
                    19.0, 18.0, 17.0, 16.0, 15.0, 14.0, 13.0, 12.0, 11.0, 10.0,
                    30.0, 31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0,
                     9.0,  8.0,  7.0,  6.0,  5.0,  4.0,  3.0,  2.0,  1.0,  0.0)
        self.assertEqual (exp_data, result_data)
    def test_stream_2NM_ff(self):
        # Unequal chunk sizes (second larger); inputs drain at different
        # times.
        # NOTE(review): the first help_stream_2ff call's result is discarded
        # and the helper is run twice -- looks redundant; confirm intent.
        N = 40
        stream_sizes = [7, 9]
        self.help_stream_2ff(N, stream_sizes)
        result_data = self.help_stream_2ff(N, stream_sizes)
        exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0)
        self.assertEqual (exp_data, result_data)
    def test_stream_2MN_ff(self):
        # N not a multiple of either chunk size: trailing partial chunk.
        N = 37
        stream_sizes = [7, 9]
        self.help_stream_2ff(N, stream_sizes)
        result_data = self.help_stream_2ff(N, stream_sizes)
        exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    2.0)
        self.assertEqual (exp_data, result_data)
    def test_stream_2N0_ff(self):
        # Zero-sized chunk on input 1: only input 0 appears in the output.
        N = 30
        stream_sizes = [7, 0]
        self.help_stream_2ff(N, stream_sizes)
        result_data = self.help_stream_2ff(N, stream_sizes)
        exp_data = (1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
                    1.0, 1.0)
        self.assertEqual (exp_data, result_data)
    def test_stream_20N_ff(self):
        # Zero-sized chunk on input 0: only input 1 appears in the output.
        N = 30
        stream_sizes = [0, 9]
        self.help_stream_2ff(N, stream_sizes)
        result_data = self.help_stream_2ff(N, stream_sizes)
        exp_data = (2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
                    2.0, 2.0, 2.0)
        self.assertEqual (exp_data, result_data)
# Run the QA suite under GNU Radio's unittest runner, writing results to XML.
if __name__ == '__main__':
    gr_unittest.run(test_stream_mux, "test_stream_mux.xml")
| gpl-3.0 |
eos87/Booktype | lib/booktype/apps/edit/utils.py | 7 | 3015 | # -*- coding: utf-8 -*-
"""
Utility functions related with editor app
"""
import sputnik
from lxml import etree
from booktype.utils.plugins import icejs
def clean_chapter_html(content, text_only=False, **kwargs):
    """
    Removes icejs change-tracking contents from chapter HTML.  We could
    later add more functionality to this function to clean other stuff.

    Args:
      - content: html string
      - text_only: Boolean; when True return only the text content
      - clean_comments_trail (kwarg): when True, also drop comment bubbles

    Returns:
      - cleaned either html or text content :)
    """
    tree = icejs.ice_cleanup(content, **icejs.IceCleanPlugin.OPTIONS)
    if kwargs.get('clean_comments_trail', False):
        # Drop the inline comment bubbles left behind by the editor.
        for bubble in tree.xpath(".//a[@class='comment-link']"):
            bubble.drop_tree()
    if text_only:
        return ' '.join(tree.itertext())
    serialised = etree.tostring(tree, pretty_print=True)
    # Slice off the fixed-width wrapper element added by the cleanup step;
    # the 6/-8 offsets must match that wrapper's tag lengths (TODO confirm).
    return serialised[6:-8]
def color_me(l, rgb, pos):
    """Wrap the text runs of HTML fragment *l* in ``<span class=rgb>`` tags.

    When *pos* is a (start, end) pair, only that slice is highlighted (with
    an extra ``diff changed`` span) and the rest of *l* is left untouched;
    otherwise every text run between tags is wrapped.  Statement order and
    the slice arithmetic below are load-bearing -- modify with care.
    """
    if pos:
        t1 = l.find('>', pos[0])
        t2 = l.find('<', pos[0])
        # Only recurse when the slice does not start inside a tag.
        if (t1 == t2) or (t1 > t2 and t2 != -1):
            out = l[:pos[0]]
            out += '<span class="diff changed">'+color_me(l[pos[0]:pos[1]], rgb, None)+'</span>'
            out += l[pos[1]:]
        else:
            out = l
        return out
    out = '<span class="%s">' % rgb
    # n scans for tags; m marks the start of the pending text run.
    n = 0
    m = 0
    while True:
        n = l.find('<', n)
        if n == -1: # no more tags
            # NOTE(review): l[m:-1] drops the last character here; the
            # l[n:] (== l[-1:]) appended after the loop restores it.
            out += l[m:n]
            break
        else:
            if l[n+1] == '/': # tag ending
                # closed tag
                out += l[m:n]
                j = l.find('>', n)+1
                tag = l[n:j]
                out += '</span>'+tag
                n = j
            else: # tag start
                out += l[m:n]
                j = l.find('>', n)+1
                if j == 0:
                    # Unterminated tag: emit the tail verbatim.
                    out = l[n:]
                    n = len(l)
                else:
                    tag = l[n:j]
                    # <br>/<hr> (any spacing, self-closed or not) do not
                    # break the span; other tags close and reopen it.
                    if not tag.replace(' ','').replace('/','').lower() in ['<br>', '<hr>']:
                        if n != 0:
                            out += '</span>'
                        out += tag+'<span class="%s">' % rgb
                    else:
                        out += tag
                    n = j
            m = n
    out += l[n:]+'</span>'
    return out
def send_notification(request, bookid, version, message, *message_args):
    """Send notification.

    Adds a notification message to the book's sputnik channel.

    Args:
      request: Client Request object
      bookid: Unique Book id
      version: Book version
      message: Notification message key
      message_args: positional arguments for message format
    """
    payload = {
        'command': 'notification',
        'message': message,
        'username': request.user.username,
        'message_args': message_args
    }
    channel = '/booktype/book/%s/%s/' % (bookid, version)
    # myself=False: do not echo the notification back to the sender.
    sputnik.addMessageToChannel(request, channel, payload, myself=False)
| agpl-3.0 |
Pythonify/awesome | venv/lib/python2.7/site-packages/pycparser/ply/ctokens.py | 197 | 3177 | # ----------------------------------------------------------------------
# ctokens.py
#
# Token specifications for symbols in ANSI C and C++. This file is
# meant to be used as a library in other tokenizers.
# ----------------------------------------------------------------------
# Reserved words
# Token names and t_* regex rules below follow PLY's lexer conventions:
# the `tokens` list declares the token set, and each `t_NAME` string is the
# regex that matches that token.
tokens = [
    # Literals (identifier, integer constant, float constant, string constant, char const)
    'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER',
    # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO',
    'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
    'LOR', 'LAND', 'LNOT',
    'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
    # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
    'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
    # Increment/decrement (++,--)
    'INCREMENT', 'DECREMENT',
    # Structure dereference (->)
    'ARROW',
    # Ternary operator (?)
    'TERNARY',
    # Delimeters ( ) [ ] { } , . ; :
    'LPAREN', 'RPAREN',
    'LBRACKET', 'RBRACKET',
    'LBRACE', 'RBRACE',
    'COMMA', 'PERIOD', 'SEMI', 'COLON',
    # Ellipsis (...)
    'ELLIPSIS',
]
# Operators
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MODULO = r'%'
t_OR = r'\|'
t_AND = r'&'
t_NOT = r'~'
t_XOR = r'\^'
t_LSHIFT = r'<<'
t_RSHIFT = r'>>'
t_LOR = r'\|\|'
t_LAND = r'&&'
t_LNOT = r'!'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_NE = r'!='
# Assignment operators
t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_MODEQUAL = r'%='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='
t_LSHIFTEQUAL = r'<<='
t_RSHIFTEQUAL = r'>>='
t_ANDEQUAL = r'&='
t_OREQUAL = r'\|='
t_XOREQUAL = r'\^='
# Increment/decrement
t_INCREMENT = r'\+\+'
t_DECREMENT = r'--'
# ->
t_ARROW = r'->'
# ?
t_TERNARY = r'\?'
# Delimeters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_COMMA = r','
t_PERIOD = r'\.'
t_SEMI = r';'
t_COLON = r':'
t_ELLIPSIS = r'\.\.\.'
# Identifiers
t_ID = r'[A-Za-z_][A-Za-z0-9_]*'
# Integer literal
t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
# Floating literal
t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
# String literal
t_STRING = r'\"([^\\\n]|(\\.))*?\"'
# Character constant 'c' or L'c'
t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''
# Comment (C-Style)
def t_COMMENT(t):
    r'/\*(.|\n)*?\*/'
    # The raw string above is the token's regex: PLY reads it via
    # t_COMMENT.__doc__, so it must stay the first statement and unedited.
    # Keep the lexer's line counter in sync across multi-line comments.
    t.lexer.lineno += t.value.count('\n')
    return t
# Comment (C++-Style)
def t_CPPCOMMENT(t):
    r'//.*\n'
    # PLY regex-in-docstring rule (see t_COMMENT).  A C++ comment always
    # consumes exactly one newline, hence the fixed increment.
    t.lexer.lineno += 1
    return t
| gpl-3.0 |
sternb0t/django-pandas | django_pandas/io.py | 1 | 3578 | import pandas as pd
from .utils import update_with_verbose
import django
def to_fields(qs, fieldnames):
    """Yield the Django model Field object for each name in *fieldnames*.

    Each name may span relationships with the usual ``__`` separator; the
    walk follows foreign keys / reverse relations model by model and yields
    the Field reached at the end of each name.
    """
    for fieldname in fieldnames:
        model = qs.model
        for fieldname_part in fieldname.split('__'):
            try:
                field = model._meta.get_field(fieldname_part)
            except django.db.models.fields.FieldDoesNotExist:
                # Not a direct field: look for a reverse relation whose
                # accessor name matches this path segment.
                # NOTE(review): if no related object matches, `field` keeps
                # its value from the previous segment (or is unbound on the
                # first one) and the wrong field is yielded silently --
                # confirm whether an explicit error is wanted here.
                rels = model._meta.get_all_related_objects_with_model()
                for relobj, _ in rels:
                    if relobj.get_accessor_name() == fieldname_part:
                        field = relobj.field
                        model = field.model
                        break
            else:
                # Direct hit: step into the related model so the next path
                # segment resolves against it.
                if hasattr(field, "one_to_many") and field.one_to_many:
                    model = field.related_model
                elif field.get_internal_type() in ('ForeignKey', 'OneToOneField', 'ManyToManyField'):
                    model = field.rel.to
        yield field
def read_frame(qs, fieldnames=(), index_col=None, coerce_float=False,
               verbose=True):
    """
    Returns a dataframe from a QuerySet

    Optionally specify the field names/columns to utilize and
    a field as the index

    Parameters
    ----------

    qs: The Django QuerySet.
    fieldnames: The model field names to use in creating the frame.
         You can span a relationship in the usual Django way
         by using double underscores to specify a related field
         in another model

    index_col: specify the field to use for the index. If the index
               field is not in the field list it will be appended

    coerce_float : boolean, default False
        Attempt to convert values to non-string, non-numeric data (like
        decimal.Decimal) to floating point, useful for SQL result sets

    verbose:  boolean If this is ``True`` then populate the DataFrame with the
                human readable versions of any foreign key fields else use
                the primary keys values.
                The human readable version of the foreign key field is
                defined in the ``__unicode__`` or ``__str__``
                methods of the related class definition
    """
    if fieldnames:
        if index_col is not None and index_col not in fieldnames:
            # Add it to the field names if not already there
            fieldnames = tuple(fieldnames) + (index_col,)
        fields = to_fields(qs, fieldnames)
    elif isinstance(qs, django.db.models.query.ValuesQuerySet):
        # No explicit field list: derive columns from the values queryset.
        # The attribute holding annotation names was renamed in Django 1.8.
        if django.VERSION < (1, 8):
            annotation_field_names = qs.aggregate_names
        else:
            annotation_field_names = qs.annotation_names
        # Annotations and extras have no model Field; pad with None so
        # `fields` stays aligned with `fieldnames`.
        fieldnames = qs.field_names + annotation_field_names + qs.extra_names
        fields = [qs.model._meta.get_field(f) for f in qs.field_names] + \
            [None] * (len(annotation_field_names) + len(qs.extra_names))
    else:
        fields = qs.model._meta.fields
        fieldnames = [f.name for f in fields]
    # A ValuesQuerySet already yields rows; otherwise fetch tuples.
    if isinstance(qs, django.db.models.query.ValuesQuerySet):
        recs = list(qs)
    else:
        recs = list(qs.values_list(*fieldnames))
    df = pd.DataFrame.from_records(recs, columns=fieldnames,
                                   coerce_float=coerce_float)
    if verbose:
        update_with_verbose(df, fieldnames, fields)
    if index_col is not None:
        df.set_index(index_col, inplace=True)
    return df
| bsd-3-clause |
martinbuc/missionplanner | Lib/tokenize.py | 53 | 16889 | """Tokenization help for Python programs.
generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF). It generates
5-tuples with these members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators
Older entry points
tokenize_loop(readline, tokeneater)
tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger')
import string, re
from token import *
import token
# Re-export everything public from the token module, plus this module's own
# additions, then drop the temporaries from the namespace.
__all__ = [x for x in dir(token) if not x.startswith("_")]
__all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
del x
del token
# Two extra token types not present in the token module: COMMENT tokens for
# comments, and NL for non-logical newlines.
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2
def group(*choices):
    """Return a regex alternation group matching any one of *choices*."""
    return '(%s)' % '|'.join(choices)
def any(*choices):
    """Regex for zero or more repetitions of any of *choices*.

    NOTE: intentionally shadows the builtin ``any`` within this module.
    """
    return '%s*' % group(*choices)
def maybe(*choices):
    """Regex for an optional (zero-or-one) occurrence of any of *choices*."""
    return '%s?' % group(*choices)
# Regular-expression fragments describing Python 2 lexical structure; these
# are composed with group()/any()/maybe() and compiled below.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'
Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
Binnumber = r'0[bB][01]+[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"//=?",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
# Maps a string-opening prefix+quote to the compiled pattern that matches
# the rest of that string; bare prefixes map to None (not yet a string).
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            "b'''": single3prog, 'b"""': double3prog,
            "br'''": single3prog, 'br"""': double3prog,
            "B'''": single3prog, 'B"""': double3prog,
            "bR'''": single3prog, 'bR"""': double3prog,
            "Br'''": single3prog, 'Br"""': double3prog,
            "BR'''": single3prog, 'BR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None,
            'b': None, 'B': None}
# Sets (as dicts) of every legal prefix+quote spelling for triple- and
# single-quoted strings.
triple_quoted = {}
for t in ("'''", '"""',
          "r'''", 'r"""', "R'''", 'R"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
          "uR'''", 'uR"""', "UR'''", 'UR"""',
          "b'''", 'b"""', "B'''", 'B"""',
          "br'''", 'br"""', "Br'''", 'Br"""',
          "bR'''", 'bR"""', "BR'''", 'BR"""'):
    triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
          "r'", 'r"', "R'", 'R"',
          "u'", 'u"', "U'", 'U"',
          "ur'", 'ur"', "Ur'", 'Ur"',
          "uR'", 'uR"', "UR'", 'UR"',
          "b'", 'b"', "B'", 'B"',
          "br'", 'br"', "Br'", 'Br"',
          "bR'", 'bR"', "BR'", 'BR"' ):
    single_quoted[t] = t
tabsize = 8
class TokenError(Exception):
    """Raised when the input cannot be tokenized (e.g. truncated input)."""
class StopTokenizing(Exception):
    """Raised by a tokeneater callback to make tokenize() stop early."""
# Default tokeneater: prints each token's span, name and repr.  Python 2
# print-statement syntax; kept byte-identical.
def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
    srow, scol = srow_scol
    erow, ecol = erow_ecol
    print "%d,%d-%d,%d:\t%s\t%s" % \
        (srow, scol, erow, ecol, tok_name[type], repr(token))
def tokenize(readline, tokeneater=printtoken):
    """
    The tokenize() function accepts two parameters: one representing the
    input stream, and one providing an output mechanism for tokenize().

    The first parameter, readline, must be a callable object which provides
    the same interface as the readline() method of built-in file objects.
    Each call to the function should return one line of input as a string.

    The second parameter, tokeneater, must also be a callable object. It is
    called once for each token, with five arguments, corresponding to the
    tuples generated by generate_tokens().
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        # The callback asked to stop early; this is not an error.
        pass
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    # Drive generate_tokens(), feeding each 5-tuple to the callback.
    for token_info in generate_tokens(readline):
        tokeneater(*token_info)
class Untokenizer:
    """Rebuilds source text from a token stream (see untokenize()).

    Full 5-tuples reproduce the original layout via column tracking; once a
    bare (type, value) 2-tuple is seen, processing switches permanently to
    the lossy compat() mode.
    """
    def __init__(self):
        # Accumulated output fragments, plus the end position of the last
        # token emitted (rows are 1-based, columns 0-based).
        self.tokens = []
        self.prev_row = 1
        self.prev_col = 0
    def add_whitespace(self, start):
        # Pad with spaces so the next token begins at its recorded column.
        row, col = start
        # NOTE(review): later Python versions require row >= prev_row here;
        # this direction looks inverted but is preserved as-is.
        assert row <= self.prev_row
        col_offset = col - self.prev_col
        if col_offset:
            self.tokens.append(" " * col_offset)
    def untokenize(self, iterable):
        for t in iterable:
            if len(t) == 2:
                # Position info missing: fall back to compat mode for the
                # rest of the stream.
                self.compat(t, iterable)
                break
            tok_type, token, start, end, line = t
            self.add_whitespace(start)
            self.tokens.append(token)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)
    def compat(self, token, iterable):
        # Lossy mode: no positions, so spacing/indentation is synthesized.
        startline = False
        indents = []
        toks_append = self.tokens.append
        toknum, tokval = token
        if toknum in (NAME, NUMBER):
            tokval += ' '
        if toknum in (NEWLINE, NL):
            startline = True
        prevstring = False
        for tok in iterable:
            toknum, tokval = tok[:2]
            if toknum in (NAME, NUMBER):
                tokval += ' '
            # Insert a space between two consecutive strings
            if toknum == STRING:
                if prevstring:
                    tokval = ' ' + tokval
                prevstring = True
            else:
                prevstring = False
            if toknum == INDENT:
                indents.append(tokval)
                continue
            elif toknum == DEDENT:
                indents.pop()
                continue
            elif toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                # First token on a new line: re-emit the current indent.
                toks_append(indents[-1])
                startline = False
            toks_append(tokval)
def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element of *iterable* must be a token sequence with at least two
    items, a token number and token value.  Full 5-tuples (with position
    info) round-trip exactly: untokenized source matches the input source.
    With only 2-tuples the output is rougher but still tokenizes back to
    the same limited token stream:

        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    return Untokenizer().untokenize(iterable)
def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects. Each call to the function
    should return one line of input as a string. Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).next    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found. The line passed is the
    logical line; continuation lines are included.
    """
    # lnum: current 1-based line number; parenlev: bracket nesting depth
    # (newlines inside brackets are NL, not NEWLINE); continued: set after
    # a backslash line continuation.
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    # contstr/contline accumulate a string literal spanning several physical
    # lines; needcont flags a single-quoted string that must keep continuing
    # via trailing backslashes.
    contstr, needcont = '', 0
    contline = None
    # Stack of indentation column widths; indents[0] is the top level.
    indents = [0]
    while 1:                                   # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum += 1
        pos, max = 0, len(line)
        if contstr:                            # continued string
            if not line:
                raise TokenError, ("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                # Single-quoted continuation without a trailing backslash:
                # the string is malformed.
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue
        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < max:                   # measure leading whitespace
                if line[pos] == ' ':
                    column += 1
                elif line[pos] == '\t':
                    column = (column//tabsize + 1)*tabsize
                elif line[pos] == '\f':
                    # Form feed resets the column count.
                    column = 0
                else:
                    break
                pos += 1
            if pos == max:
                break
            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue
            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
        else:                                  # continued statement
            if not line:
                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
            continued = 0
        while pos < max:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                    # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                token, initial = line[start:end], line[start]
                if initial in numchars or \
                   (initial == '.' and token != '.'):      # ordinary number
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    yield (NL if parenlev > 0 else NEWLINE,
                           token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:               # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)  # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                    if token[-1] == '\n':      # continued string
                        strstart = (lnum, start)
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                      # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars:     # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\':          # continued stmt
                    continued = 1
                else:
                    if initial in '([{':
                        parenlev += 1
                    elif initial in ')]}':
                        parenlev -= 1
                    yield (OP, token, spos, epos, line)
            else:
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos+1), line)
                pos += 1
    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
if __name__ == '__main__':                     # testing
    import sys
    # Tokenize the file named on the command line, or stdin when no
    # argument is given, printing each token via the default tokeneater.
    if len(sys.argv) > 1:
        source = open(sys.argv[1])
        tokenize(source.readline)
    else:
        tokenize(sys.stdin.readline)
| gpl-3.0 |
HiSPARC/station-software | user/python/Lib/logging/handlers.py | 7 | 48297 | # Copyright 2001-2013 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Additional handlers for the logging package for Python. The core package is
based on PEP 282 and comments thereto in comp.lang.python.
Copyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging.handlers' and log away!
"""
import errno, logging, socket, os, cPickle, struct, time, re
from stat import ST_DEV, ST_INO, ST_MTIME
try:
import codecs
except ImportError:
codecs = None
try:
unicode
_unicode = True
except NameError:
_unicode = False
#
# Some constants...
#
# Default ports used by the socket-based handlers in this module.
DEFAULT_TCP_LOGGING_PORT = 9020
DEFAULT_UDP_LOGGING_PORT = 9021
DEFAULT_HTTP_LOGGING_PORT = 9022
DEFAULT_SOAP_LOGGING_PORT = 9023
# Well-known syslog port, for both UDP and TCP transports.
SYSLOG_UDP_PORT = 514
SYSLOG_TCP_PORT = 514
_MIDNIGHT = 24 * 60 * 60 # number of seconds in a day
class BaseRotatingHandler(logging.FileHandler):
    """
    Base class for handlers that rotate log files at a certain point.
    Not meant to be instantiated directly.  Instead, use RotatingFileHandler
    or TimedRotatingFileHandler.

    Subclasses must provide shouldRollover() and doRollover(); both are
    invoked from emit().
    """
    def __init__(self, filename, mode, encoding=None, delay=0):
        """
        Use the specified filename for streamed logging.

        If the codecs module could not be imported, any requested
        *encoding* is dropped and the file is opened unencoded.
        """
        if codecs is None:
            encoding = None
        logging.FileHandler.__init__(self, filename, mode, encoding, delay)
        # Keep mode/encoding (possibly cleared above) so the stream can be
        # reopened with the same settings after a rollover.
        self.mode = mode
        self.encoding = encoding

    def emit(self, record):
        """
        Emit a record.

        Output the record to the file, catering for rollover as described
        in doRollover().
        """
        try:
            if self.shouldRollover(record):
                self.doRollover()
            logging.FileHandler.emit(self, record)
        except (KeyboardInterrupt, SystemExit):
            # Never swallow interpreter-exit requests.
            raise
        except:
            self.handleError(record)
class RotatingFileHandler(BaseRotatingHandler):
    """
    Log to a file, switching to a fresh one once the current file grows
    close to a fixed size.
    """
    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=0):
        """
        Open *filename* for logging, rolling over near *maxBytes* bytes.

        With backupCount >= 1 old files are kept as "<name>.1" (newest)
        through "<name>.<backupCount>" (oldest); at each rollover every
        survivor shifts one slot down and the live file becomes ".1",
        e.g. with backupCount=5 and base "app.log" you get "app.log",
        "app.log.1" ... "app.log.5", always writing to "app.log".
        A maxBytes of zero disables rollover entirely.
        """
        # Any mode other than append would truncate the log on every fresh
        # run of the calling application, losing earlier runs' output -
        # so force 'a' whenever rotation is requested.
        if maxBytes > 0:
            mode = 'a'
        BaseRotatingHandler.__init__(self, filename, mode, encoding, delay)
        self.maxBytes = maxBytes
        self.backupCount = backupCount

    def doRollover(self):
        """
        Close the stream, shift each backup file up one slot, rename the
        live file to ".1", then (unless delayed) reopen a fresh stream.
        """
        if self.stream:
            self.stream.close()
            self.stream = None
        if self.backupCount > 0:
            for i in reversed(range(1, self.backupCount)):
                src = "%s.%d" % (self.baseFilename, i)
                dst = "%s.%d" % (self.baseFilename, i + 1)
                if os.path.exists(src):
                    if os.path.exists(dst):
                        os.remove(dst)
                    os.rename(src, dst)
            dst = self.baseFilename + ".1"
            if os.path.exists(dst):
                os.remove(dst)
            # Issue 18940: with delay=True the live file may never have
            # been created, so only rename it when it exists.
            if os.path.exists(self.baseFilename):
                os.rename(self.baseFilename, dst)
        if not self.delay:
            self.stream = self._open()

    def shouldRollover(self, record):
        """
        Return 1 if writing *record* would push the file to or past
        maxBytes, else 0.
        """
        if self.stream is None:                 # delay was set...
            self.stream = self._open()
        if self.maxBytes > 0:                   # are we rolling over?
            msg = "%s\n" % self.format(record)
            # Seek to the end first (needed due to a non-posix-compliant
            # Windows feature before tell() is reliable).
            self.stream.seek(0, 2)
            if self.stream.tell() + len(msg) >= self.maxBytes:
                return 1
        return 0
class TimedRotatingFileHandler(BaseRotatingHandler):
    """
    Handler for logging to a file, rotating the log file at certain timed
    intervals.

    If backupCount is > 0, when rollover is done, no more than backupCount
    files are kept - the oldest ones are deleted.
    """
    def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False):
        """
        Open *filename* for appending and schedule timed rollovers.

        when: unit of the rollover interval - 'S', 'M', 'H', 'D',
            'midnight' or 'W0'-'W6' (0 is Monday); case-insensitive.
        interval: number of *when* units between rollovers.
        utc: use UTC instead of local time for rollover calculations and
            for the rotated-file suffix.
        """
        BaseRotatingHandler.__init__(self, filename, 'a', encoding, delay)
        self.when = when.upper()
        self.backupCount = backupCount
        self.utc = utc
        # Calculate the real rollover interval, which is just the number of
        # seconds between rollovers. Also set the filename suffix used when
        # a rollover occurs. Current 'when' events supported:
        # S - Seconds
        # M - Minutes
        # H - Hours
        # D - Days
        # midnight - roll over at midnight
        # W{0-6} - roll over on a certain day; 0 - Monday
        #
        # Case of the 'when' specifier is not important; lower or upper case
        # will work.
        if self.when == 'S':
            self.interval = 1 # one second
            self.suffix = "%Y-%m-%d_%H-%M-%S"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$"
        elif self.when == 'M':
            self.interval = 60 # one minute
            self.suffix = "%Y-%m-%d_%H-%M"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}$"
        elif self.when == 'H':
            self.interval = 60 * 60 # one hour
            self.suffix = "%Y-%m-%d_%H"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}$"
        elif self.when == 'D' or self.when == 'MIDNIGHT':
            self.interval = 60 * 60 * 24 # one day
            self.suffix = "%Y-%m-%d"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
        elif self.when.startswith('W'):
            self.interval = 60 * 60 * 24 * 7 # one week
            if len(self.when) != 2:
                raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
            if self.when[1] < '0' or self.when[1] > '6':
                raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
            self.dayOfWeek = int(self.when[1])
            self.suffix = "%Y-%m-%d"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
        else:
            raise ValueError("Invalid rollover interval specified: %s" % self.when)
        # extMatch recognizes rotated-file suffixes in getFilesToDelete().
        self.extMatch = re.compile(self.extMatch)
        self.interval = self.interval * interval # multiply by units requested
        # Seed the first rollover time from the existing file's mtime (so a
        # restart keeps the original schedule) or from "now" for a new file.
        if os.path.exists(filename):
            t = os.stat(filename)[ST_MTIME]
        else:
            t = int(time.time())
        self.rolloverAt = self.computeRollover(t)

    def computeRollover(self, currentTime):
        """
        Work out the rollover time based on the specified time.
        """
        result = currentTime + self.interval
        # If we are rolling over at midnight or weekly, then the interval is already known.
        # What we need to figure out is WHEN the next interval is. In other words,
        # if you are rolling over at midnight, then your base interval is 1 day,
        # but you want to start that one day clock at midnight, not now. So, we
        # have to fudge the rolloverAt value in order to trigger the first rollover
        # at the right time. After that, the regular interval will take care of
        # the rest. Note that this code doesn't care about leap seconds. :)
        if self.when == 'MIDNIGHT' or self.when.startswith('W'):
            # This could be done with less code, but I wanted it to be clear
            if self.utc:
                t = time.gmtime(currentTime)
            else:
                t = time.localtime(currentTime)
            currentHour = t[3]
            currentMinute = t[4]
            currentSecond = t[5]
            # r is the number of seconds left between now and midnight
            r = _MIDNIGHT - ((currentHour * 60 + currentMinute) * 60 +
                    currentSecond)
            result = currentTime + r
            # If we are rolling over on a certain day, add in the number of days until
            # the next rollover, but offset by 1 since we just calculated the time
            # until the next day starts. There are three cases:
            # Case 1) The day to rollover is today; in this case, do nothing
            # Case 2) The day to rollover is further in the interval (i.e., today is
            # day 2 (Wednesday) and rollover is on day 6 (Sunday). Days to
            # next rollover is simply 6 - 2 - 1, or 3.
            # Case 3) The day to rollover is behind us in the interval (i.e., today
            # is day 5 (Saturday) and rollover is on day 3 (Thursday).
            # Days to rollover is 6 - 5 + 3, or 4. In this case, it's the
            # number of days left in the current week (1) plus the number
            # of days in the next week until the rollover day (3).
            # The calculations described in 2) and 3) above need to have a day added.
            # This is because the above time calculation takes us to midnight on this
            # day, i.e. the start of the next day.
            if self.when.startswith('W'):
                day = t[6] # 0 is Monday
                if day != self.dayOfWeek:
                    if day < self.dayOfWeek:
                        daysToWait = self.dayOfWeek - day
                    else:
                        daysToWait = 6 - day + self.dayOfWeek + 1
                    newRolloverAt = result + (daysToWait * (60 * 60 * 24))
                    if not self.utc:
                        # Adjust if a DST transition falls between now and
                        # the computed rollover moment.
                        dstNow = t[-1]
                        dstAtRollover = time.localtime(newRolloverAt)[-1]
                        if dstNow != dstAtRollover:
                            if not dstNow:  # DST kicks in before next rollover, so we need to deduct an hour
                                addend = -3600
                            else:           # DST bows out before next rollover, so we need to add an hour
                                addend = 3600
                            newRolloverAt += addend
                    result = newRolloverAt
        return result

    def shouldRollover(self, record):
        """
        Determine if rollover should occur.

        record is not used, as we are just comparing times, but it is needed so
        the method signatures are the same
        """
        t = int(time.time())
        if t >= self.rolloverAt:
            return 1
        #print "No need to rollover: %d, %d" % (t, self.rolloverAt)
        return 0

    def getFilesToDelete(self):
        """
        Determine the files to delete when rolling over.

        More specific than the earlier method, which just used glob.glob().
        """
        dirName, baseName = os.path.split(self.baseFilename)
        fileNames = os.listdir(dirName)
        result = []
        prefix = baseName + "."
        plen = len(prefix)
        for fileName in fileNames:
            if fileName[:plen] == prefix:
                suffix = fileName[plen:]
                if self.extMatch.match(suffix):
                    result.append(os.path.join(dirName, fileName))
        result.sort()
        if len(result) < self.backupCount:
            result = []
        else:
            # Keep the newest backupCount files; everything older goes.
            result = result[:len(result) - self.backupCount]
        return result

    def doRollover(self):
        """
        do a rollover; in this case, a date/time stamp is appended to the filename
        when the rollover happens. However, you want the file to be named for the
        start of the interval, not the current time. If there is a backup count,
        then we have to get a list of matching filenames, sort them and remove
        the one with the oldest suffix.
        """
        if self.stream:
            self.stream.close()
            self.stream = None
        # get the time that this sequence started at and make it a TimeTuple
        currentTime = int(time.time())
        dstNow = time.localtime(currentTime)[-1]
        t = self.rolloverAt - self.interval
        if self.utc:
            timeTuple = time.gmtime(t)
        else:
            timeTuple = time.localtime(t)
            dstThen = timeTuple[-1]
            if dstNow != dstThen:
                # Interval start and "now" straddle a DST change: shift the
                # suffix timestamp so it names the true interval start.
                if dstNow:
                    addend = 3600
                else:
                    addend = -3600
                timeTuple = time.localtime(t + addend)
        dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
        if os.path.exists(dfn):
            os.remove(dfn)
        # Issue 18940: A file may not have been created if delay is True.
        if os.path.exists(self.baseFilename):
            os.rename(self.baseFilename, dfn)
        if self.backupCount > 0:
            for s in self.getFilesToDelete():
                os.remove(s)
        if not self.delay:
            self.stream = self._open()
        newRolloverAt = self.computeRollover(currentTime)
        while newRolloverAt <= currentTime:
            newRolloverAt = newRolloverAt + self.interval
        #If DST changes and midnight or weekly rollover, adjust for this.
        if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
            dstAtRollover = time.localtime(newRolloverAt)[-1]
            if dstNow != dstAtRollover:
                if not dstNow:  # DST kicks in before next rollover, so we need to deduct an hour
                    addend = -3600
                else:           # DST bows out before next rollover, so we need to add an hour
                    addend = 3600
                newRolloverAt += addend
        self.rolloverAt = newRolloverAt
class WatchedFileHandler(logging.FileHandler):
    """
    A FileHandler that notices when its file is moved or replaced.

    External rotators such as newsyslog and logrotate rename the active
    log file; this handler stats the path before each emit and, when the
    device or inode no longer matches the open stream, closes it and
    reopens the path so output goes to the new file.  Intended for Unix:
    under Windows open log files hold exclusive locks (so they cannot be
    moved or renamed underneath us, making this handler unnecessary) and
    stat always reports ST_INO as zero.  Based on a suggestion and patch
    by Chad J. Schroeder.
    """
    def __init__(self, filename, mode='a', encoding=None, delay=0):
        logging.FileHandler.__init__(self, filename, mode, encoding, delay)
        self.dev, self.ino = -1, -1
        self._statstream()

    def _statstream(self):
        # Remember the identity (device, inode) of the currently open file.
        if self.stream:
            sres = os.fstat(self.stream.fileno())
            self.dev, self.ino = sres[ST_DEV], sres[ST_INO]

    def emit(self, record):
        """
        Emit a record, reopening the file first if it has been rotated.

        The path is stat'ed once and the freshly opened stream fstat'ed,
        which narrows the race window (issue #14632; thanks to John
        Mulligan for the problem report and patch).
        """
        try:
            # stat the file by path, checking for existence
            sres = os.stat(self.baseFilename)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            sres = None                 # file vanished - reopen below
        # Compare the on-disk identity with that of our open stream.
        changed = (not sres or sres[ST_DEV] != self.dev
                   or sres[ST_INO] != self.ino)
        if changed:
            if self.stream is not None:
                # we have an open file handle, clean it up
                self.stream.flush()
                self.stream.close()
                self.stream = None      # issue #21742: _open() might fail
            # open a new file handle and get new stat info from that fd
            self.stream = self._open()
            self._statstream()
        logging.FileHandler.emit(self, record)
class SocketHandler(logging.Handler):
    """
    A handler class which writes logging records, in pickle format, to
    a streaming socket. The socket is kept open across logging calls.
    If the peer resets it, an attempt is made to reconnect on the next call.

    The pickle which is sent is that of the LogRecord's attribute dictionary
    (__dict__), so that the receiver does not need to have the logging module
    installed in order to process the logging event.

    To unpickle the record at the receiving end into a LogRecord, use the
    makeLogRecord function.
    """

    def __init__(self, host, port):
        """
        Initializes the handler with a specific host address and port.

        The attribute 'closeOnError' is set to 1 - which means that if
        a socket error occurs, the socket is silently closed and then
        reopened on the next logging call.
        """
        logging.Handler.__init__(self)
        self.host = host
        self.port = port
        self.sock = None
        self.closeOnError = 0
        # retryTime is None while connected (or before the first failure);
        # otherwise it is the earliest time a reconnect will be attempted.
        self.retryTime = None
        #
        # Exponential backoff parameters.
        #
        self.retryStart = 1.0
        self.retryMax = 30.0
        self.retryFactor = 2.0

    def makeSocket(self, timeout=1):
        """
        A factory method which allows subclasses to define the precise
        type of socket they want.
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if hasattr(s, 'settimeout'):
            s.settimeout(timeout)
        s.connect((self.host, self.port))
        return s

    def createSocket(self):
        """
        Try to create a socket, using an exponential backoff with
        a max retry time. Thanks to Robert Olson for the original patch
        (SF #815911) which has been slightly refactored.
        """
        now = time.time()
        # Either retryTime is None, in which case this
        # is the first time back after a disconnect, or
        # we've waited long enough.
        if self.retryTime is None:
            attempt = 1
        else:
            attempt = (now >= self.retryTime)
        if attempt:
            try:
                self.sock = self.makeSocket()
                self.retryTime = None # next time, no delay before trying
            except socket.error:
                #Creation failed, so set the retry time and return.
                if self.retryTime is None:
                    self.retryPeriod = self.retryStart
                else:
                    # Back off geometrically, capped at retryMax seconds.
                    self.retryPeriod = self.retryPeriod * self.retryFactor
                    if self.retryPeriod > self.retryMax:
                        self.retryPeriod = self.retryMax
                self.retryTime = now + self.retryPeriod

    def send(self, s):
        """
        Send a pickled string to the socket.

        This function allows for partial sends which can happen when the
        network is busy.
        """
        if self.sock is None:
            self.createSocket()
        #self.sock can be None either because we haven't reached the retry
        #time yet, or because we have reached the retry time and retried,
        #but are still unable to connect.
        if self.sock:
            try:
                if hasattr(self.sock, "sendall"):
                    self.sock.sendall(s)
                else:
                    # Fall back to a manual partial-send loop.
                    sentsofar = 0
                    left = len(s)
                    while left > 0:
                        sent = self.sock.send(s[sentsofar:])
                        sentsofar = sentsofar + sent
                        left = left - sent
            except socket.error:
                self.sock.close()
                self.sock = None # so we can call createSocket next time

    def makePickle(self, record):
        """
        Pickles the record in binary format with a length prefix, and
        returns it ready for transmission across the socket.
        """
        ei = record.exc_info
        if ei:
            # just to get traceback text into record.exc_text ...
            dummy = self.format(record)
            record.exc_info = None # to avoid Unpickleable error
        # See issue #14436: If msg or args are objects, they may not be
        # available on the receiving end. So we convert the msg % args
        # to a string, save it as msg and zap the args.
        d = dict(record.__dict__)
        d['msg'] = record.getMessage()
        d['args'] = None
        s = cPickle.dumps(d, 1)
        if ei:
            record.exc_info = ei # for next handler
        # 4-byte big-endian length prefix precedes the pickle payload.
        slen = struct.pack(">L", len(s))
        return slen + s

    def handleError(self, record):
        """
        Handle an error during logging.

        An error has occurred during logging. Most likely cause -
        connection lost. Close the socket so that we can retry on the
        next event.
        """
        if self.closeOnError and self.sock:
            self.sock.close()
            self.sock = None #try to reconnect next time
        else:
            logging.Handler.handleError(self, record)

    def emit(self, record):
        """
        Emit a record.

        Pickles the record and writes it to the socket in binary format.
        If there is an error with the socket, silently drop the packet.
        If there was a problem with the socket, re-establishes the
        socket.
        """
        try:
            s = self.makePickle(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def close(self):
        """
        Closes the socket.
        """
        # Serialize against concurrent emit() calls via the handler lock.
        self.acquire()
        try:
            sock = self.sock
            if sock:
                self.sock = None
                sock.close()
        finally:
            self.release()
        logging.Handler.close(self)
class DatagramHandler(SocketHandler):
    """
    Sends pickled LogRecord attribute dictionaries (__dict__) over UDP,
    so the receiver does not need the logging module installed to process
    the event.  To rebuild a LogRecord at the receiving end, use the
    makeLogRecord function.  Only the transport differs from
    SocketHandler.
    """
    def __init__(self, host, port):
        """
        Initializes the handler with a specific host address and port.
        """
        SocketHandler.__init__(self, host, port)
        self.closeOnError = 0

    def makeSocket(self):
        """
        Override SocketHandler's factory to produce a UDP (SOCK_DGRAM)
        socket instead of a TCP one.
        """
        return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def send(self, s):
        """
        Transmit the pickled byte string *s* as a single datagram.

        No partial-send loop here: UDP guarantees neither delivery nor
        ordering, so fragmenting the payload at this level would be
        pointless.
        """
        if self.sock is None:
            self.createSocket()
        self.sock.sendto(s, (self.host, self.port))
class SysLogHandler(logging.Handler):
"""
A handler class which sends formatted logging records to a syslog
server. Based on Sam Rushing's syslog module:
http://www.nightmare.com/squirl/python-ext/misc/syslog.py
Contributed by Nicolas Untz (after which minor refactoring changes
have been made).
"""
# from <linux/sys/syslog.h>:
# ======================================================================
# priorities/facilities are encoded into a single 32-bit quantity, where
# the bottom 3 bits are the priority (0-7) and the top 28 bits are the
# facility (0-big number). Both the priorities and the facilities map
# roughly one-to-one to strings in the syslogd(8) source code. This
# mapping is included in this file.
#
# priorities (these are ordered)
LOG_EMERG = 0 # system is unusable
LOG_ALERT = 1 # action must be taken immediately
LOG_CRIT = 2 # critical conditions
LOG_ERR = 3 # error conditions
LOG_WARNING = 4 # warning conditions
LOG_NOTICE = 5 # normal but significant condition
LOG_INFO = 6 # informational
LOG_DEBUG = 7 # debug-level messages
# facility codes
LOG_KERN = 0 # kernel messages
LOG_USER = 1 # random user-level messages
LOG_MAIL = 2 # mail system
LOG_DAEMON = 3 # system daemons
LOG_AUTH = 4 # security/authorization messages
LOG_SYSLOG = 5 # messages generated internally by syslogd
LOG_LPR = 6 # line printer subsystem
LOG_NEWS = 7 # network news subsystem
LOG_UUCP = 8 # UUCP subsystem
LOG_CRON = 9 # clock daemon
LOG_AUTHPRIV = 10 # security/authorization messages (private)
LOG_FTP = 11 # FTP daemon
# other codes through 15 reserved for system use
LOG_LOCAL0 = 16 # reserved for local use
LOG_LOCAL1 = 17 # reserved for local use
LOG_LOCAL2 = 18 # reserved for local use
LOG_LOCAL3 = 19 # reserved for local use
LOG_LOCAL4 = 20 # reserved for local use
LOG_LOCAL5 = 21 # reserved for local use
LOG_LOCAL6 = 22 # reserved for local use
LOG_LOCAL7 = 23 # reserved for local use
priority_names = {
"alert": LOG_ALERT,
"crit": LOG_CRIT,
"critical": LOG_CRIT,
"debug": LOG_DEBUG,
"emerg": LOG_EMERG,
"err": LOG_ERR,
"error": LOG_ERR, # DEPRECATED
"info": LOG_INFO,
"notice": LOG_NOTICE,
"panic": LOG_EMERG, # DEPRECATED
"warn": LOG_WARNING, # DEPRECATED
"warning": LOG_WARNING,
}
facility_names = {
"auth": LOG_AUTH,
"authpriv": LOG_AUTHPRIV,
"cron": LOG_CRON,
"daemon": LOG_DAEMON,
"ftp": LOG_FTP,
"kern": LOG_KERN,
"lpr": LOG_LPR,
"mail": LOG_MAIL,
"news": LOG_NEWS,
"security": LOG_AUTH, # DEPRECATED
"syslog": LOG_SYSLOG,
"user": LOG_USER,
"uucp": LOG_UUCP,
"local0": LOG_LOCAL0,
"local1": LOG_LOCAL1,
"local2": LOG_LOCAL2,
"local3": LOG_LOCAL3,
"local4": LOG_LOCAL4,
"local5": LOG_LOCAL5,
"local6": LOG_LOCAL6,
"local7": LOG_LOCAL7,
}
#The map below appears to be trivially lowercasing the key. However,
#there's more to it than meets the eye - in some locales, lowercasing
#gives unexpected results. See SF #1524081: in the Turkish locale,
#"INFO".lower() != "info"
priority_map = {
"DEBUG" : "debug",
"INFO" : "info",
"WARNING" : "warning",
"ERROR" : "error",
"CRITICAL" : "critical"
}
def __init__(self, address=('localhost', SYSLOG_UDP_PORT),
facility=LOG_USER, socktype=None):
"""
Initialize a handler.
If address is specified as a string, a UNIX socket is used. To log to a
local syslogd, "SysLogHandler(address="/dev/log")" can be used.
If facility is not specified, LOG_USER is used. If socktype is
specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific
socket type will be used. For Unix sockets, you can also specify a
socktype of None, in which case socket.SOCK_DGRAM will be used, falling
back to socket.SOCK_STREAM.
"""
logging.Handler.__init__(self)
self.address = address
self.facility = facility
self.socktype = socktype
if isinstance(address, basestring):
self.unixsocket = 1
self._connect_unixsocket(address)
else:
self.unixsocket = False
if socktype is None:
socktype = socket.SOCK_DGRAM
host, port = address
ress = socket.getaddrinfo(host, port, 0, socktype)
if not ress:
raise socket.error("getaddrinfo returns an empty list")
for res in ress:
af, socktype, proto, _, sa = res
err = sock = None
try:
sock = socket.socket(af, socktype, proto)
if socktype == socket.SOCK_STREAM:
sock.connect(sa)
break
except socket.error as exc:
err = exc
if sock is not None:
sock.close()
if err is not None:
raise err
self.socket = sock
self.socktype = socktype
def _connect_unixsocket(self, address):
use_socktype = self.socktype
if use_socktype is None:
use_socktype = socket.SOCK_DGRAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except socket.error:
self.socket.close()
if self.socktype is not None:
# user didn't specify falling back, so fail
raise
use_socktype = socket.SOCK_STREAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except socket.error:
self.socket.close()
raise
# curious: when talking to the unix-domain '/dev/log' socket, a
# zero-terminator seems to be required. this string is placed
# into a class variable so that it can be overridden if
# necessary.
log_format_string = '<%d>%s\000'
def encodePriority(self, facility, priority):
"""
Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
priority_names mapping dictionaries are used to convert them to
integers.
"""
if isinstance(facility, basestring):
facility = self.facility_names[facility]
if isinstance(priority, basestring):
priority = self.priority_names[priority]
return (facility << 3) | priority
    def close(self):
        """
        Closes the socket.
        """
        self.acquire()
        try:
            # NOTE(review): only the unix-domain socket is closed here;
            # the UDP/TCP socket opened in __init__ is left open.
            if self.unixsocket:
                self.socket.close()
        finally:
            self.release()
        logging.Handler.close(self)
def mapPriority(self, levelName):
"""
Map a logging level name to a key in the priority_names map.
This is useful in two scenarios: when custom levels are being
used, and in the case where you can't do a straightforward
mapping by lowercasing the logging level name because of locale-
specific issues (see SF #1524081).
"""
return self.priority_map.get(levelName, "warning")
    def emit(self, record):
        """
        Emit a record.
        The record is formatted, and then sent to the syslog server. If
        exception information is present, it is NOT sent to the server.
        """
        try:
            # /dev/log expects a NUL-terminated message (see the
            # log_format_string comment above).
            msg = self.format(record) + '\000'
            """
            We need to convert record level to lowercase, maybe this will
            change in the future.
            """
            prio = '<%d>' % self.encodePriority(self.facility,
                                self.mapPriority(record.levelname))
            # Message is a string. Convert to bytes as required by RFC 5424
            # (Python 2: unicode -> utf-8 encoded str).
            if type(msg) is unicode:
                msg = msg.encode('utf-8')
            msg = prio + msg
            if self.unixsocket:
                try:
                    self.socket.send(msg)
                except socket.error:
                    # The datagram socket goes stale if syslog restarts;
                    # reconnect once and retry.
                    self.socket.close() # See issue 17981
                    self._connect_unixsocket(self.address)
                    self.socket.send(msg)
            elif self.socktype == socket.SOCK_DGRAM:
                self.socket.sendto(msg, self.address)
            else:
                self.socket.sendall(msg)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # Everything else is routed to the standard error handler.
            self.handleError(record)
class SMTPHandler(logging.Handler):
    """
    A handler class which sends an SMTP email for each logging event.
    """
    def __init__(self, mailhost, fromaddr, toaddrs, subject,
                 credentials=None, secure=None):
        """
        Initialize the handler.
        Initialize the instance with the from and to addresses and subject
        line of the email. To specify a non-standard SMTP port, use the
        (host, port) tuple format for the mailhost argument. To specify
        authentication credentials, supply a (username, password) tuple
        for the credentials argument. To specify the use of a secure
        protocol (TLS), pass in a tuple for the secure argument. This will
        only be used when authentication credentials are supplied. The tuple
        will be either an empty tuple, or a single-value tuple with the name
        of a keyfile, or a 2-value tuple with the names of the keyfile and
        certificate file. (This tuple is passed to the `starttls` method).
        """
        logging.Handler.__init__(self)
        if isinstance(mailhost, (list, tuple)):
            self.mailhost, self.mailport = mailhost
        else:
            self.mailhost, self.mailport = mailhost, None
        if isinstance(credentials, (list, tuple)):
            self.username, self.password = credentials
        else:
            # NOTE(review): self.password is left unset on this branch.
            # emit() only reads it when self.username is truthy, but
            # setting it to None here as well would be more robust.
            self.username = None
        self.fromaddr = fromaddr
        # Accept a single address given as a string (Python 2 basestring).
        if isinstance(toaddrs, basestring):
            toaddrs = [toaddrs]
        self.toaddrs = toaddrs
        self.subject = subject
        self.secure = secure
        # Socket timeout (seconds) used when connecting to the SMTP server.
        self._timeout = 5.0

    def getSubject(self, record):
        """
        Determine the subject for the email.
        If you want to specify a subject line which is record-dependent,
        override this method.
        """
        return self.subject

    def emit(self, record):
        """
        Emit a record.
        Format the record and send it to the specified addressees.
        """
        try:
            import smtplib
            from email.utils import formatdate
            port = self.mailport
            if not port:
                port = smtplib.SMTP_PORT
            smtp = smtplib.SMTP(self.mailhost, port, timeout=self._timeout)
            msg = self.format(record)
            # Build a minimal RFC 2822 message by hand.
            msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
                            self.fromaddr,
                            ",".join(self.toaddrs),
                            self.getSubject(record),
                            formatdate(), msg)
            if self.username:
                if self.secure is not None:
                    # Upgrade the connection to TLS before authenticating.
                    smtp.ehlo()
                    smtp.starttls(*self.secure)
                    smtp.ehlo()
                smtp.login(self.username, self.password)
            smtp.sendmail(self.fromaddr, self.toaddrs, msg)
            smtp.quit()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
class NTEventLogHandler(logging.Handler):
    """
    A handler class which sends events to the NT Event Log. Adds a
    registry entry for the specified application name. If no dllname is
    provided, win32service.pyd (which contains some basic message
    placeholders) is used. Note that use of these placeholders will make
    your event logs big, as the entire message source is held in the log.
    If you want slimmer logs, you have to pass in the name of your own DLL
    which contains the message definitions you want to use in the event log.
    """
    def __init__(self, appname, dllname=None, logtype="Application"):
        logging.Handler.__init__(self)
        try:
            # Requires the pywin32 extensions; degrade gracefully below
            # when they are not installed.
            import win32evtlogutil, win32evtlog
            self.appname = appname
            self._welu = win32evtlogutil
            if not dllname:
                # Default message DLL: win32service.pyd, located relative
                # to the win32evtlogutil module's own file.
                dllname = os.path.split(self._welu.__file__)
                dllname = os.path.split(dllname[0])
                dllname = os.path.join(dllname[0], r'win32service.pyd')
            self.dllname = dllname
            self.logtype = logtype
            self._welu.AddSourceToRegistry(appname, dllname, logtype)
            self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE
            # Map standard logging levels onto NT event types; unknown
            # levels fall back to self.deftype in getEventType().
            self.typemap = {
                logging.DEBUG   : win32evtlog.EVENTLOG_INFORMATION_TYPE,
                logging.INFO    : win32evtlog.EVENTLOG_INFORMATION_TYPE,
                logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE,
                logging.ERROR   : win32evtlog.EVENTLOG_ERROR_TYPE,
                logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE,
            }
        except ImportError:
            print("The Python Win32 extensions for NT (service, event "\
                "logging) appear not to be available.")
            self._welu = None

    def getMessageID(self, record):
        """
        Return the message ID for the event record. If you are using your
        own messages, you could do this by having the msg passed to the
        logger being an ID rather than a formatting string. Then, in here,
        you could use a dictionary lookup to get the message ID. This
        version returns 1, which is the base message ID in win32service.pyd.
        """
        return 1

    def getEventCategory(self, record):
        """
        Return the event category for the record.
        Override this if you want to specify your own categories. This version
        returns 0.
        """
        return 0

    def getEventType(self, record):
        """
        Return the event type for the record.
        Override this if you want to specify your own types. This version does
        a mapping using the handler's typemap attribute, which is set up in
        __init__() to a dictionary which contains mappings for DEBUG, INFO,
        WARNING, ERROR and CRITICAL. If you are using your own levels you will
        either need to override this method or place a suitable dictionary in
        the handler's typemap attribute.
        """
        return self.typemap.get(record.levelno, self.deftype)

    def emit(self, record):
        """
        Emit a record.
        Determine the message ID, event category and event type. Then
        log the message in the NT event log.
        """
        # _welu is None when pywin32 was unavailable: silently do nothing.
        if self._welu:
            try:
                id = self.getMessageID(record)
                cat = self.getEventCategory(record)
                type = self.getEventType(record)
                msg = self.format(record)
                self._welu.ReportEvent(self.appname, id, cat, type, [msg])
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                self.handleError(record)

    def close(self):
        """
        Clean up this handler.
        You can remove the application name from the registry as a
        source of event log entries. However, if you do this, you will
        not be able to see the events as you intended in the Event Log
        Viewer - it needs to be able to access the registry to get the
        DLL name.
        """
        #self._welu.RemoveSourceFromRegistry(self.appname, self.logtype)
        logging.Handler.close(self)
class HTTPHandler(logging.Handler):
    """
    A class which sends records to a Web server, using either GET or
    POST semantics.
    """
    def __init__(self, host, url, method="GET"):
        """
        Initialize the instance with the host, the request URL, and the method
        ("GET" or "POST")

        :raises ValueError: if *method* is not "GET" or "POST".
        """
        logging.Handler.__init__(self)
        method = method.upper()
        if method not in ["GET", "POST"]:
            raise ValueError("method must be GET or POST")
        self.host = host
        self.url = url
        self.method = method

    def mapLogRecord(self, record):
        """
        Default implementation of mapping the log record into a dict
        that is sent as the CGI data. Overwrite in your class.
        Contributed by Franz Glasner.
        """
        return record.__dict__

    def emit(self, record):
        """
        Emit a record.
        Send the record to the Web server as a percent-encoded dictionary
        """
        try:
            # Python 2 modules; imported lazily at emit time rather than
            # at module import.
            import httplib, urllib
            host = self.host
            h = httplib.HTTP(host)
            url = self.url
            data = urllib.urlencode(self.mapLogRecord(record))
            if self.method == "GET":
                # Append the data to the query string, respecting any
                # query already present in the URL.
                if (url.find('?') >= 0):
                    sep = '&'
                else:
                    sep = '?'
                url = url + "%c%s" % (sep, data)
            h.putrequest(self.method, url)
            # support multiple hosts on one IP address...
            # need to strip optional :port from host, if present
            i = host.find(":")
            if i >= 0:
                host = host[:i]
            h.putheader("Host", host)
            if self.method == "POST":
                h.putheader("Content-type",
                            "application/x-www-form-urlencoded")
                h.putheader("Content-length", str(len(data)))
            h.endheaders(data if self.method == "POST" else None)
            h.getreply()    #can't do anything with the result
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
class BufferingHandler(logging.Handler):
    """
    Logging handler that accumulates records in an in-memory list.

    After every append the handler asks shouldFlush() whether the buffer
    ought to be emptied; subclasses decide what flushing actually means.
    """
    def __init__(self, capacity):
        """
        Create the handler with room for *capacity* records.
        """
        logging.Handler.__init__(self)
        self.capacity = capacity
        self.buffer = []

    def shouldFlush(self, record):
        """
        Report whether the buffer has reached capacity.

        Override to implement a different flushing policy.
        """
        return len(self.buffer) >= self.capacity

    def emit(self, record):
        """
        Store the record, then flush if the policy says so.
        """
        self.buffer.append(record)
        if not self.shouldFlush(record):
            return
        self.flush()

    def flush(self):
        """
        Discard all buffered records.

        This base version simply resets the buffer; subclasses override
        it to actually process the records.
        """
        self.acquire()
        try:
            self.buffer = []
        finally:
            self.release()

    def close(self):
        """
        Flush any remaining records, then close via the parent class.
        """
        try:
            self.flush()
        finally:
            logging.Handler.close(self)
class MemoryHandler(BufferingHandler):
    """
    Buffering handler that forwards its records to a target handler.

    The buffer is drained to the target whenever it fills up or when a
    record at ``flushLevel`` severity (or above) arrives.
    """
    def __init__(self, capacity, flushLevel=logging.ERROR, target=None):
        """
        Set up the buffer size, the severity that forces a flush, and an
        optional target handler.

        A MemoryHandler with no target (set here or later through
        setTarget()) silently drops whatever it flushes.
        """
        BufferingHandler.__init__(self, capacity)
        self.flushLevel = flushLevel
        self.target = target

    def shouldFlush(self, record):
        """
        Flush when the buffer is full, or the record is severe enough.
        """
        if len(self.buffer) >= self.capacity:
            return True
        return record.levelno >= self.flushLevel

    def setTarget(self, target):
        """
        Install *target* as the handler that receives flushed records.
        """
        self.target = target

    def flush(self):
        """
        Hand every buffered record to the target (if any), then empty
        the buffer. Override for different behaviour.
        """
        self.acquire()
        try:
            if self.target:
                for record in self.buffer:
                    self.target.handle(record)
                self.buffer = []
        finally:
            self.release()

    def close(self):
        """
        Flush, detach the target and release the buffer.
        """
        try:
            self.flush()
        finally:
            self.acquire()
            try:
                self.target = None
                BufferingHandler.close(self)
            finally:
                self.release()
| gpl-3.0 |
nicholasserra/sentry | src/sentry/api/serializers/models/grouptagvalue.py | 2 | 2101 | from __future__ import absolute_import
import operator
from django.db.models import Q
from sentry.api.serializers import Serializer, register
from sentry.models import EventUser, GroupTagValue, TagKey, TagValue
def parse_user_tag(value):
    """Split a ``lookup:value`` user tag into an EventUser filter kwarg.

    The shorthand lookups ``id`` and ``ip`` are normalized to the model
    field names ``ident`` and ``ip_address``; any other lookup is passed
    through unchanged.
    """
    lookup, tag_value = value.split(':', 1)
    aliases = {'id': 'ident', 'ip': 'ip_address'}
    return {aliases.get(lookup, lookup): tag_value}
@register(GroupTagValue)
class GroupTagValueSerializer(Serializer):
    """Serializer for GroupTagValue rows.

    Resolves human-readable labels in bulk: ``sentry:user`` tags are
    matched against EventUser rows, all other tags against TagValue rows.
    """
    def get_attrs(self, item_list, user):
        # NOTE(review): only the first item's project is consulted — this
        # assumes all items belong to the same project.
        project = item_list[0].project
        # One Q filter per "lookup:value" user tag; plain values without a
        # ':' cannot be parsed into an EventUser lookup and are skipped.
        user_lookups = [
            Q(**parse_user_tag(i.value))
            for i in item_list
            if i.key == 'sentry:user'
            and ':' in i.value
        ]
        tag_labels = {}
        if user_lookups:
            tag_labels.update({
                ('sentry:user', euser.tag_value): euser.get_label()
                for euser in EventUser.objects.filter(
                    # NOTE: `reduce` is the Python 2 builtin; Python 3
                    # would need `from functools import reduce`.
                    reduce(operator.or_, user_lookups),
                    project=project,
                )
            })
        other_lookups = [
            Q(key=i.key, value=i.value)
            for i in item_list
            if i.key != 'sentry:user'
        ]
        if other_lookups:
            tag_labels.update({
                (t.key, t.value): t.get_label()
                for t in TagValue.objects.filter(
                    reduce(operator.or_, other_lookups),
                    project=project,
                )
            })
        result = {}
        for item in item_list:
            try:
                label = tag_labels[(item.key, item.value)]
            except KeyError:
                # Fall back to the raw value when no label was resolved.
                label = item.value
            result[item] = {
                'name': label,
            }
        return result

    def serialize(self, obj, attrs, user):
        return {
            'name': attrs['name'],
            'key': TagKey.get_standardized_key(obj.key),
            'value': obj.value,
            'count': obj.times_seen,
            'lastSeen': obj.last_seen,
            'firstSeen': obj.first_seen,
        }
| bsd-3-clause |
mrquim/mrquimrepo | repo/script.module.trakt/lib/trakt/objects/movie.py | 4 | 5715 | from __future__ import absolute_import, division, print_function
from trakt.core.helpers import from_iso8601_datetime, to_iso8601_datetime,\
from_iso8601_date, to_iso8601_date, deprecated
from trakt.objects.core.helpers import update_attributes
from trakt.objects.video import Video
class Movie(Video):
    """Trakt movie item; extends Video with movie-specific metadata."""

    def __init__(self, client, keys, index=None):
        """Initialize all movie attributes to ``None``; _update() fills them."""
        super(Movie, self).__init__(client, keys, index)

        self.title = None
        """
        :type: :class:`~python:str`
        Title
        """

        self.year = None
        """
        :type: :class:`~python:int`
        Year
        """

        self.watchers = None  # trending
        """
        :type: :class:`~python:int`
        Number of active watchers (returned by the :code:`Trakt['movies'].trending()`
        and :code:`Trakt['shows'].trending()` methods)
        """

        self.tagline = None
        """
        :type: :class:`~python:str`
        Tagline
        """

        self.released = None
        """
        :type: :class:`~python:datetime.date`
        Release date
        """

        self.runtime = None
        """
        :type: :class:`~python:int`
        Duration (in minutes)
        """

        self.certification = None
        """
        :type: :class:`~python:str`
        Content certification (e.g :code:`PG-13`)
        """

        self.updated_at = None
        """
        :type: :class:`~python:datetime.datetime`
        Updated date/time
        """

        self.homepage = None
        """
        :type: :class:`~python:str`
        Homepage URL
        """

        self.trailer = None
        """
        :type: :class:`~python:str`
        Trailer URL
        """

        self.language = None
        """
        :type: :class:`~python:str`
        Language (for title, overview, etc..)
        """

        self.available_translations = None
        """
        :type: :class:`~python:list`
        Available translations (for title, overview, etc..)
        """

        self.genres = None
        """
        :type: :class:`~python:list`
        Genres
        """

    def to_identifier(self):
        """Return the movie identifier which is compatible with requests that require movie definitions.

        :return: Movie identifier/definition
        :rtype: :class:`~python:dict`
        """
        return {
            'ids': dict(self.keys),
            'title': self.title,
            'year': self.year
        }

    @deprecated('Movie.to_info() has been moved to Movie.to_dict()')
    def to_info(self):
        """**Deprecated:** use the :code:`to_dict()` method instead."""
        return self.to_dict()

    def to_dict(self):
        """Dump movie to a dictionary.

        :return: Movie dictionary
        :rtype: :class:`~python:dict`
        """
        result = self.to_identifier()

        result.update({
            'watched': 1 if self.is_watched else 0,
            'collected': 1 if self.is_collected else 0,
            'plays': self.plays if self.plays is not None else 0,
            'in_watchlist': self.in_watchlist if self.in_watchlist is not None else 0,
            'progress': self.progress,
            'last_watched_at': to_iso8601_datetime(self.last_watched_at),
            'collected_at': to_iso8601_datetime(self.collected_at),
            'paused_at': to_iso8601_datetime(self.paused_at)
        })

        if self.rating:
            result['rating'] = self.rating.value
            result['rated_at'] = to_iso8601_datetime(self.rating.timestamp)

        # Extended Info
        if self.released:
            result['released'] = to_iso8601_date(self.released)

        if self.updated_at:
            result['updated_at'] = to_iso8601_datetime(self.updated_at)

        if self.overview:
            result['overview'] = self.overview

        if self.tagline:
            result['tagline'] = self.tagline

        if self.runtime:
            result['runtime'] = self.runtime

        if self.certification:
            result['certification'] = self.certification

        if self.homepage:
            result['homepage'] = self.homepage

        if self.trailer:
            result['trailer'] = self.trailer

        if self.language:
            result['language'] = self.language

        if self.available_translations:
            result['available_translations'] = self.available_translations

        if self.genres:
            result['genres'] = self.genres

        return result

    def _update(self, info=None, **kwargs):
        """Merge attribute values from an ``info`` dictionary into this object."""
        if not info:
            return

        super(Movie, self)._update(info, **kwargs)

        update_attributes(self, info, [
            'title',

            # Trending
            'watchers',

            # Extended Info
            'tagline',
            'certification',
            'homepage',
            'trailer',
            'language',
            'available_translations',
            'genres'
        ])

        # Ensure `year` attribute is an integer (fixes incorrect type returned by search)
        if info.get('year'):
            self.year = int(info['year'])

        # Extended Info
        if info.get('runtime'):
            self.runtime = info['runtime']

        if 'released' in info:
            self.released = from_iso8601_date(info.get('released'))

        if 'updated_at' in info:
            self.updated_at = from_iso8601_datetime(info.get('updated_at'))

    @classmethod
    def _construct(cls, client, keys, info, index=None, **kwargs):
        """Build a Movie from raw API ``info`` and apply the update step."""
        movie = cls(client, keys, index=index)
        movie._update(info, **kwargs)

        return movie

    def __repr__(self):
        """Debug representation, e.g. ``<Movie 'Title' (2001)>``."""
        return '<Movie %r (%s)>' % (self.title, self.year)
| gpl-2.0 |
Urumasi/Flask-Bones | app/data/models/oauth.py | 1 | 5170 | from flask_login import UserMixin
from app.extensions import cache ,bcrypt
import bcrypt as bcr
from .. import db
from ..mixins import CRUDMixin
import datetime
from rauth import OAuth1Service, OAuth2Service
from flask import current_app, url_for, request, redirect, session
class Oauth(CRUDMixin, UserMixin, db.Model):
    """Social-login identity, linked to a local user account."""
    __tablename__ = 'oauth'
    id = db.Column(db.Integer, primary_key=True)
    #user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    #oauth = db.relationship("Oauth", back_populates="users")
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    # Provider-qualified id, e.g. "facebook$<id>" or "twitter$<id>"
    # (see the callback() methods below).
    social_id = db.Column(db.String(64), nullable=False, unique=True)
    nickname = db.Column(db.String(64), nullable=True)
    email = db.Column(db.String(64), nullable=True)
    # Czech field names: 'jmeno' = first name, 'prijmeni' = last name.
    jmeno = db.Column(db.String(128), nullable=False)
    prijmeni = db.Column(db.String(128), nullable=False)
    profile_url = db.Column(db.String(128), nullable=False)
    image_url = db.Column(db.String(128), nullable=False)
class OAuthSignIn(object):
    """Base class for provider-specific OAuth sign-in flows.

    Subclasses register themselves implicitly: get_provider() scans
    __subclasses__() on first use and caches one instance per provider.
    """
    providers = None

    def __init__(self, provider_name):
        self.provider_name = provider_name
        # Per-provider credentials come from app config OAUTH_CREDENTIALS.
        credentials = current_app.config['OAUTH_CREDENTIALS'][provider_name]
        self.consumer_id = credentials['id']
        self.consumer_secret = credentials['secret']

    def authorize(self):
        # Subclasses redirect the user to the provider's auth page.
        pass

    def callback(self):
        # Subclasses exchange callback data for the user's details.
        pass

    def get_callback_url(self):
        # Absolute URL the provider redirects back to after auth.
        return url_for('public.oauth_callback', provider=self.provider_name,
                       _external=True)

    @classmethod
    def get_provider(self, provider_name):
        # NOTE(review): the first parameter of this classmethod is named
        # `self` but actually receives the class object.
        if self.providers is None:
            self.providers = {}
            for provider_class in self.__subclasses__():
                provider = provider_class()
                self.providers[provider.provider_name] = provider
        return self.providers[provider_name]
class FacebookSignIn(OAuthSignIn):
    """OAuth2 sign-in flow against the Facebook Graph API."""
    def __init__(self):
        super(FacebookSignIn, self).__init__('facebook')
        self.service = OAuth2Service(
            name='facebook',
            client_id=self.consumer_id,
            client_secret=self.consumer_secret,
            authorize_url='https://graph.facebook.com/oauth/authorize',
            access_token_url='https://graph.facebook.com/oauth/access_token',
            base_url='https://graph.facebook.com/'
        )

    def authorize(self):
        return redirect(self.service.get_authorize_url(
            scope='email',
            response_type='code',
            redirect_uri=self.get_callback_url())
        )

    def callback(self):
        """Exchange the auth code for the user's profile.

        Returns a 7-tuple: (social_id, nickname, email, first name,
        last name, profile_url, image_url) — all None when Facebook
        did not send back a code.
        """
        if 'code' not in request.args:
            return None, None, None, None, None, None, None
        oauth_session = self.service.get_auth_session(
            data={'code': request.args['code'],
                  'grant_type': 'authorization_code',
                  'redirect_uri': self.get_callback_url()}
        )
        me = oauth_session.get('me?fields=id,email,name').json()
        profile_url = "http://facebook.com/profile.php?id=%s" % me['id']
        image_url = "http://graph.facebook.com/%s/picture" % me['id']
        return (
            'facebook$' + me['id'],
            # Nickname: email local part, or a synthetic "anon<id>" when
            # Facebook returned no email.
            me.get('email').split('@')[0] if me.get('email') is not None else "anon"+me['id'],
            me.get('email'),
            # NOTE(review): naive split — names with more than two words
            # lose everything past the second; a one-word name would raise
            # IndexError here.
            me['name'].split(' ')[0],
            me['name'].split(' ')[1],
            profile_url,
            image_url
        )
class TwitterSignIn(OAuthSignIn):
    """OAuth1 sign-in flow against the Twitter API."""
    def __init__(self):
        super(TwitterSignIn, self).__init__('twitter')
        self.service = OAuth1Service(
            name='twitter',
            consumer_key=self.consumer_id,
            consumer_secret=self.consumer_secret,
            request_token_url='https://api.twitter.com/oauth/request_token',
            authorize_url='https://api.twitter.com/oauth/authorize',
            access_token_url='https://api.twitter.com/oauth/access_token',
            base_url='https://api.twitter.com/1.1/'
        )

    def authorize(self):
        # OAuth1: obtain a request token first, stash it in the session
        # so callback() can complete the handshake.
        request_token = self.service.get_request_token(
            params={'oauth_callback': self.get_callback_url()}
        )
        session['request_token'] = request_token
        return redirect(self.service.get_authorize_url(request_token[0]))

    def callback(self):
        """Complete the OAuth1 handshake and fetch the user's profile.

        Returns the same 7-tuple shape as FacebookSignIn.callback();
        Twitter provides no email, so that slot is always None.
        """
        request_token = session.pop('request_token')
        if 'oauth_verifier' not in request.args:
            return None, None, None, None, None, None, None
        oauth_session = self.service.get_auth_session(
            request_token[0],
            request_token[1],
            data={'oauth_verifier': request.args['oauth_verifier']}
        )
        me = oauth_session.get('account/verify_credentials.json').json()
        social_id = 'twitter$' + str(me.get('id'))
        username = me.get('screen_name')
        name = me.get('name').split(' ')
        return (
            social_id,
            username,
            None,
            name[0],
            # Last name defaults to '' for single-word display names.
            name[1] if name.__len__()>1 else '',
            #'@%s' % me.get('screen_name') - display name (@Atheloses)
            "http://twitter.com/%s" % me.get('screen_name'),
            me.get('profile_image_url')
) | mit |
waytai/django | django/utils/timesince.py | 409 | 2671 | from __future__ import unicode_literals
import calendar
import datetime
from django.utils.html import avoid_wrapping
from django.utils.timezone import is_aware, utc
from django.utils.translation import ugettext, ungettext_lazy
# (seconds-per-unit, translatable "%d <unit>" string) pairs, ordered from
# the largest unit to the smallest; timesince() walks this list in order.
TIMESINCE_CHUNKS = (
    (60 * 60 * 24 * 365, ungettext_lazy('%d year', '%d years')),
    (60 * 60 * 24 * 30, ungettext_lazy('%d month', '%d months')),
    (60 * 60 * 24 * 7, ungettext_lazy('%d week', '%d weeks')),
    (60 * 60 * 24, ungettext_lazy('%d day', '%d days')),
    (60 * 60, ungettext_lazy('%d hour', '%d hours')),
    (60, ungettext_lazy('%d minute', '%d minutes'))
)
def timesince(d, now=None, reversed=False):
    """
    Takes two datetime objects and returns the time between d and now
    as a nicely formatted string, e.g. "10 minutes". If d occurs after now,
    then "0 minutes" is returned.

    Units used are years, months, weeks, days, hours, and minutes.
    Seconds and microseconds are ignored. Up to two adjacent units will be
    displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are
    possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not.

    Adapted from
    http://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
    """
    # Convert datetime.date to datetime.datetime for comparison.
    if not isinstance(d, datetime.datetime):
        d = datetime.datetime(d.year, d.month, d.day)
    if now and not isinstance(now, datetime.datetime):
        now = datetime.datetime(now.year, now.month, now.day)

    if not now:
        # Match d's timezone-awareness so the subtraction doesn't raise.
        now = datetime.datetime.now(utc if is_aware(d) else None)

    delta = (d - now) if reversed else (now - d)
    # Deal with leapyears by subtracting the number of leapdays
    delta -= datetime.timedelta(calendar.leapdays(d.year, now.year))
    # ignore microseconds
    since = delta.days * 24 * 60 * 60 + delta.seconds
    if since <= 0:
        # d is in the future compared to now, stop processing.
        return avoid_wrapping(ugettext('0 minutes'))
    # Find the largest unit with a non-zero count.
    for i, (seconds, name) in enumerate(TIMESINCE_CHUNKS):
        count = since // seconds
        if count != 0:
            break
    result = avoid_wrapping(name % count)
    if i + 1 < len(TIMESINCE_CHUNKS):
        # Now get the second item
        seconds2, name2 = TIMESINCE_CHUNKS[i + 1]
        count2 = (since - (seconds * count)) // seconds2
        if count2 != 0:
            result += ugettext(', ') + avoid_wrapping(name2 % count2)
    return result
def timeuntil(d, now=None):
    """
    Like timesince, but returns a string measuring the time until
    the given time.
    """
    # Same machinery as timesince, just with the delta direction flipped.
    return timesince(d, now, reversed=True)
| bsd-3-clause |
PetrDlouhy/django | django/views/generic/list.py | 471 | 7630 | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import InvalidPage, Paginator
from django.db.models.query import QuerySet
from django.http import Http404
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic.base import ContextMixin, TemplateResponseMixin, View
class MultipleObjectMixin(ContextMixin):
    """
    A mixin for views manipulating multiple objects.
    """
    allow_empty = True
    queryset = None
    model = None
    paginate_by = None
    paginate_orphans = 0
    context_object_name = None
    paginator_class = Paginator
    page_kwarg = 'page'
    ordering = None

    def get_queryset(self):
        """
        Return the list of items for this view.

        The return value must be an iterable and may be an instance of
        `QuerySet` in which case `QuerySet` specific behavior will be enabled.
        """
        if self.queryset is not None:
            queryset = self.queryset
            if isinstance(queryset, QuerySet):
                # .all() clones the queryset so per-request evaluation
                # doesn't pollute the class-level attribute's cache.
                queryset = queryset.all()
        elif self.model is not None:
            queryset = self.model._default_manager.all()
        else:
            raise ImproperlyConfigured(
                "%(cls)s is missing a QuerySet. Define "
                "%(cls)s.model, %(cls)s.queryset, or override "
                "%(cls)s.get_queryset()." % {
                    'cls': self.__class__.__name__
                }
            )
        ordering = self.get_ordering()
        if ordering:
            # A single field name is accepted as a plain string.
            if isinstance(ordering, six.string_types):
                ordering = (ordering,)
            queryset = queryset.order_by(*ordering)
        return queryset

    def get_ordering(self):
        """
        Return the field or fields to use for ordering the queryset.
        """
        return self.ordering

    def paginate_queryset(self, queryset, page_size):
        """
        Paginate the queryset, if needed.

        Returns a (paginator, page, object_list, has_other_pages) tuple.
        Raises Http404 for an invalid or out-of-range page.
        """
        paginator = self.get_paginator(
            queryset, page_size, orphans=self.get_paginate_orphans(),
            allow_empty_first_page=self.get_allow_empty())
        page_kwarg = self.page_kwarg
        # URLconf kwarg takes precedence over the querystring parameter.
        page = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1
        try:
            page_number = int(page)
        except ValueError:
            if page == 'last':
                page_number = paginator.num_pages
            else:
                raise Http404(_("Page is not 'last', nor can it be converted to an int."))
        try:
            page = paginator.page(page_number)
            return (paginator, page, page.object_list, page.has_other_pages())
        except InvalidPage as e:
            raise Http404(_('Invalid page (%(page_number)s): %(message)s') % {
                'page_number': page_number,
                'message': str(e)
            })

    def get_paginate_by(self, queryset):
        """
        Get the number of items to paginate by, or ``None`` for no pagination.
        """
        return self.paginate_by

    def get_paginator(self, queryset, per_page, orphans=0,
                      allow_empty_first_page=True, **kwargs):
        """
        Return an instance of the paginator for this view.
        """
        return self.paginator_class(
            queryset, per_page, orphans=orphans,
            allow_empty_first_page=allow_empty_first_page, **kwargs)

    def get_paginate_orphans(self):
        """
        Returns the maximum number of orphans extend the last page by when
        paginating.
        """
        return self.paginate_orphans

    def get_allow_empty(self):
        """
        Returns ``True`` if the view should display empty lists, and ``False``
        if a 404 should be raised instead.
        """
        return self.allow_empty

    def get_context_object_name(self, object_list):
        """
        Get the name of the item to be used in the context.
        """
        if self.context_object_name:
            return self.context_object_name
        elif hasattr(object_list, 'model'):
            # Derive e.g. "article_list" from the queryset's model.
            return '%s_list' % object_list.model._meta.model_name
        else:
            return None

    def get_context_data(self, **kwargs):
        """
        Get the context for this view.
        """
        queryset = kwargs.pop('object_list', self.object_list)
        page_size = self.get_paginate_by(queryset)
        context_object_name = self.get_context_object_name(queryset)
        if page_size:
            paginator, page, queryset, is_paginated = self.paginate_queryset(queryset, page_size)
            context = {
                'paginator': paginator,
                'page_obj': page,
                'is_paginated': is_paginated,
                'object_list': queryset
            }
        else:
            context = {
                'paginator': None,
                'page_obj': None,
                'is_paginated': False,
                'object_list': queryset
            }
        # Expose the list under its friendly name as well as 'object_list'.
        if context_object_name is not None:
            context[context_object_name] = queryset
        context.update(kwargs)
        return super(MultipleObjectMixin, self).get_context_data(**context)
class BaseListView(MultipleObjectMixin, View):
    """
    A base view for displaying a list of objects.
    """
    def get(self, request, *args, **kwargs):
        self.object_list = self.get_queryset()
        allow_empty = self.get_allow_empty()

        if not allow_empty:
            # When pagination is enabled and object_list is a queryset,
            # it's better to do a cheap query than to load the unpaginated
            # queryset in memory.
            if (self.get_paginate_by(self.object_list) is not None
                    and hasattr(self.object_list, 'exists')):
                is_empty = not self.object_list.exists()
            else:
                is_empty = len(self.object_list) == 0
            if is_empty:
                raise Http404(_("Empty list and '%(class_name)s.allow_empty' is False.")
                        % {'class_name': self.__class__.__name__})
        context = self.get_context_data()
        return self.render_to_response(context)
class MultipleObjectTemplateResponseMixin(TemplateResponseMixin):
    """
    Mixin for responding with a template and list of objects.
    """
    template_name_suffix = '_list'

    def get_template_names(self):
        """
        Return a list of template names to be used for the request. Must return
        a list. May not be called if render_to_response is overridden.
        """
        try:
            names = super(MultipleObjectTemplateResponseMixin, self).get_template_names()
        except ImproperlyConfigured:
            # If template_name isn't specified, it's not a problem --
            # we just start with an empty list.
            names = []

        # If the list is a queryset, we'll invent a template name based on the
        # app and model name. This name gets put at the end of the template
        # name list so that user-supplied names override the automatically-
        # generated ones.
        if hasattr(self.object_list, 'model'):
            opts = self.object_list.model._meta
            names.append("%s/%s%s.html" % (opts.app_label, opts.model_name, self.template_name_suffix))

        return names
class ListView(MultipleObjectTemplateResponseMixin, BaseListView):
    """
    Render some list of objects, set by `self.model` or `self.queryset`.
    `self.queryset` can actually be any iterable of items, not just a queryset.
    """
| bsd-3-clause |
HighwayStar/android_kernel_xiaomi_mocha | tools/perf/util/setup.py | 2079 | 1438 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    """build_ext that redirects output into Makefile-provided directories."""
    def finalize_options(self):
        _build_ext.finalize_options(self)
        # Override the distutils defaults with the paths the perf
        # Makefile exported via the environment (see getenv calls below).
        self.build_lib = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    """install_lib that installs from the Makefile-provided build dir."""
    def finalize_options(self):
        _install_lib.finalize_options(self)
        # Install straight from the externally-chosen build directory.
        self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

# All paths and prebuilt libraries are handed in by the perf Makefile
# through the environment.
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
liblk = getenv('LIBLK')

# Non-blank, non-comment lines of util/python-ext-sources list the C
# sources of the extension (Python 2 `file()` builtin).
ext_sources = [f.strip() for f in file('util/python-ext-sources')
               if len(f.strip()) > 0 and f[0] != '#']

perf = Extension('perf',
                 sources = ext_sources,
                 include_dirs = ['util/include'],
                 extra_compile_args = cflags,
                 extra_objects = [libtraceevent, liblk],
                 )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
edoko/AirKernel_NS_JBN | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Require exactly one argument: the object file whose unwind info to check.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# READELF in the environment can point at a cross-toolchain readelf.
readelf = os.getenv("READELF", "readelf")

# "<name>: [0xstart-0xend]" opens a function; "rlen=N" reports one
# unwind region's length, in `readelf -u` output.
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    # Verify that the unwind regions of `func` cover the whole function:
    # `slots` is the instruction-slot count derived from the symbol's address
    # range, `rlen_sum` the total of the rlen= region lengths seen for it.
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # `start`/`end` are module-level globals set while parsing readelf
        # output; used to synthesize a name for anonymous ranges.
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
# Walk readelf's unwind-info dump: each "<name>: [start-end]" line begins a
# new function, and the rlen= lines in between accumulate region lengths.
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # Close out the previous function before starting the new one.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # IA-64 bundles hold 3 instruction slots per 16 bytes.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Don't forget the last function in the dump.
check_func(func, slots, rlen_sum)
if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
nvoron23/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/test/regrtest.py | 55 | 37786 | #! /usr/bin/env python
"""Regression test.
This will find all modules whose name is "test_*" in the test
directory, and run them. Various command line options provide
additional facilities.
Command line options:
-v: verbose -- run tests in verbose mode with output to stdout
-w: verbose2 -- re-run failed tests in verbose mode
-q: quiet -- don't print anything except if a test fails
-x: exclude -- arguments are tests to *exclude*
-s: single -- run only a single test (see below)
-S: slow -- print the slowest 10 tests
-r: random -- randomize test execution order
-f: fromfile -- read names of tests to run from a file (see below)
-l: findleaks -- if GC is available detect tests that leak memory
-u: use -- specify which special resource intensive tests to run
-h: help -- print this text and exit
-t: threshold -- call gc.set_threshold(N)
-T: coverage -- turn on code coverage using the trace module
-D: coverdir -- Directory where coverage files are put
-N: nocoverdir -- Put coverage files alongside modules
-L: runleaks -- run the leaks(1) command just before exit
-R: huntrleaks -- search for reference leaks (needs debug build, v. slow)
-M: memlimit -- run very large memory-consuming tests
If non-option arguments are present, they are names for tests to run,
unless -x is given, in which case they are names for tests not to run.
If no test names are given, all tests are run.
-T turns on code coverage tracing with the trace module.
-D specifies the directory where coverage files are put.
-N Put coverage files alongside modules.
-s means to run only a single test and exit. This is useful when
doing memory analysis on the Python interpreter (which tend to consume
too many resources to run the full regression test non-stop). The
file /tmp/pynexttest is read to find the next test to run. If this
file is missing, the first test_*.py file in testdir or on the command
line is used. (actually tempfile.gettempdir() is used instead of
/tmp).
-f reads the names of tests from the file given as f's argument, one
or more test names per line. Whitespace is ignored. Blank lines and
lines beginning with '#' are ignored. This is especially useful for
whittling down failures involving interactions among tests.
-L causes the leaks(1) command to be run just before exit if it exists.
leaks(1) is available on Mac OS X and presumably on some other
FreeBSD-derived systems.
-R runs each test several times and examines sys.gettotalrefcount() to
see if the test appears to be leaking references. The argument should
be of the form stab:run:fname where 'stab' is the number of times the
test is run to let gettotalrefcount settle down, 'run' is the number
of times further it is run and 'fname' is the name of the file the
reports are written to. These parameters all have defaults (5, 4 and
"reflog.txt" respectively), so the minimal invocation is '-R ::'.
-M runs tests that require an exorbitant amount of memory. These tests
typically try to ascertain containers keep working when containing more than
2 billion objects, which only works on 64-bit systems. There are also some
tests that try to exhaust the address space of the process, which only makes
sense on 32-bit systems with at least 2Gb of memory. The passed-in memlimit,
which is a string in the form of '2.5Gb', determines how much memory the
tests will limit themselves to (but they may go slightly over.) The number
shouldn't be more memory than the machine has (including swap memory). You
should also keep in mind that swap memory is generally much, much slower
than RAM, and setting memlimit to all available RAM or higher will heavily
tax the machine. On the other hand, it is no use running these tests with a
limit of less than 2.5Gb, and many require more than 20Gb. Tests that expect
to use more than memlimit memory will be skipped. The big-memory tests
generally run very, very long.
-u is used to specify which special resource intensive tests to run,
such as those requiring large file support or network connectivity.
The argument is a comma-separated list of words indicating the
resources to test. Currently only the following are defined:
all - Enable all special resources.
audio - Tests that use the audio device. (There are known
cases of broken audio drivers that can crash Python or
even the Linux kernel.)
curses - Tests that use curses and will modify the terminal's
state and output modes.
lib2to3 - Run the tests for 2to3 (They take a while.)
largefile - It is okay to run some test that may create huge
files. These tests can take a long time and may
consume >2GB of disk space temporarily.
network - It is okay to run tests that use external network
resource, e.g. testing SSL support for sockets.
bsddb - It is okay to run the bsddb testsuite, which takes
a long time to complete.
decimal - Test the decimal module against a large suite that
verifies compliance with standards.
compiler - Test the compiler package by compiling all the source
in the standard library and test suite. This takes
a long time. Enabling this resource also allows
test_tokenize to verify round-trip lexing on every
file in the test library.
subprocess Run all tests for the subprocess module.
urlfetch - It is okay to download files required on testing.
To enable all resources except one, use '-uall,-<resource>'. For
example, to run all the tests except for the bsddb tests, give the
option '-uall,-bsddb'.
"""
import cStringIO
import getopt
import os
import random
import re
import sys
import time
import traceback
import warnings
# I see no other way to suppress these warnings;
# putting them in test_grammar.py has no effect:
warnings.filterwarnings("ignore", "hex/oct constants", FutureWarning,
                        ".*test.test_grammar$")
if sys.maxint > 0x7fffffff:
    # Also suppress them in <string>, because for 64-bit platforms,
    # that's where test_grammar.py hides them.
    warnings.filterwarnings("ignore", "hex/oct constants", FutureWarning,
                            "<string>")
# Ignore ImportWarnings that only occur in the source tree,
# (because of modules with the same name as source-directories in Modules/)
for mod in ("ctypes", "gzip", "zipfile", "tarfile", "encodings.zlib_codec",
            "test.test_zipimport", "test.test_zlib", "test.test_zipfile",
            "test.test_codecs", "test.string_tests"):
    warnings.filterwarnings(module=".*%s$" % (mod,),
                            action="ignore", category=ImportWarning)
# MacOSX (a.k.a. Darwin) has a default stack size that is too small
# for deeply recursive regular expressions. We see this as crashes in
# the Python test suite when running test_re.py and test_sre.py. The
# fix is to set the stack limit to 2048.
# This approach may also be useful for other Unixy platforms that
# suffer from small default stack limits.
if sys.platform == 'darwin':
    try:
        import resource
    except ImportError:
        pass
    else:
        soft, hard = resource.getrlimit(resource.RLIMIT_STACK)
        # Raise the soft limit to at least 1024*2048 bytes, capped at the
        # hard limit.
        newsoft = min(hard, max(soft, 1024*2048))
        resource.setrlimit(resource.RLIMIT_STACK, (newsoft, hard))
from test import test_support
# Resource names accepted by the -u/--use option (plus the pseudo-name 'all').
RESOURCE_NAMES = ('audio', 'curses', 'largefile', 'network', 'bsddb',
                  'decimal', 'compiler', 'subprocess', 'urlfetch')
def usage(code, msg=''):
    # Print the module docstring (which doubles as the usage text) plus an
    # optional message, then exit with the given status code.
    print __doc__
    if msg: print msg
    sys.exit(code)
def main(tests=None, testdir=None, verbose=0, quiet=False,
         exclude=False, single=False, randomize=False, fromfile=None,
         findleaks=False, use_resources=None, trace=False, coverdir='coverage',
         runleaks=False, huntrleaks=False, verbose2=False, print_slow=False):
    """Execute a test suite.
    This also parses command-line options and modifies its behavior
    accordingly.
    tests -- a list of strings containing test names (optional)
    testdir -- the directory in which to look for tests (optional)
    Users other than the Python test suite will certainly want to
    specify testdir; if it's omitted, the directory containing the
    Python test suite is searched for.
    If the tests argument is omitted, the tests listed on the
    command-line will be used. If that's empty, too, then all *.py
    files beginning with test_ will be used.
    The other default arguments (verbose, quiet, exclude,
    single, randomize, findleaks, use_resources, trace, coverdir, and
    print_slow) allow programmers calling main() directly to set the
    values that would normally be set by flags on the command line.
    """
    test_support.record_original_stdout(sys.stdout)
    # Command-line flags override the keyword-argument defaults above.
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hvgqxsSrf:lu:t:TD:NLR:wM:',
                                   ['help', 'verbose', 'quiet', 'exclude',
                                    'single', 'slow', 'random', 'fromfile',
                                    'findleaks', 'use=', 'threshold=', 'trace',
                                    'coverdir=', 'nocoverdir', 'runleaks',
                                    'huntrleaks=', 'verbose2', 'memlimit=',
                                    ])
    except getopt.error, msg:
        usage(2, msg)
    # Defaults
    if use_resources is None:
        use_resources = []
    for o, a in opts:
        if o in ('-h', '--help'):
            usage(0)
        elif o in ('-v', '--verbose'):
            verbose += 1
        elif o in ('-w', '--verbose2'):
            verbose2 = True
        elif o in ('-q', '--quiet'):
            quiet = True;
            verbose = 0
        elif o in ('-x', '--exclude'):
            exclude = True
        elif o in ('-s', '--single'):
            single = True
        elif o in ('-S', '--slow'):
            print_slow = True
        elif o in ('-r', '--randomize'):
            randomize = True
        elif o in ('-f', '--fromfile'):
            fromfile = a
        elif o in ('-l', '--findleaks'):
            findleaks = True
        elif o in ('-L', '--runleaks'):
            runleaks = True
        elif o in ('-t', '--threshold'):
            import gc
            gc.set_threshold(int(a))
        elif o in ('-T', '--coverage'):
            trace = True
        elif o in ('-D', '--coverdir'):
            coverdir = os.path.join(os.getcwd(), a)
        elif o in ('-N', '--nocoverdir'):
            coverdir = None
        elif o in ('-R', '--huntrleaks'):
            # -R stab:run:fname; empty fields fall back to 5, 4 and
            # "reflog.txt" respectively (see module docstring).
            huntrleaks = a.split(':')
            if len(huntrleaks) != 3:
                print a, huntrleaks
                usage(2, '-R takes three colon-separated arguments')
            if len(huntrleaks[0]) == 0:
                huntrleaks[0] = 5
            else:
                huntrleaks[0] = int(huntrleaks[0])
            if len(huntrleaks[1]) == 0:
                huntrleaks[1] = 4
            else:
                huntrleaks[1] = int(huntrleaks[1])
            if len(huntrleaks[2]) == 0:
                huntrleaks[2] = "reflog.txt"
        elif o in ('-M', '--memlimit'):
            test_support.set_memlimit(a)
        elif o in ('-u', '--use'):
            # Comma-separated resource names; a leading '-' removes one,
            # 'all' enables the whole RESOURCE_NAMES set.
            u = [x.lower() for x in a.split(',')]
            for r in u:
                if r == 'all':
                    use_resources[:] = RESOURCE_NAMES
                    continue
                remove = False
                if r[0] == '-':
                    remove = True
                    r = r[1:]
                if r not in RESOURCE_NAMES:
                    usage(1, 'Invalid -u/--use option: ' + a)
                if remove:
                    if r in use_resources:
                        use_resources.remove(r)
                elif r not in use_resources:
                    use_resources.append(r)
    if single and fromfile:
        usage(2, "-s and -f don't go together!")
    good = []
    bad = []
    skipped = []
    resource_denieds = []
    if findleaks:
        try:
            import gc
        except ImportError:
            print 'No GC available, disabling findleaks.'
            findleaks = False
        else:
            # Uncomment the line below to report garbage that is not
            # freeable by reference counting alone. By default only
            # garbage that is not collectable by the GC is reported.
            #gc.set_debug(gc.DEBUG_SAVEALL)
            found_garbage = []
    if single:
        # -s: the name of the next test to run is kept in a state file
        # between invocations.
        from tempfile import gettempdir
        filename = os.path.join(gettempdir(), 'pynexttest')
        try:
            fp = open(filename, 'r')
            next = fp.read().strip()
            tests = [next]
            fp.close()
        except IOError:
            pass
    if fromfile:
        tests = []
        fp = open(fromfile)
        for line in fp:
            guts = line.split() # assuming no test has whitespace in its name
            if guts and not guts[0].startswith('#'):
                tests.extend(guts)
        fp.close()
    # Strip .py extensions.
    if args:
        args = map(removepy, args)
    if tests:
        tests = map(removepy, tests)
    stdtests = STDTESTS[:]
    nottests = NOTTESTS[:]
    if exclude:
        # -x: the named tests become exclusions rather than selections.
        for arg in args:
            if arg in stdtests:
                stdtests.remove(arg)
        nottests[:0] = args
        args = []
    tests = tests or args or findtests(testdir, stdtests, nottests)
    if single:
        tests = tests[:1]
    if randomize:
        random.shuffle(tests)
    if trace:
        import trace
        tracer = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix],
                             trace=False, count=True)
    test_times = []
    test_support.verbose = verbose # Tell tests to be moderately quiet
    test_support.use_resources = use_resources
    save_modules = sys.modules.keys()
    # Main loop: run each test and bucket the result into good/bad/skipped.
    for test in tests:
        if not quiet:
            print test
            sys.stdout.flush()
        if trace:
            # If we're tracing code coverage, then we don't exit with status
            # if on a false return value from main.
            tracer.runctx('runtest(test, verbose, quiet,'
                          ' test_times, testdir)',
                          globals=globals(), locals=vars())
        else:
            try:
                ok = runtest(test, verbose, quiet, test_times,
                             testdir, huntrleaks)
            except KeyboardInterrupt:
                # print a newline separate from the ^C
                print
                break
            except:
                raise
            if ok > 0:
                good.append(test)
            elif ok == 0:
                bad.append(test)
            else:
                # ok < 0: skipped; -2 specifically means a required
                # resource (-u) was not enabled.
                skipped.append(test)
                if ok == -2:
                    resource_denieds.append(test)
        if findleaks:
            gc.collect()
            if gc.garbage:
                print "Warning: test created", len(gc.garbage),
                print "uncollectable object(s)."
                # move the uncollectable objects somewhere so we don't see
                # them again
                found_garbage.extend(gc.garbage)
                del gc.garbage[:]
        # Unload the newly imported modules (best effort finalization)
        for module in sys.modules.keys():
            if module not in save_modules and module.startswith("test."):
                test_support.unload(module)
    # The lists won't be sorted if running with -r
    good.sort()
    bad.sort()
    skipped.sort()
    # Summary reporting.
    if good and not quiet:
        if not bad and not skipped and len(good) > 1:
            print "All",
        print count(len(good), "test"), "OK."
    if print_slow:
        test_times.sort(reverse=True)
        print "10 slowest tests:"
        for time, test in test_times[:10]:
            print "%s: %.1fs" % (test, time)
    if bad:
        print count(len(bad), "test"), "failed:"
        printlist(bad)
    if skipped and not quiet:
        print count(len(skipped), "test"), "skipped:"
        printlist(skipped)
        # Compare actual skips against the per-platform expectations.
        e = _ExpectedSkips()
        plat = sys.platform
        if e.isvalid():
            surprise = set(skipped) - e.getexpected() - set(resource_denieds)
            if surprise:
                print count(len(surprise), "skip"), \
                      "unexpected on", plat + ":"
                printlist(surprise)
            else:
                print "Those skips are all expected on", plat + "."
        else:
            print "Ask someone to teach regrtest.py about which tests are"
            print "expected to get skipped on", plat + "."
    if verbose2 and bad:
        # -w: re-run the failures verbosely to aid debugging.
        print "Re-running failed tests in verbose mode"
        for test in bad:
            print "Re-running test %r in verbose mode" % test
            sys.stdout.flush()
            try:
                test_support.verbose = True
                ok = runtest(test, True, quiet, test_times, testdir,
                             huntrleaks)
            except KeyboardInterrupt:
                # print a newline separate from the ^C
                print
                break
            except:
                raise
    if single:
        # Record the next test to run in the state file, or remove the file
        # once the whole suite has been walked.
        alltests = findtests(testdir, stdtests, nottests)
        for i in range(len(alltests)):
            if tests[0] == alltests[i]:
                if i == len(alltests) - 1:
                    os.unlink(filename)
                else:
                    fp = open(filename, 'w')
                    fp.write(alltests[i+1] + '\n')
                    fp.close()
                break
        else:
            os.unlink(filename)
    if trace:
        r = tracer.results()
        r.write_results(show_missing=True, summary=True, coverdir=coverdir)
    if runleaks:
        os.system("leaks %d" % os.getpid())
    # Exit status reflects whether any test failed.
    sys.exit(len(bad) > 0)
# Tests that must always run, first and in this order.
STDTESTS = [
    'test_grammar',
    'test_opcodes',
    'test_dict',
    'test_builtin',
    'test_exceptions',
    'test_types',
    'test_unittest',
    'test_doctest',
    'test_doctest2',
   ]
# Modules in the test directory that are support code, not tests themselves.
NOTTESTS = [
    'test_support',
    'test_future1',
    'test_future2',
    ]
def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
    """Return a list of all applicable test modules."""
    if not testdir: testdir = findtestdir()
    suffix = os.extsep + "py"
    discovered = []
    for entry in os.listdir(testdir):
        # Only test_*.py files count as tests.
        if entry[:5] != "test_" or not entry.endswith(suffix):
            continue
        module = entry[:-len(suffix)]
        # Standard and non-test modules are handled separately.
        if module not in stdtests and module not in nottests:
            discovered.append(module)
    discovered.sort()
    return stdtests + discovered
def runtest(test, verbose, quiet, test_times,
            testdir=None, huntrleaks=False):
    """Run a single test.
    test -- the name of the test
    verbose -- if true, print more messages
    quiet -- if true, don't print 'skipped' messages (probably redundant)
    test_times -- a list of (time, test_name) pairs
    testdir -- test directory
    huntrleaks -- run multiple times to test for leaks; requires a debug
                  build; a triple corresponding to -R's three arguments
    Return:
        -2  test skipped because resource denied
        -1  test skipped for some other reason
         0  test failed
         1  test passed
    """
    try:
        return runtest_inner(test, verbose, quiet, test_times,
                             testdir, huntrleaks)
    finally:
        # Always sweep up files/directories the test may have left behind,
        # even when it failed or raised.
        cleanup_test_droppings(test, verbose)
def runtest_inner(test, verbose, quiet, test_times,
                  testdir=None, huntrleaks=False):
    # Import and execute one test module, mapping its outcome to the return
    # codes documented in runtest().
    test_support.unload(test)
    if not testdir:
        testdir = findtestdir()
    if verbose:
        capture_stdout = None
    else:
        # Non-verbose tests must be silent; capture their stdout so any
        # unexpected output can be reported as a failure below.
        capture_stdout = cStringIO.StringIO()
    try:
        save_stdout = sys.stdout
        try:
            if capture_stdout:
                sys.stdout = capture_stdout
            if test.startswith('test.'):
                abstest = test
            else:
                # Always import it from the test package
                abstest = 'test.' + test
            start_time = time.time()
            the_package = __import__(abstest, globals(), locals(), [])
            the_module = getattr(the_package, test)
            # Old tests run to completion simply as a side-effect of
            # being imported. For tests based on unittest or doctest,
            # explicitly invoke their test_main() function (if it exists).
            indirect_test = getattr(the_module, "test_main", None)
            if indirect_test is not None:
                indirect_test()
            if huntrleaks:
                dash_R(the_module, test, indirect_test, huntrleaks)
            test_time = time.time() - start_time
            test_times.append((test_time, test))
        finally:
            sys.stdout = save_stdout
    except test_support.ResourceDenied, msg:
        if not quiet:
            print test, "skipped --", msg
            sys.stdout.flush()
        return -2
    except (ImportError, test_support.TestSkipped), msg:
        if not quiet:
            print test, "skipped --", msg
            sys.stdout.flush()
        return -1
    except KeyboardInterrupt:
        # Never swallow a user interrupt.
        raise
    except test_support.TestFailed, msg:
        print "test", test, "failed --", msg
        sys.stdout.flush()
        return 0
    except:
        # Any other exception is a crash of the test itself.
        type, value = sys.exc_info()[:2]
        print "test", test, "crashed --", str(type) + ":", value
        sys.stdout.flush()
        if verbose:
            traceback.print_exc(file=sys.stdout)
            sys.stdout.flush()
        return 0
    else:
        # Except in verbose mode, tests should not print anything
        if verbose or huntrleaks:
            return 1
        output = capture_stdout.getvalue()
        if not output:
            return 1
        print "test", test, "produced unexpected output:"
        print "*" * 70
        print output
        print "*" * 70
        sys.stdout.flush()
        return 0
def cleanup_test_droppings(testname, verbose):
    import shutil
    # Try to clean up junk commonly left behind. While tests shouldn't leave
    # any files or directories behind, when a test fails that can be tedious
    # for it to arrange. The consequences can be especially nasty on Windows,
    # since if a test leaves a file open, it cannot be deleted by name (while
    # there's nothing we can do about that here either, we can display the
    # name of the offending test, which is a real help).
    for name in (test_support.TESTFN,
                 "db_home",
                ):
        if not os.path.exists(name):
            continue
        # Pick the remover matching the dropping's type.
        if os.path.isdir(name):
            kind, nuker = "directory", shutil.rmtree
        elif os.path.isfile(name):
            kind, nuker = "file", os.unlink
        else:
            raise SystemError("os.path says %r exists but is neither "
                              "directory nor file" % name)
        if verbose:
            print "%r left behind %s %r" % (testname, kind, name)
        try:
            nuker(name)
        except Exception, msg:
            # Best effort: report (e.g. an open file on Windows) but keep going.
            print >> sys.stderr, ("%r left behind %s %r and it couldn't be "
                "removed: %s" % (testname, kind, name, msg))
def dash_R(the_module, test, indirect_test, huntrleaks):
    # Reference-leak hunting (-R): run the test repeatedly and watch
    # sys.gettotalrefcount() for steady growth after a warm-up phase.
    # This code is hackish and inelegant, but it seems to do the job.
    import copy_reg, _abcoll, io
    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")
    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copy_reg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    abcs = {}
    modules = _abcoll, io
    for abc in [getattr(mod, a) for mod in modules for a in mod.__all__]:
        # XXX isinstance(abc, ABCMeta) leads to infinite recursion
        if not hasattr(abc, '_abc_registry'):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = obj._abc_registry.copy()
    if indirect_test:
        def run_the_test():
            indirect_test()
    else:
        # Legacy tests run on import, so re-running means reloading.
        def run_the_test():
            reload(the_module)
    deltas = []
    nwarmup, ntracked, fname = huntrleaks
    repcount = nwarmup + ntracked
    print >> sys.stderr, "beginning", repcount, "repetitions"
    print >> sys.stderr, ("1234567890"*(repcount//10 + 1))[:repcount]
    dash_R_cleanup(fs, ps, pic, abcs)
    for i in range(repcount):
        rc = sys.gettotalrefcount()
        run_the_test()
        sys.stderr.write('.')
        dash_R_cleanup(fs, ps, pic, abcs)
        if i >= nwarmup:
            # NOTE(review): the constant 2 presumably compensates for refs
            # created by this measurement loop itself -- unverified here.
            deltas.append(sys.gettotalrefcount() - rc - 2)
    print >> sys.stderr
    if any(deltas):
        msg = '%s leaked %s references, sum=%s' % (test, deltas, sum(deltas))
        print >> sys.stderr, msg
        refrep = open(fname, "a")
        print >> refrep, msg
        refrep.close()
def dash_R_cleanup(fs, ps, pic, abcs):
    # Restore the interpreter-global state saved by dash_R() and flush the
    # stdlib's internal caches, so refcount deltas between runs reflect real
    # leaks rather than cache growth.
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created
    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__
    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    # clear type cache
    sys._clear_type_cache()
    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()
    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    # Collect cyclic trash.
    gc.collect()
def findtestdir():
    """Return the directory this module lives in (os.curdir if unknown)."""
    if __name__ == '__main__':
        # Running as a script: locate ourselves via argv[0].
        where = sys.argv[0]
    else:
        where = __file__
    return os.path.dirname(where) or os.curdir
def removepy(name):
    """Strip a trailing os.extsep + 'py' (i.e. '.py') from a test name."""
    suffix = os.extsep + "py"
    return name[:-len(suffix)] if name.endswith(suffix) else name
def count(n, word):
    """Return 'n word' with naive pluralization: '1 test', '2 tests'."""
    plural = "" if n == 1 else "s"
    return "%d %s%s" % (n, word, plural)
def printlist(x, width=70, indent=4):
    """Print the elements of iterable x to stdout.
    Optional arg width (default 70) is the maximum line length.
    Optional arg indent (default 4) is the number of blanks with which to
    begin each line.
    """
    from textwrap import fill
    blanks = ' ' * indent
    # textwrap.fill does the wrapping; indent every output line equally.
    print fill(' '.join(map(str, x)), width,
               initial_indent=blanks, subsequent_indent=blanks)
# Map sys.platform to a string containing the basenames of tests
# expected to be skipped on that platform.
#
# Special cases:
# test_pep277
# The _ExpectedSkips constructor adds this to the set of expected
# skips if not os.path.supports_unicode_filenames.
# test_socket_ssl
# Controlled by test_socket_ssl.skip_expected. Requires the network
# resource, and a socket module with ssl support.
# test_timeout
# Controlled by test_timeout.skip_expected. Requires the network
# resource and a socket module.
#
# Tests that are expected to be skipped everywhere except on one platform
# are also handled separately.
_expectations = {
'win32':
"""
test__locale
test_bsddb185
test_bsddb3
test_commands
test_crypt
test_curses
test_dbm
test_dl
test_fcntl
test_fork1
test_epoll
test_gdbm
test_grp
test_ioctl
test_largefile
test_kqueue
test_mhlib
test_openpty
test_ossaudiodev
test_pipes
test_poll
test_posix
test_pty
test_pwd
test_resource
test_signal
test_threadsignals
test_timing
test_wait3
test_wait4
""",
'linux2':
"""
test_bsddb185
test_curses
test_dl
test_largefile
test_kqueue
test_ossaudiodev
""",
'mac':
"""
test_atexit
test_bsddb
test_bsddb185
test_bsddb3
test_bz2
test_commands
test_crypt
test_curses
test_dbm
test_dl
test_fcntl
test_fork1
test_epoll
test_grp
test_ioctl
test_largefile
test_locale
test_kqueue
test_mmap
test_openpty
test_ossaudiodev
test_poll
test_popen
test_popen2
test_posix
test_pty
test_pwd
test_resource
test_signal
test_sundry
test_tarfile
test_timing
""",
'unixware7':
"""
test_bsddb
test_bsddb185
test_dl
test_epoll
test_largefile
test_kqueue
test_minidom
test_openpty
test_pyexpat
test_sax
test_sundry
""",
'openunix8':
"""
test_bsddb
test_bsddb185
test_dl
test_epoll
test_largefile
test_kqueue
test_minidom
test_openpty
test_pyexpat
test_sax
test_sundry
""",
'sco_sv3':
"""
test_asynchat
test_bsddb
test_bsddb185
test_dl
test_fork1
test_epoll
test_gettext
test_largefile
test_locale
test_kqueue
test_minidom
test_openpty
test_pyexpat
test_queue
test_sax
test_sundry
test_thread
test_threaded_import
test_threadedtempfile
test_threading
""",
'riscos':
"""
test_asynchat
test_atexit
test_bsddb
test_bsddb185
test_bsddb3
test_commands
test_crypt
test_dbm
test_dl
test_fcntl
test_fork1
test_epoll
test_gdbm
test_grp
test_largefile
test_locale
test_kqueue
test_mmap
test_openpty
test_poll
test_popen2
test_pty
test_pwd
test_strop
test_sundry
test_thread
test_threaded_import
test_threadedtempfile
test_threading
test_timing
""",
'darwin':
"""
test__locale
test_bsddb
test_bsddb3
test_curses
test_epoll
test_gdbm
test_largefile
test_locale
test_kqueue
test_minidom
test_ossaudiodev
test_poll
""",
'sunos5':
"""
test_bsddb
test_bsddb185
test_curses
test_dbm
test_epoll
test_kqueue
test_gdbm
test_gzip
test_openpty
test_zipfile
test_zlib
""",
'hp-ux11':
"""
test_bsddb
test_bsddb185
test_curses
test_dl
test_epoll
test_gdbm
test_gzip
test_largefile
test_locale
test_kqueue
test_minidom
test_openpty
test_pyexpat
test_sax
test_zipfile
test_zlib
""",
'atheos':
"""
test_bsddb185
test_curses
test_dl
test_gdbm
test_epoll
test_largefile
test_locale
test_kqueue
test_mhlib
test_mmap
test_poll
test_popen2
test_resource
""",
'cygwin':
"""
test_bsddb185
test_bsddb3
test_curses
test_dbm
test_epoll
test_ioctl
test_kqueue
test_largefile
test_locale
test_ossaudiodev
test_socketserver
""",
'os2emx':
"""
test_audioop
test_bsddb185
test_bsddb3
test_commands
test_curses
test_dl
test_epoll
test_kqueue
test_largefile
test_mhlib
test_mmap
test_openpty
test_ossaudiodev
test_pty
test_resource
test_signal
""",
'freebsd4':
"""
test_bsddb
test_bsddb3
test_epoll
test_gdbm
test_locale
test_ossaudiodev
test_pep277
test_pty
test_socket_ssl
test_socketserver
test_tcl
test_timeout
test_urllibnet
test_multiprocessing
""",
'aix5':
"""
test_bsddb
test_bsddb185
test_bsddb3
test_bz2
test_dl
test_epoll
test_gdbm
test_gzip
test_kqueue
test_ossaudiodev
test_tcl
test_zipimport
test_zlib
""",
'openbsd3':
"""
test_bsddb
test_bsddb3
test_ctypes
test_dl
test_epoll
test_gdbm
test_locale
test_normalization
test_ossaudiodev
test_pep277
test_tcl
test_multiprocessing
""",
'netbsd3':
"""
test_bsddb
test_bsddb185
test_bsddb3
test_ctypes
test_curses
test_dl
test_epoll
test_gdbm
test_locale
test_ossaudiodev
test_pep277
test_tcl
test_multiprocessing
""",
}
_expectations['freebsd5'] = _expectations['freebsd4']
_expectations['freebsd6'] = _expectations['freebsd4']
_expectations['freebsd7'] = _expectations['freebsd4']
_expectations['freebsd8'] = _expectations['freebsd4']
class _ExpectedSkips:
    # Computes the set of tests expected to be skipped on the current
    # platform: the _expectations table entry refined by runtime probes
    # (OS features, optional modules, pointer width).
    def __init__(self):
        import os.path
        from test import test_timeout
        # self.valid stays False when sys.platform has no table entry.
        self.valid = False
        if sys.platform in _expectations:
            s = _expectations[sys.platform]
            self.expected = set(s.split())
            # expected to be skipped on every platform, even Linux
            self.expected.add('test_linuxaudiodev')
            if not os.path.supports_unicode_filenames:
                self.expected.add('test_pep277')
            try:
                from test import test_socket_ssl
            except ImportError:
                pass
            else:
                if test_socket_ssl.skip_expected:
                    self.expected.add('test_socket_ssl')
            if test_timeout.skip_expected:
                self.expected.add('test_timeout')
            # 64-bit platforms (sys.maxint == 2**63-1) skip test_imageop.
            if sys.maxint == 9223372036854775807L:
                self.expected.add('test_imageop')
            if not sys.platform in ("mac", "darwin"):
                MAC_ONLY = ["test_macos", "test_macostools", "test_aepack",
                            "test_plistlib", "test_scriptpackages",
                            "test_applesingle"]
                for skip in MAC_ONLY:
                    self.expected.add(skip)
            elif len(u'\0'.encode('unicode-internal')) == 4:
                # NOTE(review): 4-byte internal encoding indicates a UCS-4
                # build; presumably test_macostools needs UCS-2 -- confirm.
                self.expected.add("test_macostools")
            if sys.platform != "win32":
                # test_sqlite is only reliable on Windows where the library
                # is distributed with Python
                WIN_ONLY = ["test_unicode_file", "test_winreg",
                            "test_winsound", "test_startfile",
                            "test_sqlite"]
                for skip in WIN_ONLY:
                    self.expected.add(skip)
            if sys.platform != 'irix':
                IRIX_ONLY = ["test_imageop", "test_al", "test_cd", "test_cl",
                             "test_gl", "test_imgfile"]
                for skip in IRIX_ONLY:
                    self.expected.add(skip)
            if sys.platform != 'sunos5':
                self.expected.add('test_sunaudiodev')
                self.expected.add('test_nis')
            if not sys.py3kwarning:
                self.expected.add('test_py3kwarn')
            self.valid = True
    def isvalid(self):
        "Return true iff _ExpectedSkips knows about the current platform."
        return self.valid
    def getexpected(self):
        """Return set of test names we expect to skip on current platform.
        self.isvalid() must be true.
        """
        assert self.isvalid()
        return self.expected
if __name__ == '__main__':
    # Remove regrtest.py's own directory from the module search path. This
    # prevents relative imports from working, and relative imports will screw
    # up the testing framework. E.g. if both test.test_support and
    # test_support are imported, they will not contain the same globals, and
    # much of the testing framework relies on the globals in the
    # test.test_support module.
    mydir = os.path.abspath(os.path.normpath(os.path.dirname(sys.argv[0])))
    i = pathlen = len(sys.path)
    while i >= 0:
        i -= 1
        # NOTE(review): when i reaches 0 this decrements to -1 and re-checks
        # sys.path[-1] (the last entry) a second time -- harmless off-by-one.
        if os.path.abspath(os.path.normpath(sys.path[i])) == mydir:
            del sys.path[i]
    if len(sys.path) == pathlen:
        print 'Could not find %r in sys.path to remove it' % mydir
    main()
| apache-2.0 |
slayerjain/servo | components/script/dom/bindings/codegen/parser/tests/test_union.py | 134 | 5370 | import WebIDL
import itertools
import string
# Local re-implementation of itertools.chain, kept for symmetry with the
# combinations helpers below (itertools.chain itself dates back to Python 2.3).
def chain(*iterables):
    """Lazily yield every element of every iterable, in order.

    chain('ABC', 'DEF') --> A B C D E F
    """
    for one_iterable in iterables:
        for element in one_iterable:
            yield element
# We'd like to use itertools.combinations but it's 2.6 or higher.
def combinations(iterable, r):
    """Yield all r-length tuples of elements of *iterable*, in positional
    order, without repetition (clone of itertools.combinations).

    combinations('ABCD', 2) --> AB AC AD BC BD CD
    combinations(range(4), 3) --> 012 013 023 123
    """
    pool = tuple(iterable)
    n = len(pool)
    if r > n:
        return
    # Use list(range(r)), not bare range(r): the indices are mutated below,
    # which requires a real list (on Python 3, range() returns an immutable
    # object and the original code raised TypeError).
    indices = list(range(r))
    yield tuple(pool[i] for i in indices)
    while True:
        # Find the rightmost index that can still be advanced.
        for i in reversed(range(r)):
            if indices[i] != i + n - r:
                break
        else:
            return
        indices[i] += 1
        # Reset everything to its right to the following positions.
        for j in range(i+1, r):
            indices[j] = indices[j-1] + 1
        yield tuple(pool[i] for i in indices)
# We'd like to use itertools.combinations_with_replacement but it's 2.7 or
# higher.
def combinations_with_replacement(iterable, r):
    """Yield all r-length tuples of elements of *iterable*, allowing repeats
    (clone of itertools.combinations_with_replacement).

    combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC
    """
    pool = tuple(iterable)
    size = len(pool)
    if not size and r:
        return
    picks = [0] * r
    yield tuple(pool[p] for p in picks)
    while True:
        # Find the rightmost pick that can still be advanced.
        advance_at = -1
        for pos in reversed(range(r)):
            if picks[pos] != size - 1:
                advance_at = pos
                break
        if advance_at < 0:
            return
        # Advance it and level everything to its right at the new value.
        picks[advance_at:] = [picks[advance_at] + 1] * (r - advance_at)
        yield tuple(pool[p] for p in picks)
def WebIDLTest(parser, harness):
    """Check that WebIDL union types are accepted exactly when all of their
    member types are pairwise distinguishable.

    First parses a scratch interface to obtain a parsed IDL type object for
    each type name, then generates valid and invalid union combinations and
    feeds them back through the parser.
    """
    # Base type names to combine into unions.
    types = ["float",
             "double",
             "short",
             "unsigned short",
             "long",
             "unsigned long",
             "long long",
             "unsigned long long",
             "boolean",
             "byte",
             "octet",
             "DOMString",
             #"sequence<float>",
             "object",
             "ArrayBuffer",
             #"Date",
             "TestInterface1",
             "TestInterface2"]

    testPre = """
        interface TestInterface1 {
        };
        interface TestInterface2 {
        };
        """
    # Scratch interface: one readonly attribute per base type, so each type
    # name below gets a corresponding parsed IDL type object.
    interface = testPre + """
        interface PrepareForTest {
        """
    for (i, type) in enumerate(types):
        interface += string.Template("""
            readonly attribute ${type} attr${i};
            """).substitute(i=i, type=type)
    interface += """
        };
        """

    parser.parse(interface)
    results = parser.finish()

    # results[0]/[1] are TestInterface1/2; [2] is PrepareForTest.
    iface = results[2]

    parser = parser.reset()

    def typesAreDistinguishable(t):
        # True when every pair of types in t is mutually distinguishable.
        return all(u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2))
    def typesAreNotDistinguishable(t):
        # True when at least one pair in t is NOT distinguishable.
        return any(not u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2))
    def unionTypeName(t):
        # Unions are binary in the grammar, so longer combinations are
        # expressed by nesting: (a or b or c) -> ((a or b) or c).
        if len(t) > 2:
            t[0:2] = [unionTypeName(t[0:2])]
        return "(" + " or ".join(t) + ")"

    # typeCombinations is an iterable of tuples containing the name of the type
    # as a string and the parsed IDL type.
    def unionTypes(typeCombinations, predicate):
        for c in typeCombinations:
            if predicate(t[1] for t in c):
                yield unionTypeName([t[0] for t in c])

    # We limit invalid union types with a union member type to the subset of 3
    # types with one invalid combination.
    # typeCombinations is an iterable of tuples containing the name of the type
    # as a string and the parsed IDL type.
    def invalidUnionWithUnion(typeCombinations):
        for c in typeCombinations:
            if (typesAreNotDistinguishable((c[0][1], c[1][1])) and
                typesAreDistinguishable((c[1][1], c[2][1])) and
                typesAreDistinguishable((c[0][1], c[2][1]))):
                yield unionTypeName([t[0] for t in c])

    # Create a list of tuples containing the name of the type as a string and
    # the parsed IDL type.
    # NOTE(review): on Python 3, zip() returns a one-shot iterator but this
    # value is consumed by several combinations(...) calls below; this file
    # appears to target Python 2, where zip() returns a list -- confirm
    # before porting.
    types = zip(types, (a.type for a in iface.members))

    validUnionTypes = chain(unionTypes(combinations(types, 2), typesAreDistinguishable),
                            unionTypes(combinations(types, 3), typesAreDistinguishable))
    invalidUnionTypes = chain(unionTypes(combinations_with_replacement(types, 2), typesAreNotDistinguishable),
                              invalidUnionWithUnion(combinations(types, 3)))
    # All valid union types together should parse without error.
    interface = testPre + """
        interface TestUnion {
        """
    for (i, type) in enumerate(validUnionTypes):
        interface += string.Template("""
            void method${i}(${type} arg);
            ${type} returnMethod${i}();
            attribute ${type} attr${i};
            void arrayMethod${i}(${type}[] arg);
            ${type}[] arrayReturnMethod${i}();
            attribute ${type}[] arrayAttr${i};
            void optionalMethod${i}(${type}? arg);
            """).substitute(i=i, type=type)
    interface += """
        };
        """
    parser.parse(interface)
    results = parser.finish()

    parser = parser.reset()

    # Each invalid union type must make the parser throw.
    for invalid in invalidUnionTypes:
        interface = testPre + string.Template("""
            interface TestUnion {
              void method(${type} arg);
            };
            """).substitute(type=invalid)

        threw = False
        try:
            parser.parse(interface)
            results = parser.finish()
        except:
            threw = True

        harness.ok(threw, "Should have thrown.")

        parser = parser.reset()
| mpl-2.0 |
ruibarreira/linuxtrail | usr/lib/python2.7/encodings/iso8859_8.py | 593 | 11292 | """ Python Character Mapping Codec iso8859_8 generated from 'MAPPINGS/ISO8859/8859-8.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ISO-8859-8 codec built on the charmap tables in this module
    (file generated by gencodec.py -- do not hand-edit the mappings)."""

    def encode(self,input,errors='strict'):
        # 'errors' follows the standard codecs error-handling scheme.
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; the charmap is stateless, so each chunk is
    encoded independently."""

    def encode(self, input, final=False):
        # charmap_encode returns (bytes, length); only the bytes are needed.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; the charmap is stateless, so each chunk is
    decoded independently."""

    def decode(self, input, final=False):
        # charmap_decode returns (text, length); only the text is needed.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer: inherits encode() from Codec; no extra state needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader: inherits decode() from Codec; no extra state needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used by the codecs machinery to register this
    codec under the name 'iso8859-8'."""
    return codecs.CodecInfo(
        name='iso8859-8',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table

# 256-entry unicode string: decoding_table[byte] is the unicode character
# for that byte value; u'\ufffe' marks bytes undefined in ISO-8859-8.
decoding_table = (
    u'\x00' # 0x00 -> NULL
    u'\x01' # 0x01 -> START OF HEADING
    u'\x02' # 0x02 -> START OF TEXT
    u'\x03' # 0x03 -> END OF TEXT
    u'\x04' # 0x04 -> END OF TRANSMISSION
    u'\x05' # 0x05 -> ENQUIRY
    u'\x06' # 0x06 -> ACKNOWLEDGE
    u'\x07' # 0x07 -> BELL
    u'\x08' # 0x08 -> BACKSPACE
    u'\t' # 0x09 -> HORIZONTAL TABULATION
    u'\n' # 0x0A -> LINE FEED
    u'\x0b' # 0x0B -> VERTICAL TABULATION
    u'\x0c' # 0x0C -> FORM FEED
    u'\r' # 0x0D -> CARRIAGE RETURN
    u'\x0e' # 0x0E -> SHIFT OUT
    u'\x0f' # 0x0F -> SHIFT IN
    u'\x10' # 0x10 -> DATA LINK ESCAPE
    u'\x11' # 0x11 -> DEVICE CONTROL ONE
    u'\x12' # 0x12 -> DEVICE CONTROL TWO
    u'\x13' # 0x13 -> DEVICE CONTROL THREE
    u'\x14' # 0x14 -> DEVICE CONTROL FOUR
    u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
    u'\x16' # 0x16 -> SYNCHRONOUS IDLE
    u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
    u'\x18' # 0x18 -> CANCEL
    u'\x19' # 0x19 -> END OF MEDIUM
    u'\x1a' # 0x1A -> SUBSTITUTE
    u'\x1b' # 0x1B -> ESCAPE
    u'\x1c' # 0x1C -> FILE SEPARATOR
    u'\x1d' # 0x1D -> GROUP SEPARATOR
    u'\x1e' # 0x1E -> RECORD SEPARATOR
    u'\x1f' # 0x1F -> UNIT SEPARATOR
    u' ' # 0x20 -> SPACE
    u'!' # 0x21 -> EXCLAMATION MARK
    u'"' # 0x22 -> QUOTATION MARK
    u'#' # 0x23 -> NUMBER SIGN
    u'$' # 0x24 -> DOLLAR SIGN
    u'%' # 0x25 -> PERCENT SIGN
    u'&' # 0x26 -> AMPERSAND
    u"'" # 0x27 -> APOSTROPHE
    u'(' # 0x28 -> LEFT PARENTHESIS
    u')' # 0x29 -> RIGHT PARENTHESIS
    u'*' # 0x2A -> ASTERISK
    u'+' # 0x2B -> PLUS SIGN
    u',' # 0x2C -> COMMA
    u'-' # 0x2D -> HYPHEN-MINUS
    u'.' # 0x2E -> FULL STOP
    u'/' # 0x2F -> SOLIDUS
    u'0' # 0x30 -> DIGIT ZERO
    u'1' # 0x31 -> DIGIT ONE
    u'2' # 0x32 -> DIGIT TWO
    u'3' # 0x33 -> DIGIT THREE
    u'4' # 0x34 -> DIGIT FOUR
    u'5' # 0x35 -> DIGIT FIVE
    u'6' # 0x36 -> DIGIT SIX
    u'7' # 0x37 -> DIGIT SEVEN
    u'8' # 0x38 -> DIGIT EIGHT
    u'9' # 0x39 -> DIGIT NINE
    u':' # 0x3A -> COLON
    u';' # 0x3B -> SEMICOLON
    u'<' # 0x3C -> LESS-THAN SIGN
    u'=' # 0x3D -> EQUALS SIGN
    u'>' # 0x3E -> GREATER-THAN SIGN
    u'?' # 0x3F -> QUESTION MARK
    u'@' # 0x40 -> COMMERCIAL AT
    u'A' # 0x41 -> LATIN CAPITAL LETTER A
    u'B' # 0x42 -> LATIN CAPITAL LETTER B
    u'C' # 0x43 -> LATIN CAPITAL LETTER C
    u'D' # 0x44 -> LATIN CAPITAL LETTER D
    u'E' # 0x45 -> LATIN CAPITAL LETTER E
    u'F' # 0x46 -> LATIN CAPITAL LETTER F
    u'G' # 0x47 -> LATIN CAPITAL LETTER G
    u'H' # 0x48 -> LATIN CAPITAL LETTER H
    u'I' # 0x49 -> LATIN CAPITAL LETTER I
    u'J' # 0x4A -> LATIN CAPITAL LETTER J
    u'K' # 0x4B -> LATIN CAPITAL LETTER K
    u'L' # 0x4C -> LATIN CAPITAL LETTER L
    u'M' # 0x4D -> LATIN CAPITAL LETTER M
    u'N' # 0x4E -> LATIN CAPITAL LETTER N
    u'O' # 0x4F -> LATIN CAPITAL LETTER O
    u'P' # 0x50 -> LATIN CAPITAL LETTER P
    u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
    u'R' # 0x52 -> LATIN CAPITAL LETTER R
    u'S' # 0x53 -> LATIN CAPITAL LETTER S
    u'T' # 0x54 -> LATIN CAPITAL LETTER T
    u'U' # 0x55 -> LATIN CAPITAL LETTER U
    u'V' # 0x56 -> LATIN CAPITAL LETTER V
    u'W' # 0x57 -> LATIN CAPITAL LETTER W
    u'X' # 0x58 -> LATIN CAPITAL LETTER X
    u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
    u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
    u'[' # 0x5B -> LEFT SQUARE BRACKET
    u'\\' # 0x5C -> REVERSE SOLIDUS
    u']' # 0x5D -> RIGHT SQUARE BRACKET
    u'^' # 0x5E -> CIRCUMFLEX ACCENT
    u'_' # 0x5F -> LOW LINE
    u'`' # 0x60 -> GRAVE ACCENT
    u'a' # 0x61 -> LATIN SMALL LETTER A
    u'b' # 0x62 -> LATIN SMALL LETTER B
    u'c' # 0x63 -> LATIN SMALL LETTER C
    u'd' # 0x64 -> LATIN SMALL LETTER D
    u'e' # 0x65 -> LATIN SMALL LETTER E
    u'f' # 0x66 -> LATIN SMALL LETTER F
    u'g' # 0x67 -> LATIN SMALL LETTER G
    u'h' # 0x68 -> LATIN SMALL LETTER H
    u'i' # 0x69 -> LATIN SMALL LETTER I
    u'j' # 0x6A -> LATIN SMALL LETTER J
    u'k' # 0x6B -> LATIN SMALL LETTER K
    u'l' # 0x6C -> LATIN SMALL LETTER L
    u'm' # 0x6D -> LATIN SMALL LETTER M
    u'n' # 0x6E -> LATIN SMALL LETTER N
    u'o' # 0x6F -> LATIN SMALL LETTER O
    u'p' # 0x70 -> LATIN SMALL LETTER P
    u'q' # 0x71 -> LATIN SMALL LETTER Q
    u'r' # 0x72 -> LATIN SMALL LETTER R
    u's' # 0x73 -> LATIN SMALL LETTER S
    u't' # 0x74 -> LATIN SMALL LETTER T
    u'u' # 0x75 -> LATIN SMALL LETTER U
    u'v' # 0x76 -> LATIN SMALL LETTER V
    u'w' # 0x77 -> LATIN SMALL LETTER W
    u'x' # 0x78 -> LATIN SMALL LETTER X
    u'y' # 0x79 -> LATIN SMALL LETTER Y
    u'z' # 0x7A -> LATIN SMALL LETTER Z
    u'{' # 0x7B -> LEFT CURLY BRACKET
    u'|' # 0x7C -> VERTICAL LINE
    u'}' # 0x7D -> RIGHT CURLY BRACKET
    u'~' # 0x7E -> TILDE
    u'\x7f' # 0x7F -> DELETE
    u'\x80' # 0x80 -> <control>
    u'\x81' # 0x81 -> <control>
    u'\x82' # 0x82 -> <control>
    u'\x83' # 0x83 -> <control>
    u'\x84' # 0x84 -> <control>
    u'\x85' # 0x85 -> <control>
    u'\x86' # 0x86 -> <control>
    u'\x87' # 0x87 -> <control>
    u'\x88' # 0x88 -> <control>
    u'\x89' # 0x89 -> <control>
    u'\x8a' # 0x8A -> <control>
    u'\x8b' # 0x8B -> <control>
    u'\x8c' # 0x8C -> <control>
    u'\x8d' # 0x8D -> <control>
    u'\x8e' # 0x8E -> <control>
    u'\x8f' # 0x8F -> <control>
    u'\x90' # 0x90 -> <control>
    u'\x91' # 0x91 -> <control>
    u'\x92' # 0x92 -> <control>
    u'\x93' # 0x93 -> <control>
    u'\x94' # 0x94 -> <control>
    u'\x95' # 0x95 -> <control>
    u'\x96' # 0x96 -> <control>
    u'\x97' # 0x97 -> <control>
    u'\x98' # 0x98 -> <control>
    u'\x99' # 0x99 -> <control>
    u'\x9a' # 0x9A -> <control>
    u'\x9b' # 0x9B -> <control>
    u'\x9c' # 0x9C -> <control>
    u'\x9d' # 0x9D -> <control>
    u'\x9e' # 0x9E -> <control>
    u'\x9f' # 0x9F -> <control>
    u'\xa0' # 0xA0 -> NO-BREAK SPACE
    u'\ufffe' # 0xA1 -> UNDEFINED
    u'\xa2' # 0xA2 -> CENT SIGN
    u'\xa3' # 0xA3 -> POUND SIGN
    u'\xa4' # 0xA4 -> CURRENCY SIGN
    u'\xa5' # 0xA5 -> YEN SIGN
    u'\xa6' # 0xA6 -> BROKEN BAR
    u'\xa7' # 0xA7 -> SECTION SIGN
    u'\xa8' # 0xA8 -> DIAERESIS
    u'\xa9' # 0xA9 -> COPYRIGHT SIGN
    u'\xd7' # 0xAA -> MULTIPLICATION SIGN
    u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xac' # 0xAC -> NOT SIGN
    u'\xad' # 0xAD -> SOFT HYPHEN
    u'\xae' # 0xAE -> REGISTERED SIGN
    u'\xaf' # 0xAF -> MACRON
    u'\xb0' # 0xB0 -> DEGREE SIGN
    u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
    u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
    u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
    u'\xb4' # 0xB4 -> ACUTE ACCENT
    u'\xb5' # 0xB5 -> MICRO SIGN
    u'\xb6' # 0xB6 -> PILCROW SIGN
    u'\xb7' # 0xB7 -> MIDDLE DOT
    u'\xb8' # 0xB8 -> CEDILLA
    u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
    u'\xf7' # 0xBA -> DIVISION SIGN
    u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
    u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
    u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
    u'\ufffe' # 0xBF -> UNDEFINED
    u'\ufffe' # 0xC0 -> UNDEFINED
    u'\ufffe' # 0xC1 -> UNDEFINED
    u'\ufffe' # 0xC2 -> UNDEFINED
    u'\ufffe' # 0xC3 -> UNDEFINED
    u'\ufffe' # 0xC4 -> UNDEFINED
    u'\ufffe' # 0xC5 -> UNDEFINED
    u'\ufffe' # 0xC6 -> UNDEFINED
    u'\ufffe' # 0xC7 -> UNDEFINED
    u'\ufffe' # 0xC8 -> UNDEFINED
    u'\ufffe' # 0xC9 -> UNDEFINED
    u'\ufffe' # 0xCA -> UNDEFINED
    u'\ufffe' # 0xCB -> UNDEFINED
    u'\ufffe' # 0xCC -> UNDEFINED
    u'\ufffe' # 0xCD -> UNDEFINED
    u'\ufffe' # 0xCE -> UNDEFINED
    u'\ufffe' # 0xCF -> UNDEFINED
    u'\ufffe' # 0xD0 -> UNDEFINED
    u'\ufffe' # 0xD1 -> UNDEFINED
    u'\ufffe' # 0xD2 -> UNDEFINED
    u'\ufffe' # 0xD3 -> UNDEFINED
    u'\ufffe' # 0xD4 -> UNDEFINED
    u'\ufffe' # 0xD5 -> UNDEFINED
    u'\ufffe' # 0xD6 -> UNDEFINED
    u'\ufffe' # 0xD7 -> UNDEFINED
    u'\ufffe' # 0xD8 -> UNDEFINED
    u'\ufffe' # 0xD9 -> UNDEFINED
    u'\ufffe' # 0xDA -> UNDEFINED
    u'\ufffe' # 0xDB -> UNDEFINED
    u'\ufffe' # 0xDC -> UNDEFINED
    u'\ufffe' # 0xDD -> UNDEFINED
    u'\ufffe' # 0xDE -> UNDEFINED
    u'\u2017' # 0xDF -> DOUBLE LOW LINE
    u'\u05d0' # 0xE0 -> HEBREW LETTER ALEF
    u'\u05d1' # 0xE1 -> HEBREW LETTER BET
    u'\u05d2' # 0xE2 -> HEBREW LETTER GIMEL
    u'\u05d3' # 0xE3 -> HEBREW LETTER DALET
    u'\u05d4' # 0xE4 -> HEBREW LETTER HE
    u'\u05d5' # 0xE5 -> HEBREW LETTER VAV
    u'\u05d6' # 0xE6 -> HEBREW LETTER ZAYIN
    u'\u05d7' # 0xE7 -> HEBREW LETTER HET
    u'\u05d8' # 0xE8 -> HEBREW LETTER TET
    u'\u05d9' # 0xE9 -> HEBREW LETTER YOD
    u'\u05da' # 0xEA -> HEBREW LETTER FINAL KAF
    u'\u05db' # 0xEB -> HEBREW LETTER KAF
    u'\u05dc' # 0xEC -> HEBREW LETTER LAMED
    u'\u05dd' # 0xED -> HEBREW LETTER FINAL MEM
    u'\u05de' # 0xEE -> HEBREW LETTER MEM
    u'\u05df' # 0xEF -> HEBREW LETTER FINAL NUN
    u'\u05e0' # 0xF0 -> HEBREW LETTER NUN
    u'\u05e1' # 0xF1 -> HEBREW LETTER SAMEKH
    u'\u05e2' # 0xF2 -> HEBREW LETTER AYIN
    u'\u05e3' # 0xF3 -> HEBREW LETTER FINAL PE
    u'\u05e4' # 0xF4 -> HEBREW LETTER PE
    u'\u05e5' # 0xF5 -> HEBREW LETTER FINAL TSADI
    u'\u05e6' # 0xF6 -> HEBREW LETTER TSADI
    u'\u05e7' # 0xF7 -> HEBREW LETTER QOF
    u'\u05e8' # 0xF8 -> HEBREW LETTER RESH
    u'\u05e9' # 0xF9 -> HEBREW LETTER SHIN
    u'\u05ea' # 0xFA -> HEBREW LETTER TAV
    u'\ufffe' # 0xFB -> UNDEFINED
    u'\ufffe' # 0xFC -> UNDEFINED
    u'\u200e' # 0xFD -> LEFT-TO-RIGHT MARK
    u'\u200f' # 0xFE -> RIGHT-TO-LEFT MARK
    u'\ufffe' # 0xFF -> UNDEFINED
)

### Encoding table

# Inverse mapping (character -> byte), derived from decoding_table.
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
lattwood/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/layouttestresultsreader_unittest.py | 124 | 6488 | # Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.common.net.layouttestresults import LayoutTestResults
from webkitpy.common.host_mock import MockHost
from .layouttestresultsreader import LayoutTestResultsReader
class LayoutTestResultsReaderTest(unittest.TestCase):
    """Unit tests for LayoutTestResultsReader using MockHost/MockFileSystem."""

    def test_missing_layout_test_results(self):
        host = MockHost()
        reader = LayoutTestResultsReader(host, "/mock-results", "/var/logs")
        layout_tests_results_path = '/mock-results/full_results.json'
        unit_tests_results_path = '/mock-results/webkit_unit_tests_output.xml'
        # Paths mapped to None are treated as unreadable by the mock.
        host.filesystem = MockFileSystem({layout_tests_results_path: None,
                                          unit_tests_results_path: None})

        # Make sure that our filesystem mock functions as we expect.
        self.assertRaises(IOError, host.filesystem.read_text_file, layout_tests_results_path)
        self.assertRaises(IOError, host.filesystem.read_text_file, unit_tests_results_path)

        # layout_test_results shouldn't raise even if the results.json file is missing.
        self.assertIsNone(reader.results())

    def test_create_unit_test_results(self):
        host = MockHost()
        reader = LayoutTestResultsReader(host, "/mock-results", "/var/logs")
        unit_tests_results_path = '/mock-results/webkit_unit_tests_output.xml'
        # gtest-style XML: three passing test cases, zero failures.
        no_failures_xml = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="3" failures="0" disabled="0" errors="0" time="11.35" name="AllTests">
  <testsuite name="RenderTableCellDeathTest" tests="3" failures="0" disabled="0" errors="0" time="0.677">
    <testcase name="CanSetColumn" status="run" time="0.168" classname="RenderTableCellDeathTest" />
    <testcase name="CrashIfSettingUnsetColumnIndex" status="run" time="0.129" classname="RenderTableCellDeathTest" />
    <testcase name="CrashIfSettingUnsetRowIndex" status="run" time="0.123" classname="RenderTableCellDeathTest" />
  </testsuite>
</testsuites>"""
        host.filesystem = MockFileSystem({unit_tests_results_path: no_failures_xml})
        # No failures in the XML means an empty failing-test list.
        self.assertEqual(reader._create_unit_test_results(), [])

    def test_missing_unit_test_results_path(self):
        host = MockHost()
        reader = LayoutTestResultsReader(host, "/mock-results", "/var/logs")
        reader._create_layout_test_results = lambda: LayoutTestResults([])
        reader._create_unit_test_results = lambda: None
        # layout_test_results shouldn't raise even if the unit tests xml file is missing.
        self.assertIsNotNone(reader.results(), None)
        self.assertEqual(reader.results().failing_tests(), [])

    def test_layout_test_results(self):
        reader = LayoutTestResultsReader(MockHost(), "/mock-results", "/var/logs")
        # Missing or empty results file -> no results object.
        reader._read_file_contents = lambda path: None
        self.assertIsNone(reader.results())
        reader._read_file_contents = lambda path: ""
        self.assertIsNone(reader.results())
        reader._create_layout_test_results = lambda: LayoutTestResults([])
        results = reader.results()
        self.assertIsNotNone(results)
        self.assertEqual(results.failure_limit_count(), 30)  # This value matches RunTests.NON_INTERACTIVE_FAILURE_LIMIT_COUNT

    def test_archive_last_layout_test_results(self):
        host = MockHost()
        results_directory = "/mock-results"
        reader = LayoutTestResultsReader(host, results_directory, "/var/logs")
        patch = host.bugs.fetch_attachment(10001)
        host.filesystem = MockFileSystem()
        # Should fail because the results_directory does not exist.
        expected_logs = "/mock-results does not exist, not archiving.\n"
        archive = OutputCapture().assert_outputs(self, reader.archive, [patch], expected_logs=expected_logs)
        self.assertIsNone(archive)

        host.filesystem.maybe_make_directory(results_directory)
        self.assertTrue(host.filesystem.exists(results_directory))

        # Archiving moves the results directory into the zip, so it is gone after.
        self.assertIsNotNone(reader.archive(patch))
        self.assertFalse(host.filesystem.exists(results_directory))

    def test_archive_last_layout_test_results_with_relative_path(self):
        host = MockHost()
        results_directory = "/mock-checkout/layout-test-results"
        host.filesystem.maybe_make_directory(results_directory)
        host.filesystem.maybe_make_directory('/var/logs')
        self.assertTrue(host.filesystem.exists(results_directory))
        host.filesystem.chdir('/var')
        reader = LayoutTestResultsReader(host, results_directory, 'logs')
        patch = host.bugs.fetch_attachment(10001)
        # NOTE(review): this comment looks copy-pasted from the test above --
        # here the directory *does* exist and archiving is expected to succeed.
        # Should fail because the results_directory does not exist.
        self.assertIsNotNone(reader.archive(patch))
        self.assertEqual(host.workspace.source_path, results_directory)
        self.assertEqual(host.workspace.zip_path, '/var/logs/50000-layout-test-results.zip')
| bsd-3-clause |
felixfontein/ansible | test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py | 47 | 10112 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2015 Peter Sprygada, <psprygada@ansible.com>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
import shlex
import time
from ansible.module_utils.parsing.convert_bool import (
BOOLEANS_TRUE,
BOOLEANS_FALSE,
)
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils.six.moves import zip
def to_list(val):
    """Normalize *val* to a list.

    Lists and tuples are copied into a new list, None becomes an empty
    list, and any other value is wrapped in a single-element list.
    """
    if val is None:
        return []
    if isinstance(val, (list, tuple)):
        return list(val)
    return [val]
class FailedConditionsError(Exception):
    """Raised when one or more wait-for conditions never became true."""

    def __init__(self, msg, failed_conditions):
        super(FailedConditionsError, self).__init__(msg)
        # Raw conditional strings that were not satisfied.
        self.failed_conditions = failed_conditions
class FailedConditionalError(Exception):
    """Raised when a single conditional could not be applied to a result."""

    def __init__(self, msg, failed_conditional):
        super(FailedConditionalError, self).__init__(msg)
        # The raw conditional string that failed to apply.
        self.failed_conditional = failed_conditional
class AddCommandError(Exception):
    """Raised when a command cannot be queued (e.g. a duplicate)."""

    def __init__(self, msg, command):
        super(AddCommandError, self).__init__(msg)
        # The offending command string.
        self.command = command
class AddConditionError(Exception):
    """Raised when a conditional string cannot be parsed."""

    def __init__(self, msg, condition):
        super(AddConditionError, self).__init__(msg)
        # The offending conditional string.
        self.condition = condition
class Cli(object):
    """Thin wrapper around a network connection that builds Command objects
    and runs them in batches."""

    def __init__(self, connection):
        self.connection = connection
        # Fall back to "text" when the connection declares no default output.
        self.default_output = connection.default_output or "text"
        self._commands = list()

    @property
    def commands(self):
        # String form of every queued command.
        return [str(c) for c in self._commands]

    def __call__(self, commands, output=None):
        """Convert *commands* to Command objects and run them immediately."""
        objects = list()
        for cmd in to_list(commands):
            objects.append(self.to_command(cmd, output))
        return self.connection.run_commands(objects)

    def to_command(
        self, command, output=None, prompt=None, response=None, **kwargs
    ):
        """Coerce *command* into a Command; string prompts are compiled to
        escaped regular expressions, existing Commands pass through."""
        output = output or self.default_output

        if isinstance(command, Command):
            return command

        if isinstance(prompt, string_types):
            prompt = re.compile(re.escape(prompt))

        return Command(
            command, output, prompt=prompt, response=response, **kwargs
        )

    def add_commands(self, commands, output=None, **kwargs):
        # Queue commands for a later run_commands() call.
        for cmd in commands:
            self._commands.append(self.to_command(cmd, output, **kwargs))

    def run_commands(self):
        """Run all queued commands, attach each response to its Command,
        clear the queue, and return the responses."""
        responses = self.connection.run_commands(self._commands)
        for resp, cmd in zip(responses, self._commands):
            cmd.response = resp

        # wipe out the commands list to avoid issues if additional
        # commands are executed later
        self._commands = list()

        return responses
return responses
class Command(object):
    """Value object describing one CLI command: its text, desired output
    format, optional prompt/response pair, and any extra keyword options."""

    def __init__(
        self, command, output=None, prompt=None, response=None, **kwargs
    ):
        # The command text is kept twice on purpose: `command` is the
        # logical command, `command_string` is what __str__ renders.
        self.command = command
        self.command_string = command
        self.output = output
        self.prompt = prompt
        self.response = response
        # Any remaining keyword options are kept verbatim.
        self.args = kwargs

    def __str__(self):
        return self.command_string
class CommandRunner(object):
    """Runs a batch of CLI commands repeatedly until the registered
    conditionals are satisfied, or raises after retries are exhausted."""

    def __init__(self, module):
        self.module = module

        self.items = list()
        self.conditionals = set()

        self.commands = list()

        # Polling knobs: up to 10 attempts, 1 second apart.  match="all"
        # requires every conditional to pass; "any" stops on the first pass.
        self.retries = 10
        self.interval = 1

        self.match = "all"

        self._default_output = module.connection.default_output

    def add_command(
        self, command, output=None, prompt=None, response=None, **kwargs
    ):
        """Queue *command* for execution; duplicates raise AddCommandError."""
        if command in [str(c) for c in self.commands]:
            raise AddCommandError(
                "duplicated command detected", command=command
            )
        cmd = self.module.cli.to_command(
            command, output=output, prompt=prompt, response=response, **kwargs
        )
        self.commands.append(cmd)

    def get_command(self, command, output=None):
        """Return the recorded response for *command*; ValueError if unknown."""
        for cmd in self.commands:
            if cmd.command == command:
                return cmd.response
        raise ValueError("command '%s' not found" % command)

    def get_responses(self):
        # Responses in the same order the commands were added.
        return [cmd.response for cmd in self.commands]

    def add_conditional(self, condition):
        """Compile *condition* into a Conditional; parse failures surface as
        AddConditionError."""
        try:
            self.conditionals.add(Conditional(condition))
        except AttributeError as exc:
            raise AddConditionError(msg=str(exc), condition=condition)

    def run(self):
        """Poll the device until the conditionals pass.

        With match="any" the first satisfied conditional is returned.
        Raises FailedConditionsError if retries run out with conditions
        still unsatisfied.
        """
        while self.retries > 0:
            self.module.cli.add_commands(self.commands)
            responses = self.module.cli.run_commands()

            # Iterate a copy: satisfied conditionals are removed in-place.
            for item in list(self.conditionals):
                if item(responses):
                    if self.match == "any":
                        return item
                    self.conditionals.remove(item)

            if not self.conditionals:
                break

            time.sleep(self.interval)
            self.retries -= 1
        else:
            # while/else: we fell out because retries hit zero.
            failed_conditions = [item.raw for item in self.conditionals]
            errmsg = (
                "One or more conditional statements have not been satisfied"
            )
            raise FailedConditionsError(errmsg, failed_conditions)
class Conditional(object):
    """Used in command modules to evaluate waitfor conditions.

    A conditional is a whitespace-separated string of the form
    "<key> [not] <operator> <value>", e.g. "result[0].state eq up".
    Instances are callable: passing the command responses evaluates the
    condition against them and returns a bool.
    """

    # Canonical operator name -> accepted aliases in conditional strings.
    OPERATORS = {
        "eq": ["eq", "=="],
        "neq": ["neq", "ne", "!="],
        "gt": ["gt", ">"],
        "ge": ["ge", ">="],
        "lt": ["lt", "<"],
        "le": ["le", "<="],
        "contains": ["contains"],
        "matches": ["matches"],
    }

    def __init__(self, conditional, encoding=None):
        # `encoding` is unused but kept for backward compatibility.
        self.raw = conditional
        self.negate = False
        try:
            components = shlex.split(conditional)
            key, val = components[0], components[-1]
            op_components = components[1:-1]
            if "not" in op_components:
                self.negate = True
                op_components.pop(op_components.index("not"))
            op = op_components[0]
        except ValueError:
            raise ValueError("failed to parse conditional")
        self.key = key
        self.func = self._func(op)
        self.value = self._cast_value(val)

    def __call__(self, data):
        """Evaluate the condition against *data* (the command responses)."""
        value = self.get_value(dict(result=data))
        if not self.negate:
            return self.func(value)
        else:
            return not self.func(value)

    def _cast_value(self, value):
        """Convert the textual RHS of the conditional to bool/float/int/text."""
        if value in BOOLEANS_TRUE:
            return True
        elif value in BOOLEANS_FALSE:
            return False
        # Bug fix: the pattern previously read r"^\d+\.d+$" -- the second
        # 'd' was a literal (missing backslash), so decimals such as "1.5"
        # were never cast to float and were compared as strings instead.
        elif re.match(r"^\d+\.\d+$", value):
            return float(value)
        elif re.match(r"^\d+$", value):
            return int(value)
        else:
            return text_type(value)

    def _func(self, oper):
        # Resolve an operator alias to the bound comparison method.
        for func, operators in self.OPERATORS.items():
            if oper in operators:
                return getattr(self, func)
        raise AttributeError("unknown operator: %s" % oper)

    def get_value(self, result):
        """Extract the value addressed by self.key from *result*; wraps
        lookup failures in FailedConditionalError."""
        try:
            return self.get_json(result)
        except (IndexError, TypeError, AttributeError):
            msg = "unable to apply conditional to result"
            raise FailedConditionalError(msg, self.raw)

    def get_json(self, result):
        """Walk *result* following the dotted/bracketed path in self.key."""
        # Normalize ['x'] / ["x"] subscripts into dotted form before split.
        string = re.sub(r"\[[\'|\"]", ".", self.key)
        string = re.sub(r"[\'|\"]\]", ".", string)
        parts = re.split(r"\.(?=[^\]]*(?:\[|$))", string)
        for part in parts:
            match = re.findall(r"\[(\S+?)\]", part)
            if match:
                key = part[: part.find("[")]
                result = result[key]
                for m in match:
                    # Numeric subscripts index sequences; others are keys.
                    try:
                        m = int(m)
                    except ValueError:
                        m = str(m)
                    result = result[m]
            else:
                result = result.get(part)
        return result

    def number(self, value):
        """Coerce *value* to int or float for ordered comparisons."""
        if "." in str(value):
            return float(value)
        else:
            return int(value)

    def eq(self, value):
        return value == self.value

    def neq(self, value):
        return value != self.value

    def gt(self, value):
        return self.number(value) > self.value

    def ge(self, value):
        return self.number(value) >= self.value

    def lt(self, value):
        return self.number(value) < self.value

    def le(self, value):
        return self.number(value) <= self.value

    def contains(self, value):
        return str(self.value) in value

    def matches(self, value):
        match = re.search(self.value, value, re.M)
        return match is not None
| gpl-3.0 |
ResEl-TB/stages | search/tests.py | 1 | 2810 | from django.test import TestCase
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from post.models import Zone, Duree, Domaine, TypeContrat, Annonce
from .forms import SearchForm
class SearchForms(TestCase):
    """Tests for SearchForm validation and build_queryset() filtering."""

    def setUp(self):
        # Five identical adverts sharing one zone/duration/domain/contract.
        zone = Zone.objects.create(nom='île-de-france')
        duree = Duree.objects.create(duree='6 mois')
        domain = Domaine.objects.create(nom='Télécoms')
        contract = TypeContrat.objects.create(type_contrat='stage')
        for i in range(5):
            Annonce.objects.create(
                zone=zone,
                duree=duree,
                type_de_contrat=contract
            ).domain.add(domain)

    def test_search_empty(self):
        # An empty form is valid and matches every advert.
        form = SearchForm(data={})
        self.assertTrue(form.is_valid())
        self.assertEqual(len(form.build_queryset()), 5)

    def test_search_not_empty(self):
        # No advert carries a company name, so this filter matches nothing.
        form = SearchForm(data={'nom_entreprise': 'thales'})
        self.assertTrue(form.is_valid())
        self.assertEqual(len(form.build_queryset()), 0)

    def test_search_zone(self):
        # All five adverts share zone pk=1.
        form = SearchForm(data={'zone': 1})
        self.assertTrue(form.is_valid())
        self.assertEqual(len(form.build_queryset()), 5)

    def test_search_domain(self):
        # All five adverts share domain pk=1.
        form = SearchForm(data={'domain': [1]})
        self.assertTrue(form.is_valid())
        self.assertEqual(len(form.build_queryset()), 5)
class SearchViews(TestCase):
    """Tests for the search index view: login redirect, listing, filtering."""

    def setUp(self):
        # One user plus five identical adverts sharing zone/duration/domain.
        User.objects.create_user('john', 'john@smith.com', 'password')
        zone = Zone.objects.create(nom='île-de-france')
        duree = Duree.objects.create(duree='6 mois')
        domain = Domaine.objects.create(nom='Télécoms')
        contract = TypeContrat.objects.create(type_contrat='stage')
        for i in range(5):
            Annonce.objects.create(
                zone=zone,
                duree=duree,
                type_de_contrat=contract
            ).domain.add(domain)

    def test_index_login(self):
        # Anonymous users are redirected to the login page.
        response = self.client.get(reverse('search:index'))
        self.assertEqual(response.status_code, 302)

    def test_index_normal(self):
        self.client.login(username='john', password='password')
        response = self.client.get(reverse('search:index'))
        # Fixed: assertTrue(status_code, 200) passed 200 as the *msg*
        # argument, so the status was never actually checked.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'search/index.html')
        self.assertEqual(len(response.context.get('object_list')), 5)

    def test_index_search(self):
        self.client.login(username='john', password='password')
        response = self.client.get(reverse('search:index'), {'zone': '1'})
        # Fixed: same assertTrue-with-msg bug as above.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'search/index.html')
        self.assertEqual(len(response.context.get('object_list')), 5)
markslwong/tensorflow | tensorflow/python/debug/cli/curses_widgets_test.py | 137 | 9632 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for curses-based CLI widgets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.debug.cli import curses_widgets
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
RTL = debugger_cli_common.RichTextLines
CNH = curses_widgets.CursesNavigationHistory
class CNHTest(test_util.TensorFlowTestCase):
def testConstructorWorks(self):
CNH(10)
def testConstructorWithInvalidCapacityErrors(self):
with self.assertRaises(ValueError):
CNH(0)
with self.assertRaises(ValueError):
CNH(-1)
def testInitialStateIsCorrect(self):
nav_history = CNH(10)
self.assertEqual(0, nav_history.size())
self.assertFalse(nav_history.can_go_forward())
self.assertFalse(nav_history.can_go_back())
with self.assertRaisesRegexp(ValueError, "Empty navigation history"):
nav_history.go_back()
with self.assertRaisesRegexp(ValueError, "Empty navigation history"):
nav_history.go_forward()
with self.assertRaisesRegexp(ValueError, "Empty navigation history"):
nav_history.update_scroll_position(3)
def testAddOneItemWorks(self):
nav_history = CNH(10)
nav_history.add_item("foo", RTL(["bar"]), 0)
self.assertEqual(1, nav_history.size())
self.assertEqual(0, nav_history.pointer())
self.assertFalse(nav_history.can_go_forward())
self.assertFalse(nav_history.can_go_back())
output = nav_history.go_back()
self.assertEqual("foo", output.command)
self.assertEqual(["bar"], output.screen_output.lines)
self.assertEqual(0, output.scroll_position)
def testAddItemsBeyondCapacityWorks(self):
nav_history = CNH(2)
nav_history.add_item("foo", RTL(["foo_output"]), 0)
nav_history.add_item("bar", RTL(["bar_output"]), 0)
self.assertEqual(2, nav_history.size())
self.assertEqual(1, nav_history.pointer())
self.assertTrue(nav_history.can_go_back())
self.assertFalse(nav_history.can_go_forward())
nav_history.add_item("baz", RTL(["baz_output"]), 0)
self.assertEqual(2, nav_history.size())
self.assertEqual(1, nav_history.pointer())
self.assertTrue(nav_history.can_go_back())
self.assertFalse(nav_history.can_go_forward())
item = nav_history.go_back()
self.assertEqual("bar", item.command)
self.assertFalse(nav_history.can_go_back())
self.assertTrue(nav_history.can_go_forward())
item = nav_history.go_forward()
self.assertEqual("baz", item.command)
self.assertTrue(nav_history.can_go_back())
self.assertFalse(nav_history.can_go_forward())
def testAddItemFromNonLatestPointerPositionWorks(self):
nav_history = CNH(2)
nav_history.add_item("foo", RTL(["foo_output"]), 0)
nav_history.add_item("bar", RTL(["bar_output"]), 0)
nav_history.go_back()
nav_history.add_item("baz", RTL(["baz_output"]), 0)
self.assertEqual(2, nav_history.size())
self.assertEqual(1, nav_history.pointer())
self.assertTrue(nav_history.can_go_back())
self.assertFalse(nav_history.can_go_forward())
item = nav_history.go_back()
self.assertEqual("foo", item.command)
item = nav_history.go_forward()
self.assertEqual("baz", item.command)
def testUpdateScrollPositionOnLatestItemWorks(self):
nav_history = CNH(2)
nav_history.add_item("foo", RTL(["foo_out", "more_foo_out"]), 0)
nav_history.add_item("bar", RTL(["bar_out", "more_bar_out"]), 0)
nav_history.update_scroll_position(1)
nav_history.go_back()
item = nav_history.go_forward()
self.assertEqual("bar", item.command)
self.assertEqual(1, item.scroll_position)
def testUpdateScrollPositionOnOldItemWorks(self):
    """Scroll updates made while pointing at an older item stick to that item
    only, leaving the other items' scroll positions untouched."""
    nav_history = CNH(2)
    nav_history.add_item("foo", RTL(["foo_out", "more_foo_out"]), 0)
    nav_history.add_item("bar", RTL(["bar_out", "more_bar_out"]), 0)
    item = nav_history.go_back()
    self.assertEqual("foo", item.command)
    self.assertEqual(0, item.scroll_position)
    # Pointer is on "foo": the update must land on "foo".
    nav_history.update_scroll_position(1)
    nav_history.go_forward()
    item = nav_history.go_back()
    self.assertEqual("foo", item.command)
    self.assertEqual(1, item.scroll_position)
    # "bar" keeps its original scroll position.
    item = nav_history.go_forward()
    self.assertEqual("bar", item.command)
    self.assertEqual(0, item.scroll_position)
def testRenderWithEmptyHistoryWorks(self):
    """An empty history renders only the arrow bar, with no styled segments."""
    nav_history = CNH(2)
    output = nav_history.render(40, "prev", "next")
    self.assertEqual(1, len(output.lines))
    self.assertEqual(
        "| " + CNH.BACK_ARROW_TEXT + " " + CNH.FORWARD_ARROW_TEXT,
        output.lines[0])
    self.assertEqual({}, output.font_attr_segs)
def testRenderLatestWithSufficientLengthWorks(self):
    """At the latest entry only the back arrow is clickable and the command
    text carries the latest-command attribute."""
    nav_history = CNH(2)
    nav_history.add_item("foo", RTL(["foo_out", "more_foo_out"]), 0)
    nav_history.add_item("bar", RTL(["bar_out", "more_bar_out"]), 0)
    output = nav_history.render(
        40,
        "prev",
        "next",
        latest_command_attribute="green",
        old_command_attribute="yellow")
    self.assertEqual(1, len(output.lines))
    self.assertEqual(
        "| " + CNH.BACK_ARROW_TEXT + " " + CNH.FORWARD_ARROW_TEXT +
        " | bar",
        output.lines[0])
    # Segment 0: clickable back ("prev") arrow at columns 2-5.
    self.assertEqual(2, output.font_attr_segs[0][0][0])
    self.assertEqual(5, output.font_attr_segs[0][0][1])
    self.assertEqual("prev", output.font_attr_segs[0][0][2].content)
    # Segment 1: the command text "bar", styled as the latest command.
    self.assertEqual(12, output.font_attr_segs[0][1][0])
    self.assertEqual(15, output.font_attr_segs[0][1][1])
    self.assertEqual("green", output.font_attr_segs[0][1][2])
def testRenderOldButNotOldestWithSufficientLengthWorks(self):
    """In the middle of the history both arrows are clickable; the command
    shows a "(-1)" offset prefix and the old-command attribute."""
    nav_history = CNH(3)
    nav_history.add_item("foo", RTL(["foo_out", "more_foo_out"]), 0)
    nav_history.add_item("bar", RTL(["bar_out", "more_bar_out"]), 0)
    nav_history.add_item("baz", RTL(["baz_out", "more_baz_out"]), 0)
    nav_history.go_back()
    output = nav_history.render(
        40,
        "prev",
        "next",
        latest_command_attribute="green",
        old_command_attribute="yellow")
    self.assertEqual(1, len(output.lines))
    self.assertEqual(
        "| " + CNH.BACK_ARROW_TEXT + " " + CNH.FORWARD_ARROW_TEXT +
        " | (-1) bar",
        output.lines[0])
    # Segment 0: clickable back ("prev") arrow.
    self.assertEqual(2, output.font_attr_segs[0][0][0])
    self.assertEqual(5, output.font_attr_segs[0][0][1])
    self.assertEqual("prev", output.font_attr_segs[0][0][2].content)
    # Segment 1: clickable forward ("next") arrow.
    self.assertEqual(6, output.font_attr_segs[0][1][0])
    self.assertEqual(9, output.font_attr_segs[0][1][1])
    self.assertEqual("next", output.font_attr_segs[0][1][2].content)
    # Segments 2 and 3: "(-1)" offset prefix and command, both old-styled.
    self.assertEqual(12, output.font_attr_segs[0][2][0])
    self.assertEqual(17, output.font_attr_segs[0][2][1])
    self.assertEqual("yellow", output.font_attr_segs[0][2][2])
    self.assertEqual(17, output.font_attr_segs[0][3][0])
    self.assertEqual(20, output.font_attr_segs[0][3][1])
    self.assertEqual("yellow", output.font_attr_segs[0][3][2])
def testRenderOldestWithSufficientLengthWorks(self):
    """At the oldest entry only the forward arrow is clickable; the command
    shows the full "(-2)" offset and the old-command attribute."""
    nav_history = CNH(3)
    nav_history.add_item("foo", RTL(["foo_out", "more_foo_out"]), 0)
    nav_history.add_item("bar", RTL(["bar_out", "more_bar_out"]), 0)
    nav_history.add_item("baz", RTL(["baz_out", "more_baz_out"]), 0)
    nav_history.go_back()
    nav_history.go_back()
    output = nav_history.render(
        40,
        "prev",
        "next",
        latest_command_attribute="green",
        old_command_attribute="yellow")
    self.assertEqual(1, len(output.lines))
    self.assertEqual(
        "| " + CNH.BACK_ARROW_TEXT + " " + CNH.FORWARD_ARROW_TEXT +
        " | (-2) foo",
        output.lines[0])
    # Segment 0: only the forward ("next") arrow is clickable here.
    self.assertEqual(6, output.font_attr_segs[0][0][0])
    self.assertEqual(9, output.font_attr_segs[0][0][1])
    self.assertEqual("next", output.font_attr_segs[0][0][2].content)
    # Segments 1 and 2: "(-2)" offset prefix and command, both old-styled.
    self.assertEqual(12, output.font_attr_segs[0][1][0])
    self.assertEqual(17, output.font_attr_segs[0][1][1])
    self.assertEqual("yellow", output.font_attr_segs[0][1][2])
    self.assertEqual(17, output.font_attr_segs[0][2][0])
    self.assertEqual(20, output.font_attr_segs[0][2][1])
    self.assertEqual("yellow", output.font_attr_segs[0][2][2])
def testRenderWithInsufficientLengthWorks(self):
    """When the screen width is too small the command text is truncated."""
    nav_history = CNH(2)
    nav_history.add_item("long_command", RTL(["output"]), 0)
    output = nav_history.render(
        15,
        "prev",
        "next",
        latest_command_attribute="green",
        old_command_attribute="yellow")
    self.assertEqual(1, len(output.lines))
    # Only "lon" of "long_command" fits in 15 columns.
    self.assertEqual(
        "| " + CNH.BACK_ARROW_TEXT + " " + CNH.FORWARD_ARROW_TEXT +
        " | lon",
        output.lines[0])
    self.assertEqual(12, output.font_attr_segs[0][0][0])
    self.assertEqual(15, output.font_attr_segs[0][0][1])
    self.assertEqual("green", output.font_attr_segs[0][0][2])
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
    googletest.main()
| apache-2.0 |
astorije/ansible-modules-extras | packaging/os/pacman.py | 3 | 10316 | #!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2012, Afterburn <http://github.com/afterburn>
# (c) 2013, Aaron Bull Schaefer <aaron@elasticdog.com>
# (c) 2015, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: pacman
short_description: Manage packages with I(pacman)
description:
- Manage packages with the I(pacman) package manager, which is used by
Arch Linux and its variants.
version_added: "1.0"
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "'Aaron Bull Schaefer (@elasticdog)' <aaron@elasticdog.com>"
- "Afterburn"
notes: []
requirements: []
options:
name:
description:
- Name of the package to install, upgrade, or remove.
required: false
default: null
state:
description:
- Desired state of the package.
required: false
default: "present"
choices: ["present", "absent", "latest"]
recurse:
description:
- When removing a package, also remove its dependencies, provided
that they are not required by other packages and were not
explicitly installed by a user.
required: false
default: no
choices: ["yes", "no"]
version_added: "1.3"
force:
description:
- Force remove package, without any checks.
required: false
default: no
choices: ["yes", "no"]
version_added: "2.0"
update_cache:
description:
- Whether or not to refresh the master package lists. This can be
run as part of a package installation or as a separate step.
required: false
default: no
choices: ["yes", "no"]
upgrade:
description:
- Whether or not to upgrade the whole system
required: false
default: no
choices: ["yes", "no"]
version_added: "2.0"
'''
EXAMPLES = '''
# Install package foo
- pacman: name=foo state=present
# Upgrade package foo
- pacman: name=foo state=latest update_cache=yes
# Remove packages foo and bar
- pacman: name=foo,bar state=absent
# Recursively remove package baz
- pacman: name=baz state=absent recurse=yes
# Run the equivalent of "pacman -Sy" as a separate step
- pacman: update_cache=yes
# Run the equivalent of "pacman -Su" as a separate step
- pacman: upgrade=yes
# Run the equivalent of "pacman -Rdd", force remove package baz
- pacman: name=baz state=absent force=yes
'''
import json
import shlex
import os
import re
import sys
def get_version(pacman_output):
    """Take pacman -Qi or pacman -Si output and get the Version.

    Returns the version string, or None if no Version line is present.
    """
    for line in pacman_output.split('\n'):
        if 'Version' in line:
            # BUG FIX: split on the first ':' only. Pacman versions may carry
            # an epoch prefix (e.g. "1:5.0-2"); a plain split(':') truncated
            # such versions to the epoch alone, breaking up-to-date checks.
            return line.split(':', 1)[1].strip()
    return None
def query_package(module, pacman_path, name, state="present"):
    """Query the package status in both the local system and the repository. Returns a boolean to indicate if the package is installed, and a second boolean to indicate if the package is up-to-date."""
    if state == "present":
        # Ask the local database first (-Qi).
        lcmd = "%s -Qi %s" % (pacman_path, name)
        lrc, lstdout, lstderr = module.run_command(lcmd, check_rc=False)
        if lrc != 0:
            # package is not installed locally
            return False, False

        # get the version installed locally (if any)
        lversion = get_version(lstdout)

        # Ask the sync database (-Si) for the repository version.
        rcmd = "%s -Si %s" % (pacman_path, name)
        rrc, rstdout, rstderr = module.run_command(rcmd, check_rc=False)
        # get the version in the repository
        rversion = get_version(rstdout)

        if rrc == 0:
            # Return True to indicate that the package is installed locally, and the result of the version number comparison
            # to determine if the package is up-to-date.
            return True, (lversion == rversion)

    # Fall-through: package installed but absent from the repositories, or a
    # state other than "present" was requested.
    return False, False
def update_package_db(module, pacman_path):
    """Refresh the master package lists ("pacman -Sy").

    Returns True on success; on failure the module exits via fail_json.
    """
    rc, _stdout, _stderr = module.run_command("%s -Sy" % pacman_path,
                                              check_rc=False)
    if rc != 0:
        module.fail_json(msg="could not update package db")
    return True
def upgrade(module, pacman_path):
    """Perform a full system upgrade ("pacman -Suq") if upgrades are pending.

    Always exits the module: changed=True after a successful upgrade,
    changed=False when nothing needs upgrading, fail_json on error.
    """
    # "pacman -Qqu" lists pending upgrades; a non-zero rc means none pending.
    rc, stdout, stderr = module.run_command("%s -Qqu" % pacman_path,
                                            check_rc=False)
    if rc != 0:
        module.exit_json(changed=False, msg='Nothing to upgrade')

    rc, stdout, stderr = module.run_command("%s -Suq --noconfirm" % pacman_path,
                                            check_rc=False)
    if rc != 0:
        module.fail_json(msg="could not upgrade")
    module.exit_json(changed=True, msg='System upgraded')
def remove_packages(module, pacman_path, packages):
    """Remove the given packages, honoring the 'recurse' and 'force' options.

    BUG FIX: the original file defined remove_packages() twice; the second
    definition shadowed the first, so the 'recurse' option was silently
    ignored. The two definitions are merged here so both options take effect.
    """
    if module.params["force"]:
        # -Rdd removes the package without any dependency checks.
        args = "Rdd"
    elif module.params["recurse"]:
        # -Rs also removes dependencies not required by other packages.
        args = "Rs"
    else:
        args = "R"

    remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        installed, updated = query_package(module, pacman_path, package)
        if not installed:
            continue

        cmd = "%s -%s %s --noconfirm" % (pacman_path, args, package)
        rc, stdout, stderr = module.run_command(cmd, check_rc=False)

        if rc != 0:
            module.fail_json(msg="failed to remove %s" % (package))

        remove_c += 1

    if remove_c > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)

    module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, pacman_path, state, packages, package_files):
    """Install the given packages, from a repository or from local files.

    package_files[i] holds a local .pkg.tar.xz path for packages[i], or None
    to install from the configured repositories. Exits the module with the
    number of packages installed.
    """
    num_installed = 0
    for idx, pkg in enumerate(packages):
        installed, up_to_date = query_package(module, pacman_path, pkg)
        # Skip packages that already satisfy the requested state.
        if installed and (state == 'present' or (state == 'latest' and up_to_date)):
            continue

        # -U installs a local package file, -S pulls from the repositories.
        if package_files[idx]:
            params = '-U %s' % package_files[idx]
        else:
            params = '-S %s' % pkg

        rc, stdout, stderr = module.run_command(
            "%s %s --noconfirm" % (pacman_path, params), check_rc=False)
        if rc != 0:
            module.fail_json(msg="failed to install %s" % (pkg))
        num_installed += 1

    if num_installed > 0:
        module.exit_json(changed=True,
                         msg="installed %s package(s)" % (num_installed))
    module.exit_json(changed=False, msg="package(s) already installed")
def check_packages(module, pacman_path, packages, state):
    """Check-mode dry run: report which packages would change state.

    Always exits the module with changed=True/False and a summary message.
    """
    would_be_changed = []
    for package in packages:
        installed, updated = query_package(module, pacman_path, package)
        if ((state in ["present", "latest"] and not installed) or
                (state == "absent" and installed) or
                (state == "latest" and not updated)):
            would_be_changed.append(package)

    if would_be_changed:
        if state == "absent":
            state = "removed"
        module.exit_json(changed=True, msg="%s package(s) would be %s" % (
            len(would_be_changed), state))
    else:
        # BUG FIX: was exit_json(change=False, ...) — the misspelled keyword
        # meant check mode never reported the standard 'changed' field.
        module.exit_json(changed=False, msg="package(s) already %s" % state)
def main():
    """Module entry point: parse parameters and dispatch to the
    update/upgrade/install/remove helpers defined above."""
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(aliases=['pkg']),
            state = dict(default='present', choices=['present', 'installed', "latest", 'absent', 'removed']),
            recurse = dict(default=False, type='bool'),
            force = dict(default=False, type='bool'),
            upgrade = dict(default=False, type='bool'),
            update_cache = dict(default=False, aliases=['update-cache'], type='bool')),
        required_one_of = [['name', 'update_cache', 'upgrade']],
        supports_check_mode = True)

    pacman_path = module.get_bin_path('pacman', True)
    if not os.path.exists(pacman_path):
        module.fail_json(msg="cannot find pacman, in path %s" % (pacman_path))

    p = module.params

    # normalize the state parameter
    if p['state'] in ['present', 'installed']:
        p['state'] = 'present'
    elif p['state'] in ['absent', 'removed']:
        p['state'] = 'absent'

    if p["update_cache"] and not module.check_mode:
        update_package_db(module, pacman_path)
        # A bare update_cache run (no package names) is complete at this point.
        if not p['name']:
            module.exit_json(changed=True, msg='updated the package master lists')

    # In check mode a bare update_cache run only reports what would happen.
    if p['update_cache'] and module.check_mode and not p['name']:
        module.exit_json(changed=True, msg='Would have updated the package cache')

    if p['upgrade']:
        upgrade(module, pacman_path)

    if p['name']:
        pkgs = p['name'].split(',')

        pkg_files = []
        for i, pkg in enumerate(pkgs):
            if pkg.endswith('.pkg.tar.xz'):
                # The package given is a filename, extract the raw pkg name from
                # it and store the filename
                pkg_files.append(pkg)
                pkgs[i] = re.sub('-[0-9].*$', '', pkgs[i].split('/')[-1])
            else:
                pkg_files.append(None)

        if module.check_mode:
            check_packages(module, pacman_path, pkgs, p['state'])

        if p['state'] in ['present', 'latest']:
            install_packages(module, pacman_path, p['state'], pkgs, pkg_files)
        elif p['state'] == 'absent':
            remove_packages(module, pacman_path, pkgs)
# import module snippets
from ansible.module_utils.basic import *
# Run the module only when executed directly (Ansible runs this file on the target).
if __name__ == "__main__":
    main()
| gpl-3.0 |
pgleeson/TempRepo3 | lib/jython/Lib/SocketServer.py | 10 | 19275 | """Generic socket server classes.
This module tries to capture the various aspects of defining a server:
For socket-based servers:
- address family:
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
- AF_UNIX: Unix domain sockets
- others, e.g. AF_DECNET are conceivable (see <socket.h>
- socket type:
- SOCK_STREAM (reliable stream, e.g. TCP)
- SOCK_DGRAM (datagrams, e.g. UDP)
For request-based servers (including socket-based):
- client address verification before further looking at the request
(This is actually a hook for any processing that needs to look
at the request before anything else, e.g. logging)
- how to handle multiple requests:
- synchronous (one request is handled at a time)
- forking (each request is handled by a new process)
- threading (each request is handled by a new thread)
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but
saves some typing.  (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
synchronous servers of four types:
+------------+
| BaseServer |
+------------+
|
v
+-----------+ +------------------+
| TCPServer |------->| UnixStreamServer |
+-----------+ +------------------+
|
v
+-----------+ +--------------------+
| UDPServer |------->| UnixDatagramServer |
+-----------+ +--------------------+
Note that UnixDatagramServer derives from UDPServer, not from
UnixStreamServer -- the only difference between an IP and a Unix
stream server is the address family, which is simply repeated in both
unix server classes.
Forking and threading versions of each type of server can be created
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
instance, a threading UDP server class is created as follows:
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
The Mix-in class must come first, since it overrides a method defined
in UDPServer! Setting the various member variables also changes
the behavior of the underlying server mechanism.
To implement a service, you must derive a class from
BaseRequestHandler and redefine its handle() method. You can then run
various versions of the service by combining one of the server classes
with your request handler class.
The request handler class must be different for datagram or stream
services. This can be hidden by using the request handler
subclasses StreamRequestHandler or DatagramRequestHandler.
Of course, you still have to use your head!
For instance, it makes no sense to use a forking server if the service
contains state in memory that can be modified by requests (since the
modifications in the child process would never reach the initial state
kept in the parent process and passed to each child). In this case,
you can use a threading server, but you will probably have to use
locks to avoid two requests that come in nearly simultaneously to apply
conflicting changes to the server state.
On the other hand, if you are building e.g. an HTTP server, where all
data is stored externally (e.g. in the file system), a synchronous
class will essentially render the service "deaf" while one request is
being handled -- which may be for a very long time if a client is slow
to read all the data it has requested.  Here a threading or forking
server is appropriate.
In some cases, it may be appropriate to process part of a request
synchronously, but to finish processing in a forked child depending on
the request data. This can be implemented by using a synchronous
server and doing an explicit fork in the request handler class
handle() method.
Another approach to handling multiple simultaneous requests in an
environment that supports neither threads nor fork (or where these are
too expensive or inappropriate for the service) is to maintain an
explicit table of partially finished requests and to use select() to
decide which request to work on next (or whether to handle a new
incoming request). This is particularly important for stream services
where each client can potentially be connected for a long time (if
threads or subprocesses cannot be used).
Future work:
- Standard classes for Sun RPC (which uses either UDP or TCP)
- Standard mix-in classes to implement various authentication
and encryption schemes
- Standard framework for select-based multiplexing
XXX Open problems:
- What to do with out-of-band data?
BaseServer:
- split generic "request" functionality out into BaseServer class.
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
example: read entries from a SQL database (requires overriding
get_request() to return a table entry from the database).
entry is processed by a RequestHandlerClass.
"""
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
# XXX Warning!
# There is a test suite for this module, but it cannot be run by the
# standard regression test.
# To run it manually, run Lib/test/test_socketserver.py.
__version__ = "0.4"
import socket
import sys
import os
__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
"ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
"StreamRequestHandler","DatagramRequestHandler",
"ThreadingMixIn", "ForkingMixIn"]
if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
class BaseServer:

    """Base class for server classes.

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass)
    - serve_forever()
    - handle_request()  # if you do not use serve_forever()
    - fileno() -> int   # for select()

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - verify_request(request, client_address)
    - server_close()
    - process_request(request, client_address)
    - close_request(request)
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - address_family
    - socket_type
    - allow_reuse_address

    Instance variables:

    - RequestHandlerClass
    - socket

    """

    def __init__(self, server_address, RequestHandlerClass):
        """Constructor.  May be extended, do not override."""
        self.server_address = server_address
        self.RequestHandlerClass = RequestHandlerClass

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.

        """
        pass

    def serve_forever(self):
        """Handle one request at a time until doomsday."""
        while 1:
            self.handle_request()

    # The distinction between handling, getting, processing and
    # finishing a request is fairly arbitrary.  Remember:
    #
    # - handle_request() is the top-level call.  It calls
    #   get_request(), verify_request() and process_request()
    # - get_request() is different for stream or datagram sockets
    # - process_request() is the place that may fork a new process
    #   or create a new thread to finish the request
    # - finish_request() instantiates the request handler class;
    #   this constructor will handle the request all by itself

    def handle_request(self):
        """Handle one request, possibly blocking."""
        # get_request() may raise socket.error (e.g. interrupted accept);
        # treat that as "no request available" and return quietly.
        try:
            request, client_address = self.get_request()
        except socket.error:
            return
        if self.verify_request(request, client_address):
            try:
                self.process_request(request, client_address)
            except:
                # Any failure while processing is reported, then the request
                # is closed so resources are not leaked.
                self.handle_error(request, client_address)
                self.close_request(request)

    def verify_request(self, request, client_address):
        """Verify the request.  May be overridden.

        Return True if we should proceed with this request.

        """
        return True

    def process_request(self, request, client_address):
        """Call finish_request.

        Overridden by ForkingMixIn and ThreadingMixIn.

        """
        self.finish_request(request, client_address)
        self.close_request(request)

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.

        """
        pass

    def finish_request(self, request, client_address):
        """Finish one request by instantiating RequestHandlerClass."""
        self.RequestHandlerClass(request, client_address, self)

    def close_request(self, request):
        """Called to clean up an individual request."""
        pass

    def handle_error(self, request, client_address):
        """Handle an error gracefully.  May be overridden.

        The default is to print a traceback and continue.

        """
        print '-'*40
        print 'Exception happened during processing of request from',
        print client_address
        import traceback
        traceback.print_exc() # XXX But this goes to stderr!
        print '-'*40
class TCPServer(BaseServer):

    """Base class for various socket-based server classes.

    Defaults to synchronous IP stream (i.e., TCP).

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass)
    - serve_forever()
    - handle_request()  # if you don't use serve_forever()
    - fileno() -> int   # for select()

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - verify_request(request, client_address)
    - process_request(request, client_address)
    - close_request(request)
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - address_family
    - socket_type
    - request_queue_size (only for stream sockets)
    - allow_reuse_address

    Instance variables:

    - server_address
    - RequestHandlerClass
    - socket

    """

    address_family = socket.AF_INET

    socket_type = socket.SOCK_STREAM

    request_queue_size = 5

    allow_reuse_address = False

    def __init__(self, server_address, RequestHandlerClass):
        """Constructor.  May be extended, do not override."""
        BaseServer.__init__(self, server_address, RequestHandlerClass)
        self.socket = socket.socket(self.address_family,
                                    self.socket_type)
        self.server_bind()
        self.server_activate()

    def server_bind(self):
        """Called by constructor to bind the socket.

        May be overridden.

        """
        if self.allow_reuse_address:
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
        # Re-read the bound address: binding to port 0 picks an ephemeral port.
        self.server_address = self.socket.getsockname()

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.

        """
        self.socket.listen(self.request_queue_size)
        # Adding a second call to getsockname() because of this issue
        # http://wiki.python.org/jython/NewSocketModule#Deferredsocketcreationonjython
        self.server_address = self.socket.getsockname()

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.

        """
        self.socket.close()

    def fileno(self):
        """Return socket file number.

        Interface required by select().

        """
        return self.socket.fileno()

    def get_request(self):
        """Get the request and client address from the socket.

        May be overridden.

        """
        return self.socket.accept()

    def close_request(self, request):
        """Called to clean up an individual request."""
        request.close()
class UDPServer(TCPServer):

    """UDP server class."""

    allow_reuse_address = False

    socket_type = socket.SOCK_DGRAM

    # Largest datagram accepted in a single recvfrom() call.
    max_packet_size = 8192

    def get_request(self):
        # A datagram "request" is the packet plus the socket itself, since
        # there is no per-client connection object to hand out.
        data, client_addr = self.socket.recvfrom(self.max_packet_size)
        return (data, self.socket), client_addr

    def server_activate(self):
        # No need to call listen() for UDP.
        pass

    def close_request(self, request):
        # No need to close anything.
        pass
class ForkingMixIn:

    """Mix-in class to handle each request in a new process."""

    active_children = None   # list of child PIDs; created lazily on first fork
    max_children = 40        # cap on concurrently running forked children

    def collect_children(self):
        """Internal routine to wait for died children."""
        while self.active_children:
            if len(self.active_children) < self.max_children:
                # Below the cap: reap opportunistically without blocking.
                options = os.WNOHANG
            else:
                # If the maximum number of children are already
                # running, block while waiting for a child to exit
                options = 0
            try:
                pid, status = os.waitpid(0, options)
            except os.error:
                pid = None
            if not pid: break
            self.active_children.remove(pid)

    def process_request(self, request, client_address):
        """Fork a new subprocess to process the request."""
        self.collect_children()
        pid = os.fork()
        if pid:
            # Parent process
            if self.active_children is None:
                self.active_children = []
            self.active_children.append(pid)
            self.close_request(request)
            return
        else:
            # Child process.
            # This must never return, hence os._exit()!
            try:
                self.finish_request(request, client_address)
                os._exit(0)
            except:
                try:
                    self.handle_error(request, client_address)
                finally:
                    os._exit(1)
class ThreadingMixIn:
    """Mix-in class to handle each request in a new thread."""

    # Decides how threads will act upon termination of the
    # main process
    daemon_threads = False

    def process_request_thread(self, request, client_address):
        """Same as in BaseServer but as a thread.

        In addition, exception handling is done here.

        """
        try:
            self.finish_request(request, client_address)
            self.close_request(request)
        except:
            self.handle_error(request, client_address)
            self.close_request(request)

    def process_request(self, request, client_address):
        """Start a new thread to process the request."""
        # Imported lazily so the module remains importable without threads.
        import threading
        t = threading.Thread(target = self.process_request_thread,
                             args = (request, client_address))
        if self.daemon_threads:
            t.setDaemon (1)
        t.start()
# Ready-made asynchronous flavors of the two socket server types.
class ForkingUDPServer(ForkingMixIn, UDPServer): pass
class ForkingTCPServer(ForkingMixIn, TCPServer): pass

class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass

if hasattr(socket, 'AF_UNIX'):

    # Unix-domain variants: identical behavior, only the address family differs.
    class UnixStreamServer(TCPServer):
        address_family = socket.AF_UNIX

    class UnixDatagramServer(UDPServer):
        address_family = socket.AF_UNIX

    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass

    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
class BaseRequestHandler:

    """Base class for request handler classes.

    This class is instantiated for each request to be handled.  The
    constructor sets the instance variables request, client_address
    and server, and then calls the handle() method.  To implement a
    specific service, all you need to do is to derive a class which
    defines a handle() method.

    The handle() method can find the request as self.request, the
    client address as self.client_address, and the server (in case it
    needs access to per-server information) as self.server.  Since a
    separate instance is created for each request, the handle() method
    can define arbitrary other instance variables.

    """

    def __init__(self, request, client_address, server):
        self.request = request
        self.client_address = client_address
        self.server = server
        self.setup()
        try:
            self.handle()
        finally:
            # finish() always runs so per-request resources are released
            # even if handle() raises.
            self.finish()

    def setup(self):
        pass

    def handle(self):
        pass

    def finish(self):
        pass
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
class StreamRequestHandler(BaseRequestHandler):

    """Define self.rfile and self.wfile for stream sockets."""

    # Default buffer sizes for rfile, wfile.
    # We default rfile to buffered because otherwise it could be
    # really slow for large data (a getc() call per byte); we make
    # wfile unbuffered because (a) often after a write() we want to
    # read and we need to flush the line; (b) big writes to unbuffered
    # files are typically optimized by stdio even when big reads
    # aren't.
    rbufsize = -1
    wbufsize = 0

    def setup(self):
        self.connection = self.request
        self.rfile = self.connection.makefile('rb', self.rbufsize)
        self.wfile = self.connection.makefile('wb', self.wbufsize)

    def finish(self):
        # Flush any buffered output before closing; the underlying socket
        # itself is closed by the server via close_request().
        if not self.wfile.closed:
            self.wfile.flush()
        self.wfile.close()
        self.rfile.close()
class DatagramRequestHandler(BaseRequestHandler):

    # XXX Regrettably, I cannot get this working on Linux;
    # s.recvfrom() doesn't return a meaningful client address.

    """Define self.rfile and self.wfile for datagram sockets."""

    def setup(self):
        # cStringIO is the faster C implementation; fall back to the pure
        # Python StringIO when it is unavailable (e.g. on Jython).
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        self.packet, self.socket = self.request
        self.rfile = StringIO(self.packet)
        self.wfile = StringIO()

    def finish(self):
        # Send the accumulated reply back to the client as one datagram.
        self.socket.sendto(self.wfile.getvalue(), self.client_address)
| gpl-2.0 |
dlazz/ansible | lib/ansible/plugins/connection/saltstack.py | 7 | 3736 | # Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# Based on func.py
# (c) 2014, Michael Scherer <misc@zarb.org>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Michael Scherer (@mscherer) <misc@zarb.org>
connection: saltstack
short_description: Allow ansible to piggyback on salt minions
description:
- This allows you to use existing Saltstack infrastructure to connect to targets.
version_added: "2.2"
"""
import re
import os
import pty
import subprocess
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.six.moves import cPickle
HAVE_SALTSTACK = False
try:
import salt.client as sc
HAVE_SALTSTACK = True
except ImportError:
pass
import os
from ansible import errors
from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
    ''' Salt-based connections '''

    has_pipelining = False
    # while the name of the product is salt, naming that module salt cause
    # trouble with module import
    transport = 'saltstack'

    def __init__(self, play_context, new_stdin, *args, **kwargs):
        super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
        self.host = self._play_context.remote_addr

    def _connect(self):
        """Create the salt LocalClient used for all minion interaction."""
        if not HAVE_SALTSTACK:
            raise errors.AnsibleError("saltstack is not installed")

        self.client = sc.LocalClient()
        self._connected = True
        return self

    def exec_command(self, cmd, sudoable=False, in_data=None):
        ''' run a command on the remote minion '''
        super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)

        if in_data:
            raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")

        self._display.vvv("EXEC %s" % (cmd), host=self.host)
        # need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077
        res = self.client.cmd(self.host, 'cmd.exec_code_all', ['bash', 'true;' + cmd])
        if self.host not in res:
            raise errors.AnsibleError("Minion %s didn't answer, check if salt-minion is running and the name is correct" % self.host)

        p = res[self.host]
        return (p['retcode'], p['stdout'], p['stderr'])

    def _normalize_path(self, path, prefix):
        # Anchor the path at the filesystem root, collapse '.'/'..' parts,
        # then re-root it under *prefix*.
        if not path.startswith(os.path.sep):
            path = os.path.join(os.path.sep, path)
        normpath = os.path.normpath(path)
        return os.path.join(prefix, normpath[1:])

    def put_file(self, in_path, out_path):
        ''' transfer a file from local to remote '''

        super(Connection, self).put_file(in_path, out_path)

        out_path = self._normalize_path(out_path, '/')
        self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
        # BUG FIX: the original used a bare open() without closing the handle,
        # leaking a file descriptor per transferred file.
        with open(in_path) as in_file:
            content = in_file.read()
        self.client.cmd(self.host, 'file.write', [out_path, content])

    # TODO test it
    def fetch_file(self, in_path, out_path):
        ''' fetch a file from remote to local '''

        super(Connection, self).fetch_file(in_path, out_path)

        in_path = self._normalize_path(in_path, '/')
        self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
        content = self.client.cmd(self.host, 'cp.get_file_str', [in_path])[self.host]
        # BUG FIX: same handle leak as put_file — close deterministically.
        with open(out_path, 'wb') as out_file:
            out_file.write(content)

    def close(self):
        ''' terminate the connection; nothing to do here '''
        pass
| gpl-3.0 |
AndyHelix/django-xadmin | xadmin/views/edit.py | 9 | 19694 | import copy
from django import forms
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied, FieldError
from django.db import models, transaction
from django.forms.models import modelform_factory, modelform_defines_fields
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.utils.encoding import force_unicode
from django.utils.html import escape
from django.template import loader
from django.utils.translation import ugettext as _
from xadmin import widgets
from xadmin.layout import FormHelper, Layout, Fieldset, TabHolder, Container, Column, Col, Field
from xadmin.util import unquote
from xadmin.views.detail import DetailAdminUtil
from base import ModelAdminView, filter_hook, csrf_protect_m
# Default admin form-field configuration keyed by model field class: each
# entry supplies the widget (and optionally form_class) used when rendering
# that field type in the admin form. Views may override entries via their
# ``formfield_overrides`` attribute (merged in ModelFormAdminView.__init__).
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
    models.DateTimeField: {
        'form_class': forms.SplitDateTimeField,
        'widget': widgets.AdminSplitDateTime
    },
    models.DateField: {'widget': widgets.AdminDateWidget},
    models.TimeField: {'widget': widgets.AdminTimeWidget},
    models.TextField: {'widget': widgets.AdminTextareaWidget},
    models.URLField: {'widget': widgets.AdminURLFieldWidget},
    models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
    models.BigIntegerField: {'widget': widgets.AdminIntegerFieldWidget},
    models.CharField: {'widget': widgets.AdminTextInputWidget},
    models.IPAddressField: {'widget': widgets.AdminTextInputWidget},
    models.ImageField: {'widget': widgets.AdminFileWidget},
    models.FileField: {'widget': widgets.AdminFileWidget},
    models.ForeignKey: {'widget': widgets.AdminSelectWidget},
    models.OneToOneField: {'widget': widgets.AdminSelectWidget},
    models.ManyToManyField: {'widget': widgets.AdminSelectMultiple},
}
class ReadOnlyField(Field):
    """Layout field that renders its wrapped fields as read-only values."""

    template = "xadmin/layout/field_value.html"

    def __init__(self, *args, **kwargs):
        # ``detail`` is the detail-view helper used to resolve field values.
        self.detail = kwargs.pop('detail')
        super(ReadOnlyField, self).__init__(*args, **kwargs)

    def render(self, form, form_style, context):
        """Render each field through the read-only template and join the markup."""
        rendered = []
        for field_name in self.fields:
            result = self.detail.get_field_result(field_name)
            rendered.append(loader.render_to_string(
                self.template,
                {'field': {'auto_id': field_name}, 'result': result}))
        return ''.join(rendered)
class ModelFormAdminView(ModelAdminView):
    """Base admin view providing form building, validation and saving
    for model add/change pages; subclassed by Create/Update views."""

    form = forms.ModelForm          # ModelForm class used to build the admin form
    formfield_overrides = {}        # per-field-class overrides merged over FORMFIELD_FOR_DBFIELD_DEFAULTS
    readonly_fields = ()            # field names rendered as read-only values
    style_fields = {}               # field name -> widget style ('radio', 'checkbox', ...)
    exclude = None
    relfield_style = None
    save_as = False
    save_on_top = False
    add_form_template = None
    change_form_template = None
    form_layout = None

    def __init__(self, request, *args, **kwargs):
        # Merge the view-specific widget overrides over the global defaults
        # so subclass settings win on a per-field-class basis.
        overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
        overrides.update(self.formfield_overrides)
        self.formfield_overrides = overrides
        super(ModelFormAdminView, self).__init__(request, *args, **kwargs)
    @filter_hook
    def formfield_for_dbfield(self, db_field, **kwargs):
        """Build the form field for ``db_field``, applying style/override attrs.

        Returns None for M2M fields routed through a hand-written intermediary
        model, since those cannot be edited with a plain widget.
        """
        # If it uses an intermediary model that isn't auto created, don't show
        # a field in admin.
        if isinstance(db_field, models.ManyToManyField) and not db_field.rel.through._meta.auto_created:
            return None

        attrs = self.get_field_attrs(db_field, **kwargs)
        # Explicit kwargs win over computed attrs.
        return db_field.formfield(**dict(attrs, **kwargs))
    @filter_hook
    def get_field_style(self, db_field, style, **kwargs):
        """Return widget attrs implementing ``style`` for ``db_field``, or None."""
        # 'radio' styles only make sense for choice fields and foreign keys.
        if style in ('radio', 'radio-inline') and (db_field.choices or isinstance(db_field, models.ForeignKey)):
            attrs = {'widget': widgets.AdminRadioSelect(
                attrs={'inline': 'inline' if style == 'radio-inline' else ''})}
            if db_field.choices:
                attrs['choices'] = db_field.get_choices(
                    include_blank=db_field.blank,
                    blank_choice=[('', _('Null'))]
                )
            return attrs

        # 'checkbox' styles only apply to many-to-many fields; the default
        # help text is dropped because the widget is self-explanatory.
        if style in ('checkbox', 'checkbox-inline') and isinstance(db_field, models.ManyToManyField):
            return {'widget': widgets.AdminCheckboxSelect(attrs={'inline': style == 'checkbox-inline'}),
                    'help_text': None}

    @filter_hook
    def get_field_attrs(self, db_field, **kwargs):
        """Resolve form-field attrs for ``db_field``.

        Precedence: explicit style_fields entry, the related model admin's
        relfield_style, a select widget for choice fields, then the first
        match in formfield_overrides walking the field class's MRO.
        """
        if db_field.name in self.style_fields:
            attrs = self.get_field_style(
                db_field, self.style_fields[db_field.name], **kwargs)
            if attrs:
                return attrs

        if hasattr(db_field, "rel") and db_field.rel:
            related_modeladmin = self.admin_site._registry.get(db_field.rel.to)
            if related_modeladmin and hasattr(related_modeladmin, 'relfield_style'):
                attrs = self.get_field_style(
                    db_field, related_modeladmin.relfield_style, **kwargs)
                if attrs:
                    return attrs

        if db_field.choices:
            return {'widget': widgets.AdminSelectWidget}

        # Walk the MRO so subclasses of an overridden field class inherit the
        # override; return a copy so callers can mutate it safely.
        for klass in db_field.__class__.mro():
            if klass in self.formfield_overrides:
                return self.formfield_overrides[klass].copy()

        return {}
    @filter_hook
    def prepare_form(self):
        # Build (but do not instantiate) the ModelForm class for this view.
        self.model_form = self.get_model_form()

    @filter_hook
    def instance_forms(self):
        # Instantiate the form with data/initial/instance from get_form_datas().
        self.form_obj = self.model_form(**self.get_form_datas())

    def setup_forms(self):
        # Attach the crispy-style FormHelper (layout + readonly wrapping).
        helper = self.get_form_helper()
        if helper:
            self.form_obj.helper = helper

    @filter_hook
    def valid_forms(self):
        # Single point for form validation so plugins can hook it.
        return self.form_obj.is_valid()
@filter_hook
def get_model_form(self, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields())
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# ModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we pass None to be consistant with the
# default on modelform_factory
exclude = exclude or None
defaults = {
"form": self.form,
"fields": self.fields and list(self.fields) or None,
"exclude": exclude,
"formfield_callback": self.formfield_for_dbfield,
}
defaults.update(kwargs)
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)
try:
return modelform_factory(self.model, **defaults)
except FieldError as e:
raise FieldError('%s. Check fields/fieldsets/exclude attributes of class %s.'
% (e, self.__class__.__name__))
    @filter_hook
    def get_form_layout(self):
        """Build (or normalize) the crispy Layout for this form.

        Accepts ``form_layout`` as None, a Layout, or a list/tuple of
        Columns/Fieldsets/field names, and appends any fields not already
        placed into an 'Other Fields' fieldset.
        """
        # Deep-copy so the class-level layout is never mutated per-request.
        layout = copy.deepcopy(self.form_layout)
        # All fields that must appear: form fields plus readonly ones.
        fields = self.form_obj.fields.keys() + list(self.get_readonly_fields())

        if layout is None:
            layout = Layout(Container(Col('full',
                Fieldset("", *fields, css_class="unsort no_title"), horizontal=True, span=12)
            ))
        elif type(layout) in (list, tuple) and len(layout) > 0:
            # Normalize shorthand layouts to a full Container of Columns.
            if isinstance(layout[0], Column):
                fs = layout
            elif isinstance(layout[0], (Fieldset, TabHolder)):
                fs = (Col('full', *layout, horizontal=True, span=12),)
            else:
                fs = (Col('full', Fieldset("", *layout, css_class="unsort no_title"), horizontal=True, span=12),)

            layout = Layout(Container(*fs))

            # Any field the author forgot to place still gets rendered.
            rendered_fields = [i[1] for i in layout.get_field_names()]
            container = layout[0].fields
            other_fieldset = Fieldset(_(u'Other Fields'), *[f for f in fields if f not in rendered_fields])

            if len(other_fieldset.fields):
                if len(container) and isinstance(container[0], Column):
                    container[0].fields.append(other_fieldset)
                else:
                    container.append(other_fieldset)

        return layout
    @filter_hook
    def get_form_helper(self):
        """Create the FormHelper carrying the layout; wrap readonly fields."""
        helper = FormHelper()
        # The surrounding template renders the <form> tag itself.
        helper.form_tag = False
        helper.add_layout(self.get_form_layout())

        # deal with readonly fields
        readonly_fields = self.get_readonly_fields()
        if readonly_fields:
            # Use the detail view machinery to display readonly values.
            detail = self.get_model_view(
                DetailAdminUtil, self.model, self.form_obj.instance)
            for field in readonly_fields:
                helper[field].wrap(ReadOnlyField, detail=detail)

        return helper

    @filter_hook
    def get_readonly_fields(self):
        """
        Hook for specifying custom readonly fields.
        """
        return self.readonly_fields
    @filter_hook
    def save_forms(self):
        # Build the model instance without touching the DB yet.
        self.new_obj = self.form_obj.save(commit=False)

    @filter_hook
    def save_models(self):
        # Persist the instance created in save_forms().
        self.new_obj.save()

    @filter_hook
    def save_related(self):
        # M2M data must be saved after the instance has a primary key.
        self.form_obj.save_m2m()

    @csrf_protect_m
    @filter_hook
    def get(self, request, *args, **kwargs):
        """Render the (unbound or instance-bound) form page."""
        self.instance_forms()
        self.setup_forms()

        return self.get_response()

    @csrf_protect_m
    @transaction.atomic
    @filter_hook
    def post(self, request, *args, **kwargs):
        """Validate and save the form inside a single DB transaction."""
        self.instance_forms()
        self.setup_forms()

        if self.valid_forms():
            # Order matters: instance first, then M2M relations.
            self.save_forms()
            self.save_models()
            self.save_related()
            response = self.post_response()
            # post_response may return either a redirect URL or a response.
            if isinstance(response, basestring):
                return HttpResponseRedirect(response)
            else:
                return response
        # Invalid form: re-render the page with errors.
        return self.get_response()
    @filter_hook
    def get_context(self):
        """Build the template context shared by the add and change pages."""
        add = self.org_obj is None
        change = self.org_obj is not None

        new_context = {
            'form': self.form_obj,
            'original': self.org_obj,
            'show_delete': self.org_obj is not None,
            'add': add,
            'change': change,
            'errors': self.get_error_list(),

            'has_add_permission': self.has_add_permission(),
            'has_view_permission': self.has_view_permission(),
            'has_change_permission': self.has_change_permission(self.org_obj),
            'has_delete_permission': self.has_delete_permission(self.org_obj),

            'has_file_field': True,  # FIXME - this should check if form or formsets have a FileField,
            'has_absolute_url': hasattr(self.model, 'get_absolute_url'),
            'form_url': '',
            'content_type_id': ContentType.objects.get_for_model(self.model).id,
            'save_as': self.save_as,
            'save_on_top': self.save_on_top,
        }

        # for submit line: flags controlling which buttons are displayed.
        new_context.update({
            'onclick_attrib': '',
            'show_delete_link': (new_context['has_delete_permission']
                                 and (change or new_context['show_delete'])),
            'show_save_as_new': change and self.save_as,
            'show_save_and_add_another': new_context['has_add_permission'] and
                                (not self.save_as or add),
            'show_save_and_continue': new_context['has_change_permission'],
            'show_save': True
        })

        if self.org_obj and new_context['show_delete_link']:
            new_context['delete_url'] = self.model_admin_url(
                'delete', self.org_obj.pk)

        context = super(ModelFormAdminView, self).get_context()
        context.update(new_context)
        return context

    @filter_hook
    def get_error_list(self):
        """Flatten all form errors into a single ErrorList for the template."""
        errors = forms.util.ErrorList()
        if self.form_obj.is_bound:
            errors.extend(self.form_obj.errors.values())
        return errors

    @filter_hook
    def get_media(self):
        # Combine view media, form widget media and the xadmin form assets.
        return super(ModelFormAdminView, self).get_media() + self.form_obj.media + \
            self.vendor('xadmin.page.form.js', 'xadmin.form.css')
class CreateAdminView(ModelFormAdminView):
    """Admin 'add object' view."""

    def init_request(self, *args, **kwargs):
        self.org_obj = None

        if not self.has_add_permission():
            raise PermissionDenied

        # comm method for both get and post
        self.prepare_form()

    @filter_hook
    def get_form_datas(self):
        """Return kwargs used to instantiate the form (initial or POST data)."""
        # Prepare the dict of initial data from the request.
        # We have to special-case M2Ms as a list of comma-separated PKs.
        if self.request_method == 'get':
            initial = dict(self.request.GET.items())
            for k in initial:
                try:
                    f = self.opts.get_field(k)
                except models.FieldDoesNotExist:
                    # Unknown query parameters are simply passed through.
                    continue
                if isinstance(f, models.ManyToManyField):
                    initial[k] = initial[k].split(",")
            return {'initial': initial}
        else:
            return {'data': self.request.POST, 'files': self.request.FILES}

    @filter_hook
    def get_context(self):
        new_context = {
            'title': _('Add %s') % force_unicode(self.opts.verbose_name),
        }
        context = super(CreateAdminView, self).get_context()
        context.update(new_context)
        return context

    @filter_hook
    def get_breadcrumb(self):
        # Skip ModelFormAdminView's breadcrumb and extend the base one.
        bcs = super(ModelFormAdminView, self).get_breadcrumb()
        item = {'title': _('Add %s') % force_unicode(self.opts.verbose_name)}
        if self.has_add_permission():
            item['url'] = self.model_admin_url('add')
        bcs.append(item)
        return bcs

    @filter_hook
    def get_response(self):
        context = self.get_context()
        context.update(self.kwargs or {})

        return TemplateResponse(
            self.request, self.add_form_template or self.get_template_list(
                'views/model_form.html'),
            context, current_app=self.admin_site.name)

    @filter_hook
    def post_response(self):
        """
        Determines the HttpResponse for the add_view stage.

        Returns either a redirect URL string (handled by the caller) or falls
        through to the changelist / index depending on permissions.
        """
        request = self.request

        msg = _(
            'The %(name)s "%(obj)s" was added successfully.') % {'name': force_unicode(self.opts.verbose_name),
                                                                 'obj': "<a class='alert-link' href='%s'>%s</a>" % (self.model_admin_url('change', self.new_obj._get_pk_val()), force_unicode(self.new_obj))}

        # NOTE(review): request.REQUEST (merged GET/POST) was removed in
        # Django 1.9 — confirm the targeted Django version.
        if "_continue" in request.REQUEST:
            self.message_user(
                msg + ' ' + _("You may edit it again below."), 'success')
            return self.model_admin_url('change', self.new_obj._get_pk_val())

        if "_addanother" in request.REQUEST:
            self.message_user(msg + ' ' + (_("You may add another %s below.") % force_unicode(self.opts.verbose_name)), 'success')
            return request.path
        else:
            self.message_user(msg, 'success')

            # Figure out where to redirect. If the user has change permission,
            # redirect to the change-list page for this object. Otherwise,
            # redirect to the admin index.
            if "_redirect" in request.REQUEST:
                return request.REQUEST["_redirect"]
            elif self.has_view_permission():
                return self.model_admin_url('changelist')
            else:
                return self.get_admin_url('index')
class UpdateAdminView(ModelFormAdminView):
    """Admin 'change object' view."""

    def init_request(self, object_id, *args, **kwargs):
        self.org_obj = self.get_object(unquote(object_id))

        if not self.has_change_permission(self.org_obj):
            raise PermissionDenied

        if self.org_obj is None:
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') %
                          {'name': force_unicode(self.opts.verbose_name), 'key': escape(object_id)})

        # comm method for both get and post
        self.prepare_form()

    @filter_hook
    def get_form_datas(self):
        # Always bind to the existing instance; add POST data when submitting.
        params = {'instance': self.org_obj}
        if self.request_method == 'post':
            params.update(
                {'data': self.request.POST, 'files': self.request.FILES})
        return params

    @filter_hook
    def get_context(self):
        new_context = {
            'title': _('Change %s') % force_unicode(self.org_obj),
            'object_id': str(self.org_obj.pk),
        }
        context = super(UpdateAdminView, self).get_context()
        context.update(new_context)
        return context

    @filter_hook
    def get_breadcrumb(self):
        # Skip ModelFormAdminView's breadcrumb and extend the base one.
        bcs = super(ModelFormAdminView, self).get_breadcrumb()

        item = {'title': force_unicode(self.org_obj)}
        # NOTE(review): called without the object argument here, unlike the
        # has_change_permission(self.org_obj) calls elsewhere — confirm the
        # default argument makes these equivalent.
        if self.has_change_permission():
            item['url'] = self.model_admin_url('change', self.org_obj.pk)
        bcs.append(item)

        return bcs

    @filter_hook
    def get_response(self, *args, **kwargs):
        context = self.get_context()
        context.update(kwargs or {})

        return TemplateResponse(
            self.request, self.change_form_template or self.get_template_list(
                'views/model_form.html'),
            context, current_app=self.admin_site.name)

    def post(self, request, *args, **kwargs):
        # "Save as new" delegates the whole POST to the create view.
        if "_saveasnew" in self.request.REQUEST:
            return self.get_model_view(CreateAdminView, self.model).post(request)
        return super(UpdateAdminView, self).post(request, *args, **kwargs)

    @filter_hook
    def post_response(self):
        """
        Determines the HttpResponse for the change_view stage.
        """
        opts = self.new_obj._meta
        obj = self.new_obj
        request = self.request
        verbose_name = opts.verbose_name

        pk_value = obj._get_pk_val()

        msg = _('The %(name)s "%(obj)s" was changed successfully.') % {'name':
                                                                       force_unicode(verbose_name), 'obj': force_unicode(obj)}
        if "_continue" in request.REQUEST:
            self.message_user(
                msg + ' ' + _("You may edit it again below."), 'success')
            return request.path
        elif "_addanother" in request.REQUEST:
            self.message_user(msg + ' ' + (_("You may add another %s below.")
                                           % force_unicode(verbose_name)), 'success')
            return self.model_admin_url('add')
        else:
            self.message_user(msg, 'success')

            # Figure out where to redirect. If the user has change permission,
            # redirect to the change-list page for this object. Otherwise,
            # redirect to the admin index.
            if "_redirect" in request.REQUEST:
                return request.REQUEST["_redirect"]
            elif self.has_view_permission():
                change_list_url = self.model_admin_url('changelist')
                # Preserve the user's last changelist filters, if recorded.
                if 'LIST_QUERY' in self.request.session \
                        and self.request.session['LIST_QUERY'][0] == self.model_info:
                    change_list_url += '?' + self.request.session['LIST_QUERY'][1]
                return change_list_url
            else:
                return self.get_admin_url('index')
class ModelFormAdminUtil(ModelFormAdminView):
    """Helper view: builds a bound form for ``obj`` without rendering a page."""

    def init_request(self, obj=None):
        self.org_obj = obj
        self.prepare_form()
        self.instance_forms()

    @filter_hook
    def get_form_datas(self):
        # Only bind the instance; no request data is involved.
        return {'instance': self.org_obj}
| bsd-3-clause |
chjw8016/GreenOdoo7-haibao | openerp/addons/crm/crm_phonecall.py | 14 | 14638 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.base_status.base_state import base_state
import crm
from datetime import datetime
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp.tools.translate import _
class crm_phonecall(base_state, osv.osv):
    """ Model for CRM phonecalls """
    _name = "crm.phonecall"
    _description = "Phonecall"
    _order = "id desc"
    _inherit = ['mail.thread']

    # Column definitions: the first group is required by base_state, the
    # second group is phonecall-specific data.
    _columns = {
        # base_state required fields
        'date_action_last': fields.datetime('Last Action', readonly=1),
        'date_action_next': fields.datetime('Next Action', readonly=1),
        'create_date': fields.datetime('Creation Date' , readonly=True),
        'section_id': fields.many2one('crm.case.section', 'Sales Team', \
                        select=True, help='Sales team to which Case belongs to.'),
        'user_id': fields.many2one('res.users', 'Responsible'),
        'partner_id': fields.many2one('res.partner', 'Contact'),
        'company_id': fields.many2one('res.company', 'Company'),
        'description': fields.text('Description'),
        'state': fields.selection([ ('draft', 'Draft'),
                                    ('open', 'Confirmed'),
                                    ('pending', 'Not Held'),
                                    ('cancel', 'Cancelled'),
                                    ('done', 'Held'),],
                        string='Status', size=16, readonly=True, track_visibility='onchange',
                        help='The status is set to \'Todo\', when a case is created.\
                                If the case is in progress the status is set to \'Open\'.\
                                When the call is over, the status is set to \'Held\'.\
                                If the call needs to be done then the status is set to \'Not Held\'.'),
        'email_from': fields.char('Email', size=128, help="These people will receive email."),
        'date_open': fields.datetime('Opened', readonly=True),
        # phonecall fields
        'name': fields.char('Call Summary', size=64, required=True),
        'active': fields.boolean('Active', required=False),
        'duration': fields.float('Duration', help="Duration in Minutes"),
        'categ_id': fields.many2one('crm.case.categ', 'Category', \
                        domain="['|',('section_id','=',section_id),('section_id','=',False),\
                        ('object_id.model', '=', 'crm.phonecall')]"),
        'partner_phone': fields.char('Phone', size=32),
        'partner_mobile': fields.char('Mobile', size=32),
        'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority'),
        'date_closed': fields.datetime('Closed', readonly=True),
        'date': fields.datetime('Date'),
        'opportunity_id': fields.many2one ('crm.lead', 'Lead/Opportunity'),
    }
    def _get_default_state(self, cr, uid, context=None):
        # Allow callers (e.g. wizards) to force the initial state via context.
        if context and context.get('default_state', False):
            return context.get('default_state')
        return 'open'

    _defaults = {
        'date': fields.datetime.now,
        'priority': crm.AVAILABLE_PRIORITIES[2][0],
        'state':  _get_default_state,
        'user_id': lambda self,cr,uid,ctx: uid,
        'active': 1
    }
    def case_close(self, cr, uid, ids, context=None):
        """ Overrides close for crm_case for setting duration """
        res = True
        for phone in self.browse(cr, uid, ids, context=context):
            phone_id = phone.id
            data = {}
            # If no duration was entered, derive it from the elapsed time
            # since the call's scheduled date.
            if phone.duration <=0:
                duration = datetime.now() - datetime.strptime(phone.date, DEFAULT_SERVER_DATETIME_FORMAT)
                data['duration'] = duration.seconds/float(60)
            # NOTE(review): ``res`` keeps only the last iteration's result
            # when several ids are closed — confirm that is intended.
            res = super(crm_phonecall, self).case_close(cr, uid, [phone_id], context=context)
            self.write(cr, uid, [phone_id], data, context=context)
        return res
    def case_reset(self, cr, uid, ids, context=None):
        """Resets case as Todo
        """
        res = super(crm_phonecall, self).case_reset(cr, uid, ids, context)
        # Resetting also clears the recorded duration.
        self.write(cr, uid, ids, {'duration': 0.0, 'state':'open'}, context=context)
        return res
    def schedule_another_phonecall(self, cr, uid, ids, schedule_time, call_summary, \
                    user_id=False, section_id=False, categ_id=False, action='schedule', context=None):
        """
        Create a follow-up phonecall for each call in ``ids``.

        action :('schedule','Schedule a call'), ('log','Log a call')

        Returns a dict mapping each source call id to the new call id.
        """
        model_data = self.pool.get('ir.model.data')
        phonecall_dict = {}
        if not categ_id:
            # Default to the stock 'Outbound' phonecall category, if present.
            try:
                res_id = model_data._get_id(cr, uid, 'crm', 'categ_phone2')
                categ_id = model_data.browse(cr, uid, res_id, context=context).res_id
            except ValueError:
                pass
        for call in self.browse(cr, uid, ids, context=context):
            # NOTE(review): section_id/user_id/schedule_time are filled in
            # from the FIRST call and then persist across later iterations —
            # confirm this cross-call carry-over is intended.
            if not section_id:
                section_id = call.section_id and call.section_id.id or False
            if not user_id:
                user_id = call.user_id and call.user_id.id or False
            if not schedule_time:
                schedule_time = call.date
            vals = {
                    'name' : call_summary,
                    'user_id' : user_id or False,
                    'categ_id' : categ_id or False,
                    'description' : call.description or False,
                    'date' : schedule_time,
                    'section_id' : section_id or False,
                    'partner_id': call.partner_id and call.partner_id.id or False,
                    'partner_phone' : call.partner_phone,
                    'partner_mobile' : call.partner_mobile,
                    'priority': call.priority,
            }
            new_id = self.create(cr, uid, vals, context=context)
            # A logged call is immediately marked as held.
            if action == 'log':
                self.case_close(cr, uid, [new_id])
            phonecall_dict[call.id] = new_id
        return phonecall_dict
    def _call_create_partner(self, cr, uid, phonecall, context=None):
        """Create a res.partner from a phonecall record; return its id."""
        partner = self.pool.get('res.partner')
        partner_id = partner.create(cr, uid, {
                    'name': phonecall.name,
                    'user_id': phonecall.user_id.id,
                    'comment': phonecall.description,
                    'address': []
        })
        return partner_id
    def on_change_opportunity(self, cr, uid, ids, opportunity_id, context=None):
        """Form onchange: copy team/phone/partner data from the opportunity."""
        values = {}
        if opportunity_id:
            opportunity = self.pool.get('crm.lead').browse(cr, uid, opportunity_id, context=context)
            values = {
                'section_id' : opportunity.section_id and opportunity.section_id.id or False,
                'partner_phone' : opportunity.phone,
                'partner_mobile' : opportunity.mobile,
                'partner_id' : opportunity.partner_id and opportunity.partner_id.id or False,
            }
        return {'value' : values}
    def _call_set_partner(self, cr, uid, ids, partner_id, context=None):
        """Link the calls to ``partner_id`` and post a chatter note."""
        write_res = self.write(cr, uid, ids, {'partner_id' : partner_id}, context=context)
        self._call_set_partner_send_note(cr, uid, ids, context)
        return write_res

    def _call_create_partner_address(self, cr, uid, phonecall, partner_id, context=None):
        """Create a child contact under ``partner_id`` from the phonecall."""
        address = self.pool.get('res.partner')
        return address.create(cr, uid, {
                    'parent_id': partner_id,
                    'name': phonecall.name,
                    'phone': phonecall.partner_phone,
        })
    def handle_partner_assignation(self, cr, uid, ids, action='create', partner_id=False, context=None):
        """
        Handle partner assignation during a lead conversion.
        if action is 'create', create new partner with contact and assign lead to new partner_id.
        otherwise assign lead to specified partner_id

        :param list ids: phonecalls ids to process
        :param string action: what has to be done regarding partners (create it, assign an existing one, or nothing)
        :param int partner_id: partner to assign if any
        :return dict: dictionary organized as followed: {lead_id: partner_assigned_id}
        """
        #TODO this is a duplication of the handle_partner_assignation method of crm_lead
        partner_ids = {}
        # If a partner_id is given, force this partner for all elements
        force_partner_id = partner_id
        for call in self.browse(cr, uid, ids, context=context):
            # If the action is set to 'create' and no partner_id is set, create a new one
            if action == 'create':
                partner_id = force_partner_id or self._call_create_partner(cr, uid, call, context=context)
                self._call_create_partner_address(cr, uid, call, partner_id, context=context)
            self._call_set_partner(cr, uid, [call.id], partner_id, context=context)
            partner_ids[call.id] = partner_id
        return partner_ids
    def redirect_phonecall_view(self, cr, uid, phonecall_id, context=None):
        """Return an act_window dict opening the given phonecall's form view."""
        model_data = self.pool.get('ir.model.data')
        # Select the view
        tree_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_tree_view')
        form_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_form_view')
        search_view = model_data.get_object_reference(cr, uid, 'crm', 'view_crm_case_phonecalls_filter')
        value = {
                'name': _('Phone Call'),
                'view_type': 'form',
                'view_mode': 'tree,form',
                'res_model': 'crm.phonecall',
                'res_id' : int(phonecall_id),
                'views': [(form_view and form_view[1] or False, 'form'), (tree_view and tree_view[1] or False, 'tree'), (False, 'calendar')],
                'type': 'ir.actions.act_window',
                'search_view_id': search_view and search_view[1] or False,
        }
        return value
    def convert_opportunity(self, cr, uid, ids, opportunity_summary=False, partner_id=False, planned_revenue=0.0, probability=0.0, context=None):
        """Convert each phonecall into a crm.lead opportunity.

        The phonecall is linked to the new opportunity and closed; the new
        opportunity is opened. Returns {call_id: opportunity_id}.
        """
        partner = self.pool.get('res.partner')
        opportunity = self.pool.get('crm.lead')
        opportunity_dict = {}
        default_contact = False
        for call in self.browse(cr, uid, ids, context=context):
            if not partner_id:
                partner_id = call.partner_id and call.partner_id.id or False
            if partner_id:
                # Use the partner's default address as contact fallback.
                address_id = partner.address_get(cr, uid, [partner_id])['default']
                if address_id:
                    default_contact = partner.browse(cr, uid, address_id, context=context)
            opportunity_id = opportunity.create(cr, uid, {
                            'name': opportunity_summary or call.name,
                            'planned_revenue': planned_revenue,
                            'probability': probability,
                            'partner_id': partner_id or False,
                            'mobile': default_contact and default_contact.mobile,
                            'section_id': call.section_id and call.section_id.id or False,
                            'description': call.description or False,
                            'priority': call.priority,
                            'type': 'opportunity',
                            'phone': call.partner_phone or False,
                            'email_from': default_contact and default_contact.email,
            })
            vals = {
                'partner_id': partner_id,
                'opportunity_id' : opportunity_id,
            }
            self.write(cr, uid, [call.id], vals)
            self.case_close(cr, uid, [call.id])
            opportunity.case_open(cr, uid, [opportunity_id])
            opportunity_dict[call.id] = opportunity_id
        return opportunity_dict
    def action_make_meeting(self, cr, uid, ids, context=None):
        """
        Open meeting's calendar view to schedule a meeting on current phonecall.
        :return dict: dictionary value for created meeting view
        """
        # Only the first selected phonecall is used to prefill the meeting.
        phonecall = self.browse(cr, uid, ids[0], context)
        res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'base_calendar', 'action_crm_meeting', context)
        res['context'] = {
            'default_phonecall_id': phonecall.id,
            'default_partner_id': phonecall.partner_id and phonecall.partner_id.id or False,
            'default_user_id': uid,
            'default_email_from': phonecall.email_from,
            'default_state': 'open',
            'default_name': phonecall.name,
        }
        return res

    def action_button_convert2opportunity(self, cr, uid, ids, context=None):
        """
        Convert a phonecall into an opp and then redirect to the opp view.

        :param list ids: list of calls ids to convert (typically contains a single id)
        :return dict: containing view information
        """
        if len(ids) != 1:
            raise osv.except_osv(_('Warning!'),_('It\'s only possible to convert one phonecall at a time.'))

        opportunity_dict = self.convert_opportunity(cr, uid, ids, context=context)
        return self.pool.get('crm.lead').redirect_opportunity_view(cr, uid, opportunity_dict[ids[0]], context)
    # ----------------------------------------
    # OpenChatter
    # ----------------------------------------

    def _call_set_partner_send_note(self, cr, uid, ids, context=None):
        # Post a chatter message recording the partner assignment.
        return self.message_post(cr, uid, ids, body=_("Partner has been <b>created</b>."), context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| mit |
elainenaomi/sciwonc-dataflow-examples | sbbd2016/experiments/1-postgres/3_workflow_full_10files_primary_nosh_nors_annot_with_proj_3s/pegasus.bDkvI/pegasus-4.6.0/lib/python2.7/dist-packages/Pegasus/monitoring/notifications.py | 1 | 34263 | """
Class for managing notifications in pegasus-monitord.
"""
##
# Copyright 2007-2011 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# Import Python modules
import os
import sys
import math
import time
import shlex
import signal
import logging
import tempfile
import subprocess
from Pegasus.tools import utils
NOTIFICATION_FILE = "monitord-notifications.log" # filename for writing the output of notification scripts
WAIT_CHILD_FINISH = 5 # in seconds
logger = logging.getLogger(__name__)
class Notifications:
"""
This object contains all functions needed for managing
notifications and starting notification scripts.
"""
    def __init__(self, notification_file_prefix,
                 max_parallel_notifications=10, notifications_timeout=0):
        """
        This function initialized the Notifications class.

        notification_file_prefix: directory where the shared notification
        log file (NOTIFICATION_FILE) is created.
        """
        self._active_notifications = []      # notifications currently running
        self._pending_notifications = []     # notifications waiting to start
        self._max_parallel_notifications = max_parallel_notifications
        self._notifications_timeout = notifications_timeout
        self._notifications_fn = os.path.join(notification_file_prefix, NOTIFICATION_FILE)
        self._notifications_log = None
        self._notifications = {}

        # Open notifications' log file; without it we cannot record any
        # notification output, so failure here is fatal.
        try:
            self._notifications_log = open(self._notifications_fn, 'a')
        except IOError:
            logger.critical("cannot create notifications' log file... exiting...")
            sys.exit(1)
def has_pending_notifications(self):
"""
This function returns True if we have pending notifications.
"""
return len(self._pending_notifications) > 0
def has_active_notifications(self):
"""
This function returns True if we have active notifications.
"""
return len(self._active_notifications) > 0
    def terminate_notification(self, my_entry):
        """
        This function terminates a notification process, and cleans up its
        output/error files.

        my_entry: dict describing one running notification (subprocess
        handle, pid, event/action names, and stdout/stderr file info).
        """
        my_p = my_entry["subp"]
        my_pid = my_entry["pid"]
        my_notification = my_entry["notification"]
        my_out_fn = my_entry["out_fn"]
        my_err_fn = my_entry["err_fn"]
        my_out_fd = my_entry["out_fd"]
        my_err_fd = my_entry["err_fd"]
        my_action = my_entry["action"]
        my_p.poll()
        # If process hasn't finished...
        if my_p.returncode is None:
            # Escalating shutdown: SIGTERM first, give the child
            # WAIT_CHILD_FINISH seconds to exit, then SIGKILL.
            # Send SIGTERM first...
            try:
                os.kill(my_pid, signal.SIGTERM)
            except OSError:
                logger.info("error sending SIGTERM to notification script...")

            # Wait for child to finish
            logger.warning("waiting for notification process to finish: %s - %s"
                           % (my_notification, my_action))
            time.sleep(WAIT_CHILD_FINISH)

            my_p.poll()
            if my_p.returncode is None:
                # Send SIGKILL now...
                logger.warning("killing notification process to finish: %s - %s"
                               % (my_notification, my_action))
                try:
                    os.kill(my_pid, signal.SIGKILL)
                except OSError:
                    logger.info("error sending SIGKILL to notification script...")

        # Finally, clean up files...
        try:
            os.unlink(my_out_fn)
            os.unlink(my_err_fn)
        except OSError:
            # No error here...
            pass
        logger.warning("notification terminated: %s - %s" % (my_notification, my_action))
def service_notifications(self):
    """
    This function services notifications. It checks the notifications
    in the active list to see if they have finished. If so, it copies
    the stdout/stderr from these notifications to the
    monitord-notifications.log file. For notifications in the
    pending_notifications list, it starts the notification scripts,
    unless there are already too many notifications running in the
    system. Finally, active notifications that have been running
    longer than the configured timeout are terminated.

    Fixes relative to the previous version:
    * the concurrency guard used '>' instead of '>=', allowing
      max_parallel_notifications + 1 scripts to run concurrently;
    * the stdout/stderr capture files are now closed even when reading
      them fails part-way (previously the handle leaked on a read error);
    * the bare 'except:' around Popen was narrowed to 'except Exception:'
      so KeyboardInterrupt/SystemExit still propagate.
    """
    logger.info("active notifications %d, pending notifications: %d"
                % (len(self._active_notifications), len(self._pending_notifications)))

    # Step 1: Look at existing notifications
    if len(self._active_notifications) > 0:
        # We have active notifications, let's check on their statuses
        my_notif_index = 0
        while my_notif_index < len(self._active_notifications):
            my_active_notif = self._active_notifications[my_notif_index]
            # Get subprocess object
            my_active_p = my_active_notif["subp"]
            my_status = my_active_p.poll()
            if my_status is not None:
                # Process finished notification
                my_finished_out_fn = my_active_notif["out_fn"]
                my_finished_err_fn = my_active_notif["err_fn"]
                my_finished_out_fd = my_active_notif["out_fd"]
                my_finished_err_fd = my_active_notif["err_fd"]
                my_finished_notification = my_active_notif["notification"]
                my_finished_action = my_active_notif["action"]
                my_finished_notification_params = my_active_notif["params"]
                # Close out/err files, if not already closed...
                try:
                    my_finished_out_fd.close()
                except IOError:
                    logger.warning("error closing stdout file for notification %s... continuing..."
                                   % (my_finished_notification))
                try:
                    my_finished_err_fd.close()
                except IOError:
                    logger.warning("error closing stderr file for notification %s... continuing..."
                                   % (my_finished_notification))
                if self._notifications_log is not None:
                    if logger.isEnabledFor(logging.INFO):
                        # Verbose record: environment plus captured output
                        self._notifications_log.write("%s\n" % ('-' * 80))
                        self._notifications_log.write("Notification time : %s\n" % (utils.isodate()))
                        self._notifications_log.write("Notification event : %s\n" % (my_finished_notification))
                        self._notifications_log.write("Notification action: %s\n" % (my_finished_action))
                        self._notifications_log.write("Notification status: %s\n" % (my_status))
                        self._notifications_log.write("\n")
                        self._notifications_log.write("Notification environment\n")
                        for k in my_finished_notification_params:
                            self._notifications_log.write("%s : %s\n" % (k, my_finished_notification_params[k]))
                        self._notifications_log.write("\n")
                        self._notifications_log.write("stdout:\n")
                        try:
                            # 'with' guarantees the file is closed even when
                            # a read fails half-way through (old code leaked
                            # the handle in that case)
                            with open(my_finished_out_fn, 'r') as my_f:
                                for line in my_f:
                                    self._notifications_log.write(line)
                        except IOError:
                            logger.warning("error processing notification stdout file: %s. continuing..."
                                           % (my_finished_out_fn))
                        self._notifications_log.write("\n")
                        self._notifications_log.write("stderr:\n")
                        try:
                            with open(my_finished_err_fn, 'r') as my_f:
                                for line in my_f:
                                    self._notifications_log.write(line)
                        except IOError:
                            logger.warning("error processing notification stderr file: %s. continuing..."
                                           % (my_finished_err_fn))
                        self._notifications_log.write("\n")
                        self._notifications_log.write("\n")
                    else:
                        # Only log a one-liner so we can debug things later if we need to
                        self._notifications_log.write("%s - %s - %s - %s\n" % (utils.isodate(),
                                                                               my_finished_notification,
                                                                               my_finished_action,
                                                                               my_status))
                else:
                    logger.critical("notifications' output log file not initialized... exiting...")
                    sys.exit(1)
                # Now, delete output and error files
                try:
                    os.unlink(my_finished_out_fn)
                except OSError:
                    logger.warning("error deleting notification stdout file: %s. continuing..."
                                   % (my_finished_out_fn))
                try:
                    os.unlink(my_finished_err_fn)
                except OSError:
                    logger.warning("error deleting notification stderr file: %s. continuing..."
                                   % (my_finished_err_fn))
                # Delete this notification from our list; do not advance the
                # index -- the next entry slid into this slot
                self._active_notifications.pop(my_notif_index)
            else:
                # Process still going... leave it...
                my_notif_index = my_notif_index + 1

    # Step 2: Look at our notification queue
    while len(self._pending_notifications) > 0:
        # Ok we have notifications to service...
        logger.debug("pending notifications: %s" % (len(self._pending_notifications)))
        # Check if we have reached the maximum number of concurrent notifications
        # (was '>', an off-by-one that allowed one extra concurrent script)
        if len(self._active_notifications) >= self._max_parallel_notifications:
            logger.info("reaching maximum number of concurrent notifications... waiting until next cycle...")
            break
        # Get first notification from the list
        try:
            my_action, my_env = self._pending_notifications.pop(0)
        except IndexError:
            logger.error("error processing notification list... exiting!")
            sys.exit(1)
        # Merge default environment with notification-specific environment
        my_complete_env = os.environ.copy()
        my_complete_env.update(my_env)
        try:
            my_notification = "%s - %s" % (my_env["PEGASUS_JOBID"], my_env["PEGASUS_EVENT"])
        except KeyError:
            logger.warning("notification missing PEGASUS_JOBID or PEGASUS_EVENT... skipping...")
            continue
        # Split arguments
        my_args = shlex.split(my_action)
        # Create output and error files for the notification script to use
        try:
            my_temp_out = tempfile.mkstemp(prefix="notification-", suffix="-out.log", dir="/tmp")
            my_temp_err = tempfile.mkstemp(prefix="notification-", suffix="-err.log", dir="/tmp")
            # mkstemp returns (os-level fd, name): close the fds here,
            # the files are reopened by name below
            os.close(my_temp_out[0])
            os.close(my_temp_err[0])
            my_out_fn = my_temp_out[1]
            my_err_fn = my_temp_err[1]
        except OSError:
            logger.warning("cannot create temp files for notification: %s... skipping..." % (my_notification))
            continue
        # Open output and error files for the notification script
        try:
            my_f_out = open(my_out_fn, 'w')
            my_f_err = open(my_err_fn, 'w')
        except IOError:
            logger.warning("cannot open temp files for notification: %s... skipping..." % (my_notification))
            try:
                os.unlink(my_out_fn)
                os.unlink(my_err_fn)
            except OSError:
                # No error here...
                pass
            continue
        # Ok, here we go...
        try:
            my_p = subprocess.Popen(my_args, stdout=my_f_out, stderr=my_f_err, env=my_complete_env)
        except OSError:
            logger.warning("cannot start notification executable: %s... skipping..." % (my_notification))
            try:
                my_f_out.close()
                my_f_err.close()
                os.unlink(my_out_fn)
                os.unlink(my_err_fn)
            except OSError:
                logger.warning("found problem cleaning up notification: %s... skipping..." % (my_notification))
            # Clean up done (or logged), just continue
            continue
        except Exception:
            # Narrowed from a bare 'except:' -- anything else that goes
            # wrong while spawning is logged and the entry is skipped
            logger.warning("problem starting notification: %s... skipping..." % (my_notification))
            try:
                my_f_out.close()
                my_f_err.close()
                os.unlink(my_out_fn)
                os.unlink(my_err_fn)
            except OSError:
                logger.warning("found problem cleaning up notification: %s... skipping..." % (my_notification))
            # Clean up done (or logged), just continue
            continue
        # Let's keep everything we need for the future
        my_started_notification = {}
        my_started_notification["pid"] = my_p.pid
        my_started_notification["subp"] = my_p
        my_started_notification["env"] = my_complete_env
        my_started_notification["params"] = my_env
        my_started_notification["args"] = my_args
        my_started_notification["action"] = my_action
        my_started_notification["out_fd"] = my_f_out
        my_started_notification["err_fd"] = my_f_err
        my_started_notification["out_fn"] = my_out_fn
        my_started_notification["err_fn"] = my_err_fn
        my_started_notification["notification"] = my_notification
        my_started_notification["time"] = time.time()
        # Add to the active list, and done!
        self._active_notifications.append(my_started_notification)
        logger.info("started notification for: %s" % (my_notification))

    # Step 3: Check if any notifications ran over the allowed time
    if self._notifications_timeout > 0:
        # Only go through the list if a timeout was specified
        # Get current time
        now = int(math.floor(time.time()))
        # Go through our list
        my_index = 0
        while my_index < len(self._active_notifications):
            my_entry = self._active_notifications[my_index]
            my_exp_time = my_entry["time"] + self._notifications_timeout
            # Check if notification has expired
            if my_exp_time < now:
                # Notification has expired... kill it...
                logger.warning("notification expired... terminating it...")
                self.terminate_notification(my_entry)
                # Delete this notification from our list (index stays put)
                self._active_notifications.pop(my_index)
            else:
                # Notification hasn't expired yet, move to next one...
                my_index = my_index + 1
def finish_notifications(self):
    """
    Flush all notifications and close the notifications' log file.
    Active notification processes are terminated; pending (never
    issued) notifications are logged and discarded.
    """
    # Terminate anything still running
    for entry in self._active_notifications:
        self.terminate_notification(entry)
    # Record the notifications we never got to issue
    for action, env in self._pending_notifications:
        try:
            notification = "%s - %s" % (env["PEGASUS_JOBID"], env["PEGASUS_EVENT"])
        except KeyError:
            logger.warning("notification missing PEGASUS_JOBID or PEGASUS_EVENT... skipping...")
            continue
        logger.warning("pending notification skipped: %s - %s" % (notification, action))
    # Close notifications' log file
    if self._notifications_log is not None:
        try:
            self._notifications_log.close()
        except IOError:
            logger.warning("error closing notifications' log file...")
        self._notifications_log = None
def read_notification_file(self, notify_file, wf_uuid):
    """
    This function reads the notification file, parsing all
    notifications and creating our list of events to track.
    It returns the number of notifications read from the
    notifications' file.

    notify_file: path to the notifications file (may be None).
    wf_uuid: workflow id; workflow-level lines whose id does not match
    are rejected, and the parsed structure replaces any earlier entry
    for this id in self._notifications.

    Accepted line formats (whitespace separated, '#' comments and
    blank lines skipped):
      INVOCATION <job_id> <inv_id> <condition> <action>
      WORKFLOW|JOB|DAXJOB|DAGJOB <id> <condition> <action>
    """
    if notify_file is None:
        return 0
    logger.info("loading notifications from %s" % (notify_file))
    # Open file
    try:
        NOTIFY = open(notify_file, "r")
    except IOError:
        logger.warning("cannot load notification file %s, continuing without notifications" % (notify_file))
        return 0
    # Start with empty dictionaries for the three types of notifications
    my_notifications_read = 0
    my_notifications = {"workflow" : {},
                        "job" : {},
                        "invocation": {}}
    # For workflow and job notifications, we have a dict(workflow_id|job_id, dict(cond, [actions]))
    # For invocation notifications, we have a dict(job_id, dict(inv_id, dict(cond, [actions])))
    # Process notifications
    for line in NOTIFY:
        line = line.strip()
        # Skip blank lines
        if len(line) == 0:
            continue
        # Skip comments
        if line.startswith("#"):
            continue
        # Check if we split it in 4 or 5 pieces
        if line.lower().startswith("invocation"):
            # This is an invocation notification, split and get all pieces
            my_entry = line.split(None, 4)
            if len(my_entry) != 5:
                logger.warning("cannot parse notification: %s, skipping..." % (line))
                continue
            my_type = my_entry[0].lower()
            my_id = my_entry[1]
            try:
                # Invocation (task) ids must be integers
                my_inv = int(my_entry[2])
            except ValueError:
                logger.warning("cannot parse notification: %s, skipping..." % (line))
                continue
            my_condition = my_entry[3]
            my_action = my_entry[4]
        else:
            # This is a workflow/job notification, split and get all pieces
            my_entry = line.split(None, 3)
            if len(my_entry) != 4:
                logger.warning("cannot parse notification: %s, skipping..." % (line))
                continue
            my_type = my_entry[0].lower()
            my_id = my_entry[1]
            my_condition = my_entry[2]
            my_action = my_entry[3]
        # Pick the right dictionary, depending on event type
        if my_type == "workflow":
            my_dict = my_notifications["workflow"]
            if my_id != wf_uuid:
                # Workflow-level notifications must refer to our own workflow
                logger.warning("workflow notification has id %s, our id is %s, skipping..."
                               % (my_id, wf_uuid))
                continue
        elif my_type == "job" or my_type == "daxjob" or my_type == "dagjob":
            my_dict = my_notifications["job"]
        elif my_type == "invocation":
            my_dict = my_notifications["invocation"]
        else:
            logger.warning("unknown notification type: %s, skipping..." % (line))
            continue
        logger.debug("loading notification: %s" % (line))
        my_notifications_read = my_notifications_read + 1
        # Make sure id is in dictionary
        if not my_id in my_dict:
            my_dict[my_id] = {}
        # For invocations, one extra level...
        if my_type == "invocation":
            my_dict = my_dict[my_id]
            if not my_inv in my_dict:
                my_dict[my_inv] = {}
            # Now add the notification condition, action pair
            if not my_condition in my_dict[my_inv]:
                # No actions, start with the list
                my_dict[my_inv][my_condition] = [my_action]
            else:
                # We already have an action(s), let's add the new one to the list
                my_dict[my_inv][my_condition].append(my_action)
        else:
            # Now add the notification condition, action pair
            if not my_condition in my_dict[my_id]:
                my_dict[my_id][my_condition] = [my_action]
            else:
                my_dict[my_id][my_condition].append(my_action)
    # Save our notifications for later use... (a reload for the same
    # workflow id replaces -- does not merge with -- the old entries)
    if wf_uuid in self._notifications:
        logger.debug("reloaded notifications for workflow %s" % (wf_uuid))
    self._notifications[wf_uuid] = my_notifications
    # Close file
    try:
        NOTIFY.close()
    except IOError:
        pass
    # Return number of notifications read
    logger.debug("loaded %d notifications for workflow %s" % (my_notifications_read, wf_uuid))
    return my_notifications_read
def process_workflow_notifications(self, wf, state):
    """
    This function takes care of processing workflow-level notifications.

    wf: workflow object (provides _wf_uuid, _dagman_exit_code,
    _current_timestamp and file locations used in the environment).
    state: either "start" or "end"; anything else is rejected.

    Matching notifications are queued on self._pending_notifications
    as (action, environment) tuples; nothing is executed here.
    """
    # Check if we have notifications for this workflow
    if not wf._wf_uuid in self._notifications:
        return
    # Get the notifications' dictionary for this workflow id
    wf_notifications = self._notifications[wf._wf_uuid]
    if "workflow" in wf_notifications:
        my_dict = wf_notifications["workflow"]
        if len(my_dict) == 0:
            # No workflow notifications
            return
    else:
        logger.warning("notification structure missing workflow entry...")
        return
    # Our workflow is must be in there...
    if wf._wf_uuid in my_dict:
        my_notifications = my_dict[wf._wf_uuid]
    else:
        logger.warning("notification has mismatching workflow id: %s different from %s" %
                       (wf._wf_uuid, str(my_dict)))
        return
    # Sanity check the state...
    if state != "start" and state != "end":
        logger.warning("unknown workflow state %s, continuing..." % (state))
        return
    # Now, match the workflow state to the conditions in the notifications...
    for k in my_notifications:
        # Look up the actions for this notification now
        my_actions = my_notifications[k]
        if state == "start":
            if k != "start" and k != "all":
                continue
            # Change k == 'all' to 'start'
            k = "start"
        if state == "end":
            if k == "on_error":
                # on_error fires only when the workflow failed
                if wf._dagman_exit_code == 0:
                    continue
            elif k == "on_success":
                # on_success fires only when the workflow succeeded
                if wf._dagman_exit_code != 0:
                    continue
            elif k != "at_end" and k != "all":
                continue
            if k == "all":
                k = "at_end"
        # Ok, we have a match!
        for action in my_actions:
            # Create dictionary with needed environment variables
            my_env = {}
            my_env["PEGASUS_EVENT"] = k
            my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp)
            my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp)
            my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir
            my_env["PEGASUS_STDOUT"] = wf._out_file
            my_env["PEGASUS_JOBID"] = wf._wf_uuid
            my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") +
                                      "-" + (wf._dax_index or "unknown"))
            if state == "end":
                # Workflow status is already in plain format, no need for conversion
                my_env["PEGASUS_STATUS"] = str(wf._dagman_exit_code)
            # Done, queue the notification
            self._pending_notifications.append((action, my_env))
            # print "WORKFLOW NOTIFICATION ---> ", action, my_env
def process_job_notifications(self, wf, state, job, status):
    """
    This function takes care of processing job-level notifications.

    wf: workflow object; job: job object.
    state: DAGMan job state -- one of EXECUTE, JOB_SUCCESS,
    POST_SCRIPT_SUCCESS, JOB_FAILURE, POST_SCRIPT_FAILURE (any other
    state is ignored).
    status: exit status forwarded as PEGASUS_STATUS for failure states.

    Matching notifications are queued on self._pending_notifications;
    for jobs that have a postscript, success/failure notifications are
    deferred until the POST_SCRIPT_* events.
    """
    # Check if we have notifications for this workflow
    if not wf._wf_uuid in self._notifications:
        return
    # Get the notifications' dictionary for this workflow id
    wf_notifications = self._notifications[wf._wf_uuid]
    if "job" in wf_notifications:
        my_dict = wf_notifications["job"]
    else:
        logger.warning("notification structure missing job entry...")
        return
    # Check if we have notifications for this job
    if not job._exec_job_id in my_dict:
        return
    my_notifications = my_dict[job._exec_job_id]
    # Slot 3 of the job_info entry marks whether this job has a postscript
    if job._exec_job_id in wf._job_info:
        if wf._job_info[job._exec_job_id][3] is None:
            job_has_post_script = False
        else:
            job_has_post_script = True
    else:
        logger.warning("cannot find job %s in job_info database... skipping notification..." % (job._exec_job_id))
        return
    # Now, match the job state to the conditions in the notifications...
    for k in my_notifications:
        # Look up the actions for this notification now
        my_actions = my_notifications[k]
        if state == "EXECUTE":
            if k != "start" and k != "all":
                continue
            # Change k to "start"
            k = "start"
            my_status = None
        elif state == "JOB_SUCCESS":
            if job_has_post_script:
                # Wait till postscript...
                continue
            if k == "start" or k == "on_error":
                continue
            if k == "all":
                k = "at_end"
            my_status = "0"
        elif state == "POST_SCRIPT_SUCCESS":
            if k == "start" or k == "on_error":
                continue
            if k == "all":
                k = "at_end"
            my_status = "0"
        elif state == "JOB_FAILURE":
            if job_has_post_script:
                # Wait till postscript...
                continue
            if k == "start" or k == "on_success":
                continue
            if k == "all":
                k = "at_end"
            my_status = status
        elif state == "POST_SCRIPT_FAILURE":
            if k == "start" or k == "on_success":
                continue
            if k == "all":
                k = "at_end"
            my_status = status
        else:
            # We are in some other state...
            continue
        my_output = os.path.join(wf._original_submit_dir, job._output_file)
        my_error = os.path.join(wf._original_submit_dir, job._error_file)
        # Use the rotated file names if at the end of the job
        if k != "start":
            my_output = my_output + ".%03d" % (job._job_output_counter)
            my_error = my_error + ".%03d" % (job._job_output_counter)
        # Ok, we have a match!
        for action in my_actions:
            # Create dictionary with needed environment variables
            my_env = {}
            my_env["PEGASUS_EVENT"] = k
            my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp)
            my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp)
            my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir
            my_env["PEGASUS_JOBID"] = job._exec_job_id
            my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") +
                                      "-" + (wf._dax_index or "unknown"))
            my_env["PEGASUS_STDOUT"] = my_output
            my_env["PEGASUS_STDERR"] = my_error
            if my_status is not None:
                my_env["PEGASUS_STATUS"] = str(my_status)
            # Done, queue the notification
            self._pending_notifications.append((action, my_env))
            # print "JOB NOTIFICATION ---> ", action, my_env
def process_invocation_notifications(self, wf, job, task_id, record=None):
    """
    This function takes care of processing invocation-level notifications.

    wf: workflow object; job: job object.
    task_id: invocation (task) id within the job.
    record: optional dict; its "raw" entry, when present, overrides
    the job's main exitcode as the status matched against the
    on_success/on_error conditions.
    """
    if record is None:
        record = {}
    # Check if we have notifications for this workflow
    if not wf._wf_uuid in self._notifications:
        return
    # Get the notifications' dictionary for this workflow id
    wf_notifications = self._notifications[wf._wf_uuid]
    if "invocation" in wf_notifications:
        my_dict = wf_notifications["invocation"]
    else:
        logger.warning("notification structure missing invocation entry...")
        return
    # Check if we have notifications for this job
    if not job._exec_job_id in my_dict:
        return
    # Advance to the task dictionary
    my_dict = my_dict[job._exec_job_id]
    # Check if we have notifications for this invocation
    if not task_id in my_dict:
        return
    my_notifications = my_dict[task_id]
    # Now, match the invocation state to the condition in the notification
    for k in my_notifications:
        # Look up the actions for this notification now
        my_actions = my_notifications[k]
        if "raw" in record:
            my_status = record["raw"]
        else:
            my_status = job._main_job_exitcode
        # Convert exitcode to int (left as-is if it doesn't parse)
        try:
            my_status = int(my_status)
        except ValueError:
            pass
        # Now, compare to the notification condition(s)
        if my_status == 0:
            if k == "on_error":
                continue
        if my_status != 0:
            if k == "on_success":
                continue
        if k == "all":
            k = "at_end"
        # Here, we always use the rotated file names as the invocation has already finished...
        my_output = os.path.join(wf._original_submit_dir, job._output_file) + ".%03d" % (job._job_output_counter)
        my_error = os.path.join(wf._original_submit_dir, job._error_file) + ".%03d" % (job._job_output_counter)
        # Ok, we have a match!
        for action in my_actions:
            # Create dictionary with needed environment variables
            my_env = {}
            my_env["PEGASUS_EVENT"] = k
            my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp)
            my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp)
            my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir
            my_env["PEGASUS_JOBID"] = job._exec_job_id
            my_env["PEGASUS_INVID"] = str(task_id)
            my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") +
                                      "-" + (wf._dax_index or "unknown"))
            my_env["PEGASUS_STDOUT"] = my_output
            my_env["PEGASUS_STDERR"] = my_error
            if k != "start":
                # Convert raw exitcode into human-parseable format
                my_env["PEGASUS_STATUS"] = str(utils.raw_to_regular(my_status))
            # Done, queue the notification
            self._pending_notifications.append((action, my_env))
            # print "INVOCATION NOTIFICATION ---> ", action, my_env
def remove_notifications(self, wf_uuid):
    """
    Drop all notifications registered for workflow wf_uuid from the
    _notifications dictionary. A no-op when the workflow is unknown.
    """
    if wf_uuid not in self._notifications:
        return
    logger.debug("deleting notifications for workflow %s..." % (wf_uuid))
    # Forget everything we parsed for this workflow
    del self._notifications[wf_uuid]
| gpl-3.0 |
pfgenyun/tamarin-redux | build/buildbot/master/custom/buildbot/slave/commands.py | 8 | 105843 | # -*- test-case-name: buildbot.test.test_slavecommand -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os, re, signal, shutil, types, time
from stat import ST_CTIME, ST_MTIME, ST_SIZE
from zope.interface import implements
from twisted.internet.protocol import ProcessProtocol
from twisted.internet import reactor, defer, task
from twisted.python import log, failure, runtime
from twisted.python.procutils import which
from buildbot.slave.interfaces import ISlaveCommand
from buildbot.slave.registry import registerSlaveCommand
# this used to be a CVS $-style "Revision" auto-updated keyword, but since I
# moved to Darcs as the primary repository, this is updated manually each
# time this file is changed. The last cvs_ver that was here was 1.51 .
# Protocol/capability version advertised by this slave; bump it when a
# command gains new behavior (see the history below).
command_version = "2.8"

# version history:
#  >=1.17: commands are interruptable
#  >=1.28: Arch understands 'revision', added Bazaar
#  >=1.33: Source classes understand 'retry'
#  >=1.39: Source classes correctly handle changes in branch (except Git)
#          Darcs accepts 'revision' (now all do but Git) (well, and P4Sync)
#          Arch/Baz should accept 'build-config'
#  >=1.51: (release 0.7.3)
#  >= 2.1: SlaveShellCommand now accepts 'initial_stdin', 'keep_stdin_open',
#          and 'logfiles'. It now sends 'log' messages in addition to
#          stdout/stdin/header/rc. It acquired writeStdin/closeStdin methods,
#          but these are not remotely callable yet.
#          (not externally visible: ShellCommandPP has writeStdin/closeStdin.
#          ShellCommand accepts new arguments (logfiles=, initialStdin=,
#          keepStdinOpen=) and no longer accepts stdin=)
#          (release 0.7.4)
#  >= 2.2: added monotone, uploadFile, and downloadFile (release 0.7.5)
#  >= 2.3: added bzr (release 0.7.6)
#  >= 2.4: Git understands 'revision' and branches
#  >= 2.5: workaround added for remote 'hg clone --rev REV' when hg<0.9.2
#  >= 2.6: added uploadDirectory
#  >= 2.7: added usePTY option to SlaveShellCommand
#  >= 2.8: added username and password args to SVN class
class CommandInterrupted(Exception):
    """Signals that a running slave command was interrupted.

    NOTE(review): no raise site is visible in this chunk -- confirm usage
    elsewhere in the file."""
    pass
class TimeoutError(Exception):
    """Signals that a command ran past its allowed time.

    NOTE(review): no raise site is visible in this chunk; also shadows
    the Python 3 builtin TimeoutError -- harmless under the Python 2
    this file targets."""
    pass
class Obfuscated:
    """An obfuscated string in a command: 'real' is what actually runs,
    'fake' is what gets displayed and logged."""

    def __init__(self, real, fake):
        self.real = real
        self.fake = fake

    def __str__(self):
        return self.fake

    def __repr__(self):
        return repr(self.fake)

    @staticmethod
    def get_real(command):
        """Return 'command' with every Obfuscated element replaced by its
        real value; non-list commands pass through untouched."""
        if type(command) is not list:
            return command
        unmasked = []
        for item in command:
            if isinstance(item, Obfuscated):
                unmasked.append(item.real)
            else:
                unmasked.append(item)
        return unmasked

    @staticmethod
    def get_fake(command):
        """Return 'command' with every Obfuscated element replaced by its
        fake (displayable) value; non-list commands pass through untouched."""
        if type(command) is not list:
            return command
        masked = []
        for item in command:
            if isinstance(item, Obfuscated):
                masked.append(item.fake)
            else:
                masked.append(item)
        return masked
class AbandonChain(Exception):
    """A series of chained steps can raise this exception to indicate that
    one of the intermediate ShellCommands has failed, such that there is no
    point in running the remainder. 'rc' should be the non-zero exit code of
    the failing ShellCommand."""

    def __repr__(self):
        rc = self.args[0]
        return "<AbandonChain rc=%s>" % rc
def getCommand(name):
    """Locate the executable 'name' on the PATH and return its full path.

    Raises RuntimeError when no candidate is found."""
    candidates = which(name)
    if not candidates:
        raise RuntimeError("Couldn't find executable for '%s'" % name)
    return candidates[0]
def rmdirRecursive(dir):
    """This is a replacement for shutil.rmtree that works better under
    windows. Thanks to Bear at the OSAF for the code.

    Silently returns if 'dir' does not exist; a symlink is removed
    without following it. (NOTE: the 0700/0600 octal literals make
    this function Python 2 only.)"""
    if not os.path.exists(dir):
        return
    if os.path.islink(dir):
        os.remove(dir)
        return
    # Verify the directory is read/write/execute for the current user
    os.chmod(dir, 0700)
    for name in os.listdir(dir):
        full_name = os.path.join(dir, name)
        # on Windows, if we don't have write permission we can't remove
        # the file/directory either, so turn that on
        if os.name == 'nt':
            if not os.access(full_name, os.W_OK):
                # I think this is now redundant, but I don't have an NT
                # machine to test on, so I'm going to leave it in place
                # -warner
                os.chmod(full_name, 0600)
        if os.path.isdir(full_name):
            # recurse into subdirectories before removing this level
            rmdirRecursive(full_name)
        else:
            os.chmod(full_name, 0700)
            os.remove(full_name)
    os.rmdir(dir)
class ShellCommandPP(ProcessProtocol):
    """Twisted ProcessProtocol that glues a spawned child process to a
    ShellCommand: stdout/stderr chunks are forwarded to the command, and
    stdin writes are buffered until the process connection exists."""
    debug = False

    def __init__(self, command):
        # command: the owning ShellCommand instance
        self.command = command
        # stdin bytes received before connectionMade(); flushed there
        self.pending_stdin = ""
        self.stdin_finished = False

    def writeStdin(self, data):
        # must not be called after closeStdin()
        assert not self.stdin_finished
        if self.connected:
            self.transport.write(data)
        else:
            # not connected yet: buffer until connectionMade()
            self.pending_stdin += data

    def closeStdin(self):
        if self.connected:
            if self.debug: log.msg(" closing stdin")
            self.transport.closeStdin()
        # remember the close even if not connected yet (see connectionMade)
        self.stdin_finished = True

    def connectionMade(self):
        # called by twisted once the child process has been spawned
        if self.debug:
            log.msg("ShellCommandPP.connectionMade")
        if not self.command.process:
            if self.debug:
                log.msg(" assigning self.command.process: %s" %
                        (self.transport,))
            self.command.process = self.transport

        # TODO: maybe we shouldn't close stdin when using a PTY. I can't test
        # this yet, recent debian glibc has a bug which causes thread-using
        # test cases to SIGHUP trial, and the workaround is to either run
        # the whole test with /bin/sh -c " ".join(argv) (way gross) or to
        # not use a PTY. Once the bug is fixed, I'll be able to test what
        # happens when you close stdin on a pty. My concern is that it will
        # SIGHUP the child (since we are, in a sense, hanging up on them).
        # But it may well be that keeping stdout open prevents the SIGHUP
        # from being sent.
        #if not self.command.usePTY:

        if self.pending_stdin:
            # flush anything written before the connection existed
            if self.debug: log.msg(" writing to stdin")
            self.transport.write(self.pending_stdin)
        if self.stdin_finished:
            if self.debug: log.msg(" closing stdin")
            self.transport.closeStdin()

    def outReceived(self, data):
        if self.debug:
            log.msg("ShellCommandPP.outReceived")
        self.command.addStdout(data)

    def errReceived(self, data):
        if self.debug:
            log.msg("ShellCommandPP.errReceived")
        self.command.addStderr(data)

    def processEnded(self, status_object):
        if self.debug:
            log.msg("ShellCommandPP.processEnded", status_object)
        # status_object is a Failure wrapped around an
        # error.ProcessTerminated or and error.ProcessDone.
        # requires twisted >= 1.0.4 to overcome a bug in process.py
        sig = status_object.value.signal
        rc = status_object.value.exitCode
        self.command.finished(sig, rc)
class LogFileWatcher:
    """Polls a logfile produced by a running command and streams newly
    appended data back to the owning ShellCommand via addLogfile."""
    POLL_INTERVAL = 2  # seconds between polls

    def __init__(self, command, name, logfile):
        # command: owning ShellCommand; name: log-stream name sent with
        # each chunk; logfile: path of the file to watch
        self.command = command
        self.name = name
        self.logfile = logfile
        log.msg("LogFileWatcher created to watch %s" % logfile)
        # we are created before the ShellCommand starts. If the logfile we're
        # supposed to be watching already exists, record its size and
        # ctime/mtime so we can tell when it starts to change.
        self.old_logfile_stats = self.statFile()
        self.started = False

        # every 2 seconds we check on the file again
        self.poller = task.LoopingCall(self.poll)

    def start(self):
        self.poller.start(self.POLL_INTERVAL).addErrback(self._cleanupPoll)

    def _cleanupPoll(self, err):
        # errback for the LoopingCall: log the failure and stop polling
        log.err(err, msg="Polling error")
        self.poller = None

    def stop(self):
        # one final poll to pick up any last output, then shut down
        self.poll()
        if self.poller is not None:
            self.poller.stop()
        if self.started:
            self.f.close()

    def statFile(self):
        # returns (ctime, mtime, size) or None when the file doesn't exist
        if os.path.exists(self.logfile):
            s = os.stat(self.logfile)
            return (s[ST_CTIME], s[ST_MTIME], s[ST_SIZE])
        return None

    def poll(self):
        if not self.started:
            s = self.statFile()
            if s == self.old_logfile_stats:
                return # not started yet
            if not s:
                # the file was there, but now it's deleted. Forget about the
                # initial state, clearly the process has deleted the logfile
                # in preparation for creating a new one.
                self.old_logfile_stats = None
                return # no file to work with
            self.f = open(self.logfile, "rb")
            self.started = True
        # read (and forward) everything appended since the last poll
        self.f.seek(self.f.tell(), 0)
        while True:
            data = self.f.read(10000)
            if not data:
                return
            self.command.addLogfile(self.name, data)
class ShellCommand:
    # This is a helper class, used by SlaveCommands to run programs in a
    # child shell.

    notreally = False          # dry-run flag: when True, only log what would run
    BACKUP_TIMEOUT = 5         # NOTE(review): used outside this chunk -- confirm
    KILL = "KILL"              # NOTE(review): used outside this chunk -- confirm
    CHUNK_LIMIT = 128*1024     # NOTE(review): used outside this chunk -- confirm

    # For sending elapsed time:
    startTime = None
    elapsedTime = None
    # I wish we had easy access to CLOCK_MONOTONIC in Python:
    # http://www.opengroup.org/onlinepubs/000095399/functions/clock_getres.html
    # Then changes to the system clock during a run wouldn't effect the "elapsed
    # time" results.
def __init__(self, builder, command,
             workdir, environ=None,
             sendStdout=True, sendStderr=True, sendRC=True,
             timeout=None, initialStdin=None, keepStdinOpen=False,
             keepStdout=False, keepStderr=False, logEnviron=False,
             logfiles=None, usePTY="slave-config"):
    """
    @param keepStdout: if True, we keep a copy of all the stdout text
                       that we've seen. This copy is available in
                       self.stdout, which can be read after the command
                       has finished.
    @param keepStderr: same, for stderr

    @param usePTY: "slave-config" -> use the SlaveBuilder's usePTY;
        otherwise, true to use a PTY, false to not use a PTY.

    @param logfiles: optional dict mapping log-stream names to file
        names (relative to workdir) to watch while the command runs.
    """
    # Fix: the old default was the shared mutable 'logfiles={}'; a None
    # sentinel behaves identically for callers while avoiding that trap.
    if logfiles is None:
        logfiles = {}
    self.builder = builder
    self.command = Obfuscated.get_real(command)
    self.fake_command = Obfuscated.get_fake(command)
    self.sendStdout = sendStdout
    self.sendStderr = sendStderr
    self.sendRC = sendRC
    self.logfiles = logfiles
    self.workdir = workdir
    # start from the slave's environment and layer the overrides on top
    self.environ = os.environ.copy()
    if environ:
        if 'PYTHONPATH' in environ:
            ppath = environ['PYTHONPATH']
            # Need to do os.pathsep translation. We could either do that
            # by replacing all incoming ':'s with os.pathsep, or by
            # accepting lists. I like lists better.
            if not isinstance(ppath, str):
                # If it's not a string, treat it as a sequence to be
                # turned in to a string.
                ppath = os.pathsep.join(ppath)

            if 'PYTHONPATH' in self.environ:
                # special case, prepend the builder's items to the
                # existing ones. This will break if you send over empty
                # strings, so don't do that.
                ppath = ppath + os.pathsep + self.environ['PYTHONPATH']

            environ['PYTHONPATH'] = ppath

        self.environ.update(environ)
    self.initialStdin = initialStdin
    self.keepStdinOpen = keepStdinOpen
    self.logEnviron = logEnviron
    self.timeout = timeout
    self.timer = None
    self.keepStdout = keepStdout
    self.keepStderr = keepStderr

    if usePTY == "slave-config":
        self.usePTY = self.builder.usePTY
    else:
        self.usePTY = usePTY

    # usePTY=True is a convenience for cleaning up all children and
    # grandchildren of a hung command. Fall back to usePTY=False on systems
    # and in situations where ptys cause problems. PTYs are posix-only,
    # and for .closeStdin to matter, we must use a pipe, not a PTY
    if runtime.platformType != "posix" or initialStdin is not None:
        if self.usePTY and usePTY != "slave-config":
            self.sendStatus({'header': "WARNING: disabling usePTY for this command"})
        self.usePTY = False

    self.logFileWatchers = []
    for name, filename in self.logfiles.items():
        w = LogFileWatcher(self, name,
                           os.path.join(self.workdir, filename))
        self.logFileWatchers.append(w)
def __repr__(self):
    """Identify this command (by its obfuscated form) in logs."""
    return "<slavecommand.ShellCommand '%s'>" % (self.fake_command,)
def sendStatus(self, status):
    """Forward one status dictionary to the master via our builder."""
    self.builder.sendUpdate(status)
def start(self):
    """Kick off the child process.

    Returns a Deferred which fires (with the exit code) when the
    command completes; any exception raised while starting is turned
    into an errback carrying AbandonChain(-1), i.e. it is reported as
    a shell failure."""
    # return a Deferred which fires (with the exit code) when the command
    # completes
    if self.keepStdout:
        # buffer for captured stdout (filled as output arrives)
        self.stdout = ""
    if self.keepStderr:
        self.stderr = ""
    self.deferred = defer.Deferred()
    try:
        self._startCommand()
    except:
        log.msg("error in ShellCommand._startCommand")
        log.err()
        # pretend it was a shell error
        self.deferred.errback(AbandonChain(-1))
    return self.deferred
def _startCommand(self):
    """Spawn the child process.

    Emits a series of 'header' status messages (command line, cwd,
    timeout, watched logfiles, environment, stdin disposition, PTY
    usage), then calls reactor.spawnProcess and arms the inactivity
    timer and the logfile watchers.
    """
    # ensure workdir exists
    if not os.path.isdir(self.workdir):
        os.makedirs(self.workdir)
    log.msg("ShellCommand._startCommand")
    if self.notreally:
        # dry-run mode: just report what would have been run, rc=0
        self.sendStatus({'header': "command '%s' in dir %s" % \
                         (self.fake_command, self.workdir)})
        self.sendStatus({'header': "(not really)\n"})
        self.finished(None, 0)
        return

    self.pp = ShellCommandPP(self)

    if type(self.command) in types.StringTypes:
        # string command: run it through a shell
        if runtime.platformType == 'win32':
            argv = os.environ['COMSPEC'].split() # allow %COMSPEC% to have args
            if '/c' not in argv: argv += ['/c']
            argv += [self.command]
        else:
            # for posix, use /bin/sh. for other non-posix, well, doesn't
            # hurt to try
            argv = ['/bin/sh', '-c', self.command]
        display = self.fake_command
    else:
        # list command: exec directly (preferred: no shell quoting issues)
        if runtime.platformType == 'win32':
            argv = os.environ['COMSPEC'].split() # allow %COMSPEC% to have args
            if '/c' not in argv: argv += ['/c']
            argv += list(self.command)
        else:
            argv = self.command
        display = " ".join(self.fake_command)

    # $PWD usually indicates the current directory; spawnProcess may not
    # update this value, though, so we set it explicitly here.
    self.environ['PWD'] = os.path.abspath(self.workdir)

    # self.stdin is handled in ShellCommandPP.connectionMade

    # first header line is the command in plain text, argv joined with
    # spaces. You should be able to cut-and-paste this into a shell to
    # obtain the same results. If there are spaces in the arguments, too
    # bad.
    log.msg(" " + display)
    self.sendStatus({'header': display+"\n"})

    # then comes the secondary information
    msg = " in dir %s" % (self.workdir,)
    if self.timeout:
        msg += " (timeout %d secs)" % (self.timeout,)
    log.msg(" " + msg)
    self.sendStatus({'header': msg+"\n"})

    msg = " watching logfiles %s" % (self.logfiles,)
    log.msg(" " + msg)
    self.sendStatus({'header': msg+"\n"})

    # then the obfuscated command array for resolving unambiguity
    msg = " argv: %s" % (self.fake_command,)
    log.msg(" " + msg)
    self.sendStatus({'header': msg+"\n"})

    # then the environment, since it sometimes causes problems
    if self.logEnviron:
        msg = " environment:\n"
        # Python 2 idiom: keys() returns a list we can sort in place
        env_names = self.environ.keys()
        env_names.sort()
        for name in env_names:
            msg += " %s=%s\n" % (name, self.environ[name])
        log.msg(" environment: %s" % (self.environ,))
        self.sendStatus({'header': msg})

    if self.initialStdin:
        msg = " writing %d bytes to stdin" % len(self.initialStdin)
        log.msg(" " + msg)
        self.sendStatus({'header': msg+"\n"})

    if self.keepStdinOpen:
        msg = " leaving stdin open"
    else:
        msg = " closing stdin"
    log.msg(" " + msg)
    self.sendStatus({'header': msg+"\n"})

    msg = " using PTY: %s" % bool(self.usePTY)
    log.msg(" " + msg)
    self.sendStatus({'header': msg+"\n"})

    # this will be buffered until connectionMade is called
    if self.initialStdin:
        self.pp.writeStdin(self.initialStdin)
    if not self.keepStdinOpen:
        self.pp.closeStdin()

    # win32eventreactor's spawnProcess (under twisted <= 2.0.1) returns
    # None, as opposed to all the posixbase-derived reactors (which
    # return the new Process object). This is a nuisance. We can make up
    # for it by having the ProcessProtocol give us their .transport
    # attribute after they get one. I'd prefer to get it from
    # spawnProcess because I'm concerned about returning from this method
    # without having a valid self.process to work with. (if kill() were
    # called right after we return, but somehow before connectionMade
    # were called, then kill() would blow up).
    self.process = None
    self.startTime = time.time()
    p = reactor.spawnProcess(self.pp, argv[0], argv,
                             self.environ,
                             self.workdir,
                             usePTY=self.usePTY)
    # connectionMade might have been called during spawnProcess
    if not self.process:
        self.process = p

    # connectionMade also closes stdin as long as we're not using a PTY.
    # This is intended to kill off inappropriately interactive commands
    # better than the (long) hung-command timeout. ProcessPTY should be
    # enhanced to allow the same childFDs argument that Process takes,
    # which would let us connect stdin to /dev/null .

    if self.timeout:
        self.timer = reactor.callLater(self.timeout, self.doTimeout)

    for w in self.logFileWatchers:
        w.start()
def _chunkForSend(self, data):
    """Yield successive pieces of *data*, each at most CHUNK_LIMIT long.

    PB has a hardwired 640k string-size limit, so status chunks are kept
    well below it.
    """
    limit = self.CHUNK_LIMIT
    offset = 0
    total = len(data)
    while offset < total:
        yield data[offset:offset + limit]
        offset += limit
def addStdout(self, data):
    """Handle new stdout data: forward, optionally keep, reset the timer."""
    if self.sendStdout:
        send = self.sendStatus
        for piece in self._chunkForSend(data):
            send({'stdout': piece})
    if self.keepStdout:
        self.stdout = self.stdout + data
    if self.timer:
        self.timer.reset(self.timeout)
def addStderr(self, data):
    """Handle new stderr data: forward, optionally keep, reset the timer."""
    if self.sendStderr:
        send = self.sendStatus
        for piece in self._chunkForSend(data):
            send({'stderr': piece})
    if self.keepStderr:
        self.stderr = self.stderr + data
    if self.timer:
        self.timer.reset(self.timeout)
def addLogfile(self, name, data):
    """Forward new data from a watched logfile as 'log' status updates."""
    for piece in self._chunkForSend(data):
        self.sendStatus({'log': (name, piece)})
    if self.timer:
        self.timer.reset(self.timeout)
def finished(self, sig, rc):
    """Deliver the final status for the command.

    *sig* is the signal that killed the process (or None), *rc* the exit
    code. Death by signal is reported to the master as rc=-1.
    """
    self.elapsedTime = time.time() - self.startTime
    log.msg("command finished with signal %s, exit code %s, elapsedTime: %0.6f" % (sig,rc,self.elapsedTime))
    for w in self.logFileWatchers:
        # this will send the final updates
        w.stop()
    if sig is not None:
        rc = -1
    if self.sendRC:
        if sig is not None:
            self.sendStatus(
                {'header': "process killed by signal %d\n" % sig})
        self.sendStatus({'rc': rc})
    self.sendStatus({'header': "elapsedTime=%0.6f\n" % self.elapsedTime})
    if self.timer:
        self.timer.cancel()
        self.timer = None
    d = self.deferred
    self.deferred = None
    if d:
        d.callback(rc)
    else:
        # guard against double completion (e.g. kill racing natural exit)
        log.msg("Hey, command %s finished twice" % self)
def failed(self, why):
    """Errback path: cancel the timer and fire the Deferred with *why*."""
    log.msg("ShellCommand.failed: command failed: %s" % (why,))
    if self.timer:
        self.timer.cancel()
        self.timer = None
    d, self.deferred = self.deferred, None
    if not d:
        # already completed once (e.g. timeout raced with exit)
        log.msg("Hey, command %s finished twice" % self)
    else:
        d.errback(why)
def doTimeout(self):
    """Inactivity timer fired: kill the command with an explanation."""
    self.timer = None
    self.kill("command timed out: %d seconds without output" % self.timeout)
def kill(self, msg):
    """Terminate the running process (and, where possible, its children).

    Called by the inactivity timeout or when the build is interrupted.
    *msg* is logged and sent to the master as a header.  After signalling
    we expect .finished() to be called; a backup timer abandons the
    command if the process refuses to die.
    """
    if self.timer:
        self.timer.cancel()
        self.timer = None
    if hasattr(self.process, "pid"):
        msg += ", killing pid %d" % self.process.pid
    log.msg(msg)
    self.sendStatus({'header': "\n" + msg + "\n"})

    hit = 0
    if runtime.platformType == "posix":
        try:
            # really want to kill off all child processes too. Process
            # Groups are ideal for this, but that requires
            # spawnProcess(usePTY=1). Try both ways in case process was
            # not started that way.

            # the test suite sets self.KILL=None to tell us we should
            # only pretend to kill the child. This lets us test the
            # backup timer.

            sig = None
            if self.KILL is not None:
                sig = getattr(signal, "SIG"+ self.KILL, None)

            # bugfix: compare to None with 'is', not '==' (identity test
            # is the idiom; '==' can be hijacked by a weird KILL value)
            if self.KILL is None:
                log.msg("self.KILL==None, only pretending to kill child")
            elif sig is None:
                log.msg("signal module is missing SIG%s" % self.KILL)
            elif not hasattr(os, "kill"):
                log.msg("os module is missing the 'kill' function")
            else:
                log.msg("trying os.kill(-pid, %d)" % (sig,))
                # TODO: maybe use os.killpg instead of a negative pid?
                os.kill(-self.process.pid, sig)
                log.msg(" signal %s sent successfully" % sig)
                hit = 1
        except OSError:
            # probably no-such-process, maybe because there is no process
            # group
            pass
    if not hit:
        try:
            if self.KILL is None:
                log.msg("self.KILL==None, only pretending to kill child")
            else:
                log.msg("trying process.signalProcess('KILL')")
                self.process.signalProcess(self.KILL)
                log.msg(" signal %s sent successfully" % (self.KILL,))
                hit = 1
        except OSError:
            # could be no-such-process, because they finished very recently
            pass
    if not hit:
        log.msg("signalProcess/os.kill failed both times")

    if runtime.platformType == "posix":
        # we only do this under posix because the win32eventreactor
        # blocks here until the process has terminated, while closing
        # stderr. This is weird.
        self.pp.transport.loseConnection()

    # finished ought to be called momentarily. Just in case it doesn't,
    # set a timer which will abandon the command.
    self.timer = reactor.callLater(self.BACKUP_TIMEOUT,
                                   self.doBackupTimeout)
def doBackupTimeout(self):
    """Backup timer fired: the process ignored our kill; give up on it."""
    log.msg("we tried to kill the process, and it wouldn't die.."
            " finish anyway")
    self.timer = None
    note = "SIGKILL failed to kill process"
    self.sendStatus({'header': note + "\n"})
    if self.sendRC:
        self.sendStatus({'header': "using fake rc=-1\n"})
        self.sendStatus({'rc': -1})
    self.failed(TimeoutError(note))
def writeStdin(self, data):
    """Forward *data* to the child's stdin via the process protocol."""
    self.pp.writeStdin(data)
def closeStdin(self):
    """Close the child's stdin pipe via the process protocol."""
    self.pp.closeStdin()
class Command:
    """This class defines one command that can be invoked by the build master.
    The command is executed on the slave side, and always sends back a
    completion message when it finishes. It may also send intermediate status
    as it runs (by calling builder.sendStatus). Some commands can be
    interrupted (either by the build master or a local timeout), in which
    case the step is expected to complete normally with a status message that
    indicates an error occurred.

    These commands are used by BuildSteps on the master side. Each kind of
    BuildStep uses a single Command. The slave must implement all the
    Commands required by the set of BuildSteps used for any given build:
    this is checked at startup time.

    All Commands are constructed with the same signature:
     c = CommandClass(builder, args)
    where 'builder' is the parent SlaveBuilder object, and 'args' is a
    dict that is interpreted per-command.

    The setup(args) method is available for setup, and is run from __init__.

    The Command is started with start(). This method must be implemented in a
    subclass, and it should return a Deferred. When your step is done, you
    should fire the Deferred (the results are not used). If the command is
    interrupted, it should fire the Deferred anyway.

    While the command runs. it may send status messages back to the
    buildmaster by calling self.sendStatus(statusdict). The statusdict is
    interpreted by the master-side BuildStep however it likes.

    A separate completion message is sent when the deferred fires, which
    indicates that the Command has finished, but does not carry any status
    data. If the Command needs to return an exit code of some sort, that
    should be sent as a regular status message before the deferred is fired .
    Once builder.commandComplete has been run, no more status messages may be
    sent.

    If interrupt() is called, the Command should attempt to shut down as
    quickly as possible. Child processes should be killed, new ones should
    not be started. The Command should send some kind of error status update,
    then complete as usual by firing the Deferred.

    .interrupted should be set by interrupt(), and can be tested to avoid
    sending multiple error status messages.

    If .running is False, the bot is shutting down (or has otherwise lost the
    connection to the master), and should not send any status messages. This
    is checked in Command.sendStatus .
    """
    # fix: the docstring used to sit *below* implements(), making it a no-op
    # string statement instead of Command.__doc__; it now comes first.
    implements(ISlaveCommand)

    # builder methods:
    #  sendStatus(dict) (zero or more)
    #  commandComplete() or commandInterrupted() (one, at end)

    debug = False
    interrupted = False
    running = False # set by Builder, cleared on shutdown or when the
                    # Deferred fires

    def __init__(self, builder, stepId, args):
        self.builder = builder
        self.stepId = stepId # just for logging
        self.args = args
        self.setup(args)

    def setup(self, args):
        """Override this in a subclass to extract items from the args dict."""
        pass

    def doStart(self):
        """Mark the command running and start it; .running is cleared by
        commandComplete() whatever the outcome."""
        self.running = True
        d = defer.maybeDeferred(self.start)
        d.addBoth(self.commandComplete)
        return d

    def start(self):
        """Start the command. This method should return a Deferred that will
        fire when the command has completed. The Deferred's argument will be
        ignored.

        This method should be overridden by subclasses."""
        # fix: use the call form of raise instead of the obsolete
        # 'raise E, msg' statement, matching SourceBase's abstract methods
        raise NotImplementedError("You must implement this in a subclass")

    def sendStatus(self, status):
        """Send a status update to the master."""
        if self.debug:
            log.msg("sendStatus", status)
        if not self.running:
            # connection lost or bot shutting down: drop the message
            log.msg("would sendStatus but not .running")
            return
        self.builder.sendUpdate(status)

    def doInterrupt(self):
        self.running = False
        self.interrupt()

    def interrupt(self):
        """Override this in a subclass to allow commands to be interrupted.
        May be called multiple times, test and set self.interrupted=True if
        this matters."""
        pass

    def commandComplete(self, res):
        # passthrough: clear .running, preserve the result or Failure
        self.running = False
        return res

    # utility methods, mostly used by SlaveShellCommand and the like

    def _abandonOnFailure(self, rc):
        """Raise AbandonChain for any non-zero exit code; pass 0 through."""
        if type(rc) is not int:
            log.msg("weird, _abandonOnFailure was given rc=%s (%s)" % \
                    (rc, type(rc)))
        assert isinstance(rc, int)
        if rc != 0:
            raise AbandonChain(rc)
        return rc

    def _sendRC(self, res):
        self.sendStatus({'rc': 0})

    def _checkAbandoned(self, why):
        """Errback: report the rc carried by AbandonChain; re-raise others."""
        log.msg("_checkAbandoned", why)
        why.trap(AbandonChain)
        log.msg(" abandoning chain", why.value)
        self.sendStatus({'rc': why.value.args[0]})
        return None
class SlaveFileUploadCommand(Command):
    """
    Upload a file from slave to build master

    Arguments:

        - ['workdir']: base directory to use
        - ['slavesrc']: name of the slave-side file to read from
        - ['writer']: RemoteReference to a transfer._FileWriter object
        - ['maxsize']: max size (in bytes) of file to write
        - ['blocksize']: max size for each data block
    """
    debug = False

    def setup(self, args):
        # 'remaining' counts down from maxsize; None means unlimited
        self.workdir = args['workdir']
        self.filename = args['slavesrc']
        self.writer = args['writer']
        self.remaining = args['maxsize']
        self.blocksize = args['blocksize']
        self.stderr = None
        self.rc = 0

    def start(self):
        """Open the local file and start the chunked upload loop.

        Returns a Deferred that fires (via .finished) once the remote
        writer has been closed.
        """
        if self.debug:
            log.msg('SlaveFileUploadCommand started')

        # Open file
        self.path = os.path.join(self.builder.basedir,
                                 self.workdir,
                                 os.path.expanduser(self.filename))
        try:
            self.fp = open(self.path, 'rb')
            if self.debug:
                log.msg('Opened %r for upload' % self.path)
        except:
            # TODO: this needs cleanup
            # NOTE(review): deliberately broad except -- any open failure
            # is reported via stderr/rc; _writeBlock treats fp=None as EOF
            self.fp = None
            self.stderr = 'Cannot open file %r for upload' % self.path
            self.rc = 1
            if self.debug:
                log.msg('Cannot open file %r for upload' % self.path)

        self.sendStatus({'header': "sending %s" % self.path})

        d = defer.Deferred()
        reactor.callLater(0, self._loop, d)
        def _close(res):
            # close the file, but pass through any errors from _loop
            d1 = self.writer.callRemote("close")
            d1.addErrback(log.err)
            d1.addCallback(lambda ignored: res)
            return d1
        d.addBoth(_close)
        d.addBoth(self.finished)
        return d

    def _loop(self, fire_when_done):
        # send one block per reactor turn until _writeBlock reports True
        d = defer.maybeDeferred(self._writeBlock)
        def _done(finished):
            if finished:
                fire_when_done.callback(None)
            else:
                self._loop(fire_when_done)
        def _err(why):
            fire_when_done.errback(why)
        d.addCallbacks(_done, _err)
        return None

    def _writeBlock(self):
        """Write a block of data to the remote writer"""
        if self.interrupted or self.fp is None:
            if self.debug:
                log.msg('SlaveFileUploadCommand._writeBlock(): end')
            return True
        length = self.blocksize
        if self.remaining is not None and length > self.remaining:
            length = self.remaining

        if length <= 0:
            # maxsize exhausted: truncate, report once via stderr/rc
            if self.stderr is None:
                self.stderr = 'Maximum filesize reached, truncating file %r' \
                    % self.path
                self.rc = 1
            data = ''
        else:
            data = self.fp.read(length)

        if self.debug:
            log.msg('SlaveFileUploadCommand._writeBlock(): '+
                    'allowed=%d readlen=%d' % (length, len(data)))
        if len(data) == 0:
            log.msg("EOF: callRemote(close)")
            return True

        if self.remaining is not None:
            self.remaining = self.remaining - len(data)
            assert self.remaining >= 0
        d = self.writer.callRemote('write', data)
        # False tells _loop to keep going
        d.addCallback(lambda res: False)
        return d

    def interrupt(self):
        if self.debug:
            log.msg('interrupted')
        if self.interrupted:
            return
        if self.stderr is None:
            self.stderr = 'Upload of %r interrupted' % self.path
            self.rc = 1
        self.interrupted = True
        # the next _writeBlock call will notice the .interrupted flag

    def finished(self, res):
        # report stderr/rc to the master, then pass the result through
        if self.debug:
            log.msg('finished: stderr=%r, rc=%r' % (self.stderr, self.rc))
        if self.stderr is None:
            self.sendStatus({'rc': self.rc})
        else:
            self.sendStatus({'stderr': self.stderr, 'rc': self.rc})
        return res

registerSlaveCommand("uploadFile", SlaveFileUploadCommand, command_version)
class SlaveDirectoryUploadCommand(Command):
    """
    Upload a directory from slave to build master

    Arguments:

        - ['workdir']: base directory to use
        - ['slavesrc']: name of the slave-side directory to read from
        - ['writer']: RemoteReference to a transfer._DirectoryWriter object
        - ['maxsize']: max size (in bytes) of file to write
        - ['blocksize']: max size for each data block
    """
    # NOTE(review): every sibling command defaults debug to False; True here
    # looks like leftover development logging -- confirm before flipping it.
    debug = True

    def setup(self, args):
        # transfer parameters; maxsize/blocksize are currently unused by
        # the walker below
        self.workdir = args['workdir']
        self.dirname = args['slavesrc']
        self.writer = args['writer']
        self.remaining = args['maxsize']
        self.blocksize = args['blocksize']
        self.stderr = None
        self.rc = 0

    def start(self):
        """Walk the source directory and mirror it to the master.

        Creates every directory remotely first (so empty directories are
        preserved), then sends each file via _writeFile.
        """
        if self.debug:
            log.msg('SlaveDirectoryUploadCommand started')

        # create some lists with all files and directories
        foundFiles = []
        foundDirs = []

        self.baseRoot = os.path.join(self.builder.basedir,
                                     self.workdir,
                                     os.path.expanduser(self.dirname))
        if self.debug:
            log.msg("baseRoot: %r" % self.baseRoot)

        for root, dirs, files in os.walk(self.baseRoot):
            # reconstruct root's path relative to baseRoot one component
            # at a time
            tempRoot = root
            relRoot = ''
            while (tempRoot != self.baseRoot):
                tempRoot, tempRelRoot = os.path.split(tempRoot)
                relRoot = os.path.join(tempRelRoot, relRoot)
            for name in files:
                foundFiles.append(os.path.join(relRoot, name))
            for directory in dirs:
                foundDirs.append(os.path.join(relRoot, directory))

        if self.debug:
            log.msg("foundDirs: %s" % (str(foundDirs)))
            log.msg("foundFiles: %s" % (str(foundFiles)))

        # create all directories on the master, to catch also empty ones
        for dirname in foundDirs:
            self.writer.callRemote("createdir", dirname)

        for filename in foundFiles:
            self._writeFile(filename)

        return None

    def _writeFile(self, filename):
        """Write a file to the remote writer"""
        log.msg("_writeFile: %r" % (filename))
        self.writer.callRemote('open', filename)
        # bugfix: read in binary mode (the payload is arbitrary bytes, and
        # text mode would mangle it on non-posix slaves) and close the
        # handle explicitly instead of leaking it to the GC
        f = open(os.path.join(self.baseRoot, filename), "rb")
        try:
            data = f.read()
        finally:
            f.close()
        self.writer.callRemote('write', data)
        self.writer.callRemote('close')
        return None

    def interrupt(self):
        if self.debug:
            log.msg('interrupted')
        if self.interrupted:
            return
        if self.stderr is None:
            # bugfix: this class never sets self.path (that attribute belongs
            # to SlaveFileUploadCommand), so '%r % self.path' raised
            # AttributeError here. Report the directory we were given.
            self.stderr = 'Upload of %r interrupted' % self.dirname
            self.rc = 1
        self.interrupted = True
        # the next remote call will notice the .interrupted flag

    def finished(self, res):
        # report stderr/rc to the master, then pass the result through
        if self.debug:
            log.msg('finished: stderr=%r, rc=%r' % (self.stderr, self.rc))
        if self.stderr is None:
            self.sendStatus({'rc': self.rc})
        else:
            self.sendStatus({'stderr': self.stderr, 'rc': self.rc})
        return res

registerSlaveCommand("uploadDirectory", SlaveDirectoryUploadCommand, command_version)
class SlaveFileDownloadCommand(Command):
    """
    Download a file from master to slave

    Arguments:

        - ['workdir']: base directory to use
        - ['slavedest']: name of the slave-side file to be created
        - ['reader']: RemoteReference to a transfer._FileReader object
        - ['maxsize']: max size (in bytes) of file to write
        - ['blocksize']: max size for each data block
        - ['mode']: access mode for the new file
    """
    debug = False

    def setup(self, args):
        # 'bytes_remaining' counts down from maxsize; None means unlimited
        self.workdir = args['workdir']
        self.filename = args['slavedest']
        self.reader = args['reader']
        self.bytes_remaining = args['maxsize']
        self.blocksize = args['blocksize']
        self.mode = args['mode']
        self.stderr = None
        self.rc = 0

    def start(self):
        """Create the destination file and start the chunked download loop.

        Returns a Deferred that fires (via .finished) once the remote
        reader has been closed.
        """
        if self.debug:
            log.msg('SlaveFileDownloadCommand starting')

        # Open file
        self.path = os.path.join(self.builder.basedir,
                                 self.workdir,
                                 os.path.expanduser(self.filename))

        dirname = os.path.dirname(self.path)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        try:
            self.fp = open(self.path, 'wb')
            if self.debug:
                log.msg('Opened %r for download' % self.path)
            if self.mode is not None:
                # note: there is a brief window during which the new file
                # will have the buildslave's default (umask) mode before we
                # set the new one. Don't use this mode= feature to keep files
                # private: use the buildslave's umask for that instead. (it
                # is possible to call os.umask() before and after the open()
                # call, but cleaning up from exceptions properly is more of a
                # nuisance that way).
                os.chmod(self.path, self.mode)
        except IOError:
            # TODO: this still needs cleanup
            # _readBlock treats fp=None as "stop immediately"
            self.fp = None
            self.stderr = 'Cannot open file %r for download' % self.path
            self.rc = 1
            if self.debug:
                log.msg('Cannot open file %r for download' % self.path)

        d = defer.Deferred()
        reactor.callLater(0, self._loop, d)
        def _close(res):
            # close the file, but pass through any errors from _loop
            d1 = self.reader.callRemote('close')
            d1.addErrback(log.err)
            d1.addCallback(lambda ignored: res)
            return d1
        d.addBoth(_close)
        d.addBoth(self.finished)
        return d

    def _loop(self, fire_when_done):
        # fetch one block per reactor turn until _readBlock reports True
        d = defer.maybeDeferred(self._readBlock)
        def _done(finished):
            if finished:
                fire_when_done.callback(None)
            else:
                self._loop(fire_when_done)
        def _err(why):
            fire_when_done.errback(why)
        d.addCallbacks(_done, _err)
        return None

    def _readBlock(self):
        """Read a block of data from the remote reader."""
        if self.interrupted or self.fp is None:
            if self.debug:
                log.msg('SlaveFileDownloadCommand._readBlock(): end')
            return True

        length = self.blocksize
        if self.bytes_remaining is not None and length > self.bytes_remaining:
            length = self.bytes_remaining

        if length <= 0:
            # maxsize exhausted: truncate, report once via stderr/rc
            if self.stderr is None:
                self.stderr = 'Maximum filesize reached, truncating file %r' \
                    % self.path
                self.rc = 1
            return True
        else:
            d = self.reader.callRemote('read', length)
            d.addCallback(self._writeData)
            return d

    def _writeData(self, data):
        # an empty read means the master has no more data: we're done
        if self.debug:
            log.msg('SlaveFileDownloadCommand._readBlock(): readlen=%d' %
                    len(data))
        if len(data) == 0:
            return True

        if self.bytes_remaining is not None:
            self.bytes_remaining = self.bytes_remaining - len(data)
            assert self.bytes_remaining >= 0
        self.fp.write(data)
        return False

    def interrupt(self):
        if self.debug:
            log.msg('interrupted')
        if self.interrupted:
            return
        if self.stderr is None:
            self.stderr = 'Download of %r interrupted' % self.path
            self.rc = 1
        self.interrupted = True
        # now we wait for the next read request to return. _readBlock will
        # abandon the file when it sees self.interrupted set.

    def finished(self, res):
        if self.fp is not None:
            self.fp.close()

        if self.debug:
            log.msg('finished: stderr=%r, rc=%r' % (self.stderr, self.rc))
        if self.stderr is None:
            self.sendStatus({'rc': self.rc})
        else:
            self.sendStatus({'stderr': self.stderr, 'rc': self.rc})
        return res

registerSlaveCommand("downloadFile", SlaveFileDownloadCommand, command_version)
class SlaveShellCommand(Command):
    """This is a Command which runs a shell command. The args dict contains
    the following keys:

        - ['command'] (required): a shell command to run. If this is a string,
                                  it will be run with /bin/sh (['/bin/sh',
                                  '-c', command]). If it is a list
                                  (preferred), it will be used directly.
        - ['workdir'] (required): subdirectory in which the command will be
                                  run, relative to the builder dir
        - ['env']: a dict of environment variables to augment/replace
                   os.environ . PYTHONPATH is treated specially, and
                   should be a list of path components to be prepended to
                   any existing PYTHONPATH environment variable.
        - ['initial_stdin']: a string which will be written to the command's
                             stdin as soon as it starts
        - ['keep_stdin_open']: unless True, the command's stdin will be
                               closed as soon as initial_stdin has been
                               written. Set this to True if you plan to write
                               to stdin after the command has been started.
        - ['want_stdout']: 0 if stdout should be thrown away
        - ['want_stderr']: 0 if stderr should be thrown away
        - ['usePTY']: True or False if the command should use a PTY (defaults to
                      configuration of the slave)
        - ['not_really']: 1 to skip execution and return rc=0
        - ['timeout']: seconds of silence to tolerate before killing command
        - ['logfiles']: dict mapping LogFile name to the workdir-relative
                        filename of a local log file. This local file will be
                        watched just like 'tail -f', and all changes will be
                        written to 'log' status updates.

    ShellCommand creates the following status messages:
        - {'stdout': data} : when stdout data is available
        - {'stderr': data} : when stderr data is available
        - {'header': data} : when headers (command start/stop) are available
        - {'log': (logfile_name, data)} : when log files have new contents
        - {'rc': rc} : when the process has terminated
    """

    def start(self):
        """Build a ShellCommand from our args and start it."""
        args = self.args
        # args['workdir'] is relative to the builder directory, and required
        assert args['workdir'] is not None
        workdir = os.path.join(self.builder.basedir, args['workdir'])

        self.command = ShellCommand(
            self.builder, args['command'], workdir,
            environ=args.get('env'),
            timeout=args.get('timeout', None),
            sendStdout=args.get('want_stdout', True),
            sendStderr=args.get('want_stderr', True),
            sendRC=True,
            initialStdin=args.get('initial_stdin'),
            keepStdinOpen=args.get('keep_stdin_open'),
            logfiles=args.get('logfiles', {}),
            usePTY=args.get('usePTY', "slave-config"),
            )
        return self.command.start()

    def interrupt(self):
        self.interrupted = True
        self.command.kill("command interrupted")

    def writeStdin(self, data):
        self.command.writeStdin(data)

    def closeStdin(self):
        self.command.closeStdin()

registerSlaveCommand("shell", SlaveShellCommand, command_version)
class DummyCommand(Command):
    """
    I am a dummy no-op command that by default takes 5 seconds to complete.
    See L{buildbot.steps.dummy.RemoteDummy}
    """

    def start(self):
        """Arm a 1-second timer for the intermediate status message."""
        self.d = defer.Deferred()
        log.msg(" starting dummy command [%s]" % self.stepId)
        self.timer = reactor.callLater(1, self.doStatus)
        return self.d

    def interrupt(self):
        if self.interrupted:
            return
        self.timer.cancel()
        self.timer = None
        self.interrupted = True
        self.finished()

    def doStatus(self):
        """Send one fake stdout update, then wait out the remaining time."""
        log.msg(" sending intermediate status")
        self.sendStatus({'stdout': 'data'})
        # total duration is args['timeout'] (default 5s); 1s already elapsed
        self.timer = reactor.callLater(self.args.get('timeout', 5),
                                       self.finished)

    def finished(self):
        log.msg(" dummy command finished [%s]" % self.stepId)
        if self.interrupted:
            status = {'rc': 1}
        else:
            status = {'rc': 0}
        self.sendStatus(status)
        self.d.callback(0)

registerSlaveCommand("dummy", DummyCommand, command_version)
# this maps handle names to a callable. When the WaitCommand starts, this
# callable is invoked with no arguments. It should return a Deferred. When
# that Deferred fires, our WaitCommand will finish.
# (Entries are presumably inserted by the unit-test suite -- WaitCommand
# below only reads and deletes them.)
waitCommandRegistry = {}
class WaitCommand(Command):
    """
    I am a dummy command used by the buildbot unit test suite. I want for the
    unit test to tell us to finish. See L{buildbot.steps.dummy.Wait}
    """

    def start(self):
        """Pop our handle's callable from the registry, invoke it on the
        next reactor turn, and finish (or fail) when its Deferred fires."""
        self.d = defer.Deferred()
        log.msg(" starting wait command [%s]" % self.stepId)
        handle = self.args['handle']
        cb = waitCommandRegistry[handle]
        # one-shot: remove the entry so it cannot be reused
        del waitCommandRegistry[handle]
        def _called():
            log.msg(" wait-%s starting" % (handle,))
            d = cb()
            def _done(res):
                log.msg(" wait-%s finishing: %s" % (handle, res))
                return res
            d.addBoth(_done)
            d.addCallbacks(self.finished, self.failed)
        reactor.callLater(0, _called)
        return self.d

    def interrupt(self):
        # note: does not cancel the pending callable; just flags the
        # interruption and reports rc=2 via finished()
        log.msg(" wait command interrupted")
        if self.interrupted:
            return
        self.interrupted = True
        self.finished("interrupted")

    def finished(self, res):
        # rc=2 when interrupted, rc=0 on normal completion
        log.msg(" wait command finished [%s]" % self.stepId)
        if self.interrupted:
            self.sendStatus({'rc': 2})
        else:
            self.sendStatus({'rc': 0})
        self.d.callback(0)

    def failed(self, why):
        log.msg(" wait command failed [%s]" % self.stepId)
        self.sendStatus({'rc': 1})
        self.d.callback(0)

registerSlaveCommand("dummy.wait", WaitCommand, command_version)
class SourceBase(Command):
"""Abstract base class for Version Control System operations (checkout
and update). This class extracts the following arguments from the
dictionary received from the master:
- ['workdir']: (required) the subdirectory where the buildable sources
should be placed
- ['mode']: one of update/copy/clobber/export, defaults to 'update'
- ['revision']: If not None, this is an int or string which indicates
which sources (along a time-like axis) should be used.
It is the thing you provide as the CVS -r or -D
argument.
- ['patch']: If not None, this is a tuple of (striplevel, patch)
which contains a patch that should be applied after the
checkout has occurred. Once applied, the tree is no
longer eligible for use with mode='update', and it only
makes sense to use this in conjunction with a
['revision'] argument. striplevel is an int, and patch
is a string in standard unified diff format. The patch
will be applied with 'patch -p%d <PATCH', with
STRIPLEVEL substituted as %d. The command will fail if
the patch process fails (rejected hunks).
- ['timeout']: seconds of silence tolerated before we kill off the
command
- ['retry']: If not None, this is a tuple of (delay, repeats)
which means that any failed VC updates should be
reattempted, up to REPEATS times, after a delay of
DELAY seconds. This is intended to deal with slaves
that experience transient network failures.
"""
sourcedata = ""
def setup(self, args):
    """Extract the arguments common to every VC operation from *args*."""
    # if we need to parse command output, force the C locale so it is not
    # translated into the buildslave's native language
    env = os.environ.copy()
    env['LC_MESSAGES'] = "C"
    self.env = env

    self.workdir = args['workdir']
    self.mode = args.get('mode', "update")
    self.revision = args.get('revision')
    self.patch = args.get('patch')
    self.timeout = args.get('timeout', 120)
    self.retry = args.get('retry')
    # VC-specific subclasses should override this to extract more args.
    # Make sure to upcall!
def start(self):
    """Run the VC operation: (maybe) clobber, checkout/update, (maybe)
    copy into the workdir, (maybe) apply a patch, then report rc.

    self.header is expected to be provided by the VC-specific subclass
    (not visible here -- confirm).
    """
    self.sendStatus({'header': "starting " + self.header + "\n"})
    self.command = None

    # self.srcdir is where the VC system should put the sources
    if self.mode == "copy":
        self.srcdir = "source" # hardwired directory name, sorry
    else:
        self.srcdir = self.workdir
    self.sourcedatafile = os.path.join(self.builder.basedir,
                                       self.srcdir,
                                       ".buildbot-sourcedata")

    d = defer.succeed(None)
    self.maybeClobber(d)
    if not (self.sourcedirIsUpdateable() and self.sourcedataMatches()):
        # the directory cannot be updated, so we have to clobber it.
        # Perhaps the master just changed modes from 'export' to
        # 'update'.
        d.addCallback(self.doClobber, self.srcdir)

    d.addCallback(self.doVC)

    if self.mode == "copy":
        d.addCallback(self.doCopy)
    if self.patch:
        d.addCallback(self.doPatch)
    d.addCallbacks(self._sendRC, self._checkAbandoned)
    return d
def maybeClobber(self, d):
    """Append a clobber step to *d* when the mode requires a fresh tree."""
    if self.mode not in ("copy", "clobber", "export"):
        return
    d.addCallback(self.doClobber, self.workdir)
def interrupt(self):
    """Abort the operation: flag it and kill any running VC command."""
    self.interrupted = True
    cmd = self.command
    if cmd:
        cmd.kill("command interrupted")
def doVC(self, res):
    """Choose between an in-place update and a full checkout, then record
    the obtained revision and the sourcedata stamp on success."""
    if self.interrupted:
        raise AbandonChain(1)
    if self.sourcedirIsUpdateable() and self.sourcedataMatches():
        d = self.doVCUpdate()
        # an update failure falls back to clobber + full checkout
        d.addCallback(self.maybeDoVCFallback)
    else:
        d = self.doVCFull()
        d.addBoth(self.maybeDoVCRetry)
    d.addCallback(self._abandonOnFailure)
    d.addCallback(self._handleGotRevision)
    d.addCallback(self.writeSourcedata)
    return d
def sourcedataMatches(self):
    """Return True if .buildbot-sourcedata exists and equals self.sourcedata.

    A mismatch (or a missing/unreadable stamp file) means the source
    directory was populated with different VC parameters and must be
    clobbered.
    """
    try:
        # fix: close the file handle explicitly instead of leaking it to
        # the GC; any IOError (open or read) still just means "no match"
        f = open(self.sourcedatafile, "r")
        try:
            olddata = f.read()
        finally:
            f.close()
    except IOError:
        return False
    return olddata == self.sourcedata
def _handleGotRevision(self, res):
    """Parse the checked-out revision and report it to the master."""
    def _send(got_revision):
        self.sendStatus({'got_revision': got_revision})
    d = defer.maybeDeferred(self.parseGotRevision)
    d.addCallback(_send)
    return d
def parseGotRevision(self):
    """Override this in a subclass. It should return a string that
    represents which revision was actually checked out, or a Deferred
    that will fire with such a string. If, in a future build, you were to
    pass this 'got_revision' string in as the 'revision' component of a
    SourceStamp, you should wind up with the same source code as this
    checkout just obtained.

    It is probably most useful to scan self.command.stdout for a string
    of some sort. Be sure to set keepStdout=True on the VC command that
    you run, so that you'll have something available to look at.

    If this information is unavailable, just return None."""
    # base-class default: no revision information
    return None
def writeSourcedata(self, res):
    """Persist self.sourcedata into the stamp file; pass *res* through.

    The stamp is compared by sourcedataMatches() on the next build to
    decide whether the tree can be updated in place.
    """
    # fix: close the file handle explicitly instead of leaking it to the GC
    f = open(self.sourcedatafile, "w")
    try:
        f.write(self.sourcedata)
    finally:
        f.close()
    return res
def sourcedirIsUpdateable(self):
    """Return True if srcdir can be updated in place (VC-specific)."""
    raise NotImplementedError("this must be implemented in a subclass")

def doVCUpdate(self):
    """Update the existing tree; return a Deferred firing with the rc."""
    raise NotImplementedError("this must be implemented in a subclass")

def doVCFull(self):
    """Perform a full checkout; return a Deferred firing with the rc."""
    raise NotImplementedError("this must be implemented in a subclass")
def maybeDoVCFallback(self, rc):
    """If the in-place update failed (non-zero rc or a Failure landed
    here), clobber the source directory and retry with a full checkout."""
    if type(rc) is int and rc == 0:
        return rc
    if self.interrupted:
        raise AbandonChain(1)
    msg = "update failed, clobbering and trying again"
    self.sendStatus({'header': msg + "\n"})
    log.msg(msg)
    d = self.doClobber(None, self.srcdir)
    d.addCallback(self.doVCFallback2)
    return d
def doVCFallback2(self, res):
    """Second half of the update-failure fallback: run a full checkout."""
    self.sendStatus({'header': "now retrying VC operation" + "\n"})
    log.msg("now retrying VC operation")
    d = self.doVCFull()
    d.addBoth(self.maybeDoVCRetry)
    d.addCallback(self._abandonOnFailure)
    return d
    def maybeDoVCRetry(self, res):
        """We get here somewhere after a VC chain has finished. res could
        be::

         - 0: the operation was successful
         - nonzero: the operation failed. retry if possible
         - AbandonChain: the operation failed, someone else noticed. retry.
         - Failure: some other exception, re-raise
        """
        if isinstance(res, failure.Failure):
            if self.interrupted:
                return res # don't re-try interrupted builds
            # AbandonChain failures fall through to the retry logic below;
            # any other Failure is re-raised by trap()
            res.trap(AbandonChain)
        else:
            if type(res) is int and res == 0:
                return res
            if self.interrupted:
                raise AbandonChain(1)
        # if we get here, we should retry, if possible
        if self.retry:
            delay, repeats = self.retry
            if repeats >= 0:
                # consume one retry, then schedule a clobber + full
                # checkout to fire after 'delay' seconds
                self.retry = (delay, repeats-1)
                msg = ("update failed, trying %d more times after %d seconds"
                       % (repeats, delay))
                self.sendStatus({'header': msg + "\n"})
                log.msg(msg)
                d = defer.Deferred()
                self.maybeClobber(d)
                d.addCallback(lambda res: self.doVCFull())
                d.addBoth(self.maybeDoVCRetry)
                reactor.callLater(delay, d.callback, None)
                return d
        # out of retries (or retries disabled): hand the result onward
        return res
    def doClobber(self, dummy, dirname):
        """Delete *dirname* (relative to builder.basedir) and return a
        Deferred that fires when the removal has finished. *dummy* is
        ignored so this can be used directly as a Deferred callback."""
        # TODO: remove the old tree in the background
##         workdir = os.path.join(self.builder.basedir, self.workdir)
##         deaddir = self.workdir + ".deleting"
##         if os.path.isdir(workdir):
##             try:
##                 os.rename(workdir, deaddir)
##                 # might fail if deaddir already exists: previous deletion
##                 # hasn't finished yet
##                 # start the deletion in the background
##                 # TODO: there was a solaris/NetApp/NFS problem where a
##                 # process that was still running out of the directory we're
##                 # trying to delete could prevent the rm-rf from working. I
##                 # think it stalled the rm, but maybe it just died with
##                 # permission issues. Try to detect this.
##                 os.commands("rm -rf %s &" % deaddir)
##             except:
##                 # fall back to sequential delete-then-checkout
##                 pass
        d = os.path.join(self.builder.basedir, dirname)
        if runtime.platformType != "posix":
            # if we're running on w32, use rmtree instead. It will block,
            # but hopefully it won't take too long.
            rmdirRecursive(d)
            return defer.succeed(0)
        # on posix, shell out to 'rm -rf' so the removal can be watched
        # (and interrupted) like any other slave command
        command = ["rm", "-rf", d]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=0, timeout=self.timeout, usePTY=False)

        self.command = c
        # sendRC=0 means the rm command will send stdout/stderr to the
        # master, but not the rc=0 when it finishes. That job is left to
        # _sendRC
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        return d
    def doCopy(self, res):
        """Copy the pristine source tree (srcdir) into the build working
        directory (workdir) and return a Deferred that fires when done."""
        # now copy tree to workdir
        fromdir = os.path.join(self.builder.basedir, self.srcdir)
        todir = os.path.join(self.builder.basedir, self.workdir)
        if runtime.platformType != "posix":
            # non-posix: no 'cp' available, so block on shutil.copytree
            self.sendStatus({'header': "Since we're on a non-POSIX platform, "
            "we're not going to try to execute cp in a subprocess, but instead "
            "use shutil.copytree(), which will block until it is complete.  "
            "fromdir: %s, todir: %s\n" % (fromdir, todir)})
            shutil.copytree(fromdir, todir)
            return defer.succeed(0)

        if not os.path.exists(os.path.dirname(todir)):
            os.makedirs(os.path.dirname(todir))
        if os.path.exists(todir):
            # I don't think this happens, but just in case..
            log.msg("cp target '%s' already exists -- cp will not do what you think!" % todir)

        # -R recursive, -P don't follow symlinks, -p preserve attributes
        command = ['cp', '-R', '-P', '-p', fromdir, todir]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        return d
def doPatch(self, res):
patchlevel, diff = self.patch
command = [getCommand("patch"), '-p%d' % patchlevel]
dir = os.path.join(self.builder.basedir, self.workdir)
# mark the directory so we don't try to update it later
open(os.path.join(dir, ".buildbot-patched"), "w").write("patched\n")
# now apply the patch
c = ShellCommand(self.builder, command, dir,
sendRC=False, timeout=self.timeout,
initialStdin=diff, usePTY=False)
self.command = c
d = c.start()
d.addCallback(self._abandonOnFailure)
return d
class CVS(SourceBase):
    """CVS-specific VC operation. In addition to the arguments handled by
    SourceBase, this command reads the following keys:

    ['cvsroot'] (required): the CVSROOT repository string
    ['cvsmodule'] (required): the module to be retrieved
    ['branch']: a '-r' tag or branch name to use for the checkout/update
    ['login']: a string for use as a password to 'cvs login'
    ['global_options']: a list of strings to use before the CVS verb
    """

    header = "cvs operation"

    def setup(self, args):
        SourceBase.setup(self, args)
        self.vcexe = getCommand("cvs")
        self.cvsroot = args['cvsroot']
        self.cvsmodule = args['cvsmodule']
        self.global_options = args.get('global_options', [])
        self.branch = args.get('branch')
        self.login = args.get('login')
        # sourcedata records everything that, if changed, would invalidate
        # an existing checkout (compared by sourcedataMatches)
        self.sourcedata = "%s\n%s\n%s\n" % (self.cvsroot, self.cvsmodule,
                                            self.branch)

    def sourcedirIsUpdateable(self):
        # a patched tree must not be updated in place, and a tree without
        # CVS/ metadata cannot be
        if os.path.exists(os.path.join(self.builder.basedir,
                                       self.srcdir, ".buildbot-patched")):
            return False
        return os.path.isdir(os.path.join(self.builder.basedir,
                                          self.srcdir, "CVS"))

    def start(self):
        if self.login is not None:
            # need to do a 'cvs login' command first
            d = self.builder.basedir
            command = ([self.vcexe, '-d', self.cvsroot] + self.global_options
                       + ['login'])
            # the password is fed to 'cvs login' on stdin
            c = ShellCommand(self.builder, command, d,
                             sendRC=False, timeout=self.timeout,
                             initialStdin=self.login+"\n", usePTY=False)
            self.command = c
            d = c.start()
            d.addCallback(self._abandonOnFailure)
            d.addCallback(self._didLogin)
            return d
        else:
            return self._didLogin(None)

    def _didLogin(self, res):
        # now we really start
        return SourceBase.start(self)

    def doVCUpdate(self):
        d = os.path.join(self.builder.basedir, self.srcdir)
        # -z3: compression; 'update -dP' pulls new directories, prunes empty
        command = [self.vcexe, '-z3'] + self.global_options + ['update', '-dP']
        if self.branch:
            command += ['-r', self.branch]
        if self.revision:
            command += ['-D', self.revision]
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    def doVCFull(self):
        d = self.builder.basedir
        # 'export' omits the CVS/ metadata directories; 'checkout' keeps them
        if self.mode == "export":
            verb = "export"
        else:
            verb = "checkout"
        command = ([self.vcexe, '-d', self.cvsroot, '-z3'] +
                   self.global_options +
                   [verb, '-d', self.srcdir])
        if self.branch:
            command += ['-r', self.branch]
        if self.revision:
            command += ['-D', self.revision]
        command += [self.cvsmodule]
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    def parseGotRevision(self):
        # CVS does not have any kind of revision stamp to speak of. We return
        # the current timestamp as a best-effort guess, but this depends upon
        # the local system having a clock that is
        # reasonably-well-synchronized with the repository.
        return time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime())
# expose CVS to the buildmaster under the "cvs" command name
registerSlaveCommand("cvs", CVS, command_version)
class SVN(SourceBase):
    """Subversion-specific VC operation. In addition to the arguments
    handled by SourceBase, this command reads the following keys:

    ['svnurl'] (required): the SVN repository string
    ['username'] Username passed to the svn command
    ['password'] Password passed to the svn command
    """

    header = "svn operation"

    def setup(self, args):
        SourceBase.setup(self, args)
        self.vcexe = getCommand("svn")
        self.svnurl = args['svnurl']
        self.sourcedata = "%s\n" % self.svnurl
        self.extra_args = []
        # use 'in' instead of dict.has_key: has_key is deprecated and was
        # removed in Python 3; 'in' behaves identically on both
        if 'username' in args:
            self.extra_args.extend(["--username", args['username']])
        if 'password' in args:
            # Obfuscated keeps the real password out of the logs
            self.extra_args.extend(["--password",
                                    Obfuscated(args['password'], "XXXX")])

    def sourcedirIsUpdateable(self):
        # a patched tree must not be updated in place, and a tree without
        # .svn metadata cannot be
        if os.path.exists(os.path.join(self.builder.basedir,
                                       self.srcdir, ".buildbot-patched")):
            return False
        return os.path.isdir(os.path.join(self.builder.basedir,
                                          self.srcdir, ".svn"))

    def doVCUpdate(self):
        revision = self.args['revision'] or 'HEAD'
        # update: possible for mode in ('copy', 'update')
        d = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'update'] + \
                    self.extra_args + \
                    ['--revision', str(revision),
                     '--non-interactive', '--no-auth-cache']
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout,
                         keepStdout=True, usePTY=False)
        self.command = c
        return c.start()

    def doVCFull(self):
        revision = self.args['revision'] or 'HEAD'
        d = self.builder.basedir
        if self.mode == "export":
            # 'export' produces a tree without .svn administrative dirs
            command = [self.vcexe, 'export'] + \
                        self.extra_args + \
                        ['--revision', str(revision),
                         '--non-interactive', '--no-auth-cache',
                         self.svnurl, self.srcdir]
        else:
            # mode=='clobber', or copy/update on a broken workspace
            command = [self.vcexe, 'checkout'] + \
                        self.extra_args + \
                        ['--revision', str(revision),
                         '--non-interactive', '--no-auth-cache',
                         self.svnurl, self.srcdir]
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout,
                         keepStdout=True, usePTY=False)
        self.command = c
        return c.start()

    def getSvnVersionCommand(self):
        """
        Get the (shell) command used to determine SVN revision number
        of checked-out code

        return: list of strings, passable as the command argument to ShellCommand
        """
        # svn checkout operations finish with 'Checked out revision 16657.'
        # svn update operations finish the line 'At revision 16654.'
        # But we don't use those. Instead, run 'svnversion'.
        svnversion_command = getCommand("svnversion")
        # older versions of 'svnversion' (1.1.4) require the WC_PATH
        # argument, newer ones (1.3.1) do not.
        return [svnversion_command, "."]

    def parseGotRevision(self):
        """Run 'svnversion' in the source directory and fire with the
        integer revision it reports, or None if the output is unparseable."""
        c = ShellCommand(self.builder,
                         self.getSvnVersionCommand(),
                         os.path.join(self.builder.basedir, self.srcdir),
                         environ=self.env,
                         sendStdout=False, sendStderr=False, sendRC=False,
                         keepStdout=True, usePTY=False)
        d = c.start()
        def _parse(res):
            r_raw = c.stdout.strip()
            # Extract revision from the version "number" string:
            # strip the mixed/switched markers (e.g. '1234:1235MS')
            r = r_raw.rstrip('MS')
            r = r.split(':')[-1]
            got_version = None
            try:
                got_version = int(r)
            except ValueError:
                msg =("SVN.parseGotRevision unable to parse output "
                      "of svnversion: '%s'" % r_raw)
                log.msg(msg)
                self.sendStatus({'header': msg + "\n"})
            return got_version
        d.addCallback(_parse)
        return d
# expose SVN to the buildmaster under the "svn" command name
registerSlaveCommand("svn", SVN, command_version)
class Darcs(SourceBase):
    """Darcs-specific VC operation. In addition to the arguments
    handled by SourceBase, this command reads the following keys:

    ['repourl'] (required): the Darcs repository string
    """

    header = "darcs operation"

    def setup(self, args):
        SourceBase.setup(self, args)
        self.vcexe = getCommand("darcs")
        self.repourl = args['repourl']
        self.sourcedata = "%s\n" % self.repourl
        self.revision = self.args.get('revision')

    def sourcedirIsUpdateable(self):
        if os.path.exists(os.path.join(self.builder.basedir,
                                       self.srcdir, ".buildbot-patched")):
            return False
        if self.revision:
            # checking out a specific revision requires a full 'darcs get'
            return False
        return os.path.isdir(os.path.join(self.builder.basedir,
                                          self.srcdir, "_darcs"))

    def doVCUpdate(self):
        assert not self.revision
        # update: possible for mode in ('copy', 'update')
        d = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'pull', '--all', '--verbose']
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    def doVCFull(self):
        # checkout or export
        d = self.builder.basedir
        command = [self.vcexe, 'get', '--verbose', '--partial',
                   '--repo-name', self.srcdir]
        if self.revision:
            # write the context to a file
            n = os.path.join(self.builder.basedir, ".darcs-context")
            f = open(n, "wb")
            f.write(self.revision)
            f.close()
            # tell Darcs to use that context
            command.append('--context')
            command.append(n)
        command.append(self.repourl)
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        if self.revision:
            # remove the temporary context file once the get finishes
            d.addCallback(self.removeContextFile, n)
        return d

    def removeContextFile(self, res, n):
        # callback helper: delete the context file, pass the result through
        os.unlink(n)
        return res

    def parseGotRevision(self):
        # we use 'darcs context' to find out what we wound up with
        command = [self.vcexe, "changes", "--context"]
        c = ShellCommand(self.builder, command,
                         os.path.join(self.builder.basedir, self.srcdir),
                         environ=self.env,
                         sendStdout=False, sendStderr=False, sendRC=False,
                         keepStdout=True, usePTY=False)
        d = c.start()
        d.addCallback(lambda res: c.stdout)
        return d
# expose Darcs to the buildmaster under the "darcs" command name
registerSlaveCommand("darcs", Darcs, command_version)
class Monotone(SourceBase):
    """Monotone-specific VC operation. In addition to the arguments handled
    by SourceBase, this command reads the following keys:

    ['server_addr'] (required): the address of the server to pull from
    ['branch'] (required): the branch the revision is on
    ['db_path'] (required): the local database path to use
    ['revision'] (required): the revision to check out
    ['monotone']: (required): path to monotone executable
    """

    header = "monotone operation"

    def setup(self, args):
        SourceBase.setup(self, args)
        self.server_addr = args["server_addr"]
        self.branch = args["branch"]
        self.db_path = args["db_path"]
        self.revision = args["revision"]
        self.monotone = args["monotone"]
        self._made_fulls = False
        self._pull_timeout = args["timeout"]

    def _makefulls(self):
        # lazily compute the absolute db/src paths, exactly once
        if not self._made_fulls:
            basedir = self.builder.basedir
            self.full_db_path = os.path.join(basedir, self.db_path)
            self.full_srcdir = os.path.join(basedir, self.srcdir)
            self._made_fulls = True

    def sourcedirIsUpdateable(self):
        self._makefulls()
        if os.path.exists(os.path.join(self.full_srcdir,
                                       ".buildbot_patched")):
            return False
        # need both the local database and a checked-out MT working copy
        return (os.path.isfile(self.full_db_path)
                and os.path.isdir(os.path.join(self.full_srcdir, "MT")))

    def doVCUpdate(self):
        return self._withFreshDb(self._doUpdate)

    def _doUpdate(self):
        # update: possible for mode in ('copy', 'update')
        command = [self.monotone, "update",
                   "-r", self.revision,
                   "-b", self.branch]
        c = ShellCommand(self.builder, command, self.full_srcdir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    def doVCFull(self):
        return self._withFreshDb(self._doFull)

    def _doFull(self):
        command = [self.monotone, "--db=" + self.full_db_path,
                   "checkout",
                   "-r", self.revision,
                   "-b", self.branch,
                   self.full_srcdir]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    def _withFreshDb(self, callback):
        """Ensure the monotone database exists and is up to date, pull the
        branch from the server, then invoke *callback* (update or full
        checkout). Returns the Deferred for the whole chain."""
        self._makefulls()
        # first ensure the db exists and is usable
        if os.path.isfile(self.full_db_path):
            # already exists, so run 'db migrate' in case monotone has been
            # upgraded under us
            command = [self.monotone, "db", "migrate",
                       "--db=" + self.full_db_path]
        else:
            # We'll be doing an initial pull, so up the timeout to 3 hours to
            # make sure it will have time to complete.
            self._pull_timeout = max(self._pull_timeout, 3 * 60 * 60)
            self.sendStatus({"header": "creating database %s\n"
                                       % (self.full_db_path,)})
            command = [self.monotone, "db", "init",
                       "--db=" + self.full_db_path]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        d.addCallback(self._didDbInit)
        d.addCallback(self._didPull, callback)
        return d

    def _didDbInit(self, res):
        # db is ready: pull the branch from the server (longer timeout)
        command = [self.monotone, "--db=" + self.full_db_path,
                   "pull", "--ticker=dot", self.server_addr, self.branch]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self._pull_timeout, usePTY=False)
        self.sendStatus({"header": "pulling %s from %s\n"
                                   % (self.branch, self.server_addr)})
        self.command = c
        return c.start()

    def _didPull(self, res, callback):
        # hand control back to the update/checkout step
        return callback()
# expose Monotone to the buildmaster under the "monotone" command name
registerSlaveCommand("monotone", Monotone, command_version)
class Git(SourceBase):
    """Git specific VC operation. In addition to the arguments
    handled by SourceBase, this command reads the following keys:

    ['repourl'] (required): the upstream GIT repository string
    ['branch'] (optional): which version (i.e. branch or tag) to
                           retrieve. Default: "master".
    """

    header = "git operation"

    def setup(self, args):
        SourceBase.setup(self, args)
        self.repourl = args['repourl']
        self.branch = args.get('branch')
        if not self.branch:
            self.branch = "master"
        self.sourcedata = "%s %s\n" % (self.repourl, self.branch)

    def _fullSrcdir(self):
        # absolute path of the source directory
        return os.path.join(self.builder.basedir, self.srcdir)

    def _commitSpec(self):
        # the committish to check out: explicit revision wins over branch
        if self.revision:
            return self.revision
        return self.branch

    def sourcedirIsUpdateable(self):
        if os.path.exists(os.path.join(self._fullSrcdir(),
                                       ".buildbot-patched")):
            return False
        return os.path.isdir(os.path.join(self._fullSrcdir(), ".git"))

    def readSourcedata(self):
        # raises IOError if the sourcedata file does not exist yet
        return open(self.sourcedatafile, "r").read()

    # If the repourl matches the sourcedata file, then
    # we can say that the sourcedata matches. We can
    # ignore branch changes, since Git can work with
    # many branches fetched, and we deal with it properly
    # in doVCUpdate.
    def sourcedataMatches(self):
        try:
            olddata = self.readSourcedata()
            if not olddata.startswith(self.repourl+' '):
                return False
        except IOError:
            return False
        return True

    def _didFetch(self, res):
        if self.revision:
            head = self.revision
        else:
            head = 'FETCH_HEAD'

        # move the working tree to the requested commit, discarding
        # any local modifications
        command = ['git', 'reset', '--hard', head]
        c = ShellCommand(self.builder, command, self._fullSrcdir(),
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    # Update first runs "git clean", removing local changes,
    # if the branch to be checked out has changed.  This, combined
    # with the later "git reset" equates clobbering the repo,
    # but it's much more efficient.
    def doVCUpdate(self):
        try:
            # Check to see if our branch has changed
            diffbranch = self.sourcedata != self.readSourcedata()
        except IOError:
            diffbranch = False
        if diffbranch:
            command = ['git', 'clean', '-f', '-d']
            c = ShellCommand(self.builder, command, self._fullSrcdir(),
                             sendRC=False, timeout=self.timeout, usePTY=False)
            self.command = c
            d = c.start()
            d.addCallback(self._abandonOnFailure)
            d.addCallback(self._didClean)
            return d
        return self._didClean(None)

    def _didClean(self, dummy):
        # fetch the branch (and its tags) from upstream
        command = ['git', 'fetch', '-t', self.repourl, self.branch]
        self.sendStatus({"header": "fetching branch %s from %s\n"
                                        % (self.branch, self.repourl)})
        c = ShellCommand(self.builder, command, self._fullSrcdir(),
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        d.addCallback(self._didFetch)
        return d

    def _didInit(self, res):
        # freshly-initialized repo: proceed exactly like an update
        return self.doVCUpdate()

    def doVCFull(self):
        # create an empty repository, then fetch into it
        os.mkdir(self._fullSrcdir())
        c = ShellCommand(self.builder, ['git', 'init'], self._fullSrcdir(),
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        d.addCallback(self._didInit)
        return d

    def parseGotRevision(self):
        # resolve HEAD to the full 40-character commit hash
        command = ['git', 'rev-parse', 'HEAD']
        c = ShellCommand(self.builder, command, self._fullSrcdir(),
                         sendRC=False, keepStdout=True, usePTY=False)
        d = c.start()
        def _parse(res):
            hash = c.stdout.strip()
            if len(hash) != 40:
                # anything other than a full SHA-1 means the parse failed
                return None
            return hash
        d.addCallback(_parse)
        return d
# expose Git to the buildmaster under the "git" command name
registerSlaveCommand("git", Git, command_version)
class Arch(SourceBase):
    """Arch-specific (tla-specific) VC operation. In addition to the
    arguments handled by SourceBase, this command reads the following keys:

    ['url'] (required): the repository string
    ['version'] (required): which version (i.e. branch) to retrieve
    ['revision'] (optional): the 'patch-NN' argument to check out
    ['archive']: the archive name to use. If None, use the archive's default
    ['build-config']: if present, give to 'tla build-config' after checkout
    """

    header = "arch operation"
    # name of a 'tla build-config' config, set in setup()
    buildconfig = None

    def setup(self, args):
        SourceBase.setup(self, args)
        self.vcexe = getCommand("tla")
        self.archive = args.get('archive')
        self.url = args['url']
        self.version = args['version']
        self.revision = args.get('revision')
        self.buildconfig = args.get('build-config')
        self.sourcedata = "%s\n%s\n%s\n" % (self.url, self.version,
                                            self.buildconfig)

    def sourcedirIsUpdateable(self):
        if self.revision:
            # Arch cannot roll a directory backwards, so if they ask for a
            # specific revision, clobber the directory. Technically this
            # could be limited to the cases where the requested revision is
            # later than our current one, but it's too hard to extract the
            # current revision from the tree.
            return False
        if os.path.exists(os.path.join(self.builder.basedir,
                                       self.srcdir, ".buildbot-patched")):
            return False
        return os.path.isdir(os.path.join(self.builder.basedir,
                                          self.srcdir, "{arch}"))

    def doVCUpdate(self):
        # update: possible for mode in ('copy', 'update')
        d = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'replay']
        if self.revision:
            command.append(self.revision)
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    def doVCFull(self):
        # to do a checkout, we must first "register" the archive by giving
        # the URL to tla, which will go to the repository at that URL and
        # figure out the archive name. tla will tell you the archive name
        # when it is done, and all further actions must refer to this name.
        command = [self.vcexe, 'register-archive', '--force', self.url]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, keepStdout=True,
                         timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        d.addCallback(self._didRegister, c)
        return d

    def _didRegister(self, res, c):
        # find out what tla thinks the archive name is. If the user told us
        # to use something specific, make sure it matches.
        r = re.search(r'Registering archive: (\S+)\s*$', c.stdout)
        if r:
            msg = "tla reports archive name is '%s'" % r.group(1)
            log.msg(msg)
            self.builder.sendUpdate({'header': msg+"\n"})
            if self.archive and r.group(1) != self.archive:
                msg = (" mismatch, we wanted an archive named '%s'"
                       % self.archive)
                log.msg(msg)
                self.builder.sendUpdate({'header': msg+"\n"})
                raise AbandonChain(-1)
            self.archive = r.group(1)
        assert self.archive, "need archive name to continue"
        return self._doGet()

    def _doGet(self):
        # check the requested version (optionally pinned to a revision)
        # out into srcdir
        ver = self.version
        if self.revision:
            ver += "--%s" % self.revision
        command = [self.vcexe, 'get', '--archive', self.archive,
                   '--no-pristine',
                   ver, self.srcdir]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        if self.buildconfig:
            # assemble the multi-tree build config after the base checkout
            d.addCallback(self._didGet)
        return d

    def _didGet(self, res):
        d = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'build-config', self.buildconfig]
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        return d

    def parseGotRevision(self):
        # using code from tryclient.TlaExtractor
        # 'tla logs --full' gives us ARCHIVE/BRANCH--REVISION
        # 'tla logs' gives us REVISION
        command = [self.vcexe, "logs", "--full", "--reverse"]
        c = ShellCommand(self.builder, command,
                         os.path.join(self.builder.basedir, self.srcdir),
                         environ=self.env,
                         sendStdout=False, sendStderr=False, sendRC=False,
                         keepStdout=True, usePTY=False)
        d = c.start()
        def _parse(res):
            # first line of output is the oldest log entry; take the
            # REVISION part after the final '--'
            tid = c.stdout.split("\n")[0].strip()
            slash = tid.index("/")
            dd = tid.rindex("--")
            #branch = tid[slash+1:dd]
            baserev = tid[dd+2:]
            return baserev
        d.addCallback(_parse)
        return d
# expose Arch to the buildmaster under the "arch" command name
registerSlaveCommand("arch", Arch, command_version)
class Bazaar(Arch):
    """Bazaar (/usr/bin/baz) is an alternative client for Arch repositories.
    It is mostly option-compatible, but archive registration is different
    enough to warrant a separate Command.

    ['archive'] (required): the name of the archive being used
    """

    def setup(self, args):
        Arch.setup(self, args)
        self.vcexe = getCommand("baz")
        # baz doesn't emit the repository name after registration (and
        # grepping through the output of 'baz archives' is too hard), so we
        # require that the buildmaster configuration to provide both the
        # archive name and the URL.
        self.archive = args['archive'] # required for Baz
        self.sourcedata = "%s\n%s\n%s\n" % (self.url, self.version,
                                            self.buildconfig)

    # in _didRegister, the regexp won't match, so we'll stick with the name
    # in self.archive

    def _doGet(self):
        # baz prefers ARCHIVE/VERSION. This will work even if
        # my-default-archive is not set.
        ver = self.archive + "/" + self.version
        if self.revision:
            ver += "--%s" % self.revision
        command = [self.vcexe, 'get', '--no-pristine',
                   ver, self.srcdir]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        if self.buildconfig:
            # assemble the multi-tree build config after the base checkout
            d.addCallback(self._didGet)
        return d

    def parseGotRevision(self):
        # using code from tryclient.BazExtractor
        command = [self.vcexe, "tree-id"]
        c = ShellCommand(self.builder, command,
                         os.path.join(self.builder.basedir, self.srcdir),
                         environ=self.env,
                         sendStdout=False, sendStderr=False, sendRC=False,
                         keepStdout=True, usePTY=False)
        d = c.start()
        def _parse(res):
            # tree-id output is ARCHIVE/BRANCH--REVISION; keep the REVISION
            tid = c.stdout.strip()
            slash = tid.index("/")
            dd = tid.rindex("--")
            #branch = tid[slash+1:dd]
            baserev = tid[dd+2:]
            return baserev
        d.addCallback(_parse)
        return d
# expose Bazaar to the buildmaster under the "bazaar" command name
registerSlaveCommand("bazaar", Bazaar, command_version)
class Bzr(SourceBase):
    """bzr-specific VC operation. In addition to the arguments
    handled by SourceBase, this command reads the following keys:

    ['repourl'] (required): the Bzr repository string
    """

    header = "bzr operation"

    def setup(self, args):
        SourceBase.setup(self, args)
        self.vcexe = getCommand("bzr")
        self.repourl = args['repourl']
        self.sourcedata = "%s\n" % self.repourl
        self.revision = self.args.get('revision')

    def sourcedirIsUpdateable(self):
        if os.path.exists(os.path.join(self.builder.basedir,
                                       self.srcdir, ".buildbot-patched")):
            return False
        if self.revision:
            # checking out a specific revision requires a full 'bzr checkout'
            return False
        return os.path.isdir(os.path.join(self.builder.basedir,
                                          self.srcdir, ".bzr"))

    def doVCUpdate(self):
        assert not self.revision
        # update: possible for mode in ('copy', 'update')
        srcdir = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'update']
        c = ShellCommand(self.builder, command, srcdir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()

    def doVCFull(self):
        # checkout or export
        d = self.builder.basedir
        if self.mode == "export":
            # exporting in bzr requires a separate directory
            return self.doVCExport()
        # originally I added --lightweight here, but then 'bzr revno' is
        # wrong. The revno reported in 'bzr version-info' is correct,
        # however. Maybe this is a bzr bug?
        #
        # In addition, you cannot perform a 'bzr update' on a repo pulled
        # from an HTTP repository that used 'bzr checkout --lightweight'. You
        # get a "ERROR: Cannot lock: transport is read only" when you try.
        #
        # So I won't bother using --lightweight for now.

        command = [self.vcexe, 'checkout']
        if self.revision:
            command.append('--revision')
            command.append(str(self.revision))
        command.append(self.repourl)
        command.append(self.srcdir)

        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        return d

    def doVCExport(self):
        # two steps: a lightweight checkout into a temporary directory,
        # then 'bzr export' from there into the real srcdir
        tmpdir = os.path.join(self.builder.basedir, "export-temp")
        srcdir = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'checkout', '--lightweight']
        if self.revision:
            command.append('--revision')
            command.append(str(self.revision))
        command.append(self.repourl)
        command.append(tmpdir)
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        d = c.start()
        def _export(res):
            command = [self.vcexe, 'export', srcdir]
            c = ShellCommand(self.builder, command, tmpdir,
                             sendRC=False, timeout=self.timeout, usePTY=False)
            self.command = c
            return c.start()
        d.addCallback(_export)
        return d

    def get_revision_number(self, out):
        """Extract the integer after 'revno:' from 'bzr version-info'
        output; raises ValueError if no such line is present."""
        # it feels like 'bzr revno' sometimes gives different results than
        # the 'revno:' line from 'bzr version-info', and the one from
        # version-info is more likely to be correct.
        for line in out.split("\n"):
            colon = line.find(":")
            if colon != -1:
                key, value = line[:colon], line[colon+2:]
                if key == "revno":
                    return int(value)
        raise ValueError("unable to find revno: in bzr output: '%s'" % out)

    def parseGotRevision(self):
        command = [self.vcexe, "version-info"]
        c = ShellCommand(self.builder, command,
                         os.path.join(self.builder.basedir, self.srcdir),
                         environ=self.env,
                         sendStdout=False, sendStderr=False, sendRC=False,
                         keepStdout=True, usePTY=False)
        d = c.start()
        def _parse(res):
            try:
                return self.get_revision_number(c.stdout)
            except ValueError:
                msg =("Bzr.parseGotRevision unable to parse output "
                      "of bzr version-info: '%s'" % c.stdout.strip())
                log.msg(msg)
                self.sendStatus({'header': msg + "\n"})
                return None
        d.addCallback(_parse)
        return d
# expose Bzr to the buildmaster under the "bzr" command name
registerSlaveCommand("bzr", Bzr, command_version)
class Mercurial(SourceBase):
    """Mercurial specific VC operation. In addition to the arguments
    handled by SourceBase, this command reads the following keys:

    ['repourl'] (required): the Cogito repository string
    """

    header = "mercurial operation"

    def setup(self, args):
        SourceBase.setup(self, args)
        self.vcexe = getCommand("hg")
        self.repourl = args['repourl']
        self.sourcedata = "%s\n" % self.repourl
        self.stdout = ""
        self.stderr = ""

    def sourcedirIsUpdateable(self):
        if os.path.exists(os.path.join(self.builder.basedir,
                                       self.srcdir, ".buildbot-patched")):
            return False
        # like Darcs, to check out a specific (old) revision, we have to do a
        # full checkout. TODO: I think 'hg pull' plus 'hg update' might work
        if self.revision:
            return False
        return os.path.isdir(os.path.join(self.builder.basedir,
                                          self.srcdir, ".hg"))

    def doVCUpdate(self):
        d = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'pull', '--verbose', self.repourl]
        c = ShellCommand(self.builder, command, d,
                         sendRC=False, timeout=self.timeout,
                         keepStdout=True, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._handleEmptyUpdate)
        d.addCallback(self._update)
        return d

    def _handleEmptyUpdate(self, res):
        if type(res) is int and res == 1:
            if self.command.stdout.find("no changes found") != -1:
                # 'hg pull', when it doesn't have anything to do, exits with
                # rc=1, and there appears to be no way to shut this off. It
                # emits a distinctive message to stdout, though. So catch
                # this and pretend that it completed successfully.
                return 0
        return res

    def doVCFull(self):
        # create an empty repo, then reuse the pull/update path to fill it
        d = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe, 'init', d]
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        cmd1 = c.start()
        def _vcupdate(res):
            return self.doVCUpdate()
        cmd1.addCallback(_vcupdate)
        return cmd1

    def _update(self, res):
        """After a pull, check whether the working dir is on the branch the
        build wants; clobber and re-checkout if not, then update."""
        if res != 0:
            return res

        # compare current branch to update
        self.update_branch = self.args.get('branch',  'default')

        d = os.path.join(self.builder.basedir, self.srcdir)
        parentscmd = [self.vcexe, 'identify', '--num', '--branch']
        cmd = ShellCommand(self.builder, parentscmd, d,
                           sendStdout=False, sendStderr=False,
                           keepStdout=True, keepStderr=True, usePTY=False)

        def _parse(res):
            if res != 0:
                msg = "'hg identify' failed: %s\n%s" % (cmd.stdout, cmd.stderr)
                self.sendStatus({'header': msg + "\n"})
                log.msg(msg)
                return res

            log.msg('Output: %s' % cmd.stdout)

            # 'hg identify --num --branch' prints "REV BRANCH"
            match = re.search(r'^(.+) (.+)$', cmd.stdout)
            assert match

            rev = match.group(1)
            current_branch = match.group(2)

            if rev == '-1':
                msg = "Fresh hg repo, don't worry about branch"
                log.msg(msg)

            elif self.update_branch != current_branch:
                msg = "Working dir is on branch '%s' and build needs '%s'. Clobbering." % (current_branch, self.update_branch)
                self.sendStatus({'header': msg + "\n"})
                log.msg(msg)

                def _vcfull(res):
                    return self.doVCFull()

                d = self.doClobber(None, self.srcdir)
                d.addCallback(_vcfull)

                return d

            else:
                msg = "Working dir on same branch as build (%s)." % (current_branch)
                log.msg(msg)

            return 0

        c = cmd.start()
        c.addCallback(_parse)
        c.addCallback(self._update2)
        return c

    def _update2(self, res):
        # finally move the working dir to the requested revision/branch,
        # discarding local changes
        d = os.path.join(self.builder.basedir, self.srcdir)

        updatecmd=[self.vcexe, 'update', '--clean', '--repository', d]
        if self.args.get('revision'):
            updatecmd.extend(['--rev', self.args['revision']])
        else:
            updatecmd.extend(['--rev', self.args.get('branch',  'default')])
        self.command = ShellCommand(self.builder, updatecmd,
                                    self.builder.basedir, sendRC=False,
                                    timeout=self.timeout, usePTY=False)
        return self.command.start()

    def parseGotRevision(self):
        # we use 'hg identify' to find out what we wound up with
        command = [self.vcexe, "identify"]
        c = ShellCommand(self.builder, command,
                         os.path.join(self.builder.basedir, self.srcdir),
                         environ=self.env,
                         sendStdout=False, sendStderr=False, sendRC=False,
                         keepStdout=True, usePTY=False)
        d = c.start()
        def _parse(res):
            # first word of the output is the changeset id
            m = re.search(r'^(\w+)', c.stdout)
            return m.group(1)
        d.addCallback(_parse)
        return d
registerSlaveCommand("hg", Mercurial, command_version)
class P4Base(SourceBase):
    """Base class for P4 source-updaters
    ['p4port'] (required): host:port for server to access
    ['p4user'] (optional): user to use for access
    ['p4passwd'] (optional): passwd to try for the user
    ['p4client'] (optional): client spec to use
    """
    def setup(self, args):
        """Cache the Perforce connection settings sent by the master."""
        SourceBase.setup(self, args)
        self.p4port = args['p4port']
        self.p4client = args['p4client']
        self.p4user = args['p4user']
        self.p4passwd = args['p4passwd']
    def _p4ConnectionArgs(self):
        """Return the common p4 connection flags (-p/-u/-P/-c) built from
        the configured port/user/password/client. Unset (falsy) settings are
        simply omitted. Shared so every 'p4' invocation builds these flags
        the same way instead of repeating the four if-blocks.
        """
        args = []
        if self.p4port:
            args.extend(['-p', self.p4port])
        if self.p4user:
            args.extend(['-u', self.p4user])
        if self.p4passwd:
            args.extend(['-P', self.p4passwd])
        if self.p4client:
            args.extend(['-c', self.p4client])
        return args
    def parseGotRevision(self):
        """Return a deferred firing with the newest changelist number of any
        file under the current (or default) client, obtained via
        'p4 changes -m 1 #have'. Fires with None if the output is
        unrecognizable.
        """
        command = ['p4']
        command.extend(self._p4ConnectionArgs())
        command.extend(['changes', '-m', '1', '#have'])
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         environ=self.env, timeout=self.timeout,
                         sendStdout=True, sendStderr=False, sendRC=False,
                         keepStdout=True, usePTY=False)
        self.command = c
        d = c.start()
        def _parse(res):
            # 'p4 -c client-name changes -m 1 "#have"' produces output like:
            #   "Change 28147 on 2008/04/07 by p4user@hostname..."
            # The number after "Change" is the one we want. Raw string fixes
            # the invalid \s/\d escape sequences of the original pattern.
            m = re.match(r'Change\s+(\d+)\s+', c.stdout)
            if m:
                return m.group(1)
            return None
        d.addCallback(_parse)
        return d
class P4(P4Base):
    """A P4 source-updater.
    ['p4port'] (required): host:port for server to access
    ['p4user'] (optional): user to use for access
    ['p4passwd'] (optional): passwd to try for the user
    ['p4client'] (optional): client spec to use
    ['p4extra_views'] (optional): additional client views to use
    """
    header = "p4"
    def setup(self, args):
        """Extend P4Base.setup with depot path, branch and view settings."""
        P4Base.setup(self, args)
        self.p4base = args['p4base']
        self.p4extra_views = args['p4extra_views']
        self.p4mode = args['mode']
        self.p4branch = args['branch']
        # NOTE(review): sourcedata appears to act as a checkout fingerprint
        # consumed by SourceBase (a change in any element presumably forces
        # a fresh checkout) -- confirm against SourceBase.
        self.sourcedata = str([
            # Perforce server.
            self.p4port,
            # Client spec.
            self.p4client,
            # Depot side of view spec.
            self.p4base,
            self.p4branch,
            self.p4extra_views,
            # Local side of view spec (srcdir is made from these).
            self.builder.basedir,
            self.mode,
            self.workdir
        ])
    def sourcedirIsUpdateable(self):
        """Return True when an in-place 'p4 sync' is acceptable."""
        if os.path.exists(os.path.join(self.builder.basedir,
                                       self.srcdir, ".buildbot-patched")):
            return False
        # We assume our client spec is still around.
        # We just say we aren't updateable if the dir doesn't exist so we
        # don't get ENOENT checking the sourcedata.
        return os.path.isdir(os.path.join(self.builder.basedir,
                                          self.srcdir))
    def doVCUpdate(self):
        # Incremental update: plain (non-forced) sync.
        return self._doP4Sync(force=False)
    def _doP4Sync(self, force):
        """Run 'p4 sync', optionally forced (-f) and optionally pinned to
        '@<revision>'; chains self._abandonOnFailure on the result."""
        command = ['p4']
        if self.p4port:
            command.extend(['-p', self.p4port])
        if self.p4user:
            command.extend(['-u', self.p4user])
        if self.p4passwd:
            command.extend(['-P', self.p4passwd])
        if self.p4client:
            command.extend(['-c', self.p4client])
        command.extend(['sync'])
        if force:
            command.extend(['-f'])
        if self.revision:
            command.extend(['@' + str(self.revision)])
        env = {}
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         environ=env, sendRC=False, timeout=self.timeout,
                         keepStdout=True, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        return d
    def doVCFull(self):
        """Full checkout: (re)create the client spec via 'p4 client -i',
        then force-sync everything into it."""
        env = {}
        command = ['p4']
        # Compose a Perforce client specification; the exact tabs/newlines
        # below are part of the p4 spec format and are fed to stdin of
        # 'p4 client -i'.
        client_spec = ''
        client_spec += "Client: %s\n\n" % self.p4client
        client_spec += "Owner: %s\n\n" % self.p4user
        client_spec += "Description:\n\tCreated by %s\n\n" % self.p4user
        client_spec += "Root:\t%s\n\n" % self.builder.basedir
        client_spec += "Options:\tallwrite rmdir\n\n"
        client_spec += "LineEnd:\tlocal\n\n"
        # Setup a view
        client_spec += "View:\n\t%s" % (self.p4base)
        if self.p4branch:
            client_spec += "%s/" % (self.p4branch)
        client_spec += "... //%s/%s/...\n" % (self.p4client, self.srcdir)
        if self.p4extra_views:
            for k, v in self.p4extra_views:
                client_spec += "\t%s/... //%s/%s%s/...\n" % (k, self.p4client,
                                                             self.srcdir, v)
        # No '-c' here: the client name travels inside the spec itself.
        if self.p4port:
            command.extend(['-p', self.p4port])
        if self.p4user:
            command.extend(['-u', self.p4user])
        if self.p4passwd:
            command.extend(['-P', self.p4passwd])
        command.extend(['client', '-i'])
        log.msg(client_spec)
        c = ShellCommand(self.builder, command, self.builder.basedir,
                         environ=env, sendRC=False, timeout=self.timeout,
                         initialStdin=client_spec, usePTY=False)
        self.command = c
        d = c.start()
        d.addCallback(self._abandonOnFailure)
        # Once the spec is saved, force-fetch the whole view.
        d.addCallback(lambda _: self._doP4Sync(force=True))
        return d
registerSlaveCommand("p4", P4, command_version)
class P4Sync(P4Base):
    """A partial P4 source-updater. Requires manual setup of a per-slave P4
    environment. The only thing which comes from the master is P4PORT.
    'mode' is required to be 'copy'.
    ['p4port'] (required): host:port for server to access
    ['p4user'] (optional): user to use for access
    ['p4passwd'] (optional): passwd to try for the user
    ['p4client'] (optional): client spec to use
    """
    header = "p4 sync"
    def setup(self, args):
        """Locate the p4 executable on top of the base connection setup."""
        P4Base.setup(self, args)
        self.vcexe = getCommand("p4")
    def sourcedirIsUpdateable(self):
        # The pre-configured client workspace is always assumed usable.
        return True
    def _doVC(self, force):
        """Run 'p4 sync' in the source dir; force=True adds -f so files
        already at the right revision are re-fetched anyway."""
        workdir = os.path.join(self.builder.basedir, self.srcdir)
        command = [self.vcexe]
        # Connection options are only passed when configured.
        for flag, value in (('-p', self.p4port), ('-u', self.p4user),
                            ('-P', self.p4passwd), ('-c', self.p4client)):
            if value:
                command.extend([flag, value])
        command.append('sync')
        if force:
            command.append('-f')
        if self.revision:
            command.append('@' + self.revision)
        c = ShellCommand(self.builder, command, workdir, environ={},
                         sendRC=False, timeout=self.timeout, usePTY=False)
        self.command = c
        return c.start()
    def doVCUpdate(self):
        return self._doVC(force=False)
    def doVCFull(self):
        return self._doVC(force=True)
registerSlaveCommand("p4sync", P4Sync, command_version)
| mpl-2.0 |
mparus/android_kernel_huawei_msm8916-caf | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py | 4653 | 3596 | # EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyze the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have a good way to tell the event type, except by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
    """Factory: choose the event class from the raw buffer length.

    144 bytes -> basic PEBS record, 176 bytes -> PEBS plus load-latency
    words, anything else -> generic perf event.
    """
    size = len(raw_buf)
    if size == 144:
        event_cls = PebsEvent
    elif size == 176:
        event_cls = PebsNHM
    else:
        event_cls = PerfEvent
    return event_cls(name, comm, dso, symbol, raw_buf)
class PerfEvent(object):
event_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
self.name = name
self.comm = comm
self.dso = dso
self.symbol = symbol
self.raw_buf = raw_buf
self.ev_type = ev_type
PerfEvent.event_num += 1
def show(self):
print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
    """Basic Intel PEBS sample: the raw buffer carries the interrupted
    context -- EFLAGS, linear IP and the general-purpose registers."""
    # Running count of PEBS samples seen.
    pebs_num = 0
    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
        # The first 80 bytes are ten native-order u64 register values.
        (self.flags, self.ip, self.ax, self.bx, self.cx,
         self.dx, self.si, self.di, self.bp, self.sp) = \
            struct.unpack('QQQQQQQQQQ', raw_buf[0:80])
        PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsEvent.pebs_num += 1
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
    """PEBS sample with the Nehalem/Westmere load-latency extension: four
    extra u64 words (status, data linear address, data source encoding,
    latency in cycles) follow the basic PEBS record."""
    # Running count of load-latency PEBS samples seen.
    pebs_nhm_num = 0
    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
        # The load-latency words occupy bytes 144..176 of the raw buffer.
        self.status, self.dla, self.dse, self.lat = \
            struct.unpack('QQQQ', raw_buf[144:176])
        PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsNHM.pebs_nhm_num += 1
| gpl-2.0 |
amolenaar/gaphor | gaphor/tests/testcase.py | 1 | 5292 | """
Basic test case for Gaphor tests.
Everything is about services so the TestCase can define its required
services and start off.
"""
import logging
import unittest
from io import StringIO
from typing import Type, TypeVar
from gaphas.aspect import ConnectionSink
from gaphas.aspect import Connector as ConnectorAspect
# For DiagramItemConnector aspect:
import gaphor.diagram.diagramtools # noqa
from gaphor import UML
from gaphor.application import Session
from gaphor.diagram.connectors import Connector
from gaphor.diagram.grouping import Group
# Generic type for TestCase.create(): the item returned has the class given.
T = TypeVar("T")
log = logging.getLogger("Gaphor")
# Keep test runs quiet: suppress info/debug output from Gaphor itself.
log.setLevel(logging.WARNING)
class TestCase(unittest.TestCase):
    """
    Base test case that boots a Gaphor session with a fixed set of services
    and offers helpers to create, connect, group and (de)serialize diagram
    items.
    """
    # Services started for every test; subclasses may override this list.
    services = [
        "event_manager",
        "component_registry",
        "element_factory",
        "element_dispatcher",
        "modeling_language",
        "sanitizer",
    ]
    def setUp(self):
        """Start a session, check the factory is empty, create a diagram."""
        self.session = Session(services=self.services)
        self.element_factory = self.session.get_service("element_factory")
        self.modeling_language = self.session.get_service("modeling_language")
        # Sanity check: a fresh factory must contain no elements yet.
        assert len(list(self.element_factory.select())) == 0, list(
            self.element_factory.select()
        )
        self.diagram = self.element_factory.create(UML.Diagram)
        # After creating the diagram it must be the only element around.
        assert len(list(self.element_factory.select())) == 1, list(
            self.element_factory.select()
        )
    def tearDown(self):
        """Shut down factory and session so no state leaks between tests."""
        self.element_factory.shutdown()
        self.session.shutdown()
    def get_service(self, name):
        """Look up a service from the running session by name."""
        return self.session.get_service(name)
    def create(self, item_cls: Type[T], subject_cls=None, subject=None) -> T:
        """
        Create an item with specified subject.
        """
        if subject_cls is not None:
            subject = self.element_factory.create(subject_cls)
        item = self.diagram.create(item_cls, subject=subject)
        self.diagram.canvas.update()
        return item
    def allow(self, line, handle, item, port=None):
        """
        Glue line's handle to an item.
        If port is not provided, then first port is used.
        """
        if port is None and len(item.ports()) > 0:
            port = item.ports()[0]
        adapter = Connector(item, line)
        return adapter.allow(handle, port)
    def connect(self, line, handle, item, port=None):
        """
        Connect line's handle to an item.
        If port is not provided, then first port is used.
        """
        canvas = line.canvas
        # Both items must live on the same canvas to be connectable.
        assert canvas is item.canvas
        if port is None and len(item.ports()) > 0:
            port = item.ports()[0]
        sink = ConnectionSink(item, port)
        connector = ConnectorAspect(line, handle)
        connector.connect(sink)
        # Verify the connection was actually registered on the canvas.
        cinfo = canvas.get_connection(handle)
        assert cinfo.connected is item
        assert cinfo.port is port
    def disconnect(self, line, handle):
        """
        Disconnect line's handle.
        """
        canvas = self.diagram.canvas
        # disconnection on adapter level is performed due to callback, so
        # no adapter look up here
        canvas.disconnect_item(line, handle)
        assert not canvas.get_connection(handle)
    def get_connected(self, handle):
        """
        Get item connected to line via handle.
        """
        cinfo = self.diagram.canvas.get_connection(handle)
        if cinfo:
            return cinfo.connected
        return None
    def get_connection(self, handle):
        """
        Get connection information.
        """
        return self.diagram.canvas.get_connection(handle)
    def can_group(self, parent, item):
        """
        Check if an item can be grouped by parent.
        """
        adapter = Group(parent, item)
        return adapter.can_contain()
    def group(self, parent, item):
        """
        Group item within a parent.
        """
        self.diagram.canvas.reparent(item, parent)
        adapter = Group(parent, item)
        adapter.group()
    def ungroup(self, parent, item):
        """
        Remove item from a parent.
        """
        adapter = Group(parent, item)
        adapter.ungroup()
        self.diagram.canvas.reparent(item, None)
    def kindof(self, cls):
        """
        Find UML metaclass instances using element factory.
        """
        return self.element_factory.lselect(cls)
    def save(self):
        """
        Save diagram into string.
        """
        from gaphor.storage import storage
        from gaphor.storage.xmlwriter import XMLWriter
        f = StringIO()
        storage.save(XMLWriter(f), factory=self.element_factory)
        data = f.getvalue()
        f.close()
        # Flush and verify the factory is really empty after saving, so a
        # subsequent load() starts from a clean slate.
        self.element_factory.flush()
        assert not list(self.element_factory.select())
        assert not list(self.element_factory.lselect())
        return data
    def load(self, data):
        """
        Load data from specified string. Update ``TestCase.diagram``
        attribute to hold new loaded diagram.
        """
        from gaphor.storage import storage
        f = StringIO(data)
        storage.load(
            f, factory=self.element_factory, modeling_language=self.modeling_language
        )
        f.close()
        self.diagram = self.element_factory.lselect(UML.Diagram)[0]
| lgpl-2.1 |
patriciolobos/desa8 | openerp/addons/base/tests/test_views.py | 60 | 39203 | # -*- encoding: utf-8 -*-
from functools import partial
import itertools
import unittest2
from lxml import etree as ET
from lxml.builder import E
from psycopg2 import IntegrityError
from openerp.tests import common
import openerp.tools
Field = E.field
class ViewCase(common.TransactionCase):
    """Shared fixture: ir.ui.view shortcuts and structural lxml equality."""
    def setUp(self):
        super(ViewCase, self).setUp()
        # Make assertEqual compare lxml elements structurally, not by id.
        self.addTypeEqualityFunc(ET._Element, self.assertTreesEqual)
        self.Views = self.registry('ir.ui.view')
    def browse(self, id, context=None):
        """Shortcut for ir.ui.view.browse()."""
        return self.Views.browse(self.cr, self.uid, id, context=context)
    def create(self, value, context=None):
        """Shortcut for ir.ui.view.create()."""
        return self.Views.create(self.cr, self.uid, value, context=context)
    def read_combined(self, id):
        """Read the fully-inherited arch of the view, considering every view
        in the database as a candidate extension."""
        candidate_ids = self.Views.search(self.cr, self.uid, [])
        return self.Views.read_combined(
            self.cr, self.uid,
            id, ['arch'],
            context={'check_view_ids': candidate_ids}
        )
    def assertTreesEqual(self, n1, n2, msg=None):
        """Assert two lxml trees match on tag, trimmed text/tail, attributes
        (order-insensitive) and, recursively, their children."""
        self.assertEqual(n1.tag, n2.tag, msg)
        self.assertEqual((n1.text or '').strip(), (n2.text or '').strip(), msg)
        self.assertEqual((n1.tail or '').strip(), (n2.tail or '').strip(), msg)
        # lxml attrib mappings are order-sensitive; compare as plain dicts so
        # attribute order does not affect equality.
        self.assertEqual(dict(n1.attrib), dict(n2.attrib), msg)
        # izip_longest pads with None, so differing child counts fail too.
        for c1, c2 in itertools.izip_longest(n1, n2):
            self.assertEqual(c1, c2, msg)
class TestNodeLocator(common.TransactionCase):
    """
    The node locator returns None when it can not find a node, and the first
    match when it finds something (no jquery-style node sets)
    """
    def setUp(self):
        super(TestNodeLocator, self).setUp()
        # All tests exercise ir.ui.view.locate_node on hand-built lxml trees.
        self.Views = self.registry('ir.ui.view')
    def test_no_match_xpath(self):
        """
        xpath simply uses the provided @expr pattern to find a node
        """
        node = self.Views.locate_node(
            E.root(E.foo(), E.bar(), E.baz()),
            E.xpath(expr="//qux"))
        self.assertIsNone(node)
    def test_match_xpath(self):
        """A matching xpath expr returns the exact node object."""
        bar = E.bar()
        node = self.Views.locate_node(
            E.root(E.foo(), bar, E.baz()),
            E.xpath(expr="//bar"))
        self.assertIs(node, bar)
    def test_no_match_field(self):
        """
        A field spec will match by @name against all fields of the view
        """
        node = self.Views.locate_node(
            E.root(E.foo(), E.bar(), E.baz()),
            Field(name="qux"))
        self.assertIsNone(node)
        node = self.Views.locate_node(
            E.root(Field(name="foo"), Field(name="bar"), Field(name="baz")),
            Field(name="qux"))
        self.assertIsNone(node)
    def test_match_field(self):
        """A field spec with a matching @name returns that field node."""
        bar = Field(name="bar")
        node = self.Views.locate_node(
            E.root(Field(name="foo"), bar, Field(name="baz")),
            Field(name="bar"))
        self.assertIs(node, bar)
    def test_no_match_other(self):
        """
        Non-xpath non-fields are matched by node name first
        """
        node = self.Views.locate_node(
            E.root(E.foo(), E.bar(), E.baz()),
            E.qux())
        self.assertIsNone(node)
    def test_match_other(self):
        """A spec whose tag matches a node returns that node."""
        bar = E.bar()
        node = self.Views.locate_node(
            E.root(E.foo(), bar, E.baz()),
            E.bar())
        self.assertIs(bar, node)
    def test_attribute_mismatch(self):
        """
        Non-xpath non-field are filtered by matching attributes on spec and
        matched nodes
        """
        node = self.Views.locate_node(
            E.root(E.foo(attr='1'), E.bar(attr='2'), E.baz(attr='3')),
            E.bar(attr='5'))
        self.assertIsNone(node)
    def test_attribute_filter(self):
        """Attribute filtering picks the node whose attributes all match."""
        match = E.bar(attr='2')
        node = self.Views.locate_node(
            E.root(E.bar(attr='1'), match, E.root(E.bar(attr='3'))),
            E.bar(attr='2'))
        self.assertIs(node, match)
    def test_version_mismatch(self):
        """
        A @version on the spec will be matched against the view's version
        """
        node = self.Views.locate_node(
            E.root(E.foo(attr='1'), version='4'),
            E.foo(attr='1', version='3'))
        self.assertIsNone(node)
class TestViewInheritance(ViewCase):
    """Tests view-inheritance resolution over a small hand-built view tree
    (A with nested form extensions, B/C as tree views)."""
    def arch_for(self, name, view_type='form', parent=None):
        """ Generates a trivial view of the specified ``view_type``.
        The generated view is empty but ``name`` is set as its root's ``@string``.
        If ``parent`` is not falsy, generates an extension view (instead of
        a root view) replacing the parent's ``@string`` by ``name``
        :param str name: ``@string`` value for the view root
        :param str view_type:
        :param bool parent:
        :return: generated arch
        :rtype: str
        """
        if not parent:
            element = E(view_type, string=name)
        else:
            element = E(view_type,
                        E.attribute(name, name='string'),
                        position='attributes'
            )
        return ET.tostring(element)
    def makeView(self, name, parent=None, arch=None):
        """ Generates a basic ir.ui.view with the provided name, parent and arch.
        If no parent is provided, the view is top-level.
        If no arch is provided, generates one by calling :meth:`~.arch_for`.
        :param str name:
        :param int parent: id of the parent view, if any
        :param str arch:
        :returns: the created view's id.
        :rtype: int
        """
        view_id = self.View.create(self.cr, self.uid, {
            'model': self.model,
            'name': name,
            'arch': arch or self.arch_for(name, parent=parent),
            'inherit_id': parent,
            'priority': 5, # higher than default views
        })
        self.ids[name] = view_id
        return view_id
    def setUp(self):
        super(TestViewInheritance, self).setUp()
        self.model = 'ir.ui.view.custom'
        self.View = self.registry('ir.ui.view')
        # NOTE(review): _init is saved and forced off, apparently to bypass
        # registry-initialization behavior during these tests -- confirm.
        self._init = self.View.pool._init
        self.View.pool._init = False
        self.ids = {}
        # Build the fixture tree:
        #   A -> A1 -> A11 -> A111, A1 -> A12, A -> A2 -> A21, A2 -> A22 -> A221
        #   B -> B1 (tree views), C (tree view, low priority)
        a = self.makeView("A")
        a1 = self.makeView("A1", a)
        a11 = self.makeView("A11", a1)
        self.makeView("A111", a11)
        self.makeView("A12", a1)
        a2 = self.makeView("A2", a)
        self.makeView("A21", a2)
        a22 = self.makeView("A22", a2)
        self.makeView("A221", a22)
        b = self.makeView('B', arch=self.arch_for("B", 'tree'))
        self.makeView('B1', b, arch=self.arch_for("B1", 'tree', parent=b))
        c = self.makeView('C', arch=self.arch_for("C", 'tree'))
        # Give C the best (lowest) priority so it wins default-view lookup.
        self.View.write(self.cr, self.uid, c, {'priority': 1})
    def tearDown(self):
        # Restore the registry _init flag saved in setUp.
        self.View.pool._init = self._init
        super(TestViewInheritance, self).tearDown()
    def test_get_inheriting_views_arch(self):
        """Direct children (and only direct children) are returned."""
        self.assertEqual(self.View.get_inheriting_views_arch(
            self.cr, self.uid, self.ids['A'], self.model), [
            (self.arch_for('A1', parent=True), self.ids['A1']),
            (self.arch_for('A2', parent=True), self.ids['A2']),
        ])
        self.assertEqual(self.View.get_inheriting_views_arch(
            self.cr, self.uid, self.ids['A21'], self.model),
            [])
        self.assertEqual(self.View.get_inheriting_views_arch(
            self.cr, self.uid, self.ids['A11'], self.model),
            [(self.arch_for('A111', parent=True), self.ids['A111'])])
    def test_default_view(self):
        """The default view per type is the root view with best priority."""
        default = self.View.default_view(
            self.cr, self.uid, model=self.model, view_type='form')
        self.assertEqual(default, self.ids['A'])
        default_tree = self.View.default_view(
            self.cr, self.uid, model=self.model, view_type='tree')
        self.assertEqual(default_tree, self.ids['C'])
    def test_no_default_view(self):
        """Unknown models or view types yield no default view."""
        self.assertFalse(
            self.View.default_view(
                self.cr, self.uid, model='does.not.exist', view_type='form'))
        self.assertFalse(
            self.View.default_view(
                self.cr, self.uid, model=self.model, view_type='graph'))
class TestApplyInheritanceSpecs(ViewCase):
    """ Applies a sequence of inheritance specification nodes to a base
    architecture. IO state parameters (cr, uid, model, context) are used for
    error reporting
    The base architecture is altered in-place.
    """
    def setUp(self):
        super(TestApplyInheritanceSpecs, self).setUp()
        self.View = self.registry('ir.ui.view')
        # Minimal target arch: a form with one field, mutated by each test.
        self.base_arch = E.form(
            Field(name="target"),
            string="Title")
    def test_replace(self):
        """position="replace" with content swaps the node for the content."""
        spec = Field(
            Field(name="replacement"),
            name="target", position="replace")
        self.View.apply_inheritance_specs(self.cr, self.uid,
                                          self.base_arch,
                                          spec, None)
        self.assertEqual(
            self.base_arch,
            E.form(Field(name="replacement"), string="Title"))
    def test_delete(self):
        """position="replace" with no content removes the node."""
        spec = Field(name="target", position="replace")
        self.View.apply_inheritance_specs(self.cr, self.uid,
                                          self.base_arch,
                                          spec, None)
        self.assertEqual(
            self.base_arch,
            E.form(string="Title"))
    def test_insert_after(self):
        spec = Field(
            Field(name="inserted"),
            name="target", position="after")
        self.View.apply_inheritance_specs(self.cr, self.uid,
                                          self.base_arch,
                                          spec, None)
        self.assertEqual(
            self.base_arch,
            E.form(
                Field(name="target"),
                Field(name="inserted"),
                string="Title"
            ))
    def test_insert_before(self):
        spec = Field(
            Field(name="inserted"),
            name="target", position="before")
        self.View.apply_inheritance_specs(self.cr, self.uid,
                                          self.base_arch,
                                          spec, None)
        self.assertEqual(
            self.base_arch,
            E.form(
                Field(name="inserted"),
                Field(name="target"),
                string="Title"))
    def test_insert_inside(self):
        """position="inside" appends content to the target's children."""
        default = Field(Field(name="inserted"), name="target")
        spec = Field(Field(name="inserted 2"), name="target", position='inside')
        self.View.apply_inheritance_specs(self.cr, self.uid,
                                          self.base_arch,
                                          default, None)
        self.View.apply_inheritance_specs(self.cr, self.uid,
                                          self.base_arch,
                                          spec, None)
        self.assertEqual(
            self.base_arch,
            E.form(
                Field(
                    Field(name="inserted"),
                    Field(name="inserted 2"),
                    name="target"),
                string="Title"))
    def test_unpack_data(self):
        """A <data> wrapper is transparent: each child spec is applied."""
        spec = E.data(
            Field(Field(name="inserted 0"), name="target"),
            Field(Field(name="inserted 1"), name="target"),
            Field(Field(name="inserted 2"), name="target"),
            Field(Field(name="inserted 3"), name="target"),
        )
        self.View.apply_inheritance_specs(self.cr, self.uid,
                                          self.base_arch,
                                          spec, None)
        self.assertEqual(
            self.base_arch,
            E.form(
                Field(
                    Field(name="inserted 0"),
                    Field(name="inserted 1"),
                    Field(name="inserted 2"),
                    Field(name="inserted 3"),
                    name="target"),
                string="Title"))
    @openerp.tools.mute_logger('openerp.addons.base.ir.ir_ui_view')
    def test_invalid_position(self):
        """An unknown @position value raises."""
        spec = Field(
            Field(name="whoops"),
            name="target", position="serious_series")
        with self.assertRaises(AttributeError):
            self.View.apply_inheritance_specs(self.cr, self.uid,
                                              self.base_arch,
                                              spec, None)
    @openerp.tools.mute_logger('openerp.addons.base.ir.ir_ui_view')
    def test_incorrect_version(self):
        """A spec @version that doesn't match the node's version fails."""
        # Version ignored on //field elements, so use something else
        arch = E.form(E.element(foo="42"))
        spec = E.element(
            Field(name="placeholder"),
            foo="42", version="7.0")
        with self.assertRaises(AttributeError):
            self.View.apply_inheritance_specs(self.cr, self.uid,
                                              arch,
                                              spec, None)
    @openerp.tools.mute_logger('openerp.addons.base.ir.ir_ui_view')
    def test_target_not_found(self):
        """A spec that matches nothing in the base arch raises."""
        spec = Field(name="targut")
        with self.assertRaises(AttributeError):
            self.View.apply_inheritance_specs(self.cr, self.uid,
                                              self.base_arch,
                                              spec, None)
class TestApplyInheritedArchs(ViewCase):
    """ Applies a sequence of modificator archs to a base view
    """
    # NOTE(review): placeholder test case -- no tests implemented yet.
class TestNoModel(ViewCase):
    """Views without a model (qweb templates): creation and translation."""
    def test_create_view_nomodel(self):
        """A qweb view can be created with no model; defaults are applied."""
        View = self.registry('ir.ui.view')
        view_id = View.create(self.cr, self.uid, {
            'name': 'dummy',
            'arch': '<template name="foo"/>',
            'inherit_id': False,
            'type': 'qweb',
        })
        fields = ['name', 'arch', 'type', 'priority', 'inherit_id', 'model']
        [view] = View.read(self.cr, self.uid, [view_id], fields)
        self.assertEqual(view, {
            'id': view_id,
            'name': 'dummy',
            'arch': '<template name="foo"/>',
            'type': 'qweb',
            'priority': 16,
            'inherit_id': False,
            'model': False,
        })
    # Class-level fixture tree shared by test_qweb_translation below;
    # text_para is kept as a handle so its text can be swapped in the test.
    text_para = E.p("", {'class': 'legalese'})
    arch = E.body(
        E.div(
            E.h1("Title"),
            id="header"),
        E.p("Welcome!"),
        E.div(
            E.hr(),
            text_para,
            id="footer"),
        {'class': "index"},)
    def test_qweb_translation(self):
        """
        Test if translations work correctly without a model
        """
        View = self.registry('ir.ui.view')
        self.registry('res.lang').load_lang(self.cr, self.uid, 'fr_FR')
        orig_text = "Copyright copyrighter"
        translated_text = u"Copyrighter, tous droits réservés"
        self.text_para.text = orig_text
        self.registry('ir.translation').create(self.cr, self.uid, {
            'name': 'website',
            'type': 'view',
            'lang': 'fr_FR',
            'src': orig_text,
            'value': translated_text,
        })
        # translate_qweb should substitute the translated value in-tree.
        sarch = View.translate_qweb(self.cr, self.uid, None, self.arch, 'fr_FR')
        self.text_para.text = translated_text
        self.assertEqual(sarch, self.arch)
class TestTemplating(ViewCase):
    """Branding distribution over qweb views: each rendered node should be
    annotated (data-oe-*) with the view it originates from."""
    def setUp(self):
        import openerp.modules
        super(TestTemplating, self).setUp()
        self._pool = openerp.modules.registry.RegistryManager.get(common.DB)
        self._init = self._pool._init
        # NOTE(review): _init is forced off, apparently to bypass
        # registry-initialization behavior for these tests -- confirm.
        self._pool._init = False
    def tearDown(self):
        # Restore the registry flag saved in setUp.
        self._pool._init = self._init
        super(TestTemplating, self).tearDown()
    def test_branding_inherit(self):
        """Nodes added by an extension are branded with the extension id;
        nodes of the root view are branded with the root view id."""
        Views = self.registry('ir.ui.view')
        id = Views.create(self.cr, self.uid, {
            'name': "Base view",
            'type': 'qweb',
            'arch': """<root>
                <item order="1"/>
            </root>
            """
        })
        id2 = Views.create(self.cr, self.uid, {
            'name': "Extension",
            'type': 'qweb',
            'inherit_id': id,
            'arch': """<xpath expr="//item" position="before">
                <item order="2"/>
            </xpath>
            """
        })
        arch_string = Views.read_combined(
            self.cr, self.uid, id, fields=['arch'],
            context={'inherit_branding': True})['arch']
        arch = ET.fromstring(arch_string)
        Views.distribute_branding(arch)
        [initial] = arch.xpath('//item[@order=1]')
        self.assertEqual(
            str(id),
            initial.get('data-oe-id'),
            "initial should come from the root view")
        self.assertEqual(
            '/root[1]/item[1]',
            initial.get('data-oe-xpath'),
            "initial's xpath should be within the root view only")
        [second] = arch.xpath('//item[@order=2]')
        self.assertEqual(
            str(id2),
            second.get('data-oe-id'),
            "second should come from the extension view")
    def test_branding_distribute_inner(self):
        """ Checks that the branding is correctly distributed within a view
        extension
        """
        Views = self.registry('ir.ui.view')
        id = Views.create(self.cr, self.uid, {
            'name': "Base view",
            'type': 'qweb',
            'arch': """<root>
                <item order="1"/>
            </root>"""
        })
        id2 = Views.create(self.cr, self.uid, {
            'name': "Extension",
            'type': 'qweb',
            'inherit_id': id,
            'arch': """<xpath expr="//item" position="before">
                <item order="2">
                    <content t-att-href="foo">bar</content>
                </item>
            </xpath>"""
        })
        arch_string = Views.read_combined(
            self.cr, self.uid, id, fields=['arch'],
            context={'inherit_branding': True})['arch']
        arch = ET.fromstring(arch_string)
        Views.distribute_branding(arch)
        self.assertEqual(
            arch,
            E.root(
                E.item(
                    E.content("bar", {
                        't-att-href': "foo",
                        'data-oe-model': 'ir.ui.view',
                        'data-oe-id': str(id2),
                        'data-oe-field': 'arch',
                        'data-oe-xpath': '/xpath/item/content[1]',
                        'data-oe-source-id': str(id)
                    }), {
                        'order': '2',
                    }),
                E.item({
                    'order': '1',
                    'data-oe-model': 'ir.ui.view',
                    'data-oe-id': str(id),
                    'data-oe-field': 'arch',
                    'data-oe-xpath': '/root[1]/item[1]'
                })
            )
        )
    def test_esc_no_branding(self):
        """Nodes rendered through t-esc must not receive branding."""
        Views = self.registry('ir.ui.view')
        id = Views.create(self.cr, self.uid, {
            'name': "Base View",
            'type': 'qweb',
            'arch': """<root>
                <item><span t-esc="foo"/></item>
            </root>""",
        })
        arch_string = Views.read_combined(
            self.cr, self.uid, id, fields=['arch'],
            context={'inherit_branding': True})['arch']
        arch = ET.fromstring(arch_string)
        Views.distribute_branding(arch)
        self.assertEqual(arch, E.root(E.item(E.span({'t-esc': "foo"}))))
    def test_ignore_unbrand(self):
        """t-ignore suppresses branding for a node and all injected
        sub-view content inside it."""
        Views = self.registry('ir.ui.view')
        id = Views.create(self.cr, self.uid, {
            'name': "Base view",
            'type': 'qweb',
            'arch': """<root>
                <item order="1" t-ignore="true">
                    <t t-esc="foo"/>
                </item>
            </root>"""
        })
        id2 = Views.create(self.cr, self.uid, {
            'name': "Extension",
            'type': 'qweb',
            'inherit_id': id,
            'arch': """<xpath expr="//item[@order='1']" position="inside">
                <item order="2">
                    <content t-att-href="foo">bar</content>
                </item>
            </xpath>"""
        })
        arch_string = Views.read_combined(
            self.cr, self.uid, id, fields=['arch'],
            context={'inherit_branding': True})['arch']
        arch = ET.fromstring(arch_string)
        Views.distribute_branding(arch)
        self.assertEqual(
            arch,
            E.root(
                E.item(
                    {'t-ignore': 'true', 'order': '1'},
                    E.t({'t-esc': 'foo'}),
                    E.item(
                        {'order': '2'},
                        E.content(
                            {'t-att-href': 'foo'},
                            "bar")
                    )
                )
            ),
            "t-ignore should apply to injected sub-view branding, not just to"
            " the main view's"
        )
class test_views(ViewCase):
def test_nonexistent_attribute_removal(self):
Views = self.registry('ir.ui.view')
Views.create(self.cr, self.uid, {
'name': 'Test View',
'model': 'ir.ui.view',
'inherit_id': self.browse_ref('base.view_view_tree').id,
'arch': """<?xml version="1.0"?>
<xpath expr="//field[@name='name']" position="attributes">
<attribute name="non_existing_attribute"></attribute>
</xpath>
""",
})
def _insert_view(self, **kw):
"""Insert view into database via a query to passtrough validation"""
kw.pop('id', None)
kw.setdefault('mode', 'extension' if kw.get('inherit_id') else 'primary')
kw.setdefault('active', True)
keys = sorted(kw.keys())
fields = ','.join('"%s"' % (k.replace('"', r'\"'),) for k in keys)
params = ','.join('%%(%s)s' % (k,) for k in keys)
query = 'INSERT INTO ir_ui_view(%s) VALUES(%s) RETURNING id' % (fields, params)
self.cr.execute(query, kw)
return self.cr.fetchone()[0]
def test_custom_view_validation(self):
Views = self.registry('ir.ui.view')
model = 'ir.actions.act_url'
validate = partial(Views._validate_custom_views, self.cr, self.uid, model)
# validation of a single view
vid = self._insert_view(
name='base view',
model=model,
priority=1,
arch="""<?xml version="1.0"?>
<tree string="view">
<field name="url"/>
</tree>
""",
)
self.assertTrue(validate()) # single view
# validation of a inherited view
self._insert_view(
name='inherited view',
model=model,
priority=1,
inherit_id=vid,
arch="""<?xml version="1.0"?>
<xpath expr="//field[@name='url']" position="before">
<field name="name"/>
</xpath>
""",
)
self.assertTrue(validate()) # inherited view
def test_view_inheritance(self):
Views = self.registry('ir.ui.view')
v1 = Views.create(self.cr, self.uid, {
'name': "bob",
'model': 'ir.ui.view',
'arch': """
<form string="Base title" version="7.0">
<separator string="separator" colspan="4"/>
<footer>
<button name="action_next" type="object" string="Next button"/>
or
<button string="Skip" special="cancel" />
</footer>
</form>
"""
})
v2 = Views.create(self.cr, self.uid, {
'name': "edmund",
'model': 'ir.ui.view',
'inherit_id': v1,
'arch': """
<data>
<form position="attributes" version="7.0">
<attribute name="string">Replacement title</attribute>
</form>
<footer position="replace">
<footer>
<button name="action_next" type="object" string="New button"/>
</footer>
</footer>
<separator string="separator" position="replace">
<p>Replacement data</p>
</separator>
</data>
"""
})
v3 = Views.create(self.cr, self.uid, {
'name': 'jake',
'model': 'ir.ui.view',
'inherit_id': v1,
'priority': 17,
'arch': """
<footer position="attributes">
<attribute name="thing">bob</attribute>
</footer>
"""
})
view = self.registry('ir.ui.view').fields_view_get(
self.cr, self.uid, v2, view_type='form', context={
# fucking what?
'check_view_ids': [v2, v3]
})
self.assertEqual(view['type'], 'form')
self.assertEqual(
ET.fromstring(
view['arch'],
parser=ET.XMLParser(remove_blank_text=True)
),
E.form(
E.p("Replacement data"),
E.footer(
E.button(name="action_next", type="object", string="New button"),
thing="bob"
),
string="Replacement title", version="7.0"))
def test_view_inheritance_divergent_models(self):
    """Same scenario as test_view_inheritance, except the base view lives
    on ir.ui.view.custom and v3 targets ir.ui.menu: extensions whose model
    does not match must be skipped, so only v2's changes appear (no
    thing="bob" attribute on the footer).
    """
    Views = self.registry('ir.ui.view')
    # Base view on a *different* model than the one being resolved.
    v1 = Views.create(self.cr, self.uid, {
        'name': "bob",
        'model': 'ir.ui.view.custom',
        'arch': """
            <form string="Base title" version="7.0">
                <separator string="separator" colspan="4"/>
                <footer>
                    <button name="action_next" type="object" string="Next button"/>
                    or
                    <button string="Skip" special="cancel" />
                </footer>
            </form>
        """
    })
    # Extension on the requested model: applied.
    v2 = Views.create(self.cr, self.uid, {
        'name': "edmund",
        'model': 'ir.ui.view',
        'inherit_id': v1,
        'arch': """
            <data>
                <form position="attributes" version="7.0">
                    <attribute name="string">Replacement title</attribute>
                </form>
                <footer position="replace">
                    <footer>
                        <button name="action_next" type="object" string="New button"/>
                    </footer>
                </footer>
                <separator string="separator" position="replace">
                    <p>Replacement data</p>
                </separator>
            </data>
        """
    })
    # Extension on an unrelated model (ir.ui.menu): must be ignored even
    # though it is listed in check_view_ids.
    v3 = Views.create(self.cr, self.uid, {
        'name': 'jake',
        'model': 'ir.ui.menu',
        'inherit_id': v1,
        'priority': 17,
        'arch': """
            <footer position="attributes">
                <attribute name="thing">bob</attribute>
            </footer>
        """
    })
    view = self.registry('ir.ui.view').fields_view_get(
        self.cr, self.uid, v2, view_type='form', context={
            # restrict which inheriting views are applied during resolution
            'check_view_ids': [v2, v3]
        })
    self.assertEqual(view['type'], 'form')
    # v3's attribute change is absent from the expected tree.
    self.assertEqual(
        ET.fromstring(
            view['arch'],
            parser=ET.XMLParser(remove_blank_text=True)
        ),
        E.form(
            E.p("Replacement data"),
            E.footer(
                E.button(name="action_next", type="object", string="New button")),
            string="Replacement title", version="7.0"
        ))
class ViewModeField(ViewCase):
    """
    This should probably, eventually, be folded back into other test case
    classes, integrating the test (or not) of the mode field to regular cases
    """
    def testModeImplicitValue(self):
        """ mode is auto-generated from inherit_id:
        * inherit_id -> mode=extension
        * not inherit_id -> mode=primary
        """
        view = self.browse(self.create({
            'inherit_id': None,
            'arch': '<qweb/>'
        }))
        self.assertEqual(view.mode, 'primary')

        view2 = self.browse(self.create({
            'inherit_id': view.id,
            'arch': '<qweb/>'
        }))
        self.assertEqual(view2.mode, 'extension')

    @openerp.tools.mute_logger('openerp.sql_db')
    def testModeExplicit(self):
        """An explicit mode may override the implicit one, but
        mode=extension without an inherit_id violates the DB constraint
        (sql_db logger muted to silence the expected error).
        """
        view = self.browse(self.create({
            'inherit_id': None,
            'arch': '<qweb/>'
        }))
        # NOTE(review): view2.mode is never asserted; presumably this was
        # meant to check it stays 'primary' despite the inherit_id — confirm.
        view2 = self.browse(self.create({
            'inherit_id': view.id,
            'mode': 'primary',
            'arch': '<qweb/>'
        }))
        self.assertEqual(view.mode, 'primary')

        with self.assertRaises(IntegrityError):
            self.create({
                'inherit_id': None,
                'mode': 'extension',
                'arch': '<qweb/>'
            })

    @openerp.tools.mute_logger('openerp.sql_db')
    def testPurePrimaryToExtension(self):
        """
        A primary view with inherit_id=None can't be converted to extension
        """
        view_pure_primary = self.browse(self.create({
            'inherit_id': None,
            'arch': '<qweb/>'
        }))
        with self.assertRaises(IntegrityError):
            view_pure_primary.write({'mode': 'extension'})

    def testInheritPrimaryToExtension(self):
        """
        A primary view with an inherit_id can be converted to extension
        """
        base = self.create({'inherit_id': None, 'arch': '<qweb/>'})
        view = self.browse(self.create({
            'inherit_id': base,
            'mode': 'primary',
            'arch': '<qweb/>'
        }))
        view.write({'mode': 'extension'})

    def testDefaultExtensionToPrimary(self):
        """
        An extension view can be converted to primary
        """
        base = self.create({'inherit_id': None, 'arch': '<qweb/>'})
        view = self.browse(self.create({
            'inherit_id': base,
            'arch': '<qweb/>'
        }))
        view.write({'mode': 'primary'})
class TestDefaultView(ViewCase):
    """Checks that default_view picks the lowest-priority primary view."""

    def testDefaultViewBase(self):
        # Two primary root views: the one with the lower priority wins.
        self.create({
            'inherit_id': False,
            'priority': 10,
            'mode': 'primary',
            'arch': '<qweb/>',
        })
        v2 = self.create({
            'inherit_id': False,
            'priority': 1,
            'mode': 'primary',
            'arch': '<qweb/>',
        })
        default = self.Views.default_view(self.cr, self.uid, False, 'qweb')
        self.assertEqual(
            default, v2,
            "default_view should get the view with the lowest priority for "
            "a (model, view_type) pair"
        )

    def testDefaultViewPrimary(self):
        # A primary child (inherit_id set but mode=primary) also competes:
        # v3 has the lowest priority of the three primaries and wins.
        v1 = self.create({
            'inherit_id': False,
            'priority': 10,
            'mode': 'primary',
            'arch': '<qweb/>',
        })
        self.create({
            'inherit_id': False,
            'priority': 5,
            'mode': 'primary',
            'arch': '<qweb/>',
        })
        v3 = self.create({
            'inherit_id': v1,
            'priority': 1,
            'mode': 'primary',
            'arch': '<qweb/>',
        })
        default = self.Views.default_view(self.cr, self.uid, False, 'qweb')
        self.assertEqual(
            default, v3,
            "default_view should get the view with the lowest priority for "
            "a (model, view_type) pair in all the primary tables"
        )
class TestViewCombined(ViewCase):
    """
    * When asked for a view, instead of looking for the closest parent with
      inherit_id=False look for mode=primary
    * If root.inherit_id, resolve the arch for root.inherit_id (?using which
      model?), then apply root's inheritance specs to it
    * Apply inheriting views on top
    """
    def setUp(self):
        super(TestViewCombined, self).setUp()
        # Model 'a': one root (a1), two extensions (a2 prio 5, a3 default),
        # one primary child (a4).
        self.a1 = self.create({
            'model': 'a',
            'arch': '<qweb><a1/></qweb>'
        })
        self.a2 = self.create({
            'model': 'a',
            'inherit_id': self.a1,
            'priority': 5,
            'arch': '<xpath expr="//a1" position="after"><a2/></xpath>'
        })
        self.a3 = self.create({
            'model': 'a',
            'inherit_id': self.a1,
            'arch': '<xpath expr="//a1" position="after"><a3/></xpath>'
        })
        # mode=primary should be an inheritance boundary in both direction,
        # even within a model it should not extend the parent
        self.a4 = self.create({
            'model': 'a',
            'inherit_id': self.a1,
            'mode': 'primary',
            'arch': '<xpath expr="//a1" position="after"><a4/></xpath>',
        })
        # Model 'b': a primary view rooted on a3 (cross-model), plus one
        # extension of it.
        self.b1 = self.create({
            'model': 'b',
            'inherit_id': self.a3,
            'mode': 'primary',
            'arch': '<xpath expr="//a1" position="after"><b1/></xpath>'
        })
        self.b2 = self.create({
            'model': 'b',
            'inherit_id': self.b1,
            'arch': '<xpath expr="//a1" position="after"><b2/></xpath>'
        })
        # Model 'c': a primary view rooted on a1 with two chained extensions
        # of differing priorities.
        self.c1 = self.create({
            'model': 'c',
            'inherit_id': self.a1,
            'mode': 'primary',
            'arch': '<xpath expr="//a1" position="after"><c1/></xpath>'
        })
        self.c2 = self.create({
            'model': 'c',
            'inherit_id': self.c1,
            'priority': 5,
            'arch': '<xpath expr="//a1" position="after"><c2/></xpath>'
        })
        self.c3 = self.create({
            'model': 'c',
            'inherit_id': self.c2,
            'priority': 10,
            'arch': '<xpath expr="//a1" position="after"><c3/></xpath>'
        })
        # Model 'd': a primary view rooted on b1 — two cross-model hops
        # away from a1.
        self.d1 = self.create({
            'model': 'd',
            'inherit_id': self.b1,
            'mode': 'primary',
            'arch': '<xpath expr="//a1" position="after"><d1/></xpath>'
        })

    def test_basic_read(self):
        # Reading the root applies its (non-primary) extensions a3 and a2.
        arch = self.read_combined(self.a1)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.a1(),
                E.a3(),
                E.a2(),
            ), arch)

    def test_read_from_child(self):
        # Reading an extension resolves to the same combined root arch.
        arch = self.read_combined(self.a3)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.a1(),
                E.a3(),
                E.a2(),
            ), arch)

    def test_read_from_child_primary(self):
        # A primary child's own spec is applied first, then the regular
        # extensions of the root.
        arch = self.read_combined(self.a4)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.a1(),
                E.a4(),
                E.a3(),
                E.a2(),
            ), arch)

    def test_cross_model_simple(self):
        # Resolving c2 walks up through c1 (primary, cross-model) to a1,
        # then layers c's chain and a's extensions.
        arch = self.read_combined(self.c2)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.a1(),
                E.c3(),
                E.c2(),
                E.c1(),
                E.a3(),
                E.a2(),
            ), arch)

    def test_cross_model_double(self):
        # Two primary hops: d1 -> b1 -> a3 -> a1.
        arch = self.read_combined(self.d1)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.a1(),
                E.d1(),
                E.b2(),
                E.b1(),
                E.a3(),
                E.a2(),
            ), arch)
class TestOptionalViews(ViewCase):
    """
    Tests ability to enable/disable inherited views, formerly known as
    inherit_option_id
    """
    def setUp(self):
        super(TestOptionalViews, self).setUp()
        # Base view plus three extensions: v1 and v2 active, v3 inactive.
        self.v0 = self.create({
            'model': 'a',
            'arch': '<qweb><base/></qweb>',
        })
        self.v1 = self.create({
            'model': 'a',
            'inherit_id': self.v0,
            'active': True,
            'priority': 10,
            'arch': '<xpath expr="//base" position="after"><v1/></xpath>',
        })
        self.v2 = self.create({
            'model': 'a',
            'inherit_id': self.v0,
            'active': True,
            'priority': 9,
            'arch': '<xpath expr="//base" position="after"><v2/></xpath>',
        })
        self.v3 = self.create({
            'model': 'a',
            'inherit_id': self.v0,
            'active': False,
            'priority': 8,
            'arch': '<xpath expr="//base" position="after"><v3/></xpath>'
        })

    def test_applied(self):
        """ mandatory and enabled views should be applied
        """
        arch = self.read_combined(self.v0)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.base(),
                E.v1(),
                E.v2(),
            )
        )

    def test_applied_state_toggle(self):
        """ Change active states of v2 and v3, check that the results
        are as expected
        """
        # Disable v2: only v1 remains applied.
        self.browse(self.v2).toggle()
        arch = self.read_combined(self.v0)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.base(),
                E.v1(),
            )
        )

        # Enable v3: applied alongside v1.
        self.browse(self.v3).toggle()
        arch = self.read_combined(self.v0)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.base(),
                E.v1(),
                E.v3(),
            )
        )

        # Re-enable v2: all three extensions applied.
        self.browse(self.v2).toggle()
        arch = self.read_combined(self.v0)['arch']
        self.assertEqual(
            ET.fromstring(arch),
            E.qweb(
                E.base(),
                E.v1(),
                E.v2(),
                E.v3(),
            )
        )
# NOTE(review): class name has a typo ("Extentions" -> "Extensions"); left
# as-is because test selection/discovery may reference the class by name.
class TestXPathExtentions(common.BaseCase):
    def test_hasclass(self):
        """The custom hasclass() XPath extension matches nodes carrying
        *all* of the given CSS classes in their class attribute.
        """
        tree = E.node(
            E.node({'class': 'foo bar baz'}),
            E.node({'class': 'foo bar'}),
            {'class': "foo"})
        # all three nodes carry "foo"
        self.assertEqual(
            len(tree.xpath('//node[hasclass("foo")]')),
            3)
        # two nodes carry "bar"
        self.assertEqual(
            len(tree.xpath('//node[hasclass("bar")]')),
            2)
        # only the first child carries "baz"
        self.assertEqual(
            len(tree.xpath('//node[hasclass("baz")]')),
            1)
        # combining with not(): "foo" but not "bar" -> root only
        self.assertEqual(
            len(tree.xpath('//node[hasclass("foo")][not(hasclass("bar"))]')),
            1)
        # multiple arguments require all classes to be present
        self.assertEqual(
            len(tree.xpath('//node[hasclass("foo", "baz")]')),
            1)
| agpl-3.0 |
xorpaul/shinken | test/test_poller_addition.py | 1 | 10776 | #!/usr/bin/env python
# Copyright (C) 2009-2010:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
class GoodArbiter(ArbiterLink):
    """Arbiter stub that always reports itself alive and configured."""
    # To lie about satellites
    def ping(self):
        print "Dummy OK for", self.get_name()
        self.set_alive()

    def have_conf(self, i):
        return True

    def do_not_run(self):
        pass
class GoodScheduler(SchedulerLink):
    """Scheduler stub that always pings OK and accepts any configuration."""
    # To lie about satellites
    def ping(self):
        print "Dummy OK for", self.get_name()
        self.set_alive()

    def have_conf(self, i):
        return True

    def put_conf(self, conf):
        return True
class BadScheduler(SchedulerLink):
    """Scheduler stub that always fails its ping and never has a conf."""
    def ping(self):
        print "Dummy bad ping", self.get_name()
        self.add_failed_check_attempt()

    def have_conf(self, i):
        return False
class GoodPoller(PollerLink):
    """Poller stub that always pings OK and accepts any configuration."""
    # To lie about satellites
    def ping(self):
        print "Dummy OK for", self.get_name()
        self.set_alive()

    def put_conf(self, conf):
        return True
class BadPoller(PollerLink):
    """Poller stub that always fails its ping."""
    def ping(self):
        print "Dummy bad ping", self.get_name()
        self.add_failed_check_attempt()
class GoodReactionner(ReactionnerLink):
    """Reactionner stub that always pings OK and accepts any configuration."""
    # To lie about satellites
    def ping(self):
        print "Dummy OK for", self.get_name()
        self.set_alive()

    def put_conf(self, conf):
        return True
class BadReactionner(ReactionnerLink):
    """Reactionner stub that always fails its ping."""
    def ping(self):
        print "Dummy bad ping", self.get_name()
        self.add_failed_check_attempt()
class GoodBroker(BrokerLink):
    """Broker stub that always pings OK and accepts any configuration."""
    # To lie about satellites
    def ping(self):
        print "Dummy OK for", self.get_name()
        self.set_alive()

    def put_conf(self, conf):
        return True
class BadBroker(BrokerLink):
    """Broker stub that always fails its ping."""
    def ping(self):
        print "Dummy bad ping", self.get_name()
        self.add_failed_check_attempt()
class TestPollerAddition(ShinkenTest):
    """End-to-end dispatcher scenario: satellites named *-all-2 fail three
    consecutive pings and are declared dead, configuration is dispatched to
    the healthy *-all-1 satellites, then a new poller is added at runtime
    via the ADD_SIMPLE_POLLER external command and picked up by the
    dispatcher.
    """
    def setUp(self):
        self.setup_with_file('etc/nagios_dispatcher.cfg')

    def test_simple_dispatch_and_addition(self):
        print "The dispatcher", self.dispatcher
        # dummy for the arbiter
        for a in self.conf.arbiters:
            a.__class__ = GoodArbiter
        print "Preparing schedulers"
        scheduler1 = self.conf.schedulers.find_by_name('scheduler-all-1')
        self.assert_(scheduler1 is not None)
        scheduler1.__class__ = GoodScheduler
        scheduler2 = self.conf.schedulers.find_by_name('scheduler-all-2')
        self.assert_(scheduler2 is not None)
        scheduler2.__class__ = BadScheduler
        print "Preparing pollers"
        poller1 = self.conf.pollers.find_by_name('poller-all-1')
        self.assert_(poller1 is not None)
        poller1.__class__ = GoodPoller
        poller2 = self.conf.pollers.find_by_name('poller-all-2')
        self.assert_(poller2 is not None)
        poller2.__class__ = BadPoller
        print "Preparing reactionners"
        reactionner1 = self.conf.reactionners.find_by_name('reactionner-all-1')
        self.assert_(reactionner1 is not None)
        reactionner1.__class__ = GoodReactionner
        reactionner2 = self.conf.reactionners.find_by_name('reactionner-all-2')
        self.assert_(reactionner2 is not None)
        reactionner2.__class__ = BadReactionner
        print "Preparing brokers"
        broker1 = self.conf.brokers.find_by_name('broker-all-1')
        self.assert_(broker1 is not None)
        broker1.__class__ = GoodBroker
        broker2 = self.conf.brokers.find_by_name('broker-all-2')
        self.assert_(broker2 is not None)
        broker2.__class__ = BadBroker

        # Ping all elements. Should have 1 as OK, 2 as
        # one bad attempt (3 max)
        self.dispatcher.check_alive()
        # Check good values
        self.assert_(scheduler1.alive == True)
        self.assert_(scheduler1.attempt == 0)
        self.assert_(scheduler1.reachable == True)
        # still alive, just unreach
        self.assert_(scheduler2.alive == True)
        self.assert_(scheduler2.attempt == 1)
        self.assert_(scheduler2.reachable == False)
        # and others satellites too
        self.assert_(poller1.alive == True)
        self.assert_(poller1.attempt == 0)
        self.assert_(poller1.reachable == True)
        # still alive, just unreach
        self.assert_(poller2.alive == True)
        self.assert_(poller2.attempt == 1)
        self.assert_(poller2.reachable == False)
        # and others satellites too
        self.assert_(reactionner1.alive == True)
        self.assert_(reactionner1.attempt == 0)
        self.assert_(reactionner1.reachable == True)
        # still alive, just unreach
        self.assert_(reactionner2.alive == True)
        self.assert_(reactionner2.attempt == 1)
        self.assert_(reactionner2.reachable == False)
        # and others satellites too
        self.assert_(broker1.alive == True)
        self.assert_(broker1.attempt == 0)
        self.assert_(broker1.reachable == True)
        # still alive, just unreach
        self.assert_(broker2.alive == True)
        self.assert_(broker2.attempt == 1)
        self.assert_(broker2.reachable == False)

        # NOTE(review): real 60s sleeps make this test take minutes;
        # presumably needed so check_alive re-pings — confirm before removing.
        time.sleep(60)

        ### Now add another attempt, still alive, but attempt=2/3
        self.dispatcher.check_alive()
        # Check good values
        self.assert_(scheduler1.alive == True)
        self.assert_(scheduler1.attempt == 0)
        self.assert_(scheduler1.reachable == True)
        # still alive, just unreach
        self.assert_(scheduler2.alive == True)
        self.assert_(scheduler2.attempt == 2)
        self.assert_(scheduler2.reachable == False)
        # and others satellites too
        self.assert_(poller1.alive == True)
        self.assert_(poller1.attempt == 0)
        self.assert_(poller1.reachable == True)
        # still alive, just unreach
        self.assert_(poller2.alive == True)
        self.assert_(poller2.attempt == 2)
        self.assert_(poller2.reachable == False)
        # and others satellites too
        self.assert_(reactionner1.alive == True)
        self.assert_(reactionner1.attempt == 0)
        self.assert_(reactionner1.reachable == True)
        # still alive, just unreach
        self.assert_(reactionner2.alive == True)
        self.assert_(reactionner2.attempt == 2)
        self.assert_(reactionner2.reachable == False)
        # and others satellites too
        self.assert_(broker1.alive == True)
        self.assert_(broker1.attempt == 0)
        self.assert_(broker1.reachable == True)
        # still alive, just unreach
        self.assert_(broker2.alive == True)
        self.assert_(broker2.attempt == 2)
        self.assert_(broker2.reachable == False)

        time.sleep(60)

        ### Now we get BAD, We go DEAD for N2!
        self.dispatcher.check_alive()
        # Check good values
        self.assert_(scheduler1.alive == True)
        self.assert_(scheduler1.attempt == 0)
        self.assert_(scheduler1.reachable == True)
        # third failed attempt: now declared dead
        self.assert_(scheduler2.alive == False)
        self.assert_(scheduler2.attempt == 3)
        self.assert_(scheduler2.reachable == False)
        # and others satellites too
        self.assert_(poller1.alive == True)
        self.assert_(poller1.attempt == 0)
        self.assert_(poller1.reachable == True)
        # third failed attempt: now declared dead
        self.assert_(poller2.alive == False)
        self.assert_(poller2.attempt == 3)
        self.assert_(poller2.reachable == False)
        # and others satellites too
        self.assert_(reactionner1.alive == True)
        self.assert_(reactionner1.attempt == 0)
        self.assert_(reactionner1.reachable == True)
        # third failed attempt: now declared dead
        self.assert_(reactionner2.alive == False)
        self.assert_(reactionner2.attempt == 3)
        self.assert_(reactionner2.reachable == False)
        # and others satellites too
        self.assert_(broker1.alive == True)
        self.assert_(broker1.attempt == 0)
        self.assert_(broker1.reachable == True)
        # third failed attempt: now declared dead
        self.assert_(broker2.alive == False)
        self.assert_(broker2.attempt == 3)
        self.assert_(broker2.reachable == False)

        # Now we check how we should dispatch confs
        self.dispatcher.check_dispatch()
        # the conf should not be in a good shape
        self.assert_(self.dispatcher.dispatch_ok == False)
        # Now we really dispatch them!
        self.dispatcher.dispatch()
        self.assert_(self.any_log_match('Dispatch OK of conf in scheduler scheduler-all-1'))
        self.assert_(self.any_log_match('Dispatch OK of configuration 0 to reactionner reactionner-all-1'))
        self.assert_(self.any_log_match('Dispatch OK of configuration 0 to poller poller-all-1'))
        self.assert_(self.any_log_match('Dispatch OK of configuration 0 to broker broker-all-1'))
        self.clear_logs()
        # And look if we really dispatch conf as we should
        for r in self.conf.realms:
            for cfg in r.confs.values():
                self.assert_(cfg.is_assigned == True)
                self.assert_(cfg.assigned_to == scheduler1)

        # Add a brand new poller at runtime through an external command.
        cmd = "[%lu] ADD_SIMPLE_POLLER;All;newpoller;localhost;7771" % int(time.time())
        ext_cmd = ExternalCommand(cmd)
        self.external_command_dispatcher.resolve_command(ext_cmd)

        # Look for the poller now
        newpoller = self.conf.pollers.find_by_name('newpoller')
        self.assert_(newpoller is not None)
        newpoller.__class__ = GoodPoller

        ### What now with our new poller object?
        self.dispatcher.check_alive()
        # Check good values
        self.assert_(newpoller.alive == True)
        self.assert_(newpoller.attempt == 0)
        self.assert_(newpoller.reachable == True)

        # Now we check how we should dispatch confs
        self.dispatcher.check_bad_dispatch()
        self.dispatcher.dispatch()
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| agpl-3.0 |
def main(request, response):
    """WPT handler echoing selected request headers back as response headers.

    Query parameters:
      headers: "|"-separated list of request header names; each one present
               on the request is echoed as an ``x-request-<name>`` response
               header.
      cors:    if present, CORS response headers are added (mirroring the
               Origin header when given, "*" otherwise) and the echoed
               headers are listed in Access-Control-Expose-Headers.

    Returns a ``(headers, body)`` pair with an empty body.
    """
    headers = []
    # Initialized unconditionally so the CORS branch can build
    # Access-Control-Expose-Headers even when "headers" is absent
    # (previously a NameError in that case).
    checked_headers = []
    if "headers" in request.GET:
        checked_headers = request.GET.first("headers").split("|")
        for header in checked_headers:
            if header in request.headers:
                headers.append(("x-request-" + header, request.headers.get(header, "")))

    if "cors" in request.GET:
        if "Origin" in request.headers:
            headers.append(("Access-Control-Allow-Origin", request.headers.get("Origin", "")))
        else:
            headers.append(("Access-Control-Allow-Origin", "*"))
        headers.append(("Access-Control-Allow-Credentials", "true"))
        headers.append(("Access-Control-Allow-Methods", "GET, POST, HEAD"))
        exposed_headers = ["x-request-" + header for header in checked_headers]
        headers.append(("Access-Control-Expose-Headers", ", ".join(exposed_headers)))
        # Allow every header the client actually sent.
        headers.append(("Access-Control-Allow-Headers", ", ".join(request.headers)))

    headers.append(("content-type", "text/plain"))
    return headers, ""
| mpl-2.0 |
fintech-circle/edx-platform | lms/djangoapps/instructor_task/tests/test_tasks.py | 1 | 23409 | """
Unit tests for LMS instructor-initiated background tasks.
Runs tasks on answers to course problems to validate that code
paths actually work.
"""
from functools import partial
import json
from uuid import uuid4
from celery.states import SUCCESS, FAILURE
import ddt
from django.utils.translation import ugettext_noop
from mock import Mock, MagicMock, patch
from nose.plugins.attrib import attr
from opaque_keys.edx.locations import i4xEncoder
from courseware.models import StudentModule
from courseware.tests.factories import StudentModuleFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.modulestore.exceptions import ItemNotFoundError
from lms.djangoapps.instructor_task.exceptions import UpdateProblemModuleStateError
from lms.djangoapps.instructor_task.models import InstructorTask
from lms.djangoapps.instructor_task.tests.test_base import InstructorTaskModuleTestCase
from lms.djangoapps.instructor_task.tests.factories import InstructorTaskFactory
from lms.djangoapps.instructor_task.tasks import (
rescore_problem,
reset_problem_attempts,
delete_problem_state,
generate_certificates,
export_ora2_data,
)
from lms.djangoapps.instructor_task.tasks_helper.misc import upload_ora2_data
PROBLEM_URL_NAME = "test_urlname"
class TestTaskFailure(Exception):
    """Exception raised by mocked tasks to simulate a task failure."""
class TestInstructorTasks(InstructorTaskModuleTestCase):
    """
    Ensure tasks behave as expected.

    Base class providing helpers to create InstructorTask entries, run a
    task class under a mocked celery current_task, and assert on the
    resulting task status / InstructorTask row.
    """
    def setUp(self):
        super(TestInstructorTasks, self).setUp()
        self.initialize_course()
        self.instructor = self.create_instructor('instructor')
        # location of the test problem used by most helpers below
        self.location = self.problem_location(PROBLEM_URL_NAME)

    def _create_input_entry(self, student_ident=None, use_problem_url=True, course_id=None, only_if_higher=False):
        """Creates a InstructorTask entry for testing."""
        task_id = str(uuid4())
        task_input = {'only_if_higher': only_if_higher}
        if use_problem_url:
            task_input['problem_url'] = self.location
        if student_ident is not None:
            task_input['student'] = student_ident
        course_id = course_id or self.course.id
        # i4xEncoder serializes the opaque problem location key
        instructor_task = InstructorTaskFactory.create(course_id=course_id,
                                                       requester=self.instructor,
                                                       task_input=json.dumps(task_input, cls=i4xEncoder),
                                                       task_key='dummy value',
                                                       task_id=task_id)
        return instructor_task

    def _get_xmodule_instance_args(self):
        """
        Calculate dummy values for parameters needed for instantiating xmodule instances.
        """
        return {
            'xqueue_callback_url_prefix': 'dummy_value',
            'request_info': {
                'username': 'dummy_username',
                'user_id': 'dummy_id',
            },
        }

    def _run_task_with_mock_celery(self, task_class, entry_id, task_id, expected_failure_message=None):
        """Submit a task and mock how celery provides a current_task."""
        self.current_task = Mock()
        self.current_task.request = Mock()
        self.current_task.request.id = task_id
        self.current_task.update_state = Mock()
        if expected_failure_message is not None:
            # make any status update blow up with a controlled failure
            self.current_task.update_state.side_effect = TestTaskFailure(expected_failure_message)
        task_args = [entry_id, self._get_xmodule_instance_args()]

        with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_get_task:
            mock_get_task.return_value = self.current_task
            return task_class.apply(task_args, task_id=task_id).get()

    def _test_missing_current_task(self, task_class):
        """Check that a task_class fails when celery doesn't provide a current_task."""
        task_entry = self._create_input_entry()
        with self.assertRaises(ValueError):
            task_class(task_entry.id, self._get_xmodule_instance_args())

    def _test_undefined_course(self, task_class):
        """Run with celery, but with no course defined."""
        task_entry = self._create_input_entry(course_id="bogus/course/id")
        with self.assertRaises(ItemNotFoundError):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)

    def _test_undefined_problem(self, task_class):
        """Run with celery, but no problem defined."""
        task_entry = self._create_input_entry()
        with self.assertRaises(ItemNotFoundError):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)

    def _test_run_with_task(self, task_class, action_name, expected_num_succeeded,
                            expected_num_skipped=0, expected_attempted=0, expected_total=0):
        """Run a task and check the number of StudentModules processed."""
        task_entry = self._create_input_entry()
        status = self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)
        # default attempted/total to succeeded + skipped when not given
        expected_attempted = expected_attempted \
            if expected_attempted else expected_num_succeeded + expected_num_skipped
        expected_total = expected_total \
            if expected_total else expected_num_succeeded + expected_num_skipped
        # check return value
        self.assertEquals(status.get('attempted'), expected_attempted)
        self.assertEquals(status.get('succeeded'), expected_num_succeeded)
        self.assertEquals(status.get('skipped'), expected_num_skipped)
        self.assertEquals(status.get('total'), expected_total)
        self.assertEquals(status.get('action_name'), action_name)
        self.assertGreater(status.get('duration_ms'), 0)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(json.loads(entry.task_output), status)
        self.assertEquals(entry.task_state, SUCCESS)

    def _test_run_with_no_state(self, task_class, action_name):
        """Run with no StudentModules defined for the current problem."""
        self.define_option_problem(PROBLEM_URL_NAME)
        self._test_run_with_task(task_class, action_name, 0)

    def _create_students_with_state(self, num_students, state=None, grade=0, max_grade=1):
        """Create students, a problem, and StudentModule objects for testing"""
        self.define_option_problem(PROBLEM_URL_NAME)
        students = [
            UserFactory.create(username='robot%d' % i, email='robot+test+%d@edx.org' % i)
            for i in xrange(num_students)
        ]
        for student in students:
            CourseEnrollmentFactory.create(course_id=self.course.id, user=student)
            StudentModuleFactory.create(course_id=self.course.id,
                                        module_state_key=self.location,
                                        student=student,
                                        grade=grade,
                                        max_grade=max_grade,
                                        state=state)
        return students

    def _assert_num_attempts(self, students, num_attempts):
        """Check the number attempts for all students is the same"""
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            self.assertEquals(state['attempts'], num_attempts)

    def _test_run_with_failure(self, task_class, expected_message):
        """Run a task and trigger an artificial failure with the given message."""
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message)

    def _test_run_with_long_error_msg(self, task_class):
        """
        Run with an error message that is so long it will require
        truncation (as well as the jettisoning of the traceback).
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 1500
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        # task_output column is capped; message is truncated with "..."
        self.assertGreater(1023, len(entry.task_output))
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message[:len(output['message']) - 3] + "...")
        self.assertNotIn('traceback', output)

    def _test_run_with_short_error_msg(self, task_class):
        """
        Run with an error message that is short enough to fit
        in the output, but long enough that the traceback won't.
        Confirm that the traceback is truncated.
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 900
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        self.assertGreater(1023, len(entry.task_output))
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message)
        # traceback itself is truncated to fit
        self.assertEquals(output['traceback'][-3:], "...")
@attr(shard=3)
@ddt.ddt
class TestRescoreInstructorTask(TestInstructorTasks):
    """Tests problem-rescoring instructor task."""

    def assert_task_output(self, output, **expected_output):
        """
        Check & compare output of the task
        """
        self.assertEqual(output.get('total'), expected_output.get('total'))
        self.assertEqual(output.get('attempted'), expected_output.get('attempted'))
        self.assertEqual(output.get('succeeded'), expected_output.get('succeeded'))
        self.assertEqual(output.get('skipped'), expected_output.get('skipped'))
        self.assertEqual(output.get('failed'), expected_output.get('failed'))
        self.assertEqual(output.get('action_name'), expected_output.get('action_name'))
        self.assertGreater(output.get('duration_ms'), expected_output.get('duration_ms', 0))

    def get_task_output(self, task_id):
        """Get and load instructor task output"""
        entry = InstructorTask.objects.get(id=task_id)
        return json.loads(entry.task_output)

    def test_rescore_missing_current_task(self):
        self._test_missing_current_task(rescore_problem)

    def test_rescore_undefined_course(self):
        self._test_undefined_course(rescore_problem)

    def test_rescore_undefined_problem(self):
        self._test_undefined_problem(rescore_problem)

    def test_rescore_with_no_state(self):
        self._test_run_with_no_state(rescore_problem, 'rescored')

    def test_rescore_with_failure(self):
        self._test_run_with_failure(rescore_problem, 'We expected this to fail')

    def test_rescore_with_long_error_msg(self):
        self._test_run_with_long_error_msg(rescore_problem)

    def test_rescore_with_short_error_msg(self):
        self._test_run_with_short_error_msg(rescore_problem)

    def test_rescoring_unrescorable(self):
        # A module without rescore support should abort the whole task.
        input_state = json.dumps({'done': True})
        num_students = 1
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        mock_instance = MagicMock()
        # remove both rescoring entry points so the task sees no support
        del mock_instance.rescore_problem
        del mock_instance.rescore
        with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal') as mock_get_module:
            mock_get_module.return_value = mock_instance
            with self.assertRaises(UpdateProblemModuleStateError):
                self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check values stored in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], "UpdateProblemModuleStateError")
        self.assertEquals(output['message'], "Specified problem does not support rescoring.")
        self.assertGreater(len(output['traceback']), 0)

    def test_rescoring_unaccessable(self):
        """
        Tests rescores a problem in a course, for all students fails if user has answered a
        problem to which user does not have access to.
        """
        input_state = json.dumps({'done': True})
        num_students = 1
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        # get_module_for_descriptor_internal returning None means "no access"
        with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal', return_value=None):
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)

        self.assert_task_output(
            output=self.get_task_output(task_entry.id),
            total=num_students,
            attempted=num_students,
            succeeded=0,
            skipped=0,
            failed=num_students,
            action_name='rescored'
        )

    def test_rescoring_success(self):
        """
        Tests rescores a problem in a course, for all students succeeds.
        """
        mock_instance = MagicMock()
        getattr(mock_instance, 'rescore').return_value = None
        mock_instance.has_submitted_answer.return_value = True
        del mock_instance.done  # old CAPA code used to use this value so we delete it here to be sure

        num_students = 10
        self._create_students_with_state(num_students)
        task_entry = self._create_input_entry()
        with patch(
            'lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal'
        ) as mock_get_module:
            mock_get_module.return_value = mock_instance
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)

        self.assert_task_output(
            output=self.get_task_output(task_entry.id),
            total=num_students,
            attempted=num_students,
            succeeded=num_students,
            skipped=0,
            failed=0,
            action_name='rescored'
        )
@attr(shard=3)
class TestResetAttemptsInstructorTask(TestInstructorTasks):
    """Tests instructor task that resets problem attempts."""
    def test_reset_missing_current_task(self):
        # Running outside a celery task context must fail.
        self._test_missing_current_task(reset_problem_attempts)
    def test_reset_undefined_course(self):
        self._test_undefined_course(reset_problem_attempts)
    def test_reset_undefined_problem(self):
        self._test_undefined_problem(reset_problem_attempts)
    def test_reset_with_no_state(self):
        self._test_run_with_no_state(reset_problem_attempts, 'reset')
    def test_reset_with_failure(self):
        self._test_run_with_failure(reset_problem_attempts, 'We expected this to fail')
    def test_reset_with_long_error_msg(self):
        self._test_run_with_long_error_msg(reset_problem_attempts)
    def test_reset_with_short_error_msg(self):
        self._test_run_with_short_error_msg(reset_problem_attempts)
    def test_reset_with_some_state(self):
        # Every student starts with a non-zero attempt count; the task should
        # reset all of them to zero.
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        num_students = 10
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        self._assert_num_attempts(students, initial_attempts)
        # run the task
        self._test_run_with_task(reset_problem_attempts, 'reset', num_students)
        # check that entries were reset
        self._assert_num_attempts(students, 0)
    def test_reset_with_zero_attempts(self):
        # Students already at zero attempts are reported as skipped (third
        # positional arg is expected number updated = 0), not as updated.
        initial_attempts = 0
        input_state = json.dumps({'attempts': initial_attempts})
        num_students = 10
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        self._assert_num_attempts(students, initial_attempts)
        # run the task
        self._test_run_with_task(reset_problem_attempts, 'reset', 0, expected_num_skipped=num_students)
        # check that entries were reset
        self._assert_num_attempts(students, 0)
    def _test_reset_with_student(self, use_email):
        """Run a reset task for one student, with several StudentModules for the problem defined."""
        num_students = 10
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            self.assertEquals(state['attempts'], initial_attempts)
        # Target an arbitrary single student (index 3), identified either by
        # email or by username depending on the flag.
        if use_email:
            student_ident = students[3].email
        else:
            student_ident = students[3].username
        task_entry = self._create_input_entry(student_ident)
        status = self._run_task_with_mock_celery(reset_problem_attempts, task_entry.id, task_entry.task_id)
        # check return value
        self.assertEquals(status.get('attempted'), 1)
        self.assertEquals(status.get('succeeded'), 1)
        self.assertEquals(status.get('total'), 1)
        self.assertEquals(status.get('action_name'), 'reset')
        self.assertGreater(status.get('duration_ms'), 0)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(json.loads(entry.task_output), status)
        self.assertEquals(entry.task_state, SUCCESS)
        # check that the correct entry was reset -- only student 3 should be
        # at zero; everyone else keeps the initial attempt count.
        for index, student in enumerate(students):
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            if index == 3:
                self.assertEquals(state['attempts'], 0)
            else:
                self.assertEquals(state['attempts'], initial_attempts)
    def test_reset_with_student_username(self):
        self._test_reset_with_student(False)
    def test_reset_with_student_email(self):
        self._test_reset_with_student(True)
@attr(shard=3)
class TestDeleteStateInstructorTask(TestInstructorTasks):
    """Tests instructor task that deletes problem state."""
    def test_delete_missing_current_task(self):
        self._test_missing_current_task(delete_problem_state)
    def test_delete_undefined_course(self):
        self._test_undefined_course(delete_problem_state)
    def test_delete_undefined_problem(self):
        self._test_undefined_problem(delete_problem_state)
    def test_delete_with_no_state(self):
        self._test_run_with_no_state(delete_problem_state, 'deleted')
    def test_delete_with_failure(self):
        self._test_run_with_failure(delete_problem_state, 'We expected this to fail')
    def test_delete_with_long_error_msg(self):
        self._test_run_with_long_error_msg(delete_problem_state)
    def test_delete_with_short_error_msg(self):
        self._test_run_with_short_error_msg(delete_problem_state)
    def test_delete_with_some_state(self):
        # Running the delete task should remove every student's StudentModule
        # row for this problem location.
        # This will create StudentModule entries -- we don't have to worry about
        # the state inside them.
        num_students = 10
        students = self._create_students_with_state(num_students)
        # check that entries were created correctly
        for student in students:
            StudentModule.objects.get(course_id=self.course.id,
                                      student=student,
                                      module_state_key=self.location)
        self._test_run_with_task(delete_problem_state, 'deleted', num_students)
        # confirm that no state can be found anymore:
        for student in students:
            with self.assertRaises(StudentModule.DoesNotExist):
                StudentModule.objects.get(course_id=self.course.id,
                                          student=student,
                                          module_state_key=self.location)
class TestCertificateGenerationnstructorTask(TestInstructorTasks):
    """Tests instructor task that generates student certificates."""
    # NOTE(review): the class name is missing an "I"
    # ("...GenerationnstructorTask" instead of "...GenerationInstructorTask").
    # Renaming is likely safe for test discovery, but confirm the name is not
    # referenced elsewhere (e.g. CI configuration) before fixing.
    def test_generate_certificates_missing_current_task(self):
        """
        Test error is raised when certificate generation task run without current task
        """
        self._test_missing_current_task(generate_certificates)
    def test_generate_certificates_task_run(self):
        """
        Test certificate generation task run without any errors
        """
        # No students are enrolled, so zero succeed/fail; the task itself is
        # still attempted exactly once.
        self._test_run_with_task(
            generate_certificates,
            'certificates generated',
            0,
            0,
            expected_attempted=1,
            expected_total=1
        )
class TestOra2ResponsesInstructorTask(TestInstructorTasks):
    """Tests instructor task that fetches ora2 response data."""
    def test_ora2_missing_current_task(self):
        self._test_missing_current_task(export_ora2_data)
    def test_ora2_with_failure(self):
        self._test_run_with_failure(export_ora2_data, 'We expected this to fail')
    def test_ora2_with_long_error_msg(self):
        self._test_run_with_long_error_msg(export_ora2_data)
    def test_ora2_with_short_error_msg(self):
        self._test_run_with_short_error_msg(export_ora2_data)
    def test_ora2_runs_task(self):
        """
        Test that export_ora2_data delegates to run_main_task with the
        expected entry id, task function and action name.
        """
        task_entry = self._create_input_entry()
        task_xmodule_args = self._get_xmodule_instance_args()
        with patch('lms.djangoapps.instructor_task.tasks.run_main_task') as mock_main_task:
            export_ora2_data(task_entry.id, task_xmodule_args)
        action_name = ugettext_noop('generated')
        # BUG FIX: the previous `assert_called_once_with_args` is not a real
        # Mock method -- attribute access on a Mock just creates a child mock,
        # so the old "assertion" silently checked nothing.  Inspect the
        # recorded call explicitly instead.  functools.partial objects do not
        # compare equal, so compare the partial's components rather than the
        # partial object itself.
        self.assertEqual(mock_main_task.call_count, 1)
        entry_id_arg, task_fn_arg, action_name_arg = mock_main_task.call_args[0]
        self.assertEqual(entry_id_arg, task_entry.id)
        self.assertEqual(task_fn_arg.func, upload_ora2_data)
        self.assertEqual(task_fn_arg.args, (task_xmodule_args,))
        self.assertEqual(action_name_arg, action_name)
| agpl-3.0 |
waheedahmed/edx-platform | scripts/runone.py | 182 | 3124 | #!/usr/bin/env python
import argparse
import os
import sys
# I want this:
# ERROR: test_update_and_fetch (edx-platform.cms.djangoapps.contentstore.tests.test_course_settings.CourseDetailsViewTest)
# to become:
# test --settings=cms.envs.test --pythonpath=. -s cms/djangoapps/contentstore/tests/test_course_settings.py:CourseDetailsViewTest.test_update_and_fetch
def find_full_path(path_to_file, start="."):
    """Find the full path where we only have a relative path from somewhere in the tree.

    Walks the directory tree rooted at `start` and joins `path_to_file` onto
    each directory visited; the first join that names an existing file wins.

    Arguments:
        path_to_file: a partial relative path (e.g. "cms/djangoapps/x/tests/test_y.py")
            whose anchor directory within the tree is unknown.
        start: directory to begin the walk from; defaults to the current
            directory, preserving the original call signature and behavior.

    Returns the matching path relative to the current working directory, or
    None when no candidate exists anywhere under `start`.
    """
    for subdir, dirs, files in os.walk(start):
        # Candidate: this directory plus the partial path we were given.
        full = os.path.relpath(os.path.join(subdir, path_to_file))
        if os.path.exists(full):
            return full
    # Make the "not found" result explicit instead of falling off the end.
    return None
def main(argv):
    """Translate a pasted test-failure description into a runner invocation.

    Accepts words like
        ERROR: test_update_and_fetch (edx-platform.cms.djangoapps.x.tests.TestClass)
    locates the file containing the test, then runs either the Django test
    runner (for cms/lms tests) or nose (for everything else), echoing the
    constructed command line first.
    """
    parser = argparse.ArgumentParser(description="Run just one test")
    parser.add_argument('--nocapture', '-s', action='store_true', help="Don't capture stdout (any stdout output will be printed immediately)")
    parser.add_argument('--pdb', action='store_true', help="Use pdb for test errors")
    parser.add_argument('--pdb-fail', action='store_true', help="Use pdb for test failures")
    parser.add_argument('words', metavar="WORDS", nargs='+', help="The description of a test failure, like 'ERROR: test_set_missing_field (courseware.tests.test_model_data.TestStudentModuleStorage)'")
    args = parser.parse_args(argv)
    words = []
    # Collect all the words, ignoring what was quoted together, and get rid of parens.
    for argword in args.words:
        words.extend(w.strip("()") for w in argword.split())
    # If it starts with "ERROR:" or "FAIL:", just ignore that.
    if words[0].endswith(':'):
        del words[0]
    # One remaining word means "dotted.path.Class.method"; two words mean
    # "method dotted.path.Class" (the nose/unittest failure format).
    if len(words) == 1:
        test_path, test_method = words[0].rsplit('.', 1)
        test_path = test_path.split('.')
    else:
        test_method = words[0]
        test_path = words[1].split('.')
    # Drop the repo-name prefix that some failure reports include.
    if test_path[0] == 'edx-platform':
        del test_path[0]
    test_class = test_path[-1]
    del test_path[-1]
    test_py_path = "%s.py" % ("/".join(test_path))
    # NOTE(review): find_full_path() can return None when the file is not
    # located; the startswith() calls below would then raise -- confirm
    # whether a friendlier error is wanted.
    test_py_path = find_full_path(test_py_path)
    test_spec = "%s:%s.%s" % (test_py_path, test_class, test_method)
    system = None
    if test_py_path.startswith('cms'):
        system = 'cms'
    elif test_py_path.startswith('lms'):
        system = 'lms'
    if system:
        # Run as a django test suite
        from django.core import management
        os.environ['DJANGO_SETTINGS_MODULE'] = system + '.envs.test'
        django_args = ["./manage.py", "test"]
        if args.nocapture:
            django_args.append("-s")
        if args.pdb:
            django_args.append("--pdb")
        if args.pdb_fail:
            django_args.append("--pdb-fail")
        django_args.append(test_spec)
        # Echo the command so the user can re-run it by hand.
        print " ".join(django_args)
        management.execute_from_command_line(django_args)
    else:
        # Run as a nose test suite
        import nose.core
        nose_args = ["nosetests"]
        if args.nocapture:
            nose_args.append("-s")
        nose_args.append(test_spec)
        print " ".join(nose_args)
        nose.core.main(argv=nose_args)
if __name__ == "__main__":
main(sys.argv[1:])
| agpl-3.0 |
StephenWeber/ansible | lib/ansible/modules/network/junos/junos_facts.py | 24 | 4232 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
---
module: junos_facts
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Collect facts from remote device running Junos
description:
- Collects fact information from a remote device running the Junos
operating system. By default, the module will collect basic fact
information from the device to be included with the hostvars.
Additional fact information can be collected based on the
configured set of arguments.
extends_documentation_fragment: junos
options:
config:
description:
- The C(config) argument instructs the fact module to collect
the configuration from the remote device. The configuration
is then included in return facts. By default, the configuration
is returned as text. The C(config_format) can be used to return
different Junos configuration formats.
required: false
default: null
config_format:
description:
- The C(config_format) argument is used to specify the desired
format of the configuration file. Devices support three
configuration file formats. By default, the configuration
from the device is returned as text. The other option xml.
If the xml option is chosen, the configuration file is
returned as both xml and json.
required: false
default: text
choices: ['xml', 'text']
requirements:
- junos-eznc
notes:
- This module requires the netconf system service be enabled on
the remote device being managed
"""
EXAMPLES = """
# the required set of connection arguments have been purposely left off
# the examples for brevity
- name: collect default set of facts
junos_facts:
- name: collect default set of facts and configuration
junos_facts:
config: yes
- name: collect default set of facts and configuration in text format
junos_facts:
config: yes
config_format: text
- name: collect default set of facts and configuration in XML and JSON format
junos_facts:
config: yes
config_format: xml
"""
RETURN = """
ansible_facts:
description: Returns the facts collect from the device
returned: always
type: dict
"""
import ansible.module_utils.junos
from ansible.module_utils.network import NetworkModule
from ansible.module_utils.junos import xml_to_string, xml_to_json
def main():
    """ Main entry point for AnsibleModule

    Collects facts from the remote Junos device and, when the `config`
    option is set, attaches the device configuration to the returned facts
    (text as-is; xml as both a string and a JSON conversion).
    """
    spec = dict(
        config=dict(type='bool'),
        config_format=dict(default='text', choices=['xml', 'text']),
        transport=dict(default='netconf', choices=['netconf'])
    )
    module = NetworkModule(argument_spec=spec,
                           supports_check_mode=True)
    result = dict(changed=False)
    facts = module.connection.get_facts()
    if '2RE' in facts:
        # Rename '2RE' to 'has_2RE' -- presumably because a key starting with
        # a digit is awkward to reference in templates/hostvars; confirm.
        facts['has_2RE'] = facts['2RE']
        del facts['2RE']
    # Normalise version_info to a plain dict so it serialises cleanly.
    facts['version_info'] = dict(facts['version_info'])
    if module.params['config'] is True:
        config_format = module.params['config_format']
        resp_config = module.config.get_config(config_format=config_format)
        if config_format in ['text']:
            facts['config'] = resp_config
        elif config_format == "xml":
            # xml returns both the raw XML (as a string) and a JSON view.
            facts['config'] = xml_to_string(resp_config)
            facts['config_json'] = xml_to_json(resp_config)
    result['ansible_facts'] = facts
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
t794104/ansible | lib/ansible/modules/storage/netapp/na_ontap_snmp.py | 38 | 4752 | #!/usr/bin/python
"""
create SNMP module to add/delete/modify SNMP user
"""
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- "Create/Delete SNMP community"
extends_documentation_fragment:
- netapp.na_ontap
module: na_ontap_snmp
options:
access_control:
description:
- "Access control for the community. The only supported value is 'ro' (read-only)"
required: true
community_name:
description:
- "The name of the SNMP community to manage."
required: true
state:
choices: ['present', 'absent']
description:
- "Whether the specified SNMP community should exist or not."
default: 'present'
short_description: NetApp ONTAP SNMP community
version_added: "2.6"
'''
EXAMPLES = """
- name: Create SNMP community
na_ontap_snmp:
state: present
community_name: communityName
access_control: 'ro'
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Delete SNMP community
na_ontap_snmp:
state: absent
community_name: communityName
access_control: 'ro'
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
"""
RETURN = """
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppONTAPSnmp(object):
    '''Class with SNMP methods, doesn't support check mode'''
    def __init__(self):
        # Start from the shared ONTAP host argument spec (hostname,
        # username, password, ...) and add this module's own options.
        self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, type='str', choices=['present', 'absent'], default='present'),
            community_name=dict(required=True, type='str'),
            access_control=dict(required=True, type='str'),
        ))
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=False
        )
        parameters = self.module.params
        # set up state variables
        self.state = parameters['state']
        self.community_name = parameters['community_name']
        self.access_control = parameters['access_control']
        if HAS_NETAPP_LIB is False:
            # fail_json exits the module; the else branch only runs when the
            # NetApp-Lib dependency is importable.
            self.module.fail_json(msg="the python NetApp-Lib module is required")
        else:
            self.server = netapp_utils.setup_na_ontap_zapi(module=self.module)
    def invoke_snmp_community(self, zapi):
        """
        Invoke zapi - add/delete take the same NaElement structure
        @return: SUCCESS / FAILURE with an error_message

        `zapi` is the ZAPI call name ('snmp-community-add' or
        'snmp-community-delete'); both take the same child elements.
        """
        snmp_community = netapp_utils.zapi.NaElement.create_node_with_children(
            zapi, **{'community': self.community_name,
                     'access-control': self.access_control})
        try:
            self.server.invoke_successfully(snmp_community, enable_tunneling=True)
        except netapp_utils.zapi.NaApiError: # return False for duplicate entry
            return False
        return True
    def add_snmp_community(self):
        """
        Adds a SNMP community
        """
        return self.invoke_snmp_community('snmp-community-add')
    def delete_snmp_community(self):
        """
        Delete a SNMP community
        """
        return self.invoke_snmp_community('snmp-community-delete')
    def apply(self):
        """
        Apply action to SNMP community
        This module is not idempotent:
        Add doesn't fail the playbook if user is trying
        to add an already existing snmp community
        """
        changed = False
        # Log the module invocation to EMS on the cluster admin vserver.
        results = netapp_utils.get_cserver(self.server)
        cserver = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=results)
        netapp_utils.ems_log_event("na_ontap_snmp", cserver)
        if self.state == 'present': # add
            if self.add_snmp_community():
                changed = True
        elif self.state == 'absent': # delete
            if self.delete_snmp_community():
                changed = True
        self.module.exit_json(changed=changed)
def main():
    '''Execute action'''
    # Build the module object and immediately run its apply() entry point.
    NetAppONTAPSnmp().apply()
if __name__ == '__main__':
main()
| gpl-3.0 |
nugget/home-assistant | homeassistant/components/mqtt/server.py | 7 | 3055 | """
Support for a local MQTT broker.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/mqtt/#use-the-embedded-broker
"""
import asyncio
import logging
import tempfile
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['hbmqtt==0.9.4']
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['http']
# None allows custom config to be created through generate_config
HBMQTT_CONFIG_SCHEMA = vol.Any(None, vol.Schema({
vol.Optional('auth'): vol.Schema({
vol.Optional('password-file'): cv.isfile,
}, extra=vol.ALLOW_EXTRA),
vol.Optional('listeners'): vol.Schema({
vol.Required('default'): vol.Schema(dict),
str: vol.Schema(dict)
})
}, extra=vol.ALLOW_EXTRA))
@asyncio.coroutine
def async_start(hass, password, server_config):
    """Initialize MQTT Server.
    This method is a coroutine.

    Returns a (success, client_config) tuple; client_config is only
    populated when the broker configuration was auto-generated here
    (server_config is None), otherwise it is None.
    """
    from hbmqtt.broker import Broker, BrokerException
    # The generated broker config references a password file on disk; hold it
    # in a NamedTemporaryFile for the duration of broker start-up (closing the
    # file in `finally` deletes it -- presumably hbmqtt has consumed it by
    # the time start() returns; confirm).
    passwd = tempfile.NamedTemporaryFile()
    try:
        if server_config is None:
            # No user-supplied config: build one (plus matching client
            # connection parameters) for this Home Assistant instance.
            server_config, client_config = generate_config(
                hass, passwd, password)
        else:
            client_config = None
        broker = Broker(server_config, hass.loop)
        yield from broker.start()
    except BrokerException:
        _LOGGER.exception("Error initializing MQTT server")
        return False, None
    finally:
        passwd.close()
    @asyncio.coroutine
    def async_shutdown_mqtt_server(event):
        """Shut down the MQTT server."""
        yield from broker.shutdown()
    # Tie broker shutdown to Home Assistant's own stop event.
    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, async_shutdown_mqtt_server)
    return True, client_config
def generate_config(hass, passwd, password):
    """Generate a configuration based on current Home Assistant instance.

    Arguments:
        hass: the Home Assistant instance (unused directly here).
        passwd: an open writable file object that receives the hbmqtt
            password file contents when a password is set.
        password: broker password, or None for anonymous access.

    Returns a (broker_config_dict, client_config_tuple) pair; the client
    tuple matches the (host, port, username, password, certificate,
    protocol) shape expected by the MQTT client setup.
    """
    from . import PROTOCOL_311
    config = {
        'listeners': {
            'default': {
                'max-connections': 50000,
                'bind': '0.0.0.0:1883',
                'type': 'tcp',
            },
            'ws-1': {
                'bind': '0.0.0.0:8080',
                'type': 'ws',
            },
        },
        'auth': {
            # Anonymous connections are only allowed when no password is set.
            'allow-anonymous': password is None
        },
        'plugins': ['auth_anonymous'],
        'topic-check': {
            'enabled': True,
            'plugins': ['topic_taboo'],
        },
    }
    if password:
        username = 'homeassistant'
        # Encrypt with what hbmqtt uses to verify
        from passlib.apps import custom_app_context
        passwd.write(
            'homeassistant:{}\n'.format(
                custom_app_context.encrypt(password)).encode('utf-8'))
        passwd.flush()
        config['auth']['password-file'] = passwd.name
        config['plugins'].append('auth_file')
    else:
        username = None
    client_config = ('localhost', 1883, username, password, None, PROTOCOL_311)
    return config, client_config
| apache-2.0 |
Ballz0fSteel/Umeko | lib/youtube_dl/extractor/leeco.py | 23 | 13111 | # coding: utf-8
from __future__ import unicode_literals
import base64
import datetime
import hashlib
import re
import time
from .common import InfoExtractor
from ..compat import (
compat_ord,
compat_str,
compat_urllib_parse_urlencode,
)
from ..utils import (
determine_ext,
encode_data_uri,
ExtractorError,
int_or_none,
orderedSet,
parse_iso8601,
str_or_none,
url_basename,
urshift,
)
class LeIE(InfoExtractor):
IE_DESC = '乐视网'
_VALID_URL = r'https?://(?:www\.le\.com/ptv/vplay|(?:sports\.le|(?:www\.)?lesports)\.com/(?:match|video))/(?P<id>\d+)\.html'
_GEO_COUNTRIES = ['CN']
_URL_TEMPLATE = 'http://www.le.com/ptv/vplay/%s.html'
_TESTS = [{
'url': 'http://www.le.com/ptv/vplay/22005890.html',
'md5': 'edadcfe5406976f42f9f266057ee5e40',
'info_dict': {
'id': '22005890',
'ext': 'mp4',
'title': '第87届奥斯卡颁奖礼完美落幕 《鸟人》成最大赢家',
'description': 'md5:a9cb175fd753e2962176b7beca21a47c',
},
'params': {
'hls_prefer_native': True,
},
}, {
'url': 'http://www.le.com/ptv/vplay/1415246.html',
'info_dict': {
'id': '1415246',
'ext': 'mp4',
'title': '美人天下01',
'description': 'md5:28942e650e82ed4fcc8e4de919ee854d',
},
'params': {
'hls_prefer_native': True,
},
}, {
'note': 'This video is available only in Mainland China, thus a proxy is needed',
'url': 'http://www.le.com/ptv/vplay/1118082.html',
'md5': '2424c74948a62e5f31988438979c5ad1',
'info_dict': {
'id': '1118082',
'ext': 'mp4',
'title': '与龙共舞 完整版',
'description': 'md5:7506a5eeb1722bb9d4068f85024e3986',
},
'params': {
'hls_prefer_native': True,
},
}, {
'url': 'http://sports.le.com/video/25737697.html',
'only_matching': True,
}, {
'url': 'http://www.lesports.com/match/1023203003.html',
'only_matching': True,
}, {
'url': 'http://sports.le.com/match/1023203003.html',
'only_matching': True,
}]
# ror() and calc_time_key() are reversed from a embedded swf file in LetvPlayer.swf
def ror(self, param1, param2):
_loc3_ = 0
while _loc3_ < param2:
param1 = urshift(param1, 1) + ((param1 & 1) << 31)
_loc3_ += 1
return param1
def calc_time_key(self, param1):
_loc2_ = 185025305
return self.ror(param1, _loc2_ % 17) ^ _loc2_
# see M3U8Encryption class in KLetvPlayer.swf
@staticmethod
def decrypt_m3u8(encrypted_data):
if encrypted_data[:5].decode('utf-8').lower() != 'vc_01':
return encrypted_data
encrypted_data = encrypted_data[5:]
_loc4_ = bytearray(2 * len(encrypted_data))
for idx, val in enumerate(encrypted_data):
b = compat_ord(val)
_loc4_[2 * idx] = b // 16
_loc4_[2 * idx + 1] = b % 16
idx = len(_loc4_) - 11
_loc4_ = _loc4_[idx:] + _loc4_[:idx]
_loc7_ = bytearray(len(encrypted_data))
for i in range(len(encrypted_data)):
_loc7_[i] = _loc4_[2 * i] * 16 + _loc4_[2 * i + 1]
return bytes(_loc7_)
def _check_errors(self, play_json):
# Check for errors
playstatus = play_json['msgs']['playstatus']
if playstatus['status'] == 0:
flag = playstatus['flag']
if flag == 1:
self.raise_geo_restricted()
else:
raise ExtractorError('Generic error. flag = %d' % flag, expected=True)
def _real_extract(self, url):
media_id = self._match_id(url)
page = self._download_webpage(url, media_id)
play_json_flash = self._download_json(
'http://player-pc.le.com/mms/out/video/playJson',
media_id, 'Downloading flash playJson data', query={
'id': media_id,
'platid': 1,
'splatid': 101,
'format': 1,
'source': 1000,
'tkey': self.calc_time_key(int(time.time())),
'domain': 'www.le.com',
'region': 'cn',
},
headers=self.geo_verification_headers())
self._check_errors(play_json_flash)
def get_flash_urls(media_url, format_id):
nodes_data = self._download_json(
media_url, media_id,
'Download JSON metadata for format %s' % format_id,
query={
'm3v': 1,
'format': 1,
'expect': 3,
'tss': 'ios',
})
req = self._request_webpage(
nodes_data['nodelist'][0]['location'], media_id,
note='Downloading m3u8 information for format %s' % format_id)
m3u8_data = self.decrypt_m3u8(req.read())
return {
'hls': encode_data_uri(m3u8_data, 'application/vnd.apple.mpegurl'),
}
extracted_formats = []
formats = []
playurl = play_json_flash['msgs']['playurl']
play_domain = playurl['domain'][0]
for format_id, format_data in playurl.get('dispatch', []).items():
if format_id in extracted_formats:
continue
extracted_formats.append(format_id)
media_url = play_domain + format_data[0]
for protocol, format_url in get_flash_urls(media_url, format_id).items():
f = {
'url': format_url,
'ext': determine_ext(format_data[1]),
'format_id': '%s-%s' % (protocol, format_id),
'protocol': 'm3u8_native' if protocol == 'hls' else 'http',
'quality': int_or_none(format_id),
}
if format_id[-1:] == 'p':
f['height'] = int_or_none(format_id[:-1])
formats.append(f)
self._sort_formats(formats, ('height', 'quality', 'format_id'))
publish_time = parse_iso8601(self._html_search_regex(
r'发布时间 ([^<>]+) ', page, 'publish time', default=None),
delimiter=' ', timezone=datetime.timedelta(hours=8))
description = self._html_search_meta('description', page, fatal=False)
return {
'id': media_id,
'formats': formats,
'title': playurl['title'],
'thumbnail': playurl['pic'],
'description': description,
'timestamp': publish_time,
}
class LePlaylistIE(InfoExtractor):
_VALID_URL = r'https?://[a-z]+\.le\.com/(?!video)[a-z]+/(?P<id>[a-z0-9_]+)'
_TESTS = [{
'url': 'http://www.le.com/tv/46177.html',
'info_dict': {
'id': '46177',
'title': '美人天下',
'description': 'md5:395666ff41b44080396e59570dbac01c'
},
'playlist_count': 35
}, {
'url': 'http://tv.le.com/izt/wuzetian/index.html',
'info_dict': {
'id': 'wuzetian',
'title': '武媚娘传奇',
'description': 'md5:e12499475ab3d50219e5bba00b3cb248'
},
# This playlist contains some extra videos other than the drama itself
'playlist_mincount': 96
}, {
'url': 'http://tv.le.com/pzt/lswjzzjc/index.shtml',
# This series is moved to http://www.le.com/tv/10005297.html
'only_matching': True,
}, {
'url': 'http://www.le.com/comic/92063.html',
'only_matching': True,
}, {
'url': 'http://list.le.com/listn/c1009_sc532002_d2_p1_o1.html',
'only_matching': True,
}]
@classmethod
def suitable(cls, url):
return False if LeIE.suitable(url) else super(LePlaylistIE, cls).suitable(url)
def _real_extract(self, url):
playlist_id = self._match_id(url)
page = self._download_webpage(url, playlist_id)
# Currently old domain names are still used in playlists
media_ids = orderedSet(re.findall(
r'<a[^>]+href="http://www\.letv\.com/ptv/vplay/(\d+)\.html', page))
entries = [self.url_result(LeIE._URL_TEMPLATE % media_id, ie='Le')
for media_id in media_ids]
title = self._html_search_meta('keywords', page,
fatal=False).split(',')[0]
description = self._html_search_meta('description', page, fatal=False)
return self.playlist_result(entries, playlist_id, playlist_title=title,
playlist_description=description)
class LetvCloudIE(InfoExtractor):
# Most of *.letv.com is changed to *.le.com on 2016/01/02
# but yuntv.letv.com is kept, so also keep the extractor name
IE_DESC = '乐视云'
_VALID_URL = r'https?://yuntv\.letv\.com/bcloud.html\?.+'
_TESTS = [{
'url': 'http://yuntv.letv.com/bcloud.html?uu=p7jnfw5hw9&vu=467623dedf',
'md5': '26450599afd64c513bc77030ad15db44',
'info_dict': {
'id': 'p7jnfw5hw9_467623dedf',
'ext': 'mp4',
'title': 'Video p7jnfw5hw9_467623dedf',
},
}, {
'url': 'http://yuntv.letv.com/bcloud.html?uu=p7jnfw5hw9&vu=ec93197892&pu=2c7cd40209&auto_play=1&gpcflag=1&width=640&height=360',
'md5': 'e03d9cc8d9c13191e1caf277e42dbd31',
'info_dict': {
'id': 'p7jnfw5hw9_ec93197892',
'ext': 'mp4',
'title': 'Video p7jnfw5hw9_ec93197892',
},
}, {
'url': 'http://yuntv.letv.com/bcloud.html?uu=p7jnfw5hw9&vu=187060b6fd',
'md5': 'cb988699a776b22d4a41b9d43acfb3ac',
'info_dict': {
'id': 'p7jnfw5hw9_187060b6fd',
'ext': 'mp4',
'title': 'Video p7jnfw5hw9_187060b6fd',
},
}]
@staticmethod
def sign_data(obj):
if obj['cf'] == 'flash':
salt = '2f9d6924b33a165a6d8b5d3d42f4f987'
items = ['cf', 'format', 'ran', 'uu', 'ver', 'vu']
elif obj['cf'] == 'html5':
salt = 'fbeh5player12c43eccf2bec3300344'
items = ['cf', 'ran', 'uu', 'bver', 'vu']
input_data = ''.join([item + obj[item] for item in items]) + salt
obj['sign'] = hashlib.md5(input_data.encode('utf-8')).hexdigest()
def _get_formats(self, cf, uu, vu, media_id):
def get_play_json(cf, timestamp):
data = {
'cf': cf,
'ver': '2.2',
'bver': 'firefox44.0',
'format': 'json',
'uu': uu,
'vu': vu,
'ran': compat_str(timestamp),
}
self.sign_data(data)
return self._download_json(
'http://api.letvcloud.com/gpc.php?' + compat_urllib_parse_urlencode(data),
media_id, 'Downloading playJson data for type %s' % cf)
play_json = get_play_json(cf, time.time())
# The server time may be different from local time
if play_json.get('code') == 10071:
play_json = get_play_json(cf, play_json['timestamp'])
if not play_json.get('data'):
if play_json.get('message'):
raise ExtractorError('Letv cloud said: %s' % play_json['message'], expected=True)
elif play_json.get('code'):
raise ExtractorError('Letv cloud returned error %d' % play_json['code'], expected=True)
else:
raise ExtractorError('Letv cloud returned an unknwon error')
def b64decode(s):
return base64.b64decode(s.encode('utf-8')).decode('utf-8')
formats = []
for media in play_json['data']['video_info']['media'].values():
play_url = media['play_url']
url = b64decode(play_url['main_url'])
decoded_url = b64decode(url_basename(url))
formats.append({
'url': url,
'ext': determine_ext(decoded_url),
'format_id': str_or_none(play_url.get('vtype')),
'format_note': str_or_none(play_url.get('definition')),
'width': int_or_none(play_url.get('vwidth')),
'height': int_or_none(play_url.get('vheight')),
})
return formats
def _real_extract(self, url):
uu_mobj = re.search(r'uu=([\w]+)', url)
vu_mobj = re.search(r'vu=([\w]+)', url)
if not uu_mobj or not vu_mobj:
raise ExtractorError('Invalid URL: %s' % url, expected=True)
uu = uu_mobj.group(1)
vu = vu_mobj.group(1)
media_id = uu + '_' + vu
formats = self._get_formats('flash', uu, vu, media_id) + self._get_formats('html5', uu, vu, media_id)
self._sort_formats(formats)
return {
'id': media_id,
'title': 'Video %s' % media_id,
'formats': formats,
}
| gpl-3.0 |
asajeffrey/servo | tests/wpt/web-platform-tests/tools/lint/rules.py | 3 | 15945 | from __future__ import unicode_literals
import abc
import inspect
import os
import re
import six
MYPY = False
if MYPY:
# MYPY is set to True when run under Mypy.
from typing import Any, List, Match, Optional, Pattern, Text, Tuple, cast
Error = Tuple[Text, Text, Text, Optional[int]]
def collapse(text):
    # type: (Text) -> Text
    """Dedent a (possibly multi-line) description and fold it onto one line."""
    cleaned = inspect.cleandoc(str(text))
    return " ".join(cleaned.split("\n"))
class Rule(six.with_metaclass(abc.ABCMeta)):
    """Abstract base class for lint rules.

    Subclasses provide ``name`` (the identifier shown in lint output) and
    ``description`` (which may contain %-style placeholders filled from the
    ``context`` tuple passed to :meth:`error`), plus an optional ``to_fix``
    hint describing how to resolve the problem.
    """
    @abc.abstractproperty
    def name(self):
        # type: () -> Text
        pass
    @abc.abstractproperty
    def description(self):
        # type: () -> Text
        pass
    # Optional human-readable remediation advice; None when no hint exists.
    to_fix = None # type: Optional[Text]
    @classmethod
    def error(cls, path, context=(), line_no=None):
        # type: (Text, Tuple[Any, ...], Optional[int]) -> Error
        """Build an (name, description, path, line_no) error tuple."""
        # Under mypy the abstract properties need casting to Text so the
        # %-format below type-checks; at runtime they are plain strings.
        if MYPY:
            name = cast(Text, cls.name)
            description = cast(Text, cls.description)
        else:
            name = cls.name
            description = cls.description
        description = description % context
        return (name, description, path, line_no)
# The Rule subclasses below are pure data: each declares a machine-readable
# ``name``, a %-template ``description``, and (optionally) ``to_fix`` text.

class MissingLink(Rule):
    name = "MISSING-LINK"
    description = "Testcase file must have a link to a spec"
    to_fix = """
        Ensure that there is a `<link rel="help" href="[url]">` for the spec.
        `MISSING-LINK` is designed to ensure that the CSS build tool can find
        the tests. Note that the CSS build system is primarily used by
        [test.csswg.org/](http://test.csswg.org/), which doesn't use
        `wptserve`, so `*.any.js` and similar tests won't work there; stick
        with the `.html` equivalent.
    """

class PathLength(Rule):
    name = "PATH LENGTH"
    description = "/%s longer than maximum path length (%d > 150)"
    to_fix = "use shorter filename to rename the test file"

class FileType(Rule):
    name = "FILE TYPE"
    description = "/%s is an unsupported file type (%s)"

class WorkerCollision(Rule):
    name = "WORKER COLLISION"
    description = collapse("""
        path ends with %s which collides with generated tests from %s files
    """)

class GitIgnoreFile(Rule):
    name = "GITIGNORE"
    description = ".gitignore found outside the root"

class MojomJSFile(Rule):
    name = "MOJOM-JS"
    description = "Don't check *.mojom.js files into WPT"
    to_fix = """
        Check if the file is already included in mojojs.zip:
        https://source.chromium.org/chromium/chromium/src/+/master:chrome/tools/build/linux/FILES.cfg
        If yes, use `loadMojoResources` from `resources/test-only-api.js` to load
        it; if not, contact ecosystem-infra@chromium.org for adding new files
        to mojojs.zip.
    """

class AhemCopy(Rule):
    name = "AHEM COPY"
    description = "Don't add extra copies of Ahem, use /fonts/Ahem.ttf"

class AhemSystemFont(Rule):
    name = "AHEM SYSTEM FONT"
    description = "Don't use Ahem as a system font, use /fonts/ahem.css"

# TODO: Add tests for this rule
class IgnoredPath(Rule):
    name = "IGNORED PATH"
    description = collapse("""
        %s matches an ignore filter in .gitignore - please add a .gitignore
        exception
    """)

class CSSCollidingTestName(Rule):
    name = "CSS-COLLIDING-TEST-NAME"
    description = "The filename %s in the %s testsuite is shared by: %s"

class CSSCollidingRefName(Rule):
    name = "CSS-COLLIDING-REF-NAME"
    description = "The filename %s is shared by: %s"

class CSSCollidingSupportName(Rule):
    name = "CSS-COLLIDING-SUPPORT-NAME"
    description = "The filename %s is shared by: %s"

class SupportWrongDir(Rule):
    name = "SUPPORT-WRONG-DIR"
    description = "Support file not in support directory"

class ParseFailed(Rule):
    name = "PARSE-FAILED"
    description = "Unable to parse file"
    to_fix = """
        examine the file to find the causes of any parse errors, and fix them.
    """

class ContentManual(Rule):
    name = "CONTENT-MANUAL"
    description = "Manual test whose filename doesn't end in '-manual'"

class ContentVisual(Rule):
    name = "CONTENT-VISUAL"
    description = "Visual test whose filename doesn't end in '-visual'"

class AbsoluteUrlRef(Rule):
    name = "ABSOLUTE-URL-REF"
    description = collapse("""
        Reference test with a reference file specified via an absolute URL:
        '%s'
    """)

class SameFileRef(Rule):
    name = "SAME-FILE-REF"
    description = "Reference test which points at itself as a reference"

class NonexistentRef(Rule):
    name = "NON-EXISTENT-REF"
    description = collapse("""
        Reference test with a non-existent '%s' relationship reference: '%s'
    """)

class MultipleTimeout(Rule):
    name = "MULTIPLE-TIMEOUT"
    description = "More than one meta name='timeout'"
    to_fix = """
        ensure each test file has only one instance of a `<meta
        name="timeout"...>` element
    """

class InvalidTimeout(Rule):
    name = "INVALID-TIMEOUT"
    description = collapse("""
        Test file with `<meta name='timeout'...>` element that has a `content`
        attribute whose value is not `long`: %s
    """)
    to_fix = "replace the value of the `content` attribute with `long`"
class MultipleTestharness(Rule):
    """Flags test files that include testharness.js more than once."""
    name = "MULTIPLE-TESTHARNESS"
    description = "More than one `<script src='/resources/testharness.js'>`"
    # Fix: the guidance previously referenced testharnessreport.js, but this
    # rule is about duplicate testharness.js includes (testharnessreport.js
    # duplication is covered by MULTIPLE-TESTHARNESSREPORT).
    to_fix = """
        ensure each test has only one `<script
        src='/resources/testharness.js'>` instance
    """
# Rules about testharnessreport.js inclusion and <meta name=variant> syntax.

class MissingTestharnessReport(Rule):
    name = "MISSING-TESTHARNESSREPORT"
    description = "Missing `<script src='/resources/testharnessreport.js'>`"
    to_fix = """
        ensure each test file contains `<script
        src='/resources/testharnessreport.js'>`
    """

class MultipleTestharnessReport(Rule):
    name = "MULTIPLE-TESTHARNESSREPORT"
    description = "More than one `<script src='/resources/testharnessreport.js'>`"

class VariantMissing(Rule):
    name = "VARIANT-MISSING"
    description = collapse("""
        Test file with a `<meta name='variant'...>` element that's missing a
        `content` attribute
    """)
    to_fix = """
        add a `content` attribute with an appropriate value to the `<meta
        name='variant'...>` element
    """

class MalformedVariant(Rule):
    name = "MALFORMED-VARIANT"
    description = collapse("""
        %s `<meta name=variant>` 'content' attribute must be the empty string
        or start with '?' or '#'
    """)
class LateTimeout(Rule):
    """Flags `<meta name=timeout>` appearing after the testharness script."""
    name = "LATE-TIMEOUT"
    # Fix: ``description`` was assigned twice; the first (short) value was
    # dead code, immediately overwritten. Only the collapsed form is kept.
    description = collapse("""
        Test file with `<meta name='timeout'...>` element after `<script
        src='/resources/testharnessreport.js'>` element
    """)
    to_fix = """
        move the `<meta name="timeout"...>` element to precede the `script`
        element.
    """
# Rules about script ordering/paths, file-open hygiene, script metadata
# comments, and path naming conventions.

class EarlyTestharnessReport(Rule):
    name = "EARLY-TESTHARNESSREPORT"
    description = collapse("""
        Test file has an instance of
        `<script src='/resources/testharnessreport.js'>` prior to
        `<script src='/resources/testharness.js'>`
    """)
    to_fix = "flip the order"

class EarlyTestdriverVendor(Rule):
    name = "EARLY-TESTDRIVER-VENDOR"
    description = collapse("""
        Test file has an instance of
        `<script src='/resources/testdriver-vendor.js'>` prior to
        `<script src='/resources/testdriver.js'>`
    """)
    to_fix = "flip the order"

class MultipleTestdriver(Rule):
    name = "MULTIPLE-TESTDRIVER"
    description = "More than one `<script src='/resources/testdriver.js'>`"

class MissingTestdriverVendor(Rule):
    name = "MISSING-TESTDRIVER-VENDOR"
    description = "Missing `<script src='/resources/testdriver-vendor.js'>`"

class MultipleTestdriverVendor(Rule):
    name = "MULTIPLE-TESTDRIVER-VENDOR"
    description = "More than one `<script src='/resources/testdriver-vendor.js'>`"

class TestharnessPath(Rule):
    name = "TESTHARNESS-PATH"
    description = "testharness.js script seen with incorrect path"

class TestharnessReportPath(Rule):
    name = "TESTHARNESSREPORT-PATH"
    description = "testharnessreport.js script seen with incorrect path"

class TestdriverPath(Rule):
    name = "TESTDRIVER-PATH"
    description = "testdriver.js script seen with incorrect path"

class TestdriverVendorPath(Rule):
    name = "TESTDRIVER-VENDOR-PATH"
    description = "testdriver-vendor.js script seen with incorrect path"

class OpenNoMode(Rule):
    name = "OPEN-NO-MODE"
    description = "File opened without providing an explicit mode (note: binary files must be read with 'b' in the mode flags)"

class UnknownGlobalMetadata(Rule):
    name = "UNKNOWN-GLOBAL-METADATA"
    description = "Unexpected value for global metadata"

class BrokenGlobalMetadata(Rule):
    name = "BROKEN-GLOBAL-METADATA"
    description = "Invalid global metadata: %s"

class UnknownTimeoutMetadata(Rule):
    name = "UNKNOWN-TIMEOUT-METADATA"
    description = "Unexpected value for timeout metadata"

class UnknownMetadata(Rule):
    name = "UNKNOWN-METADATA"
    description = "Unexpected kind of metadata"

class StrayMetadata(Rule):
    name = "STRAY-METADATA"
    description = "Metadata comments should start the file"

class IndentedMetadata(Rule):
    name = "INDENTED-METADATA"
    description = "Metadata comments should start the line"

class BrokenMetadata(Rule):
    name = "BROKEN-METADATA"
    description = "Metadata comment is not formatted correctly"

class TestharnessInOtherType(Rule):
    name = "TESTHARNESS-IN-OTHER-TYPE"
    description = "testharness.js included in a %s test"

class DuplicateBasenamePath(Rule):
    name = "DUPLICATE-BASENAME-PATH"
    description = collapse("""
        File has identical basename path (path excluding extension) as
        other file(s) (found extensions: %s)
    """)
    to_fix = "rename files so they have unique basename paths"

class TentativeDirectoryName(Rule):
    name = "TENTATIVE-DIRECTORY-NAME"
    description = "Directories for tentative tests must be named exactly 'tentative'"
    to_fix = "rename directory to be called 'tentative'"
class Regexp(six.with_metaclass(abc.ABCMeta)):
    """Abstract base class for line-based regular-expression lint rules.

    Subclasses declare a byte ``pattern`` plus a ``name``/``description``
    pair; the pattern is compiled once per instance and applied to each
    line of files whose extension matches ``file_extensions``.
    """

    @abc.abstractproperty
    def pattern(self):
        # type: () -> bytes
        pass

    @abc.abstractproperty
    def name(self):
        # type: () -> Text
        pass

    @abc.abstractproperty
    def description(self):
        # type: () -> Text
        pass

    # Restrict the rule to these extensions; None means "all files".
    file_extensions = None  # type: Optional[List[Text]]

    def __init__(self):
        # type: () -> None
        # Compile once so per-line search() calls are cheap.
        self._re = re.compile(self.pattern)  # type: Pattern[bytes]

    def applies(self, path):
        # type: (Text) -> bool
        """Return True if this rule should be run against *path*."""
        return (self.file_extensions is None or
                os.path.splitext(path)[1] in self.file_extensions)

    def search(self, line):
        # type: (bytes) -> Optional[Match[bytes]]
        """Return a match object if *line* violates the rule, else None."""
        return self._re.search(line)
# Concrete Regexp rules.  Patterns are bytes because lines are scanned
# without decoding; most rules are limited to test-content extensions.

class TabsRegexp(Regexp):
    pattern = b"^\t"
    name = "INDENT TABS"
    description = "Test-file line starts with one or more tab characters"
    to_fix = "use spaces to replace any tab characters at beginning of lines"

class CRRegexp(Regexp):
    pattern = b"\r$"
    name = "CR AT EOL"
    description = "Test-file line ends with CR (U+000D) character"
    to_fix = """
        reformat file so each line just has LF (U+000A) line ending (standard,
        cross-platform "Unix" line endings instead of, e.g., DOS line endings).
    """

class SetTimeoutRegexp(Regexp):
    pattern = br"setTimeout\s*\("
    name = "SET TIMEOUT"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "setTimeout used"
    to_fix = """
        replace all `setTimeout(...)` calls with `step_timeout(...)` calls
    """

class W3CTestOrgRegexp(Regexp):
    pattern = br"w3c\-test\.org"
    name = "W3C-TEST.ORG"
    description = "Test-file line has the string `w3c-test.org`"
    to_fix = """
        either replace the `w3c-test.org` string with the expression
        `{{host}}:{{ports[http][0]}}` or a generic hostname like `example.org`
    """

class WebPlatformTestRegexp(Regexp):
    pattern = br"web\-platform\.test"
    name = "WEB-PLATFORM.TEST"
    description = "Internal web-platform.test domain used"
    to_fix = """
        use [server-side substitution](https://web-platform-tests.org/writing-tests/server-pipes.html#sub),
        along with the [`.sub` filename-flag](https://web-platform-tests.org/writing-tests/file-names.html#test-features),
        to replace web-platform.test with `{{domains[]}}`
    """

class Webidl2Regexp(Regexp):
    pattern = br"webidl2\.js"
    name = "WEBIDL2.JS"
    description = "Legacy webidl2.js script used"

class ConsoleRegexp(Regexp):
    pattern = br"console\.[a-zA-Z]+\s*\("
    name = "CONSOLE"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "Test-file line has a `console.*(...)` call"
    to_fix = """
        remove the `console.*(...)` call (and in some cases, consider adding an
        `assert_*` of some kind in place of it)
    """

class GenerateTestsRegexp(Regexp):
    pattern = br"generate_tests\s*\("
    name = "GENERATE_TESTS"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "Test file line has a generate_tests call"
    to_fix = "remove the call and call `test()` a number of times instead"

class PrintRegexp(Regexp):
    pattern = br"print(?:\s|\s*\()"
    name = "PRINT STATEMENT"
    file_extensions = [".py"]
    description = collapse("""
        A server-side python support file contains a `print` statement
    """)
    to_fix = """
        remove the `print` statement or replace it with something else that
        achieves the intended effect (e.g., a logging call)
    """

class LayoutTestsRegexp(Regexp):
    pattern = br"(eventSender|testRunner|internals)\."
    name = "LAYOUTTESTS APIS"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "eventSender/testRunner/internals used; these are LayoutTests-specific APIs (WebKit/Blink)"

class MissingDepsRegexp(Regexp):
    pattern = br"[^\w]/gen/"
    name = "MISSING DEPENDENCY"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "Chromium-specific content referenced"
    to_fix = "Reimplement the test to use well-documented testing interfaces"

class SpecialPowersRegexp(Regexp):
    pattern = b"SpecialPowers"
    name = "SPECIALPOWERS API"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "SpecialPowers used; this is gecko-specific and not supported in wpt"

class TrailingWhitespaceRegexp(Regexp):
    name = "TRAILING WHITESPACE"
    description = "Whitespace at EOL"
    pattern = b"[ \t\f\v]$"
    to_fix = """Remove trailing whitespace from all lines in the file."""

class AssertThrowsRegexp(Regexp):
    # Leading [^.] avoids matching method calls such as `t.assert_throws(`.
    pattern = br"[^.]assert_throws\("
    name = "ASSERT_THROWS"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "Test-file line has an `assert_throws(...)` call"
    to_fix = """Replace with `assert_throws_dom` or `assert_throws_js` or `assert_throws_exactly`"""

class PromiseRejectsRegexp(Regexp):
    pattern = br"promise_rejects\("
    name = "PROMISE_REJECTS"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "Test-file line has a `promise_rejects(...)` call"
    to_fix = """Replace with promise_rejects_dom or promise_rejects_js or `promise_rejects_exactly`"""

class AssertPreconditionRegexp(Regexp):
    pattern = br"[^.]assert_precondition\("
    name = "ASSERT-PRECONDITION"
    file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
    description = "Test-file line has an `assert_precondition(...)` call"
    to_fix = """Replace with `assert_implements` or `assert_implements_optional`"""
| mpl-2.0 |
ProfessionalIT/maxigenios-website | sdk/google_appengine/lib/django-1.3/django/db/backends/mysql/base.py | 54 | 13894 | """
MySQL database backend for Django.
Requires MySQLdb: http://sourceforge.net/projects/mysql-python
"""
import re
import sys
try:
import MySQLdb as Database
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)
# We want version (1, 2, 1, 'final', 2) or later. We can't just use
# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
# inadvertently passes the version test.
version = Database.version_info
if (version < (1,2,1) or (version[:3] == (1, 2, 1) and
(len(version) < 5 or version[3] != 'final' or version[4] < 2))):
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__)
from MySQLdb.converters import conversions
from MySQLdb.constants import FIELD_TYPE, FLAG, CLIENT
from django.db import utils
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.mysql.client import DatabaseClient
from django.db.backends.mysql.creation import DatabaseCreation
from django.db.backends.mysql.introspection import DatabaseIntrospection
from django.db.backends.mysql.validation import DatabaseValidation
from django.utils.safestring import SafeString, SafeUnicode
# Raise exceptions for database warnings if DEBUG is on
from django.conf import settings
if settings.DEBUG:
from warnings import filterwarnings
filterwarnings("error", category=Database.Warning)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
# MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like
# timedelta in terms of actual behavior as they are signed and include days --
# and Django expects time, so we still need to override that. We also need to
# add special handling for SafeUnicode and SafeString as MySQLdb's type
# checking is too tight to catch those (see Django ticket #6052).
django_conversions = conversions.copy()
django_conversions.update({
FIELD_TYPE.TIME: util.typecast_time,
FIELD_TYPE.DECIMAL: util.typecast_decimal,
FIELD_TYPE.NEWDECIMAL: util.typecast_decimal,
})
# This should match the numerical portion of the version numbers (we can treat
# versions like 5.0.24 and 5.0.24a as the same). Based on the list of version
# at http://dev.mysql.com/doc/refman/4.1/en/news.html and
# http://dev.mysql.com/doc/refman/5.0/en/news.html .
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
# point is to raise Warnings as exceptions, this can be done with the Python
# warning module, and this is setup when the connection is created, and the
# standard util.CursorDebugWrapper can be used. Also, using sql_mode
# TRADITIONAL will automatically cause most warnings to be treated as errors.
class CursorWrapper(object):
    """
    A thin wrapper around MySQLdb's normal cursor class so that we can catch
    particular exception instances and reraise them with the right types.

    Implemented as a wrapper, rather than a subclass, so that we aren't stuck
    to the particular underlying representation returned by Connection.cursor().
    """
    # MySQL error codes that really indicate integrity violations even though
    # MySQLdb raises them as OperationalError (1048 = column cannot be null).
    codes_for_integrityerror = (1048,)

    def __init__(self, cursor):
        self.cursor = cursor

    def execute(self, query, args=None):
        # NOTE: Python 2 three-argument raise preserves the original
        # traceback while substituting Django's exception classes.
        try:
            return self.cursor.execute(query, args)
        except Database.IntegrityError, e:
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.OperationalError, e:
            # Map some error codes to IntegrityError, since they seem to be
            # misclassified and Django would prefer the more logical place.
            if e[0] in self.codes_for_integrityerror:
                raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
            raise
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]

    def executemany(self, query, args):
        try:
            return self.cursor.executemany(query, args)
        except Database.IntegrityError, e:
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.OperationalError, e:
            # Map some error codes to IntegrityError, since they seem to be
            # misclassified and Django would prefer the more logical place.
            if e[0] in self.codes_for_integrityerror:
                raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
            raise
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]

    def __getattr__(self, attr):
        # Delegate everything else (fetchone, rowcount, ...) to the real cursor.
        if attr in self.__dict__:
            return self.__dict__[attr]
        else:
            return getattr(self.cursor, attr)

    def __iter__(self):
        return iter(self.cursor)
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags describing what this MySQL backend supports."""
    empty_fetchmany_value = ()
    update_can_self_select = False
    allows_group_by_pk = True
    related_fields_match_type = True
    allow_sliced_subqueries = False
    supports_forward_references = False
    supports_long_model_names = False
    supports_microsecond_precision = False
    supports_regex_backreferencing = False
    supports_date_lookup_using_string = False
    supports_timezones = False
    requires_explicit_null_ordering_when_grouping = True
    allows_primary_key_0 = False

    def _can_introspect_foreign_keys(self):
        "Confirm support for introspected foreign keys"
        # MyISAM tables have no FK support, so detect the default storage
        # engine by creating a throwaway table and inspecting its status.
        cursor = self.connection.cursor()
        cursor.execute('CREATE TABLE INTROSPECT_TEST (X INT)')
        # This command is MySQL specific; the second column
        # will tell you the default table type of the created
        # table. Since all Django's test tables will have the same
        # table type, that's enough to evaluate the feature.
        cursor.execute('SHOW TABLE STATUS WHERE Name="INTROSPECT_TEST"')
        result = cursor.fetchone()
        cursor.execute('DROP TABLE INTROSPECT_TEST')
        return result[1] != 'MyISAM'
class DatabaseOperations(BaseDatabaseOperations):
    """MySQL-specific SQL generation helpers."""
    compiler_module = "django.db.backends.mysql.compiler"

    def date_extract_sql(self, lookup_type, field_name):
        """Return SQL extracting *lookup_type* (year, month, week_day, ...)."""
        # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
        if lookup_type == 'week_day':
            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
            # Note: WEEKDAY() returns 0-6, Monday=0.
            return "DAYOFWEEK(%s)" % field_name
        else:
            return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)

    def date_trunc_sql(self, lookup_type, field_name):
        """Return SQL truncating a datetime to *lookup_type* precision."""
        fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
        format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
        format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
        try:
            i = fields.index(lookup_type) + 1
        except ValueError:
            # Unknown lookup type: leave the value untouched.
            sql = field_name
        else:
            # Keep the real value up to the requested precision, then pad the
            # remaining components with their "zero" defaults.
            format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
            sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
        return sql

    def date_interval_sql(self, sql, connector, timedelta):
        # DAY_MICROSECOND interval format: 'days hours:minutes:seconds:microseconds'.
        return "(%s %s INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND)" % (sql, connector,
                timedelta.days, timedelta.seconds, timedelta.microseconds)

    def drop_foreignkey_sql(self):
        return "DROP FOREIGN KEY"

    def force_no_ordering(self):
        """
        "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
        columns. If no ordering would otherwise be applied, we don't want any
        implicit sorting going on.
        """
        return ["NULL"]

    def fulltext_search_sql(self, field_name):
        return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name

    def no_limit_value(self):
        # 2**64 - 1, as recommended by the MySQL documentation
        return 18446744073709551615L

    def quote_name(self, name):
        if name.startswith("`") and name.endswith("`"):
            return name # Quoting once is enough.
        return "`%s`" % name

    def random_function_sql(self):
        return 'RAND()'

    def sql_flush(self, style, tables, sequences):
        """Return SQL that empties *tables* and resets their sequences."""
        # NB: The generated SQL below is specific to MySQL
        # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
        # to clear all tables of all data
        if tables:
            # FK checks must be disabled so TRUNCATE works regardless of
            # inter-table references, then re-enabled afterwards.
            sql = ['SET FOREIGN_KEY_CHECKS = 0;']
            for table in tables:
                sql.append('%s %s;' % (style.SQL_KEYWORD('TRUNCATE'), style.SQL_FIELD(self.quote_name(table))))
            sql.append('SET FOREIGN_KEY_CHECKS = 1;')
            # 'ALTER TABLE table AUTO_INCREMENT = 1;'... style SQL statements
            # to reset sequence indices
            sql.extend(["%s %s %s %s %s;" % \
                (style.SQL_KEYWORD('ALTER'),
                 style.SQL_KEYWORD('TABLE'),
                 style.SQL_TABLE(self.quote_name(sequence['table'])),
                 style.SQL_KEYWORD('AUTO_INCREMENT'),
                 style.SQL_FIELD('= 1'),
                ) for sequence in sequences])
            return sql
        else:
            return []

    def value_to_db_datetime(self, value):
        if value is None:
            return None
        # MySQL doesn't support tz-aware datetimes
        if value.tzinfo is not None:
            raise ValueError("MySQL backend does not support timezone-aware datetimes.")
        # MySQL doesn't support microseconds
        return unicode(value.replace(microsecond=0))

    def value_to_db_time(self, value):
        if value is None:
            return None
        # MySQL doesn't support tz-aware datetimes
        if value.tzinfo is not None:
            raise ValueError("MySQL backend does not support timezone-aware datetimes.")
        # MySQL doesn't support microseconds
        return unicode(value.replace(microsecond=0))

    def year_lookup_bounds(self, value):
        # Again, no microseconds
        first = '%s-01-01 00:00:00'
        second = '%s-12-31 23:59:59.99'
        return [first % value, second % value]

    def max_name_length(self):
        # MySQL identifier length limit.
        return 64
class DatabaseWrapper(BaseDatabaseWrapper):
    """Django database wrapper for MySQL via MySQLdb."""
    vendor = 'mysql'
    # SQL fragments for Django field lookups.  BINARY forces case-sensitive
    # matching; the non-BINARY LIKE/REGEXP variants are case-insensitive.
    operators = {
        'exact': '= %s',
        'iexact': 'LIKE %s',
        'contains': 'LIKE BINARY %s',
        'icontains': 'LIKE %s',
        'regex': 'REGEXP BINARY %s',
        'iregex': 'REGEXP %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE BINARY %s',
        'endswith': 'LIKE BINARY %s',
        'istartswith': 'LIKE %s',
        'iendswith': 'LIKE %s',
    }

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Cached (major, minor, patch) tuple; populated lazily by
        # get_server_version().
        self.server_version = None
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations()
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = DatabaseValidation(self)

    def _valid_connection(self):
        """Ping the server; drop the connection if it has gone away."""
        if self.connection is not None:
            try:
                self.connection.ping()
                return True
            except DatabaseError:
                self.connection.close()
                self.connection = None
        return False

    def _cursor(self):
        """Return a CursorWrapper, (re)connecting first if necessary."""
        if not self._valid_connection():
            kwargs = {
                'conv': django_conversions,
                'charset': 'utf8',
                'use_unicode': True,
            }
            settings_dict = self.settings_dict
            if settings_dict['USER']:
                kwargs['user'] = settings_dict['USER']
            if settings_dict['NAME']:
                kwargs['db'] = settings_dict['NAME']
            if settings_dict['PASSWORD']:
                kwargs['passwd'] = settings_dict['PASSWORD']
            # A HOST starting with '/' is a Unix socket path, not a hostname.
            if settings_dict['HOST'].startswith('/'):
                kwargs['unix_socket'] = settings_dict['HOST']
            elif settings_dict['HOST']:
                kwargs['host'] = settings_dict['HOST']
            if settings_dict['PORT']:
                kwargs['port'] = int(settings_dict['PORT'])
            # We need the number of potentially affected rows after an
            # "UPDATE", not the number of changed rows.
            kwargs['client_flag'] = CLIENT.FOUND_ROWS
            kwargs.update(settings_dict['OPTIONS'])
            self.connection = Database.connect(**kwargs)
            # Teach MySQLdb to encode Django's safe-string types the same way
            # as their plain counterparts (see Django ticket #6052).
            self.connection.encoders[SafeUnicode] = self.connection.encoders[unicode]
            self.connection.encoders[SafeString] = self.connection.encoders[str]
            connection_created.send(sender=self.__class__, connection=self)
        cursor = CursorWrapper(self.connection.cursor())
        return cursor

    def _rollback(self):
        try:
            BaseDatabaseWrapper._rollback(self)
        except Database.NotSupportedError:
            # e.g. MyISAM tables have no transaction support; ignore.
            pass

    def get_server_version(self):
        """Return the server version as a tuple of ints, caching the result."""
        if not self.server_version:
            if not self._valid_connection():
                self.cursor()
            m = server_version_re.match(self.connection.get_server_info())
            if not m:
                raise Exception('Unable to determine MySQL version from version string %r' % self.connection.get_server_info())
            self.server_version = tuple([int(x) for x in m.groups()])
        return self.server_version
| mit |
gunchleoc/django | django/contrib/gis/db/backends/oracle/models.py | 475 | 2275 | """
The GeometryColumns and SpatialRefSys models for the Oracle spatial
backend.
It should be noted that Oracle Spatial does not have database tables
named according to the OGC standard, so the closest analogs are used.
For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns
model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model.
"""
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class OracleGeometryColumns(models.Model):
    "Maps to the Oracle USER_SDO_GEOM_METADATA table."
    table_name = models.CharField(max_length=32)
    column_name = models.CharField(max_length=1024)
    srid = models.IntegerField(primary_key=True)
    # TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY).

    class Meta:
        app_label = 'gis'
        # Read-only mapping onto an Oracle system view; Django must not
        # create or migrate this table.
        db_table = 'USER_SDO_GEOM_METADATA'
        managed = False

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the feature table
        name.
        """
        return 'table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the feature
        geometry column.
        """
        return 'column_name'

    def __str__(self):
        return '%s - %s (SRID: %s)' % (self.table_name, self.column_name, self.srid)
class OracleSpatialRefSys(models.Model, SpatialRefSysMixin):
    "Maps to the Oracle MDSYS.CS_SRS table."
    cs_name = models.CharField(max_length=68)
    srid = models.IntegerField(primary_key=True)
    auth_srid = models.IntegerField()
    auth_name = models.CharField(max_length=256)
    wktext = models.CharField(max_length=2046)
    # Optional geometry representing the bounds of this coordinate
    # system. By default, all are NULL in the table.
    cs_bounds = models.PolygonField(null=True)
    objects = models.GeoManager()

    class Meta:
        app_label = 'gis'
        # Read-only mapping onto an Oracle system table.
        db_table = 'CS_SRS'
        managed = False

    @property
    def wkt(self):
        # SpatialRefSysMixin expects a `wkt` attribute; Oracle stores the
        # well-known text under `wktext`.
        return self.wktext

    @classmethod
    def wkt_col(cls):
        """Return the name of the column holding the WKT string."""
        return 'wktext'
| bsd-3-clause |
vipul-sharma20/oh-mainline | vendor/packages/twisted/twisted/mail/relay.py | 28 | 3717 | # -*- test-case-name: twisted.mail.test.test_mail -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Support for relaying mail for twisted.mail"""
from twisted.mail import smtp
from twisted.python import log
from twisted.internet.address import UNIXAddress
import os
try:
import cPickle as pickle
except ImportError:
import pickle
class DomainQueuer:
    """An SMTP domain which add messages to a queue intended for relaying."""

    def __init__(self, service, authenticated=False):
        self.service = service
        # Whether the connecting client has authenticated; authenticated
        # clients are always allowed to relay (see willRelay).
        self.authed = authenticated

    def exists(self, user):
        """Check whether we will relay

        Call overridable willRelay method
        """
        if self.willRelay(user.dest, user.protocol):
            # The most cursor form of verification of the addresses
            # (both origin and destination must look like local@domain).
            orig = filter(None, str(user.orig).split('@', 1))
            dest = filter(None, str(user.dest).split('@', 1))
            if len(orig) == 2 and len(dest) == 2:
                return lambda: self.startMessage(user)
        raise smtp.SMTPBadRcpt(user)

    def willRelay(self, address, protocol):
        """Check whether we agree to relay

        The default is to relay for all connections over UNIX
        sockets and all connections from localhost.
        """
        peer = protocol.transport.getPeer()
        return self.authed or isinstance(peer, UNIXAddress) or peer.host == '127.0.0.1'

    def startMessage(self, user):
        """Add envelope to queue and returns ISMTPMessage."""
        queue = self.service.queue
        envelopeFile, smtpMessage = queue.createNewMessage()
        try:
            log.msg('Queueing mail %r -> %r' % (str(user.orig), str(user.dest)))
            # The envelope file persists (origin, destination) alongside the
            # message body so the relayer can reconstruct them later.
            pickle.dump([str(user.orig), str(user.dest)], envelopeFile)
        finally:
            envelopeFile.close()
        return smtpMessage
class RelayerMixin:
    """Mixin driving an SMTP client from queued message files.

    Each queued message is stored as two files: ``<name>-H`` (a pickled
    [origin, destination] envelope) and ``<name>-D`` (the message body).
    """

    # XXX - This is -totally- bogus
    # It opens about a -hundred- -billion- files
    # and -leaves- them open!
    def loadMessages(self, messagePaths):
        # self.messages[i] == [origin, destination, open body file];
        # self.names[i] is the base path, used for cleanup in sentMail().
        self.messages = []
        self.names = []
        for message in messagePaths:
            fp = open(message+'-H')
            try:
                messageContents = pickle.load(fp)
            finally:
                fp.close()
            # NOTE(review): the body file is intentionally(?) left open until
            # delivery -- see the XXX above.
            fp = open(message+'-D')
            messageContents.append(fp)
            self.messages.append(messageContents)
            self.names.append(message)

    def getMailFrom(self):
        # Returning None tells the SMTP client there is nothing left to send.
        if not self.messages:
            return None
        return self.messages[0][0]

    def getMailTo(self):
        if not self.messages:
            return None
        return [self.messages[0][1]]

    def getMailData(self):
        if not self.messages:
            return None
        return self.messages[0][2]

    def sentMail(self, code, resp, numOk, addresses, log):
        """Since we only use one recipient per envelope, this
        will be called with 0 or 1 addresses. We probably want
        to do something with the error message if we failed.
        """
        if code in smtp.SUCCESS:
            # At least one, i.e. all, recipients successfully delivered
            os.remove(self.names[0]+'-D')
            os.remove(self.names[0]+'-H')
        del self.messages[0]
        del self.names[0]
class SMTPRelayer(RelayerMixin, smtp.SMTPClient):
    """SMTP client that relays the queued messages at *messagePaths*."""
    def __init__(self, messagePaths, *args, **kw):
        smtp.SMTPClient.__init__(self, *args, **kw)
        self.loadMessages(messagePaths)

class ESMTPRelayer(RelayerMixin, smtp.ESMTPClient):
    """ESMTP variant of SMTPRelayer."""
    def __init__(self, messagePaths, *args, **kw):
        smtp.ESMTPClient.__init__(self, *args, **kw)
        self.loadMessages(messagePaths)
| agpl-3.0 |
bbiskup/cpp_benchmarks | vendor/google/googletest/googlemock/test/gmock-function-mocker_nc_test.py | 63 | 1346 | """Negative compilation tests for Google Mock macro MOCK_METHOD."""
import os
import sys
IS_LINUX = os.name == "posix" and os.uname()[0] == "Linux"
if not IS_LINUX:
sys.stderr.write(
"WARNING: Negative compilation tests are not supported on this platform")
sys.exit(0)
# Suppresses the 'Import not at the top of the file' lint complaint.
# pylint: disable-msg=C6204
from google3.testing.pybase import fake_target_util
from google3.testing.pybase import googletest
# pylint: enable-msg=C6204
class GMockMethodNCTest(googletest.TestCase):
    """Negative compilation tests for MOCK_METHOD."""

    # The class body is intentionally empty. The actual test*() methods
    # will be defined at run time by a call to
    # DefineNegativeCompilationTests() later.
    pass

# Defines a list of test specs, where each element is a tuple
# (test name, list of regexes for matching the compiler errors).
TEST_SPECS = [
    ("MOCK_METHOD_INVALID_CONST_SPEC",
     [r"onst cannot be recognized as a valid specification modifier"]),
]

# Define a test method in GMockNCTest for each element in TEST_SPECS.
fake_target_util.DefineNegativeCompilationTests(
    GMockMethodNCTest,
    "google3/third_party/googletest/googlemock/test/gmock-function-mocker_nc",
    "gmock-function-mocker_nc.o", TEST_SPECS)

if __name__ == "__main__":
    googletest.main()
| mit |
kenshay/ImageScripter | ProgramData/Android/ADB/platform-tools/systrace/catapult/telemetry/third_party/web-page-replay/third_party/dns/rdtypes/IN/AAAA.py | 248 | 2187 | # Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.exception
import dns.inet
import dns.rdata
import dns.tokenizer
class AAAA(dns.rdata.Rdata):
    """AAAA record.

    @ivar address: an IPv6 address
    @type address: string (in the standard IPv6 format)"""

    __slots__ = ['address']

    def __init__(self, rdclass, rdtype, address):
        super(AAAA, self).__init__(rdclass, rdtype)
        # check that it's OK (inet_pton raises on malformed addresses)
        junk = dns.inet.inet_pton(dns.inet.AF_INET6, address)
        self.address = address

    def to_text(self, origin=None, relativize=True, **kw):
        return self.address

    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        """Parse an AAAA rdata from its presentation (zone-file) form."""
        address = tok.get_identifier()
        tok.get_eol()
        return cls(rdclass, rdtype, address)

    from_text = classmethod(from_text)

    def to_wire(self, file, compress = None, origin = None):
        # Wire form is the raw 16-byte network-order address.
        file.write(dns.inet.inet_pton(dns.inet.AF_INET6, self.address))

    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        """Build an AAAA rdata from the 16 wire bytes at *current*."""
        address = dns.inet.inet_ntop(dns.inet.AF_INET6,
                                     wire[current : current + rdlen])
        return cls(rdclass, rdtype, address)

    from_wire = classmethod(from_wire)

    def _cmp(self, other):
        # Compare in binary form so textually-different but equivalent
        # representations (e.g. '::1' vs '0:0:0:0:0:0:0:1') order correctly.
        sa = dns.inet.inet_pton(dns.inet.AF_INET6, self.address)
        oa = dns.inet.inet_pton(dns.inet.AF_INET6, other.address)
        return cmp(sa, oa)
"""setuptools install script for the tilezilla package."""
import os
import sys

from setuptools import find_packages, setup

PY2 = sys.version_info[0] == 2

# Read __version__ out of tilezilla/version.py without importing the
# package (importing could fail before its dependencies are installed).
with open(os.path.join('tilezilla', 'version.py')) as f:
    for line in f:
        if '__version__' in line:
            version = line.split("=")[1].strip().strip('"').strip("'")

install_requires = [
    'click',
    'click_plugins',
    'numpy',
    'GDAL',
    'rasterio',
    'shapely',
    'clover',
    'beautifulsoup4',
    'lxml',
    'pyyaml',
    'jsonschema',
    'sqlalchemy',
    'sqlalchemy-utils',
]

# The concurrent.futures backport is only required on Python 2.
if PY2:
    install_requires += ['futures']

entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
db=tilezilla.cli.db:db
'''

setup(
    name='tilezilla',
    version=version,
    packages=find_packages(),
    package_data={'tilezilla': ['data/*']},
    include_package_data=True,
    install_requires=install_requires,
    entry_points=entry_points
)
| bsd-3-clause |
superchilli/webapp | venv/lib/python2.7/site-packages/werkzeug/useragents.py | 257 | 5418 | # -*- coding: utf-8 -*-
"""
werkzeug.useragents
~~~~~~~~~~~~~~~~~~~
This module provides a helper to inspect user agent strings. This module
is far from complete but should work for most of the currently available
browsers.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
class UserAgentParser(object):
    """A simple user agent parser.  Used by the `UserAgent`."""

    #: (pattern, platform-name) pairs, tried in order; first match wins.
    platforms = (
        ('cros', 'chromeos'),
        ('iphone|ios', 'iphone'),
        ('ipad', 'ipad'),
        (r'darwin|mac|os\s*x', 'macos'),
        ('win', 'windows'),
        (r'android', 'android'),
        (r'x11|lin(\b|ux)?', 'linux'),
        ('(sun|i86)os', 'solaris'),
        (r'nintendo\s+wii', 'wii'),
        ('irix', 'irix'),
        ('hp-?ux', 'hpux'),
        ('aix', 'aix'),
        ('sco|unix_sv', 'sco'),
        ('bsd', 'bsd'),
        ('amiga', 'amiga'),
        ('blackberry|playbook', 'blackberry'),
        ('symbian', 'symbian')
    )
    #: (pattern, browser-name) pairs, tried in order; first match wins.
    browsers = (
        ('googlebot', 'google'),
        ('msnbot', 'msn'),
        ('yahoo', 'yahoo'),
        ('ask jeeves', 'ask'),
        (r'aol|america\s+online\s+browser', 'aol'),
        ('opera', 'opera'),
        ('chrome', 'chrome'),
        ('firefox|firebird|phoenix|iceweasel', 'firefox'),
        ('galeon', 'galeon'),
        ('safari|version', 'safari'),
        ('webkit', 'webkit'),
        ('camino', 'camino'),
        ('konqueror', 'konqueror'),
        ('k-meleon', 'kmeleon'),
        ('netscape', 'netscape'),
        (r'msie|microsoft\s+internet\s+explorer|trident/.+? rv:', 'msie'),
        ('lynx', 'lynx'),
        ('links', 'links'),
        ('seamonkey|mozilla', 'seamonkey')
    )

    # BUG FIX: this template previously ended with an inline "(?i)" flag.
    # Inline flags at a non-initial position are deprecated since Python
    # 3.6 and raise an error on Python 3.11+, so case-insensitivity is now
    # applied via re.I when the patterns are compiled in __init__.
    _browser_version_re = r'(?:%s)[/\sa-z(]*(\d+[.\da-z]+)?'
    _language_re = re.compile(
        r'(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|'
        r'(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)'
    )

    def __init__(self):
        # Pre-compile all patterns once, case-insensitively.
        self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platforms]
        self.browsers = [(b, re.compile(self._browser_version_re % a, re.I))
                         for a, b in self.browsers]

    def __call__(self, user_agent):
        """Parse *user_agent* and return a ``(platform, browser, version,
        language)`` tuple.  Each element is ``None`` if it could not be
        detected.
        """
        for platform, regex in self.platforms:
            match = regex.search(user_agent)
            if match is not None:
                break
        else:
            platform = None
        for browser, regex in self.browsers:
            match = regex.search(user_agent)
            if match is not None:
                # Group 1 of the compiled browser pattern captures the
                # version number, when present.
                version = match.group(1)
                break
        else:
            browser = version = None
        match = self._language_re.search(user_agent)
        if match is not None:
            language = match.group(1) or match.group(2)
        else:
            language = None
        return platform, browser, version, language
class UserAgent(object):
    """Represents a user agent.  Pass it a WSGI environment or a user agent
    string and you can inspect some of the details from the user agent
    string via the attributes.  The following attributes exist:

    .. attribute:: string

       the raw user agent string

    .. attribute:: platform

       the browser platform.  The following platforms are currently
       recognized:

       -   `aix`
       -   `amiga`
       -   `android`
       -   `bsd`
       -   `chromeos`
       -   `hpux`
       -   `iphone`
       -   `ipad`
       -   `irix`
       -   `linux`
       -   `macos`
       -   `sco`
       -   `solaris`
       -   `wii`
       -   `windows`

    .. attribute:: browser

       the name of the browser.  The following browsers are currently
       recognized:

       -   `aol` *
       -   `ask` *
       -   `camino`
       -   `chrome`
       -   `firefox`
       -   `galeon`
       -   `google` *
       -   `kmeleon`
       -   `konqueror`
       -   `links`
       -   `lynx`
       -   `msie`
       -   `msn`
       -   `netscape`
       -   `opera`
       -   `safari`
       -   `seamonkey`
       -   `webkit`
       -   `yahoo` *

       (Browsers marked with a star (``*``) are crawlers.)

    .. attribute:: version

       the version of the browser

    .. attribute:: language

       the language of the browser
    """

    # One shared parser is enough: parsing is stateless.
    _parser = UserAgentParser()

    def __init__(self, environ_or_string):
        # Accept a WSGI environ mapping as a convenience and pull the raw
        # user agent header out of it; otherwise treat the argument as the
        # user agent string itself.
        if isinstance(environ_or_string, dict):
            environ_or_string = environ_or_string.get('HTTP_USER_AGENT', '')
        self.string = environ_or_string
        (self.platform, self.browser,
         self.version, self.language) = self._parser(environ_or_string)

    def to_header(self):
        """Convert the user agent back into an HTTP header value."""
        return self.string

    def __str__(self):
        return self.string

    def __nonzero__(self):
        # Truthy when a known browser was detected.
        return bool(self.browser)

    __bool__ = __nonzero__  # Python 3 spelling of __nonzero__

    def __repr__(self):
        return '<%s %r/%s>' % (self.__class__.__name__, self.browser, self.version)
# conceptionally this belongs in this module but because we want to lazily
# load the user agent module (which happens in wrappers.py) we have to import
# it afterwards. The class itself has the module set to this module so
# pickle, inspect and similar modules treat the object as if it was really
# implemented here.
from werkzeug.wrappers import UserAgentMixin # noqa
| mit |
ticosax/django | tests/admin_views/tests.py | 8 | 296033 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import re
import unittest
from django.contrib.admin import ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.core.urlresolvers import NoReverseMatch, resolve, reverse
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.template.response import TemplateResponse
from django.test import (
TestCase, modify_settings, override_settings, skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix, patch_logger
from django.utils import formats, six, translation
from django.utils._os import upath
from django.utils.cache import get_max_age
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.six.moves.urllib.parse import parse_qsl, urljoin, urlparse
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Answer, Article, BarAccount, Book, Category,
Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice, City,
Collector, Color, Color2, ComplexSortedPerson, CoverLetter, CustomArticle,
CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel, ExternalSubscriber,
Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount,
FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language,
MainPrepopulated, ModelWithStringPrimaryKey, OtherStory, Paper, Parent,
ParentWithDependentChildren, Person, Persona, Picture, Pizza, Plot,
PlotDetails, PluggableSearchPerson, Podcast, Post, PrePopulatedPost, Promo,
Question, Recommendation, Recommender, RelatedPrepopulated, Report,
Restaurant, RowLevelChangePermissionModel, SecretHideout, Section,
ShortMessage, Simple, State, Story, Subscriber, SuperSecretHideout,
SuperVillain, Telegram, TitleTranslation, Topping, UnchangeableObject,
UndeletableObject, UnorderedObject, Villain, Vodcast, Whatsit, Widget,
Worker, WorkHour,
)
# Exact error text shown by the admin login form for bad credentials;
# several tests below assert on this string verbatim.
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls",
                   USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
    """Shared fixture for the basic admin-view tests.

    Creates users with different permission levels, sample content
    (sections, articles, books, chapters, promos, colors, fabrics) and
    canned POST data for the Section change form's article inline formset.
    """

    @classmethod
    def setUpTestData(cls):
        # All users share the same password hash; the clear text is "secret".
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.color1 = Color.objects.create(value='Red', warm=True)
        cls.color2 = Color.objects.create(value='Orange', warm=True)
        cls.color3 = Color.objects.create(value='Blue', warm=False)
        cls.color4 = Color.objects.create(value='Green', warm=False)
        cls.fab1 = Fabric.objects.create(surface='x')
        cls.fab2 = Fabric.objects.create(surface='y')
        cls.fab3 = Fabric.objects.create(surface='plain')
        cls.b1 = Book.objects.create(name='Book 1')
        cls.b2 = Book.objects.create(name='Book 2')
        cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1)
        # BUG FIX: the second promo was previously assigned to ``cls.pro1``
        # as well, silently overwriting the first one; bind it to ``cls.pro2``.
        cls.pro2 = Promo.objects.create(name='Promo 2', book=cls.b2)
        cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1)
        cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2)
        cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2)
        cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1')
        cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2')
        # Post data for edit inline
        cls.inline_post_data = {
            "name": "Test section",
            # inline data
            "article_set-TOTAL_FORMS": "6",
            "article_set-INITIAL_FORMS": "3",
            "article_set-MAX_NUM_FORMS": "0",
            "article_set-0-id": cls.a1.pk,
            # there is no title in database, give one here or formset will fail.
            "article_set-0-title": "Norske bostaver æøå skaper problemer",
            "article_set-0-content": "<p>Middle content</p>",
            "article_set-0-date_0": "2008-03-18",
            "article_set-0-date_1": "11:54:58",
            "article_set-0-section": cls.s1.pk,
            "article_set-1-id": cls.a2.pk,
            "article_set-1-title": "Need a title.",
            "article_set-1-content": "<p>Oldest content</p>",
            "article_set-1-date_0": "2000-03-18",
            "article_set-1-date_1": "11:54:58",
            "article_set-2-id": cls.a3.pk,
            "article_set-2-title": "Need a title.",
            "article_set-2-content": "<p>Newest content</p>",
            "article_set-2-date_0": "2009-03-18",
            "article_set-2-date_1": "11:54:58",
            "article_set-3-id": "",
            "article_set-3-title": "",
            "article_set-3-content": "",
            "article_set-3-date_0": "",
            "article_set-3-date_1": "",
            "article_set-4-id": "",
            "article_set-4-title": "",
            "article_set-4-content": "",
            "article_set-4-date_0": "",
            "article_set-4-date_1": "",
            "article_set-5-id": "",
            "article_set-5-title": "",
            "article_set-5-content": "",
            "article_set-5-date_0": "",
            "article_set-5-date_1": "",
        }

    def setUp(self):
        # Tests in subclasses assume a logged-in superuser by default.
        self.client.login(username='super', password='secret')

    def tearDown(self):
        # Some tests mutate the localization format cache; reset it so
        # state does not leak between tests.
        formats.reset_format_cache()

    def assertContentBefore(self, response, text1, text2, failing_msg=None):
        """
        Testing utility asserting that text1 appears before text2 in response
        content.
        """
        self.assertEqual(response.status_code, 200)
        self.assertLess(response.content.index(force_bytes(text1)), response.content.index(force_bytes(text2)),
            failing_msg)
class AdminViewBasicTest(AdminViewBasicTestCase):
    def test_trailing_slash_required(self):
        """
        If you leave off the trailing slash, app should redirect and add it.
        """
        add_url = reverse('admin:admin_views_article_add')
        # Request the URL without its trailing slash; the admin should
        # answer with a permanent (301) redirect to the canonical URL.
        response = self.client.get(add_url[:-1])
        self.assertRedirects(response, add_url, status_code=301)
    def test_admin_static_template_tag(self):
        """
        Test that admin_static.static is pointing to the collectstatic version
        (as django.contrib.collectstatic is in installed apps).
        """
        old_url = staticfiles_storage.base_url
        staticfiles_storage.base_url = '/test/'
        try:
            self.assertEqual(static('path'), '/test/path')
        finally:
            # Always restore the module-level storage state so other
            # tests are unaffected even if the assertion fails.
            staticfiles_storage.base_url = old_url
    def test_basic_add_GET(self):
        """
        A smoke test to ensure GET on the add_view works.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        # The admin renders lazily through TemplateResponse.
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_add_with_GET_args(self):
        # GET parameters matching form field names should pre-fill the
        # corresponding inputs on the add form.
        response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'value="My Section"',
            msg_prefix="Couldn't find an input with the right value in the response")
    def test_basic_edit_GET(self):
        """
        A smoke test to ensure GET on the change_view works.
        """
        response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
        # The admin renders lazily through TemplateResponse.
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_basic_edit_GET_string_PK(self):
        """
        Ensure GET on the change_view works (returns an HTTP 404 error, see
        #11191) when passing a string as the PK argument for a model with an
        integer PK field.
        """
        # 'abc' cannot be coerced to an integer PK, so the view must 404
        # rather than raise a ValueError.
        response = self.client.get(reverse('admin:admin_views_section_change', args=('abc',)))
        self.assertEqual(response.status_code, 404)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '')
)
self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
def test_basic_inheritance_GET_string_PK(self):
"""
Ensure GET on the change_view works on inherited models (returns an
HTTP 404 error, see #19951) when passing a string as the PK argument
for a model with an integer PK field.
"""
response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)))
self.assertEqual(response.status_code, 404)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""
Ensure http response from a popup is properly escaped.
"""
post_data = {
'_popup': '1',
'title': 'title with a new\nline',
'content': 'some content',
'date_0': '2010-09-10',
'date_1': '14:55:39',
}
response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'dismissAddRelatedObjectPopup')
self.assertContains(response, 'title with a new\\u000Aline')
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
self.assertContentBefore(response, 'Oldest content', 'Middle content',
"Results of sorting on callable are out of order.")
self.assertContentBefore(response, 'Middle content', 'Newest content',
"Results of sorting on callable are out of order.")
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
self.assertContentBefore(response, 'Newest content', 'Middle content',
"Results of sorting on Model method are out of order.")
self.assertContentBefore(response, 'Middle content', 'Oldest content',
"Results of sorting on Model method are out of order.")
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
self.assertContentBefore(response, 'Oldest content', 'Middle content',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, 'Middle content', 'Newest content',
"Results of sorting on ModelAdmin method are out of order.")
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'})
self.assertContentBefore(response, '2009', '2008',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, '2008', '2000',
"Results of sorting on ModelAdmin method are out of order.")
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'})
self.assertContentBefore(response, '2000', '2008',
"Results of sorting on ModelAdmin method are out of order.")
self.assertContentBefore(response, '2008', '2009',
"Results of sorting on ModelAdmin method are out of order.")
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
# Sort by name, gender
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'})
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'})
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `Modeladmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso='ur', name='Urdu')
l2 = Language.objects.create(iso='ar', name='Arabic')
link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
response = self.client.get(reverse('admin:admin_views_language_changelist'), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'})
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# Check that we get the columns we expect if we have two columns
# that correspond to the same ordering field
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, 'Name')
self.assertContains(response, 'Colored name')
# Check order
self.assertContentBefore(response, 'Name', 'Colored name')
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
Ensures that the admin shows default sort indicators for all
kinds of 'ordering' fields: field names, method on the model
admin and model itself, and other callables. See #17252.
"""
models = [(AdminOrderedField, 'adminorderedfield'),
(AdminOrderedModelMethod, 'adminorderedmodelmethod'),
(AdminOrderedAdminMethod, 'adminorderedadminmethod'),
(AdminOrderedCallable, 'adminorderedcallable')]
for model, url in models:
model.objects.create(stuff='The Last Item', order=3)
model.objects.create(stuff='The First Item', order=1)
model.objects.create(stuff='The Middle Item', order=2)
response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
self.assertEqual(response.status_code, 200)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
# Check order of records.
self.assertContentBefore(response, 'The First Item', 'The Middle Item')
self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def test_limited_filter(self):
"""Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
This also tests relation-spanning filters (e.g. 'color__value').
"""
response = self.client.get(reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view")
self.assertNotContains(response, '<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to")
def test_relation_spanning_filters(self):
changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
response = self.client.get(changelist_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
'chap__id__exact': dict(
values=[c.id for c in Chapter.objects.all()],
test=lambda obj, value: obj.chap.id == value),
'chap__title': dict(
values=[c.title for c in Chapter.objects.all()],
test=lambda obj, value: obj.chap.title == value),
'chap__book__id__exact': dict(
values=[b.id for b in Book.objects.all()],
test=lambda obj, value: obj.chap.book.id == value),
'chap__book__name': dict(
values=[b.name for b in Book.objects.all()],
test=lambda obj, value: obj.chap.book.name == value),
'chap__book__promo__id__exact': dict(
values=[p.id for p in Promo.objects.all()],
test=lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists()),
'chap__book__promo__name': dict(
values=[p.name for p in Promo.objects.all()],
test=lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists()),
}
for filter_path, params in filters.items():
for value in params['values']:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s">' % query_string)
# ensure link works
filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context['cl'].queryset.all():
self.assertTrue(params['test'](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse('admin:admin_views_thing_changelist')
response = self.client.get(changelist_url, {'notarealfield': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {'notarealfield__whatever': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {'pub_date__gte': 'foo'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
changelist_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(changelist_url)
self.assertContains(response, '4 articles')
response = self.client.get(changelist_url, {'section__isnull': 'false'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': '0'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': 'true'})
self.assertContains(response, '1 article')
response = self.client.get(changelist_url, {'section__isnull': '1'})
self.assertContains(response, '1 article')
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
self.assertContains(response, '<a href="%s">' % reverse('admin:logout'))
self.assertContains(response, '<a href="%s">' % reverse('admin:password_change'))
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,))
link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,))
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
fail_msg = "Changelist table isn't showing the right human-readable values set by a model field 'choices' option named group."
self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
def test_named_group_field_choices_filter(self):
    """
    Ensures the filter UI shows correctly when at least one named group has
    been used in the choices option of a model field.
    """
    response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
    fail_msg = "Changelist filter isn't showing options contained inside a model field 'choices' option named group."
    # The filter sidebar must be rendered at all...
    self.assertContains(response, '<div id="changelist-filter">')
    # ...and must list the choices nested inside the named group, linking by
    # their stored codes ('x' / 'y') while showing the human-readable labels.
    self.assertContains(response,
        '<a href="?surface__exact=x">Horizontal</a>', msg_prefix=fail_msg, html=True)
    self.assertContains(response,
        '<a href="?surface__exact=y">Vertical</a>', msg_prefix=fail_msg, html=True)
def test_change_list_null_boolean_display(self):
    """A NULL boolean field is rendered with the 'unknown' icon on the changelist."""
    Post.objects.create(public=None)
    changelist_url = reverse('admin:admin_views_post_changelist')
    self.assertContains(self.client.get(changelist_url), 'icon-unknown.gif')
def test_i18n_language_non_english_default(self):
    """
    Check if the JavaScript i18n view returns an empty language catalog
    if the default language is non-English but the selected language
    is English. See #13388 and #3594 for more details.
    """
    # LANGUAGE_CODE='fr' but the active language is English: the French
    # catalog string must NOT leak into the JS i18n response.
    with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
        response = self.client.get(reverse('admin:jsi18n'))
        self.assertNotContains(response, 'Choisir une heure')
def test_i18n_language_non_english_fallback(self):
    """
    Makes sure that the fallback language is still working properly
    in cases where the selected language cannot be found.
    """
    # 'none' is not a real language code, so the view should fall back to
    # LANGUAGE_CODE ('fr') and serve the French catalog.
    with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
        response = self.client.get(reverse('admin:jsi18n'))
        self.assertContains(response, 'Choisir une heure')
def test_L10N_deactivated(self):
    """
    Check if L10N is deactivated, the JavaScript i18n view doesn't
    return localized date/time formats. Refs #14824.
    """
    with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
        response = self.client.get(reverse('admin:jsi18n'))
        # With USE_L10N off, the Russian day-first format must not appear;
        # the ISO-style default format should be served instead.
        self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
        self.assertContains(response, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
    """
    Changelist lookups not sanctioned by the ModelAdmin are rejected with a
    400 (and logged as DisallowedModelAdminLookup), while whitelisted and
    local-field lookups succeed.
    """
    # A relational lookup that is not in list_filter is rejected and logged.
    with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
        response = self.client.get(
            "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
        )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # Filters are allowed if explicitly included in list_filter
    response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist'))
    self.assertEqual(response.status_code, 200)
    response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist'))
    self.assertEqual(response.status_code, 200)
    # Filters should be allowed if they involve a local field without the
    # need to whitelist them in list_filter or date_hierarchy.
    response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
    self.assertEqual(response.status_code, 200)
    # Filtering on the FK to a multi-table-inheritance parent pointer
    # (employee__person_ptr__exact) must also be permitted.
    e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
    e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
    response = self.client.get(reverse('admin:admin_views_workhour_changelist'))
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, 'employee__person_ptr__exact')
    response = self.client.get("%s?employee__person_ptr__exact=%d" % (
        reverse('admin:admin_views_workhour_changelist'), e1.pk)
    )
    self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
    """
    TO_FIELD_VAR values are only honoured for fields the admin can verify as
    legitimately referenced; anything else yields a 400 and a
    DisallowedModelAdminToField security log entry.
    """
    # A field name that doesn't exist at all is rejected and logged.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'missing_field'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # Specifying a field that is not referred by any other model registered
    # to this admin site should raise an exception.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
    response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
    self.assertEqual(response.status_code, 200)
    # #23915 - Specifying a field referenced by another model though a m2m should be allowed.
    response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
    self.assertEqual(response.status_code, 200)
    # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
    response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
    self.assertEqual(response.status_code, 200)
    # #23329 - Specifying a field that is not referred by any other model directly registered
    # to this admin site but registered through inheritance should be allowed.
    response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
    # #23431 - Specifying a field that is only referred to by a inline of a registered
    # model should be allowed.
    response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
    # We also want to prevent the add, change, and delete views from
    # leaking a disallowed field value.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    section = Section.objects.create()
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.post(reverse('admin:admin_views_section_change', args=(section.pk,)), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.post(reverse('admin:admin_views_section_delete', args=(section.pk,)), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
def test_allowed_filtering_15103(self):
    """
    Regressions test for ticket 15103 - filtering on fields defined in a
    ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
    can break.
    """
    # Filters should be allowed if they are defined on a ForeignKey pointing to this model
    response = self.client.get("%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist'))
    self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
    """
    Regression test for ticket 20664 - ensure the pk is properly quoted.
    """
    actor = Actor.objects.create(name="Palin", age=27)
    # Appending IS_POPUP_VAR to the query string renders the changelist in
    # popup (related-lookup) mode; the pk must appear quoted in the JS call.
    response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR))
    self.assertContains(response, "opener.dismissRelatedLookupPopup(window, '%s')" % actor.pk)
def test_hide_change_password(self):
    """
    Tests if the "change password" link in the admin is hidden if the User
    does not have a usable password set.
    (against 9bea85795705d015cdadc82c68b99196a8554f5c)
    """
    # Mark the logged-in superuser's password as unusable, then check the
    # index page no longer offers the password-change link.
    user = User.objects.get(username='super')
    user.set_unusable_password()
    user.save()
    response = self.client.get(reverse('admin:index'))
    self.assertNotContains(response, reverse('admin:password_change'),
        msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.')
def test_change_view_with_show_delete_extra_context(self):
    """
    Ensured that the 'show_delete' context variable in the admin's change
    view actually controls the display of the delete button.
    Refs #10057.
    """
    # UndeletableObject's admin presumably sets show_delete=False via extra
    # context -- not visible in this chunk; the delete link must be absent.
    instance = UndeletableObject.objects.create(name='foo')
    response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,)))
    self.assertNotContains(response, 'deletelink')
def test_allows_attributeerror_to_bubble_up(self):
    """
    Ensure that AttributeErrors are allowed to bubble when raised inside
    a change list view.
    Requires a model to be created so there's something to be displayed
    Refs: #16655, #18593, and #18747
    """
    Simple.objects.create()
    # The error must propagate to the test client rather than being
    # swallowed and turned into some catch-all admin response.
    with self.assertRaises(AttributeError):
        self.client.get(reverse('admin:admin_views_simple_changelist'))
def test_changelist_with_no_change_url(self):
    """
    ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
    for change_view is removed from get_urls
    Regression test for #20934
    """
    UnchangeableObject.objects.create()
    response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist'))
    self.assertEqual(response.status_code, 200)
    # Check the format of the shown object -- shouldn't contain a change link
    self.assertContains(response, '<th class="field-__str__">UnchangeableObject object</th>', html=True)
def test_invalid_appindex_url(self):
    """
    #21056 -- URL reversing shouldn't work for nonexistent apps.
    """
    # Sanity check: the real app still reverses to the expected URL.
    expected_url = '/test_admin/admin/admin_views/'
    resolved_url = reverse('admin:app_list', kwargs={'app_label': 'admin_views'})
    self.assertEqual(expected_url, resolved_url)
    # Unknown app labels must fail, whether passed as kwargs or args.
    with self.assertRaises(NoReverseMatch):
        reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
    with self.assertRaises(NoReverseMatch):
        reverse('admin:app_list', args=('admin_views2',))
def test_resolve_admin_views(self):
    """URLs under a custom admin site resolve to views bound to that site."""
    resolved_index = resolve('/test_admin/admin4/')
    resolved_changelist = resolve('/test_admin/admin4/auth/user/')
    self.assertIs(resolved_index.func.admin_site, customadmin.simple_site)
    self.assertIsInstance(
        resolved_changelist.func.model_admin,
        customadmin.CustomPwdTemplateUserAdmin,
    )
def test_proxy_model_content_type_is_used_for_log_entries(self):
    """
    Log entries for proxy models should have the proxy model's content
    type.
    Regression test for #21084.
    """
    # for_concrete_model=False yields the proxy's own ContentType rather
    # than the concrete base model's.
    color2_content_type = ContentType.objects.get_for_model(Color2, for_concrete_model=False)
    # add
    color2_add_url = reverse('admin:admin_views_color2_add')
    self.client.post(color2_add_url, {'value': 'orange'})
    # LogEntry default ordering presumably puts the newest entry first --
    # each [0] below grabs the entry for the action just performed.
    color2_addition_log = LogEntry.objects.all()[0]
    self.assertEqual(color2_content_type, color2_addition_log.content_type)
    # change
    color_id = color2_addition_log.object_id
    color2_change_url = reverse('admin:admin_views_color2_change', args=(color_id,))
    self.client.post(color2_change_url, {'value': 'blue'})
    color2_change_log = LogEntry.objects.all()[0]
    self.assertEqual(color2_content_type, color2_change_log.content_type)
    # delete
    color2_delete_url = reverse('admin:admin_views_color2_delete', args=(color_id,))
    self.client.post(color2_delete_url)
    color2_delete_log = LogEntry.objects.all()[0]
    self.assertEqual(color2_content_type, color2_delete_log.content_type)
def test_adminsite_display_site_url(self):
    """
    #13749 - Admin should display link to front-end site 'View site'
    """
    index_page = self.client.get(reverse('admin:index'))
    self.assertEqual(index_page.context['site_url'], '/my-site-url/')
    self.assertContains(index_page, '<a href="/my-site-url/">View site</a>')
@override_settings(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    # Put this app's and the shared tests templates dirs in DIRS to take precedence
    # over the admin's templates dir.
    'DIRS': [
        os.path.join(os.path.dirname(upath(__file__)), 'templates'),
        os.path.join(os.path.dirname(os.path.dirname(upath(__file__))), 'templates'),
    ],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.debug',
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
    """Admin views rendered through overridden (custom) templates."""

    def test_custom_model_admin_templates(self):
        """A ModelAdmin can override every per-object template it renders."""
        # Test custom change list template with custom extra context
        response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
        self.assertContains(response, "var hello = 'Hello!';")
        self.assertTemplateUsed(response, 'custom_admin/change_list.html')
        # Test custom add form template
        response = self.client.get(reverse('admin:admin_views_customarticle_add'))
        self.assertTemplateUsed(response, 'custom_admin/add_form.html')
        # Add an article so we can test delete, change, and history views
        post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
            'content': '<p>great article</p>',
            'date_0': '2008-03-18',
            'date_1': '10:54:39'
        })
        self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
        self.assertEqual(CustomArticle.objects.all().count(), 1)
        article_pk = CustomArticle.objects.all()[0].pk
        # Test custom delete, change, and object history templates
        # Test custom change form template
        response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/change_form.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
        # POSTing the delete_selected action shows the bulk-delete
        # confirmation page, which also has a custom template.
        response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
            'index': 0,
            'action': ['delete_selected'],
            '_selected_action': ['1'],
        })
        self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/object_history.html')

    # The 'bodyclass_consistency_check ' marker below presumably comes from
    # this app's overriding templates extending the stock admin templates'
    # bodyclass block with {{ block.super }} -- templates not visible here.

    def test_extended_bodyclass_template_change_form(self):
        """
        Ensure that the admin/change_form.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_change_password(self):
        """
        Ensure that the auth/user/change_password.html template uses block
        super in the bodyclass block.
        """
        user = User.objects.get(username='super')
        response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_index(self):
        """
        Ensure that the admin/index.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_change_list(self):
        """
        Ensure that the admin/change_list.html' template uses block.super
        in the bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_login(self):
        """
        Ensure that the admin/login.html template uses block.super in the
        bodyclass block.
        """
        self.client.logout()
        response = self.client.get(reverse('admin:login'))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_delete_confirmation(self):
        """
        Ensure that the admin/delete_confirmation.html template uses
        block.super in the bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_extended_bodyclass_template_delete_selected_confirmation(self):
        """
        Ensure that the admin/delete_selected_confirmation.html template uses
        block.super in bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        post_data = {
            'action': 'delete_selected',
            'selected_across': '0',
            'index': '0',
            '_selected_action': group.id
        }
        response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
        self.assertEqual(response.context['site_header'], 'Django administration')
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_filter_with_custom_template(self):
        """
        Ensure that one can use a custom template to render an admin filter.
        Refs #17515.
        """
        response = self.client.get(reverse('admin:admin_views_color2_changelist'))
        self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
    """Tests for form_url handling and initial-data overrides in admin views."""

    # Namespace of the admin site instance exercised by these tests.
    current_app = "admin3"

    @classmethod
    def setUpTestData(cls):
        # All fixture users share this SHA1 hash, which corresponds to the
        # plain-text password "secret" used by the login calls below.
        # password = "secret"
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # Content fixtures: one section with three articles, plus a
        # prepopulated post.
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        # Each test runs as the superuser.
        self.client.login(username='super', password='secret')

    def test_change_form_URL_has_correct_value(self):
        """
        Tests whether change_view has form_url in response.context
        """
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app)
        )
        self.assertIn('form_url', response.context, msg='form_url not present in response.context')
        # 'pony' is presumably the form_url configured on this site's
        # ModelAdmin -- definition not visible in this chunk.
        self.assertEqual(response.context['form_url'], 'pony')

    def test_initial_data_can_be_overridden(self):
        """
        Tests that the behavior for setting initial
        form data can be overridden in the ModelAdmin class.
        Usually, the initial value is set via the GET params.
        """
        response = self.client.get(
            reverse('admin:admin_views_restaurant_add', current_app=self.current_app),
            {'name': 'test_value'}
        )
        # this would be the usual behaviour
        self.assertNotContains(response, 'value="test_value"')
        # this is the overridden behaviour
        self.assertContains(response, 'value="overridden_value"')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
    """Tests for which JavaScript assets the admin serves."""

    @classmethod
    def setUpTestData(cls):
        # SHA1 hash of the password "secret" used by setUp's login.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_js_minified_only_if_debug_is_false(self):
        """
        Ensure that the minified versions of the JS files are only used when
        DEBUG is False.
        Refs #17521.
        """
        # DEBUG=False: only the .min.js variants are referenced.
        with override_settings(DEBUG=False):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            self.assertNotContains(response, 'jquery.js')
            self.assertContains(response, 'jquery.min.js')
            self.assertNotContains(response, 'prepopulate.js')
            self.assertContains(response, 'prepopulate.min.js')
            self.assertNotContains(response, 'actions.js')
            self.assertContains(response, 'actions.min.js')
            self.assertNotContains(response, 'collapse.js')
            self.assertContains(response, 'collapse.min.js')
            self.assertNotContains(response, 'inlines.js')
            self.assertContains(response, 'inlines.min.js')
        # DEBUG=True: the unminified sources are served instead.
        with override_settings(DEBUG=True):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            self.assertContains(response, 'jquery.js')
            self.assertNotContains(response, 'jquery.min.js')
            self.assertContains(response, 'prepopulate.js')
            self.assertNotContains(response, 'prepopulate.min.js')
            self.assertContains(response, 'actions.js')
            self.assertNotContains(response, 'actions.min.js')
            self.assertContains(response, 'collapse.js')
            self.assertNotContains(response, 'collapse.min.js')
            self.assertContains(response, 'inlines.js')
            self.assertNotContains(response, 'inlines.min.js')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
    """Tests for the admin's 'save as new' (_saveasnew) behavior."""

    @classmethod
    def setUpTestData(cls):
        # SHA1 hash of the password "secret" used by setUp's login.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_save_as_duplication(self):
        """Ensure save as actually creates a new person"""
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
        self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data)
        # A new row exists AND the original row is untouched.
        self.assertEqual(len(Person.objects.filter(name='John M')), 1)
        self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)

    def test_save_as_display(self):
        """
        Ensure that 'save as' is displayed when activated and after submitting
        invalid data aside save_as_new will not show us a form to overwrite the
        initial model.
        """
        change_url = reverse('admin:admin_views_person_change', args=(self.per1.pk,))
        response = self.client.get(change_url)
        self.assertTrue(response.context['save_as'])
        # gender=3 is invalid, so the form is re-shown; it must target the
        # add view, not the change view of the original object.
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 3, 'alive': 'checked'}
        response = self.client.post(change_url, post_data)
        self.assertEqual(response.context['form_url'], reverse('admin:admin_views_person_add'))
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
    """Tests for a fully customized AdminSite (the 'admin2'/'admin4' sites)."""

    def test_custom_admin_site_login_form(self):
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
        # 'customform' presumably trips a validation error in the site's
        # custom login form -- form definition not visible in this chunk.
        login = self.client.post(reverse('admin2:login'), {
            REDIRECT_FIELD_NAME: reverse('admin2:index'),
            'username': 'customform',
            'password': 'secret',
        }, follow=True)
        self.assertIsInstance(login, TemplateResponse)
        self.assertEqual(login.status_code, 200)
        self.assertContains(login, 'custom form error')
        # The custom form's Media assets must be rendered too.
        self.assertContains(login, 'path/to/media.css')

    def test_custom_admin_site_login_template(self):
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/login.html')
        self.assertContains(response, 'Hello from a custom login template')

    def test_custom_admin_site_logout_template(self):
        response = self.client.get(reverse('admin2:logout'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/logout.html')
        self.assertContains(response, 'Hello from a custom logout template')

    def test_custom_admin_site_index_view_and_template(self):
        # index_template may be a list of template names; a TypeError here
        # would mean the list wasn't accepted.
        try:
            response = self.client.get(reverse('admin2:index'))
        except TypeError:
            self.fail('AdminSite.index_template should accept a list of template paths')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/index.html')
        self.assertContains(response, 'Hello from a custom index template *bar*')

    def test_custom_admin_site_app_index_view_and_template(self):
        response = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/app_index.html')
        self.assertContains(response, 'Hello from a custom app_index template')

    def test_custom_admin_site_password_change_template(self):
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'Hello from a custom password change form template')

    def test_custom_admin_site_password_change_with_extra_context(self):
        # 'eggs' presumably comes from extra_context passed by the custom
        # password_change view -- definition not visible in this chunk.
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'eggs')

    def test_custom_admin_site_password_change_done_template(self):
        response = self.client.get(reverse('admin2:password_change_done'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
        self.assertContains(response, 'Hello from a custom password change done template')

    def test_custom_admin_site_view(self):
        # A view registered directly on the custom site via get_urls.
        self.client.login(username='super', password='secret')
        response = self.client.get(reverse('admin2:my_view'))
        self.assertEqual(response.content, b"Django is a magical pony!")

    def test_pwd_change_custom_template(self):
        self.client.login(username='super', password='secret')
        su = User.objects.get(username='super')
        # change_user_password_template may be a list of template names.
        try:
            response = self.client.get(
                reverse('admin4:auth_user_password_change', args=(su.pk,))
            )
        except TypeError:
            self.fail('ModelAdmin.change_user_password_template should accept a list of template paths')
        self.assertEqual(response.status_code, 200)
def get_perm(Model, perm):
    """Look up and return the Permission with codename *perm* for *Model*."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(codename=perm, content_type=content_type)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
    """Create the user/content fixtures and login POST payloads shared by
    the permission tests."""
    super(AdminViewPermissionsTest, cls).setUpTestData()
    # All fixture users share this SHA1 hash, which corresponds to the
    # plain-text password "secret" used in the login dicts below.
    cls.u1 = User.objects.create(
        id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
        first_name='Super', last_name='User', email='super@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u2 = User.objects.create(
        id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
        first_name='Add', last_name='User', email='auser@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u3 = User.objects.create(
        id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
        first_name='Change', last_name='User', email='cuser@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u4 = User.objects.create(
        id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
        first_name='Delete', last_name='User', email='duser@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    # u5 and u6 are not staff: used to verify non-staff users are kept out.
    cls.u5 = User.objects.create(
        id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
        first_name='Joe', last_name='Public', email='joepublic@example.com',
        is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u6 = User.objects.create(
        id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
        first_name='No', last_name='Staff', email='nostaff@example.com',
        is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    # Content fixtures: one section with three articles, plus a
    # prepopulated post.
    cls.s1 = Section.objects.create(name='Test section')
    cls.a1 = Article.objects.create(
        content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.a2 = Article.objects.create(
        content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.a3 = Article.objects.create(
        content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    # Setup permissions, for our users who can add, change, and delete.
    opts = Article._meta
    # User who can add Articles
    cls.u2.user_permissions.add(get_perm(Article, get_permission_codename('add', opts)))
    # User who can change Articles
    cls.u3.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
    cls.u6.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
    # User who can delete Articles
    cls.u4.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts)))
    cls.u4.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta)))
    # login POST dicts
    cls.index_url = reverse('admin:index')
    cls.super_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'super',
        'password': 'secret',
    }
    # Login by email address (valid password).
    cls.super_email_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'super@example.com',
        'password': 'secret',
    }
    # Login by email address with a wrong password.
    cls.super_email_bad_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'super@example.com',
        'password': 'notsecret',
    }
    cls.adduser_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'adduser',
        'password': 'secret',
    }
    cls.changeuser_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'changeuser',
        'password': 'secret',
    }
    cls.deleteuser_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'deleteuser',
        'password': 'secret',
    }
    # Targets a second admin site whose has_permission is customized.
    cls.nostaff_login = {
        REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
        'username': 'nostaff',
        'password': 'secret',
    }
    cls.joepublic_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'joepublic',
        'password': 'secret',
    }
    # Malformed login payload: no username at all.
    cls.no_username_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'password': 'secret',
    }
def test_login(self):
    """
    Make sure only staff members can log in.
    Successful posts to the login page will redirect to the original url.
    Unsuccessful attempts will continue to render the login page with
    a 200 status code.
    """
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Super User: anonymous GET redirects to login, then a valid POST
    # redirects back to the originally requested index.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    # A redirect response carries no template context.
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Test if user enters email address instead of a username: rejected.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)
    # only correct passwords get a username hint
    login = self.client.post(login_url, self.super_email_bad_login)
    self.assertContains(login, ERROR_MESSAGE)
    # Second user sharing the same email address.
    new_user = User(username='jondoe', password='secret', email='super@example.com')
    new_user.save()
    # check to ensure if there are multiple email addresses a user doesn't get a 500
    login = self.client.post(login_url, self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)
    # Add User: any staff member with a permission may enter the admin.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.adduser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Change User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.changeuser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Delete User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.deleteuser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Regular (non-staff) User should not be able to login.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, ERROR_MESSAGE)
    # Requests without username should not return 500 errors.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.no_username_login)
    self.assertEqual(login.status_code, 200)
    # The login form re-renders with a field-level validation error.
    form = login.context[0].get('form')
    self.assertEqual(form.errors['username'][0], 'This field is required.')
def test_login_has_permission(self):
    """
    The custom ``has_permission_admin`` site decides login eligibility via
    has_permission(): a plain user is rejected, while an eligible non-staff
    user and a staff user both get in.
    """
    index = reverse('has_permission_admin:index')
    login_view = reverse('has_permission_admin:login')
    logout_view = reverse('has_permission_admin:logout')
    # Regular User should not be able to login.
    response = self.client.get(index)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_view, self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, 'permission denied')
    # User with permissions should be able to login.
    response = self.client.get(index)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_view, self.nostaff_login)
    self.assertRedirects(login, index)
    self.assertFalse(login.context)
    self.client.get(logout_view)
    # Staff should be able to login.
    response = self.client.get(index)
    self.assertEqual(response.status_code, 302)
    staff_login = dict(self.deleteuser_login, **{REDIRECT_FIELD_NAME: index})
    login = self.client.post(login_view, staff_login)
    self.assertRedirects(login, index)
    self.assertFalse(login.context)
    self.client.get(logout_view)
def test_login_successfully_redirects_to_original_URL(self):
    """A successful login honors the ?next= URL, query string included."""
    # Anonymous access to the index is redirected to the login page.
    self.assertEqual(self.client.get(self.index_url).status_code, 302)
    redirect_url = '%s?%s' % (self.index_url, 'the-answer=42')
    # Credentials only -- the redirect target travels in the query string.
    credentials = self.super_login.copy()
    credentials.pop(REDIRECT_FIELD_NAME)
    login_url = '%s?%s' % (
        reverse('admin:login'),
        urlencode({REDIRECT_FIELD_NAME: redirect_url}),
    )
    login = self.client.post(login_url, credentials)
    self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
    """Regression test for #19327: logging in twice must not break the session."""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    # Establish a valid admin session
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    # Logging in with non-admin user fails (form re-renders with an error)
    login = self.client.post(login_url, self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, ERROR_MESSAGE)
    # Establish a valid admin session
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    # Logging in with admin user while already logged in still redirects cleanly
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
def test_add_view(self):
    """Test add view restricts access and actually adds items."""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Valid POST payload for the Article add form (split date widget).
    add_dict = {'title': 'Døm ikke',
                'content': '<p>great article</p>',
                'date_0': '2008-03-18', 'date_1': '10:54:39',
                'section': self.s1.pk}
    # Change User should not have access to add articles
    self.client.get(self.index_url)
    self.client.post(login_url, self.changeuser_login)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
    response = self.client.get(reverse('admin:admin_views_article_add'))
    self.assertEqual(response.status_code, 403)
    # Try POST just to make sure
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertEqual(post.status_code, 403)
    # Fixture still has exactly the three articles created in setUpTestData.
    self.assertEqual(Article.objects.count(), 3)
    self.client.get(reverse('admin:logout'))
    # Add user may login and POST to add view, then redirect to admin root
    self.client.get(self.index_url)
    self.client.post(login_url, self.adduser_login)
    addpage = self.client.get(reverse('admin:admin_views_article_add'))
    change_list_link = '&rsaquo; <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
    self.assertNotContains(addpage, change_list_link,
        msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.')
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertRedirects(post, self.index_url)
    self.assertEqual(Article.objects.count(), 4)
    # The model's save hook sends a notification mail on creation.
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
    self.client.get(reverse('admin:logout'))
    # Check that the addition was logged correctly
    addition_log = LogEntry.objects.all()[0]
    new_article = Article.objects.last()
    article_ct = ContentType.objects.get_for_model(Article)
    self.assertEqual(addition_log.user_id, self.u2.pk)
    self.assertEqual(addition_log.content_type_id, article_ct.pk)
    self.assertEqual(addition_log.object_id, str(new_article.pk))
    self.assertEqual(addition_log.object_repr, "Døm ikke")
    self.assertEqual(addition_log.action_flag, ADDITION)
    self.assertEqual(addition_log.change_message, "Added.")
    # Super can add too, but is redirected to the change list view
    self.client.get(self.index_url)
    self.client.post(login_url, self.super_login)
    addpage = self.client.get(reverse('admin:admin_views_article_add'))
    self.assertContains(addpage, change_list_link,
        msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.')
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
    self.assertEqual(Article.objects.count(), 5)
    self.client.get(reverse('admin:logout'))
    # 8509 - if a normal user is already logged in, it is possible
    # to change user into the superuser without error
    self.client.login(username='joepublic', password='secret')
    # Check and make sure that if user expires, data still persists
    self.client.get(self.index_url)
    self.client.post(login_url, self.super_login)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
def test_change_view(self):
    """Change view should restrict access and allow users to edit items."""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Valid POST payload for the Article change form (split date widget).
    change_dict = {'title': 'Ikke fordømt',
                   'content': '<p>edited article</p>',
                   'date_0': '2008-03-18', 'date_1': '10:54:39',
                   'section': self.s1.pk}
    article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
    article_changelist_url = reverse('admin:admin_views_article_changelist')
    # add user should not be able to view the list of article or change any of them
    self.client.get(self.index_url)
    self.client.post(login_url, self.adduser_login)
    response = self.client.get(article_changelist_url)
    self.assertEqual(response.status_code, 403)
    response = self.client.get(article_change_url)
    self.assertEqual(response.status_code, 403)
    post = self.client.post(article_change_url, change_dict)
    self.assertEqual(post.status_code, 403)
    self.client.get(reverse('admin:logout'))
    # change user can view all items and edit them
    self.client.get(self.index_url)
    self.client.post(login_url, self.changeuser_login)
    response = self.client.get(article_changelist_url)
    self.assertEqual(response.status_code, 200)
    response = self.client.get(article_change_url)
    self.assertEqual(response.status_code, 200)
    post = self.client.post(article_change_url, change_dict)
    self.assertRedirects(post, article_changelist_url)
    self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')
    # one error in form should produce singular error message, multiple errors plural
    change_dict['title'] = ''
    post = self.client.post(article_change_url, change_dict)
    self.assertContains(post, 'Please correct the error below.',
        msg_prefix='Singular error message not found in response to post with one error')
    change_dict['content'] = ''
    post = self.client.post(article_change_url, change_dict)
    self.assertContains(post, 'Please correct the errors below.',
        msg_prefix='Plural error message not found in response to post with multiple errors')
    self.client.get(reverse('admin:logout'))
    # Test redirection when using row-level change permissions. Refs #11513.
    # The test ModelAdmin grants change permission based on object id parity:
    # odd ids are denied, even ids allowed (see RowLevelChangePermissionModel admin).
    r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
    r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
    change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
    change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
    # Staff users: denied on the odd-id object, allowed on the even-id one.
    for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
        self.client.post(login_url, login_dict)
        response = self.client.get(change_url_1)
        self.assertEqual(response.status_code, 403)
        response = self.client.post(change_url_1, {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertEqual(response.status_code, 403)
        response = self.client.get(change_url_2)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(change_url_2, {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertRedirects(response, self.index_url)
        self.client.get(reverse('admin:logout'))
    # Non-staff / anonymous users: always bounced to the login form,
    # and the object is never modified.
    for login_dict in [self.joepublic_login, self.no_username_login]:
        self.client.post(login_url, login_dict)
        response = self.client.get(change_url_1, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.get(change_url_2, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        self.client.get(reverse('admin:logout'))
def test_delete_view(self):
    """Delete view should restrict access and actually delete items."""
    delete_dict = {'post': 'yes'}  # confirmation payload for the delete view
    delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,))
    # add user should not be able to delete articles
    self.client.login(**self.adduser_login)
    response = self.client.get(delete_url)
    self.assertEqual(response.status_code, 403)
    post = self.client.post(delete_url, delete_dict)
    self.assertEqual(post.status_code, 403)
    # Fixture still holds the three articles from setUpTestData.
    self.assertEqual(Article.objects.count(), 3)
    self.client.logout()
    # Delete user can delete
    self.client.login(**self.deleteuser_login)
    response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
    # Deleting the section cascades to all three of its articles.
    self.assertContains(response, "<h2>Summary</h2>")
    self.assertContains(response, "<li>Articles: 3</li>")
    # test response contains link to related Article
    self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
    response = self.client.get(delete_url)
    self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
    # Deleting a single article only counts itself in the summary.
    self.assertContains(response, "<h2>Summary</h2>")
    self.assertContains(response, "<li>Articles: 1</li>")
    self.assertEqual(response.status_code, 200)
    post = self.client.post(delete_url, delete_dict)
    self.assertRedirects(post, self.index_url)
    self.assertEqual(Article.objects.count(), 2)
    # The model's delete hook sends a notification mail.
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
    # The deletion is recorded in the admin log.
    article_ct = ContentType.objects.get_for_model(Article)
    logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
    self.assertEqual(logged.object_id, str(self.a1.pk))
def test_history_view(self):
    """History view should restrict access."""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # add user should not be able to view the list of article or change any of them
    self.client.get(self.index_url)
    self.client.post(login_url, self.adduser_login)
    response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
    self.assertEqual(response.status_code, 403)
    self.client.get(reverse('admin:logout'))
    # change user can view all items and edit them
    self.client.get(self.index_url)
    self.client.post(login_url, self.changeuser_login)
    response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
    self.assertEqual(response.status_code, 200)
    # Test redirection when using row-level change permissions. Refs #11513.
    # Auto-assigned pks: rl1 gets the odd (denied) id, rl2 the even (allowed) one.
    rl1 = RowLevelChangePermissionModel.objects.create(name="odd id")
    rl2 = RowLevelChangePermissionModel.objects.create(name="even id")
    # Staff users: history of the denied object is 403, the allowed one renders.
    for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
        self.client.post(login_url, login_dict)
        response = self.client.get(reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)))
        self.assertEqual(response.status_code, 403)
        response = self.client.get(reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)))
        self.assertEqual(response.status_code, 200)
        self.client.get(reverse('admin:logout'))
    # Non-staff / anonymous users are always bounced to the login form.
    for login_dict in [self.joepublic_login, self.no_username_login]:
        self.client.post(login_url, login_dict)
        response = self.client.get(
            reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)), follow=True
        )
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.get(
            reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)), follow=True
        )
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
    """A history request for a malformed object id returns a 404."""
    self.client.post(reverse('admin:login'), self.changeuser_login)
    bad_history_url = reverse('admin:admin_views_article_history', args=('foo',))
    self.assertEqual(self.client.get(bad_history_url).status_code, 404)
def test_conditionally_show_add_section_link(self):
    """
    The foreign key widget should only show the "add related" button if the
    user has permission to add that related item.
    """
    self.client.login(**self.adduser_login)
    add_url = reverse('admin:admin_views_article_add')
    link_marker = 'add_id_section'
    # Without the add-Section permission the link must be absent.
    self.assertNotContains(self.client.get(add_url), link_marker)
    # Grant the permission; the link should now be rendered.
    adduser = User.objects.get(username='adduser')
    adduser.user_permissions.add(
        get_perm(Section, get_permission_codename('add', Section._meta)))
    self.assertContains(self.client.get(add_url), link_marker)
def test_conditionally_show_change_section_link(self):
    """
    The foreign key widget should only show the "change related" button if
    the user has permission to change that related item.
    """
    def widget_can_change(response):
        form = response.context['adminform'].form
        return form.fields['section'].widget.can_change_related

    self.client.login(**self.adduser_login)
    add_url = reverse('admin:admin_views_article_add')
    link_marker = 'change_id_section'
    # No change-Section permission yet: widget flag off, link absent.
    response = self.client.get(add_url)
    self.assertFalse(widget_can_change(response))
    self.assertNotContains(response, link_marker)
    # Grant the permission: flag on, link rendered.
    adduser = User.objects.get(username='adduser')
    adduser.user_permissions.add(
        get_perm(Section, get_permission_codename('change', Section._meta)))
    response = self.client.get(add_url)
    self.assertTrue(widget_can_change(response))
    self.assertContains(response, link_marker)
def test_conditionally_show_delete_section_link(self):
    """
    The foreign key widget should only show the "delete related" button if
    the user has permission to delete that related item.
    """
    def widget_can_delete(response):
        form = response.context['adminform'].form
        return form.fields['sub_section'].widget.can_delete_related

    self.client.login(**self.adduser_login)
    add_url = reverse('admin:admin_views_article_add')
    link_marker = 'delete_id_sub_section'
    # No delete-Section permission yet: widget flag off, link absent.
    response = self.client.get(add_url)
    self.assertFalse(widget_can_delete(response))
    self.assertNotContains(response, link_marker)
    # Grant the permission: flag on, link rendered.
    adduser = User.objects.get(username='adduser')
    adduser.user_permissions.add(
        get_perm(Section, get_permission_codename('delete', Section._meta)))
    response = self.client.get(add_url)
    self.assertTrue(widget_can_delete(response))
    self.assertContains(response, link_marker)
def test_disabled_permissions_when_logged_in(self):
    """A superuser deactivated mid-session is sent back to the login form."""
    self.client.login(username='super', password='secret')
    user = User.objects.get(username='super')
    user.is_active = False
    user.save()
    # The admin index no longer recognizes the session.
    index_response = self.client.get(self.index_url, follow=True)
    self.assertContains(index_response, 'id="login-form"')
    self.assertNotContains(index_response, 'Log out')
    # A login-protected custom view behaves the same way.
    secure_response = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(secure_response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
    """A superuser stripped of staff status mid-session must re-authenticate."""
    self.client.login(username='super', password='secret')
    user = User.objects.get(username='super')
    user.is_staff = False
    user.save()
    # The admin index no longer admits the session.
    index_response = self.client.get(self.index_url, follow=True)
    self.assertContains(index_response, 'id="login-form"')
    self.assertNotContains(index_response, 'Log out')
    # A login-protected custom view behaves the same way.
    secure_response = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(secure_response, 'id="login-form"')
def test_app_index_fail_early(self):
    """
    If a user has no module perms, avoid iterating over all the modeladmins
    in the registry.
    """
    change_user = User.objects.get(username='changeuser')
    change_perm = get_perm(Article, get_permission_codename('change', Article._meta))
    self.client.login(**self.changeuser_login)
    app_index_url = reverse('admin:app_list', args=('admin_views',))
    # Strip the only permission: no module perms at all -> 403.
    change_user.user_permissions.remove(change_perm)
    self.assertEqual(self.client.get(app_index_url).status_code, 403)
    # Restore it: the app index renders again.
    change_user.user_permissions.add(change_perm)
    self.assertEqual(self.client.get(app_index_url).status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
    """
    Only admin users should be able to use the admin shortcut view.
    """
    content_type = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
    instance = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
    shortcut_url = reverse('admin:view_on_site', args=(content_type.pk, instance.pk))
    # Anonymous visitors land on the login page.
    anonymous_response = self.client.get(shortcut_url, follow=True)
    self.assertTemplateUsed(anonymous_response, 'admin/login.html')
    # A logged-in admin is redirected to the object's URL.
    self.client.login(username='super', password='secret')
    response = self.client.get(shortcut_url, follow=False)
    # Can't use self.assertRedirects() because User.get_absolute_url() is silly.
    self.assertEqual(response.status_code, 302)
    # Domain may depend on contrib.sites tests also run
    six.assertRegex(self, response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
    """
    Ensure that has_module_permission() returns True for all users who
    have any permission for that module (add, change, or delete), so that
    the module is displayed on the admin index page.
    """
    logins = [
        self.super_login,
        self.adduser_login,
        self.changeuser_login,
        self.deleteuser_login,
    ]
    for position, login_kwargs in enumerate(logins):
        self.client.login(**login_kwargs)
        response = self.client.get(self.index_url)
        self.assertContains(response, 'admin_views')
        self.assertContains(response, 'Articles')
        # Switch users between iterations; the final user stays logged in.
        if position < len(logins) - 1:
            self.client.logout()
def test_overriding_has_module_permission(self):
    """
    Ensure that overriding has_module_permission() has the desired effect.
    In this case, it always returns False, so the module should not be
    displayed on the admin index page for any users.
    """
    index_url = reverse('admin7:index')
    logins = [
        self.super_login,
        self.adduser_login,
        self.changeuser_login,
        self.deleteuser_login,
    ]
    for position, login_kwargs in enumerate(logins):
        self.client.login(**login_kwargs)
        response = self.client.get(index_url)
        self.assertNotContains(response, 'admin_views')
        self.assertNotContains(response, 'Articles')
        # Switch users between iterations; the final user stays logged in.
        if position < len(logins) - 1:
            self.client.logout()
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
    """Regression test for #17333: the admin index must not 500 when a
    ModelAdmin removes its standard add/change URLs."""

    @classmethod
    def setUpTestData(cls):
        # Staff user; the fixed id and password hash match the hasher
        # forced by @override_settings above ('secret').
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        opts = Report._meta
        # User who can change Reports
        cls.u3.user_permissions.add(get_perm(Report, get_permission_codename('change', opts)))
        # login POST dict
        cls.changeuser_login = {
            REDIRECT_FIELD_NAME: reverse('admin:index'),
            'username': 'changeuser',
            'password': 'secret',
        }

    def test_no_standard_modeladmin_urls(self):
        """Admin index views don't break when user's ModelAdmin removes standard urls"""
        self.client.get(reverse('admin:index'))
        r = self.client.post(reverse('admin:login'), self.changeuser_login)
        r = self.client.get(reverse('admin:index'))
        # we shouldn't get a 500 error caused by a NoReverseMatch
        self.assertEqual(r.status_code, 200)
        self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u2 = User.objects.create(
id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
first_name='Add', last_name='User', email='auser@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u3 = User.objects.create(
id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
first_name='Change', last_name='User', email='cuser@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u4 = User.objects.create(
id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
first_name='Delete', last_name='User', email='duser@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u5 = User.objects.create(
id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
first_name='Joe', last_name='Public', email='joepublic@example.com',
is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u6 = User.objects.create(
id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
first_name='No', last_name='Staff', email='nostaff@example.com',
is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.v1 = Villain.objects.create(name='Adam')
cls.v2 = Villain.objects.create(name='Sue')
cls.sv1 = SuperVillain.objects.create(name='Bob')
cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2)
cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2)
cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1)
cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1)
cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1)
cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1)
cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1)
def setUp(self):
self.client.login(username='super', password='secret')
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(force_bytes(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*<li>Plot details: <a href="%s">almost finished</a>' % (
reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)),
reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)))
))
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
six.assertRegex(self, response.content, pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)),
)
response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,)))
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username='deleteuser')
delete_user.user_permissions.add(get_perm(Plot,
get_permission_codename('delete', Plot._meta)))
self.assertTrue(self.client.login(username='deleteuser',
password='secret'))
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,)))
self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
self.assertContains(response, "would require deleting the following protected related objects")
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
)
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
'admin:admin_views_plot_change', args=(self.pl1.pk,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
self.assertContains(response, should_contain)
    def test_multiple_fkeys_to_same_instance(self):
        """
        If a deleted object has two relationships pointing to it from
        another object, the other object should still only be listed
        once.
        """
        # pl2 references v2 through both of its FKs, so the plot must
        # appear exactly once in the confirmation list (count=1).
        should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
            'admin:admin_views_plot_change', args=(self.pl2.pk,)
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
        self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_supervillain_change', args=(self.sv1.pk,)),
'<li>Secret hideout: floating castle',
'<li>Super secret hideout: super floating castle!',
]
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
    def test_generic_relations(self):
        """
        If a deleted object has GenericForeignKeys pointing to it,
        those objects should be listed for deletion.
        """
        plot = self.pl3
        tag = FunkyTag.objects.create(content_object=plot, name='hott')
        should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
            'admin:admin_views_funkytag_change', args=(tag.id,))
        response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,)))
        self.assertContains(response, should_contain)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
    """Display of GenericForeignKey targets in admin changelist views."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.v1 = Villain.objects.create(name='Adam')
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_generic_content_object_in_list_display(self):
        # The generic content object renders as a list_display column cell.
        FunkyTag.objects.create(content_object=self.pl3, name='hott')
        response = self.client.get(reverse('admin:admin_views_funkytag_changelist'))
        self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
    """
    Admin views for a model whose string primary key contains characters
    that need URL quoting (slashes, quotes, spaces, percent signs, ...).
    """

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # A primary key deliberately stuffed with URL-hostile characters.
        cls.pk = """abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 -_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
        cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
        content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
        # Log a change action (flag 2) so tests can exercise LogEntry links.
        LogEntry.objects.log_action(100, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_get_history_view(self):
        """
        Retrieving the history for an object using urlencoded form of primary
        key should work.
        Refs #12349, #18550.
        """
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertContains(response, 'Changed something')
        self.assertEqual(response.status_code, 200)

    def test_get_change_view(self):
        "Retrieving the object using urlencoded form of primary key should work"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertEqual(response.status_code, 200)

    def test_changelist_to_changeform_link(self):
        "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        pk_final_url = escape(iri_to_uri(quote(self.pk)))
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', pk_final_url)
        should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)

    def test_recentactions_link(self):
        "The link from the recent actions list referring to the changeform of the object should be quoted"
        response = self.client.get(reverse('admin:index'))
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
        self.assertContains(response, should_contain)

    def test_recentactions_without_content_type(self):
        "If a LogEntry is missing content_type it will not display it in span tag under the hyperlink."
        response = self.client.get(reverse('admin:index'))
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
        self.assertContains(response, should_contain)
        should_contain = "Model with string primary key"  # capitalized in Recent Actions
        self.assertContains(response, should_contain)
        logentry = LogEntry.objects.get(content_type__model__iexact='modelwithstringprimarykey')
        # http://code.djangoproject.com/ticket/10275
        # if the log entry doesn't have a content type it should still be
        # possible to view the Recent Actions part
        logentry.content_type = None
        logentry.save()
        # The verbose-name span disappears, so the phrase occurs once less.
        counted_presence_before = response.content.count(force_bytes(should_contain))
        response = self.client.get(reverse('admin:index'))
        counted_presence_after = response.content.count(force_bytes(should_contain))
        self.assertEqual(counted_presence_before - 1,
                         counted_presence_after)

    def test_logentry_get_admin_url(self):
        "LogEntry.get_admin_url returns a URL to edit the entry's object or None for non-existent (possibly deleted) models"
        # Lowercase model name, as stored on ContentType.
        log_entry_model = "modelwithstringprimarykey"
        logentry = LogEntry.objects.get(content_type__model__iexact=log_entry_model)
        desired_admin_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        self.assertEqual(logentry.get_admin_url(), desired_admin_url)
        self.assertIn(iri_to_uri(quote(self.pk)), logentry.get_admin_url())
        # A model that no longer exists yields no admin URL.
        logentry.content_type.model = "non-existent"
        self.assertEqual(logentry.get_admin_url(), None)

    def test_logentry_get_edited_object(self):
        "LogEntry.get_edited_object returns the edited object of a given LogEntry object"
        logentry = LogEntry.objects.get(content_type__model__iexact="modelwithstringprimarykey")
        edited_obj = logentry.get_edited_object()
        self.assertEqual(logentry.object_id, str(edited_obj.pk))

    def test_deleteconfirmation_link(self):
        "The link from the delete confirmation page referring back to the changeform of the object should be quoted"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),)))
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
        should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)

    def test_url_conflicts_with_add(self):
        "A model with a primary key that ends with add or is `add` should be visible"
        add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add")
        add_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
        # A pk of exactly "add" must not collide with the add view's URL.
        add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
        add_url = reverse('admin:admin_views_modelwithstringprimarykey_add')
        change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),))
        self.assertNotEqual(add_url, change_url)

    def test_url_conflicts_with_delete(self):
        "A model with a primary key that ends with delete should be visible"
        delete_model = ModelWithStringPrimaryKey(pk="delete")
        delete_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_url_conflicts_with_history(self):
        "A model with a primary key that ends with history should be visible"
        history_model = ModelWithStringPrimaryKey(pk="history")
        history_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_shortcut_view_with_escaping(self):
        "'View on site' should work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)

    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse('admin:%s_modelwithstringprimarykey_change' %
                      ModelWithStringPrimaryKey._meta.app_label,
                      args=(quote(self.pk),))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse('admin:%s_modelwithstringprimarykey_history' %
                                ModelWithStringPrimaryKey._meta.app_label,
                                args=(quote(self.pk),))
        self.assertContains(response, '<a href="%s" class="historylink"' % expected_link)

    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using "Save and continue editing"
        button, the user should be redirected to the object's change_view.
        In case primary key is a string containing some special characters
        like slash or underscore, these characters must be escaped (see #22266)
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response['location'])  # PK is quoted
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
    """
    Test behavior of a view protected by the staff_member_required decorator.
    """

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def test_secure_view_shows_login_if_not_logged_in(self):
        """
        Ensure that we see the admin login form.
        """
        secure_url = reverse('secure_view')
        response = self.client.get(secure_url)
        # Anonymous users are redirected to the login page, carrying the
        # original URL in the "next" parameter.
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)

    def test_staff_member_required_decorator_works_with_argument(self):
        """
        Ensure that staff_member_required decorator works with an argument
        (redirect_field_name).
        """
        secure_url = '/test_admin/admin/secure-view2/'
        response = self.client.get(secure_url)
        # The custom redirect field name ("myfield") replaces "next".
        self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url))
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
    """Edit and delete views must handle non-ASCII data without errors."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # Book/chapter fixtures with Norwegian characters (æøå).
        cls.b1 = Book.objects.create(name='Lærdommer')
        cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1)
        cls.chap1 = Chapter.objects.create(
            title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>',
            book=cls.b1
        )
        cls.chap2 = Chapter.objects.create(
            title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1)
        cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>')
        cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>')
        cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>')
        cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>')
        cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>')
        cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_unicode_edit(self):
        """
        A test to ensure that POST on edit_view handles non-ASCII characters.
        """
        post_data = {
            "name": "Test lærdommer",
            # inline data
            "chapter_set-TOTAL_FORMS": "6",
            "chapter_set-INITIAL_FORMS": "3",
            "chapter_set-MAX_NUM_FORMS": "0",
            "chapter_set-0-id": self.chap1.pk,
            "chapter_set-0-title": "Norske bostaver æøå skaper problemer",
            "chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
            "chapter_set-1-id": self.chap2.id,
            "chapter_set-1-title": "Kjærlighet.",
            "chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
            "chapter_set-2-id": self.chap3.id,
            "chapter_set-2-title": "Need a title.",
            "chapter_set-2-content": "<p>Newest content</p>",
            # Three empty extra forms.
            "chapter_set-3-id": "",
            "chapter_set-3-title": "",
            "chapter_set-3-content": "",
            "chapter_set-4-id": "",
            "chapter_set-4-title": "",
            "chapter_set-4-content": "",
            "chapter_set-5-id": "",
            "chapter_set-5-title": "",
            "chapter_set-5-content": "",
        }
        response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere

    def test_unicode_delete(self):
        """
        Ensure that the delete_view handles non-ASCII characters
        """
        delete_dict = {'post': 'yes'}
        delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,))
        response = self.client.get(delete_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(delete_url, delete_dict)
        self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u2 = User.objects.create(
id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
first_name='Add', last_name='User', email='auser@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u3 = User.objects.create(
id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
first_name='Change', last_name='User', email='cuser@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u4 = User.objects.create(
id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
first_name='Delete', last_name='User', email='duser@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u5 = User.objects.create(
id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
first_name='Joe', last_name='Public', email='joepublic@example.com',
is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u6 = User.objects.create(
id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
first_name='No', last_name='Staff', email='nostaff@example.com',
is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
def setUp(self):
self.client.login(username='super', password='secret')
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django",
release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get(reverse('admin:admin_views_language_changelist'))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse('admin:admin_views_person_changelist'))
# 2 inputs per object(the field and the hidden id field) = 6
# 4 management hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF field = 1
# field to track 'select all' across paginated views = 1
# 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
self.assertContains(response, "<input", count=19)
# 1 select per object = 3 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'),
data, follow=True)
self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per1.pk,
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": "%s" % self.per3.pk,
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per1.pk,
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
''' Ensure that non field errors are displayed for each of the
forms in the changelist's formset. Refs #13126.
'''
fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
1,
html=True
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
2,
html=True
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# Check that the order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Ensure that pagination works for list_editable items.
Refs #16819.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist'))
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=1')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ['3'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
"_selected_action": ['1'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse('admin:admin_views_person_changelist'))
self.assertNotEqual(response.context['cl'].list_editable, ())
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR)
self.assertEqual(response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
""" Ensure that hidden pk fields aren't displayed in the table body and
that their corresponding human-readable value is displayed instead.
Note that the hidden pk fields are in fact be displayed but
separately (not in the table), and only once.
Refs #12475.
"""
story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
story2 = Story.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
response = self.client.get(reverse('admin:admin_views_story_changelist'))
self.assertContains(response, 'id="id_form-0-id"', 1) # Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
    def test_pk_hidden_fields_with_list_display_links(self):
        """ Similar to test_pk_hidden_fields, but when the hidden pk fields are
        referenced in list_display_links.
        Refs #12475.
        """
        story1 = OtherStory.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
        story2 = OtherStory.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
        link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
        link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
        response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
        self.assertContains(response, 'id="id_form-0-id"', 1)  # Only one hidden field, in a separate place than the table.
        self.assertContains(response, 'id="id_form-1-id"', 1)
        # Hidden pk inputs are still grouped once in the .hiddenfields div.
        self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
        # Because the pk is in list_display_links, each row renders it as a
        # <th> link to the change view rather than a plain <td>.
        self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
        self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
    """Tests for the admin changelist search box (the ``q`` GET parameter)."""
    @classmethod
    def setUpTestData(cls):
        # Several users with fixed names/ids; the User-changelist search
        # tests below rely on exactly one user matching 'joe'.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Persons used by the "beginning matches" test (search must match
        # 'Gui' but not 'uido').
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
        # Recommender/Recommendation pairs whose searchable text lives on
        # related TitleTranslation rows ('Bar' is the only exact q=bar hit).
        cls.t1 = Recommender.objects.create()
        cls.t2 = Recommendation.objects.create(recommender=cls.t1)
        cls.t3 = Recommender.objects.create()
        cls.t4 = Recommendation.objects.create(recommender=cls.t3)
        cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar')
        cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo')
        cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few')
        cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas')
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_search_on_sibling_models(self):
        "A search can reference fields on sibling (related) models."
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned 1 object
        self.assertContains(response, "\n1 recommendation\n")
    def test_with_fk_to_field(self):
        """
        Ensure that the to_field GET parameter is preserved when a search
        is performed. Refs #10918.
        """
        response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
        self.assertContains(response, "\n1 user\n")
        self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)
    def test_exact_matches(self):
        # 'bar' matches the TitleTranslation 'Bar' exactly.
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned one object
        self.assertContains(response, "\n1 recommendation\n")
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 recommendations\n")
    def test_beginning_matches(self):
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        # confirm the search returned one object
        self.assertContains(response, "\n1 person\n")
        self.assertContains(response, "Guido")
        # A mid-word fragment must not match (search is prefix-anchored here).
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 persons\n")
        self.assertNotContains(response, "Guido")
    def test_pluggable_search(self):
        PluggableSearchPerson.objects.create(name="Bob", age=10)
        PluggableSearchPerson.objects.create(name="Amy", age=20)
        # Searching by name...
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Bob")
        # ...and by the numeric age field via the pluggable search.
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Amy")
    def test_reset_link(self):
        """
        Test presence of reset link in search bar ("1 result (_x total_)").
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # + 1 for total count
        with self.assertNumQueries(5):
            response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        self.assertContains(response,
            """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
            html=True)
    def test_no_total_count(self):
        """
        #8408 -- "Show all" should be displayed instead of the total count if
        ModelAdmin.show_full_result_count is False.
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        with self.assertNumQueries(4):
            response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        self.assertContains(response,
            """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
            html=True)
        self.assertTrue(response.context['cl'].show_admin_actions)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
    """Inline models that inherit from a common concrete parent (they carry
    an ``account_ptr``) must get distinct, non-clashing formset prefixes."""
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_inline(self):
        "Ensure that inline models which inherit from a common parent are correctly handled by admin."
        foo_user = "foo username"
        bar_user = "bar username"
        # Extracts every form-field name attribute rendered in the page.
        name_re = re.compile(b'name="(.*?)"')
        # test the add case
        response = self.client.get(reverse('admin:admin_views_persona_add'))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))
        # POST data for the add case; the second inline formset uses the
        # "accounts-2" prefix.
        post_data = {
            "name": "Test Name",
            # inline data
            "accounts-TOTAL_FORMS": "1",
            "accounts-INITIAL_FORMS": "0",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": foo_user,
            "accounts-2-TOTAL_FORMS": "1",
            "accounts-2-INITIAL_FORMS": "0",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": bar_user,
        }
        response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        # Exactly one of each account subclass was created and linked.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
        persona_id = Persona.objects.all()[0].id
        foo_id = FooAccount.objects.all()[0].id
        bar_id = BarAccount.objects.all()[0].id
        # test the edit case
        response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))
        post_data = {
            "name": "Test Name",
            "accounts-TOTAL_FORMS": "2",
            "accounts-INITIAL_FORMS": "1",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": "%s-1" % foo_user,
            "accounts-0-account_ptr": str(foo_id),
            "accounts-0-persona": str(persona_id),
            "accounts-2-TOTAL_FORMS": "2",
            "accounts-2-INITIAL_FORMS": "1",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": "%s-1" % bar_user,
            "accounts-2-0-account_ptr": str(bar_id),
            "accounts-2-0-persona": str(persona_id),
        }
        response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
        self.assertEqual(response.status_code, 302)
        # Edit updated the existing rows in place; counts are unchanged.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminActionsTest(TestCase):
    """Tests for admin changelist actions: custom ModelAdmin-method actions,
    function actions, the built-in delete_selected action (including its
    confirmation page), and action-form behavior (ordering, popups, messages).

    Fixes applied: corrected the "checkboxbox" typo in a msg_prefix string
    and removed a redundant duplicate assertion in test_model_without_action.
    """
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = ExternalSubscriber.objects.create(name='John Doe', email='john@example.org')
        cls.s2 = Subscriber.objects.create(name='Max Mustermann', email='max@example.org')
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_model_admin_custom_action(self):
        "Tests a custom action defined in a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'mail_admin',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action')
    def test_model_admin_default_delete_action(self):
        "Tests the default delete action defined as a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        # Same selection, but with 'post': 'yes' to confirm the deletion.
        delete_confirmation_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'post': 'yes',
        }
        confirmation = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertIsInstance(confirmation, TemplateResponse)
        self.assertContains(confirmation, "Are you sure you want to delete the selected subscribers?")
        self.assertContains(confirmation, "<h2>Summary</h2>")
        self.assertContains(confirmation, "<li>Subscribers: 3</li>")
        self.assertContains(confirmation, "<li>External subscribers: 1</li>")
        self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2)
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data)
        self.assertEqual(Subscriber.objects.count(), 0)
    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_non_localized_pk(self):
        """If USE_THOUSAND_SEPARATOR is set, make sure that the ids for
        the objects selected for deletion are rendered without separators.
        Refs #14895.
        """
        subscriber = Subscriber.objects.get(id=1)
        subscriber.id = 9999
        subscriber.save()
        action_data = {
            ACTION_CHECKBOX_NAME: [9999, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html')
        self.assertContains(response, 'value="9999"')  # Instead of 9,999
        self.assertContains(response, 'value="2"')
    def test_model_admin_default_delete_action_protected(self):
        """
        Tests the default delete action defined as a ModelAdmin method in the
        case where some related objects are protected from deletion.
        """
        q1 = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q1, answer="Because.")
        a2 = Answer.objects.create(question=q1, answer="Yes.")
        q2 = Question.objects.create(question="Wherefore?")
        action_data = {
            ACTION_CHECKBOX_NAME: [q1.pk, q2.pk],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_question_changelist'), action_data)
        # The confirmation page must list each protected Answer with a link
        # to its change view.
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)),
            html=True
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)),
            html=True
        )
    def test_model_admin_default_delete_action_no_change_url(self):
        """
        Default delete action shouldn't break if a user's ModelAdmin removes the url for change_view.
        Regression test for #20640
        """
        obj = UnchangeableObject.objects.create()
        action_data = {
            ACTION_CHECKBOX_NAME: obj.pk,
            "action": "delete_selected",
            "index": "0",
        }
        response = self.client.post(reverse('admin:admin_views_unchangeableobject_changelist'), action_data)
        # No 500 caused by NoReverseMatch
        self.assertEqual(response.status_code, 200)
        # The page shouldn't display a link to the nonexistent change page
        self.assertContains(response, "<li>Unchangeable object: UnchangeableObject object</li>", 1, html=True)
    def test_custom_function_mail_action(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')
    def test_custom_function_action_with_redirect(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'redirect_to',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 302)
    def test_default_redirect(self):
        """
        Test that actions which don't return an HttpResponse are redirected to
        the same page, retaining the querystring (which may contain changelist
        information).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        url = reverse('admin:admin_views_externalsubscriber_changelist') + '?o=1'
        response = self.client.post(url, action_data)
        self.assertRedirects(response, url)
    def test_custom_function_action_streaming_response(self):
        """Tests a custom action that returns a StreamingHttpResponse."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'download',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        content = b''.join(response.streaming_content)
        self.assertEqual(content, b'This is the content of the file')
        self.assertEqual(response.status_code, 200)
    def test_custom_function_action_no_perm_response(self):
        """Tests a custom action that returns an HttpResponse with 403 code."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'no_perm',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.content, b'No permission to perform this action')
    def test_actions_ordering(self):
        """
        Ensure that actions are ordered as expected.
        Refs #15964.
        """
        response = self.client.get(reverse('admin:admin_views_externalsubscriber_changelist'))
        self.assertContains(response, '''<label>Action: <select name="action">
<option value="" selected="selected">---------</option>
<option value="delete_selected">Delete selected external
subscribers</option>
<option value="redirect_to">Redirect to (Awesome action)</option>
<option value="external_mail">External mail (Another awesome
action)</option>
<option value="download">Download subscription</option>
<option value="no_perm">No permission to run</option>
</select>''', html=True)
    def test_model_without_action(self):
        "Tests a ModelAdmin without any action"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        # Fixed: msg_prefix previously read "checkboxbox"; a redundant
        # duplicate of this assertion was also removed.
        self.assertNotContains(response, '<input type="checkbox" class="action-select"',
            msg_prefix="Found an unexpected action toggle checkbox in response")
    def test_model_without_action_still_has_jquery(self):
        "Tests that a ModelAdmin without any actions still gets jQuery included in page"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        self.assertContains(response, 'jquery.min.js',
            msg_prefix="jQuery missing from admin pages for model with no admin actions")
    def test_action_column_class(self):
        "Tests that the checkbox column class is present in the response"
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        self.assertContains(response, 'action-checkbox-column')
    def test_multiple_actions_form(self):
        """
        Test that actions come from the form whose submit button was pressed (#10618).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            # Two different actions selected on the two forms...
            'action': ['external_mail', 'delete_selected'],
            # ...but we clicked "go" on the top form.
            'index': 0
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        # Send mail, don't delete.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')
    def test_user_message_on_none_selected(self):
        """
        User should see a warning when 'Go' is pressed and no items are selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """Items must be selected in order to perform actions on them. No items have been changed."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)
    def test_user_message_on_no_action(self):
        """
        User should see a warning when 'Go' is pressed and no action is selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': '',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """No action selected."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)
    def test_selection_counter(self):
        """
        Check if the selection counter is there.
        """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertContains(response, '0 of 2 selected')
    def test_popup_actions(self):
        """ Actions should not be shown in popups. """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        response = self.client.get(
            reverse('admin:admin_views_subscriber_changelist') + '?%s' % IS_POPUP_VAR)
        self.assertEqual(response.context["action_form"], None)
    def test_popup_template_response(self):
        """
        Success on popups shall be rendered from template in order to allow
        easy customization.
        """
        response = self.client.post(
            reverse('admin:admin_views_actor_add') + '?%s=1' % IS_POPUP_VAR,
            {'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name, 'admin/popup_response.html')
    def test_popup_template_escaping(self):
        """Backslashes in popup values must be JS-escaped (as \\u005C) in the
        dismiss* callbacks for add, change, and delete."""
        context = {
            'new_value': 'new_value\\',
            'obj': 'obj\\',
            'value': 'value\\',
        }
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            'opener.dismissAddRelatedObjectPopup(window, "value\\u005C", "obj\\u005C");', output
        )
        context['action'] = 'change'
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            'opener.dismissChangeRelatedObjectPopup(window, '
            '"value\\u005C", "obj\\u005C", "new_value\\u005C");', output
        )
        context['action'] = 'delete'
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            'opener.dismissDeleteRelatedObjectPopup(window, "value\\u005C");', output
        )
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
    """A ModelAdmin can plug in its own ChangeList class (refs #9749)."""
    @classmethod
    def setUpTestData(cls):
        # Single superuser account; every test logs in as this user.
        cls.u1 = User.objects.create(
            id=100,
            username='super',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super',
            last_name='User',
            email='super@example.com',
            is_staff=True,
            is_superuser=True,
            is_active=True,
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10),
        )
    def setUp(self):
        logged_in = self.client.login(username='super', password='secret')
        self.assertEqual(logged_in, True)
    def test_custom_changelist(self):
        """
        Validate that a custom ChangeList class can be used (#9749)
        """
        add_url = reverse('admin:admin_views_gadget_add')
        changelist_url = reverse('admin:admin_views_gadget_changelist')
        # Insert some data
        response = self.client.post(add_url, {"name": "First Gadget"})
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        # Hit the page once to get messages out of the queue message list
        self.client.get(changelist_url)
        # Ensure that data is still not visible on the page
        response = self.client.get(changelist_url)
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'First Gadget')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
    """Regression check: the parent add view renders without errors."""
    @classmethod
    def setUpTestData(cls):
        # Single superuser account; every test logs in as this user.
        cls.u1 = User.objects.create(
            id=100,
            username='super',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super',
            last_name='User',
            email='super@example.com',
            is_staff=True,
            is_superuser=True,
            is_active=True,
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10),
        )
    def setUp(self):
        logged_in = self.client.login(username='super', password='secret')
        self.assertEqual(logged_in, True)
    def test_GET_parent_add(self):
        """
        InlineModelAdmin broken?
        """
        response = self.client.get(reverse('admin:admin_views_parent_add'))
        self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
self.pks = [EmptyModel.objects.create().id for i in range(3)]
self.super_login = {
REDIRECT_FIELD_NAME: reverse('admin:index'),
'username': 'super',
'password': 'secret',
}
def test_changelist_view(self):
response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
for i in self.pks:
if i > 1:
self.assertContains(response, 'Primary key = %s' % i)
else:
self.assertNotContains(response, 'Primary key = %s' % i)
    def test_changelist_view_count_queries(self):
        """
        The unfiltered changelist runs exactly 4 queries; applying a search
        or filter adds exactly one extra count(*) query (5 total).
        """
        # create 2 Person objects
        Person.objects.create(name='person1', gender=1)
        Person.objects.create(name='person2', gender=2)
        changelist_url = reverse('admin:admin_views_person_changelist')
        # 4 queries are expected: 1 for the session, 1 for the user,
        # 1 for the count and 1 for the objects on the page
        with self.assertNumQueries(4):
            resp = self.client.get(changelist_url)
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        # here one more count(*) query will run, because filters were applied
        with self.assertNumQueries(5):
            extra = {'q': 'not_in_name'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
        # A search matching both rows: same query count, both selected-able.
        with self.assertNumQueries(5):
            extra = {'q': 'person'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        # A field filter behaves like a search for query-count purposes.
        with self.assertNumQueries(5):
            extra = {'gender__exact': '1'}
            resp = self.client.get(changelist_url, extra)
            self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
            self.assertEqual(resp.context['selection_note_all'], '1 selected')
def test_change_view(self):
for i in self.pks:
response = self.client.get(reverse('admin:admin_views_emptymodel_change', args=(i,)))
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertEqual(response.status_code, 404)
    def test_add_model_modeladmin_defer_qs(self):
        """
        Adding objects through the admin works and produces a readable
        success message when ModelAdmin.get_queryset() uses defer().
        """
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()
        # model has __unicode__ method
        self.assertEqual(CoverLetter.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "author": "Candidate, Best",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_coverletter_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The cover letter "Candidate, Best" was added successfully.</li>',
            html=True
        )
        # model has no __unicode__ method
        self.assertEqual(ShortMessage.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "content": "What's this SMS thing?",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_shortmessage_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The short message "ShortMessage object" was added successfully.</li>',
            html=True
        )
    def test_add_model_modeladmin_only_qs(self):
        """
        Adding objects through the admin works and produces a readable
        success message when ModelAdmin.get_queryset() uses only().
        """
        # Test for #14529. only() is used in ModelAdmin.get_queryset()
        # model has __unicode__ method
        self.assertEqual(Telegram.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "Urgent telegram",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_telegram_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Telegram.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The telegram "Urgent telegram" was added successfully.</li>',
            html=True
        )
        # model has no __unicode__ method
        self.assertEqual(Paper.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "My Modified Paper Title",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_paper_add'),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Paper.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The paper "Paper object" was added successfully.</li>',
            html=True
        )
    def test_edit_model_modeladmin_defer_qs(self):
        """
        Editing objects through the admin works and produces a readable
        success message when ModelAdmin.get_queryset() uses defer().
        """
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()
        # model has __unicode__ method
        cl = CoverLetter.objects.create(author="John Doe")
        self.assertEqual(CoverLetter.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "author": "John Doe II",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name. Instance
        # representation is set by model's __unicode__()
        self.assertContains(
            response,
            '<li class="success">The cover letter "John Doe II" was changed successfully.</li>',
            html=True
        )
        # model has no __unicode__ method
        sm = ShortMessage.objects.create(content="This is expensive")
        self.assertEqual(ShortMessage.objects.count(), 1)
        response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "content": "Too expensive",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)),
                                    post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The ugly(!)
        # instance representation is set by six.text_type()
        self.assertContains(
            response,
            '<li class="success">The short message "ShortMessage_Deferred_timestamp object" was changed successfully.</li>',
            html=True
        )
def test_edit_model_modeladmin_only_qs(self):
    """Regression test for #14529: ModelAdmin.get_queryset() uses only()."""
    # Case 1: the model defines __unicode__.
    # The misspelling in the title is deliberate; it gets fixed below.
    telegram = Telegram.objects.create(title="Frist Telegram")
    self.assertEqual(Telegram.objects.count(), 1)
    change_url = reverse('admin:admin_views_telegram_change', args=(telegram.pk,))
    resp = self.client.get(change_url)
    self.assertEqual(resp.status_code, 200)
    # Simulate editing the instance through the admin change form.
    resp = self.client.post(
        change_url,
        {"title": "Telegram without typo", "_save": "Save"},
        follow=True,
    )
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(Telegram.objects.count(), 1)
    # The success message should use the non-ugly verbose name; the
    # instance representation comes from the model's __unicode__().
    self.assertContains(
        resp,
        '<li class="success">The telegram "Telegram without typo" was changed successfully.</li>',
        html=True
    )
    # Case 2: the model has no __unicode__ method.
    paper = Paper.objects.create(title="My Paper Title")
    self.assertEqual(Paper.objects.count(), 1)
    change_url = reverse('admin:admin_views_paper_change', args=(paper.pk,))
    resp = self.client.get(change_url)
    self.assertEqual(resp.status_code, 200)
    # Simulate editing the instance through the admin change form.
    resp = self.client.post(
        change_url,
        {"title": "My Modified Paper Title", "_save": "Save"},
        follow=True,
    )
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(Paper.objects.count(), 1)
    # The (ugly!) instance representation comes from six.text_type().
    self.assertContains(
        resp,
        '<li class="success">The paper "Paper_Deferred_author object" was changed successfully.</li>',
        html=True
    )
def test_history_view_custom_qs(self):
    """
    Custom querysets are taken into account for the admin history view.
    Refs #21013.
    """
    self.client.post(reverse('admin:login'), self.super_login)
    for pk in (1, 2):
        FilteredManager.objects.create(pk=pk)
    # Both objects must be visible on the changelist ...
    changelist = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
    self.assertContains(changelist, "PK=1")
    self.assertContains(changelist, "PK=2")
    # ... and each one must have a reachable history page.
    for pk in (1, 2):
        history = self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(pk,)))
        self.assertEqual(history.status_code, 200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
    """Inline formsets containing file uploads on the admin change view."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, username='super',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, is_superuser=True,
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10),
        )

    def setUp(self):
        self.client.login(username='super', password='secret')
        # The Gallery/Picture pair is built here rather than in fixtures so
        # that Picture can reference a NamedTemporaryFile path.
        tmp = tempfile.NamedTemporaryFile(suffix=".file1")
        tmp.write(b'a' * (2 ** 21))
        image_path = tmp.name
        tmp.close()
        self.gallery = Gallery(name="Test Gallery")
        self.gallery.save()
        self.picture = Picture(name="Test Picture", image=image_path, gallery=self.gallery)
        self.picture.save()

    def test_inline_file_upload_edit_validation_error_post(self):
        """Inline file uploads correctly display prior data (#10002)."""
        data = {
            "name": "Test Gallery",
            "pictures-TOTAL_FORMS": "2",
            "pictures-INITIAL_FORMS": "1",
            "pictures-MAX_NUM_FORMS": "0",
            "pictures-0-id": six.text_type(self.picture.id),
            "pictures-0-gallery": six.text_type(self.gallery.id),
            "pictures-0-name": "Test Picture",
            "pictures-0-image": "",
            "pictures-1-id": "",
            "pictures-1-gallery": str(self.gallery.id),
            "pictures-1-name": "Test Picture 2",
            "pictures-1-image": "",
        }
        change_url = reverse('admin:admin_views_gallery_change', args=(self.gallery.id,))
        response = self.client.post(change_url, data)
        self.assertContains(response, b"Currently")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
    """
    Saving of inline formsets through the admin change view, covering a
    variety of primary-key configurations (simple, explicit autofield,
    character, integer, inherited) plus an inline with an editable
    ordering field.
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture; setUp() logs in with password 'secret'.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        # Baseline management-form payload: three empty extra forms for each
        # inline formset on the Collector change view.  Individual tests fill
        # in only the fields they exercise before posting.
        self.post_data = {
            "name": "Test Name",
            # widget_set: inline whose pk is posted via the "-id" field.
            "widget_set-TOTAL_FORMS": "3",
            "widget_set-INITIAL_FORMS": "0",
            "widget_set-MAX_NUM_FORMS": "0",
            "widget_set-0-id": "",
            "widget_set-0-owner": "1",
            "widget_set-0-name": "",
            "widget_set-1-id": "",
            "widget_set-1-owner": "1",
            "widget_set-1-name": "",
            "widget_set-2-id": "",
            "widget_set-2-owner": "1",
            "widget_set-2-name": "",
            # doohickey_set: model with a character PK ("code"); see
            # test_char_pk_inline.
            "doohickey_set-TOTAL_FORMS": "3",
            "doohickey_set-INITIAL_FORMS": "0",
            "doohickey_set-MAX_NUM_FORMS": "0",
            "doohickey_set-0-owner": "1",
            "doohickey_set-0-code": "",
            "doohickey_set-0-name": "",
            "doohickey_set-1-owner": "1",
            "doohickey_set-1-code": "",
            "doohickey_set-1-name": "",
            "doohickey_set-2-owner": "1",
            "doohickey_set-2-code": "",
            "doohickey_set-2-name": "",
            # grommet_set: model with an explicit autofield PK ("code"); see
            # test_explicit_autofield_inline.
            "grommet_set-TOTAL_FORMS": "3",
            "grommet_set-INITIAL_FORMS": "0",
            "grommet_set-MAX_NUM_FORMS": "0",
            "grommet_set-0-code": "",
            "grommet_set-0-owner": "1",
            "grommet_set-0-name": "",
            "grommet_set-1-code": "",
            "grommet_set-1-owner": "1",
            "grommet_set-1-name": "",
            "grommet_set-2-code": "",
            "grommet_set-2-owner": "1",
            "grommet_set-2-name": "",
            # whatsit_set: model with an integer PK ("index"); see
            # test_integer_pk_inline.
            "whatsit_set-TOTAL_FORMS": "3",
            "whatsit_set-INITIAL_FORMS": "0",
            "whatsit_set-MAX_NUM_FORMS": "0",
            "whatsit_set-0-owner": "1",
            "whatsit_set-0-index": "",
            "whatsit_set-0-name": "",
            "whatsit_set-1-owner": "1",
            "whatsit_set-1-index": "",
            "whatsit_set-1-name": "",
            "whatsit_set-2-owner": "1",
            "whatsit_set-2-index": "",
            "whatsit_set-2-name": "",
            # fancydoodad_set: inherited model whose pk is the parent link
            # ("doodad_ptr"); see test_inherited_inline.
            "fancydoodad_set-TOTAL_FORMS": "3",
            "fancydoodad_set-INITIAL_FORMS": "0",
            "fancydoodad_set-MAX_NUM_FORMS": "0",
            "fancydoodad_set-0-doodad_ptr": "",
            "fancydoodad_set-0-owner": "1",
            "fancydoodad_set-0-name": "",
            "fancydoodad_set-0-expensive": "on",
            "fancydoodad_set-1-doodad_ptr": "",
            "fancydoodad_set-1-owner": "1",
            "fancydoodad_set-1-name": "",
            "fancydoodad_set-1-expensive": "on",
            "fancydoodad_set-2-doodad_ptr": "",
            "fancydoodad_set-2-owner": "1",
            "fancydoodad_set-2-name": "",
            "fancydoodad_set-2-expensive": "on",
            # category_set: inline with an editable "order" field; see
            # test_ordered_inline.
            "category_set-TOTAL_FORMS": "3",
            "category_set-INITIAL_FORMS": "0",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "",
            "category_set-0-id": "",
            "category_set-0-collector": "1",
            "category_set-1-order": "",
            "category_set-1-id": "",
            "category_set-1-collector": "1",
            "category_set-2-order": "",
            "category_set-2-id": "",
            "category_set-2-collector": "1",
        }
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)
        self.collector = Collector(pk=1, name='John Fowles')
        self.collector.save()

    def test_simple_inline(self):
        "A simple model can be saved as inlines"
        # First add a new inline
        self.post_data['widget_set-0-name'] = "Widget 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        # 302: a successful save redirects.
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        widget_id = Widget.objects.all()[0].id
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="widget_set-0-id"')
        # Now resave that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        # Now modify that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")

    def test_explicit_autofield_inline(self):
        "A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
        # First add a new inline
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="grommet_set-0-code"')
        # Now resave that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Now modify that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")

    def test_char_pk_inline(self):
        "A model with a character PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="doohickey_set-0-code"')
        # Now resave that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Now modify that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")

    def test_integer_pk_inline(self):
        "A model with an integer PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="whatsit_set-0-index"')
        # Now resave that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Now modify that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")

    def test_inherited_inline(self):
        "An inherited model can be saved as inlines. Regression for #11042"
        # First add a new inline
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        doodad_pk = FancyDoodad.objects.all()[0].pk
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
        # Now resave that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        # Now modify that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")

    def test_ordered_inline(self):
        """Check that an inline with an editable ordering fields is
        updated correctly. Regression for #10922"""
        # Create some objects with an initial ordering
        Category.objects.create(id=1, order=1, collector=self.collector)
        Category.objects.create(id=2, order=2, collector=self.collector)
        Category.objects.create(id=3, order=0, collector=self.collector)
        Category.objects.create(id=4, order=0, collector=self.collector)
        # NB: The order values must be changed so that the items are reordered.
        self.post_data.update({
            "name": "Frederick Clegg",
            "category_set-TOTAL_FORMS": "7",
            "category_set-INITIAL_FORMS": "4",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "14",
            "category_set-0-id": "1",
            "category_set-0-collector": "1",
            "category_set-1-order": "13",
            "category_set-1-id": "2",
            "category_set-1-collector": "1",
            "category_set-2-order": "1",
            "category_set-2-id": "3",
            "category_set-2-collector": "1",
            "category_set-3-order": "0",
            "category_set-3-id": "4",
            "category_set-3-collector": "1",
            "category_set-4-order": "",
            "category_set-4-id": "",
            "category_set-4-collector": "1",
            "category_set-5-order": "",
            "category_set-5-id": "",
            "category_set-5-collector": "1",
            "category_set-6-order": "",
            "category_set-6-id": "",
            "category_set-6-collector": "1",
        })
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # Check that the order values have been applied to the right objects
        self.assertEqual(self.collector.category_set.count(), 4)
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
    """Never-cache (max-age) behavior of the various admin views."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, username='super',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, is_superuser=True,
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10),
        )
        cls.s1 = Section.objects.create(name='Test section')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def assert_max_age(self, url, expected):
        # Fetch *url* and compare the response's max-age value against
        # *expected* (0 for never-cached views, None when no max-age is set).
        self.assertEqual(get_max_age(self.client.get(url)), expected)

    def test_admin_index(self):
        "Check the never-cache status of the main index"
        self.assert_max_age(reverse('admin:index'), 0)

    def test_app_index(self):
        "Check the never-cache status of an application index"
        self.assert_max_age(reverse('admin:app_list', args=('admin_views',)), 0)

    def test_model_index(self):
        "Check the never-cache status of a model index"
        self.assert_max_age(reverse('admin:admin_views_fabric_changelist'), 0)

    def test_model_add(self):
        "Check the never-cache status of a model add page"
        self.assert_max_age(reverse('admin:admin_views_fabric_add'), 0)

    def test_model_view(self):
        "Check the never-cache status of a model edit page"
        self.assert_max_age(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), 0)

    def test_model_history(self):
        "Check the never-cache status of a model history page"
        self.assert_max_age(reverse('admin:admin_views_section_history', args=(self.s1.pk,)), 0)

    def test_model_delete(self):
        "Check the never-cache status of a model delete page"
        self.assert_max_age(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)), 0)

    def test_login(self):
        "Check the never-cache status of login views"
        self.client.logout()
        self.assert_max_age(reverse('admin:index'), 0)

    def test_logout(self):
        "Check the never-cache status of logout view"
        self.assert_max_age(reverse('admin:logout'), 0)

    def test_password_change(self):
        "Check the never-cache status of the password change view"
        self.client.logout()
        self.assert_max_age(reverse('admin:password_change'), None)

    def test_password_change_done(self):
        "Check the never-cache status of the password change done view"
        self.assert_max_age(reverse('admin:password_change_done'), None)

    def test_JS_i18n(self):
        "Check the never-cache status of the JavaScript i18n view"
        self.assert_max_age(reverse('admin:jsi18n'), None)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
    """Rendering of prepopulated-field JavaScript on admin forms."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, username='super',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, is_superuser=True,
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10),
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_prepopulated_on(self):
        # The add form must ship the prepopulation JavaScript for the main
        # slug and the inline sub-slug.
        resp = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
        self.assertEqual(resp.status_code, 200)
        for fragment in (
            "id: '#id_slug',",
            "field['dependency_ids'].push('#id_title');",
            "id: '#id_prepopulatedsubpost_set-0-subslug',",
        ):
            self.assertContains(resp, fragment)

    def test_prepopulated_off(self):
        # On the change form of an existing post, none of the prepopulation
        # JavaScript should be emitted.
        resp = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
        self.assertEqual(resp.status_code, 200)
        self.assertContains(resp, "A Long Title")
        for fragment in (
            "id: '#id_slug'",
            "field['dependency_ids'].push('#id_title');",
            "id: '#id_prepopulatedsubpost_set-0-subslug',",
        ):
            self.assertNotContains(resp, fragment)

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_prepopulated_maxlength_localized(self):
        """
        Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
        that maxLength (in the JavaScript) is rendered without separators.
        """
        resp = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
        self.assertContains(resp, "maxLength: 1000")  # not 1,000
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class SeleniumAdminViewsFirefoxTests(AdminSeleniumWebDriverTestCase):
    """
    Browser-driven (Selenium) tests of admin JavaScript behavior: slug
    prepopulation, collapsible fieldsets, first-field focus and delete
    confirmation cancellation.  Subclasses re-run the suite with other
    webdrivers.
    """
    available_apps = ['admin_views'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def setUp(self):
        # Superuser used by admin_login() and a post with its slug already
        # filled in.
        self.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def test_prepopulated_fields(self):
        """
        Ensure that the JavaScript-automated prepopulated fields work with the
        main form and with stacked and tabular inlines.
        Refs #13068, #9264, #9983, #9784.
        """
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_views_mainprepopulated_add')))
        # Main form ----------------------------------------------------------
        # The mixed-case/accented names below verify that the slugs are
        # lowercased, transliterated and stop-word-stripped by the JS.
        self.selenium.find_element_by_css_selector('#id_pubdate').send_keys('2012-02-18')
        self.get_select_option('#id_status', 'option two').click()
        self.selenium.find_element_by_css_selector('#id_name').send_keys(' this is the mAin nÀMë and it\'s awεšome')
        slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-and-its-awesome-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-and-its-awesome')
        # Stacked inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
        self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-name').send_keys(' here is a sŤāÇkeð inline ! ')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
        self.assertEqual(slug2, 'option-one-here-stacked-inline')
        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
        self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-name').send_keys(' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug2').get_attribute('value')
        self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo')  # 50 characters maximum for slug1 field
        self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo')  # 60 characters maximum for slug2 field
        # Tabular inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
        self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-name').send_keys('And now, with a tÃbűlaŘ inline !!!')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
        self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
        self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-name').send_keys('a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
        self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
        self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
        # Save and check that everything is properly stored in the database
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.assertEqual(MainPrepopulated.objects.all().count(), 1)
        # Each .get() below raises if no (or more than one) matching row exists.
        MainPrepopulated.objects.get(
            name=' this is the mAin nÀMë and it\'s awεšome',
            pubdate='2012-02-18',
            status='option two',
            slug1='main-name-and-its-awesome-2012-02-18',
            slug2='option-two-main-name-and-its-awesome',
        )
        self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
        RelatedPrepopulated.objects.get(
            name=' here is a sŤāÇkeð inline ! ',
            pubdate='2011-12-17',
            status='option one',
            slug1='here-stacked-inline-2011-12-17',
            slug2='option-one-here-stacked-inline',
        )
        RelatedPrepopulated.objects.get(
            name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo', # 75 characters in name field
            pubdate='1999-01-25',
            status='option two',
            slug1='now-you-have-another-stacked-inline-very-loooooooo',
            slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
        )
        RelatedPrepopulated.objects.get(
            name='And now, with a tÃbűlaŘ inline !!!',
            pubdate='1234-12-07',
            status='option two',
            slug1='and-now-tabular-inline-1234-12-07',
            slug2='option-two-and-now-tabular-inline',
        )
        RelatedPrepopulated.objects.get(
            name='a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
            pubdate='1981-08-22',
            status='option one',
            slug1='tabular-inline-ignored-characters-1981-08-22',
            slug2='option-one-tabular-inline-ignored-characters',
        )

    def test_populate_existing_object(self):
        """
        Ensure that the prepopulation works for existing objects too, as long
        as the original field is empty.
        Refs #19082.
        """
        # Slugs are empty to start with.
        item = MainPrepopulated.objects.create(
            name=' this is the mAin nÀMë',
            pubdate='2012-02-18',
            status='option two',
            slug1='',
            slug2='',
        )
        self.admin_login(username='super',
                         password='secret',
                         login_url=reverse('admin:index'))
        object_url = '%s%s' % (
            self.live_server_url,
            reverse('admin:admin_views_mainprepopulated_change', args=(item.id,)))
        self.selenium.get(object_url)
        self.selenium.find_element_by_css_selector('#id_name').send_keys(' the best')
        # The slugs got prepopulated since they were originally empty
        slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-best')
        # Save the object
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.selenium.get(object_url)
        self.selenium.find_element_by_css_selector('#id_name').send_keys(' hello')
        # The slugs didn't change since they were no longer empty after the save.
        slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-best')

    def test_collapsible_fieldset(self):
        """
        Test that the 'collapse' class in fieldsets definition allows to
        show/hide the appropriate field section.
        """
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_views_article_add')))
        # Hidden until the "Show" link is clicked; then the link reads "Hide".
        self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
        self.selenium.find_elements_by_link_text('Show')[0].click()
        self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
        self.assertEqual(
            self.selenium.find_element_by_id('fieldsetcollapser0').text,
            "Hide"
        )

    def test_first_field_focus(self):
        """JavaScript-assisted auto-focus on first usable form field."""
        # First form field has a single widget
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_views_picture_add')))
        self.assertEqual(
            self.selenium.switch_to.active_element,
            self.selenium.find_element_by_id('id_name')
        )
        # First form field has a MultiWidget
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_views_reservation_add')))
        self.assertEqual(
            self.selenium.switch_to.active_element,
            self.selenium.find_element_by_id('id_start_date_0')
        )

    def test_cancel_delete_confirmation(self):
        "Cancelling the deletion of an object takes the user back one page."
        pizza = Pizza.objects.create(name="Double Cheese")
        url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
        full_url = '%s%s' % (self.live_server_url, url)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(full_url)
        self.selenium.find_element_by_class_name('deletelink').click()
        self.selenium.find_element_by_class_name('cancel-link').click()
        self.wait_page_loaded()
        # Back on the change page, with the object still present.
        self.assertEqual(self.selenium.current_url, full_url)
        self.assertEqual(Pizza.objects.count(), 1)

    def test_cancel_delete_related_confirmation(self):
        """
        Cancelling the deletion of an object with relations takes the user back
        one page.
        """
        pizza = Pizza.objects.create(name="Double Cheese")
        topping1 = Topping.objects.create(name="Cheddar")
        topping2 = Topping.objects.create(name="Mozzarella")
        pizza.toppings.add(topping1, topping2)
        url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
        full_url = '%s%s' % (self.live_server_url, url)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(full_url)
        self.selenium.find_element_by_class_name('deletelink').click()
        self.selenium.find_element_by_class_name('cancel-link').click()
        self.wait_page_loaded()
        # Back on the change page; neither the pizza nor its toppings deleted.
        self.assertEqual(self.selenium.current_url, full_url)
        self.assertEqual(Pizza.objects.count(), 1)
        self.assertEqual(Topping.objects.count(), 2)
class SeleniumAdminViewsChromeTests(SeleniumAdminViewsFirefoxTests):
    # Re-run the entire Firefox selenium suite against Chrome by swapping
    # only the webdriver implementation.
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumAdminViewsIETests(SeleniumAdminViewsFirefoxTests):
    # Re-run the entire Firefox selenium suite against Internet Explorer.
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(TestCase):
    """
    Tests for read-only fields and inlines in the admin add/change views.
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture shared by every test in this class.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_readonly_get(self):
        response = self.client.get(reverse('admin:admin_views_post_add'))
        self.assertEqual(response.status_code, 200)
        # A read-only field must not render an editable widget.
        self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields, + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        self.assertContains(response, "<input", count=15)
        self.assertContains(response, formats.localize(datetime.date.today()))
        self.assertContains(response,
            "<label>Awesomeness level:</label>")
        self.assertContains(response, "Very awesome.")
        self.assertContains(response, "Unknown coolness.")
        self.assertContains(response, "foo")
        # Checks that multiline text in a readonly field gets <br /> tags
        self.assertContains(response, "Multiline<br />test<br />string")
        self.assertContains(response, "<p>Multiline<br />html<br />content</p>", html=True)
        self.assertContains(response, "InlineMultiline<br />test<br />string")
        self.assertContains(response,
            formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
        # Each field row carries a "field-<name>" CSS class hook.
        self.assertContains(response, '<div class="form-row field-coolness">')
        self.assertContains(response, '<div class="form-row field-awesomeness_level">')
        self.assertContains(response, '<div class="form-row field-posted">')
        self.assertContains(response, '<div class="form-row field-value">')
        self.assertContains(response, '<div class="form-row">')
        self.assertContains(response, '<p class="help">', 3)
        self.assertContains(response, '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>', html=True)
        self.assertContains(response, '<p class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</p>', html=True)
        self.assertContains(response, '<p class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</p>', html=True)
        p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        self.assertContains(response, "%d amount of cool" % p.pk)

    def test_readonly_post(self):
        data = {
            "title": "Django Got Readonly Fields",
            "content": "This is an incredible development.",
            "link_set-TOTAL_FORMS": "1",
            "link_set-INITIAL_FORMS": "0",
            "link_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 1)
        p = Post.objects.get()
        self.assertEqual(p.posted, datetime.date.today())
        # A POSTed value for a read-only field must be ignored by the admin.
        data["posted"] = "10-8-1990"  # some date that's not today
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 2)
        p = Post.objects.order_by('-id')[0]
        self.assertEqual(p.posted, datetime.date.today())

    def test_readonly_manytomany(self):
        "Regression test for #13004"
        response = self.client.get(reverse('admin:admin_views_pizza_add'))
        self.assertEqual(response.status_code, 200)

    def test_user_password_change_limited_queryset(self):
        # admin2 restricts the User queryset; password change for an excluded
        # user must 404 rather than expose the form.
        su = User.objects.filter(is_superuser=True)[0]
        response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
        self.assertEqual(response.status_code, 404)

    def test_change_form_renders_correct_null_choice_value(self):
        """
        Regression test for #17911.
        """
        choice = Choice.objects.create(choice=None)
        response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
        self.assertContains(response, '<p>No opinion</p>', html=True)
        self.assertNotContains(response, '<p>(None)</p>')

    def test_readonly_backwards_ref(self):
        """
        Regression test for #16433 - backwards references for related objects
        broke if the related field is read-only due to the help_text attribute
        """
        topping = Topping.objects.create(name='Salami')
        pizza = Pizza.objects.create(name='Americano')
        pizza.toppings.add(topping)
        response = self.client.get(reverse('admin:admin_views_topping_add'))
        self.assertEqual(response.status_code, 200)

    def test_readonly_field_overrides(self):
        """
        Regression test for #22087 - ModelForm Meta overrides are ignored by
        AdminReadonlyField
        """
        p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
        response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<p class="help">Overridden help text for the date</p>')
        self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
        self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")

    def test_correct_autoescaping(self):
        """
        Make sure that non-field readonly elements are properly autoescaped (#24461)
        """
        section = Section.objects.create(name='<a>evil</a>')
        response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
        # The raw markup must never reach the rendered page; only its
        # HTML-escaped form may appear. (The previous assertion checked for
        # the same raw string it had just asserted was absent, so it could
        # never pass.)
        self.assertNotContains(response, "<a>evil</a>", status_code=200)
        self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
    """Callable limit_choices_to must filter the choices an admin form offers."""

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture shared by every test in this class.
        cls.u1 = User.objects.create(
            id=100,
            username='super',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super',
            last_name='User',
            email='super@example.com',
            is_staff=True,
            is_active=True,
            is_superuser=True,
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10),
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_limit_choices_to_as_callable(self):
        """Test for ticket 2445 changes to admin."""
        # One character last acted in the future (kept by the callable
        # limit_choices_to) and one in the past (filtered out).
        future_char = Character.objects.create(
            username='threepwood',
            last_action=datetime.datetime.today() + datetime.timedelta(days=1),
        )
        past_char = Character.objects.create(
            username='marley',
            last_action=datetime.datetime.today() - datetime.timedelta(days=1),
        )
        response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
        # The allowed option should appear twice; the limited option should not appear.
        self.assertContains(response, future_char.username, count=2)
        self.assertNotContains(response, past_char.username)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
    """
    Tests that limit_choices_to is carried through the raw_id_fields lookup
    popup's query string.
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture shared by every test in this class.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def _get_lookup_popup_url(self, response, link_pattern):
        """
        Find the raw-id lookup link matched by *link_pattern* (a bytes regex
        with the href in group 1) and return its target as an absolute URL.
        """
        m = re.search(link_pattern, response.content)
        self.assertTrue(m)  # Got a match
        # The href is HTML-escaped in the page source, so unescape the
        # ampersands before reusing the URL with the test client. (The
        # previous code did `.replace("&", "&")`, a no-op that left
        # "&amp;" separators in multi-parameter query strings.)
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        return urljoin(response.request['PATH_INFO'], popup_url)

    def test_limit_choices_to(self):
        """Regression test for 14880"""
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True,
                                   leader=actor,
                                   country="England")
        Inquisition.objects.create(expected=False,
                                   leader=actor,
                                   country="Spain")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        popup_url = self._get_lookup_popup_url(
            response, br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"')
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step also tests integers, strings and booleans in the
        # lookup query string; in model we define inquisition field to have a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        popup_url = self._get_lookup_popup_url(
            response, br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"')
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=0 gets parsed correctly from the
        # lookup query string; in model we define defendant0 field to have a
        # limit_choices_to option that includes "actor__title__isnull=False".
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        popup_url = self._get_lookup_popup_url(
            response, br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"')
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=1 gets parsed correctly from the
        # lookup query string; in model we define defendant1 field to have a
        # limit_choices_to option that includes "actor__title__isnull=True".
        response2 = self.client.get(popup_url)
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
    """

    @classmethod
    def setUpTestData(cls):
        # Fixture users: one superuser to log in with, plus assorted staff
        # and non-staff accounts exercised by the tests.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # Supporting content fixtures used by some admin pages.
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_save_button(self):
        # Plain "Save" on the user add form redirects to the change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_save_continue_editing_button(self):
        # "Save and continue editing" also lands on the change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_continue': '1',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_password_mismatch(self):
        # Mismatched passwords must re-render the form with the error
        # attached to password2, not to a generic "password" field.
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'mismatch',
        })
        self.assertEqual(response.status_code, 200)
        adminform = response.context['adminform']
        self.assertNotIn('password', adminform.form.errors)
        self.assertEqual(adminform.form.errors['password2'],
                         ["The two password fields didn't match."])

    def test_user_fk_add_popup(self):
        """User addition through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_add'))
        self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
        response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1')
        self.assertEqual(response.status_code, 200)
        # Popup forms hide the continue/add-another buttons.
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddRelatedObjectPopup')

    def test_user_fk_change_popup(self):
        """User change through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # Popup forms hide the continue/add-another buttons.
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissChangeRelatedObjectPopup')

    def test_user_fk_delete_popup(self):
        """User deletion through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'post': 'yes',
            '_popup': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissDeleteRelatedObjectPopup')

    def test_save_add_another_button(self):
        # "Save and add another" redirects back to the empty add form.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_addanother': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_add'))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_user_permission_performance(self):
        u = User.objects.all()[0]
        # Don't depend on a warm cache, see #17377.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(10):
            response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
            self.assertEqual(response.status_code, 200)

    def test_form_url_present_in_context(self):
        # admin3 customizes form_url for the password-change view; the value
        # must survive into the template context.
        u = User.objects.all()[0]
        response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['form_url'], 'pony')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
    """
    Tests group CRUD functionality.
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture used by every test in this class.
        cls.u1 = User.objects.create(
            id=100,
            username='super',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super',
            last_name='User',
            email='super@example.com',
            is_staff=True,
            is_active=True,
            is_superuser=True,
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10),
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_save_button(self):
        groups_before = Group.objects.count()
        response = self.client.post(reverse('admin:auth_group_add'), {'name': 'newgroup'})
        # Touch the newest group row to make sure it was actually created.
        Group.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_group_changelist'))
        self.assertEqual(Group.objects.count(), groups_before + 1)

    def test_group_permission_performance(self):
        group = Group.objects.create(name="test_group")
        # Ensure no queries are skipped due to cached content type for Group.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(8):
            response = self.client.get(reverse('admin:auth_group_change', args=(group.pk,)))
            self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
    """
    Checks for the CSS class-name hooks the admin templates are expected to
    emit (field rows, index tables, body tags, changelist cells).
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser to log in with plus content fixtures for the pages below.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_field_prefix_css_classes(self):
        """
        Ensure that fields have a CSS class name with a 'field-' prefix.
        Refs #16371.
        """
        response = self.client.get(reverse('admin:admin_views_post_add'))
        # The main form
        self.assertContains(response, 'class="form-row field-title"')
        self.assertContains(response, 'class="form-row field-content"')
        self.assertContains(response, 'class="form-row field-public"')
        self.assertContains(response, 'class="form-row field-awesomeness_level"')
        self.assertContains(response, 'class="form-row field-coolness"')
        self.assertContains(response, 'class="form-row field-value"')
        self.assertContains(response, 'class="form-row"')  # The lambda function
        # The tabular inline
        self.assertContains(response, '<td class="field-url">')
        self.assertContains(response, '<td class="field-posted">')

    def test_index_css_classes(self):
        """
        Ensure that CSS class names are used for each app and model on the
        admin index pages.
        Refs #17050.
        """
        # General index page
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
        # App index page
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')

    def test_app_model_in_form_body_class(self):
        """
        Ensure app and model tag are correctly read by change_form template
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
            '<body class=" app-admin_views model-section ')

    def test_app_model_in_list_body_class(self):
        """
        Ensure app and model tag are correctly read by change_list template
        """
        response = self.client.get(reverse('admin:admin_views_section_changelist'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
            '<body class=" app-admin_views model-section ')

    def test_app_model_in_delete_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by delete_confirmation
        template
        """
        response = self.client.get(
            reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
            '<body class=" app-admin_views model-section ')

    def test_app_model_in_app_index_body_class(self):
        """
        Ensure app and model tag are correctly read by app_index template
        """
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<body class=" dashboard app-admin_views')

    def test_app_model_in_delete_selected_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by
        delete_selected_confirmation template
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_section_changelist'),
                                    action_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
            '<body class=" app-admin_views model-section ')

    def test_changelist_field_classes(self):
        """
        Cells of the change list table should contain the field name in their class attribute
        Refs #11195.
        """
        Podcast.objects.create(name="Django Dose",
                               release_date=datetime.date.today())
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
        self.assertContains(
            response, '<th class="field-name">')
        self.assertContains(
            response, '<td class="field-release_date nowrap">')
        self.assertContains(
            response, '<td class="action-checkbox">')
# docutils is an optional dependency: when it's missing, the admindocs tests
# below are skipped via @unittest.skipUnless(docutils, ...).
try:
    import docutils
except ImportError:
    docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
    """
    Tests the admindocs tag/filter reference pages (requires docutils).
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture shared by every test in this class.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_tags(self):
        response = self.client.get(reverse('django-admindocs-tags'))
        # The builtin tag group exists
        self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
        # A builtin tag exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
        # An app tag exists in both the index and detail
        self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
        self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
        # The admin list tag group exists
        self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
        # An admin list tag exists in both the index and detail
        self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
        self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)

    def test_filters(self):
        response = self.client.get(reverse('django-admindocs-filters'))
        # The builtin filter group exists
        self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
        # A builtin filter exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF="admin_views.urls",
    TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }],
    # USE_I18N=False exercises the case where no language is active.
    USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
    """
    With i18n disabled, the admin must not emit empty lang attributes.
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture shared by every test in this class.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_lang_name_present(self):
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertNotContains(response, ' lang=""')
        self.assertNotContains(response, ' xml:lang=""')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls",
USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
formats.reset_format_cache()
def assert_non_localized_year(self, response, year):
"""Ensure that the year is not localized with
USE_THOUSAND_SEPARATOR. Refs #15234.
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % (date.year,))
def assert_contains_month_link(self, response, date):
self.assertContains(
response, '?release_date__month=%d&release_date__year=%d"' % (
date.month, date.year))
def assert_contains_day_link(self, response, date):
self.assertContains(
response, '?release_date__day=%d&'
'release_date__month=%d&release_date__year=%d"' % (
date.day, date.month, date.year))
def test_empty(self):
"""
Ensure that no date hierarchy links display with empty changelist.
"""
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
self.assertNotContains(response, 'release_date__year=')
self.assertNotContains(response, 'release_date__month=')
self.assertNotContains(response, 'release_date__day=')
def test_single(self):
"""
Ensure that single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
Ensure that day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
Ensure that month-level links appear for changelist within single year.
"""
DATES = (datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, 'release_date__day=')
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
    """
    Ensure that year-level links appear for year-spanning changelist.
    """
    DATES = (datetime.date(2001, 1, 30),
             datetime.date(2003, 3, 15),
             datetime.date(2005, 5, 3))
    for date in DATES:
        Podcast.objects.create(release_date=date)
    response = self.client.get(
        reverse('admin:admin_views_podcast_changelist'))
    # no day/month-level links
    self.assertNotContains(response, 'release_date__day=')
    self.assertNotContains(response, 'release_date__month=')
    for date in DATES:
        self.assert_contains_year_link(response, date)
    # and make sure GET parameters still behave correctly
    for date in DATES:
        # Filtering by year should drill down to month-level links.
        url = '%s?release_date__year=%d' % (
            reverse('admin:admin_views_podcast_changelist'),
            date.year)
        response = self.client.get(url)
        self.assert_contains_month_link(response, date)
        self.assert_non_localized_year(response, 2000)
        self.assert_non_localized_year(response, 2003)
        self.assert_non_localized_year(response, 2005)
        # Filtering by year+month should drill down to day-level links.
        url = '%s?release_date__year=%d&release_date__month=%d' % (
            reverse('admin:admin_views_podcast_changelist'),
            date.year, date.month)
        response = self.client.get(url)
        self.assert_contains_day_link(response, date)
        self.assert_non_localized_year(response, 2000)
        self.assert_non_localized_year(response, 2003)
        self.assert_non_localized_year(response, 2005)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
    """
    Ensure that one can easily customize the way related objects are saved.
    Refs #16115.
    """
    @classmethod
    def setUpTestData(cls):
        # Superuser used by setUp() to authenticate against the admin.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_should_be_able_to_edit_related_objects_on_add_view(self):
        # Inline formset payload: three forms allotted, two filled in.
        post = {
            'child_set-TOTAL_FORMS': '3',
            'child_set-INITIAL_FORMS': '0',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-1-name': 'Catherine',
        }
        self.client.post(reverse('admin:admin_views_parent_add'), post)
        self.assertEqual(1, Parent.objects.count())
        self.assertEqual(2, Child.objects.count())
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        # The customized save logic of the admin under test is expected to
        # turn 'Paul'/'Catherine' into 'Paul Stone'/'Catherine Stone'.
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_change_view(self):
        parent = Parent.objects.create(name='Josh Stone')
        paul = Child.objects.create(parent=parent, name='Paul')
        catherine = Child.objects.create(parent=parent, name='Catherine')
        # Two existing inline forms plus three empty extras.
        post = {
            'child_set-TOTAL_FORMS': '5',
            'child_set-INITIAL_FORMS': '2',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-0-id': paul.id,
            'child_set-1-name': 'Catherine',
            'child_set-1-id': catherine.id,
        }
        self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
        parent = Parent.objects.create(name='Josh Rock')
        Child.objects.create(parent=parent, name='Paul')
        Child.objects.create(parent=parent, name='Catherine')
        # Changelist editable-list save: rename the parent via the list form.
        post = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': parent.id,
            'form-0-name': 'Josh Stone',
            '_save': 'Save'
        }
        self.client.post(reverse('admin:admin_views_parent_changelist'), post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTest(TestCase):
    """Behavior of the admin logout view for logged-in and anonymous users."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_client_logout_url_can_be_used_to_login(self):
        # First GET while authenticated: renders the logged-out page.
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'registration/logged_out.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout'))
        # we are now logged out
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(response.status_code, 302)  # we should be redirected to the login page.
        # follow the redirect and test results.
        response = self.client.get(reverse('admin:logout'), follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
        self.assertContains(response, '<input type="hidden" name="next" value="%s" />' % reverse('admin:index'))
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
    """Messages emitted by admin actions are rendered with the right level/tags."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def send_message(self, level):
        """
        Helper that sends a post to the dummy test methods and asserts that a
        message with the level has appeared in the response.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_%s' % level,
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
                                    action_data, follow=True)
        self.assertContains(response,
                            '<li class="%s">Test %s</li>' % (level, level),
                            html=True)

    @override_settings(MESSAGE_LEVEL=10)  # Set to DEBUG for this request
    def test_message_debug(self):
        self.send_message('debug')

    def test_message_info(self):
        self.send_message('info')

    def test_message_success(self):
        self.send_message('success')

    def test_message_warning(self):
        self.send_message('warning')

    def test_message_error(self):
        self.send_message('error')

    def test_message_extra_tags(self):
        # Same flow as send_message(), but the action attaches an extra tag
        # which must appear in the rendered <li> class attribute.
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_extra_tags',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
                                    action_data, follow=True)
        self.assertContains(response,
                            '<li class="extra_tag info">Test tags</li>',
                            html=True)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
    """
    The admin must preserve changelist filters (carried in the
    ``_changelist_filters`` query parameter) across the add/change/
    history/delete views and the redirects they issue.
    """
    # Overridden with site2 by NamespacedAdminKeepChangeListFiltersTests to
    # run the same assertions against a second AdminSite.
    admin_site = site

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # u5 is the non-staff user whose change page is exercised below.
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def assertURLEqual(self, url1, url2):
        """
        Assert that two URLs are equal despite the ordering
        of their querystring. Refs #22360.
        """
        # Only path and (parsed) querystring are compared, so scheme and
        # host are deliberately ignored.
        parsed_url1 = urlparse(url1)
        path1 = parsed_url1.path
        parsed_qs1 = dict(parse_qsl(parsed_url1.query))
        parsed_url2 = urlparse(url2)
        path2 = parsed_url2.path
        parsed_qs2 = dict(parse_qsl(parsed_url2.query))
        # The _changelist_filters value is itself a querystring; parse it as
        # well so its internal parameter ordering is ignored too.
        for parsed_qs in [parsed_qs1, parsed_qs2]:
            if '_changelist_filters' in parsed_qs:
                changelist_filters = parsed_qs['_changelist_filters']
                parsed_filters = dict(parse_qsl(changelist_filters))
                parsed_qs['_changelist_filters'] = parsed_filters
        self.assertEqual(path1, path2)
        self.assertEqual(parsed_qs1, parsed_qs2)

    def test_assert_url_equal(self):
        """Self-test for the assertURLEqual() helper above."""
        # Test equality.
        change_user_url = reverse('admin:auth_user_change', args=(self.u5.pk,))
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
        )
        # Test inequality.
        with self.assertRaises(AssertionError):
            self.assertURLEqual(
                'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
                'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(change_user_url)
            )
        # Ignore scheme and host.
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
        )
        # Ignore ordering of querystring.
        self.assertURLEqual(
            '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
            '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
        )
        # Ignore ordering of _changelist_filters.
        self.assertURLEqual(
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
        )

    def get_changelist_filters(self):
        """The filter set applied to the changelist in every scenario below."""
        return {
            'is_superuser__exact': 0,
            'is_staff__exact': 0,
        }

    def get_changelist_filters_querystring(self):
        """The filters as a raw querystring (e.g. for the changelist URL)."""
        return urlencode(self.get_changelist_filters())

    def get_preserved_filters_querystring(self):
        """The filters wrapped in _changelist_filters, as detail views carry them."""
        return urlencode({
            '_changelist_filters': self.get_changelist_filters_querystring()
        })

    def get_sample_user_id(self):
        """Default user pk used by the detail-view URL helpers."""
        return self.u5.pk

    def get_changelist_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_changelist',
                    current_app=self.admin_site.name),
            self.get_changelist_filters_querystring(),
        )

    def get_add_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_add',
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_change_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_change', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_history_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_history', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_delete_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_delete', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def test_changelist_view(self):
        response = self.client.get(self.get_changelist_url())
        self.assertEqual(response.status_code, 200)
        # Check the `change_view` link has the correct querystring.
        detail_link = re.search(
            '<a href="(.*?)">{}</a>'.format(self.u5.username),
            force_text(response.content)
        )
        self.assertURLEqual(detail_link.group(1), self.get_change_url())

    def test_change_view(self):
        # Get the `change_view`.
        response = self.client.get(self.get_change_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        # Check the history link.
        history_link = re.search(
            '<a href="(.*?)" class="historylink">History</a>',
            force_text(response.content)
        )
        self.assertURLEqual(history_link.group(1), self.get_history_url())
        # Check the delete link.
        delete_link = re.search(
            '<a href="(.*?)" class="deletelink">Delete</a>',
            force_text(response.content)
        )
        self.assertURLEqual(delete_link.group(1), self.get_delete_url())
        # Test redirect on "Save".
        post_data = {
            'username': 'joepublic',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
        }
        post_data['_save'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['_continue'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url()
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['_addanother'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_add_view(self):
        # Get the `add_view`.
        response = self.client.get(self.get_add_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        post_data = {
            'username': 'dummy',
            'password1': 'test',
            'password2': 'test',
        }
        # Test redirect on "Save".
        post_data['_save'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy').pk)
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['username'] = 'dummy2'
        post_data['_continue'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy2').pk)
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['username'] = 'dummy3'
        post_data['_addanother'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_delete_view(self):
        # Test redirect on "Delete".
        response = self.client.post(self.get_delete_url(), {'post': 'yes'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )

    def test_url_prefix(self):
        # add_preserved_filters() must produce the same URL regardless of any
        # script prefix in effect.
        context = {
            'preserved_filters': self.get_preserved_filters_querystring(),
            'opts': User._meta,
        }
        url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
        self.assertURLEqual(
            self.get_changelist_url(),
            add_preserved_filters(context, url),
        )
        with override_script_prefix('/prefix/'):
            url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
            self.assertURLEqual(
                self.get_changelist_url(),
                add_preserved_filters(context, url),
            )
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
    # Re-run the whole preserved-filters suite against the second AdminSite
    # instance (site2); only the admin_site attribute differs.
    admin_site = site2
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
    """ #11277 -Labels of hidden fields in admin were not hidden. """

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_all_fields_visible(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_visible(response, 'first')
        self.assert_field_visible(response, 'second')

    def test_all_fields_hidden(self):
        response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add'))
        self.assert_fieldline_hidden(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_hidden(response, 'second')

    def test_mixin(self):
        # Mixed case: the row stays visible because one field is visible.
        response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add'))
        self.assert_fieldline_visible(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_visible(response, 'second')

    def assert_field_visible(self, response, field_name):
        """Assert the field box for field_name is rendered without 'hidden'."""
        self.assertContains(response, '<div class="field-box field-%s">' % field_name)

    def assert_field_hidden(self, response, field_name):
        """Assert the field box for field_name carries the 'hidden' class."""
        self.assertContains(response, '<div class="field-box field-%s hidden">' % field_name)

    def assert_fieldline_visible(self, response):
        """Assert the whole form row is rendered without the 'hidden' class."""
        self.assertContains(response, '<div class="form-row field-first field-second">')

    def assert_fieldline_hidden(self, response):
        """Assert the whole form row carries the 'hidden' class."""
        self.assertContains(response, '<div class="form-row hidden')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
    """Tests for the ModelAdmin.view_on_site option and related validation."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # State -> City -> Restaurant -> Worker fixture graph used by the
        # view-on-site link assertions below.
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_add_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test1",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": "",
                     "dependentchild_set-0-family_name": "Test2"}
        response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'),
                                    post_data)
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_change_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        pwdc = ParentWithDependentChildren.objects.create(some_required_info=6,
                                                          family_name="Test1")
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test2",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": str(pwdc.id),
                     "dependentchild_set-0-family_name": "Test1"}
        response = self.client.post(
            reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
        )
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_check(self):
        "Ensure that the view_on_site value is either a boolean or a callable"
        try:
            CityAdmin.view_on_site = True
            self.assertEqual(CityAdmin.check(City), [])
            CityAdmin.view_on_site = False
            self.assertEqual(CityAdmin.check(City), [])
            CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
            self.assertEqual(CityAdmin.check(City), [])
            # A list is neither a boolean nor callable -> admin.E025.
            CityAdmin.view_on_site = []
            self.assertEqual(CityAdmin.check(City), [
                Error(
                    "The value of 'view_on_site' must be a callable or a boolean value.",
                    hint=None,
                    obj=CityAdmin,
                    id='admin.E025',
                ),
            ])
        finally:
            # Restore the original values for the benefit of other tests.
            CityAdmin.view_on_site = True

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_true(self):
        "Ensure that the default behavior is followed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,)))
        self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name))

    def test_missing_get_absolute_url(self):
        "Ensure None is returned if model doesn't have get_absolute_url"
        model_admin = ModelAdmin(Worker, None)
        self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
    """view_on_site behavior for inline admins (links rendered inside inlines)."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # Same State/City/Restaurant/Worker graph as AdminViewOnSiteTests.
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))

    def test_true(self):
        "Ensure that the 'View on site' button is displayed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk)))

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
        self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name))
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestEtagWithAdminView(TestCase):
    # Regression test for https://code.djangoproject.com/ticket/16003:
    # the presence of the ETag header must track the USE_ETAGS setting,
    # even on the anonymous redirect to the login page.
    def test_admin(self):
        for use_etags in (False, True):
            with self.settings(USE_ETAGS=use_etags):
                response = self.client.get(reverse('admin:index'))
                # Anonymous request: redirected to the login page.
                self.assertEqual(response.status_code, 302)
                self.assertEqual(response.has_header('ETag'), use_etags)
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF="admin_views.urls",
)
class GetFormsetsWithInlinesArgumentTest(TestCase):
    """
    #23934 - When adding a new model instance in the admin, the 'obj' argument
    of get_formsets_with_inlines() should be None. When changing, it should be
    equal to the existing model instance.
    The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
    if obj is not None during add_view or obj is None during change_view.
    """
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_explicitly_provided_pk(self):
        # A 302 means the save succeeded, i.e. the checking admin raised no
        # exception about the obj argument.
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)
        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)

    def test_implicitly_generated_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)
        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)
| bsd-3-clause |
Java1Guy/ansible-modules-extras | monitoring/zabbix_hostmacro.py | 91 | 7996 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013-2014, Epic Games, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: zabbix_hostmacro
short_description: Zabbix host macro creates/updates/deletes
description:
   - manages Zabbix host macros, it can create, update or delete them.
version_added: "2.0"
author:
    - "(@cave)"
    - Dean Hailin Song
requirements:
    - "python >= 2.6"
    - zabbix-api
options:
    server_url:
        description:
            - Url of Zabbix server, with protocol (http or https).
        required: true
        aliases: [ "url" ]
    login_user:
        description:
            - Zabbix user name.
        required: true
    login_password:
        description:
            - Zabbix user password.
        required: true
    host_name:
        description:
            - Name of the host.
        required: true
    macro_name:
        description:
            - Name of the host macro.
        required: true
    macro_value:
        description:
            - Value of the host macro.
        required: true
    state:
        description:
            - State of the macro.
            - On C(present), it will create if macro does not exist or update the macro if the associated data is different.
            - On C(absent) will remove a macro if it exists.
        required: false
        choices: ['present', 'absent']
        default: "present"
    timeout:
        description:
            - The timeout of API request (seconds).
        default: 10
'''

# NOTE: the example previously read "macro_name:Example macro" (no space
# after the colon), which is not a valid YAML mapping entry; fixed below.
EXAMPLES = '''
- name: Create a new host macro or update an existing macro's value
  local_action:
    module: zabbix_hostmacro
    server_url: http://monitor.example.com
    login_user: username
    login_password: password
    host_name: ExampleHost
    macro_name: Example macro
    macro_value: Example value
    state: present
'''
import logging
import copy
# Import the optional zabbix-api dependency; record availability so main()
# can fail the module gracefully instead of crashing with an ImportError.
try:
    from zabbix_api import ZabbixAPI, ZabbixAPISubClass
    HAS_ZABBIX_API = True
except ImportError:
    HAS_ZABBIX_API = False
# Extend the ZabbixAPI
# Since the zabbix-api python module too old (version 1.0, no higher version so far).
class ZabbixAPIExtends(ZabbixAPI):
    # Thin wrapper kept for parity with the other zabbix_* modules: it
    # accepts and discards extra keyword arguments that the old
    # zabbix-api 1.0 constructor does not understand.
    def __init__(self, server, timeout, **kwargs):
        ZabbixAPI.__init__(self, server, timeout=timeout)
class HostMacro(object):
    """Create/update/delete operations on Zabbix host macros.

    Wraps a connected ZabbixAPI client plus the AnsibleModule so each
    operation can honor check mode and report its outcome via
    ``exit_json`` (success) or ``fail_json`` (error).

    Note: the original code used the Python-2-only ``except Exception, e``
    syntax; it now uses ``except Exception as e``, which is valid on the
    module's stated floor of Python 2.6 as well as on Python 3.
    """

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    # exist host
    def is_host_exist(self, host_name):
        """Return the raw result of the host.exists API call for host_name."""
        result = self._zapi.host.exists({'host': host_name})
        return result

    # get host id by host name
    def get_host_id(self, host_name):
        """Return the hostid for host_name, failing the module if not found."""
        try:
            host_list = self._zapi.host.get({'output': 'extend', 'filter': {'host': host_name}})
            if len(host_list) < 1:
                self._module.fail_json(msg="Host not found: %s" % host_name)
            else:
                host_id = host_list[0]['hostid']
                return host_id
        except Exception as e:
            self._module.fail_json(msg="Failed to get the host %s id: %s." % (host_name, e))

    # get host macro
    def get_host_macro(self, macro_name, host_id):
        """Return the macro object named '{$MACRO_NAME}' on host_id, or None."""
        try:
            host_macro_list = self._zapi.usermacro.get(
                {"output": "extend", "selectSteps": "extend", 'hostids': [host_id], 'filter': {'macro': '{$' + macro_name + '}'}})
            if len(host_macro_list) > 0:
                return host_macro_list[0]
            return None
        except Exception as e:
            self._module.fail_json(msg="Failed to get host macro %s: %s" % (macro_name, e))

    # create host macro
    def create_host_macro(self, macro_name, macro_value, host_id):
        """Create macro '{$MACRO_NAME}' = macro_value on host_id; exits the module."""
        try:
            # Check mode: report the change without touching the server.
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.create({'hostid': host_id, 'macro': '{$' + macro_name + '}', 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully added host macro %s " % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to create host macro %s: %s" % (macro_name, e))

    # update host macro
    def update_host_macro(self, host_macro_obj, macro_name, macro_value):
        """Set an existing macro (API object host_macro_obj) to macro_value; exits the module."""
        host_macro_id = host_macro_obj['hostmacroid']
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.update({'hostmacroid': host_macro_id, 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully updated host macro %s " % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to updated host macro %s: %s" % (macro_name, e))

    # delete host macro
    def delete_host_macro(self, host_macro_obj, macro_name):
        """Delete an existing macro (API object host_macro_obj); exits the module."""
        host_macro_id = host_macro_obj['hostmacroid']
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.delete([host_macro_id])
            self._module.exit_json(changed=True, result="Successfully deleted host macro %s " % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to delete host macro %s: %s" % (macro_name, e))
def main():
    """Ansible entry point: ensure a Zabbix host macro is present or absent."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(required=True, aliases=['url']),
            login_user=dict(required=True),
            login_password=dict(required=True, no_log=True),
            host_name=dict(required=True),
            macro_name=dict(required=True),
            macro_value=dict(required=True),
            state=dict(default="present", choices=['present', 'absent']),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True
    )

    if not HAS_ZABBIX_API:
        module.fail_json(msg="Missing required zabbix-api module (check docs or install with: pip install zabbix-api)")

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    host_name = module.params['host_name']
    # Zabbix stores user macro names upper-cased; normalize before lookups.
    macro_name = (module.params['macro_name']).upper()
    macro_value = module.params['macro_value']
    state = module.params['state']
    timeout = module.params['timeout']

    # login to zabbix
    try:
        zbx = ZabbixAPIExtends(server_url, timeout=timeout)
        zbx.login(login_user, login_password)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    host_macro_class_obj = HostMacro(module, zbx)

    if host_name:
        host_id = host_macro_class_obj.get_host_id(host_name)
        host_macro_obj = host_macro_class_obj.get_host_macro(macro_name, host_id)
        if state == 'absent':
            if not host_macro_obj:
                module.exit_json(changed=False, msg="Host Macro %s does not exist" % macro_name)
            else:
                # delete a macro
                host_macro_class_obj.delete_host_macro(host_macro_obj, macro_name)
        else:
            if not host_macro_obj:
                # create host macro
                host_macro_class_obj.create_host_macro(macro_name, macro_value, host_id)
            else:
                # update host macro
                host_macro_class_obj.update_host_macro(host_macro_obj, macro_name, macro_value)
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
vdeluca/tfi | manage.py | 44 | 1062 | #!/usr/bin/env python
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import os, sys
if __name__ == "__main__":
    # Fall back to GeoNode's settings unless the environment already names a
    # settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geonode.settings")
    from django.core.management import execute_from_command_line
    # Dispatch the management command given on the command line.
    execute_from_command_line(sys.argv)
| gpl-3.0 |
18padx08/PPTex | PPTexEnv_x86_64/lib/python2.7/site-packages/sympy/core/rules.py | 29 | 1552 | """
Replacement rules.
"""
from __future__ import print_function, division
class Transform(object):
    """
    Immutable mapping that can be used as a generic transformation rule.

    Parameters
    ----------
    transform : callable
        Computes the value corresponding to any key.
    filter : callable, optional
        If supplied, specifies which objects are in the mapping; by default
        every object is considered a member.

    Examples
    --------
    >>> from sympy.core.rules import Transform
    >>> from sympy.abc import x

    This Transform will return, as a value, one more than the key:

    >>> add1 = Transform(lambda x: x + 1)
    >>> add1[1]
    2
    >>> add1[x]
    x + 1

    With a filter, only objects for which it returns True belong to the
    mapping:

    >>> add1_odd = Transform(lambda x: x + 1, lambda x: x%2 == 1)
    >>> 2 in add1_odd
    False
    >>> add1_odd.get(2, 0)
    0
    >>> 3 in add1_odd
    True
    >>> add1_odd[3]
    4
    >>> add1_odd.get(3, 0)
    4
    """

    def __init__(self, transform, filter=lambda x: True):
        self._transform = transform
        self._filter = filter

    def __contains__(self, item):
        return self._filter(item)

    def __getitem__(self, key):
        # Guard clause: anything rejected by the filter is not in the mapping.
        if not self._filter(key):
            raise KeyError(key)
        return self._transform(key)

    def get(self, item, default=None):
        return self[item] if item in self else default
| mit |
arenadata/ambari | contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/webhcat_service_check.py | 10 | 4456 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import urllib2
from resource_management import *
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
import time
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def webhcat_service_check():
    """Windows WebHCat smoke test: poll the Templeton REST 'status' endpoint.

    Raises Fail on an HTTP error response.  NOTE(review): a non-200 status
    code is only logged as a warning and does not fail the check -- confirm
    this is intended.
    """
    Logger.info("Webhcat smoke test - service status")
    import params
    # AMBARI-11633 [WinTP2] Webhcat service check fails
    # Hive doesn't pass the environment variables correctly to child processes, which fails the smoke test.
    # Reducing the amount of URLs checked to the minimum required.
    #smoke_cmd = os.path.join(params.stack_root,"Run-SmokeTests.cmd")
    #service = "WEBHCAT"
    #Execute(format("cmd /C {smoke_cmd} {service}"), user=params.hcat_user, logoutput=True)
    url_tests = [
        "status",
        #These are the failing ones:
        #"ddl/database?user.name=hadoop",
        #"ddl/database/default/table?user.name=hadoop"
    ]
    import socket
    url_host = socket.getfqdn()
    url_port = params.config["configurations"]["webhcat-site"]["templeton.port"]
    for url_test in url_tests:
        url_request = "http://{0}:{1}/templeton/v1/{2}".format(url_host, url_port, url_test)
        url_response = None
        try:
            # execute the query for the JSON that includes WebHCat status
            url_response = urllib2.urlopen(url_request, timeout=30)
            status = url_response.getcode()
            response = url_response.read()
            if status != 200:
                Logger.warning("Webhcat service check status: {0}".format(status))
            Logger.info("Webhcat service check response: {0}".format(response))
        except urllib2.HTTPError as he:
            raise Fail("Webhcat check {0} failed: {1}".format(url_request, he.msg))
        finally:
            if url_response is not None:
                try:
                    # Best-effort close; ignore errors on an already-broken response.
                    url_response.close()
                except:
                    pass
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def webhcat_service_check():
    """Default (Linux) WebHCat smoke test.

    Stages a generated Pig script and an input file into HDFS, then runs
    templetonSmoke.sh against the WebHCat server.  All file names embed a
    per-run unique suffix so concurrent checks cannot collide.
    """
    import params
    # Local copy of the smoke-test driver script.
    File(format("{tmp_dir}/templetonSmoke.sh"),
         content=StaticFile('templetonSmoke.sh'),
         mode=0755  # Python 2 octal literal: the script must be executable.
         )
    if params.security_enabled:
        smokeuser_keytab = params.smoke_user_keytab
        smoke_user_principal = params.smokeuser_principal
    else:
        # Placeholder values tell templetonSmoke.sh to skip kinit.
        smokeuser_keytab = "no_keytab"
        smoke_user_principal = "no_principal"
    unique_name = format("{smokeuser}.{timestamp}", timestamp=time.time())
    templeton_test_script = format("idtest.{unique_name}.pig")
    templeton_test_input = format("/tmp/idtest.{unique_name}.in")
    templeton_test_output = format("/tmp/idtest.{unique_name}.out")
    # Render the Pig script locally from its Jinja2 template.
    File(format("{tmp_dir}/{templeton_test_script}"),
         content=Template("templeton_smoke.pig.j2", templeton_test_input=templeton_test_input, templeton_test_output=templeton_test_output),
         owner=params.hdfs_user
         )
    # Upload the Pig script and a small input file (/etc/passwd) into HDFS.
    params.HdfsResource(format("/tmp/{templeton_test_script}"),
                        action="create_on_execute",
                        type="file",
                        source=format("{tmp_dir}/{templeton_test_script}"),
                        owner=params.smokeuser
                        )
    params.HdfsResource(templeton_test_input,
                        action="create_on_execute",
                        type="file",
                        source="/etc/passwd",
                        owner=params.smokeuser
                        )
    params.HdfsResource(None, action="execute")
    cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {templeton_test_script} {smokeuser_keytab}"
                 " {security_param} {kinit_path_local} {smoke_user_principal}"
                 " {tmp_dir}")
    Execute(cmd,
            tries=3,
            try_sleep=5,
            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
            logoutput=True)
| apache-2.0 |
tmuelle2/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/outputtee_unittest.py | 124 | 1987 | # Copyright (C) 2012 Zan Dobersek <zandobersek@gmail.com>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import StringIO
import unittest2 as unittest
from webkitpy.common.system.outputtee import Tee, OutputTee
class SimpleTeeTest(unittest.TestCase):
    def test_simple_tee(self):
        """Tee.write should fan each write out to every wrapped stream."""
        first = StringIO.StringIO()
        second = StringIO.StringIO()
        tee = Tee(first, second)
        for chunk in ("foo bar\n", "baz\n"):
            tee.write(chunk)
        self.assertEqual(first.getvalue(), "foo bar\nbaz\n")
        self.assertEqual(second.getvalue(), first.getvalue())
| bsd-3-clause |
brucetsao/python-devicecloud | devicecloud/test/unit/test_ws.py | 3 | 1071 | import unittest
from devicecloud import DeviceCloudException
from mock import MagicMock, patch
from devicecloud.ws import WebServiceStub
from devicecloud import DeviceCloudConnection
class MockConnection(MagicMock):
    """MagicMock-backed stand-in for a device cloud connection.

    ``get`` and ``post`` simply echo back their positional and keyword
    arguments so tests can assert exactly how the stub was invoked.
    """

    def get(self, *args, **kwargs):
        return args, kwargs

    def post(self, *args, **kwargs):
        return args, kwargs
class LegacyAPIMemberInternalsTests(unittest.TestCase):
    """Exercise WebServiceStub attribute chaining and HTTP verb dispatch."""

    def setUp(self):
        self.conn = MockConnection()
        self.stub = WebServiceStub(self.conn, '/ws')

    def test_path_building(self):
        # Attribute access should accumulate into a slash-joined path.
        chained = self.stub.a.b.c
        self.assertEqual(chained._path, "/ws/a/b/c")

    def test_method_access(self):
        args, kwargs = self.stub.a.b.c.get()
        self.assertEqual(args, ("/ws/a/b/c", ))
        self.assertDictEqual(kwargs, {})

    def test_method_access_args_kwargs(self):
        args, kwargs = self.stub.a.test.path.post("foo", bar="baz")
        self.assertEqual(args, ("/ws/a/test/path", "foo"))
        self.assertDictEqual(kwargs, {"bar": "baz"})
if __name__ == '__main__':
unittest.main()
| mpl-2.0 |
FrankBian/kuma | vendor/lib/python/south/introspection_plugins/django_audit_log.py | 154 | 1436 | """
South introspection rules for django-audit-log
"""
from django.contrib.auth.models import User
from django.conf import settings
from south.modelsinspector import add_introspection_rules
if "audit_log" in settings.INSTALLED_APPS:
    try:
        # Try and import the field so we can see if audit_log is available
        from audit_log.models import fields
        # Make sure the `to` and `null` parameters will be ignored
        rules = [(
            (fields.LastUserField,),
            [],
            {
                'to': ['rel.to', {'default': User}],
                'null': ['null', {'default': True}],
            },
        )]
        # Add the rules for the `LastUserField`
        add_introspection_rules(
            rules,
            ['^audit_log\.models\.fields\.LastUserField'],
        )
    except ImportError:
        # django-audit-log is optional: if the app is listed but the package
        # is not installed, silently skip registering the rules.
        pass
| mpl-2.0 |
neilpelow/wmap-django | venv/lib/python3.5/site-packages/django/contrib/gis/forms/widgets.py | 42 | 3529 | from __future__ import unicode_literals
import logging
from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Widget
from django.template import loader
from django.utils import six, translation
logger = logging.getLogger('django.contrib.gis')
class BaseGeometryWidget(Widget):
    """
    The base class for rich geometry widgets.
    Renders a map using the WKT of the geometry.
    """
    # Class-level defaults; overridable per-instance through `attrs`.
    geom_type = 'GEOMETRY'
    map_srid = 4326
    map_width = 600
    map_height = 400
    display_raw = False
    supports_3d = False
    template_name = ''  # set on subclasses

    def __init__(self, attrs=None):
        self.attrs = {}
        # Seed the widget attrs from the class defaults, then overlay any
        # caller-supplied attrs on top.
        for key in ('geom_type', 'map_srid', 'map_width', 'map_height', 'display_raw'):
            self.attrs[key] = getattr(self, key)
        if attrs:
            self.attrs.update(attrs)

    def serialize(self, value):
        """Serialize a geometry to WKT for embedding in the rendered form."""
        return value.wkt if value else ''

    def deserialize(self, value):
        """Parse a WKT/geometry string back into a GEOSGeometry (or None on error)."""
        try:
            return GEOSGeometry(value, self.map_srid)
        except (GEOSException, ValueError) as err:
            logger.error("Error creating geometry from value '%s' (%s)", value, err)
            return None

    def render(self, name, value, attrs=None):
        # If a string reaches here (via a validation error on another
        # field) then just reconstruct the Geometry.
        if isinstance(value, six.string_types):
            value = self.deserialize(value)
        if value:
            # Check that srid of value and map match
            if value.srid != self.map_srid:
                try:
                    # Reproject through GDAL/OGR into the map's SRID.
                    ogr = value.ogr
                    ogr.transform(self.map_srid)
                    value = ogr
                except gdal.GDALException as err:
                    # Leave the value untransformed; the template may still render.
                    logger.error(
                        "Error transforming geometry from srid '%s' to srid '%s' (%s)",
                        value.srid, self.map_srid, err
                    )
        context = self.build_attrs(
            attrs,
            name=name,
            module='geodjango_%s' % name.replace('-', '_'),  # JS-safe
            serialized=self.serialize(value),
            geom_type=gdal.OGRGeomType(self.attrs['geom_type']),
            STATIC_URL=settings.STATIC_URL,
            LANGUAGE_BIDI=translation.get_language_bidi(),
        )
        return loader.render_to_string(self.template_name, context)
class OpenLayersWidget(BaseGeometryWidget):
    """Map widget rendered with the OpenLayers 2 JavaScript library."""
    template_name = 'gis/openlayers.html'

    class Media:
        # OpenLayers itself is loaded from its CDN; the local script wires
        # the map up to the form field.
        js = (
            'http://openlayers.org/api/2.13.1/OpenLayers.js',
            'gis/js/OLMapWidget.js',
        )
class OSMWidget(BaseGeometryWidget):
    """
    An OpenLayers/OpenStreetMap-based widget.
    """
    template_name = 'gis/openlayers-osm.html'
    # Default map center (longitude/latitude) used by the template.
    default_lon = 5
    default_lat = 47

    class Media:
        js = (
            'http://openlayers.org/api/2.13.1/OpenLayers.js',
            'gis/js/OLMapWidget.js',
        )

    def __init__(self, attrs=None):
        # NOTE(review): attrs is deliberately not forwarded to the parent;
        # the defaults below are injected first and then overlaid with attrs.
        super(OSMWidget, self).__init__()
        for key in ('default_lon', 'default_lat'):
            self.attrs[key] = getattr(self, key)
        if attrs:
            self.attrs.update(attrs)

    @property
    def map_srid(self):
        # Use the official spherical mercator projection SRID when GDAL is
        # available; otherwise, fallback to 900913.
        if gdal.HAS_GDAL:
            return 3857
        else:
            return 900913
| gpl-3.0 |
GuillaumeDD/AdventOfCode2016 | day08.py | 1 | 5304 | from __future__ import print_function # print utilities without systematic '\n' at EOL
import io
import re
# --- Day 8: Two-Factor Authentication ---
#
# You come across a door implementing what you can only assume is an
# implementation of two-factor authentication after a long game of
# requirements telephone.
#
# To get past the door, you first swipe a keycard (no problem; there was
# one on a nearby desk). Then, it displays a code on a little screen,
# and you type that code on a keypad. Then, presumably, the door
# unlocks.
#
# Unfortunately, the screen has been smashed. After a few minutes,
# you've taken everything apart and figured out how it works. Now you
# just have to work out what the screen would have displayed.
#
# The magnetic strip on the card you swiped encodes a series of
# instructions for the screen; these instructions are your puzzle
# input. The screen is 50 pixels wide and 6 pixels tall, all of which
# start off, and is capable of three somewhat peculiar operations:
#
# rect AxB turns on all of the pixels in a rectangle at the top-left of the screen which is A wide and B tall.
#
# rotate row y=A by B shifts all of the pixels in row A (0 is the top
# row) right by B pixels. Pixels that would fall off the right end
# appear at the left end of the row.
#
# rotate column x=A by B shifts all of the pixels in column A (0 is
# the left column) down by B pixels. Pixels that would fall off the
# bottom appear at the top of the column.
#
# For example, here is a simple sequence on a smaller screen:
#
# rect 3x2 creates a small rectangle in the top-left corner:
#
# ###....
# ###....
# .......
#
# rotate column x=1 by 1 rotates the second column down by one pixel:
#
# #.#....
# ###....
# .#.....
#
# rotate row y=0 by 4 rotates the top row right by four pixels:
#
# ....#.#
# ###....
# .#.....
#
# rotate column x=1 by 1 again rotates the second column down by one
# pixel, causing the bottom pixel to wrap back to the top:
#
# .#..#.#
# #.#....
# .#.....
#
# As you can see, this display technology is extremely powerful, and
# will soon dominate the tiny-code-displaying-screen market. That's what
# the advertisement on the back of the display tries to convince you,
# anyway.
#
# There seems to be an intermediate check of the voltage used by the
# display: after you swipe your card, if the screen did work, how many
# pixels should be lit?
#
# --- Part Two ---
#
# You notice that the screen is only capable of displaying capital
# letters; in the font it uses, each letter is 5 pixels wide and 6 tall.
#
# After you swipe your card, what code is the screen trying to display?
# One pre-compiled regex per screen instruction.
pattern_rect = re.compile('rect ([0-9]+)x([0-9]+)')
pattern_rotate_row = re.compile('rotate row y=([0-9]+) by ([0-9]+)')
pattern_rotate_column = re.compile('rotate column x=([0-9]+) by ([0-9]+)')

# Light statuses
ON = '#'
OFF = '.'

# The screen: SCREEN_HEIGHT rows of SCREEN_WIDTH cells, all initially OFF.
# Each row is built independently so mutating one row never aliases another.
SCREEN_WIDTH = 50
SCREEN_HEIGHT = 6
SCREEN = [[OFF for _ in range(SCREEN_WIDTH)] for _ in range(SCREEN_HEIGHT)]


def print_screen():
    """Print the current screen state, one row of light cells per line."""
    for line in SCREEN:
        for col in line:
            print(col, end='')
        print()


def switch(light_status):
    """Return the toggled value of a single light cell."""
    if light_status == ON:
        return OFF
    else:
        return ON


def nb_ON():
    """
    Computes the number of 'ON' lights in SCREEN
    """
    return sum(row.count(ON) for row in SCREEN)


def apply_command(command_line):
    """
    Apply a given command line ('rect ...' or 'rotate ...') on SCREEN.

    Unrecognized commands are reported and otherwise ignored.
    """
    global SCREEN
    rect = pattern_rect.match(command_line)
    if rect is not None:
        # RECT command: per the puzzle statement, "rect AxB turns ON all of
        # the pixels" in the top-left AxB rectangle.  Set them to ON
        # unconditionally -- toggling (the previous behavior) would wrongly
        # switch OFF pixels that are already lit.
        width = int(rect.group(1))
        height = int(rect.group(2))
        for i in range(height):
            for j in range(width):
                SCREEN[i][j] = ON
        return
    rotate_row = pattern_rotate_row.match(command_line)
    if rotate_row is not None:
        # ROTATE ROW command: shift row y right by `by`, wrapping around.
        y = int(rotate_row.group(1))
        by = int(rotate_row.group(2))
        new_line = [OFF for _ in range(SCREEN_WIDTH)]
        for j in range(SCREEN_WIDTH):
            new_line[(j + by) % SCREEN_WIDTH] = SCREEN[y][j]
        for j, light in enumerate(new_line):
            SCREEN[y][j] = light
        return
    rotate_column = pattern_rotate_column.match(command_line)
    if rotate_column is not None:
        # ROTATE COLUMN command: shift column x down by `by`, wrapping around.
        x = int(rotate_column.group(1))
        by = int(rotate_column.group(2))
        new_column = [OFF for _ in range(SCREEN_HEIGHT)]
        for i in range(SCREEN_HEIGHT):
            new_column[(i + by) % SCREEN_HEIGHT] = SCREEN[i][x]
        for i, light in enumerate(new_column):
            SCREEN[i][x] = light
        return
    print('Unable to match command')
with io.open('inputs/day08.txt', 'r') as f:
    # Replay every instruction from the puzzle input against the screen.
    for line in f:
        command = line.strip()
        apply_command(command)
# Part Two: the lit pixels spell out the door code.
print_screen()
# Part One: total number of lit pixels.
print('Number of pixels lit: {}'.format(nb_ON()))
| gpl-3.0 |
areitz/pants | src/python/pants/fs/archive.py | 10 | 5224 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from abc import abstractmethod
from collections import OrderedDict
from zipfile import ZIP_DEFLATED
from pants.util.contextutil import open_tar, open_zip
from pants.util.dirutil import safe_walk
from pants.util.meta import AbstractClass
from pants.util.strutil import ensure_text
"""Support for wholesale archive creation and extraction in a uniform API across archive types."""
class Archiver(AbstractClass):
    """Base class providing a uniform create/extract API across archive types."""

    @classmethod
    def extract(cls, path, outdir):
        """Extracts an archive's contents to the specified outdir.

        NOTE(review): declared as a plain classmethod raising
        NotImplementedError rather than @abstractmethod, so subclasses are
        expected to shadow it with their own classmethod.
        """
        raise NotImplementedError()

    @abstractmethod
    def create(self, basedir, outdir, name, prefix=None):
        """Creates an archive of all files found under basedir to a file at outdir of the given name.

        If prefix is specified, it should be prepended to all archive paths.
        """
class TarArchiver(Archiver):
    """An archiver that stores files in a tar file with optional compression."""

    @classmethod
    def extract(cls, path, outdir):
        # errorlevel=1 makes fatal tar errors raise instead of being printed.
        with open_tar(path, errorlevel=1) as tar:
            tar.extractall(outdir)

    def __init__(self, mode, extension):
        """
        :param mode: tarfile open mode, e.g. 'w:', 'w:gz' or 'w:bz2'
                     (selects the compression).
        :param extension: file extension to append to created archives.
        """
        Archiver.__init__(self)
        self.mode = mode
        self.extension = extension

    def create(self, basedir, outdir, name, prefix=None):
        basedir = ensure_text(basedir)
        tarpath = os.path.join(outdir, '{}.{}'.format(ensure_text(name), self.extension))
        # dereference=True: symlinks are archived as the files they point to.
        with open_tar(tarpath, self.mode, dereference=True, errorlevel=1) as tar:
            tar.add(basedir, arcname=prefix or '.')
        return tarpath
class ZipArchiver(Archiver):
    """An archiver that stores files in a zip file with optional compression."""

    @classmethod
    def extract(cls, path, outdir, filter_func=None):
        """Extract from a zip file, with an optional filter

        :param string path: path to the zipfile to extract from
        :param string outdir: directory to extract files into
        :param function filter_func: optional filter with the filename as the parameter.  Returns True if
          the file should be extracted.
        """
        with open_zip(path) as archive_file:
            for name in archive_file.namelist():
                # While we're at it, we also perform this safety test
                # (reject absolute paths and parent-directory escapes).
                if name.startswith(b'/') or name.startswith(b'..'):
                    raise ValueError('Zip file contains unsafe path: {}'.format(name))
                # Ignore directories. extract() will create parent dirs as needed.
                # OS X's python 2.6.1 has a bug in zipfile that makes it unzip directories as regular files.
                # This method should work on for python 2.6-3.x.
                # TODO(Eric Ayers) Pants no longer builds with python 2.6. Can this be removed?
                if not name.endswith(b'/'):
                    if (not filter_func or filter_func(name)):
                        archive_file.extract(name, outdir)

    def __init__(self, compression):
        # NOTE(review): `compression` is stored but create() below hardcodes
        # ZIP_DEFLATED -- confirm whether this attribute should be used there.
        Archiver.__init__(self)
        self.compression = compression

    def create(self, basedir, outdir, name, prefix=None):
        zippath = os.path.join(outdir, '{}.zip'.format(name))
        with open_zip(zippath, 'w', compression=ZIP_DEFLATED) as zip:
            for root, _, files in safe_walk(basedir):
                root = ensure_text(root)
                for file in files:
                    file = ensure_text(file)
                    full_path = os.path.join(root, file)
                    # Archive entries are stored relative to basedir,
                    # optionally re-rooted under `prefix`.
                    relpath = os.path.relpath(full_path, basedir)
                    if prefix:
                        relpath = os.path.join(ensure_text(prefix), relpath)
                    zip.write(full_path, relpath)
        return zippath
# Singleton archiver instances, one per supported archive format.
TAR = TarArchiver('w:', 'tar')
TGZ = TarArchiver('w:gz', 'tar.gz')
TBZ2 = TarArchiver('w:bz2', 'tar.bz2')
ZIP = ZipArchiver(ZIP_DEFLATED)

# Insertion-ordered registry mapping type name -> archiver instance.
_ARCHIVER_BY_TYPE = OrderedDict(tar=TAR, tgz=TGZ, tbz2=TBZ2, zip=ZIP)
TYPE_NAMES = frozenset(_ARCHIVER_BY_TYPE.keys())
def archiver(typename):
    """Returns the Archiver registered under the given type name.

    Supported type names:

    'tar'   tar archiver, no compression, emits .tar files.
    'tgz'   tar archiver, gzip compression, emits .tar.gz files.
    'tbz2'  tar archiver, bzip2 compression, emits .tar.bz2 files.
    'zip'   zip archiver, standard compression, emits .zip files.
    """
    selected = _ARCHIVER_BY_TYPE.get(typename)
    if selected is None:
        raise ValueError('No archiver registered for {!r}'.format(typename))
    return selected
def archiver_for_path(path_name):
    """Returns an Archiver for the given path name.

    :param string path_name: The path name of the archive - need not exist.
    :raises: :class:`ValueError` If the path name does not uniquely identify a supported archive type.
    """
    # Compound tar extensions must be checked before the generic splitext
    # fallback, which only sees the final '.gz' / '.bz2' component.
    if path_name.endswith('.tar.gz'):
        return TGZ
    if path_name.endswith('.tar.bz2'):
        return TBZ2
    ext = os.path.splitext(path_name)[1].lstrip('.')
    if not ext:
        raise ValueError('Could not determine archive type of path {}'.format(path_name))
    return archiver(ext)
| apache-2.0 |
nispc/ckanext-data_recommendation | ckanext/data_recommendation/plugin.py | 1 | 2619 | import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
from ckan.plugins.toolkit import asbool
import jieba
import jieba.analyse
from ckan.plugins.toolkit import request, c
import pylons.config as config
import opencc
class Data_RecommendationPlugin(plugins.SingletonPlugin):
    """CKAN plugin that recommends datasets related to the one being viewed."""
    plugins.implements(plugins.IConfigurer)
    plugins.implements(plugins.ITemplateHelpers)
    plugins.implements(plugins.IRoutes, inherit=True)

    # IConfigurer
    def update_config(self, config_):
        # Register this extension's templates, public assets and fanstatic
        # resource library with CKAN.
        toolkit.add_template_directory(config_, 'templates')
        toolkit.add_public_directory(config_, 'public')
        toolkit.add_resource('fanstatic', 'data_recommendation')
    @classmethod
    def related_pkgs(cls):
        """Build related-dataset search results for the dataset in the current request.

        Keywords come from the dataset's tags and/or are extracted from its
        title with jieba (which expects simplified Chinese -- hence the OpenCC
        round-trip); each keyword is then fed to package_search.

        Returns a dict shaped like:
        {'results': {keyword: {'rows': total_count, 'result': [pkg_dict, ...]}}}
        """
        # Parameter
        extractNum = int(config.get('ckan.data_recommended.extract_num', '5'))
        byTag = asbool(config.get('ckan.data_recommended.by_tag', 'true'))
        byTitle = asbool(config.get('ckan.data_recommended.by_title', 'true'))
        # fetch pkg info -- the dataset name is the last segment of the URL path
        pkg_name = request.environ['PATH_INFO'].split('/')[-1]
        pkg_title = toolkit.get_action('package_show')({}, {'id': pkg_name})['title']
        # Convert the (traditional Chinese) title to simplified for jieba.
        pkg_title_s = opencc.convert(pkg_title, config='zhtw2zhcn_s.ini')
        pkg_tags = [pkg_tag['name'] for pkg_tag in toolkit.get_action('package_show')({}, {'id': pkg_name})['tags']]
        # related_tag_titles: the candidate search keywords
        related_tag_titles = set()
        if byTag:
            related_tag_titles.update(set(pkg_tags))
        if byTitle:
            tmp = jieba.analyse.extract_tags(pkg_title_s, topK=extractNum)
            # Convert each extracted keyword back to traditional Chinese.
            related_tag_titles.update(
                set(
                    (opencc.convert(_, config='zhs2zhtw_vp.ini') for _ in tmp)
                )
            )
        related_pkgs = {}
        related_pkgs['results'] = dict()
        for related_tag_title in related_tag_titles:
            # Quoted query => exact-phrase search; cap at 20 results per keyword.
            tmp = toolkit.get_action('package_search')({}, {'q': '"' + related_tag_title + '"', 'rows': 20})
            related_pkg_results = tmp['results']
            related_pkgs['results'][related_tag_title] = dict()
            related_pkgs['results'][related_tag_title]['rows'] = tmp['count']
            # Filter out the current dataset itself (matched by title).
            related_pkg_results = [_ for _ in related_pkg_results if _['title'] != pkg_title]
            related_pkgs['results'][related_tag_title]['result'] = related_pkg_results
            # related_pkgs['results'][related_tag_title] = sorted(related_pkgs['results'][related_tag_title], key=lambda t: len(t))
        return related_pkgs
def get_helpers(self):
return {'related_pkgs': self.related_pkgs} | agpl-3.0 |
HyperloopTeam/FullOpenMDAO | lib/python2.7/site-packages/matplotlib/figure.py | 10 | 58719 | """
The figure module provides the top-level
:class:`~matplotlib.artist.Artist`, the :class:`Figure`, which
contains all the plot elements. The following classes are defined
:class:`SubplotParams`
control the default spacing of the subplots
:class:`Figure`
top level container for all plot elements
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
from operator import itemgetter
import numpy as np
from matplotlib import rcParams
from matplotlib import docstring
from matplotlib import __version__ as _mpl_version
import matplotlib.artist as martist
from matplotlib.artist import Artist, allow_rasterization
import matplotlib.cbook as cbook
from matplotlib.cbook import Stack, iterable
from matplotlib import _image
from matplotlib.image import FigureImage
import matplotlib.colorbar as cbar
from matplotlib.axes import Axes, SubplotBase, subplot_class_factory
from matplotlib.blocking_input import BlockingMouseInput, BlockingKeyMouseInput
from matplotlib.legend import Legend
from matplotlib.patches import Rectangle
from matplotlib.projections import (get_projection_names,
process_projection_requirements)
from matplotlib.text import Text, _process_text_args
from matplotlib.transforms import (Affine2D, Bbox, BboxTransformTo,
TransformedBbox)
from matplotlib.backend_bases import NonGuiException
docstring.interpd.update(projection_names=get_projection_names())
class AxesStack(Stack):
"""
Specialization of the Stack to handle all tracking of Axes in a Figure.
This stack stores ``key, (ind, axes)`` pairs, where:
* **key** should be a hash of the args and kwargs
used in generating the Axes.
* **ind** is a serial number for tracking the order
in which axes were added.
The AxesStack is a callable, where ``ax_stack()`` returns
the current axes. Alternatively the :meth:`current_key_axes` will
return the current key and associated axes.
"""
def __init__(self):
Stack.__init__(self)
self._ind = 0
def as_list(self):
"""
Return a list of the Axes instances that have been added to the figure
"""
ia_list = [a for k, a in self._elements]
ia_list.sort()
return [a for i, a in ia_list]
def get(self, key):
"""
Return the Axes instance that was added with *key*.
If it is not present, return None.
"""
item = dict(self._elements).get(key)
if item is None:
return None
return item[1]
def _entry_from_axes(self, e):
ind, k = dict([(a, (ind, k)) for (k, (ind, a)) in self._elements])[e]
return (k, (ind, e))
def remove(self, a):
"""Remove the axes from the stack."""
Stack.remove(self, self._entry_from_axes(a))
def bubble(self, a):
"""
Move the given axes, which must already exist in the
stack, to the top.
"""
return Stack.bubble(self, self._entry_from_axes(a))
def add(self, key, a):
"""
Add Axes *a*, with key *key*, to the stack, and return the stack.
If *a* is already on the stack, don't add it again, but
return *None*.
"""
# All the error checking may be unnecessary; but this method
# is called so seldom that the overhead is negligible.
if not isinstance(a, Axes):
raise ValueError("second argument, %s, is not an Axes" % a)
try:
hash(key)
except TypeError:
raise ValueError("first argument, %s, is not a valid key" % key)
a_existing = self.get(key)
if a_existing is not None:
Stack.remove(self, (key, a_existing))
warnings.warn(
"key %s already existed; Axes is being replaced" % key)
# I don't think the above should ever happen.
if a in self:
return None
self._ind += 1
return Stack.push(self, (key, (self._ind, a)))
def current_key_axes(self):
    """
    Return a ``(key, axes)`` pair for the active axes.

    When the stack is empty, both elements are the stack default
    (``(None, None)`` unless the default was overridden).
    """
    if not self._elements:
        return self._default, self._default
    key, (_, axes) = self._elements[self._pos]
    return key, axes
def __call__(self):
    """Return the active axes (second element of current_key_axes())."""
    _, axes = self.current_key_axes()
    return axes
def __contains__(self, a):
    # Membership test: True when *a* is one of the axes on the stack.
    return a in self.as_list()
class SubplotParams:
    """
    Storage for the margin/spacing parameters of a figure's subplots.

    All dimensions are fractions of the figure width or height; any value
    left as None falls back to the corresponding
    ``figure.subplot.*`` rc parameter.

    Attributes (with their usual rc defaults):

    *left* : 0.125
        The left side of the subplots of the figure
    *right* : 0.9
        The right side of the subplots of the figure
    *bottom* : 0.1
        The bottom of the subplots of the figure
    *top* : 0.9
        The top of the subplots of the figure
    *wspace* : 0.2
        The amount of width reserved for blank space between subplots
    *hspace* : 0.2
        The amount of height reserved for white space between subplots
    """

    def __init__(self, left=None, bottom=None, right=None, top=None,
                 wspace=None, hspace=None):
        """Store the given values, consulting rc params for any None."""
        self.validate = True
        self.update(left, bottom, right, top, wspace, hspace)

    def update(self, left=None, bottom=None, right=None, top=None,
               wspace=None, hspace=None):
        """
        Update the current values.  A None argument keeps the current
        value if one is set, otherwise falls back to rc.

        Raises ValueError (and restores the previous values) when
        validation is enabled and left >= right or bottom >= top.
        """
        # Snapshot the current values so we can roll back on invalid input.
        names = ('left', 'right', 'top', 'bottom', 'wspace', 'hspace')
        saved = {name: getattr(self, name, None) for name in names}

        for name, value in (('left', left), ('right', right),
                            ('bottom', bottom), ('top', top),
                            ('wspace', wspace), ('hspace', hspace)):
            self._update_this(name, value)

        def restore():
            # Roll back every attribute to its pre-update value.
            for name, value in saved.items():
                setattr(self, name, value)

        if self.validate:
            if self.left >= self.right:
                restore()
                raise ValueError('left cannot be >= right')
            if self.bottom >= self.top:
                restore()
                raise ValueError('bottom cannot be >= top')

    def _update_this(self, s, val):
        # None means: keep the existing attribute, or fall back to rc.
        if val is None:
            val = getattr(self, s, None)
            if val is None:
                val = rcParams['figure.subplot.' + s]
        setattr(self, s, val)
class Figure(Artist):
"""
The Figure instance supports callbacks through a *callbacks*
attribute which is a :class:`matplotlib.cbook.CallbackRegistry`
instance. The events you can connect to are 'dpi_changed', and
the callback will be called with ``func(fig)`` where fig is the
:class:`Figure` instance.
*patch*
The figure patch is drawn by a
:class:`matplotlib.patches.Rectangle` instance
*suppressComposite*
For multiple figure images, the figure will make composite
images depending on the renderer option_image_nocomposite
function. If suppressComposite is True|False, this will
override the renderer.
"""
def __str__(self):
    """Return a short description built from the figure's bbox size."""
    width, height = tuple(self.bbox.size)
    return "Figure(%gx%g)" % (width, height)
def __init__(self,
             figsize=None,      # defaults to rc figure.figsize
             dpi=None,          # defaults to rc figure.dpi
             facecolor=None,    # defaults to rc figure.facecolor
             edgecolor=None,    # defaults to rc figure.edgecolor
             linewidth=0.0,     # the default linewidth of the frame
             frameon=None,      # whether or not to draw the figure frame
             subplotpars=None,  # default to rc
             tight_layout=None,  # default to rc figure.autolayout
             ):
    """
    *figsize*
        w,h tuple in inches

    *dpi*
        Dots per inch

    *facecolor*
        The figure patch facecolor; defaults to rc ``figure.facecolor``

    *edgecolor*
        The figure patch edge color; defaults to rc ``figure.edgecolor``

    *linewidth*
        The figure patch edge linewidth; the default linewidth of the frame

    *frameon*
        If *False*, suppress drawing the figure frame

    *subplotpars*
        A :class:`SubplotParams` instance, defaults to rc

    *tight_layout*
        If *False* use *subplotpars*; if *True* adjust subplot
        parameters using :meth:`tight_layout` with default padding.
        When providing a dict containing the keys `pad`, `w_pad`, `h_pad`
        and `rect`, the default :meth:`tight_layout` paddings will be
        overridden.
        Defaults to rc ``figure.autolayout``.
    """
    Artist.__init__(self)

    self.callbacks = cbook.CallbackRegistry()

    # Resolve rc-dependent defaults for anything left as None.
    if figsize is None:
        figsize = rcParams['figure.figsize']
    if dpi is None:
        dpi = rcParams['figure.dpi']
    if facecolor is None:
        facecolor = rcParams['figure.facecolor']
    if edgecolor is None:
        edgecolor = rcParams['figure.edgecolor']
    if frameon is None:
        frameon = rcParams['figure.frameon']

    self.dpi_scale_trans = Affine2D()
    # Assigning through the ``dpi`` property also scales dpi_scale_trans
    # and fires the 'dpi_changed' callback.
    self.dpi = dpi
    self.bbox_inches = Bbox.from_bounds(0, 0, *figsize)
    # bbox tracks bbox_inches through the dpi transform (display units).
    self.bbox = TransformedBbox(self.bbox_inches, self.dpi_scale_trans)

    self.frameon = frameon

    self.transFigure = BboxTransformTo(self.bbox)

    # the figurePatch name is deprecated
    self.patch = self.figurePatch = Rectangle(
        xy=(0, 0), width=1, height=1,
        facecolor=facecolor, edgecolor=edgecolor,
        linewidth=linewidth)

    self._set_artist_props(self.patch)
    self.patch.set_aa(False)

    self._hold = rcParams['axes.hold']
    self.canvas = None
    self._suptitle = None

    if subplotpars is None:
        subplotpars = SubplotParams()

    self.subplotpars = subplotpars
    self.set_tight_layout(tight_layout)

    self._axstack = AxesStack()  # track all figure axes and current axes
    # clf() initializes the artist containers (lines, patches, texts, ...).
    self.clf()
    self._cachedRenderer = None
# TODO: I'd like to dynamically add the _repr_html_ method
# to the figure in the right context, but then IPython doesn't
# use it, for some reason.
def _repr_html_(self):
    """IPython rich-display hook: inline HTML when a WebAgg canvas is attached."""
    # We can't use "isinstance" here, because then we'd end up importing
    # webagg unconditionally.
    if (self.canvas is not None and
            'WebAgg' in self.canvas.__class__.__name__):
        from matplotlib.backends import backend_webagg
        return backend_webagg.ipython_inline_display(self)
    # Implicitly returns None otherwise, so IPython falls back to repr().
def show(self, warn=True):
    """
    If using a GUI backend with pyplot, display the figure window.

    If the figure was not created using
    :func:`~matplotlib.pyplot.figure`, it will lack a
    :class:`~matplotlib.backend_bases.FigureManagerBase`, and
    will raise an AttributeError.

    For non-GUI backends, this does nothing, in which case
    a warning will be issued if *warn* is True (default).
    """
    try:
        # No default: a canvas without a manager attribute should raise,
        # producing the explanatory message below.
        manager = getattr(self.canvas, 'manager')
    except AttributeError as err:
        raise AttributeError("%s\n"
                             "Figure.show works only "
                             "for figures managed by pyplot, normally "
                             "created by pyplot.figure()." % err)

    if manager is not None:
        try:
            manager.show()
            return
        except NonGuiException:
            # Non-GUI manager: fall through to the warning below.
            pass
    if warn:
        import warnings
        warnings.warn(
            "matplotlib is currently using a non-GUI backend, "
            "so cannot show the figure")
def _get_axes(self):
    # Getter for the read-only ``axes`` property below; insertion order.
    return self._axstack.as_list()

axes = property(fget=_get_axes, doc="Read-only: list of axes in Figure")
def _get_dpi(self):
    # Getter for the ``dpi`` property.
    return self._dpi

def _set_dpi(self, dpi):
    # Setter for the ``dpi`` property: store the value, rescale the
    # inches->display transform, and notify 'dpi_changed' listeners.
    self._dpi = dpi
    self.dpi_scale_trans.clear().scale(dpi, dpi)
    self.callbacks.process('dpi_changed', self)

dpi = property(_get_dpi, _set_dpi)
def get_tight_layout(self):
    """Return whether :meth:`tight_layout` is applied when drawing."""
    return self._tight
def set_tight_layout(self, tight):
    """
    Set whether :meth:`tight_layout` is used upon drawing.

    If None, the rcParams['figure.autolayout'] value will be used.
    When providing a dict containing the keys `pad`, `w_pad`, `h_pad`
    and `rect`, the default :meth:`tight_layout` paddings will be
    overridden.

    ACCEPTS: [True | False | dict | None ]
    """
    if tight is None:
        tight = rcParams['figure.autolayout']
    # A dict both enables tight layout (if non-empty) and supplies the
    # padding parameters; anything else enables/disables with no params.
    self._tight_parameters = tight if isinstance(tight, dict) else {}
    self._tight = bool(tight)
def autofmt_xdate(self, bottom=0.2, rotation=30, ha='right'):
    """
    Date ticklabels often overlap, so it is useful to rotate them
    and right align them.  Also, a common use case is a number of
    subplots with shared xaxes where the x-axis is date data.  The
    ticklabels are often long, and it helps to rotate them on the
    bottom subplot and turn them off on other subplots, as well as
    turn off xlabels.

    *bottom*
        The bottom of the subplots for :meth:`subplots_adjust`

    *rotation*
        The rotation of the xtick labels

    *ha*
        The horizontal alignment of the xticklabels
    """
    # Only subplots expose is_last_row(); when free-form axes are mixed
    # in we cannot safely hide labels per row or adjust subplot params.
    allsubplots = np.alltrue([hasattr(ax, 'is_last_row') for ax
                              in self.axes])
    if len(self.axes) == 1:
        # Single axes: just rotate/align its labels.
        for label in self.axes[0].get_xticklabels():
            label.set_ha(ha)
            label.set_rotation(rotation)
    else:
        if allsubplots:
            for ax in self.get_axes():
                if ax.is_last_row():
                    # Bottom-row subplots keep (rotated) tick labels.
                    for label in ax.get_xticklabels():
                        label.set_ha(ha)
                        label.set_rotation(rotation)
                else:
                    # Other rows: hide tick labels and the x label.
                    for label in ax.get_xticklabels():
                        label.set_visible(False)
                    ax.set_xlabel('')

    if allsubplots:
        # Make room at the bottom for the rotated labels.
        self.subplots_adjust(bottom=bottom)
def get_children(self):
    """Return the list of artists contained in the figure."""
    # The background patch comes first, then every artist container in
    # the same order the original enumeration used.
    children = [self.patch]
    for group in (self.artists, self.axes, self.lines, self.patches,
                  self.texts, self.images, self.legends):
        children.extend(group)
    return children
def contains(self, mouseevent):
    """
    Test whether the mouse event occurred on the figure.

    Returns True,{}
    """
    # A custom containment test installed via set_contains() wins.
    if six.callable(self._contains):
        return self._contains(self, mouseevent)
    # Otherwise: is the event inside the figure's display-space bbox?
    inside = self.bbox.contains(mouseevent.x, mouseevent.y)
    return inside, {}
def get_window_extent(self, *args, **kwargs):
    """
    Return the figure bounding box in display space.

    Any positional or keyword arguments are accepted for API
    compatibility and ignored.
    """
    return self.bbox
def suptitle(self, t, **kwargs):
    """
    Add a centered title to the figure.

    kwargs are :class:`matplotlib.text.Text` properties.  Using figure
    coordinates, the defaults are:

    *x* : 0.5
        The x location of the text in figure coords

    *y* : 0.98
        The y location of the text in figure coords

    *horizontalalignment* : 'center'
        The horizontal alignment of the text

    *verticalalignment* : 'top'
        The vertical alignment of the text

    A :class:`matplotlib.text.Text` instance is returned.

    Example::

      fig.suptitle('this is the figure title', fontsize=12)
    """
    x = kwargs.pop('x', 0.5)
    y = kwargs.pop('y', 0.98)
    # Respect either the long or the short alias if the caller gave one.
    if ('horizontalalignment' not in kwargs) and ('ha' not in kwargs):
        kwargs['horizontalalignment'] = 'center'
    if ('verticalalignment' not in kwargs) and ('va' not in kwargs):
        kwargs['verticalalignment'] = 'top'

    sup = self.text(x, y, t, **kwargs)
    if self._suptitle is not None:
        # Reuse the existing suptitle Text: copy the new settings onto
        # it, then discard the freshly created instance.
        self._suptitle.set_text(t)
        self._suptitle.set_position((x, y))
        self._suptitle.update_from(sup)
        sup.remove()
    else:
        self._suptitle = sup
    return self._suptitle
def set_canvas(self, canvas):
    """
    Set the canvas that contains the figure.

    ACCEPTS: a FigureCanvas instance
    """
    self.canvas = canvas
def hold(self, b=None):
    """
    Set the hold state.  If *b* is None (default), toggle the current
    hold state; otherwise store *b* as the new hold state.

    e.g.::

        hold()      # toggle hold
        hold(True)  # hold is on
        hold(False) # hold is off
    """
    self._hold = (not self._hold) if b is None else b
def figimage(self, X,
             xo=0,
             yo=0,
             alpha=None,
             norm=None,
             cmap=None,
             vmin=None,
             vmax=None,
             origin=None,
             **kwargs):
    """
    Adds a non-resampled image to the figure.

    call signatures::

      figimage(X, **kwargs)

    adds a non-resampled array *X* to the figure.

    ::

      figimage(X, xo, yo)

    with pixel offsets *xo*, *yo*,

    *X* must be a float array:

    * If *X* is MxN, assume luminance (grayscale)
    * If *X* is MxNx3, assume RGB
    * If *X* is MxNx4, assume RGBA

    Optional keyword arguments:

      =========   =========================================================
      Keyword     Description
      =========   =========================================================
      xo or yo    An integer, the *x* and *y* image offset in pixels
      cmap        a :class:`matplotlib.colors.Colormap` instance, e.g.,
                  cm.jet. If *None*, default to the rc ``image.cmap``
                  value
      norm        a :class:`matplotlib.colors.Normalize` instance. The
                  default is normalization().  This scales luminance -> 0-1
      vmin|vmax   are used to scale a luminance image to 0-1.  If either
                  is *None*, the min and max of the luminance values will
                  be used.  Note if you pass a norm instance, the settings
                  for *vmin* and *vmax* will be ignored.
      alpha       the alpha blending value, default is *None*
      origin      [ 'upper' | 'lower' ] Indicates where the [0,0] index of
                  the array is in the upper left or lower left corner of
                  the axes. Defaults to the rc image.origin value
      =========   =========================================================

    figimage complements the axes image
    (:meth:`~matplotlib.axes.Axes.imshow`) which will be resampled
    to fit the current axes.  If you want a resampled image to
    fill the entire figure, you can define an
    :class:`~matplotlib.axes.Axes` with size [0,1,0,1].

    An :class:`matplotlib.image.FigureImage` instance is returned.

    .. plot:: mpl_examples/pylab_examples/figimage_demo.py

    Additional kwargs are Artist kwargs passed on to
    :class:`~matplotlib.image.FigureImage`
    """
    # With hold off, adding an image replaces the figure contents.
    if not self._hold:
        self.clf()

    im = FigureImage(self, cmap, norm, xo, yo, origin, **kwargs)
    im.set_array(X)
    im.set_alpha(alpha)
    # An explicit norm wins over vmin/vmax (see the docstring table).
    if norm is None:
        im.set_clim(vmin, vmax)
    self.images.append(im)
    # Let the image remove itself from the figure's container on remove().
    im._remove_method = lambda h: self.images.remove(h)
    return im
def set_size_inches(self, *args, **kwargs):
    """
    set_size_inches(w,h, forward=False)

    Set the figure size in inches (1in == 2.54cm)

    Usage::

        fig.set_size_inches(w, h)    # OR
        fig.set_size_inches((w, h))

    optional kwarg *forward=True* will cause the canvas size to be
    automatically updated; e.g., you can resize the figure window
    from the shell

    ACCEPTS: a w,h tuple with w,h in inches

    See Also
    --------
    matplotlib.Figure.get_size_inches
    """
    forward = kwargs.get('forward', False)
    # Accept either set_size_inches(w, h) or set_size_inches((w, h)).
    if len(args) == 1:
        w, h = args[0]
    else:
        w, h = args

    self.bbox_inches.p1 = w, h

    if forward:
        # Resize the canvas (in pixels) to match the new size in inches.
        # (The dpi read previously happened unconditionally and was then
        # recomputed here; it is only needed on this branch.)
        dpival = self.dpi
        canvasw = w * dpival
        canvash = h * dpival
        manager = getattr(self.canvas, 'manager', None)
        if manager is not None:
            manager.resize(int(canvasw), int(canvash))
def get_size_inches(self):
    """
    Return the current figure size in inches (1in == 2.54cm) as a
    numpy array ``[width, height]``.

    See Also
    --------
    matplotlib.Figure.set_size_inches
    """
    return np.array(self.bbox_inches.p1)
def get_edgecolor(self):
    """Return the edge color of the Figure's background rectangle."""
    return self.patch.get_edgecolor()
def get_facecolor(self):
    """Return the face color of the Figure's background rectangle."""
    return self.patch.get_facecolor()
def get_figwidth(self):
    """Return the figure width in inches as a float."""
    return self.bbox_inches.width
def get_figheight(self):
    """Return the figure height in inches as a float."""
    return self.bbox_inches.height
def get_dpi(self):
    """Return the figure resolution in dots per inch as a float."""
    return self.dpi
def get_frameon(self):
    """Return the boolean flag that controls drawing the figure frame."""
    return self.frameon
def set_edgecolor(self, color):
    """
    Set the edge color of the Figure's background rectangle.

    ACCEPTS: any matplotlib color - see help(colors)
    """
    self.patch.set_edgecolor(color)
def set_facecolor(self, color):
    """
    Set the face color of the Figure's background rectangle.

    ACCEPTS: any matplotlib color - see help(colors)
    """
    self.patch.set_facecolor(color)
def set_dpi(self, val):
    """
    Set the dots-per-inch of the figure.

    ACCEPTS: float
    """
    self.dpi = val
def set_figwidth(self, val):
    """
    Set the width of the figure in inches.

    ACCEPTS: float
    """
    self.bbox_inches.x1 = val
def set_figheight(self, val):
    """
    Set the height of the figure in inches.

    ACCEPTS: float
    """
    self.bbox_inches.y1 = val
def set_frameon(self, b):
    """
    Set whether the figure frame (background) is displayed or invisible.

    ACCEPTS: boolean
    """
    self.frameon = b
def delaxes(self, a):
    """Remove axes *a* from the figure and update the current axes."""
    self._axstack.remove(a)
    # Notify registered axes observers (see add_axobserver).
    for func in self._axobservers:
        func(self)
def _make_key(self, *args, **kwargs):
    """Make a hashable key out of args and kwargs."""

    def fixitems(items):
        # items may have arrays and lists in them, so convert them
        # to tuples for the key
        ret = []
        for k, v in items:
            # some objects can define __getitem__ without being
            # iterable and in those cases the conversion to tuples
            # will fail. So instead of using the iterable(v) function
            # we simply try and convert to a tuple, and proceed if not.
            try:
                v = tuple(v)
            except Exception:
                pass
            ret.append((k, v))
        return tuple(ret)

    def fixlist(args):
        # Convert each iterable positional argument to a tuple so the
        # resulting key is hashable.
        ret = []
        for a in args:
            if iterable(a):
                a = tuple(a)
            ret.append(a)
        return tuple(ret)

    key = fixlist(args), fixitems(six.iteritems(kwargs))
    return key
@docstring.dedent_interpd
def add_axes(self, *args, **kwargs):
    """
    Add an axes at position *rect* [*left*, *bottom*, *width*,
    *height*] where all quantities are in fractions of figure
    width and height.  kwargs are legal
    :class:`~matplotlib.axes.Axes` kwargs plus *projection* which
    sets the projection type of the axes.  (For backward
    compatibility, ``polar=True`` may also be provided, which is
    equivalent to ``projection='polar'``).  Valid values for
    *projection* are: %(projection_names)s.  Some of these
    projections support additional kwargs, which may be provided
    to :meth:`add_axes`. Typical usage::

        rect = l,b,w,h
        fig.add_axes(rect)
        fig.add_axes(rect, frameon=False, axisbg='g')
        fig.add_axes(rect, polar=True)
        fig.add_axes(rect, projection='polar')
        fig.add_axes(ax)

    If the figure already has an axes with the same parameters,
    then it will simply make that axes current and return it.  If
    you do not want this behavior, e.g., you want to force the
    creation of a new Axes, you must use a unique set of args and
    kwargs.  The axes :attr:`~matplotlib.axes.Axes.label`
    attribute has been exposed for this purpose.  e.g., if you want
    two axes that are otherwise identical to be added to the
    figure, make sure you give them unique labels::

        fig.add_axes(rect, label='axes1')
        fig.add_axes(rect, label='axes2')

    In rare circumstances, add_axes may be called with a single
    argument, an Axes instance already created in the present
    figure but not in the figure's list of axes.  For example,
    if an axes has been removed with :meth:`delaxes`, it can
    be restored with::

        fig.add_axes(ax)

    In all cases, the :class:`~matplotlib.axes.Axes` instance
    will be returned.

    In addition to *projection*, the following kwargs are supported:

    %(Axes)s
    """
    if not len(args):
        return

    # shortcut the projection "key" modifications later on, if an axes
    # with the exact args/kwargs exists, return it immediately.
    key = self._make_key(*args, **kwargs)
    ax = self._axstack.get(key)
    if ax is not None:
        self.sca(ax)
        return ax

    if isinstance(args[0], Axes):
        # Re-adding an existing Axes instance (e.g. after delaxes()).
        a = args[0]
        assert(a.get_figure() is self)
    else:
        rect = args[0]
        projection_class, kwargs, key = process_projection_requirements(
            self, *args, **kwargs)

        # check that an axes of this type doesn't already exist, if it
        # does, set it as active and return it
        ax = self._axstack.get(key)
        if ax is not None and isinstance(ax, projection_class):
            self.sca(ax)
            return ax

        # create the new axes using the axes class given
        a = projection_class(self, rect, **kwargs)

    self._axstack.add(key, a)
    self.sca(a)
    return a
@docstring.dedent_interpd
def add_subplot(self, *args, **kwargs):
    """
    Add a subplot.  Examples::

        fig.add_subplot(111)

        # equivalent but more general
        fig.add_subplot(1,1,1)

        # add subplot with red background
        fig.add_subplot(212, axisbg='r')

        # add a polar subplot
        fig.add_subplot(111, projection='polar')

        # add Subplot instance sub
        fig.add_subplot(sub)

    *kwargs* are legal :class:`~matplotlib.axes.Axes` kwargs plus
    *projection*, which chooses a projection type for the axes.
    (For backward compatibility, *polar=True* may also be
    provided, which is equivalent to *projection='polar'*).  Valid
    values for *projection* are: %(projection_names)s.  Some of
    these projections
    support additional *kwargs*, which may be provided to
    :meth:`add_axes`.

    The :class:`~matplotlib.axes.Axes` instance will be returned.

    If the figure already has a subplot with key (*args*,
    *kwargs*) then it will simply make that subplot current and
    return it.

    .. seealso:: :meth:`~matplotlib.pyplot.subplot` for an
       explanation of the args.

    The following kwargs are supported:

    %(Axes)s
    """
    if not len(args):
        return

    if len(args) == 1 and isinstance(args[0], int):
        # Expand the three-digit shorthand, e.g. 111 -> (1, 1, 1).
        args = tuple([int(c) for c in str(args[0])])
        if len(args) != 3:
            raise ValueError("Integer subplot specification must " +
                             "be a three digit number.  " +
                             "Not {n:d}".format(n=len(args)))

    if isinstance(args[0], SubplotBase):

        a = args[0]
        assert(a.get_figure() is self)
        # make a key for the subplot (which includes the axes object id
        # in the hash)
        key = self._make_key(*args, **kwargs)
    else:
        projection_class, kwargs, key = process_projection_requirements(
            self, *args, **kwargs)

        # try to find the axes with this key in the stack
        ax = self._axstack.get(key)

        if ax is not None:
            if isinstance(ax, projection_class):
                # the axes already existed, so set it as active & return
                self.sca(ax)
                return ax
            else:
                # Undocumented convenience behavior:
                # subplot(111); subplot(111, projection='polar')
                # will replace the first with the second.
                # Without this, add_subplot would be simpler and
                # more similar to add_axes.
                self._axstack.remove(ax)

        a = subplot_class_factory(projection_class)(self, *args, **kwargs)

    self._axstack.add(key, a)
    self.sca(a)
    return a
def clf(self, keep_observers=False):
    """
    Clear the figure.

    Set *keep_observers* to True if, for example,
    a gui widget is tracking the axes in the figure.
    """
    self.suppressComposite = None
    self.callbacks = cbook.CallbackRegistry()

    for ax in tuple(self.axes):  # Iterate over the copy.
        ax.cla()
        self.delaxes(ax)         # removes ax from self._axstack

    toolbar = getattr(self.canvas, 'toolbar', None)
    if toolbar is not None:
        toolbar.update()
    self._axstack.clear()
    # Reset every artist container to empty.
    self.artists = []
    self.lines = []
    self.patches = []
    self.texts = []
    self.images = []
    self.legends = []
    if not keep_observers:
        self._axobservers = []
    self._suptitle = None
def clear(self):
    """Clear the figure; alias for :meth:`clf`."""
    self.clf()
@allow_rasterization
def draw(self, renderer):
    """
    Render the figure using :class:`matplotlib.backend_bases.RendererBase`
    instance *renderer*.
    """
    # draw the figure bounding box, perhaps none for white figure
    if not self.get_visible():
        return

    renderer.open_group('figure')

    if self.get_tight_layout() and self.axes:
        try:
            self.tight_layout(renderer, **self._tight_parameters)
        except ValueError:
            pass
            # ValueError can occur when resizing a window.

    if self.frameon:
        self.patch.draw(renderer)

    # a list of (zorder, func_to_call, list_of_args)
    dsu = []

    for a in self.patches:
        dsu.append((a.get_zorder(), a, a.draw, [renderer]))

    for a in self.lines:
        dsu.append((a.get_zorder(), a, a.draw, [renderer]))

    for a in self.artists:
        dsu.append((a.get_zorder(), a, a.draw, [renderer]))

    # override the renderer default if self.suppressComposite
    # is not None
    not_composite = renderer.option_image_nocomposite()
    if self.suppressComposite is not None:
        not_composite = self.suppressComposite

    if (len(self.images) <= 1 or not_composite or
            not cbook.allequal([im.origin for im in self.images])):
        # Draw images individually.
        for a in self.images:
            dsu.append((a.get_zorder(), a, a.draw, [renderer]))
    else:
        # make a composite image blending alpha
        # list of (_image.Image, ox, oy)
        mag = renderer.get_image_magnification()
        ims = [(im.make_image(mag), im.ox, im.oy, im.get_alpha())
               for im in self.images]

        im = _image.from_images(self.bbox.height * mag,
                                self.bbox.width * mag,
                                ims)

        im.is_grayscale = False
        l, b, w, h = self.bbox.bounds

        def draw_composite():
            # Deferred so the composite draws at the right z-order below.
            gc = renderer.new_gc()
            gc.set_clip_rectangle(self.bbox)
            gc.set_clip_path(self.get_clip_path())
            renderer.draw_image(gc, l, b, im)
            gc.restore()

        dsu.append((self.images[0].get_zorder(), self.images[0],
                    draw_composite, []))

    # render the axes
    for a in self.axes:
        dsu.append((a.get_zorder(), a, a.draw, [renderer]))

    # render the figure text
    for a in self.texts:
        dsu.append((a.get_zorder(), a, a.draw, [renderer]))

    for a in self.legends:
        dsu.append((a.get_zorder(), a, a.draw, [renderer]))

    # Skip animated artists; sort everything else by zorder and draw.
    dsu = [row for row in dsu if not row[1].get_animated()]
    dsu.sort(key=itemgetter(0))
    for zorder, a, func, args in dsu:
        func(*args)

    renderer.close_group('figure')

    self._cachedRenderer = renderer

    self.canvas.draw_event(renderer)
def draw_artist(self, a):
    """
    Draw :class:`matplotlib.artist.Artist` instance *a* only.

    This is available only after the figure has been drawn at least
    once, which caches a renderer.

    Raises RuntimeError if no renderer has been cached yet.
    """
    if self._cachedRenderer is None:
        # Previously a bare ``assert``, which is silently stripped under
        # ``python -O``; raise explicitly so misuse always gets a clear
        # error instead of a confusing downstream failure.
        raise RuntimeError('draw_artist can only be used after an '
                           'initial draw which caches the renderer')
    a.draw(self._cachedRenderer)
def get_axes(self):
    """Return the list of axes in the Figure (same as the ``axes`` property)."""
    return self.axes
def legend(self, handles, labels, *args, **kwargs):
    """
    Place a legend in the figure.  Labels are a sequence of
    strings, handles is a sequence of
    :class:`~matplotlib.lines.Line2D` or
    :class:`~matplotlib.patches.Patch` instances, and loc can be a
    string or an integer specifying the legend location

    USAGE::

      legend( (line1, line2, line3),
              ('label1', 'label2', 'label3'),
              'upper right')

    The *loc* location codes are::

      'best' : 0,          (currently not supported for figure legends)
      'upper right'  : 1,
      'upper left'   : 2,
      'lower left'   : 3,
      'lower right'  : 4,
      'right'        : 5,
      'center left'  : 6,
      'center right' : 7,
      'lower center' : 8,
      'upper center' : 9,
      'center'       : 10,

    *loc* can also be an (x,y) tuple in figure coords, which
    specifies the lower left of the legend box.  figure coords are
    (0,0) is the left, bottom of the figure and 1,1 is the right,
    top.

    Keyword arguments:

      *prop*: [ *None* | FontProperties | dict ]
        A :class:`matplotlib.font_manager.FontProperties`
        instance. If *prop* is a dictionary, a new instance will be
        created with *prop*. If *None*, use rc settings.

      *numpoints*: integer
        The number of points in the legend line, default is 4

      *scatterpoints*: integer
        The number of points in the legend line, default is 4

      *scatteryoffsets*: list of floats
        a list of yoffsets for scatter symbols in legend

      *markerscale*: [ *None* | scalar ]
        The relative size of legend markers vs. original. If *None*, use rc
        settings.

      *fancybox*: [ *None* | *False* | *True* ]
        if *True*, draw a frame with a round fancybox.  If *None*, use rc

      *shadow*: [ *None* | *False* | *True* ]
        If *True*, draw a shadow behind legend. If *None*, use rc settings.

      *ncol* : integer
        number of columns. default is 1

      *mode* : [ "expand" | *None* ]
        if mode is "expand", the legend will be horizontally expanded
        to fill the axes area (or *bbox_to_anchor*)

      *title* : string
        the legend title

    Padding and spacing between various elements use following keywords
    parameters. The dimensions of these values are given as a fraction
    of the fontsize. Values from rcParams will be used if None.

    ================   ====================================================
    Keyword            Description
    ================   ====================================================
    borderpad          the fractional whitespace inside the legend border
    labelspacing       the vertical space between the legend entries
    handlelength       the length of the legend handles
    handletextpad      the pad between the legend handle and text
    borderaxespad      the pad between the axes and legend border
    columnspacing      the spacing between columns
    ================   ====================================================

    .. Note:: Not all kinds of artist are supported by the legend.
              See LINK (FIXME) for details.

    **Example:**

    .. plot:: mpl_examples/pylab_examples/figlegend_demo.py
    """
    l = Legend(self, handles, labels, *args, **kwargs)
    self.legends.append(l)
    # Let the legend remove itself from the figure's container on remove().
    l._remove_method = lambda h: self.legends.remove(h)
    return l
@docstring.dedent_interpd
def text(self, x, y, s, *args, **kwargs):
    """
    Add text to figure.

    Call signature::

      text(x, y, s, fontdict=None, **kwargs)

    Add text to figure at location *x*, *y* (relative 0-1
    coords). See :func:`~matplotlib.pyplot.text` for the meaning
    of the other arguments.

    kwargs control the :class:`~matplotlib.text.Text` properties:

    %(Text)s
    """
    override = _process_text_args({}, *args, **kwargs)
    t = Text(x=x, y=y, text=s)

    t.update(override)
    self._set_artist_props(t)
    self.texts.append(t)
    # Let the text remove itself from the figure's container on remove().
    t._remove_method = lambda h: self.texts.remove(h)
    return t
def _set_artist_props(self, a):
    # Attach artist *a* to this figure (unless it is the figure itself)
    # and give it the figure-coordinate transform.
    if a != self:
        a.set_figure(self)
    a.set_transform(self.transFigure)
@docstring.dedent_interpd
def gca(self, **kwargs):
    """
    Get the current axes, creating one if necessary

    The following kwargs are supported for ensuring the returned axes
    adheres to the given projection etc., and for axes creation if
    the active axes does not exist:

    %(Axes)s

    """
    ckey, cax = self._axstack.current_key_axes()
    # if there exists an axes on the stack see if it maches
    # the desired axes configuration
    if cax is not None:

        # if no kwargs are given just return the current axes
        # this is a convenience for gca() on axes such as polar etc.
        if not kwargs:
            return cax

        # if the user has specified particular projection detail
        # then build up a key which can represent this
        else:
            # we don't want to modify the original kwargs
            # so take a copy so that we can do what we like to it
            kwargs_copy = kwargs.copy()
            projection_class, _, key = process_projection_requirements(
                self, **kwargs_copy)

            # let the returned axes have any gridspec by removing it from
            # the key
            ckey = ckey[1:]
            key = key[1:]

            # if the cax matches this key then return the axes, otherwise
            # continue and a new axes will be created
            if key == ckey and isinstance(cax, projection_class):
                return cax

    # no axes found, so create one which spans the figure
    return self.add_subplot(1, 1, 1, **kwargs)
def sca(self, a):
    """Make *a* the current axes and return it."""
    self._axstack.bubble(a)
    # Notify registered axes observers (see add_axobserver).
    for observer in self._axobservers:
        observer(self)
    return a
def _gci(self):
    """
    helper for :func:`~matplotlib.pyplot.gci`;
    do not use elsewhere.
    """
    # Look first for an image in the current Axes:
    cax = self._axstack.current_key_axes()[1]
    if cax is None:
        return None
    im = cax._gci()
    if im is not None:
        return im

    # If there is no image in the current Axes, search for
    # one in a previously created Axes.  Whether this makes
    # sense is debatable, but it is the documented behavior.
    for ax in reversed(self.axes):
        im = ax._gci()
        if im is not None:
            return im
    return None
def __getstate__(self):
    """Return a picklable state dict, dropping unpicklable attributes."""
    state = self.__dict__.copy()
    # the axobservers cannot currently be pickled.
    # Additionally, the canvas cannot currently be pickled, but this has
    # the benefit of meaning that a figure can be detached from one canvas,
    # and re-attached to another.
    for attr_to_pop in ('_axobservers', 'show',
                        'canvas', '_cachedRenderer'):
        state.pop(attr_to_pop, None)

    # add version information to the state
    state['__mpl_version__'] = _mpl_version

    # check to see if the figure has a manager and whether it is registered
    # with pyplot
    if getattr(self.canvas, 'manager', None) is not None:
        manager = self.canvas.manager
        import matplotlib._pylab_helpers
        if manager in list(six.itervalues(
                matplotlib._pylab_helpers.Gcf.figs)):
            # __setstate__ uses this flag to re-register with pyplot.
            state['_restore_to_pylab'] = True

    return state
def __setstate__(self, state):
    """Restore pickled state, rebuilding the unpicklable attributes."""
    version = state.pop('__mpl_version__')
    restore_to_pylab = state.pop('_restore_to_pylab', False)

    if version != _mpl_version:
        import warnings
        warnings.warn("This figure was saved with matplotlib version %s "
                      "and is unlikely to function correctly." %
                      (version, ))

    self.__dict__ = state

    # re-initialise some of the unstored state information
    self._axobservers = []
    self.canvas = None

    if restore_to_pylab:
        # lazy import to avoid circularity
        import matplotlib.pyplot as plt
        import matplotlib._pylab_helpers as pylab_helpers
        allnums = plt.get_fignums()
        # Assign the next free figure number.
        num = max(allnums) + 1 if allnums else 1
        mgr = plt._backend_mod.new_figure_manager_given_figure(num, self)

        # XXX The following is a copy and paste from pyplot. Consider
        # factoring to pylab_helpers

        if self.get_label():
            mgr.set_window_title(self.get_label())

        # make this figure current on button press event
        def make_active(event):
            pylab_helpers.Gcf.set_active(mgr)

        mgr._cidgcf = mgr.canvas.mpl_connect('button_press_event',
                                             make_active)

        pylab_helpers.Gcf.set_active(mgr)
        self.number = num

        plt.draw_if_interactive()
def add_axobserver(self, func):
    """Register *func* so that ``func(self)`` runs whenever the axes state changes."""
    self._axobservers.append(func)
    def savefig(self, *args, **kwargs):
        """
        Save the current figure.

        Call signature::

          savefig(fname, dpi=None, facecolor='w', edgecolor='w',
                  orientation='portrait', papertype=None, format=None,
                  transparent=False, bbox_inches=None, pad_inches=0.1,
                  frameon=None)

        The output formats available depend on the backend being used.

        Arguments:

          *fname*:
            A string containing a path to a filename, or a Python
            file-like object, or possibly some backend-dependent object
            such as :class:`~matplotlib.backends.backend_pdf.PdfPages`.

            If *format* is *None* and *fname* is a string, the output
            format is deduced from the extension of the filename. If
            the filename has no extension, the value of the rc parameter
            ``savefig.format`` is used.

            If *fname* is not a string, remember to specify *format* to
            ensure that the correct backend is used.

        Keyword arguments:

          *dpi*: [ *None* | ``scalar > 0`` ]
            The resolution in dots per inch.  If *None* it will default to
            the value ``savefig.dpi`` in the matplotlibrc file.

          *facecolor*, *edgecolor*:
            the colors of the figure rectangle

          *orientation*: [ 'landscape' | 'portrait' ]
            not supported on all backends; currently only on postscript output

          *papertype*:
            One of 'letter', 'legal', 'executive', 'ledger', 'a0' through
            'a10', 'b0' through 'b10'. Only supported for postscript
            output.

          *format*:
            One of the file extensions supported by the active
            backend.  Most backends support png, pdf, ps, eps and svg.

          *transparent*:
            If *True*, the axes patches will all be transparent; the
            figure patch will also be transparent unless facecolor
            and/or edgecolor are specified via kwargs.
            This is useful, for example, for displaying
            a plot on top of a colored background on a web page.  The
            transparency of these patches will be restored to their
            original values upon exit of this function.

          *frameon*:
            If *True*, the figure patch will be colored, if *False*, the
            figure background will be transparent.  If not provided, the
            rcParam 'savefig.frameon' will be used.

          *bbox_inches*:
            Bbox in inches. Only the given portion of the figure is
            saved. If 'tight', try to figure out the tight bbox of
            the figure.

          *pad_inches*:
            Amount of padding around the figure when bbox_inches is
            'tight'.

          *bbox_extra_artists*:
            A list of extra artists that will be considered when the
            tight bbox is calculated.

        """
        kwargs.setdefault('dpi', rcParams['savefig.dpi'])
        frameon = kwargs.pop('frameon', rcParams['savefig.frameon'])
        transparent = kwargs.pop('transparent',
                                 rcParams['savefig.transparent'])
        if transparent:
            kwargs.setdefault('facecolor', 'none')
            kwargs.setdefault('edgecolor', 'none')
            # Remember each axes patch's colors so they can be restored
            # after printing (transparency is temporary by contract).
            original_axes_colors = []
            for ax in self.axes:
                patch = ax.patch
                original_axes_colors.append((patch.get_facecolor(),
                                             patch.get_edgecolor()))
                patch.set_facecolor('none')
                patch.set_edgecolor('none')
        else:
            kwargs.setdefault('facecolor', rcParams['savefig.facecolor'])
            kwargs.setdefault('edgecolor', rcParams['savefig.edgecolor'])
        # Temporarily force the figure frame on/off for the duration of the
        # print, restoring the caller-visible state afterwards.
        if frameon:
            original_frameon = self.get_frameon()
            self.set_frameon(frameon)
        self.canvas.print_figure(*args, **kwargs)
        if frameon:
            self.set_frameon(original_frameon)
        if transparent:
            # Restore the axes patch colors saved above, in order.
            for ax, cc in zip(self.axes, original_axes_colors):
                ax.patch.set_facecolor(cc[0])
                ax.patch.set_edgecolor(cc[1])
    @docstring.dedent_interpd
    def colorbar(self, mappable, cax=None, ax=None, use_gridspec=True, **kw):
        """
        Create a colorbar for a ScalarMappable instance, *mappable*.

        Documentation for the pylab thin wrapper:
        %(colorbar_doc)s
        """
        if ax is None:
            ax = self.gca()
        # Store the value of gca so that we can set it back later on.
        current_ax = self.gca()
        if cax is None:
            if use_gridspec and isinstance(ax, SubplotBase):
                # Steal space within the subplot's gridspec cell so layout
                # machinery still sees consistent subplot specs.
                cax, kw = cbar.make_axes_gridspec(ax, **kw)
            else:
                cax, kw = cbar.make_axes(ax, **kw)
            cax.hold(True)
        cb = cbar.colorbar_factory(cax, mappable, **kw)
        # Creating the colorbar axes made them current; restore the caller's.
        self.sca(current_ax)
        return cb
def subplots_adjust(self, *args, **kwargs):
"""
Call signature::
subplots_adjust(left=None, bottom=None, right=None, top=None,
wspace=None, hspace=None)
Update the :class:`SubplotParams` with *kwargs* (defaulting to rc when
*None*) and update the subplot locations
"""
self.subplotpars.update(*args, **kwargs)
for ax in self.axes:
if not isinstance(ax, SubplotBase):
# Check if sharing a subplots axis
if (ax._sharex is not None and
isinstance(ax._sharex, SubplotBase)):
ax._sharex.update_params()
ax.set_position(ax._sharex.figbox)
elif (ax._sharey is not None and
isinstance(ax._sharey, SubplotBase)):
ax._sharey.update_params()
ax.set_position(ax._sharey.figbox)
else:
ax.update_params()
ax.set_position(ax.figbox)
def ginput(self, n=1, timeout=30, show_clicks=True, mouse_add=1,
mouse_pop=3, mouse_stop=2):
"""
Call signature::
ginput(self, n=1, timeout=30, show_clicks=True,
mouse_add=1, mouse_pop=3, mouse_stop=2)
Blocking call to interact with the figure.
This will wait for *n* clicks from the user and return a list of the
coordinates of each click.
If *timeout* is zero or negative, does not timeout.
If *n* is zero or negative, accumulate clicks until a middle click
(or potentially both mouse buttons at once) terminates the input.
Right clicking cancels last input.
The buttons used for the various actions (adding points, removing
points, terminating the inputs) can be overriden via the
arguments *mouse_add*, *mouse_pop* and *mouse_stop*, that give
the associated mouse button: 1 for left, 2 for middle, 3 for
right.
The keyboard can also be used to select points in case your mouse
does not have one or more of the buttons. The delete and backspace
keys act like right clicking (i.e., remove last point), the enter key
terminates input and any other key (not already used by the window
manager) selects a point.
"""
blocking_mouse_input = BlockingMouseInput(self,
mouse_add=mouse_add,
mouse_pop=mouse_pop,
mouse_stop=mouse_stop)
return blocking_mouse_input(n=n, timeout=timeout,
show_clicks=show_clicks)
def waitforbuttonpress(self, timeout=-1):
"""
Call signature::
waitforbuttonpress(self, timeout=-1)
Blocking call to interact with the figure.
This will return True is a key was pressed, False if a mouse
button was pressed and None if *timeout* was reached without
either being pressed.
If *timeout* is negative, does not timeout.
"""
blocking_input = BlockingKeyMouseInput(self)
return blocking_input(timeout=timeout)
def get_default_bbox_extra_artists(self):
bbox_artists = [artist for artist in self.get_children()
if artist.get_visible()]
for ax in self.axes:
if ax.get_visible():
bbox_artists.extend(ax.get_default_bbox_extra_artists())
# we don't want the figure's patch to influence the bbox calculation
bbox_artists.remove(self.patch)
return bbox_artists
def get_tightbbox(self, renderer):
"""
Return a (tight) bounding box of the figure in inches.
It only accounts axes title, axis labels, and axis
ticklabels. Needs improvement.
"""
bb = []
for ax in self.axes:
if ax.get_visible():
bb.append(ax.get_tightbbox(renderer))
if len(bb) == 0:
return self.bbox_inches
_bbox = Bbox.union([b for b in bb if b.width != 0 or b.height != 0])
bbox_inches = TransformedBbox(_bbox,
Affine2D().scale(1. / self.dpi))
return bbox_inches
    def tight_layout(self, renderer=None, pad=1.08, h_pad=None,
                     w_pad=None, rect=None):
        """
        Adjust subplot parameters to give specified padding.

        Parameters:

          *pad* : float
            padding between the figure edge and the edges of subplots,
            as a fraction of the font-size.
          *h_pad*, *w_pad* : float
            padding (height/width) between edges of adjacent subplots.
            Defaults to `pad_inches`.
          *rect* : if rect is given, it is interpreted as a rectangle
            (left, bottom, right, top) in the normalized figure
            coordinate that the whole subplots area (including
            labels) will fit into. Default is (0, 0, 1, 1).
        """
        from .tight_layout import (get_renderer, get_tight_layout_figure,
                                   get_subplotspec_list)
        # One entry per axes; None marks an axes not placed by a gridspec,
        # which the tight-layout solver cannot account for.
        subplotspec_list = get_subplotspec_list(self.axes)
        if None in subplotspec_list:
            warnings.warn("This figure includes Axes that are not "
                          "compatible with tight_layout, so its "
                          "results might be incorrect.")
        if renderer is None:
            renderer = get_renderer(self)
        # Solve for the subplot parameters, then apply them.
        kwargs = get_tight_layout_figure(self, self.axes, subplotspec_list,
                                         renderer,
                                         pad=pad, h_pad=h_pad, w_pad=w_pad,
                                         rect=rect)
        self.subplots_adjust(**kwargs)
def figaspect(arg):
    """
    Determine a figure size (width, height) in inches for a given aspect.

    If *arg* is a number it is used directly as the height/width ratio;
    if it has a ``shape`` attribute (an array) the ratio is taken from
    its first two dimensions, so an image of the array keeps its aspect.
    Be sure to create an axes with equal width and height, e.g.::

      # make a figure twice as tall as it is wide
      w, h = figaspect(2.)
      fig = Figure(figsize=(w,h))
      ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
      ax.imshow(A, **kwargs)

      # make a figure with the proper aspect for an array
      A = rand(5,3)
      w, h = figaspect(A)
      fig = Figure(figsize=(w,h))
      ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
      ax.imshow(A, **kwargs)

    Thanks to Fernando Perez for this function
    """
    # min/max sizes to respect when autoscaling.  If John likes the idea,
    # they could become rc parameters; for now they're hardwired.
    size_lo = np.array((4.0, 2.0))    # min length for width/height
    size_hi = np.array((16.0, 16.0))  # max length for width/height

    # Aspect ratio: rows/cols for an array, the numeric value otherwise.
    if hasattr(arg, 'shape'):
        nrows, ncols = arg.shape[:2]
        aspect = float(nrows) / ncols
    else:
        aspect = float(arg)

    # Start from the rc default figure height and derive the width,
    # keeping the requested aspect ratio.
    height = rcParams['figure.figsize'][1]
    size = np.array((height / aspect, height))

    # Sanity checks: don't drop either dimension below the minimum...
    size /= min(1.0, *(size / size_lo))
    # ...and avoid humongous windows as well.
    size /= max(1.0, *(size / size_hi))

    # Finally, a really funky aspect ratio is broken rather than allowed
    # to exceed the bounds (we don't want figures 10 feet tall!).
    return np.clip(size, size_lo, size_hi)
docstring.interpd.update(Figure=martist.kwdoc(Figure))
| gpl-2.0 |
CiscoSystems/neutron | neutron/tests/unit/test_api_api_common.py | 68 | 2904 | # Copyright (c) 2013 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testtools import matchers
from webob import exc
from neutron.api import api_common as common
from neutron.tests import base
class FakeController(common.NeutronController):
    # Minimal concrete controller: only the resource name is needed to
    # exercise NeutronController._prepare_request_body in the tests below.
    _resource_name = 'fake'
class APICommonTestCase(base.BaseTestCase):
    """Tests for NeutronController._prepare_request_body.

    Covers default filling, a missing body, a body keyed on the wrong
    resource name, and an explicit None for a required parameter.
    """
    def setUp(self):
        super(APICommonTestCase, self).setUp()
        self.controller = FakeController(None)
    def test_prepare_request_body(self):
        # Optional params absent from the body are filled with their
        # declared default value.
        body = {
            'fake': {
                'name': 'terminator',
                'model': 'T-800',
            }
        }
        params = [
            {'param-name': 'name',
             'required': True},
            {'param-name': 'model',
             'required': True},
            {'param-name': 'quote',
             'required': False,
             'default-value': "i'll be back"},
        ]
        expect = {
            'fake': {
                'name': 'terminator',
                'model': 'T-800',
                'quote': "i'll be back",
            }
        }
        actual = self.controller._prepare_request_body(body, params)
        self.assertThat(expect, matchers.Equals(actual))
    def test_prepare_request_body_none(self):
        # A missing body is synthesized entirely from the defaults.
        body = None
        params = [
            {'param-name': 'quote',
             'required': False,
             'default-value': "I'll be back"},
        ]
        expect = {
            'fake': {
                'quote': "I'll be back",
            }
        }
        actual = self.controller._prepare_request_body(body, params)
        self.assertThat(expect, matchers.Equals(actual))
    def test_prepare_request_body_keyerror(self):
        # A body keyed on anything but the resource name ('fake') is a
        # client error.
        body = {'t2': {}}
        params = []
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller._prepare_request_body,
                          body,
                          params)
    def test_prepare_request_param_value_none(self):
        # Explicit None for a required parameter is rejected.
        body = {
            'fake': {
                'name': None,
            }
        }
        params = [
            {'param-name': 'name',
             'required': True},
        ]
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller._prepare_request_body,
                          body,
                          params)
| apache-2.0 |
gcd0318/django | tests/sitemaps_tests/urls/http.py | 311 | 4069 | from datetime import date, datetime
from django.conf.urls import url
from django.conf.urls.i18n import i18n_patterns
from django.contrib.sitemaps import GenericSitemap, Sitemap, views
from django.http import HttpResponse
from django.utils import timezone
from django.views.decorators.cache import cache_page
from ..models import I18nTestModel, TestModel
class SimpleSitemap(Sitemap):
    # One synthetic item with a fixed location and a lastmod captured at
    # import time; the baseline sitemap used by most URL entries below.
    changefreq = "never"
    priority = 0.5
    location = '/location/'
    lastmod = datetime.now()
    def items(self):
        return [object()]
class SimpleI18nSitemap(Sitemap):
    # i18n=True makes the sitemap emit one URL per enabled language for
    # each I18nTestModel instance.
    changefreq = "never"
    priority = 0.5
    i18n = True
    def items(self):
        return I18nTestModel.objects.all()
class EmptySitemap(Sitemap):
    # No items at all; exercises rendering of an empty sitemap.
    changefreq = "never"
    priority = 0.5
    location = '/location/'
    def items(self):
        return []
class FixedLastmodSitemap(SimpleSitemap):
    # Deterministic lastmod (overrides SimpleSitemap's import-time value)
    # so tests can assert an exact timestamp.
    lastmod = datetime(2013, 3, 13, 10, 0, 0)
class FixedLastmodMixedSitemap(Sitemap):
    # Items with *mixed* lastmod support: o1 carries a lastmod attribute,
    # o2 deliberately does not.
    changefreq = "never"
    priority = 0.5
    location = '/location/'
    loop = 0
    def items(self):
        o1 = TestModel()
        o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
        o2 = TestModel()
        return [o1, o2]
class DateSiteMap(SimpleSitemap):
    # lastmod as a plain date (no time component).
    lastmod = date(2013, 3, 13)
class TimezoneSiteMap(SimpleSitemap):
    # Timezone-aware lastmod (UTC-5) to exercise offset serialization.
    lastmod = datetime(2013, 3, 13, 10, 0, 0, tzinfo=timezone.get_fixed_timezone(-300))
def testmodelview(request, id):
    # Trivial view used as the reverse() target of the i18n URL below.
    return HttpResponse()
# Sitemap registries, one per scenario; keys become sitemap section names.
simple_sitemaps = {
    'simple': SimpleSitemap,
}
simple_i18nsitemaps = {
    'simple': SimpleI18nSitemap,
}
empty_sitemaps = {
    'empty': EmptySitemap,
}
fixed_lastmod_sitemaps = {
    'fixed-lastmod': FixedLastmodSitemap,
}
fixed_lastmod__mixed_sitemaps = {
    'fixed-lastmod-mixed': FixedLastmodMixedSitemap,
}
generic_sitemaps = {
    'generic': GenericSitemap({'queryset': TestModel.objects.all()}),
}
# URLconf wiring each sitemap registry (and caching/i18n variants) to the
# contrib.sitemaps index and sitemap views under distinct prefixes.
urlpatterns = [
    url(r'^simple/index\.xml$', views.index, {'sitemaps': simple_sitemaps}),
    url(r'^simple/custom-index\.xml$', views.index,
        {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap_index.xml'}),
    url(r'^simple/sitemap-(?P<section>.+)\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/sitemap\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/i18n\.xml$', views.sitemap,
        {'sitemaps': simple_i18nsitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/custom-sitemap\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap.xml'},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^empty/sitemap\.xml$', views.sitemap,
        {'sitemaps': empty_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/sitemap\.xml$', views.sitemap,
        {'sitemaps': fixed_lastmod_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod-mixed/sitemap\.xml$', views.sitemap,
        {'sitemaps': fixed_lastmod__mixed_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/date-sitemap.xml$', views.sitemap,
        {'sitemaps': {'date-sitemap': DateSiteMap}},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/tz-sitemap.xml$', views.sitemap,
        {'sitemaps': {'tz-sitemap': TimezoneSiteMap}},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^generic/sitemap\.xml$', views.sitemap,
        {'sitemaps': generic_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    # Cached variants use a 1-second page cache to test cache behavior.
    url(r'^cached/index\.xml$', cache_page(1)(views.index),
        {'sitemaps': simple_sitemaps, 'sitemap_url_name': 'cached_sitemap'}),
    url(r'^cached/sitemap-(?P<section>.+)\.xml', cache_page(1)(views.sitemap),
        {'sitemaps': simple_sitemaps}, name='cached_sitemap')
]
# Language-prefixed URL for the i18n sitemap items to reverse against.
urlpatterns += i18n_patterns(
    url(r'^i18n/testmodel/(?P<id>\d+)/$', testmodelview, name='i18n_testmodel'),
)
| bsd-3-clause |
dylanGeng/BuildingMachineLearningSystemsWithPython | ch09/fft.py | 24 | 3673 | # This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
import sys
import os
import glob
import numpy as np
import scipy
import scipy.io.wavfile
from utils import GENRE_DIR, CHART_DIR
import matplotlib.pyplot as plt
from matplotlib.ticker import EngFormatter
def write_fft(fft_features, fn):
    """
    Write the FFT features to separate files to speed up processing.

    The features are saved next to the source file *fn* with the extension
    replaced by ".fft" (numpy appends ".npy" automatically, so the file on
    disk is "<base>.fft.npy", matching what read_fft() globs for).
    """
    base_fn, ext = os.path.splitext(fn)
    data_fn = base_fn + ".fft"
    np.save(data_fn, fft_features)
    # Bug fix: the original 'print("Written "%data_fn)' applied % to a
    # format string with no placeholder, raising TypeError at runtime.
    print("Written %s" % data_fn)
def create_fft(fn):
    # Compute and cache the first 1000 FFT magnitudes of a wav file.
    sample_rate, X = scipy.io.wavfile.read(fn)
    # NOTE(review): calling scipy.fft as a function is legacy-SciPy only;
    # in modern SciPy, scipy.fft is a module (use scipy.fft.fft / np.fft.fft).
    fft_features = abs(scipy.fft(X)[:1000])
    write_fft(fft_features, fn)
def read_fft(genre_list, base_dir=GENRE_DIR):
    """
    Load the cached FFT features for every genre in *genre_list*.

    Returns (X, y): a matrix with the first 2000 FFT coefficients of each
    cached file, and the matching integer genre labels.
    """
    features = []
    labels = []
    for label, genre in enumerate(genre_list):
        pattern = os.path.join(base_dir, genre, "*.fft.npy")
        file_list = glob.glob(pattern)
        # Fail loudly if a genre directory has no cached FFT files.
        assert(file_list), pattern
        for fn in file_list:
            features.append(np.load(fn)[:2000])
            labels.append(label)
    return np.array(features), np.array(labels)
def plot_wav_fft(wav_filename, desc=None):
    """Plot a wav file's waveform (top) and FFT magnitude (bottom),
    save the figure as <basename>_wav_fft.png and show it."""
    plt.clf()
    plt.figure(num=None, figsize=(6, 4))
    sample_rate, X = scipy.io.wavfile.read(wav_filename)
    spectrum = np.fft.fft(X)
    freq = np.fft.fftfreq(len(X), 1.0 / sample_rate)
    # Top subplot: the first 200 samples of the raw waveform.
    plt.subplot(211)
    num_samples = 200.0
    plt.xlim(0, num_samples / sample_rate)
    plt.xlabel("time [s]")
    plt.title(desc or wav_filename)
    plt.plot(np.arange(num_samples) / sample_rate, X[:num_samples])
    plt.grid(True)
    # Bottom subplot: FFT magnitude up to 5 kHz.
    plt.subplot(212)
    plt.xlim(0, 5000)
    plt.xlabel("frequency [Hz]")
    plt.xticks(np.arange(5) * 1000)
    if desc:
        # Lower-case the first letter so the title reads "FFT of <desc>".
        desc = desc.strip()
        fft_desc = desc[0].lower() + desc[1:]
    else:
        fft_desc = wav_filename
    plt.title("FFT of %s" % fft_desc)
    plt.plot(freq, abs(spectrum), linewidth=5)
    plt.grid(True)
    plt.tight_layout()
    rel_filename = os.path.split(wav_filename)[1]
    plt.savefig("%s_wav_fft.png" % os.path.splitext(rel_filename)[0],
                bbox_inches='tight')
    plt.show()
def plot_wav_fft_demo():
    """Plot waveform/FFT figures for the three demo sine-wave files."""
    demos = [("sine_a.wav", "400Hz sine wave"),
             ("sine_b.wav", "3,000Hz sine wave"),
             ("sine_mix.wav", "Mixed sine wave")]
    for filename, description in demos:
        plot_wav_fft(filename, description)
def plot_specgram(ax, fn):
    # Draw the spectrogram of wav file *fn* on axes *ax*; the x extent is
    # pinned to 0-30 s so every subplot shares the same time scale.
    sample_rate, X = scipy.io.wavfile.read(fn)
    ax.specgram(X, Fs=sample_rate, xextent=(0, 30))
def plot_specgrams(base_dir=CHART_DIR):
    """
    Plot a bunch of spectrograms of wav files in different genres
    """
    plt.clf()
    genres = ["classical", "jazz", "country", "pop", "rock", "metal"]
    # One row per genre, up to num_files songs per row.
    num_files = 3
    f, axes = plt.subplots(len(genres), num_files)
    for genre_idx, genre in enumerate(genres):
        for idx, fn in enumerate(glob.glob(os.path.join(GENRE_DIR, genre, "*.wav"))):
            if idx == num_files:
                break
            axis = axes[genre_idx, idx]
            # Engineering notation (e.g. "10k") for the frequency axis.
            axis.yaxis.set_major_formatter(EngFormatter())
            axis.set_title("%s song %i" % (genre, idx + 1))
            plot_specgram(axis, fn)
    specgram_file = os.path.join(base_dir, "Spectrogram_Genres.png")
    plt.savefig(specgram_file, bbox_inches="tight")
    plt.show()
if __name__ == "__main__":
    # With an argument: plot that wav file; without: run the sine-wave demo.
    # The commented-out lines were used once to build the FFT cache.
    # for fn in glob.glob(os.path.join(sys.argv[1], "*.wav")):
    #     create_fft(fn)
    # plot_decomp()
    if len(sys.argv) > 1:
        plot_wav_fft(sys.argv[1], desc="some sample song")
    else:
        plot_wav_fft_demo()
    plot_specgrams()
fengbaicanhe/intellij-community | python/lib/Lib/site-packages/django/db/backends/creation.py | 71 | 21002 | import sys
import time
from django.conf import settings
# The prefix to put on the default database name when creating
# the test database.
TEST_DATABASE_PREFIX = 'test_'
class BaseDatabaseCreation(object):
"""
This class encapsulates all backend-specific differences that pertain to
database *creation*, such as the column types to use for particular Django
Fields, the SQL used to create and destroy tables, and the creation and
destruction of test databases.
"""
data_types = {}
    def __init__(self, connection):
        # The DatabaseWrapper this helper generates SQL for; quoting,
        # tablespace and sequence SQL all come from connection.ops.
        self.connection = connection
    def _digest(self, *args):
        """
        Generates a 32-bit digest of a set of arguments that can be used to
        shorten identifying names.

        NOTE(review): Python 2 only (the ``L`` literal); also relies on
        ``hash()`` being stable across processes, which does not hold for
        strings on Python 3 with hash randomization.
        """
        return '%x' % (abs(hash(args)) % 4294967296L) # 2**32
def sql_create_model(self, model, style, known_models=set()):
"""
Returns the SQL required to create a single model, as a tuple of:
(list_of_sql, pending_references_dict)
"""
opts = model._meta
if not opts.managed or opts.proxy:
return [], {}
final_output = []
table_output = []
pending_references = {}
qn = self.connection.ops.quote_name
for f in opts.local_fields:
col_type = f.db_type(connection=self.connection)
tablespace = f.db_tablespace or opts.db_tablespace
if col_type is None:
# Skip ManyToManyFields, because they're not represented as
# database columns in this table.
continue
# Make the definition (e.g. 'foo VARCHAR(30)') for this field.
field_output = [style.SQL_FIELD(qn(f.column)),
style.SQL_COLTYPE(col_type)]
if not f.null:
field_output.append(style.SQL_KEYWORD('NOT NULL'))
if f.primary_key:
field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
elif f.unique:
field_output.append(style.SQL_KEYWORD('UNIQUE'))
if tablespace and f.unique:
# We must specify the index tablespace inline, because we
# won't be generating a CREATE INDEX statement for this field.
field_output.append(self.connection.ops.tablespace_sql(tablespace, inline=True))
if f.rel:
ref_output, pending = self.sql_for_inline_foreign_key_references(f, known_models, style)
if pending:
pr = pending_references.setdefault(f.rel.to, []).append((model, f))
else:
field_output.extend(ref_output)
table_output.append(' '.join(field_output))
for field_constraints in opts.unique_together:
table_output.append(style.SQL_KEYWORD('UNIQUE') + ' (%s)' % \
", ".join([style.SQL_FIELD(qn(opts.get_field(f).column)) for f in field_constraints]))
full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(qn(opts.db_table)) + ' (']
for i, line in enumerate(table_output): # Combine and add commas.
full_statement.append(' %s%s' % (line, i < len(table_output)-1 and ',' or ''))
full_statement.append(')')
if opts.db_tablespace:
full_statement.append(self.connection.ops.tablespace_sql(opts.db_tablespace))
full_statement.append(';')
final_output.append('\n'.join(full_statement))
if opts.has_auto_field:
# Add any extra SQL needed to support auto-incrementing primary keys.
auto_column = opts.auto_field.db_column or opts.auto_field.name
autoinc_sql = self.connection.ops.autoinc_sql(opts.db_table, auto_column)
if autoinc_sql:
for stmt in autoinc_sql:
final_output.append(stmt)
return final_output, pending_references
def sql_for_inline_foreign_key_references(self, field, known_models, style):
"Return the SQL snippet defining the foreign key reference for a field"
qn = self.connection.ops.quote_name
if field.rel.to in known_models:
output = [style.SQL_KEYWORD('REFERENCES') + ' ' + \
style.SQL_TABLE(qn(field.rel.to._meta.db_table)) + ' (' + \
style.SQL_FIELD(qn(field.rel.to._meta.get_field(field.rel.field_name).column)) + ')' +
self.connection.ops.deferrable_sql()
]
pending = False
else:
# We haven't yet created the table to which this field
# is related, so save it for later.
output = []
pending = True
return output, pending
    def sql_for_pending_references(self, model, style, pending_references):
        "Returns any ALTER TABLE statements to add constraints after the fact."
        from django.db.backends.util import truncate_name
        if not model._meta.managed or model._meta.proxy:
            return []
        qn = self.connection.ops.quote_name
        final_output = []
        opts = model._meta
        if model in pending_references:
            # Each entry is a (model, field) pair recorded by
            # sql_create_model when this table did not exist yet.
            for rel_class, f in pending_references[model]:
                rel_opts = rel_class._meta
                r_table = rel_opts.db_table
                r_col = f.column
                table = opts.db_table
                col = opts.get_field(f.rel.field_name).column
                # For MySQL, r_name must be unique in the first 64 characters.
                # So we are careful with character usage here.
                r_name = '%s_refs_%s_%s' % (r_col, col, self._digest(r_table, table))
                final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \
                    (qn(r_table), qn(truncate_name(r_name, self.connection.ops.max_name_length())),
                    qn(r_col), qn(table), qn(col),
                    self.connection.ops.deferrable_sql()))
            # Consumed: the caller's dict must not see these again.
            del pending_references[model]
        return final_output
def sql_for_many_to_many(self, model, style):
"Return the CREATE TABLE statments for all the many-to-many tables defined on a model"
import warnings
warnings.warn(
'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
DeprecationWarning
)
output = []
for f in model._meta.local_many_to_many:
if model._meta.managed or f.rel.to._meta.managed:
output.extend(self.sql_for_many_to_many_field(model, f, style))
return output
    def sql_for_many_to_many_field(self, model, f, style):
        "Return the CREATE TABLE statements for a single m2m field"
        import warnings
        warnings.warn(
            'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
            DeprecationWarning
        )
        from django.db import models
        from django.db.backends.util import truncate_name
        output = []
        # Only auto-created through-tables are handled here; explicit
        # 'through' models get their SQL as ordinary models.
        if f.auto_created:
            opts = model._meta
            qn = self.connection.ops.quote_name
            tablespace = f.db_tablespace or opts.db_tablespace
            if tablespace:
                sql = self.connection.ops.tablespace_sql(tablespace, inline=True)
                if sql:
                    tablespace_sql = ' ' + sql
                else:
                    tablespace_sql = ''
            else:
                tablespace_sql = ''
            table_output = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + \
                style.SQL_TABLE(qn(f.m2m_db_table())) + ' (']
            table_output.append('    %s %s %s%s,' %
                (style.SQL_FIELD(qn('id')),
                style.SQL_COLTYPE(models.AutoField(primary_key=True).db_type(connection=self.connection)),
                style.SQL_KEYWORD('NOT NULL PRIMARY KEY'),
                tablespace_sql))
            deferred = []
            # FK columns to both sides; references that can't be inlined
            # come back in 'deferred' and become ALTER TABLE below.
            inline_output, deferred = self.sql_for_inline_many_to_many_references(model, f, style)
            table_output.extend(inline_output)
            # The (from, to) pair must be unique per relation row.
            table_output.append('    %s (%s, %s)%s' %
                (style.SQL_KEYWORD('UNIQUE'),
                style.SQL_FIELD(qn(f.m2m_column_name())),
                style.SQL_FIELD(qn(f.m2m_reverse_name())),
                tablespace_sql))
            table_output.append(')')
            if opts.db_tablespace:
                # f.db_tablespace is only for indices, so ignore its value here.
                table_output.append(self.connection.ops.tablespace_sql(opts.db_tablespace))
            table_output.append(';')
            output.append('\n'.join(table_output))
            for r_table, r_col, table, col in deferred:
                r_name = '%s_refs_%s_%s' % (r_col, col, self._digest(r_table, table))
                output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' %
                    (qn(r_table),
                    qn(truncate_name(r_name, self.connection.ops.max_name_length())),
                    qn(r_col), qn(table), qn(col),
                    self.connection.ops.deferrable_sql()))
            # Add any extra SQL needed to support auto-incrementing PKs
            autoinc_sql = self.connection.ops.autoinc_sql(f.m2m_db_table(), 'id')
            if autoinc_sql:
                for stmt in autoinc_sql:
                    output.append(stmt)
        return output
    def sql_for_inline_many_to_many_references(self, model, field, style):
        "Create the references to other tables required by a many-to-many table"
        import warnings
        warnings.warn(
            'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
            DeprecationWarning
        )
        from django.db import models
        opts = model._meta
        qn = self.connection.ops.quote_name
        # Two inline FK column definitions: one back to *model*, one to the
        # related model; both NOT NULL and deferrable where supported.
        table_output = [
            '    %s %s %s %s (%s)%s,' %
                (style.SQL_FIELD(qn(field.m2m_column_name())),
                style.SQL_COLTYPE(models.ForeignKey(model).db_type(connection=self.connection)),
                style.SQL_KEYWORD('NOT NULL REFERENCES'),
                style.SQL_TABLE(qn(opts.db_table)),
                style.SQL_FIELD(qn(opts.pk.column)),
                self.connection.ops.deferrable_sql()),
            '    %s %s %s %s (%s)%s,' %
                (style.SQL_FIELD(qn(field.m2m_reverse_name())),
                style.SQL_COLTYPE(models.ForeignKey(field.rel.to).db_type(connection=self.connection)),
                style.SQL_KEYWORD('NOT NULL REFERENCES'),
                style.SQL_TABLE(qn(field.rel.to._meta.db_table)),
                style.SQL_FIELD(qn(field.rel.to._meta.pk.column)),
                self.connection.ops.deferrable_sql())
        ]
        # This base implementation never defers; backends may override.
        deferred = []
        return table_output, deferred
def sql_indexes_for_model(self, model, style):
"Returns the CREATE INDEX SQL statements for a single model"
if not model._meta.managed or model._meta.proxy:
return []
output = []
for f in model._meta.local_fields:
output.extend(self.sql_indexes_for_field(model, f, style))
return output
def sql_indexes_for_field(self, model, f, style):
"Return the CREATE INDEX SQL statements for a single model field"
from django.db.backends.util import truncate_name
if f.db_index and not f.unique:
qn = self.connection.ops.quote_name
tablespace = f.db_tablespace or model._meta.db_tablespace
if tablespace:
sql = self.connection.ops.tablespace_sql(tablespace)
if sql:
tablespace_sql = ' ' + sql
else:
tablespace_sql = ''
else:
tablespace_sql = ''
i_name = '%s_%s' % (model._meta.db_table, self._digest(f.column))
output = [style.SQL_KEYWORD('CREATE INDEX') + ' ' +
style.SQL_TABLE(qn(truncate_name(i_name, self.connection.ops.max_name_length()))) + ' ' +
style.SQL_KEYWORD('ON') + ' ' +
style.SQL_TABLE(qn(model._meta.db_table)) + ' ' +
"(%s)" % style.SQL_FIELD(qn(f.column)) +
"%s;" % tablespace_sql]
else:
output = []
return output
    def sql_destroy_model(self, model, references_to_delete, style):
        "Return the DROP TABLE and restraint dropping statements for a single model"
        if not model._meta.managed or model._meta.proxy:
            return []
        # Drop the table now
        qn = self.connection.ops.quote_name
        output = ['%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
                              style.SQL_TABLE(qn(model._meta.db_table)))]
        if model in references_to_delete:
            # Incoming FK constraints must be dropped before the table goes.
            output.extend(self.sql_remove_table_constraints(model, references_to_delete, style))
        if model._meta.has_auto_field:
            # Backends with sequences (e.g. Oracle) need them dropped too.
            ds = self.connection.ops.drop_sequence_sql(model._meta.db_table)
            if ds:
                output.append(ds)
        return output
    def sql_remove_table_constraints(self, model, references_to_delete, style):
        # Return ALTER TABLE statements dropping every foreign key that
        # points at *model*'s table, consuming its entry in
        # references_to_delete.
        from django.db.backends.util import truncate_name
        if not model._meta.managed or model._meta.proxy:
            return []
        output = []
        qn = self.connection.ops.quote_name
        for rel_class, f in references_to_delete[model]:
            table = rel_class._meta.db_table
            col = f.column
            r_table = model._meta.db_table
            r_col = model._meta.get_field(f.rel.field_name).column
            # Must reproduce the same constraint name generated at
            # creation time (see sql_for_pending_references).
            r_name = '%s_refs_%s_%s' % (col, r_col, self._digest(table, r_table))
            output.append('%s %s %s %s;' % \
                (style.SQL_KEYWORD('ALTER TABLE'),
                style.SQL_TABLE(qn(table)),
                style.SQL_KEYWORD(self.connection.ops.drop_foreignkey_sql()),
                style.SQL_FIELD(qn(truncate_name(r_name, self.connection.ops.max_name_length())))))
        del references_to_delete[model]
        return output
    def sql_destroy_many_to_many(self, model, f, style):
        "Returns the DROP TABLE statements for a single m2m field"
        import warnings
        warnings.warn(
            'Database creation API for m2m tables has been deprecated. M2M models are now automatically generated',
            DeprecationWarning
        )
        qn = self.connection.ops.quote_name
        output = []
        # Only auto-created through-tables are dropped here; explicit
        # 'through' models are dropped as ordinary models.
        if f.auto_created:
            output.append("%s %s;" % (style.SQL_KEYWORD('DROP TABLE'),
                style.SQL_TABLE(qn(f.m2m_db_table()))))
            ds = self.connection.ops.drop_sequence_sql("%s_%s" % (model._meta.db_table, f.column))
            if ds:
                output.append(ds)
        return output
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command
        test_database_name = self._get_test_db_name()
        if verbosity >= 1:
            test_db_repr = ''
            if verbosity >= 2:
                test_db_repr = " ('%s')" % test_database_name
            print "Creating test database for alias '%s'%s..." % (self.connection.alias, test_db_repr)
        self._create_test_db(verbosity, autoclobber)
        # Point the existing connection at the freshly created database.
        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name
        # Confirm the feature set of the test database
        self.connection.features.confirm()
        # Report syncdb messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded)
        call_command('syncdb', verbosity=max(verbosity - 1, 0), interactive=False, database=self.connection.alias)
        from django.core.cache import get_cache
        from django.core.cache.backends.db import BaseDatabaseCache
        # Any DB-backed cache needs its table created in the test database,
        # subject to the router allowing it on this alias.
        for cache_alias in settings.CACHES:
            cache = get_cache(cache_alias)
            if isinstance(cache, BaseDatabaseCache):
                from django.db import router
                if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
                    call_command('createcachetable', cache._table, database=self.connection.alias)
        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()
        return test_database_name
def _get_test_db_name(self):
"""
Internal implementation - returns the name of the test DB that will be
created. Only useful when called from create_test_db() and
_create_test_db() and when no external munging is done with the 'NAME'
or 'TEST_NAME' settings.
"""
if self.connection.settings_dict['TEST_NAME']:
return self.connection.settings_dict['TEST_NAME']
return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
def _create_test_db(self, verbosity, autoclobber):
    "Internal implementation - creates the test db tables."
    suffix = self.sql_table_creation_suffix()
    test_database_name = self._get_test_db_name()

    qn = self.connection.ops.quote_name

    # Create the test database and connect to it. We need to autocommit
    # if the database supports it because PostgreSQL doesn't allow
    # CREATE/DROP DATABASE statements within transactions.
    cursor = self.connection.cursor()
    self.set_autocommit()
    try:
        cursor.execute("CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
    except Exception, e:
        sys.stderr.write("Got an error creating the test database: %s\n" % e)
        if not autoclobber:
            confirm = raw_input("Type 'yes' if you would like to try deleting the test database '%s', or 'no' to cancel: " % test_database_name)
        # 'confirm' is only read when autoclobber is False, so short-circuit
        # evaluation keeps it from being unbound here.
        if autoclobber or confirm == 'yes':
            try:
                if verbosity >= 1:
                    print "Destroying old test database '%s'..." % self.connection.alias
                cursor.execute("DROP DATABASE %s" % qn(test_database_name))
                cursor.execute("CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
            except Exception, e:
                # Even the drop-and-recreate failed; nothing more to try.
                sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                sys.exit(2)
        else:
            print "Tests cancelled."
            sys.exit(1)

    return test_database_name
def destroy_test_db(self, old_database_name, verbosity=1):
    """
    Destroy a test database, prompting the user for confirmation if the
    database already exists. Restores the connection's settings to point
    at ``old_database_name`` before dropping the test database.
    """
    self.connection.close()
    test_database_name = self.connection.settings_dict['NAME']
    if verbosity >= 1:
        test_db_repr = ''
        if verbosity >= 2:
            test_db_repr = " ('%s')" % test_database_name
        print "Destroying test database for alias '%s'%s..." % (self.connection.alias, test_db_repr)

    # Point the connection back at the original database so the DROP can be
    # issued from outside the database being dropped.
    self.connection.settings_dict['NAME'] = old_database_name

    self._destroy_test_db(test_database_name, verbosity)
def _destroy_test_db(self, test_database_name, verbosity):
    "Internal implementation - remove the test db tables."
    # Remove the test database to clean up after ourselves. Connect to the
    # previous (non-test) database to do so, because a database cannot be
    # dropped while connections to it are open.
    quote = self.connection.ops.quote_name
    drop_cursor = self.connection.cursor()
    self.set_autocommit()
    # To avoid "database is being accessed by other users" errors.
    time.sleep(1)
    drop_cursor.execute("DROP DATABASE %s" % quote(test_database_name))
    self.connection.close()
def set_autocommit(self):
    "Make sure a connection is in autocommit mode."
    raw_connection = self.connection.connection
    _missing = object()
    autocommit = getattr(raw_connection, "autocommit", _missing)
    if autocommit is not _missing:
        # Some drivers expose autocommit as a method, others as a flag.
        if callable(autocommit):
            autocommit(True)
        else:
            raw_connection.autocommit = True
    elif hasattr(raw_connection, "set_isolation_level"):
        # psycopg-style API: isolation level 0 means autocommit.
        raw_connection.set_isolation_level(0)
def sql_table_creation_suffix(self):
    "SQL to append to the end of the test table creation statements"
    # No suffix by default; database backends override this when they need
    # extra clauses (encoding, tablespace, etc.).
    return ''
| apache-2.0 |
kivio/PerfKitBenchmarker | tools/side-by-side/side_by_side.py | 5 | 14125 | #!/usr/bin/env python
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""Runs a side-by-side comparison of two PerfKitBenchmarker revisions.
Given a pair of revisions (e.g., 'dev', 'master') and command-line arguments,
this tool runs 'pkb.py' with for each and creates a report showing the
differences in the results between the two runs.
"""
import argparse
import collections
import contextlib
import difflib
import itertools
import json
import logging
import os
import pprint
import shlex
import shutil
import subprocess
import tempfile
import jinja2
# Flags used for both pkb.py runs when the user supplies none.
DEFAULT_FLAGS = ('--cloud=GCP', '--machine_type=n1-standard-4',
                 '--benchmarks=netperf')

# Keys in the sample JSON we expect to vary between runs.
# These will be removed prior to diffing samples.
VARYING_KEYS = 'run_uri', 'sample_uri', 'timestamp', 'value'

# Template name, in same directory as this file.
TEMPLATE = 'side_by_side.html.j2'

# Thresholds for highlighting results (absolute percent difference).
SMALL_CHANGE_THRESHOLD = 5
MEDIUM_CHANGE_THRESHOLD = 10
LARGE_CHANGE_THRESHOLD = 25

# Bundles one pkb run: revision name/description/sha1, the parsed sample
# dicts, and the flags that produced them.
PerfKitBenchmarkerResult = collections.namedtuple(
    'PerfKitBenchmarkerResult',
    ['name', 'description', 'sha1', 'samples', 'flags'])
@contextlib.contextmanager
def TempDir(delete=True, **kwargs):
    """Directory equivalent of tempfile.NamedTemporaryFile.

    When used as a context manager, yields a temporary directory which by
    default is removed when the context manager goes out of scope.

    Args:
      delete: Delete the directory on exit?
      **kwargs: Passed to tempfile.mkdtemp.

    Yields:
      String. Path to the temporary directory.
    """
    path = tempfile.mkdtemp(**kwargs)
    logging.info('Created %s', path)
    try:
        yield path
    finally:
        # Clean up even if the body raised, unless the caller opted out.
        if delete:
            logging.info('Removing %s', path)
            shutil.rmtree(path)
def _GitCommandPrefix():
"""Prefix for all git commands.
Returns:
list of strings; 'git' with an appropriate '--git-dir' flag.
"""
git_dir = os.path.join(os.path.dirname(__file__), '..', '..', '.git')
return ['git', '--git-dir', git_dir]
def _GitRevParse(revision):
    """Returns the output of 'git rev-parse' for 'revision'."""
    command = _GitCommandPrefix() + ['rev-parse', revision]
    # Strip the trailing newline git appends to its output.
    return subprocess.check_output(command).rstrip()
def _GitDescribe(revision):
    """Returns the output of 'git describe' for 'revision'."""
    # '--always' falls back to the abbreviated sha1 when no tag matches.
    command = _GitCommandPrefix() + ['describe', '--always', revision]
    return subprocess.check_output(command).rstrip()
@contextlib.contextmanager
def PerfKitBenchmarkerCheckout(revision):
    """Yields a directory with PerfKitBenchmarker checked out to 'revision'.

    Streams 'git archive <revision>' into 'tar xf -' inside a temporary
    directory, yielding that directory while it exists.
    """
    archive_cmd = _GitCommandPrefix() + ['archive', revision]
    logging.info('Running: %s', archive_cmd)
    p_archive = subprocess.Popen(archive_cmd, stdout=subprocess.PIPE)
    with TempDir(prefix='pkb-test-') as td:
        tar_cmd = ['tar', 'xf', '-']
        logging.info('Running %s in %s', tar_cmd, td)
        p_tar = subprocess.Popen(tar_cmd, stdin=p_archive.stdout, cwd=td)
        archive_status = p_archive.wait()
        tar_status = p_tar.wait()
        if archive_status:
            # BUG FIX: CalledProcessError's signature is (returncode, cmd);
            # the original passed the arguments swapped for this branch.
            raise subprocess.CalledProcessError(archive_status, archive_cmd)
        if tar_status:
            raise subprocess.CalledProcessError(tar_status, tar_cmd)
        yield td
def RunPerfKitBenchmarker(revision, flags):
    """Runs perfkitbenchmarker, returning the results as parsed JSON.

    Args:
      revision: string. git commit identifier. Version of PerfKitBenchmarker to
        run.
      flags: list of strings. Default arguments to pass to `pkb.py.`

    Returns:
      List of dicts. Deserialized JSON output of running PerfKitBenchmarker with
      `--json_path`.
    """
    sha1 = _GitRevParse(revision)
    description = _GitDescribe(revision)
    with PerfKitBenchmarkerCheckout(revision) as td:
        with tempfile.NamedTemporaryFile(suffix='.json') as tf:
            # pkb.py writes one JSON-encoded sample per line into tf.name;
            # the tempfile must outlive the subprocess, hence the nesting.
            flags = flags + ['--json_path=' + tf.name]
            cmd = ['./pkb.py'] + flags
            logging.info('Running %s in %s', cmd, td)
            subprocess.check_call(cmd, cwd=td)
            samples = [json.loads(line) for line in tf]
    return PerfKitBenchmarkerResult(name=revision, sha1=sha1, flags=flags,
                                    samples=samples, description=description)
def _SplitLabels(labels):
"""Parse the 'labels' key from a PerfKitBenchmarker record.
Labels are recorded in '|key:value|,|key:value|' form.
This function transforms them to a dict.
Args:
labels: string. labels to parse.
Returns:
dict. Parsed 'labels'.
"""
result = {}
for item in labels.strip('|').split('|,|'):
k, v = item.split(':', 1)
result[k] = v
return result
def _CompareSamples(a, b, context=True, numlines=1):
    """Generate an HTML table showing differences between 'a' and 'b'.

    Args:
      a: dict, as output by PerfKitBenchmarker.
      b: dict, as output by PerfKitBenchmarker.
      context: boolean. Show context in diff? If False, all lines are output,
        even those which are equal.
      numlines: int. Passed to difflib.Htmldiff.make_table.

    Returns:
      string or None. An HTML table, or None if there are no differences.
    """
    left = a.copy()
    right = b.copy()
    # Expand the packed 'labels' string into a nested dict for readability.
    left['metadata'] = _SplitLabels(left.pop('labels', ''))
    right['metadata'] = _SplitLabels(right.pop('labels', ''))
    # Prune the keys in VARYING_KEYS prior to comparison to make the diff
    # more informative.
    for sample in (left, right):
        for key in VARYING_KEYS:
            sample.pop(key, None)
    left_lines = pprint.pformat(left).splitlines()
    right_lines = pprint.pformat(right).splitlines()
    if context and left_lines == right_lines:
        return None
    return difflib.HtmlDiff().make_table(
        left_lines, right_lines, context=context, numlines=numlines)
def _MatchSamples(base_samples, head_samples):
"""Match items from base_samples with items from head_samples.
Rows are matched using 'test', 'metric', and 'unit' fields.
Args:
base_samples: List of dicts.
head_samples: List of dicts.
Returns:
List of pairs, each item of the pair containing either a dict or None.
"""
def ExtractKeys(samples):
return [(i['test'], i['metric'], i['unit']) for i in samples]
base_keys = ExtractKeys(base_samples)
head_keys = ExtractKeys(head_samples)
sm = difflib.SequenceMatcher('', base_keys, head_keys)
result = []
for opcode, base_begin, base_end, head_begin, head_end in sm.get_opcodes():
if opcode == 'equal':
result.extend(zip(base_samples[base_begin:base_end],
head_samples[head_begin:head_end]))
elif opcode == 'replace':
result.extend(itertools.izip_longest(base_samples[base_begin:base_end],
head_samples[head_begin:head_end]))
elif opcode == 'delete':
result.extend(zip(base_samples[base_begin:base_end],
[None] * (base_end - base_begin)))
elif opcode == 'insert':
result.extend(zip([None] * (head_end - head_begin),
head_samples[head_begin:head_end]))
else:
raise AssertionError('Unknown op: ' + opcode)
return result
def RenderResults(base_result, head_result, template_name=TEMPLATE,
                  **kwargs):
    """Render the results of a comparison as an HTML page.

    Args:
      base_result: PerfKitBenchmarkerResult. Result of running against base
        revision.
      head_result: PerfKitBenchmarkerResult. Result of running against head
        revision.
      template_name: string. The filename of the template.
      kwargs: Additional arguments to Template.render.

    Returns:
      String. The rendered HTML page.
    """
    def _ClassForPercentDifference(percent_diff):
        """Crude highlighting of differences between runs.

        Samples varying by >25% are colored red.
        Samples varying by 5-25% are colored orange.
        Other samples are colored green.

        Args:
          percent_diff: float. percent difference between values.
        """
        if percent_diff < 0:
            direction = 'decrease'
        else:
            direction = 'increase'
        percent_diff = abs(percent_diff)
        if percent_diff > LARGE_CHANGE_THRESHOLD:
            size = 'large'
        elif percent_diff > MEDIUM_CHANGE_THRESHOLD:
            size = 'medium'
        elif percent_diff > SMALL_CHANGE_THRESHOLD:
            size = 'small'
        else:
            return ''
        return 'value-{0}-{1}'.format(direction, size)

    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
        undefined=jinja2.StrictUndefined)
    env.globals['class_for_percent_diff'] = _ClassForPercentDifference
    # FIX: itertools.izip_longest is Python 2 only; fall back to the
    # Python 3 spelling so template rendering works on either interpreter.
    env.globals['izip_longest'] = getattr(
        itertools, 'izip_longest', getattr(itertools, 'zip_longest', None))
    # FIX: honor the caller-supplied template_name instead of always
    # loading the hard-coded default template.
    template = env.get_template(template_name)

    matched = _MatchSamples(base_result.samples,
                            head_result.samples)

    # Generate sample diffs
    sample_context_diffs = []
    sample_diffs = []
    for base_sample, head_sample in matched:
        if not base_sample or not head_sample:
            # Sample inserted or deleted.
            continue
        sample_context_diffs.append(
            _CompareSamples(base_sample, head_sample))
        sample_diffs.append(
            _CompareSamples(base_sample, head_sample, context=False))

    # Generate flag diffs
    flag_diffs = difflib.HtmlDiff().make_table(
        base_result.flags, head_result.flags, context=False)

    # Used for generating a chart with differences. Escape characters that
    # would terminate an inline <script> block.
    matched_json = json.dumps(matched) \
        .replace(u'<', u'\\u003c') \
        .replace(u'>', u'\\u003e') \
        .replace(u'&', u'\\u0026') \
        .replace(u"'", u'\\u0027')

    return template.render(base=base_result,
                           head=head_result,
                           matched_samples=matched,
                           matched_samples_json=matched_json,
                           sample_diffs=sample_diffs,
                           sample_context_diffs=sample_context_diffs,
                           flag_diffs=flag_diffs,
                           infinity=float('inf'),
                           **kwargs)
def main():
    """Command-line entry point: run both revisions, then write JSON + HTML."""
    p = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description=__doc__)
    p.add_argument('-t', '--title', default='PerfKitBenchmarker Comparison',
                   help="""HTML report title""")
    p.add_argument('--base', default='master', help="""Base revision.""")
    p.add_argument('--head', default='dev', help="""Head revision.""")
    p.add_argument('--base-flags', default=None, help="""Flags for run against
                   '--base' revision. Will be combined with --flags.""",
                   type=shlex.split)
    p.add_argument('--head-flags', default=None, help="""Flags for run against
                   '--head' revision. Will be combined with --flags.""",
                   type=shlex.split)
    p.add_argument('-f', '--flags', type=shlex.split,
                   help="""Command line flags (Default: {0})""".format(
                       ' '.join(DEFAULT_FLAGS)))
    p.add_argument('-p', '--parallel', default=False, action='store_true',
                   help="""Run concurrently""")
    p.add_argument('--rerender', help="""Re-render the HTML report from a JSON
                   file [for developers].""", action='store_true')
    p.add_argument('json_output', help="""JSON output path.""")
    p.add_argument('html_output', help="""HTML output path.""")
    a = p.parse_args()

    # Per-revision flags must be given for both sides or neither; shared
    # --flags are appended to each side when both are present.
    if (a.base_flags or a.head_flags):
        if not (a.base_flags and a.head_flags):
            p.error('--base-flags and --head-flags must be specified together.\n'
                    '\tbase flags={0}\n\thead flags={1}'.format(
                        a.base_flags, a.head_flags))
        a.base_flags = a.base_flags + (a.flags or [])
        a.head_flags = a.head_flags + (a.flags or [])
    else:
        # Just --flags
        assert not a.base_flags, a.base_flags
        assert not a.head_flags, a.head_flags
        a.base_flags = a.flags or list(DEFAULT_FLAGS)
        a.head_flags = a.flags or list(DEFAULT_FLAGS)

    if not a.rerender:
        if a.parallel:
            # Imported lazily: 'futures' is only needed for parallel runs.
            from concurrent import futures
            with futures.ThreadPoolExecutor(max_workers=2) as executor:
                base_res_fut = executor.submit(RunPerfKitBenchmarker, a.base,
                                               a.base_flags)
                head_res_fut = executor.submit(RunPerfKitBenchmarker, a.head,
                                               a.head_flags)
                base_res = base_res_fut.result()
                head_res = head_res_fut.result()
        else:
            base_res = RunPerfKitBenchmarker(a.base, a.base_flags)
            head_res = RunPerfKitBenchmarker(a.head, a.head_flags)
        logging.info('Base result: %s', base_res)
        logging.info('Head result: %s', head_res)

        # Persist both results so --rerender can rebuild the HTML later.
        with argparse.FileType('w')(a.json_output) as json_fp:
            logging.info('Writing JSON to %s', a.json_output)
            json.dump({'head': head_res._asdict(),
                       'base': base_res._asdict()},
                      json_fp,
                      indent=2)
            json_fp.write('\n')
    else:
        logging.info('Loading results from %s', a.json_output)
        with argparse.FileType('r')(a.json_output) as json_fp:
            d = json.load(json_fp)
            base_res = PerfKitBenchmarkerResult(**d['base'])
            head_res = PerfKitBenchmarkerResult(**d['head'])

    with argparse.FileType('w')(a.html_output) as html_fp:
        logging.info('Writing HTML to %s', a.html_output)
        html_fp.write(RenderResults(base_result=base_res,
                                    head_result=head_res,
                                    varying_keys=VARYING_KEYS,
                                    title=a.title))
if __name__ == '__main__':
    # Configure root logging before the comparison starts emitting messages.
    logging.basicConfig(level=logging.INFO)
    main()
| apache-2.0 |
tomlepaine/bottle-skeleton | app.py | 1 | 1076 |
import argparse
from bottle import get, run, response, static_file, redirect
from jinja2 import Environment, PackageLoader
import config
# Command-line interface: the only option is the HTTP port to listen on.
parser = argparse.ArgumentParser(prog=config.name,
                                 description=config.description)
parser.add_argument('--port',
                    type=int,
                    default=8080,
                    help='Port where gui is running.')
args = parser.parse_args()

# Setup globals
PORT = args.port
# Jinja2 environment loading templates from the package's template directory
# (both names come from the local 'config' module).
ENV = Environment(loader=PackageLoader(config.package_name,
                                       config.template_dir))
@get('/')
def index():
    """Redirect the site root to the hello-world page."""
    redirect('/hello-world')
@get('/hello-world')
def hello_world():
    """Render and return the hello-world template (no context variables)."""
    return ENV.get_template('hello-world.html').render()
@get('/page')
def page():
    """Placeholder route rendered from the not-implemented template."""
    return ENV.get_template('not-implemented.html').render()
@get('/frame/<index:int>.jpeg')
def frame(index):
    """Serve a single video frame as a JPEG image.

    NOTE(review): ``VIDEO`` is never defined anywhere in this module, so this
    handler raises NameError when hit -- confirm where VIDEO is supposed to
    be initialized before enabling this route.
    """
    response.content_type = "image/jpeg"
    return VIDEO.get_frame(index)
# Start the development server on the CLI-selected port (blocking call).
run(host='localhost', port=PORT)
| bsd-2-clause |
ZhangXinNan/tensorflow | tensorflow/contrib/kinesis/python/kernel_tests/kinesis_test.py | 9 | 5546 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# ==============================================================================
"""Tests for KinesisDataset.
NOTE: boto3 is needed and the test has to be invoked manually:
```
$ bazel test -s --verbose_failures --config=opt \
--action_env=AWS_ACCESS_KEY_ID=XXXXXX \
--action_env=AWS_SECRET_ACCESS_KEY=XXXXXX \
//tensorflow/contrib/kinesis:kinesis_test
```
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import boto3
from tensorflow.contrib.kinesis.python.ops import kinesis_dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class KinesisDatasetTest(test.TestCase):
    """Integration tests for KinesisDataset against a real AWS Kinesis stream.

    These tests create and delete actual Kinesis streams, so they require
    valid AWS credentials in the environment (see the module docstring).
    """

    def testKinesisDatasetOneShard(self):
        """Reads ten records back, in order, from a single-shard stream."""
        client = boto3.client('kinesis', region_name='us-east-1')

        # Setup the Kinesis with 1 shard.
        stream_name = "tf_kinesis_test_1"
        client.create_stream(StreamName=stream_name, ShardCount=1)
        # Wait until stream exists, default is 10 * 18 seconds.
        client.get_waiter('stream_exists').wait(StreamName=stream_name)
        for i in range(10):
            data = "D" + str(i)
            client.put_record(
                StreamName=stream_name, Data=data, PartitionKey="TensorFlow" + str(i))

        stream = array_ops.placeholder(dtypes.string, shape=[])
        num_epochs = array_ops.placeholder(dtypes.int64, shape=[])
        batch_size = array_ops.placeholder(dtypes.int64, shape=[])
        repeat_dataset = kinesis_dataset_ops.KinesisDataset(
            stream, read_indefinitely=False).repeat(num_epochs)
        batch_dataset = repeat_dataset.batch(batch_size)

        iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
        init_op = iterator.make_initializer(repeat_dataset)
        # NOTE(review): init_batch_op is never run in this test -- presumably
        # kept for symmetry with other dataset tests; confirm before removing.
        init_batch_op = iterator.make_initializer(batch_dataset)
        get_next = iterator.get_next()

        with self.test_session() as sess:
            # Basic test: read from shard 0 of stream 1.
            sess.run(init_op, feed_dict={stream: stream_name, num_epochs: 1})
            for i in range(10):
                self.assertEqual("D" + str(i), sess.run(get_next))
            with self.assertRaises(errors.OutOfRangeError):
                sess.run(get_next)

        client.delete_stream(StreamName=stream_name)
        # Wait until stream deleted, default is 10 * 18 seconds.
        client.get_waiter('stream_not_exists').wait(StreamName=stream_name)

    def testKinesisDatasetTwoShards(self):
        """Reads both shards of a two-shard stream and checks the union."""
        client = boto3.client('kinesis', region_name='us-east-1')

        # Setup the Kinesis with 2 shards.
        stream_name = "tf_kinesis_test_2"
        client.create_stream(StreamName=stream_name, ShardCount=2)
        # Wait until stream exists, default is 10 * 18 seconds.
        client.get_waiter('stream_exists').wait(StreamName=stream_name)
        for i in range(10):
            data = "D" + str(i)
            client.put_record(
                StreamName=stream_name, Data=data, PartitionKey="TensorFlow" + str(i))
        response = client.describe_stream(StreamName=stream_name)
        shard_id_0 = response["StreamDescription"]["Shards"][0]["ShardId"]
        shard_id_1 = response["StreamDescription"]["Shards"][1]["ShardId"]

        stream = array_ops.placeholder(dtypes.string, shape=[])
        shard = array_ops.placeholder(dtypes.string, shape=[])
        num_epochs = array_ops.placeholder(dtypes.int64, shape=[])
        batch_size = array_ops.placeholder(dtypes.int64, shape=[])
        repeat_dataset = kinesis_dataset_ops.KinesisDataset(
            stream, shard, read_indefinitely=False).repeat(num_epochs)
        batch_dataset = repeat_dataset.batch(batch_size)

        iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
        init_op = iterator.make_initializer(repeat_dataset)
        init_batch_op = iterator.make_initializer(batch_dataset)
        get_next = iterator.get_next()

        data = list()
        with self.test_session() as sess:
            # Basic test: read from shard 0 of stream 2.
            sess.run(
                init_op, feed_dict={
                    stream: stream_name, shard: shard_id_0, num_epochs: 1})
            with self.assertRaises(errors.OutOfRangeError):
                # Use range(11) to guarantee the OutOfRangeError.
                for i in range(11):
                    data.append(sess.run(get_next))

            # Basic test: read from shard 1 of stream 2.
            sess.run(
                init_op, feed_dict={
                    stream: stream_name, shard: shard_id_1, num_epochs: 1})
            with self.assertRaises(errors.OutOfRangeError):
                # Use range(11) to guarantee the OutOfRangeError.
                for i in range(11):
                    data.append(sess.run(get_next))

        # Records are partitioned across shards, so only the sorted union is
        # deterministic -- not the per-shard order.
        data.sort()
        self.assertEqual(data, ["D" + str(i) for i in range(10)])

        client.delete_stream(StreamName=stream_name)
        # Wait until stream deleted, default is 10 * 18 seconds.
        client.get_waiter('stream_not_exists').wait(StreamName=stream_name)
if __name__ == "__main__":
    # These AWS-dependent tests only run when the file is invoked directly.
    test.main()
| apache-2.0 |
eeshangarg/zulip | zerver/migrations/0297_draft.py | 6 | 1348 | # Generated by Django 2.2.14 on 2020-07-23 17:07
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the Draft model: per-user unsent message drafts.

    NOTE: migration files must remain stable once applied; do not edit the
    operations below.
    """

    dependencies = [
        ("zerver", "0296_remove_userprofile_short_name"),
    ]

    operations = [
        migrations.CreateModel(
            name="Draft",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("topic", models.CharField(db_index=True, max_length=60)),
                ("content", models.TextField()),
                ("last_edit_time", models.DateTimeField(db_index=True)),
                (
                    # Nullable: the draft survives if its recipient is deleted.
                    "recipient",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to="zerver.Recipient",
                    ),
                ),
                (
                    # Owner of the draft; drafts die with their user.
                    "user_profile",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
    ]
| apache-2.0 |
engineer0x47/SCONS | build/lib/SCons/Tool/aixcc.py | 2 | 2267 | """SCons.Tool.aixcc
Tool-specific initialization for IBM xlc / Visual Age C compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/aixcc.py 2014/08/24 12:12:31 garyo"
import os.path
import SCons.Platform.aix
import cc
# AIX package names that may provide the xlc / Visual Age C compiler.
packages = ['vac.C', 'ibmcxx.cmp']
def get_xlc(env):
    """Locate xlc via the AIX platform helper, honoring an existing $CC."""
    compiler = env.get('CC', 'xlc')
    return SCons.Platform.aix.get_xlc(env, compiler, packages)
def generate(env):
    """Add Builders and construction variables for xlc / Visual Age
    suite to an Environment."""
    path, _cc, version = get_xlc(env)
    if path and _cc:
        # Use the fully qualified compiler path when one was found.
        _cc = os.path.join(path, _cc)

    if 'CC' not in env:
        env['CC'] = _cc

    # Delegate the generic C-compiler setup to the base 'cc' tool.
    cc.generate(env)

    if version:
        env['CCVERSION'] = version
def exists(env):
    """Return the path to the xlc binary if it is installed, else None."""
    path, compiler, _version = get_xlc(env)
    if not (path and compiler):
        return None
    candidate = os.path.join(path, compiler)
    return candidate if os.path.exists(candidate) else None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
wtolson/circle-craters | test/test_qgis_environment.py | 116 | 1885 | # coding=utf-8
"""Tests for QGIS functionality.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'tim@linfiniti.com'
__date__ = '20/01/2011'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import os
import unittest
from qgis.core import (
QgsProviderRegistry,
QgsCoordinateReferenceSystem,
QgsRasterLayer)
from utilities import get_qgis_app
# Create (or fetch) the singleton QGIS application before any test runs.
QGIS_APP = get_qgis_app()
class QGISTest(unittest.TestCase):
    """Test the QGIS Environment"""

    def test_qgis_environment(self):
        """QGIS environment has the expected providers"""
        r = QgsProviderRegistry.instance()
        self.assertIn('gdal', r.providerList())
        self.assertIn('ogr', r.providerList())
        self.assertIn('postgres', r.providerList())

    def test_projection(self):
        """Test that QGIS properly parses a wkt string.
        """
        crs = QgsCoordinateReferenceSystem()
        # WGS84 geographic CRS expressed as ESRI-style WKT.
        wkt = (
            'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",'
            'SPHEROID["WGS_1984",6378137.0,298.257223563]],'
            'PRIMEM["Greenwich",0.0],UNIT["Degree",'
            '0.0174532925199433]]')
        crs.createFromWkt(wkt)
        auth_id = crs.authid()
        expected_auth_id = 'EPSG:4326'
        self.assertEqual(auth_id, expected_auth_id)

        # now test for a loaded layer
        path = os.path.join(os.path.dirname(__file__), 'tenbytenraster.asc')
        title = 'TestRaster'
        layer = QgsRasterLayer(path, title)
        auth_id = layer.crs().authid()
        self.assertEqual(auth_id, expected_auth_id)
if __name__ == '__main__':
    # Run the QGIS environment checks when invoked directly.
    unittest.main()
| bsd-3-clause |
whiteclover/Breeze | breeze/chatcenter/room.py | 1 | 1148 | import time
import logging
LOGGER = logging.getLogger(__name__)
class Room(object):
    """A named chat room holding the set of peers currently joined to it."""

    def __init__(self, name):
        self.name = name
        # Maps peer uid -> peer object.
        self.peers = {}

    def broadcast(self, msg):
        """Send ``msg`` to every peer in the room except its author."""
        if msg:
            for peer in self.peers.values():
                # msg.user is the sender; do not echo the message back.
                if peer != msg.user:
                    LOGGER.info('peer: %s', peer)
                    peer.send(msg)

    def add_peer(self, peer):
        """Join ``peer`` to this room; raises if the uid is already present."""
        if peer.uid in self.peers:
            # FIX: the original raised Exception('in'), which made failures
            # impossible to diagnose. Same exception type, useful message.
            raise Exception(
                'peer %r is already in room %r' % (peer.uid, self.name))
        peer.add_room(self)
        self.peers[peer.uid] = peer

    def remove_peer(self, peer):
        """Detach ``peer`` from this room (KeyError if it was never added)."""
        peer.remove_room(self)
        del self.peers[peer.uid]
class RoomManager(object):
    """Registry of chat rooms, keyed by room name."""

    def __init__(self):
        self.rooms = {}

    def add_room(self, room):
        """Register ``room`` under its name, replacing any existing entry."""
        self.rooms[room.name] = room

    def remove_room(self, room):
        """Unregister ``room``; a no-op when it is not registered."""
        self.rooms.pop(room.name, None)

    def add_peer_to_room(self, room_name, peer):
        """Join ``peer`` to the named room, creating the room on demand."""
        target = self.rooms.get(room_name)
        if not target:
            target = Room(room_name)
            self.rooms[room_name] = target
        target.add_peer(peer)

    def remove_peer_from_room(self, room_name, peer):
        """Remove ``peer`` from the named room when that room exists."""
        target = self.rooms.get(room_name)
        if target:
            target.remove_peer(peer)

    def broadcast(self, room_name, msg):
        """Forward ``msg`` to the named room's broadcast, if the room exists."""
        target = self.rooms.get(room_name)
        if target:
            target.broadcast(msg)
| gpl-2.0 |
mhumeSF/ansible-container | setup.py | 1 | 3215 | import os
import sys
import shlex
import shutil
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
from setuptools.command.sdist import sdist as SDistCommand
from pip.req import parse_requirements
import container
class PlaybookAsTests(TestCommand):
    """setuptools 'test' command that delegates to an Ansible playbook."""

    user_options = [('ansible-args=', None, "Extra ansible arguments")]

    def initialize_options(self):
        # No extra ansible arguments by default.
        self.ansible_args = u''
        TestCommand.initialize_options(self)

    def run(self):
        if sys.platform == 'darwin':
            # Docker for Mac exports certain paths into the virtual machine
            # actually running Docker. The default tempdir isn't one of them,
            # but /tmp is.
            os.environ['TMPDIR'] = '/tmp'
        return TestCommand.run(self)

    def run_tests(self):
        import subprocess
        # Runs test/run_tests.yml, forwarding any --ansible-args verbatim,
        # and propagates the playbook's exit code as the test result.
        p = subprocess.Popen(
            ['ansible-playbook'] +
            shlex.split(self.ansible_args) +
            ['run_tests.yml'],
            cwd=os.path.join(os.getcwd(), 'test'),
        )
        rc = p.wait()
        sys.exit(rc)
class BundleConductorFiles(SDistCommand):
    """sdist command that bundles conductor build inputs into the package.

    Copies setup.py and the conductor requirement files into
    container/docker/files/ so they ship inside the source distribution.
    """

    def run(self):
        shutil.copyfile('./setup.py', 'container/docker/files/setup.py')
        shutil.copyfile('./conductor-requirements.txt',
                        'container/docker/files/conductor-requirements.txt')
        shutil.copyfile('./conductor-requirements.yml',
                        'container/docker/files/conductor-requirements.yml')
        return SDistCommand.run(self)
# Two install profiles: the full host CLI, or the minimal conductor entry
# point when building inside the conductor container (container.ENV).
if container.ENV == 'host':
    # NOTE(review): pip.req.parse_requirements is a private pip API that was
    # removed in pip >= 10; this only works with old pip releases. Confirm
    # the supported pip version before relying on it.
    install_reqs = parse_requirements('requirements.txt', session=False)
    setup_kwargs = dict(
        install_requires=[str(ir.req) for ir in install_reqs if ir.match_markers()],
        tests_require=[
            'ansible>=2.3.0',
            'pytest>=3',
            'docker>=2.1',
            'jmespath>=0.9'
        ],
        extras_require={
            'docker': ['docker>=2.1'],
            'docbuild': ['Sphinx>=1.5.0'],
            'openshift': ['openshift==0.0.1'],
            'k8s': ['openshift==0.0.1']
        },
        #dependency_links=[
        #    'https://github.com/ansible/ansible/archive/devel.tar.gz#egg=ansible-2.4.0',
        #],
        cmdclass={'test': PlaybookAsTests,
                  'sdist': BundleConductorFiles},
        entry_points={
            'console_scripts': [
                'ansible-container = container.cli:host_commandline']
        }
    )
else:
    # Conductor image: only the conductor console script is installed.
    setup_kwargs = dict(
        entry_points={
            'console_scripts': ['conductor = container.cli:conductor_commandline']
        },
    )

setup(
    name='ansible-container',
    version=container.__version__,
    packages=find_packages(include='container.*'),
    include_package_data=True,
    zip_safe=False,
    url='https://github.com/ansible/ansible-container',
    license='LGPLv3 (See LICENSE file for terms)',
    author='Joshua "jag" Ginsberg, Chris Houseknecht, and others (See AUTHORS file for contributors)',
    author_email='jag@ansible.com',
    description=('Ansible Container empowers you to orchestrate, build, run, and ship '
                 'Docker images built from Ansible playbooks.'),
    **setup_kwargs
)
| lgpl-3.0 |
plumgrid/plumgrid-nova | nova/tests/test_safeutils.py | 13 | 4052 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import safe_utils
from nova import test
class GetCallArgsTestCase(test.TestCase):
def _test_func(self, instance, red=None, blue=None):
    """Fixture callable whose signature getcallargs is exercised against."""
    pass
def test_all_kwargs(self):
    """getcallargs maps a keyword-only invocation onto named parameters."""
    args = ()
    kwargs = {'instance': {'uuid': 1}, 'red': 3, 'blue': 4}
    callargs = safe_utils.getcallargs(self._test_func, *args, **kwargs)
    # Implicit self counts as an arg.
    self.assertEqual(4, len(callargs))
    # Idiom fix: assertIn gives a useful failure message, unlike
    # assertTrue(x in y) which just reports "False is not true".
    self.assertIn('instance', callargs)
    self.assertEqual({'uuid': 1}, callargs['instance'])
    self.assertIn('red', callargs)
    self.assertEqual(3, callargs['red'])
    self.assertIn('blue', callargs)
    self.assertEqual(4, callargs['blue'])
def test_all_args(self):
args = ({'uuid': 1}, 3, 4)
kwargs = {}
callargs = safe_utils.getcallargs(self._test_func, *args, **kwargs)
#implicit self counts as an arg
self.assertEqual(4, len(callargs))
self.assertTrue('instance' in callargs)
self.assertEqual({'uuid': 1}, callargs['instance'])
self.assertTrue('red' in callargs)
self.assertEqual(3, callargs['red'])
self.assertTrue('blue' in callargs)
self.assertEqual(4, callargs['blue'])
def test_mixed_args(self):
args = ({'uuid': 1}, 3)
kwargs = {'blue': 4}
callargs = safe_utils.getcallargs(self._test_func, *args, **kwargs)
#implicit self counts as an arg
self.assertEqual(4, len(callargs))
self.assertTrue('instance' in callargs)
self.assertEqual({'uuid': 1}, callargs['instance'])
self.assertTrue('red' in callargs)
self.assertEqual(3, callargs['red'])
self.assertTrue('blue' in callargs)
self.assertEqual(4, callargs['blue'])
def test_partial_kwargs(self):
args = ()
kwargs = {'instance': {'uuid': 1}, 'red': 3}
callargs = safe_utils.getcallargs(self._test_func, *args, **kwargs)
#implicit self counts as an arg
self.assertEqual(4, len(callargs))
self.assertTrue('instance' in callargs)
self.assertEqual({'uuid': 1}, callargs['instance'])
self.assertTrue('red' in callargs)
self.assertEqual(3, callargs['red'])
self.assertTrue('blue' in callargs)
self.assertEqual(None, callargs['blue'])
def test_partial_args(self):
args = ({'uuid': 1}, 3)
kwargs = {}
callargs = safe_utils.getcallargs(self._test_func, *args, **kwargs)
#implicit self counts as an arg
self.assertEqual(4, len(callargs))
self.assertTrue('instance' in callargs)
self.assertEqual({'uuid': 1}, callargs['instance'])
self.assertTrue('red' in callargs)
self.assertEqual(3, callargs['red'])
self.assertTrue('blue' in callargs)
self.assertEqual(None, callargs['blue'])
def test_partial_mixed_args(self):
args = (3,)
kwargs = {'instance': {'uuid': 1}}
callargs = safe_utils.getcallargs(self._test_func, *args, **kwargs)
self.assertEqual(4, len(callargs))
self.assertTrue('instance' in callargs)
self.assertEqual({'uuid': 1}, callargs['instance'])
self.assertTrue('red' in callargs)
self.assertEqual(3, callargs['red'])
self.assertTrue('blue' in callargs)
self.assertEqual(None, callargs['blue'])
| apache-2.0 |
dxwu/BinderFilter | resources/android-toolchain-16/lib/python2.7/idlelib/SearchDialog.py | 81 | 2040 | from Tkinter import *
from idlelib import SearchEngine
from idlelib.SearchDialogBase import SearchDialogBase
def _setup(text):
    """Return the SearchDialog shared by *text*'s root window.

    The dialog is created lazily on first use and cached on the
    SearchEngine instance for that root.
    """
    top = text._root()
    engine = SearchEngine.get(top)
    try:
        return engine._searchdialog
    except AttributeError:
        engine._searchdialog = SearchDialog(top, engine)
        return engine._searchdialog
def find(text):
    """Open the search dialog preloaded with the current selection."""
    selection = text.get("sel.first", "sel.last")
    dialog = _setup(text)
    return dialog.open(text, selection)
def find_again(text):
    """Repeat the previous search in *text* without reopening the dialog."""
    dialog = _setup(text)
    return dialog.find_again(text)
def find_selection(text):
    """Search for the text currently selected in *text*."""
    dialog = _setup(text)
    return dialog.find_selection(text)
class SearchDialog(SearchDialogBase):
    """"Find" dialog driving a SearchEngine over a Tk Text widget."""
    def create_widgets(self):
        """Build the base widgets and add the default "Find" button."""
        f = SearchDialogBase.create_widgets(self)
        self.make_button("Find", self.default_command, 1)
    def default_command(self, event=None):
        """Find-button handler: run the search and close on success."""
        if not self.engine.getprog():
            return
        if self.find_again(self.text):
            self.close()
    def find_again(self, text):
        """Search *text* for the current pattern; select and show the hit.

        Returns True when a new match was selected, False otherwise
        (the widget bell is rung on failure).
        """
        if not self.engine.getpat():
            # No pattern yet: open the dialog so the user can enter one.
            self.open(text)
            return False
        if not self.engine.getprog():
            return False
        res = self.engine.search_text(text)
        if res:
            line, m = res
            i, j = m.span()
            first = "%d.%d" % (line, i)
            last = "%d.%d" % (line, j)
            try:
                # If the hit is exactly the current selection, treat it
                # as "no new match" and beep instead of re-selecting.
                selfirst = text.index("sel.first")
                sellast = text.index("sel.last")
                if selfirst == first and sellast == last:
                    text.bell()
                    return False
            except TclError:
                # No current selection -- nothing to compare against.
                pass
            text.tag_remove("sel", "1.0", "end")
            text.tag_add("sel", first, last)
            # Leave the insert mark at the leading edge of the match
            # relative to the search direction.
            text.mark_set("insert", self.engine.isback() and first or last)
            text.see("insert")
            return True
        else:
            text.bell()
            return False
    def find_selection(self, text):
        """Adopt the current selection as the pattern, then search again."""
        pat = text.get("sel.first", "sel.last")
        if pat:
            self.engine.setcookedpat(pat)
        return self.find_again(text)
| mit |
kangfend/django | tests/schema/models.py | 9 | 3782 | from django.apps.registry import Apps
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Because we want to test creation and deletion of these as separate things,
# these models are all inserted into a separate Apps so the main test
# runner doesn't migrate them.
new_apps = Apps()  # isolated app registry; each model opts in via Meta.apps
class Author(models.Model):
    """Author with a nullable ``height``; registered on ``new_apps``."""
    name = models.CharField(max_length=255)
    height = models.PositiveIntegerField(null=True, blank=True)
    class Meta:
        apps = new_apps
class AuthorWithDefaultHeight(models.Model):
    """Same shape as Author but ``height`` carries a default of 42."""
    name = models.CharField(max_length=255)
    height = models.PositiveIntegerField(null=True, blank=True, default=42)
    class Meta:
        apps = new_apps
class AuthorWithEvenLongerName(models.Model):
    """Author variant with a long class name (exercises identifier length)."""
    name = models.CharField(max_length=255)
    height = models.PositiveIntegerField(null=True, blank=True)
    class Meta:
        apps = new_apps
class Book(models.Model):
    """Book with an FK to Author and an indexed title."""
    author = models.ForeignKey(Author)
    title = models.CharField(max_length=100, db_index=True)
    pub_date = models.DateTimeField()
    # tags = models.ManyToManyField("Tag", related_name="books")
    class Meta:
        apps = new_apps
class BookWeak(models.Model):
    """Book whose author FK is created without a DB-level constraint."""
    author = models.ForeignKey(Author, db_constraint=False)
    title = models.CharField(max_length=100, db_index=True)
    pub_date = models.DateTimeField()
    class Meta:
        apps = new_apps
class BookWithLongName(models.Model):
    """Model with a very long FK field name (exercises column-name limits)."""
    author_foreign_key_with_really_long_field_name = models.ForeignKey(AuthorWithEvenLongerName)
    class Meta:
        apps = new_apps
class BookWithO2O(models.Model):
    """Book variant using OneToOneField; shares the ``schema_book`` table."""
    author = models.OneToOneField(Author)
    title = models.CharField(max_length=100, db_index=True)
    pub_date = models.DateTimeField()
    class Meta:
        apps = new_apps
        db_table = "schema_book"
class BookWithSlug(models.Model):
    """Book variant adding a unique slug; shares the ``schema_book`` table."""
    author = models.ForeignKey(Author)
    title = models.CharField(max_length=100, db_index=True)
    pub_date = models.DateTimeField()
    slug = models.CharField(max_length=20, unique=True)
    class Meta:
        apps = new_apps
        db_table = "schema_book"
class IntegerPK(models.Model):
    """Model with an explicit integer primary key plus a unique column."""
    i = models.IntegerField(primary_key=True)
    j = models.IntegerField(unique=True)
    class Meta:
        apps = new_apps
        db_table = "INTEGERPK"  # uppercase to ensure proper quoting
class Note(models.Model):
    """Single-text-field model."""
    info = models.TextField()
    class Meta:
        apps = new_apps
class NoteRename(models.Model):
    """Note with its field renamed; maps onto Note's ``schema_note`` table."""
    detail_info = models.TextField()
    class Meta:
        apps = new_apps
        db_table = "schema_note"
class Tag(models.Model):
    """Tag with a unique slug."""
    title = models.CharField(max_length=255)
    slug = models.SlugField(unique=True)
    class Meta:
        apps = new_apps
class TagIndexed(models.Model):
    """Tag variant with a composite (slug, title) index via index_together."""
    title = models.CharField(max_length=255)
    slug = models.SlugField(unique=True)
    class Meta:
        apps = new_apps
        index_together = [["slug", "title"]]
class TagM2MTest(models.Model):
    """Tag clone used for many-to-many tests."""
    title = models.CharField(max_length=255)
    slug = models.SlugField(unique=True)
    class Meta:
        apps = new_apps
class TagUniqueRename(models.Model):
    """Tag with the unique slug field renamed; maps onto ``schema_tag``."""
    title = models.CharField(max_length=255)
    slug2 = models.SlugField(unique=True)
    class Meta:
        apps = new_apps
        db_table = "schema_tag"
# Based on tests/reserved_names/models.py
@python_2_unicode_compatible
class Thing(models.Model):
    """Model whose table name ('drop') is an SQL reserved word."""
    when = models.CharField(max_length=1, primary_key=True)
    class Meta:
        db_table = 'drop'
    def __str__(self):
        return self.when
class UniqueTest(models.Model):
    """Non-unique slug made unique only in combination with ``year``."""
    year = models.IntegerField()
    slug = models.SlugField(unique=False)
    class Meta:
        apps = new_apps
        unique_together = ["year", "slug"]
| bsd-3-clause |
marcore/edx-platform | lms/celery.py | 25 | 1079 | """
Import celery, load its settings from the django settings
and auto discover tasks in all installed django apps.
Taken from: http://celery.readthedocs.org/en/latest/django/first-steps-with-django.html
"""
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from openedx.core.lib.celery.routers import AlternateEnvironmentRouter
# set the default Django settings module for the 'celery' program.
# NOTE(review): 'proj.settings' / 'proj' look like leftovers from the Celery
# first-steps-with-django tutorial cited in the module docstring -- confirm
# the real lms settings module is injected elsewhere before workers start.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')
APP = Celery('proj')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
APP.config_from_object('django.conf:settings')
# Auto-discover tasks in every installed Django app.
APP.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
class Router(AlternateEnvironmentRouter):
    """
    Celery task router for lms, built on AlternateEnvironmentRouter to send
    selected tasks to non-cms queues.
    """
    @property
    def alternate_env_tasks(self):
        """
        Mapping of { task_name: alternate_queue }; lms overrides nothing.
        """
        overrides = {}
        return overrides
| agpl-3.0 |
googleapis/python-dialogflow | tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py | 1 | 136892 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.dialogflow_v2beta1.services.entity_types import EntityTypesAsyncClient
from google.cloud.dialogflow_v2beta1.services.entity_types import EntityTypesClient
from google.cloud.dialogflow_v2beta1.services.entity_types import pagers
from google.cloud.dialogflow_v2beta1.services.entity_types import transports
from google.cloud.dialogflow_v2beta1.services.entity_types.transports.base import (
_API_CORE_VERSION,
)
from google.cloud.dialogflow_v2beta1.services.entity_types.transports.base import (
_GOOGLE_AUTH_VERSION,
)
from google.cloud.dialogflow_v2beta1.types import entity_type
from google.cloud.dialogflow_v2beta1.types import entity_type as gcd_entity_type
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import struct_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
# - Delete all the api-core and auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
# Version-gated skip markers: each marker skips its test unless the installed
# google-auth / google-api-core falls in the named version range.
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
    reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)
requires_api_core_lt_1_26_0 = pytest.mark.skipif(
    packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
    reason="This test requires google-api-core < 1.26.0",
)
requires_api_core_gte_1_26_0 = pytest.mark.skipif(
    packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
    reason="This test requires google-api-core >= 1.26.0",
)
def client_cert_source_callback():
    """Dummy mTLS callback returning placeholder (cert, key) byte pairs."""
    cert, key = b"cert bytes", b"key bytes"
    return cert, key
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a distinct fake endpoint when *client* defaults to localhost."""
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint inserts 'mtls.' for *.googleapis.com hosts
    (idempotently) and leaves None and non-Google hosts untouched."""
    convert = EntityTypesClient._get_default_mtls_endpoint
    assert convert(None) is None
    assert convert("example.googleapis.com") == "example.mtls.googleapis.com"
    assert convert("example.mtls.googleapis.com") == "example.mtls.googleapis.com"
    assert (
        convert("example.sandbox.googleapis.com")
        == "example.mtls.sandbox.googleapis.com"
    )
    assert (
        convert("example.mtls.sandbox.googleapis.com")
        == "example.mtls.sandbox.googleapis.com"
    )
    assert convert("api.example.com") == "api.example.com"
@pytest.mark.parametrize("client_class", [EntityTypesClient, EntityTypesAsyncClient,])
def test_entity_types_client_from_service_account_info(client_class):
    """from_service_account_info builds a client around the factory's creds."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == "dialogflow.googleapis.com:443"
@pytest.mark.parametrize("client_class", [EntityTypesClient, EntityTypesAsyncClient,])
def test_entity_types_client_from_service_account_file(client_class):
    """Both from_service_account_file and its _json alias use the factory creds."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == "dialogflow.googleapis.com:443"
def test_entity_types_client_get_transport_class():
    """get_transport_class returns the gRPC transport by default and by name."""
    transport = EntityTypesClient.get_transport_class()
    available_transports = [
        transports.EntityTypesGrpcTransport,
    ]
    assert transport in available_transports
    transport = EntityTypesClient.get_transport_class("grpc")
    assert transport == transports.EntityTypesGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (EntityTypesClient, transports.EntityTypesGrpcTransport, "grpc"),
        (
            EntityTypesAsyncClient,
            transports.EntityTypesGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    EntityTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EntityTypesClient)
)
@mock.patch.object(
    EntityTypesAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(EntityTypesAsyncClient),
)
def test_entity_types_client_client_options(
    client_class, transport_class, transport_name
):
    """ClientOptions handling: explicit transport, api_endpoint, the
    GOOGLE_API_USE_MTLS_ENDPOINT env values, and quota_project_id."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(EntityTypesClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()
    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(EntityTypesClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()
    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()
    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class()
    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (EntityTypesClient, transports.EntityTypesGrpcTransport, "grpc", "true"),
        (
            EntityTypesAsyncClient,
            transports.EntityTypesGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (EntityTypesClient, transports.EntityTypesGrpcTransport, "grpc", "false"),
        (
            EntityTypesAsyncClient,
            transports.EntityTypesGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    EntityTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EntityTypesClient)
)
@mock.patch.object(
    EntityTypesAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(EntityTypesAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_entity_types_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the endpoint and client cert
    are switched based on GOOGLE_API_USE_CLIENT_CERTIFICATE and cert source."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class()
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (EntityTypesClient, transports.EntityTypesGrpcTransport, "grpc"),
        (
            EntityTypesAsyncClient,
            transports.EntityTypesGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_entity_types_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """ClientOptions.scopes is forwarded to the transport constructor."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (EntityTypesClient, transports.EntityTypesGrpcTransport, "grpc"),
        (
            EntityTypesAsyncClient,
            transports.EntityTypesGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_entity_types_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """ClientOptions.credentials_file is forwarded to the transport constructor."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
def test_entity_types_client_client_options_from_dict():
    """client_options given as a plain dict is honored (api_endpoint key)."""
    with mock.patch(
        "google.cloud.dialogflow_v2beta1.services.entity_types.transports.EntityTypesGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = EntityTypesClient(client_options={"api_endpoint": "squid.clam.whelk"})
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
def test_list_entity_types(
    transport: str = "grpc", request_type=entity_type.ListEntityTypesRequest
):
    """list_entity_types happy path over a mocked gRPC stub returns a pager."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = entity_type.ListEntityTypesResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_entity_types(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == entity_type.ListEntityTypesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListEntityTypesPager)
    assert response.next_page_token == "next_page_token_value"
def test_list_entity_types_from_dict():
    """Same as test_list_entity_types but with the request given as a dict."""
    test_list_entity_types(request_type=dict)
def test_list_entity_types_empty_call():
    """Calling with no request/flattened args still issues a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        client.list_entity_types()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == entity_type.ListEntityTypesRequest()
@pytest.mark.asyncio
async def test_list_entity_types_async(
    transport: str = "grpc_asyncio", request_type=entity_type.ListEntityTypesRequest
):
    """Async variant of the list_entity_types happy path."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            entity_type.ListEntityTypesResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_entity_types(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == entity_type.ListEntityTypesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListEntityTypesAsyncPager)
    assert response.next_page_token == "next_page_token_value"
@pytest.mark.asyncio
async def test_list_entity_types_async_from_dict():
    """Async variant with the request given as a dict."""
    await test_list_entity_types_async(request_type=dict)
def test_list_entity_types_field_headers():
    """The request's parent field is propagated as an x-goog-request-params header."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = entity_type.ListEntityTypesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        call.return_value = entity_type.ListEntityTypesResponse()
        client.list_entity_types(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_entity_types_field_headers_async():
    """Async variant: parent propagates to the routing header."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = entity_type.ListEntityTypesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            entity_type.ListEntityTypesResponse()
        )
        await client.list_entity_types(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_entity_types_flattened():
    """Flattened kwargs (parent, language_code) populate the request object."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = entity_type.ListEntityTypesResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_entity_types(
            parent="parent_value", language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].language_code == "language_code_value"
def test_list_entity_types_flattened_error():
    """Passing both a request object and flattened fields raises ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_entity_types(
            entity_type.ListEntityTypesRequest(),
            parent="parent_value",
            language_code="language_code_value",
        )
@pytest.mark.asyncio
async def test_list_entity_types_flattened_async():
    """Async variant: flattened kwargs populate the request object."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = entity_type.ListEntityTypesResponse()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            entity_type.ListEntityTypesResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_entity_types(
            parent="parent_value", language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].language_code == "language_code_value"
@pytest.mark.asyncio
async def test_list_entity_types_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_entity_types(
            entity_type.ListEntityTypesRequest(),
            parent="parent_value",
            language_code="language_code_value",
        )
def test_list_entity_types_pager():
    """The pager iterates items across four mocked pages (6 EntityTypes total)."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials,)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            entity_type.ListEntityTypesResponse(
                entity_types=[
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                ],
                next_page_token="abc",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[], next_page_token="def",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(),], next_page_token="ghi",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(), entity_type.EntityType(),],
            ),
            RuntimeError,
        )
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_entity_types(request={})
        assert pager._metadata == metadata
        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, entity_type.EntityType) for i in results)
def test_list_entity_types_pages():
    """Page-level iteration yields each raw page with its next_page_token.

    Fix: instantiate ``AnonymousCredentials()`` — the class itself was
    being passed, inconsistent with the rest of the file.
    """
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types), "__call__"
    ) as call:
        # Four pages; the trailing RuntimeError fires if the pager over-fetches.
        call.side_effect = (
            entity_type.ListEntityTypesResponse(
                entity_types=[
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                ],
                next_page_token="abc",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[], next_page_token="def",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(),], next_page_token="ghi",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(), entity_type.EntityType(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_entity_types(request={}).pages)
        # Each raw page exposes the token that leads to the next page; the
        # final page has an empty token.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_entity_types_async_pager():
    """The async pager walks every item across all mocked pages.

    Fix: instantiate ``AnonymousCredentials()`` — the class itself was
    being passed, inconsistent with the rest of the file.
    """
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Four pages; the trailing RuntimeError fires if the pager over-fetches.
        call.side_effect = (
            entity_type.ListEntityTypesResponse(
                entity_types=[
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                ],
                next_page_token="abc",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[], next_page_token="def",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(),], next_page_token="ghi",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(), entity_type.EntityType(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_entity_types(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)
        # 3 + 0 + 1 + 2 items in total across the four pages.
        assert len(responses) == 6
        assert all(isinstance(i, entity_type.EntityType) for i in responses)
@pytest.mark.asyncio
async def test_list_entity_types_async_pages():
    """Async page-level iteration yields raw pages with the right tokens.

    Fix: instantiate ``AnonymousCredentials()`` — the class itself was
    being passed, inconsistent with the rest of the file.
    """
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entity_types),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Four pages; the trailing RuntimeError fires if the pager over-fetches.
        call.side_effect = (
            entity_type.ListEntityTypesResponse(
                entity_types=[
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                    entity_type.EntityType(),
                ],
                next_page_token="abc",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[], next_page_token="def",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(),], next_page_token="ghi",
            ),
            entity_type.ListEntityTypesResponse(
                entity_types=[entity_type.EntityType(), entity_type.EntityType(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_entity_types(request={})).pages:
            pages.append(page_)
        # The final page carries an empty next_page_token.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
def test_get_entity_type(
    transport: str = "grpc", request_type=entity_type.GetEntityTypeRequest
):
    """GetEntityType: the request is forwarded and the response unpacked."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_entity_type), "__call__") as rpc:
        # Stub a fully populated EntityType coming back from the service.
        rpc.return_value = entity_type.EntityType(
            name="name_value",
            display_name="display_name_value",
            kind=entity_type.EntityType.Kind.KIND_MAP,
            auto_expansion_mode=entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT,
            enable_fuzzy_extraction=True,
        )
        response = client.get_entity_type(request)
        # Exactly one RPC was made, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == entity_type.GetEntityTypeRequest()
    # The client surfaces the stubbed response unchanged.
    assert isinstance(response, entity_type.EntityType)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.kind == entity_type.EntityType.Kind.KIND_MAP
    assert (
        response.auto_expansion_mode
        == entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT
    )
    assert response.enable_fuzzy_extraction is True
def test_get_entity_type_from_dict():
    """Re-run the GetEntityType flow with a plain dict as the request."""
    test_get_entity_type(request_type=dict)
def test_get_entity_type_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # Coverage failsafe: the client must synthesize a default request.
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_entity_type), "__call__") as rpc:
        client.get_entity_type()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        # The empty call materializes as a default-constructed request.
        assert args[0] == entity_type.GetEntityTypeRequest()
@pytest.mark.asyncio
async def test_get_entity_type_async(
    transport: str = "grpc_asyncio", request_type=entity_type.GetEntityTypeRequest
):
    """Async GetEntityType: request forwarded, awaited response unpacked."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_entity_type), "__call__") as rpc:
        # Stub an awaitable unary-unary call returning a populated EntityType.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            entity_type.EntityType(
                name="name_value",
                display_name="display_name_value",
                kind=entity_type.EntityType.Kind.KIND_MAP,
                auto_expansion_mode=entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT,
                enable_fuzzy_extraction=True,
            )
        )
        response = await client.get_entity_type(request)
        # The stub was invoked with the expected request message.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == entity_type.GetEntityTypeRequest()
    # The client surfaces the stubbed response unchanged.
    assert isinstance(response, entity_type.EntityType)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.kind == entity_type.EntityType.Kind.KIND_MAP
    assert (
        response.auto_expansion_mode
        == entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT
    )
    assert response.enable_fuzzy_extraction is True
@pytest.mark.asyncio
async def test_get_entity_type_async_from_dict():
    """Re-run the async GetEntityType flow with a plain dict request."""
    await test_get_entity_type_async(request_type=dict)
def test_get_entity_type_field_headers():
    """Routing metadata must include the request's ``name`` field."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Fields that appear in the HTTP/1.1 URI are propagated as headers;
    # give ``name`` a non-empty value so the propagation is observable.
    request = entity_type.GetEntityTypeRequest()
    request.name = "name/value"
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_entity_type), "__call__") as rpc:
        rpc.return_value = entity_type.EntityType()
        client.get_entity_type(request)
        # Exactly one RPC, carrying the request object verbatim.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request
    # The routing parameter rides in the x-goog-request-params header.
    _, _, kw = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_entity_type_field_headers_async():
    """Async variant: routing metadata must include the ``name`` field."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Fields that appear in the HTTP/1.1 URI are propagated as headers;
    # give ``name`` a non-empty value so the propagation is observable.
    request = entity_type.GetEntityTypeRequest()
    request.name = "name/value"
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_entity_type), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            entity_type.EntityType()
        )
        await client.get_entity_type(request)
        # The stub received the request object verbatim.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request
    # The routing parameter rides in the x-goog-request-params header.
    _, _, kw = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_entity_type_flattened():
    """Flattened kwargs are copied onto the generated request message."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(type(client.transport.get_entity_type), "__call__") as rpc:
        rpc.return_value = entity_type.EntityType()
        # Invoke using a truthy value for every flattened field.
        client.get_entity_type(
            name="name_value", language_code="language_code_value",
        )
        # The request object sent to the stub reflects each keyword argument.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].language_code == "language_code_value"
def test_get_entity_type_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Supplying both a request message and per-field keyword arguments is
    # ambiguous, so the client rejects the call outright.
    request = entity_type.GetEntityTypeRequest()
    with pytest.raises(ValueError):
        client.get_entity_type(
            request, name="name_value", language_code="language_code_value",
        )
@pytest.mark.asyncio
async def test_get_entity_type_flattened_async():
    """Async: flattened kwargs are copied onto the generated request.

    Fix: removed a dead ``call.return_value = entity_type.EntityType()``
    assignment that was immediately overwritten by the FakeUnaryUnaryCall.
    """
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entity_type), "__call__") as call:
        # Designate an awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            entity_type.EntityType()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_entity_type(
            name="name_value", language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].language_code == "language_code_value"
@pytest.mark.asyncio
async def test_get_entity_type_flattened_error_async():
    """Async: mixing a request object with flattened kwargs must raise."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Supplying both a request message and per-field keyword arguments is
    # ambiguous, so the async client rejects the call outright.
    request = entity_type.GetEntityTypeRequest()
    with pytest.raises(ValueError):
        await client.get_entity_type(
            request, name="name_value", language_code="language_code_value",
        )
def test_create_entity_type(
    transport: str = "grpc", request_type=gcd_entity_type.CreateEntityTypeRequest
):
    """CreateEntityType: the request is forwarded and the response unpacked."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.create_entity_type), "__call__"
    ) as rpc:
        # Stub a fully populated EntityType coming back from the service.
        rpc.return_value = gcd_entity_type.EntityType(
            name="name_value",
            display_name="display_name_value",
            kind=gcd_entity_type.EntityType.Kind.KIND_MAP,
            auto_expansion_mode=gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT,
            enable_fuzzy_extraction=True,
        )
        response = client.create_entity_type(request)
        # Exactly one RPC was made, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == gcd_entity_type.CreateEntityTypeRequest()
    # The client surfaces the stubbed response unchanged.
    assert isinstance(response, gcd_entity_type.EntityType)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.kind == gcd_entity_type.EntityType.Kind.KIND_MAP
    assert (
        response.auto_expansion_mode
        == gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT
    )
    assert response.enable_fuzzy_extraction is True
def test_create_entity_type_from_dict():
    """Re-run the CreateEntityType flow with a plain dict as the request."""
    test_create_entity_type(request_type=dict)
def test_create_entity_type_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # Coverage failsafe: the client must synthesize a default request.
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.create_entity_type), "__call__"
    ) as rpc:
        client.create_entity_type()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        # The empty call materializes as a default-constructed request.
        assert args[0] == gcd_entity_type.CreateEntityTypeRequest()
@pytest.mark.asyncio
async def test_create_entity_type_async(
    transport: str = "grpc_asyncio",
    request_type=gcd_entity_type.CreateEntityTypeRequest,
):
    """Async CreateEntityType: request forwarded, awaited response unpacked."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.create_entity_type), "__call__"
    ) as rpc:
        # Stub an awaitable unary-unary call returning a populated EntityType.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcd_entity_type.EntityType(
                name="name_value",
                display_name="display_name_value",
                kind=gcd_entity_type.EntityType.Kind.KIND_MAP,
                auto_expansion_mode=gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT,
                enable_fuzzy_extraction=True,
            )
        )
        response = await client.create_entity_type(request)
        # The stub was invoked with the expected request message.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == gcd_entity_type.CreateEntityTypeRequest()
    # The client surfaces the stubbed response unchanged.
    assert isinstance(response, gcd_entity_type.EntityType)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.kind == gcd_entity_type.EntityType.Kind.KIND_MAP
    assert (
        response.auto_expansion_mode
        == gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT
    )
    assert response.enable_fuzzy_extraction is True
@pytest.mark.asyncio
async def test_create_entity_type_async_from_dict():
    """Re-run the async CreateEntityType flow with a plain dict request."""
    await test_create_entity_type_async(request_type=dict)
def test_create_entity_type_field_headers():
    """Routing metadata must include the request's ``parent`` field."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Fields that appear in the HTTP/1.1 URI are propagated as headers;
    # give ``parent`` a non-empty value so the propagation is observable.
    request = gcd_entity_type.CreateEntityTypeRequest()
    request.parent = "parent/value"
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.create_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = gcd_entity_type.EntityType()
        client.create_entity_type(request)
        # Exactly one RPC, carrying the request object verbatim.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request
    # The routing parameter rides in the x-goog-request-params header.
    _, _, kw = rpc.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_entity_type_field_headers_async():
    """Async variant: routing metadata must include the ``parent`` field."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Fields that appear in the HTTP/1.1 URI are propagated as headers;
    # give ``parent`` a non-empty value so the propagation is observable.
    request = gcd_entity_type.CreateEntityTypeRequest()
    request.parent = "parent/value"
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.create_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcd_entity_type.EntityType()
        )
        await client.create_entity_type(request)
        # The stub received the request object verbatim.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request
    # The routing parameter rides in the x-goog-request-params header.
    _, _, kw = rpc.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_entity_type_flattened():
    """Flattened kwargs are copied onto the generated request message."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.create_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = gcd_entity_type.EntityType()
        # Invoke using a truthy value for every flattened field.
        client.create_entity_type(
            parent="parent_value",
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
        )
        # The request object sent to the stub reflects each keyword argument.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entity_type == gcd_entity_type.EntityType(name="name_value")
        assert args[0].language_code == "language_code_value"
def test_create_entity_type_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Supplying both a request message and per-field keyword arguments is
    # ambiguous, so the client rejects the call outright.
    request = gcd_entity_type.CreateEntityTypeRequest()
    with pytest.raises(ValueError):
        client.create_entity_type(
            request,
            parent="parent_value",
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
        )
@pytest.mark.asyncio
async def test_create_entity_type_flattened_async():
    """Async: flattened kwargs are copied onto the generated request.

    Fix: removed a dead ``call.return_value = gcd_entity_type.EntityType()``
    assignment that was immediately overwritten by the FakeUnaryUnaryCall.
    """
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_entity_type), "__call__"
    ) as call:
        # Designate an awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcd_entity_type.EntityType()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_entity_type(
            parent="parent_value",
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entity_type == gcd_entity_type.EntityType(name="name_value")
        assert args[0].language_code == "language_code_value"
@pytest.mark.asyncio
async def test_create_entity_type_flattened_error_async():
    """Async: mixing a request object with flattened kwargs must raise."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Supplying both a request message and per-field keyword arguments is
    # ambiguous, so the async client rejects the call outright.
    request = gcd_entity_type.CreateEntityTypeRequest()
    with pytest.raises(ValueError):
        await client.create_entity_type(
            request,
            parent="parent_value",
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
        )
def test_update_entity_type(
    transport: str = "grpc", request_type=gcd_entity_type.UpdateEntityTypeRequest
):
    """UpdateEntityType: the request is forwarded and the response unpacked."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_entity_type), "__call__"
    ) as rpc:
        # Stub a fully populated EntityType coming back from the service.
        rpc.return_value = gcd_entity_type.EntityType(
            name="name_value",
            display_name="display_name_value",
            kind=gcd_entity_type.EntityType.Kind.KIND_MAP,
            auto_expansion_mode=gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT,
            enable_fuzzy_extraction=True,
        )
        response = client.update_entity_type(request)
        # Exactly one RPC was made, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == gcd_entity_type.UpdateEntityTypeRequest()
    # The client surfaces the stubbed response unchanged.
    assert isinstance(response, gcd_entity_type.EntityType)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.kind == gcd_entity_type.EntityType.Kind.KIND_MAP
    assert (
        response.auto_expansion_mode
        == gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT
    )
    assert response.enable_fuzzy_extraction is True
def test_update_entity_type_from_dict():
    """Re-run the UpdateEntityType flow with a plain dict as the request."""
    test_update_entity_type(request_type=dict)
def test_update_entity_type_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # Coverage failsafe: the client must synthesize a default request.
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_entity_type), "__call__"
    ) as rpc:
        client.update_entity_type()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        # The empty call materializes as a default-constructed request.
        assert args[0] == gcd_entity_type.UpdateEntityTypeRequest()
@pytest.mark.asyncio
async def test_update_entity_type_async(
    transport: str = "grpc_asyncio",
    request_type=gcd_entity_type.UpdateEntityTypeRequest,
):
    """Async UpdateEntityType: request forwarded, awaited response unpacked."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_entity_type), "__call__"
    ) as rpc:
        # Stub an awaitable unary-unary call returning a populated EntityType.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcd_entity_type.EntityType(
                name="name_value",
                display_name="display_name_value",
                kind=gcd_entity_type.EntityType.Kind.KIND_MAP,
                auto_expansion_mode=gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT,
                enable_fuzzy_extraction=True,
            )
        )
        response = await client.update_entity_type(request)
        # The stub was invoked with the expected request message.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == gcd_entity_type.UpdateEntityTypeRequest()
    # The client surfaces the stubbed response unchanged.
    assert isinstance(response, gcd_entity_type.EntityType)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.kind == gcd_entity_type.EntityType.Kind.KIND_MAP
    assert (
        response.auto_expansion_mode
        == gcd_entity_type.EntityType.AutoExpansionMode.AUTO_EXPANSION_MODE_DEFAULT
    )
    assert response.enable_fuzzy_extraction is True
@pytest.mark.asyncio
async def test_update_entity_type_async_from_dict():
    """Re-run the async UpdateEntityType flow with a plain dict request."""
    await test_update_entity_type_async(request_type=dict)
def test_update_entity_type_field_headers():
    """Routing metadata must include the nested ``entity_type.name`` field."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Fields that appear in the HTTP/1.1 URI are propagated as headers;
    # give the nested name a non-empty value so the propagation is observable.
    request = gcd_entity_type.UpdateEntityTypeRequest()
    request.entity_type.name = "entity_type.name/value"
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = gcd_entity_type.EntityType()
        client.update_entity_type(request)
        # Exactly one RPC, carrying the request object verbatim.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request
    # The routing parameter rides in the x-goog-request-params header.
    _, _, kw = rpc.mock_calls[0]
    assert ("x-goog-request-params", "entity_type.name=entity_type.name/value",) in kw[
        "metadata"
    ]
@pytest.mark.asyncio
async def test_update_entity_type_field_headers_async():
    """Async variant: routing metadata must include ``entity_type.name``."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Fields that appear in the HTTP/1.1 URI are propagated as headers;
    # give the nested name a non-empty value so the propagation is observable.
    request = gcd_entity_type.UpdateEntityTypeRequest()
    request.entity_type.name = "entity_type.name/value"
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcd_entity_type.EntityType()
        )
        await client.update_entity_type(request)
        # The stub received the request object verbatim.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == request
    # The routing parameter rides in the x-goog-request-params header.
    _, _, kw = rpc.mock_calls[0]
    assert ("x-goog-request-params", "entity_type.name=entity_type.name/value",) in kw[
        "metadata"
    ]
def test_update_entity_type_flattened():
    """Flattened kwargs are copied onto the generated request message."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.update_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = gcd_entity_type.EntityType()
        # Invoke using a truthy value for every flattened field.
        client.update_entity_type(
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # The request object sent to the stub reflects each keyword argument.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0].entity_type == gcd_entity_type.EntityType(name="name_value")
        assert args[0].language_code == "language_code_value"
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
def test_update_entity_type_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Supplying both a request message and per-field keyword arguments is
    # ambiguous, so the client rejects the call outright.
    request = gcd_entity_type.UpdateEntityTypeRequest()
    with pytest.raises(ValueError):
        client.update_entity_type(
            request,
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
@pytest.mark.asyncio
async def test_update_entity_type_flattened_async():
    """Async: flattened kwargs are copied onto the generated request.

    Fix: removed a dead ``call.return_value = gcd_entity_type.EntityType()``
    assignment that was immediately overwritten by the FakeUnaryUnaryCall.
    """
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_entity_type), "__call__"
    ) as call:
        # Designate an awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcd_entity_type.EntityType()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_entity_type(
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].entity_type == gcd_entity_type.EntityType(name="name_value")
        assert args[0].language_code == "language_code_value"
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_entity_type_flattened_error_async():
    """Async: mixing a request object with flattened kwargs must raise."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Supplying both a request message and per-field keyword arguments is
    # ambiguous, so the async client rejects the call outright.
    request = gcd_entity_type.UpdateEntityTypeRequest()
    with pytest.raises(ValueError):
        await client.update_entity_type(
            request,
            entity_type=gcd_entity_type.EntityType(name="name_value"),
            language_code="language_code_value",
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
def test_delete_entity_type(
    transport: str = "grpc", request_type=entity_type.DeleteEntityTypeRequest
):
    """DeleteEntityType: the request is forwarded and None comes back."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.delete_entity_type), "__call__"
    ) as rpc:
        # The delete RPC has an empty response message.
        rpc.return_value = None
        response = client.delete_entity_type(request)
        # Exactly one RPC was made, carrying the expected request message.
        assert len(rpc.mock_calls) == 1
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == entity_type.DeleteEntityTypeRequest()
    # Deletion yields no payload.
    assert response is None
def test_delete_entity_type_from_dict():
    """Re-run the DeleteEntityType flow with a plain dict as the request."""
    test_delete_entity_type(request_type=dict)
def test_delete_entity_type_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # Coverage failsafe: the client must synthesize a default request.
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.delete_entity_type), "__call__"
    ) as rpc:
        client.delete_entity_type()
        rpc.assert_called()
        _, args, _ = rpc.mock_calls[0]
        # The empty call materializes as a default-constructed request.
        assert args[0] == entity_type.DeleteEntityTypeRequest()
@pytest.mark.asyncio
async def test_delete_entity_type_async(
    transport: str = "grpc_asyncio", request_type=entity_type.DeleteEntityTypeRequest
):
    """Async DeleteEntityType: request forwarded, awaited result is None."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Every proto3 field is optional at runtime and the API is mocked,
    # so an empty request message is sufficient.
    request = request_type()
    # Patch the gRPC stub so no network traffic occurs.
    with mock.patch.object(
        type(client.transport.delete_entity_type), "__call__"
    ) as rpc:
        # The delete RPC resolves to an empty (None) payload.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_entity_type(request)
        # The stub was invoked with the expected request message.
        assert len(rpc.mock_calls)
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == entity_type.DeleteEntityTypeRequest()
    # Deletion yields no payload.
    assert response is None
@pytest.mark.asyncio
async def test_delete_entity_type_async_from_dict():
    """Re-run the async DeleteEntityType flow with a plain dict request."""
    await test_delete_entity_type_async(request_type=dict)
def test_delete_entity_type_field_headers():
    """The request's resource name must flow into x-goog-request-params metadata."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.DeleteEntityTypeRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.delete_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = None
        client.delete_entity_type(request)
        # Exactly one invocation, passing the request through unchanged.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "name=name/value",) in metadata
@pytest.mark.asyncio
async def test_delete_entity_type_field_headers_async():
    """Async variant: resource name must flow into x-goog-request-params metadata."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.DeleteEntityTypeRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.delete_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_entity_type(request)
        # The stub was invoked, passing the request through unchanged.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "name=name/value",) in metadata
def test_delete_entity_type_flattened():
    """Flattened keyword arguments must be folded into the request proto."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(client.transport.delete_entity_type), "__call__"
    ) as rpc:
        rpc.return_value = None
        # Supply a truthy value for each flattened field.
        client.delete_entity_type(name="name_value",)
        # The outgoing request proto carries the flattened value.
        assert len(rpc.mock_calls) == 1
        sent = rpc.mock_calls[0].args[0]
        assert sent.name == "name_value"
def test_delete_entity_type_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        client.delete_entity_type(
            entity_type.DeleteEntityTypeRequest(), name="name_value",
        )
@pytest.mark.asyncio
async def test_delete_entity_type_flattened_async():
    """Flattened keyword arguments must be folded into the request proto (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entity_type), "__call__"
    ) as call:
        # Designate an awaitable return value for the call.  (The generated
        # `call.return_value = None` that preceded this line was dead code —
        # immediately overwritten — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_entity_type(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_entity_type_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        await client.delete_entity_type(
            entity_type.DeleteEntityTypeRequest(), name="name_value",
        )
def test_batch_update_entity_types(
    transport: str = "grpc", request_type=entity_type.BatchUpdateEntityTypesRequest
):
    """Round-trip an empty BatchUpdateEntityTypesRequest through the sync client."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    # Patch the transport-level stub and hand back a canned operation.
    with mock.patch.object(
        type(client.transport.batch_update_entity_types), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.batch_update_entity_types(request)
        # Exactly one gRPC invocation, carrying the (normalized) request proto.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == entity_type.BatchUpdateEntityTypesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
def test_batch_update_entity_types_from_dict():
    """Re-run the main batch_update_entity_types test with a dict-typed request."""
    test_batch_update_entity_types(request_type=dict)
def test_batch_update_entity_types_empty_call():
    """A call with neither request nor flattened fields still sends a default request.

    Coverage failsafe for the ``request is None`` code path.
    """
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.batch_update_entity_types), "__call__"
    ) as rpc:
        client.batch_update_entity_types()
        rpc.assert_called()
        # The stub must have received an all-defaults request proto.
        assert rpc.mock_calls[0].args[0] == entity_type.BatchUpdateEntityTypesRequest()
@pytest.mark.asyncio
async def test_batch_update_entity_types_async(
    transport: str = "grpc_asyncio",
    request_type=entity_type.BatchUpdateEntityTypesRequest,
):
    """Round-trip an empty BatchUpdateEntityTypesRequest through the async client."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    with mock.patch.object(
        type(client.transport.batch_update_entity_types), "__call__"
    ) as rpc:
        # The async surface awaits the stub, so hand back an awaitable operation.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.batch_update_entity_types(request)
        # The stub was invoked with the (normalized) request proto.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == entity_type.BatchUpdateEntityTypesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_batch_update_entity_types_async_from_dict():
    """Re-run the async batch_update_entity_types test with a dict-typed request."""
    await test_batch_update_entity_types_async(request_type=dict)
def test_batch_update_entity_types_field_headers():
    """The request's parent must flow into x-goog-request-params metadata."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchUpdateEntityTypesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_update_entity_types), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        client.batch_update_entity_types(request)
        # Exactly one invocation, passing the request through unchanged.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
@pytest.mark.asyncio
async def test_batch_update_entity_types_field_headers_async():
    """Async variant: parent must flow into x-goog-request-params metadata."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchUpdateEntityTypesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_update_entity_types), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.batch_update_entity_types(request)
        # The stub was invoked, passing the request through unchanged.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
def test_batch_delete_entity_types(
    transport: str = "grpc", request_type=entity_type.BatchDeleteEntityTypesRequest
):
    """Round-trip an empty BatchDeleteEntityTypesRequest through the sync client."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    # Patch the transport-level stub and hand back a canned operation.
    with mock.patch.object(
        type(client.transport.batch_delete_entity_types), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.batch_delete_entity_types(request)
        # Exactly one gRPC invocation, carrying the (normalized) request proto.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == entity_type.BatchDeleteEntityTypesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
def test_batch_delete_entity_types_from_dict():
    """Re-run the main batch_delete_entity_types test with a dict-typed request."""
    test_batch_delete_entity_types(request_type=dict)
def test_batch_delete_entity_types_empty_call():
    """A call with neither request nor flattened fields still sends a default request.

    Coverage failsafe for the ``request is None`` code path.
    """
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.batch_delete_entity_types), "__call__"
    ) as rpc:
        client.batch_delete_entity_types()
        rpc.assert_called()
        # The stub must have received an all-defaults request proto.
        assert rpc.mock_calls[0].args[0] == entity_type.BatchDeleteEntityTypesRequest()
@pytest.mark.asyncio
async def test_batch_delete_entity_types_async(
    transport: str = "grpc_asyncio",
    request_type=entity_type.BatchDeleteEntityTypesRequest,
):
    """Round-trip an empty BatchDeleteEntityTypesRequest through the async client."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    with mock.patch.object(
        type(client.transport.batch_delete_entity_types), "__call__"
    ) as rpc:
        # The async surface awaits the stub, so hand back an awaitable operation.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.batch_delete_entity_types(request)
        # The stub was invoked with the (normalized) request proto.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == entity_type.BatchDeleteEntityTypesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_batch_delete_entity_types_async_from_dict():
    """Re-run the async batch_delete_entity_types test with a dict-typed request."""
    await test_batch_delete_entity_types_async(request_type=dict)
def test_batch_delete_entity_types_field_headers():
    """The request's parent must flow into x-goog-request-params metadata."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchDeleteEntityTypesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_delete_entity_types), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        client.batch_delete_entity_types(request)
        # Exactly one invocation, passing the request through unchanged.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
@pytest.mark.asyncio
async def test_batch_delete_entity_types_field_headers_async():
    """Async variant: parent must flow into x-goog-request-params metadata."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchDeleteEntityTypesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_delete_entity_types), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.batch_delete_entity_types(request)
        # The stub was invoked, passing the request through unchanged.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
def test_batch_delete_entity_types_flattened():
    """Flattened keyword arguments must be folded into the request proto."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(client.transport.batch_delete_entity_types), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        # Supply a truthy value for each flattened field.
        client.batch_delete_entity_types(
            parent="parent_value", entity_type_names=["entity_type_names_value"],
        )
        # The outgoing request proto carries every flattened value.
        assert len(rpc.mock_calls) == 1
        sent = rpc.mock_calls[0].args[0]
        assert sent.parent == "parent_value"
        assert sent.entity_type_names == ["entity_type_names_value"]
def test_batch_delete_entity_types_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        client.batch_delete_entity_types(
            entity_type.BatchDeleteEntityTypesRequest(),
            parent="parent_value",
            entity_type_names=["entity_type_names_value"],
        )
@pytest.mark.asyncio
async def test_batch_delete_entity_types_flattened_async():
    """Flattened keyword arguments must be folded into the request proto (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entity_types), "__call__"
    ) as call:
        # Designate an awaitable return value for the call.  (The generated
        # plain-Operation assignment that preceded this line was dead code —
        # immediately overwritten — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.batch_delete_entity_types(
            parent="parent_value", entity_type_names=["entity_type_names_value"],
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entity_type_names == ["entity_type_names_value"]
@pytest.mark.asyncio
async def test_batch_delete_entity_types_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        await client.batch_delete_entity_types(
            entity_type.BatchDeleteEntityTypesRequest(),
            parent="parent_value",
            entity_type_names=["entity_type_names_value"],
        )
def test_batch_create_entities(
    transport: str = "grpc", request_type=entity_type.BatchCreateEntitiesRequest
):
    """Round-trip an empty BatchCreateEntitiesRequest through the sync client."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    # Patch the transport-level stub and hand back a canned operation.
    with mock.patch.object(
        type(client.transport.batch_create_entities), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.batch_create_entities(request)
        # Exactly one gRPC invocation, carrying the (normalized) request proto.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == entity_type.BatchCreateEntitiesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
def test_batch_create_entities_from_dict():
    """Re-run the main batch_create_entities test with a dict-typed request."""
    test_batch_create_entities(request_type=dict)
def test_batch_create_entities_empty_call():
    """A call with neither request nor flattened fields still sends a default request.

    Coverage failsafe for the ``request is None`` code path.
    """
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.batch_create_entities), "__call__"
    ) as rpc:
        client.batch_create_entities()
        rpc.assert_called()
        # The stub must have received an all-defaults request proto.
        assert rpc.mock_calls[0].args[0] == entity_type.BatchCreateEntitiesRequest()
@pytest.mark.asyncio
async def test_batch_create_entities_async(
    transport: str = "grpc_asyncio", request_type=entity_type.BatchCreateEntitiesRequest
):
    """Round-trip an empty BatchCreateEntitiesRequest through the async client."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    with mock.patch.object(
        type(client.transport.batch_create_entities), "__call__"
    ) as rpc:
        # The async surface awaits the stub, so hand back an awaitable operation.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.batch_create_entities(request)
        # The stub was invoked with the (normalized) request proto.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == entity_type.BatchCreateEntitiesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_batch_create_entities_async_from_dict():
    """Re-run the async batch_create_entities test with a dict-typed request."""
    await test_batch_create_entities_async(request_type=dict)
def test_batch_create_entities_field_headers():
    """The request's parent must flow into x-goog-request-params metadata."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchCreateEntitiesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_create_entities), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        client.batch_create_entities(request)
        # Exactly one invocation, passing the request through unchanged.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
@pytest.mark.asyncio
async def test_batch_create_entities_field_headers_async():
    """Async variant: parent must flow into x-goog-request-params metadata."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchCreateEntitiesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_create_entities), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.batch_create_entities(request)
        # The stub was invoked, passing the request through unchanged.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
def test_batch_create_entities_flattened():
    """Flattened keyword arguments must be folded into the request proto."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(client.transport.batch_create_entities), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        # Supply a truthy value for each flattened field.
        client.batch_create_entities(
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
        # The outgoing request proto carries every flattened value.
        assert len(rpc.mock_calls) == 1
        sent = rpc.mock_calls[0].args[0]
        assert sent.parent == "parent_value"
        assert sent.entities == [entity_type.EntityType.Entity(value="value_value")]
        assert sent.language_code == "language_code_value"
def test_batch_create_entities_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        client.batch_create_entities(
            entity_type.BatchCreateEntitiesRequest(),
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
@pytest.mark.asyncio
async def test_batch_create_entities_flattened_async():
    """Flattened keyword arguments must be folded into the request proto (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_create_entities), "__call__"
    ) as call:
        # Designate an awaitable return value for the call.  (The generated
        # plain-Operation assignment that preceded this line was dead code —
        # immediately overwritten — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.batch_create_entities(
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entities == [entity_type.EntityType.Entity(value="value_value")]
        assert args[0].language_code == "language_code_value"
@pytest.mark.asyncio
async def test_batch_create_entities_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        await client.batch_create_entities(
            entity_type.BatchCreateEntitiesRequest(),
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
def test_batch_update_entities(
    transport: str = "grpc", request_type=entity_type.BatchUpdateEntitiesRequest
):
    """Round-trip an empty BatchUpdateEntitiesRequest through the sync client."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    # Patch the transport-level stub and hand back a canned operation.
    with mock.patch.object(
        type(client.transport.batch_update_entities), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.batch_update_entities(request)
        # Exactly one gRPC invocation, carrying the (normalized) request proto.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == entity_type.BatchUpdateEntitiesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
def test_batch_update_entities_from_dict():
    """Re-run the main batch_update_entities test with a dict-typed request."""
    test_batch_update_entities(request_type=dict)
def test_batch_update_entities_empty_call():
    """A call with neither request nor flattened fields still sends a default request.

    Coverage failsafe for the ``request is None`` code path.
    """
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    with mock.patch.object(
        type(client.transport.batch_update_entities), "__call__"
    ) as rpc:
        client.batch_update_entities()
        rpc.assert_called()
        # The stub must have received an all-defaults request proto.
        assert rpc.mock_calls[0].args[0] == entity_type.BatchUpdateEntitiesRequest()
@pytest.mark.asyncio
async def test_batch_update_entities_async(
    transport: str = "grpc_asyncio", request_type=entity_type.BatchUpdateEntitiesRequest
):
    """Round-trip an empty BatchUpdateEntitiesRequest through the async client."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 makes every field optional, so an all-defaults request is valid.
    request = request_type()
    with mock.patch.object(
        type(client.transport.batch_update_entities), "__call__"
    ) as rpc:
        # The async surface awaits the stub, so hand back an awaitable operation.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.batch_update_entities(request)
        # The stub was invoked with the (normalized) request proto.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == entity_type.BatchUpdateEntitiesRequest()
    # The operation is surfaced to callers as a long-running-operation future.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_batch_update_entities_async_from_dict():
    """Re-run the async batch_update_entities test with a dict-typed request."""
    await test_batch_update_entities_async(request_type=dict)
def test_batch_update_entities_field_headers():
    """The request's parent must flow into x-goog-request-params metadata."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchUpdateEntitiesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_update_entities), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        client.batch_update_entities(request)
        # Exactly one invocation, passing the request through unchanged.
        assert len(rpc.mock_calls) == 1
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
@pytest.mark.asyncio
async def test_batch_update_entities_field_headers_async():
    """Async variant: parent must flow into x-goog-request-params metadata."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the field that is part of the HTTP/1.1 URI.
    request = entity_type.BatchUpdateEntitiesRequest()
    request.parent = "parent/value"
    with mock.patch.object(
        type(client.transport.batch_update_entities), "__call__"
    ) as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.batch_update_entities(request)
        # The stub was invoked, passing the request through unchanged.
        assert rpc.mock_calls
        assert rpc.mock_calls[0].args[0] == request
        # The routing header mirrors the populated field.
        metadata = rpc.mock_calls[0].kwargs["metadata"]
        assert ("x-goog-request-params", "parent=parent/value",) in metadata
def test_batch_update_entities_flattened():
    """Flattened keyword arguments must be folded into the request proto."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(client.transport.batch_update_entities), "__call__"
    ) as rpc:
        rpc.return_value = operations_pb2.Operation(name="operations/op")
        # Supply a truthy value for each flattened field.
        client.batch_update_entities(
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
        # The outgoing request proto carries every flattened value.
        assert len(rpc.mock_calls) == 1
        sent = rpc.mock_calls[0].args[0]
        assert sent.parent == "parent_value"
        assert sent.entities == [entity_type.EntityType.Entity(value="value_value")]
        assert sent.language_code == "language_code_value"
def test_batch_update_entities_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        client.batch_update_entities(
            entity_type.BatchUpdateEntitiesRequest(),
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
@pytest.mark.asyncio
async def test_batch_update_entities_flattened_async():
    """Flattened keyword arguments must be folded into the request proto (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_update_entities), "__call__"
    ) as call:
        # Designate an awaitable return value for the call.  (The generated
        # plain-Operation assignment that preceded this line was dead code —
        # immediately overwritten — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.batch_update_entities(
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entities == [entity_type.EntityType.Entity(value="value_value")]
        assert args[0].language_code == "language_code_value"
@pytest.mark.asyncio
async def test_batch_update_entities_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    with pytest.raises(ValueError):
        await client.batch_update_entities(
            entity_type.BatchUpdateEntitiesRequest(),
            parent="parent_value",
            entities=[entity_type.EntityType.Entity(value="value_value")],
            language_code="language_code_value",
        )
def test_batch_delete_entities(
    transport: str = "grpc", request_type=entity_type.BatchDeleteEntitiesRequest
):
    """Smoke-test the sync batch_delete_entities RPC against a mocked stub."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entities), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.batch_delete_entities(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == entity_type.BatchDeleteEntitiesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_batch_delete_entities_from_dict():
    """Re-run the request-type smoke test with the request given as a plain dict."""
    test_batch_delete_entities(request_type=dict)
def test_batch_delete_entities_empty_call():
    """Calling with no request and no flattened fields still sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entities), "__call__"
    ) as call:
        client.batch_delete_entities()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == entity_type.BatchDeleteEntitiesRequest()
@pytest.mark.asyncio
async def test_batch_delete_entities_async(
    transport: str = "grpc_asyncio", request_type=entity_type.BatchDeleteEntitiesRequest
):
    """Async analogue of the batch_delete_entities smoke test."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entities), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.batch_delete_entities(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == entity_type.BatchDeleteEntitiesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_batch_delete_entities_async_from_dict():
    """Re-run the async smoke test with the request given as a plain dict."""
    await test_batch_delete_entities_async(request_type=dict)
def test_batch_delete_entities_field_headers():
    """Routing fields must be propagated as x-goog-request-params metadata."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = entity_type.BatchDeleteEntitiesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entities), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.batch_delete_entities(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_batch_delete_entities_field_headers_async():
    """Async variant: routing fields must be sent as x-goog-request-params metadata."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = entity_type.BatchDeleteEntitiesRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entities), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.batch_delete_entities(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_batch_delete_entities_flattened():
    """Flattened kwargs must be copied onto the outgoing request object."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entities), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.batch_delete_entities(
            parent="parent_value",
            entity_values=["entity_values_value"],
            language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entity_values == ["entity_values_value"]
        assert args[0].language_code == "language_code_value"
def test_batch_delete_entities_flattened_error():
    """Mixing a request object with flattened kwargs must raise ValueError."""
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.batch_delete_entities(
            entity_type.BatchDeleteEntitiesRequest(),
            parent="parent_value",
            entity_values=["entity_values_value"],
            language_code="language_code_value",
        )
@pytest.mark.asyncio
async def test_batch_delete_entities_flattened_async():
    """Verify the async client copies flattened kwargs into the request.

    Fix: the original assigned a plain ``Operation`` to ``call.return_value``
    and immediately overwrote it with the awaitable fake; the dead first
    assignment is removed.
    """
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_entities), "__call__"
    ) as call:
        # Designate an appropriate awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.batch_delete_entities(
            parent="parent_value",
            entity_values=["entity_values_value"],
            language_code="language_code_value",
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entity_values == ["entity_values_value"]
        assert args[0].language_code == "language_code_value"
@pytest.mark.asyncio
async def test_batch_delete_entities_flattened_error_async():
    """Mixing a request object with flattened kwargs must raise ValueError (async)."""
    client = EntityTypesAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.batch_delete_entities(
            entity_type.BatchDeleteEntitiesRequest(),
            parent="parent_value",
            entity_values=["entity_values_value"],
            language_code="language_code_value",
        )
def test_credentials_transport_error():
    """Credentials, a credentials file, or scopes combined with a transport instance must raise."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.EntityTypesGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = EntityTypesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.EntityTypesGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = EntityTypesClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.EntityTypesGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = EntityTypesClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client instantiated with a custom transport exposes that exact transport."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.EntityTypesGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = EntityTypesClient(transport=transport)
    assert client.transport is transport
def test_transport_get_channel():
    """Both sync and asyncio gRPC transports expose a non-None grpc_channel."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.EntityTypesGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel
    transport = transports.EntityTypesGrpcAsyncIOTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel
@pytest.mark.parametrize(
    "transport_class",
    [transports.EntityTypesGrpcTransport, transports.EntityTypesGrpcAsyncIOTransport,],
)
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials when none are given."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """When no transport is specified the client defaults to the gRPC transport."""
    # A client should use the gRPC transport by default.
    client = EntityTypesClient(credentials=ga_credentials.AnonymousCredentials(),)
    assert isinstance(client.transport, transports.EntityTypesGrpcTransport,)
def test_entity_types_base_transport_error():
    """Supplying both credentials and credentials_file raises DuplicateCredentialArgs."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.EntityTypesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_entity_types_base_transport():
    """Every RPC method and the LRO client on the base transport raise NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.dialogflow_v2beta1.services.entity_types.transports.EntityTypesTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.EntityTypesTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "list_entity_types",
        "get_entity_type",
        "create_entity_type",
        "update_entity_type",
        "delete_entity_type",
        "batch_update_entity_types",
        "batch_delete_entity_types",
        "batch_create_entities",
        "batch_update_entities",
        "batch_delete_entities",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
@requires_google_auth_gte_1_25_0
def test_entity_types_base_transport_with_credentials_file():
    """credentials_file is loaded with default_scopes on google-auth >= 1.25."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.dialogflow_v2beta1.services.entity_types.transports.EntityTypesTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.EntityTypesTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            quota_project_id="octopus",
        )
@requires_google_auth_lt_1_25_0
def test_entity_types_base_transport_with_credentials_file_old_google_auth():
    """credentials_file is loaded with plain scopes on google-auth < 1.25."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.dialogflow_v2beta1.services.entity_types.transports.EntityTypesTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.EntityTypesTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            quota_project_id="octopus",
        )
def test_entity_types_base_transport_with_adc():
    """The base transport falls back to ADC when no credentials are supplied."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.dialogflow_v2beta1.services.entity_types.transports.EntityTypesTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.EntityTypesTransport()
        adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_entity_types_auth_adc():
    """Client creation uses ADC with default_scopes on google-auth >= 1.25."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        EntityTypesClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            quota_project_id=None,
        )
@requires_google_auth_lt_1_25_0
def test_entity_types_auth_adc_old_google_auth():
    """Client creation uses ADC with plain scopes on google-auth < 1.25."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        EntityTypesClient()
        adc.assert_called_once_with(
            scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [transports.EntityTypesGrpcTransport, transports.EntityTypesGrpcAsyncIOTransport,],
)
@requires_google_auth_gte_1_25_0
def test_entity_types_transport_auth_adc(transport_class):
    """Transport classes forward user scopes plus default_scopes to ADC (google-auth >= 1.25)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class",
    [transports.EntityTypesGrpcTransport, transports.EntityTypesGrpcAsyncIOTransport,],
)
@requires_google_auth_lt_1_25_0
def test_entity_types_transport_auth_adc_old_google_auth(transport_class):
    """Transport classes pass the service scopes directly to ADC (google-auth < 1.25)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus")
        adc.assert_called_once_with(
            scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.EntityTypesGrpcTransport, grpc_helpers),
        (transports.EntityTypesGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
@requires_api_core_gte_1_26_0
def test_entity_types_transport_create_channel(transport_class, grpc_helpers):
    """create_channel receives host, scopes, default_scopes and options (api-core >= 1.26)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "dialogflow.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            scopes=["1", "2"],
            default_host="dialogflow.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.EntityTypesGrpcTransport, grpc_helpers),
        (transports.EntityTypesGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
@requires_api_core_lt_1_26_0
def test_entity_types_transport_create_channel_old_api_core(
    transport_class, grpc_helpers
):
    """create_channel receives plain scopes (no default_scopes) on api-core < 1.26."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus")
        create_channel.assert_called_with(
            "dialogflow.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.EntityTypesGrpcTransport, grpc_helpers),
        (transports.EntityTypesGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
@requires_api_core_lt_1_26_0
def test_entity_types_transport_create_channel_user_scopes(
    transport_class, grpc_helpers
):
    """User-supplied scopes override the service defaults on api-core < 1.26."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "dialogflow.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            scopes=["1", "2"],
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [transports.EntityTypesGrpcTransport, transports.EntityTypesGrpcAsyncIOTransport],
)
def test_entity_types_grpc_transport_client_cert_source_for_mtls(transport_class):
    """ssl_channel_credentials wins; otherwise client_cert_source_for_mtls builds the SSL creds."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/dialogflow",
            ),
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_entity_types_host_no_port():
    """An endpoint without a port gets the default :443 appended."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="dialogflow.googleapis.com"
        ),
    )
    assert client.transport._host == "dialogflow.googleapis.com:443"
def test_entity_types_host_with_port():
    """An endpoint with an explicit port is used verbatim."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="dialogflow.googleapis.com:8000"
        ),
    )
    assert client.transport._host == "dialogflow.googleapis.com:8000"
def test_entity_types_grpc_transport_channel():
    """A user-supplied channel is adopted and no SSL credentials are stored.

    Fix: ``== None`` replaced with the idiomatic identity test ``is None``
    (PEP 8 / flake8 E711); behavior is unchanged for this assertion.
    """
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.EntityTypesGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_entity_types_grpc_asyncio_transport_channel():
    """Async variant: a user-supplied channel is adopted, no SSL creds stored.

    Fix: ``== None`` replaced with the idiomatic identity test ``is None``
    (PEP 8 / flake8 E711); behavior is unchanged for this assertion.
    """
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.EntityTypesGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [transports.EntityTypesGrpcTransport, transports.EntityTypesGrpcAsyncIOTransport],
)
def test_entity_types_transport_channel_mtls_with_client_cert_source(transport_class):
    """Deprecated api_mtls_endpoint + client_cert_source path builds an mTLS channel."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=(
                    "https://www.googleapis.com/auth/cloud-platform",
                    "https://www.googleapis.com/auth/dialogflow",
                ),
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [transports.EntityTypesGrpcTransport, transports.EntityTypesGrpcAsyncIOTransport],
)
def test_entity_types_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint path falls back to ADC-provided SSL credentials."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=(
                    "https://www.googleapis.com/auth/cloud-platform",
                    "https://www.googleapis.com/auth/dialogflow",
                ),
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_entity_types_grpc_lro_client():
    """The gRPC transport exposes a cached api-core OperationsClient."""
    client = EntityTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_entity_types_grpc_lro_async_client():
    """The asyncio transport exposes a cached api-core OperationsAsyncClient."""
    client = EntityTypesAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_entity_type_path():
    """entity_type_path builds the canonical fully-qualified resource name."""
    project_id = "squid"
    entity_type_id = "clam"
    want = f"projects/{project_id}/agent/entityTypes/{entity_type_id}"
    got = EntityTypesClient.entity_type_path(project_id, entity_type_id)
    assert got == want
def test_parse_entity_type_path():
    """parse_entity_type_path inverts entity_type_path exactly."""
    fields = {"project": "whelk", "entity_type": "octopus"}
    # Round-trip: build a path from the fields, then parse it back.
    resource_name = EntityTypesClient.entity_type_path(**fields)
    assert EntityTypesClient.parse_entity_type_path(resource_name) == fields
def test_common_billing_account_path():
    """common_billing_account_path builds the billingAccounts/{id} resource name."""
    account_id = "oyster"
    want = f"billingAccounts/{account_id}"
    got = EntityTypesClient.common_billing_account_path(account_id)
    assert got == want
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts common_billing_account_path."""
    fields = {"billing_account": "nudibranch"}
    # Round-trip: build a path from the fields, then parse it back.
    resource_name = EntityTypesClient.common_billing_account_path(**fields)
    assert EntityTypesClient.parse_common_billing_account_path(resource_name) == fields
def test_common_folder_path():
    """common_folder_path builds the folders/{id} resource name."""
    folder_id = "cuttlefish"
    want = f"folders/{folder_id}"
    got = EntityTypesClient.common_folder_path(folder_id)
    assert got == want
def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path."""
    fields = {"folder": "mussel"}
    # Round-trip: build a path from the fields, then parse it back.
    resource_name = EntityTypesClient.common_folder_path(**fields)
    assert EntityTypesClient.parse_common_folder_path(resource_name) == fields
def test_common_organization_path():
    """common_organization_path builds the organizations/{id} resource name."""
    org_id = "winkle"
    want = f"organizations/{org_id}"
    got = EntityTypesClient.common_organization_path(org_id)
    assert got == want
def test_parse_common_organization_path():
    """parse_common_organization_path inverts common_organization_path."""
    fields = {"organization": "nautilus"}
    # Round-trip: build a path from the fields, then parse it back.
    resource_name = EntityTypesClient.common_organization_path(**fields)
    assert EntityTypesClient.parse_common_organization_path(resource_name) == fields
def test_common_project_path():
    """common_project_path builds the projects/{id} resource name."""
    project_id = "scallop"
    want = f"projects/{project_id}"
    got = EntityTypesClient.common_project_path(project_id)
    assert got == want
def test_parse_common_project_path():
    """parse_common_project_path inverts common_project_path."""
    fields = {"project": "abalone"}
    # Round-trip: build a path from the fields, then parse it back.
    resource_name = EntityTypesClient.common_project_path(**fields)
    assert EntityTypesClient.parse_common_project_path(resource_name) == fields
def test_common_location_path():
    """common_location_path builds the projects/{p}/locations/{l} resource name."""
    project_id = "squid"
    location_id = "clam"
    want = f"projects/{project_id}/locations/{location_id}"
    got = EntityTypesClient.common_location_path(project_id, location_id)
    assert got == want
def test_parse_common_location_path():
    """parse_common_location_path inverts common_location_path."""
    fields = {"project": "whelk", "location": "octopus"}
    # Round-trip: build a path from the fields, then parse it back.
    resource_name = EntityTypesClient.common_location_path(**fields)
    assert EntityTypesClient.parse_common_location_path(resource_name) == fields
def test_client_withDEFAULT_CLIENT_INFO():
    """client_info is forwarded to _prep_wrapped_messages for both client and transport."""
    client_info = gapic_v1.client_info.ClientInfo()
    with mock.patch.object(
        transports.EntityTypesTransport, "_prep_wrapped_messages"
    ) as prep:
        client = EntityTypesClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    with mock.patch.object(
        transports.EntityTypesTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = EntityTypesClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
| apache-2.0 |
hellhovnd/django | django/contrib/auth/tests/test_auth_backends.py | 3 | 15696 | from __future__ import unicode_literals
from datetime import date
from django.conf import settings
from django.contrib.auth.models import User, Group, Permission, AnonymousUser
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.auth.tests.test_custom_user import ExtensionUser, CustomPermissionsUser, CustomUser
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.contrib.auth import authenticate
from django.test import TestCase
from django.test.utils import override_settings
class BaseModelBackendTest(object):
"""
A base class for tests that need to validate the ModelBackend
with different User models. Subclasses should define a class
level UserModel attribute, and a create_users() method to
construct two users for test purposes.
"""
backend = 'django.contrib.auth.backends.ModelBackend'
    def setUp(self):
        """Install the backend under test and create the fixture users."""
        self.curr_auth = settings.AUTHENTICATION_BACKENDS
        settings.AUTHENTICATION_BACKENDS = (self.backend,)
        self.create_users()
    def tearDown(self):
        """Restore the original backends and flush the ContentType cache."""
        settings.AUTHENTICATION_BACKENDS = self.curr_auth
        # The custom_perms test messes with ContentTypes, which will
        # be cached; flush the cache to ensure there are no side effects
        # Refs #14975, #14925
        ContentType.objects.clear_cache()
    def test_has_perm(self):
        """has_perm depends on is_superuser and is_active, not is_staff alone."""
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        self.assertEqual(user.has_perm('auth.test'), False)
        # Staff status by itself grants no permissions.
        user.is_staff = True
        user.save()
        self.assertEqual(user.has_perm('auth.test'), False)
        # Superusers have every permission.
        user.is_superuser = True
        user.save()
        self.assertEqual(user.has_perm('auth.test'), True)
        user.is_staff = False
        user.is_superuser = False
        user.save()
        self.assertEqual(user.has_perm('auth.test'), False)
        # Even a staff superuser loses permissions when inactive.
        user.is_staff = True
        user.is_superuser = True
        user.is_active = False
        user.save()
        self.assertEqual(user.has_perm('auth.test'), False)
def test_custom_perms(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
user.save()
# reloading user to purge the _perm_cache
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions() == set(['auth.test']), True)
self.assertEqual(user.get_group_permissions(), set([]))
self.assertEqual(user.has_module_perms('Group'), False)
self.assertEqual(user.has_module_perms('auth'), True)
perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2')
user.user_permissions.add(perm)
user.save()
perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3')
user.user_permissions.add(perm)
user.save()
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions(), set(['auth.test2', 'auth.test', 'auth.test3']))
self.assertEqual(user.has_perm('test'), False)
self.assertEqual(user.has_perm('auth.test'), True)
self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), True)
perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group')
group = Group.objects.create(name='test_group')
group.permissions.add(perm)
group.save()
user.groups.add(group)
user = self.UserModel._default_manager.get(pk=self.user.pk)
exp = set(['auth.test2', 'auth.test', 'auth.test3', 'auth.test_group'])
self.assertEqual(user.get_all_permissions(), exp)
self.assertEqual(user.get_group_permissions(), set(['auth.test_group']))
self.assertEqual(user.has_perms(['auth.test3', 'auth.test_group']), True)
user = AnonymousUser()
self.assertEqual(user.has_perm('test'), False)
self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), False)
def test_has_no_object_perm(self):
"""Regressiontest for #12462"""
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
user.save()
self.assertEqual(user.has_perm('auth.test', 'object'), False)
self.assertEqual(user.get_all_permissions('object'), set([]))
self.assertEqual(user.has_perm('auth.test'), True)
self.assertEqual(user.get_all_permissions(), set(['auth.test']))
def test_get_all_superuser_permissions(self):
"A superuser has all permissions. Refs #14795"
user = self.UserModel._default_manager.get(pk=self.superuser.pk)
self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all()))
@skipIfCustomUser
class ModelBackendTest(BaseModelBackendTest, TestCase):
    """
    Tests for the ModelBackend using the default User model.
    """
    # Concrete user model exercised by the shared BaseModelBackendTest cases.
    UserModel = User

    def create_users(self):
        # Required by BaseModelBackendTest: one regular user, one superuser.
        self.user = User.objects.create_user(
            username='test',
            email='test@example.com',
            password='test',
        )
        self.superuser = User.objects.create_superuser(
            username='test2',
            email='test2@example.com',
            password='test',
        )
@override_settings(AUTH_USER_MODEL='auth.ExtensionUser')
class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase):
    """
    Tests for the ModelBackend using the custom ExtensionUser model.

    This isn't a perfect test, because both the User and ExtensionUser are
    synchronized to the database, which wouldn't ordinarily happen in
    production. As a result, it doesn't catch errors caused by the non-
    existence of the User table.

    The specific problem is queries on .filter(groups__user) et al, which
    makes an implicit assumption that the user model is called 'User'. In
    production, the auth.User table won't exist, so the requested join
    won't exist either; in testing, the auth.User *does* exist, and
    so does the join. However, the join table won't contain any useful
    data; for testing, we check that the data we expect actually does exist.
    """
    UserModel = ExtensionUser

    def create_users(self):
        # Required by BaseModelBackendTest: one regular user, one superuser.
        self.user = ExtensionUser._default_manager.create_user(
            username='test',
            email='test@example.com',
            password='test',
            date_of_birth=date(2006, 4, 25)
        )
        self.superuser = ExtensionUser._default_manager.create_superuser(
            username='test2',
            email='test2@example.com',
            password='test',
            date_of_birth=date(1976, 11, 8)
        )
@override_settings(AUTH_USER_MODEL='auth.CustomPermissionsUser')
class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase):
    """
    Tests for the ModelBackend using the CustomPermissionsUser model.

    As with the ExtensionUser test, this isn't a perfect test, because both
    the User and CustomPermissionsUser are synchronized to the database,
    which wouldn't ordinarily happen in production.
    """
    UserModel = CustomPermissionsUser

    def create_users(self):
        # NOTE(review): no username is passed here — presumably email is this
        # model's USERNAME_FIELD; confirm against test_custom_user.
        self.user = CustomPermissionsUser._default_manager.create_user(
            email='test@example.com',
            password='test',
            date_of_birth=date(2006, 4, 25)
        )
        self.superuser = CustomPermissionsUser._default_manager.create_superuser(
            email='test2@example.com',
            password='test',
            date_of_birth=date(1976, 11, 8)
        )
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserModelBackendAuthenticateTest(TestCase):
    """
    Tests that the model backend can accept a credentials kwarg labeled with
    custom user model's USERNAME_FIELD.
    """

    def test_authenticate(self):
        test_user = CustomUser._default_manager.create_user(
            email='test@example.com',
            password='test',
            date_of_birth=date(2006, 4, 25)
        )
        # authenticate() is called with ``email=`` rather than ``username=``,
        # matching the custom model's identifying field.
        authenticated_user = authenticate(email='test@example.com', password='test')
        self.assertEqual(test_user, authenticated_user)
class TestObj(object):
    """Marker object handed to SimpleRowlevelBackend in object-permission tests."""
class SimpleRowlevelBackend(object):
    """Minimal auth backend implementing only object (row) level permissions.

    Global (no ``obj``) queries always answer "no permissions"; object-level
    answers depend on the user's name, anonymity and active state.
    """

    def has_perm(self, user, perm, obj=None):
        # Bug fix: return an explicit False instead of an implicit None on
        # the no-object path; callers test truthiness, so behavior is kept.
        if not obj:
            return False  # We only support row level perms
        if isinstance(obj, TestObj):
            if user.username == 'test2':
                return True
            elif user.is_anonymous() and perm == 'anon':
                return True
            elif not user.is_active and perm == 'inactive':
                return True
        return False

    def has_module_perms(self, user, app_label):
        # Inactive (non-anonymous) users see nothing; everyone else sees app1.
        if not user.is_anonymous() and not user.is_active:
            return False
        return app_label == "app1"

    def get_all_permissions(self, user, obj=None):
        if not obj:
            return []  # We only support row level perms
        if not isinstance(obj, TestObj):
            return ['none']
        if user.is_anonymous():
            return ['anon']
        if user.username == 'test2':
            return ['simple', 'advanced']
        else:
            return ['simple']

    def get_group_permissions(self, user, obj=None):
        # Bug fix: return an empty iterable (not None) on the no-object path,
        # matching get_all_permissions(); the auth machinery unions backend
        # results into a set, and ``set.update(None)`` would raise TypeError.
        if not obj:
            return []  # We only support row level perms
        if not isinstance(obj, TestObj):
            return ['none']
        if 'test_group' in [group.name for group in user.groups.all()]:
            return ['group_perm']
        else:
            return ['none']
@skipIfCustomUser
class RowlevelBackendTest(TestCase):
    """
    Tests for auth backend that supports object level permissions
    """
    backend = 'django.contrib.auth.tests.test_auth_backends.SimpleRowlevelBackend'

    def setUp(self):
        # Append the row-level backend, so both the default backends and
        # SimpleRowlevelBackend are consulted.
        self.curr_auth = settings.AUTHENTICATION_BACKENDS
        settings.AUTHENTICATION_BACKENDS = tuple(self.curr_auth) + (self.backend,)
        self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
        self.user2 = User.objects.create_user('test2', 'test2@example.com', 'test')
        self.user3 = User.objects.create_user('test3', 'test3@example.com', 'test')

    def tearDown(self):
        settings.AUTHENTICATION_BACKENDS = self.curr_auth
        # The get_group_permissions test messes with ContentTypes, which will
        # be cached; flush the cache to ensure there are no side effects
        # Refs #14975, #14925
        ContentType.objects.clear_cache()

    def test_has_perm(self):
        self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
        self.assertEqual(self.user2.has_perm('perm', TestObj()), True)
        self.assertEqual(self.user2.has_perm('perm'), False)
        self.assertEqual(self.user2.has_perms(['simple', 'advanced'], TestObj()), True)
        self.assertEqual(self.user3.has_perm('perm', TestObj()), False)
        self.assertEqual(self.user3.has_perm('anon', TestObj()), False)
        self.assertEqual(self.user3.has_perms(['simple', 'advanced'], TestObj()), False)

    def test_get_all_permissions(self):
        self.assertEqual(self.user1.get_all_permissions(TestObj()), set(['simple']))
        self.assertEqual(self.user2.get_all_permissions(TestObj()), set(['simple', 'advanced']))
        # Without an object the row-level backend contributes nothing.
        self.assertEqual(self.user2.get_all_permissions(), set([]))

    def test_get_group_permissions(self):
        group = Group.objects.create(name='test_group')
        self.user3.groups.add(group)
        self.assertEqual(self.user3.get_group_permissions(TestObj()), set(['group_perm']))
class AnonymousUserBackendTest(TestCase):
    """
    Tests for AnonymousUser delegating to backend.
    """
    backend = 'django.contrib.auth.tests.test_auth_backends.SimpleRowlevelBackend'

    def setUp(self):
        # Only the row-level backend is active; the subject is anonymous.
        self.curr_auth = settings.AUTHENTICATION_BACKENDS
        settings.AUTHENTICATION_BACKENDS = (self.backend,)
        self.user1 = AnonymousUser()

    def tearDown(self):
        settings.AUTHENTICATION_BACKENDS = self.curr_auth

    def test_has_perm(self):
        self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
        self.assertEqual(self.user1.has_perm('anon', TestObj()), True)

    def test_has_perms(self):
        self.assertEqual(self.user1.has_perms(['anon'], TestObj()), True)
        self.assertEqual(self.user1.has_perms(['anon', 'perm'], TestObj()), False)

    def test_has_module_perms(self):
        self.assertEqual(self.user1.has_module_perms("app1"), True)
        self.assertEqual(self.user1.has_module_perms("app2"), False)

    def test_get_all_permissions(self):
        self.assertEqual(self.user1.get_all_permissions(TestObj()), set(['anon']))
@skipIfCustomUser
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
    """
    Tests that an appropriate error is raised if no auth backends are provided.
    """
    def setUp(self):
        self.user = User.objects.create_user('test', 'test@example.com', 'test')

    def test_raises_exception(self):
        # With no backends configured, a permission check cannot be answered.
        self.assertRaises(ImproperlyConfigured, self.user.has_perm, ('perm', TestObj(),))
@skipIfCustomUser
class InActiveUserBackendTest(TestCase):
    """
    Tests for an inactive user
    """
    backend = 'django.contrib.auth.tests.test_auth_backends.SimpleRowlevelBackend'

    def setUp(self):
        self.curr_auth = settings.AUTHENTICATION_BACKENDS
        settings.AUTHENTICATION_BACKENDS = (self.backend,)
        self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
        # Deactivate the user so the backend's inactive-user paths are hit.
        self.user1.is_active = False
        self.user1.save()

    def tearDown(self):
        settings.AUTHENTICATION_BACKENDS = self.curr_auth

    def test_has_perm(self):
        self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
        self.assertEqual(self.user1.has_perm('inactive', TestObj()), True)

    def test_has_module_perms(self):
        self.assertEqual(self.user1.has_module_perms("app1"), False)
        self.assertEqual(self.user1.has_module_perms("app2"), False)
class PermissionDeniedBackend(object):
    """An authentication backend whose ``authenticate`` unconditionally fails
    by raising :exc:`PermissionDenied`.
    """
    # Capability flags advertised to the auth framework.
    supports_inactive_user = True
    supports_anonymous_user = True
    supports_object_permissions = True

    def authenticate(self, username=None, password=None):
        """Reject every credential pair."""
        raise PermissionDenied
@skipIfCustomUser
class PermissionDeniedBackendTest(TestCase):
    """
    Tests that other backends are not checked once a backend raises PermissionDenied
    """
    backend = 'django.contrib.auth.tests.test_auth_backends.PermissionDeniedBackend'

    def setUp(self):
        self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
        self.user1.save()

    # ``backend`` below is the class attribute, read while the class body
    # executes — the decorator arguments are evaluated at definition time.
    @override_settings(AUTHENTICATION_BACKENDS=(backend, ) +
                       tuple(settings.AUTHENTICATION_BACKENDS))
    def test_permission_denied(self):
        "user is not authenticated after a backend raises permission denied #2550"
        self.assertEqual(authenticate(username='test', password='test'), None)

    @override_settings(AUTHENTICATION_BACKENDS=tuple(
        settings.AUTHENTICATION_BACKENDS) + (backend, ))
    def test_authenticates(self):
        self.assertEqual(authenticate(username='test', password='test'), self.user1)
| bsd-3-clause |
adobe-research/spark-cluster-deployment | initial-deployment-puppet/modules/spark/files/spark/python/pyspark/storagelevel.py | 2 | 2459 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__all__ = ["StorageLevel"]
class StorageLevel:
    """
    Flags for controlling the storage of an RDD. Each StorageLevel records whether to use memory,
    whether to drop the RDD to disk if it falls out of memory, whether to keep the data in memory
    in a serialized format, and whether to replicate the RDD partitions on multiple nodes.
    Also contains static constants for some commonly used storage levels, such as MEMORY_ONLY.
    """

    def __init__(self, useDisk, useMemory, useOffHeap, deserialized, replication=1):
        # Store the flags verbatim on the instance.
        self.useDisk = useDisk
        self.useMemory = useMemory
        self.useOffHeap = useOffHeap
        self.deserialized = deserialized
        self.replication = replication

    def __repr__(self):
        flags = (self.useDisk, self.useMemory, self.useOffHeap,
                 self.deserialized, self.replication)
        return "StorageLevel(%s, %s, %s, %s, %s)" % flags


# Commonly used storage levels, exposed as class attributes.
StorageLevel.DISK_ONLY = StorageLevel(True, False, False, False)
StorageLevel.DISK_ONLY_2 = StorageLevel(True, False, False, False, 2)
StorageLevel.MEMORY_ONLY = StorageLevel(False, True, False, True)
StorageLevel.MEMORY_ONLY_2 = StorageLevel(False, True, False, True, 2)
StorageLevel.MEMORY_ONLY_SER = StorageLevel(False, True, False, False)
StorageLevel.MEMORY_ONLY_SER_2 = StorageLevel(False, True, False, False, 2)
StorageLevel.MEMORY_AND_DISK = StorageLevel(True, True, False, True)
StorageLevel.MEMORY_AND_DISK_2 = StorageLevel(True, True, False, True, 2)
StorageLevel.MEMORY_AND_DISK_SER = StorageLevel(True, True, False, False)
StorageLevel.MEMORY_AND_DISK_SER_2 = StorageLevel(True, True, False, False, 2)
StorageLevel.OFF_HEAP = StorageLevel(False, False, True, False, 1) | apache-2.0 |
sameerparekh/pants | contrib/cpp/src/python/pants/contrib/cpp/tasks/cpp_task.py | 9 | 1810 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import subprocess
from pants.backend.core.tasks.task import Task
from pants.base.exceptions import TaskError
from pants.contrib.cpp.targets.cpp_binary import CppBinary
from pants.contrib.cpp.targets.cpp_library import CppLibrary
from pants.contrib.cpp.targets.cpp_target import CppTarget
from pants.contrib.cpp.toolchain.cpp_toolchain import CppToolchain
class CppTask(Task):
    """Base class for C++ tasks: target predicates, toolchain access, and
    subprocess execution with output captured into a workunit."""

    @staticmethod
    def is_cpp(target):
        """Return True for any C++ target."""
        return isinstance(target, CppTarget)

    @staticmethod
    def is_library(target):
        """Return True for C++ library targets."""
        return isinstance(target, CppLibrary)

    @staticmethod
    def is_binary(target):
        """Return True for C++ binary targets."""
        return isinstance(target, CppBinary)

    @classmethod
    def register_options(cls, register):
        super(CppTask, cls).register_options(register)
        register('--compiler', advanced=True, fingerprint=True,
                 help='Set a specific compiler to use (eg, g++-4.8, clang++)')

    def execute(self):
        raise NotImplementedError('execute must be implemented by subclasses of CppTask')

    def run_command(self, cmd, workunit):
        """Run ``cmd``, streaming its stdout/stderr into ``workunit``.

        Raises TaskError if the command exits non-zero or cannot be started.
        """
        try:
            self.context.log.debug('Executing: {0}'.format(cmd))
            # TODO: capture stdout/stderr and redirect to log
            subprocess.check_call(cmd, stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
        except subprocess.CalledProcessError as e:
            raise TaskError('Execution failed: {0}'.format(e))
        except Exception:
            # Bug fix: the previous bare ``except:`` also swallowed SystemExit
            # and KeyboardInterrupt; only wrap ordinary errors (e.g. OSError
            # when the executable is missing) in TaskError.
            raise TaskError('Failed to execute {0}'.format(cmd))

    @property
    def cpp_toolchain(self):
        """A CppToolchain configured with the task's --compiler option."""
        return CppToolchain(self.get_options().compiler)
| apache-2.0 |
arante/pyloc | microblog/flask/lib/python3.5/site-packages/sqlparse/engine/statement_splitter.py | 20 | 3648 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Andi Albrecht, albrecht.andi@gmail.com
#
# This module is part of python-sqlparse and is released under
# the BSD License: https://opensource.org/licenses/BSD-3-Clause
from sqlparse import sql, tokens as T
class StatementSplitter(object):
    """Filter that split stream at individual statements"""

    def __init__(self):
        self._reset()

    def _reset(self):
        """Set the filter attributes to its default values"""
        self._in_declare = False
        self._is_create = False
        self._begin_depth = 0

        self.consume_ws = False
        self.tokens = []
        self.level = 0

    def _change_splitlevel(self, ttype, value):
        """Get the new split level (increase, decrease or remain equal)"""
        # ANSI
        # if normal token return
        # wouldn't parenthesis increase/decrease a level?
        # no, inside a parenthesis can't start new statement
        if ttype not in T.Keyword:
            return 0

        # Everything after here is ttype = T.Keyword
        # Also to note, once entered an If statement you are done and basically
        # returning
        unified = value.upper()

        # three keywords begin with CREATE, but only one of them is DDL
        # DDL Create though can contain more words such as "or replace"
        if ttype is T.Keyword.DDL and unified.startswith('CREATE'):
            self._is_create = True
            return 0

        # can have nested declare inside of begin...
        if unified == 'DECLARE' and self._is_create and self._begin_depth == 0:
            self._in_declare = True
            return 1

        if unified == 'BEGIN':
            self._begin_depth += 1
            if self._is_create:
                # FIXME(andi): This makes no sense.
                return 1
            return 0

        # Should this respect a preceding BEGIN?
        # In CASE ... WHEN ... END this results in a split level -1.
        # Would having multiple CASE WHEN END and a Assignment Operator
        # cause the statement to cut off prematurely?
        if unified == 'END':
            self._begin_depth = max(0, self._begin_depth - 1)
            return -1

        if (unified in ('IF', 'FOR', 'WHILE') and
                self._is_create and self._begin_depth > 0):
            return 1

        if unified in ('END IF', 'END FOR', 'END WHILE'):
            return -1

        # Default
        return 0

    def process(self, stream):
        """Process the stream"""
        EOS_TTYPE = T.Whitespace, T.Comment.Single

        # Run over all stream tokens
        for ttype, value in stream:
            # Yield token if we finished a statement and there's no whitespaces
            # It will count newline token as a non whitespace. In this context
            # whitespace ignores newlines.
            # why don't multi line comments also count?
            if self.consume_ws and ttype not in EOS_TTYPE:
                yield sql.Statement(self.tokens)

                # Reset filter and prepare to process next statement
                self._reset()

            # Change current split level (increase, decrease or remain equal)
            self.level += self._change_splitlevel(ttype, value)

            # Append the token to the current statement
            self.tokens.append(sql.Token(ttype, value))

            # Check if we get the end of a statement
            if self.level <= 0 and ttype is T.Punctuation and value == ';':
                self.consume_ws = True

        # Yield pending statement (if any)
        if self.tokens:
            yield sql.Statement(self.tokens)
| gpl-3.0 |
chiefspace/udemy-rest-api | udemy_rest_api_section5/code/env/lib/python3.4/site-packages/flask/ctx.py | 170 | 14739 | # -*- coding: utf-8 -*-
"""
flask.ctx
~~~~~~~~~
Implements the objects required to keep the context.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
from functools import update_wrapper
from werkzeug.exceptions import HTTPException
from .globals import _request_ctx_stack, _app_ctx_stack
from .signals import appcontext_pushed, appcontext_popped
from ._compat import BROKEN_PYPY_CTXMGR_EXIT, reraise
# a singleton sentinel value for parameter defaults
_sentinel = object()
class _AppCtxGlobals(object):
    """A plain namespace object whose dict-style helpers delegate to
    ``__dict__``."""

    def get(self, name, default=None):
        """Return attribute ``name``, or ``default`` when it is unset."""
        return self.__dict__.get(name, default)

    def pop(self, name, default=_sentinel):
        """Remove and return attribute ``name``.

        Without ``default`` a missing attribute raises KeyError, mirroring
        ``dict.pop``.
        """
        if default is _sentinel:
            return self.__dict__.pop(name)
        return self.__dict__.pop(name, default)

    def setdefault(self, name, default=None):
        """Return attribute ``name``, first setting it to ``default`` if unset."""
        return self.__dict__.setdefault(name, default)

    def __contains__(self, item):
        return item in self.__dict__

    def __iter__(self):
        return iter(self.__dict__)

    def __repr__(self):
        ctx = _app_ctx_stack.top
        if ctx is not None:
            return '<flask.g of %r>' % ctx.app.name
        return object.__repr__(self)
def after_this_request(f):
    """Executes a function after this request. This is useful to modify
    response objects. The function is passed the response object and has
    to return the same or a new one.

    Example::

        @app.route('/')
        def index():
            @after_this_request
            def add_header(response):
                response.headers['X-Foo'] = 'Parachute'
                return response
            return 'Hello World!'

    This is more useful if a function other than the view function wants to
    modify a response. For instance think of a decorator that wants to add
    some headers without converting the return value into a response object.

    .. versionadded:: 0.9
    """
    # Register on the current request context; these run before the
    # application's regular after_request functions.
    _request_ctx_stack.top._after_request_functions.append(f)
    return f
def copy_current_request_context(f):
    """A helper function that decorates a function to retain the current
    request context. This is useful when working with greenlets. The moment
    the function is decorated a copy of the request context is created and
    then pushed when the function is called.

    Example::

        import gevent
        from flask import copy_current_request_context

        @app.route('/')
        def index():
            @copy_current_request_context
            def do_some_work():
                # do some work here, it can access flask.request like you
                # would otherwise in the view function.
                ...
            gevent.spawn(do_some_work)
            return 'Regular response'

    .. versionadded:: 0.10
    """
    top = _request_ctx_stack.top
    if top is None:
        raise RuntimeError('This decorator can only be used at local scopes '
            'when a request context is on the stack. For instance within '
            'view functions.')
    # Snapshot the context at decoration time; it is re-pushed on each call.
    reqctx = top.copy()
    def wrapper(*args, **kwargs):
        with reqctx:
            return f(*args, **kwargs)
    return update_wrapper(wrapper, f)
def has_request_context():
    """If you have code that wants to test if a request context is there or
    not this function can be used. For instance, you may want to take advantage
    of request information if the request object is available, but fail
    silently if it is unavailable.

    ::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and has_request_context():
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    Alternatively you can also just test any of the context bound objects
    (such as :class:`request` or :class:`g` for truthness)::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and request:
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    .. versionadded:: 0.7
    """
    # A request context exists exactly when something is on the stack.
    return _request_ctx_stack.top is not None
def has_app_context():
    """Works like :func:`has_request_context` but for the application
    context. You can also just do a boolean check on the
    :data:`current_app` object instead.

    .. versionadded:: 0.9
    """
    # True whenever an AppContext has been pushed and not yet popped.
    return _app_ctx_stack.top is not None
class AppContext(object):
    """The application context binds an application object implicitly
    to the current thread or greenlet, similar to how the
    :class:`RequestContext` binds request information. The application
    context is also implicitly created if a request context is created
    but the application is not on top of the individual application
    context.
    """

    def __init__(self, app):
        self.app = app
        self.url_adapter = app.create_url_adapter(None)
        self.g = app.app_ctx_globals_class()

        # Like request context, app contexts can be pushed multiple times
        # but there a basic "refcount" is enough to track them.
        self._refcnt = 0

    def push(self):
        """Binds the app context to the current context."""
        self._refcnt += 1
        if hasattr(sys, 'exc_clear'):
            # Python 2 only: drop lingering exception state before pushing.
            sys.exc_clear()
        _app_ctx_stack.push(self)
        appcontext_pushed.send(self.app)

    def pop(self, exc=_sentinel):
        """Pops the app context."""
        try:
            self._refcnt -= 1
            # Teardown functions run only when the outermost push is popped.
            if self._refcnt <= 0:
                if exc is _sentinel:
                    exc = sys.exc_info()[1]
                self.app.do_teardown_appcontext(exc)
        finally:
            rv = _app_ctx_stack.pop()
        assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
            % (rv, self)
        appcontext_popped.send(self.app)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.pop(exc_value)

        if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
            reraise(exc_type, exc_value, tb)
class RequestContext(object):
    """The request context contains all request relevant information. It is
    created at the beginning of the request and pushed to the
    `_request_ctx_stack` and removed at the end of it. It will create the
    URL adapter and request object for the WSGI environment provided.

    Do not attempt to use this class directly, instead use
    :meth:`~flask.Flask.test_request_context` and
    :meth:`~flask.Flask.request_context` to create this object.

    When the request context is popped, it will evaluate all the
    functions registered on the application for teardown execution
    (:meth:`~flask.Flask.teardown_request`).

    The request context is automatically popped at the end of the request
    for you. In debug mode the request context is kept around if
    exceptions happen so that interactive debuggers have a chance to
    introspect the data. With 0.4 this can also be forced for requests
    that did not fail and outside of ``DEBUG`` mode. By setting
    ``'flask._preserve_context'`` to ``True`` on the WSGI environment the
    context will not pop itself at the end of the request. This is used by
    the :meth:`~flask.Flask.test_client` for example to implement the
    deferred cleanup functionality.

    You might find this helpful for unittests where you need the
    information from the context local around for a little longer. Make
    sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
    that situation, otherwise your unittests will leak memory.
    """

    def __init__(self, app, environ, request=None):
        self.app = app
        if request is None:
            request = app.request_class(environ)
        self.request = request
        self.url_adapter = app.create_url_adapter(self.request)
        self.flashes = None
        self.session = None

        # Request contexts can be pushed multiple times and interleaved with
        # other request contexts. Now only if the last level is popped we
        # get rid of them. Additionally if an application context is missing
        # one is created implicitly so for each level we add this information
        self._implicit_app_ctx_stack = []

        # indicator if the context was preserved. Next time another context
        # is pushed the preserved context is popped.
        self.preserved = False

        # remembers the exception for pop if there is one in case the context
        # preservation kicks in.
        self._preserved_exc = None

        # Functions that should be executed after the request on the response
        # object. These will be called before the regular "after_request"
        # functions.
        self._after_request_functions = []

        self.match_request()

    # ``g`` proxies to the active application context's globals object;
    # the accessor functions are deleted so only the property remains.
    def _get_g(self):
        return _app_ctx_stack.top.g
    def _set_g(self, value):
        _app_ctx_stack.top.g = value
    g = property(_get_g, _set_g)
    del _get_g, _set_g

    def copy(self):
        """Creates a copy of this request context with the same request object.
        This can be used to move a request context to a different greenlet.
        Because the actual request object is the same this cannot be used to
        move a request context to a different thread unless access to the
        request object is locked.

        .. versionadded:: 0.10
        """
        return self.__class__(self.app,
            environ=self.request.environ,
            request=self.request
        )

    def match_request(self):
        """Can be overridden by a subclass to hook into the matching
        of the request.
        """
        try:
            url_rule, self.request.view_args = \
                self.url_adapter.match(return_rule=True)
            self.request.url_rule = url_rule
        except HTTPException as e:
            # Routing errors are stored and re-raised later during dispatch.
            self.request.routing_exception = e

    def push(self):
        """Binds the request context to the current context."""
        # If an exception occurs in debug mode or if context preservation is
        # activated under exception situations exactly one context stays
        # on the stack. The rationale is that you want to access that
        # information under debug situations. However if someone forgets to
        # pop that context again we want to make sure that on the next push
        # it's invalidated, otherwise we run at risk that something leaks
        # memory. This is usually only a problem in test suite since this
        # functionality is not active in production environments.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop(top._preserved_exc)

        # Before we push the request context we have to ensure that there
        # is an application context.
        app_ctx = _app_ctx_stack.top
        if app_ctx is None or app_ctx.app != self.app:
            app_ctx = self.app.app_context()
            app_ctx.push()
            self._implicit_app_ctx_stack.append(app_ctx)
        else:
            self._implicit_app_ctx_stack.append(None)

        if hasattr(sys, 'exc_clear'):
            sys.exc_clear()

        _request_ctx_stack.push(self)

        # Open the session at the moment that the request context is
        # available. This allows a custom open_session method to use the
        # request context (e.g. code that access database information
        # stored on `g` instead of the appcontext).
        self.session = self.app.open_session(self.request)
        if self.session is None:
            self.session = self.app.make_null_session()

    def pop(self, exc=_sentinel):
        """Pops the request context and unbinds it by doing that. This will
        also trigger the execution of functions registered by the
        :meth:`~flask.Flask.teardown_request` decorator.

        .. versionchanged:: 0.9
           Added the `exc` argument.
        """
        app_ctx = self._implicit_app_ctx_stack.pop()

        try:
            clear_request = False
            if not self._implicit_app_ctx_stack:
                self.preserved = False
                self._preserved_exc = None
                if exc is _sentinel:
                    exc = sys.exc_info()[1]
                self.app.do_teardown_request(exc)

                # If this interpreter supports clearing the exception information
                # we do that now. This will only go into effect on Python 2.x,
                # on 3.x it disappears automatically at the end of the exception
                # stack.
                if hasattr(sys, 'exc_clear'):
                    sys.exc_clear()

                request_close = getattr(self.request, 'close', None)
                if request_close is not None:
                    request_close()
                clear_request = True
        finally:
            rv = _request_ctx_stack.pop()

            # get rid of circular dependencies at the end of the request
            # so that we don't require the GC to be active.
            if clear_request:
                rv.request.environ['werkzeug.request'] = None

            # Get rid of the app as well if necessary.
            if app_ctx is not None:
                app_ctx.pop(exc)

            assert rv is self, 'Popped wrong request context. ' \
                '(%r instead of %r)' % (rv, self)

    def auto_pop(self, exc):
        # Keep the context alive for debugging when requested, otherwise pop.
        if self.request.environ.get('flask._preserve_context') or \
           (exc is not None and self.app.preserve_context_on_exception):
            self.preserved = True
            self._preserved_exc = exc
        else:
            self.pop(exc)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # do not pop the request stack if we are in debug mode and an
        # exception happened. This will allow the debugger to still
        # access the request object in the interactive shell. Furthermore
        # the context can be force kept alive for the test client.
        # See flask.testing for how this works.
        self.auto_pop(exc_value)

        if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
            reraise(exc_type, exc_value, tb)

    def __repr__(self):
        return '<%s \'%s\' [%s] of %s>' % (
            self.__class__.__name__,
            self.request.url,
            self.request.method,
            self.app.name,
        )
| gpl-2.0 |
nmercier/linux-cross-gcc | linux/lib/python2.7/dist-packages/samba/subunit/__init__.py | 32 | 2672 | # Subunit handling
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2014
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Subunit test protocol."""
import samba
samba.ensure_third_party_module("iso8601", "pyiso8601")
import iso8601
import unittest
# Progress directive opcodes used by the subunit stream protocol.
PROGRESS_SET = 0
PROGRESS_CUR = 1
PROGRESS_PUSH = 2
PROGRESS_POP = 3
def RemoteError(description=""):
    """Build an ``exc_info``-style triple describing a remote failure.

    Returns ``(type, value, traceback)`` with a plain :class:`Exception`
    carrying *description* and no traceback, mirroring the shape of
    ``sys.exc_info()``.
    """
    return Exception, Exception(description), None
class RemotedTestCase(unittest.TestCase):
    """A pseudo test case standing in for a test run in a child process.

    Instances give the Python test API a TestCase that can be printed,
    compared and introspected, but since they merely memoise a test that
    already ran in a separate process they refuse to perform any real
    test actions.
    """

    def __init__(self, description):
        """Create a pseudo test case described by *description*.

        Note: deliberately does not call ``TestCase.__init__`` — there is
        no real test method to bind.
        """
        self.__description = description

    def __eq__(self, other):
        # Equal iff the other object carries the same (mangled)
        # description attribute; anything else compares unequal.
        try:
            return self.__description == other.__description
        except AttributeError:
            return False

    def error(self, label):
        """Raise: interactive operations are not permitted on remoted cases."""
        raise NotImplementedError("%s on RemotedTestCases is not permitted." %
            label)

    def setUp(self):
        self.error("setUp")

    def tearDown(self):
        self.error("tearDown")

    def shortDescription(self):
        return self.__description

    def id(self):
        return "%s" % (self.__description,)

    def run(self, result=None):
        # A remoted case cannot actually execute; report an error instead.
        if result is None:
            result = self.defaultTestResult()
        result.startTest(self)
        result.addError(self, RemoteError("Cannot run RemotedTestCases.\n"))
        result.stopTest(self)

    def _strclass(self):
        klass = self.__class__
        return "%s.%s" % (klass.__module__, klass.__name__)

    def __str__(self):
        return "%s (%s)" % (self.__description, self._strclass())

    def __repr__(self):
        return "<%s description='%s'>" % \
               (self._strclass(), self.__description)
| bsd-3-clause |
Ninjakow/TrueSkill | lib/click/types.py | 196 | 18864 | import os
import stat
from ._compat import open_stream, text_type, filename_to_ui, \
get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2
from .exceptions import BadParameter
from .utils import safecall, LazyFile
class ParamType(object):
    """Helper for converting values through types.

    A valid type needs a name, passes ``None`` through unchanged, converts
    from a string, passes already-converted values through unchanged
    (idempotent), and copes with *param* and *ctx* being `None` — which
    happens when the type is used with prompt inputs.
    """
    #: composite types report an arity and consume several arguments
    is_composite = False

    #: the descriptive name of this type
    name = None

    #: splitter used when a list of this type is pulled from a string
    #: environment variable; `None` means "any whitespace".  Paths and
    #: files override this with ``os.path.pathsep`` (":" on Unix, ";" on
    #: Windows).
    envvar_list_splitter = None

    def __call__(self, value, param=None, ctx=None):
        # `None` (the missing value) is never converted.
        if value is None:
            return None
        return self.convert(value, param, ctx)

    def get_metavar(self, param):
        """Returns the metavar default for this param if it provides one."""

    def get_missing_message(self, param):
        """Optionally might return extra information about a missing
        parameter.

        .. versionadded:: 2.0
        """

    def convert(self, value, param, ctx):
        """Convert *value*; not invoked for the missing value ``None``."""
        return value

    def split_envvar_value(self, rv):
        """Split an environment variable value into chunks.

        Uses :attr:`envvar_list_splitter`; when it is `None` the value is
        split on whitespace and leading/trailing whitespace is ignored,
        otherwise leading/trailing splitters usually produce empty items.
        """
        return (rv or '').split(self.envvar_list_splitter)

    def fail(self, message, param=None, ctx=None):
        """Helper method to fail with an invalid value message."""
        raise BadParameter(message, ctx=ctx, param=param)
class CompositeParamType(ParamType):
    """Base class for types that consume more than one argument."""
    is_composite = True

    @property
    def arity(self):
        """Number of values consumed; concrete subclasses must override."""
        raise NotImplementedError()
class FuncParamType(ParamType):
    """Wraps an arbitrary conversion callable as a parameter type."""

    def __init__(self, func):
        self.name = func.__name__
        self.func = func

    def convert(self, value, param, ctx):
        try:
            return self.func(value)
        except ValueError:
            # Build a printable version of the offending value for the
            # error message; undecodable byte strings (PY2) are replaced.
            try:
                value = text_type(value)
            except UnicodeError:
                value = str(value).decode('utf-8', 'replace')
            self.fail(value, param, ctx)
class UnprocessedParamType(ParamType):
    """A type that performs no conversion whatsoever (see ``UNPROCESSED``)."""
    name = 'text'

    def convert(self, value, param, ctx):
        # Deliberately a no-op: the raw value is handed through untouched.
        return value

    def __repr__(self):
        return 'UNPROCESSED'
class StringParamType(ParamType):
    """Unicode string parameter type (the implicit default)."""
    name = 'text'

    def convert(self, value, param, ctx):
        # Non-bytes values pass through unchanged.
        if not isinstance(value, bytes):
            return value
        # Byte strings from argv are decoded with the argv encoding
        # first, then the filesystem encoding, finally falling back to
        # UTF-8 with replacement characters.
        argv_enc = _get_argv_encoding()
        try:
            return value.decode(argv_enc)
        except UnicodeError:
            fs_enc = get_filesystem_encoding()
            if fs_enc == argv_enc:
                # NOTE: the undecodable byte string is returned as-is in
                # this case (original fallback behaviour).
                return value
            try:
                return value.decode(fs_enc)
            except UnicodeError:
                return value.decode('utf-8', 'replace')

    def __repr__(self):
        return 'STRING'
class Choice(ParamType):
    """Checks a value against a fixed set of supported string values.

    See :ref:`choice-opts` for an example.
    """
    name = 'choice'

    def __init__(self, choices):
        self.choices = choices

    def get_metavar(self, param):
        return '[%s]' % '|'.join(self.choices)

    def get_missing_message(self, param):
        return 'Choose from %s.' % ', '.join(self.choices)

    def convert(self, value, param, ctx):
        # An exact match always wins.
        if value in self.choices:
            return value

        # Otherwise retry after applying the context's token
        # normalization function to both the value and the choices.
        normalize = ctx.token_normalize_func if ctx is not None else None
        if normalize is not None:
            value = normalize(value)
            for candidate in self.choices:
                if normalize(candidate) == value:
                    return candidate

        self.fail('invalid choice: %s. (choose from %s)' %
                  (value, ', '.join(self.choices)), param, ctx)

    def __repr__(self):
        return 'Choice(%r)' % list(self.choices)
class IntParamType(ParamType):
    """Integer parameter type (selectable with ``int``)."""
    name = 'integer'

    def convert(self, value, param, ctx):
        try:
            converted = int(value)
        except (ValueError, UnicodeError):
            self.fail('%s is not a valid integer' % value, param, ctx)
        else:
            return converted

    def __repr__(self):
        return 'INT'
class IntRange(IntParamType):
    """Like :data:`click.INT` but restricted to fit into a range.

    By default values outside the range fail validation; with ``clamp``
    they are silently pinned to the nearest edge instead.
    See :ref:`ranges` for an example.
    """
    name = 'integer range'

    def __init__(self, min=None, max=None, clamp=False):
        self.min = min
        self.max = max
        self.clamp = clamp

    def convert(self, value, param, ctx):
        rv = IntParamType.convert(self, value, param, ctx)
        below = self.min is not None and rv < self.min
        above = self.max is not None and rv > self.max
        if self.clamp:
            if below:
                return self.min
            if above:
                return self.max
        elif below or above:
            # Tailor the message to whichever bound(s) exist.
            if self.min is None:
                self.fail('%s is bigger than the maximum valid value '
                          '%s.' % (rv, self.max), param, ctx)
            elif self.max is None:
                self.fail('%s is smaller than the minimum valid value '
                          '%s.' % (rv, self.min), param, ctx)
            else:
                self.fail('%s is not in the valid range of %s to %s.'
                          % (rv, self.min, self.max), param, ctx)
        return rv

    def __repr__(self):
        return 'IntRange(%r, %r)' % (self.min, self.max)
class BoolParamType(ParamType):
    """Boolean parameter type; accepts common true/false spellings."""
    name = 'boolean'

    def convert(self, value, param, ctx):
        # Actual booleans pass straight through.
        if isinstance(value, bool):
            return bool(value)
        lowered = value.lower()
        if lowered in ('true', '1', 'yes', 'y'):
            return True
        if lowered in ('false', '0', 'no', 'n'):
            return False
        # The lowered value is reported, matching historical behaviour.
        self.fail('%s is not a valid boolean' % lowered, param, ctx)

    def __repr__(self):
        return 'BOOL'
class FloatParamType(ParamType):
    """Floating point parameter type (selectable with ``float``)."""
    name = 'float'

    def convert(self, value, param, ctx):
        try:
            converted = float(value)
        except (UnicodeError, ValueError):
            self.fail('%s is not a valid floating point value' %
                      value, param, ctx)
        else:
            return converted

    def __repr__(self):
        return 'FLOAT'
class UUIDParameterType(ParamType):
    """UUID parameter type."""
    name = 'uuid'

    def convert(self, value, param, ctx):
        # Imported lazily to keep interpreter startup cheap.
        import uuid
        try:
            # On Python 2 the UUID constructor needs a byte string.
            if PY2 and isinstance(value, text_type):
                value = value.encode('ascii')
            return uuid.UUID(value)
        except (UnicodeError, ValueError):
            self.fail('%s is not a valid UUID value' % value, param, ctx)

    def __repr__(self):
        return 'UUID'
class File(ParamType):
    """Declares a parameter to be a file for reading or writing.  The file
    is automatically closed once the context tears down (after the command
    finished working).

    Files can be opened for reading or writing.  The special value ``-``
    indicates stdin or stdout depending on the mode.

    By default, the file is opened for reading text data, but it can also be
    opened in binary mode or for writing.  The encoding parameter can be used
    to force a specific encoding.

    The `lazy` flag controls if the file should be opened immediately or
    upon first IO.  The default is to be non lazy for standard input and
    output streams as well as files opened for reading, lazy otherwise.

    Starting with Click 2.0, files can also be opened atomically in which
    case all writes go into a separate file in the same folder and upon
    completion the file will be moved over to the original location.  This
    is useful if a file regularly read by other users is modified.

    :param mode: the mode the file is opened in (``open()`` semantics).
    :param encoding: optional forced encoding for text mode.
    :param errors: decoding error policy, default ``'strict'``.
    :param lazy: explicit lazy-open flag; `None` means "decide from the
                 mode and value" (see :meth:`resolve_lazy_flag`).
    :param atomic: write to a temp file and move into place on completion.

    See :ref:`file-args` for more information.
    """
    name = 'filename'
    envvar_list_splitter = os.path.pathsep

    def __init__(self, mode='r', encoding=None, errors='strict', lazy=None,
                 atomic=False):
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.lazy = lazy
        self.atomic = atomic

    def resolve_lazy_flag(self, value):
        # Explicit flag wins; otherwise stdin/stdout ('-') is opened
        # eagerly, write modes lazily, and read modes eagerly.
        if self.lazy is not None:
            return self.lazy
        if value == '-':
            return False
        elif 'w' in self.mode:
            return True
        return False

    def convert(self, value, param, ctx):
        try:
            # Already a file-like object: pass it through untouched.
            if hasattr(value, 'read') or hasattr(value, 'write'):
                return value

            lazy = self.resolve_lazy_flag(value)

            if lazy:
                # LazyFile defers the actual open until first IO; the
                # context closes it intelligently on teardown.
                f = LazyFile(value, self.mode, self.encoding, self.errors,
                             atomic=self.atomic)
                if ctx is not None:
                    ctx.call_on_close(f.close_intelligently)
                return f

            f, should_close = open_stream(value, self.mode,
                                          self.encoding, self.errors,
                                          atomic=self.atomic)
            # If a context is provided, we automatically close the file
            # at the end of the context execution (or flush out).  If a
            # context does not exist, it's the caller's responsibility to
            # properly close the file.  This for instance happens when the
            # type is used with prompts.
            if ctx is not None:
                if should_close:
                    ctx.call_on_close(safecall(f.close))
                else:
                    # Standard streams are flushed, not closed.
                    ctx.call_on_close(safecall(f.flush))
            return f
        except (IOError, OSError) as e:
            self.fail('Could not open file: %s: %s' % (
                filename_to_ui(value),
                get_streerror(e),
            ), param, ctx)
class Path(ParamType):
    """The path type is similar to the :class:`File` type but it performs
    different checks.  First of all, instead of returning an open file
    handle it returns just the filename.  Secondly, it can perform various
    basic checks about what the file or directory should be.

    .. versionchanged:: 6.0
       `allow_dash` was added.

    :param exists: if set to true, the file or directory needs to exist for
                   this value to be valid.  If this is not required and a
                   file does indeed not exist, then all further checks are
                   silently skipped.
    :param file_okay: controls if a file is a possible value.
    :param dir_okay: controls if a directory is a possible value.
    :param writable: if true, a writable check is performed.
    :param readable: if true, a readable check is performed.
    :param resolve_path: if this is true, then the path is fully resolved
                         before the value is passed onwards.  This means
                         that it's absolute and symlinks are resolved.
    :param allow_dash: If this is set to `True`, a single dash to indicate
                       standard streams is permitted.
    :param path_type: optionally a string type that should be used to
                      represent the path.  The default is `None` which
                      means the return value will be either bytes or
                      unicode depending on what makes most sense given the
                      input data Click deals with.
    """
    envvar_list_splitter = os.path.pathsep

    def __init__(self, exists=False, file_okay=True, dir_okay=True,
                 writable=False, readable=True, resolve_path=False,
                 allow_dash=False, path_type=None):
        self.exists = exists
        self.file_okay = file_okay
        self.dir_okay = dir_okay
        self.writable = writable
        self.readable = readable
        self.resolve_path = resolve_path
        self.allow_dash = allow_dash
        self.type = path_type

        # BUG FIX: the second branch used to be a bare ``if`` whose
        # ``else`` clause clobbered the 'file' name set by the first
        # branch, so ``Path(dir_okay=False)`` reported itself as a generic
        # 'path'/'Path' in metavars and error messages.  ``elif`` makes
        # the three cases mutually exclusive.
        if self.file_okay and not self.dir_okay:
            self.name = 'file'
            self.path_type = 'File'
        elif self.dir_okay and not self.file_okay:
            self.name = 'directory'
            self.path_type = 'Directory'
        else:
            self.name = 'path'
            self.path_type = 'Path'

    def coerce_path_result(self, rv):
        # Normalize the result to the requested string type (bytes vs
        # unicode) when ``path_type`` was given; otherwise pass through.
        if self.type is not None and not isinstance(rv, self.type):
            if self.type is text_type:
                rv = rv.decode(get_filesystem_encoding())
            else:
                rv = rv.encode(get_filesystem_encoding())
        return rv

    def convert(self, value, param, ctx):
        rv = value

        # A single dash stands for stdin/stdout and skips all fs checks.
        is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-')

        if not is_dash:
            if self.resolve_path:
                rv = os.path.realpath(rv)

            try:
                st = os.stat(rv)
            except OSError:
                # Missing path: only an error when existence is required.
                if not self.exists:
                    return self.coerce_path_result(rv)
                self.fail('%s "%s" does not exist.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)

            if not self.file_okay and stat.S_ISREG(st.st_mode):
                self.fail('%s "%s" is a file.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if not self.dir_okay and stat.S_ISDIR(st.st_mode):
                self.fail('%s "%s" is a directory.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            # NOTE(review): the access checks use the original ``value``
            # rather than the resolved ``rv`` — kept as-is to match
            # upstream behaviour; confirm before changing.
            if self.writable and not os.access(value, os.W_OK):
                self.fail('%s "%s" is not writable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)
            if self.readable and not os.access(value, os.R_OK):
                self.fail('%s "%s" is not readable.' % (
                    self.path_type,
                    filename_to_ui(value)
                ), param, ctx)

        return self.coerce_path_result(rv)
class Tuple(CompositeParamType):
    """Applies a different type to each item of a fixed-size tuple.

    Useful when `nargs` is set to a fixed count and the items need
    different types; can only be used with a fixed `nargs` and is selected
    by using a Python tuple literal as a type.
    For more information see :ref:`tuple-type`.

    :param types: a list of types that should be used for the tuple items.
    """

    def __init__(self, types):
        self.types = [convert_type(ty) for ty in types]

    @property
    def name(self):
        return "<" + " ".join(ty.name for ty in self.types) + ">"

    @property
    def arity(self):
        return len(self.types)

    def convert(self, value, param, ctx):
        if len(value) != len(self.types):
            raise TypeError('It would appear that nargs is set to conflict '
                            'with the composite type arity.')
        return tuple(ty(item, param, ctx)
                     for ty, item in zip(self.types, value))
def convert_type(ty, default=None):
    """Convert a callable or plain Python type into the most appropriate
    :class:`ParamType`, guessing from *default* when *ty* is `None`.
    """
    guessed_type = False
    if ty is None and default is not None:
        # Infer the type from the default value; tuples map element-wise.
        if isinstance(default, tuple):
            ty = tuple(map(type, default))
        else:
            ty = type(default)
        guessed_type = True

    if isinstance(ty, tuple):
        return Tuple(ty)
    if isinstance(ty, ParamType):
        return ty
    if ty is text_type or ty is str or ty is None:
        return STRING
    if ty is int:
        return INT
    # Booleans are only okay if not guessed: for flags the default value
    # is actually a bit of a lie in that it indicates which of the flags
    # is the one we want.  See get_default() for more information.
    if ty is bool and not guessed_type:
        return BOOL
    if ty is float:
        return FLOAT
    if guessed_type:
        return STRING

    # Catch the common mistake of passing a ParamType subclass instead of
    # an instance.
    if __debug__:
        try:
            if issubclass(ty, ParamType):
                raise AssertionError('Attempted to use an uninstantiated '
                                     'parameter type (%s).' % ty)
        except TypeError:
            pass
    return FuncParamType(ty)
#: A dummy parameter type that just does nothing.  From a user's
#: perspective this appears to just be the same as `STRING` but internally
#: no string conversion takes place.  This is necessary to achieve the
#: same bytes/unicode behavior on Python 2/3 in situations where you want
#: to not convert argument types.  This is usually useful when working
#: with file paths as they can appear in bytes and unicode.
#:
#: For path related uses the :class:`Path` type is a better choice but
#: there are situations where an unprocessed type is useful which is why
#: it is provided.
#:
#: .. versionadded:: 4.0
UNPROCESSED = UnprocessedParamType()

#: A unicode string parameter type which is the implicit default.  This
#: can also be selected by using ``str`` as type.
STRING = StringParamType()

#: An integer parameter.  This can also be selected by using ``int`` as
#: type.
INT = IntParamType()

#: A floating point value parameter.  This can also be selected by using
#: ``float`` as type.
FLOAT = FloatParamType()

#: A boolean parameter.  This is the default for boolean flags.  This can
#: also be selected by using ``bool`` as a type.
BOOL = BoolParamType()

#: A UUID parameter.
UUID = UUIDParameterType()
| gpl-3.0 |
h2oai/h2o-dev | h2o-py/tests/testdir_munging/pyunit_trim.py | 10 | 1383 | import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
def trim_check():
    """Check string ``trim()`` on a frame column and on the extracted vec.

    Loads a cars dataset whose name column carries surrounding whitespace
    and verifies that trimming yields the expected cleaned strings both
    when trimming the frame column directly and when trimming the vec.
    """
    # Connect to a pre-existing cluster
    # testing on a string column
    frame = h2o.import_file(path=pyunit_utils.locate("smalldata/junit/cars_trim.csv"), col_types=["string","numeric","numeric","numeric","numeric","numeric","numeric","numeric"])

    expected = ["AMC Ambassador Brougham",
                "AMC Ambassador DPL",
                "AMC Ambassador SST"]

    # single column (frame)
    trimmed_frame = frame["name"].trim()
    for row, want in enumerate(expected):
        assert trimmed_frame[row,0] == want, "Expected '{}', but got {}".format(want, trimmed_frame[row,0])

    # single column (vec)
    vec = frame["name"]
    trimmed_vec = vec.trim()
    for row, want in enumerate(expected):
        # BUG FIX: these messages previously formatted trimmed_frame's
        # values instead of trimmed_vec's, hiding the actual failure.
        assert trimmed_vec[row,0] == want, "Expected '{}', but got {}".format(want, trimmed_vec[row,0])
if __name__ == "__main__":
    # Executed directly: run through the pyunit harness (handles cluster
    # setup/teardown).
    pyunit_utils.standalone_test(trim_check)
else:
    # Imported by the test runner: the cluster already exists, call directly.
    trim_check()
| apache-2.0 |
Sorsly/subtle | google-cloud-sdk/lib/third_party/httplib2/iri2uri.py | 706 | 3828 | """
iri2uri
Converts an IRI to a URI.
"""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__version__ = "1.0.0"
__license__ = "MIT"
__history__ = """
"""
import urlparse
# Convert an IRI to a URI following the rules in RFC 3987
#
# The characters we need to encode and escape are defined in the spec:
#
# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
#         / %xD0000-DFFFD / %xE1000-EFFFD

# Inclusive (low, high) codepoint ranges whose characters must be
# percent-encoded as UTF-8 octets.  Kept sorted ascending so the lookup
# in encode() can bail out early.
escape_range = [
    (0xA0, 0xD7FF),
    (0xE000, 0xF8FF),
    (0xF900, 0xFDCF),
    (0xFDF0, 0xFFEF),
    (0x10000, 0x1FFFD),
    (0x20000, 0x2FFFD),
    (0x30000, 0x3FFFD),
    (0x40000, 0x4FFFD),
    (0x50000, 0x5FFFD),
    (0x60000, 0x6FFFD),
    (0x70000, 0x7FFFD),
    (0x80000, 0x8FFFD),
    (0x90000, 0x9FFFD),
    (0xA0000, 0xAFFFD),
    (0xB0000, 0xBFFFD),
    (0xC0000, 0xCFFFD),
    (0xD0000, 0xDFFFD),
    (0xE1000, 0xEFFFD),
    (0xF0000, 0xFFFFD),
    (0x100000, 0x10FFFD),
]


def encode(c):
    """Percent-encode the single character *c* when its codepoint falls
    inside one of the reserved 'ucschar'/'iprivate' ranges; otherwise
    return it unchanged."""
    codepoint = ord(c)
    for low, high in escape_range:
        if codepoint < low:
            # Ranges are sorted ascending: nothing further can match.
            break
        if codepoint <= high:
            # Escape each UTF-8 octet of the character.
            return "".join(["%%%2X" % ord(octet) for octet in c.encode('utf-8')])
    return c
def iri2uri(uri):
    """Convert an IRI to a URI.

    IRIs must be passed in as unicode strings — do not UTF-8 encode the
    IRI before calling this function.  Non-unicode values are returned
    unchanged.
    """
    if not isinstance(uri, unicode):
        return uri
    scheme, authority, path, query, fragment = urlparse.urlsplit(uri)
    # The host part uses IDNA encoding rather than percent-escaping.
    authority = authority.encode('idna')
    # Reassemble, then for each character in 'ucschar' or 'iprivate':
    #   1. encode as utf-8
    #   2. %-encode each octet of that utf-8
    recombined = urlparse.urlunsplit((scheme, authority, path, query, fragment))
    return "".join([encode(c) for c in recombined])
if __name__ == "__main__":
    import unittest

    # Self-test: round-trip invariance for plain URIs and correct escaping
    # per URI component (Python 2 unicode literals).
    class Test(unittest.TestCase):

        def test_uris(self):
            """Test that URIs are invariant under the transformation."""
            invariant = [
                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
                u"http://www.ietf.org/rfc/rfc2396.txt",
                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
                u"mailto:John.Doe@example.com",
                u"news:comp.infosystems.www.servers.unix",
                u"tel:+1-816-555-1212",
                u"telnet://192.0.2.16:80/",
                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
            for uri in invariant:
                self.assertEqual(uri, iri2uri(uri))

        def test_iri(self):
            """ Test that the right type of escaping is done for each part of the URI."""
            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
            # iri2uri must be idempotent on already-converted URIs ...
            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
            # ... but byte strings are passed through untouched, not escaped.
            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))

    unittest.main()
| mit |
blockstack/blockstack-server | integration_tests/blockstack_integration_tests/scenarios/name_pre_reg_stacks_sendtokens_interleaved.py | 1 | 5075 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Blockstack
~~~~~
copyright: (c) 2014-2015 by Halfmoon Labs, Inc.
copyright: (c) 2016 by Blockstack.org
This file is part of Blockstack
Blockstack is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Blockstack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
"""
import testlib
import virtualchain
import blockstack
import json
# Token unit under test.
STACKS = testlib.TOKEN_TYPE_STACKS

# activate tokens
"""
TEST ENV BLOCKSTACK_EPOCH_1_END_BLOCK 682
TEST ENV BLOCKSTACK_EPOCH_2_END_BLOCK 683
TEST ENV BLOCKSTACK_EPOCH_3_END_BLOCK 684
TEST ENV BLOCKSTACK_EPOCH_2_NAMESPACE_LIFETIME_MULTIPLIER 1
TEST ENV BLOCKSTACK_EPOCH_3_NAMESPACE_LIFETIME_MULTIPLIER 1
"""

# Test wallets: wallets[0] funds transfers, wallets[1] owns the namespace,
# the rest start with zero token balance (the last five receive STACKS
# during the scenario).
wallets = [
    testlib.Wallet( "5JesPiN68qt44Hc2nT8qmyZ1JDwHebfoh9KQ52Lazb1m1LaKNj9", 100000000000 ),
    testlib.Wallet( "5KHqsiU9qa77frZb6hQy9ocV7Sus9RWJcQGYYBJJBb2Efj1o77e", 100000000000 ),
    testlib.Wallet( "5Kg5kJbQHvk1B64rJniEmgbD83FpZpbw2RjdAZEzTefs9ihN3Bz", 0 ), # no tokens yet
    testlib.Wallet( "5JuVsoS9NauksSkqEjbUZxWwgGDQbMwPsEfoRBSpLpgDX1RtLX7", 0 ),
    testlib.Wallet( "5KEpiSRr1BrT8vRD7LKGCEmudokTh1iMHbiThMQpLdwBwhDJB1T", 0 ),
    testlib.Wallet("9864768ccf5137392de5b5d6551a0f9f17279df2f82b4de7b905290f95fde66201", 0),
    testlib.Wallet("2e55007fec0f1d9a81700e56aa8ce24d7e6e245ede48c00663f930f99fae133601", 0),
    testlib.Wallet("9d6836cdaf81245d646988effe398338073892143b1185f4553e6118f231d1bf01", 0),
    testlib.Wallet("f9c9371b7a8cc6b5ef544457cdd565e5791d743f5d60c924265732147429414801", 0),
    testlib.Wallet("cd8d6bdf3dfd7b3d498539bb42cf218b77b0fda4f3bc119c7226d803e8425da901", 0),
]

# Expected starting consensus hash for the test harness.
consensus = "17ac43c1d8549c3181b200f1bf97eb7d"
def scenario( wallets, **kw ):
    """Drive the chain: create the STACKS-paid 'test' namespace, verify a
    token-less preorder fails, then fund five wallets and interleave token
    transfers with name preorders/registrations inside the same blocks.
    """
    # Namespace lifecycle: preorder -> reveal (paid in STACKS) -> ready.
    testlib.blockstack_namespace_preorder( "test", wallets[1].addr, wallets[0].privkey )
    testlib.next_block( **kw )

    testlib.blockstack_namespace_reveal( "test", wallets[1].addr, 52595, 250, 4, [6,5,4,3,2,1,0,0,0,0,0,0,0,0,0,0], 10, 10, wallets[0].privkey, version_bits=blockstack.NAMESPACE_VERSION_PAY_WITH_STACKS )
    testlib.next_block( **kw )

    testlib.blockstack_namespace_ready( "test", wallets[1].privkey )
    testlib.next_block( **kw )

    # wallets[2] holds no STACKS, so its preorder must be rejected.
    balances = testlib.get_wallet_balances(wallets[2])
    assert balances[wallets[2].addr][STACKS] == 0

    # should fail--not enough stacks
    testlib.blockstack_name_preorder( "foo.test", wallets[2].privkey, wallets[3].addr, safety_checks=False, expect_fail=True )
    testlib.next_block( **kw )

    name_cost = testlib.blockstack_get_name_token_cost('foo.test')
    assert name_cost['units'] == STACKS
    assert name_cost['amount'] > 0

    # send tokens and preorder multiple times in the block
    # should all succeed, BUT: force them to go in order through UTXO chaining
    for i in range(0, 5):
        name_recipient_privkey = wallets[-(i+1)].privkey
        name_recipient_addr = virtualchain.address_reencode(virtualchain.get_privkey_address(name_recipient_privkey))

        testlib.blockstack_send_tokens(name_recipient_addr, "STACKS", name_cost['amount'], wallets[0].privkey)
        testlib.send_funds(wallets[0].privkey, 1000000, name_recipient_addr)
        testlib.blockstack_name_preorder( "foo_{}.test".format(i), name_recipient_privkey, wallets[3].addr, safety_checks=False )
        testlib.blockstack_name_register( "foo_{}.test".format(i), name_recipient_privkey, wallets[3].addr, safety_checks=False )

    # Mine all interleaved operations in a single block.
    testlib.next_block(**kw)
def check( state_engine ):
    """Verify final chain state: namespace 'test' is ready (not merely
    revealed), no name preorders remain, and all five names exist and are
    owned by wallets[3].  Returns True on success, False (after printing
    the reason) otherwise.
    """
    # not revealed, but ready
    ns = state_engine.get_namespace_reveal( "test" )
    if ns is not None:
        print "namespace reveal exists"
        return False

    ns = state_engine.get_namespace( "test" )
    if ns is None:
        print "no namespace"
        return False

    if ns['namespace_id'] != 'test':
        print "wrong namespace"
        return False

    # not preordered
    for i in range(0, 5):
        name = 'foo_{}.test'.format(i)
        preorder = state_engine.get_name_preorder( name, virtualchain.make_payment_script(wallets[2].addr), wallets[3].addr )
        if preorder is not None:
            print "preorder exists"
            return False

        # registered
        name_rec = state_engine.get_name( name )
        if name_rec is None:
            print "name does not exist"
            return False

        # owned by
        if name_rec['address'] != wallets[3].addr or name_rec['sender'] != virtualchain.make_payment_script(wallets[3].addr):
            print "sender is wrong"
            return False

    return True
| gpl-3.0 |
ovnicraft/openerp-restaurant | mail/tests/test_mail_gateway.py | 40 | 40085 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.mail.tests.common import TestMail
from openerp.tools import mute_logger
import socket
MAIL_TEMPLATE = """Return-Path: <whatever-2a840@postmaster.twitter.com>
To: {to}
Received: by mail1.openerp.com (Postfix, from userid 10002)
id 5DF9ABFB2A; Fri, 10 Aug 2012 16:16:39 +0200 (CEST)
From: {email_from}
Subject: {subject}
MIME-Version: 1.0
Content-Type: multipart/alternative;
boundary="----=_Part_4200734_24778174.1344608186754"
Date: Fri, 10 Aug 2012 14:16:26 +0000
Message-ID: {msg_id}
{extra}
------=_Part_4200734_24778174.1344608186754
Content-Type: text/plain; charset=utf-8
Content-Transfer-Encoding: quoted-printable
Please call me as soon as possible this afternoon!
--
Sylvie
------=_Part_4200734_24778174.1344608186754
Content-Type: text/html; charset=utf-8
Content-Transfer-Encoding: quoted-printable
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>=20
<meta http-equiv=3D"Content-Type" content=3D"text/html; charset=3Dutf-8" />
</head>=20
<body style=3D"margin: 0; padding: 0; background: #ffffff;-webkit-text-size-adjust: 100%;">=20
<p>Please call me as soon as possible this afternoon!</p>
<p>--<br/>
Sylvie
<p>
</body>
</html>
------=_Part_4200734_24778174.1344608186754--
"""
MAIL_TEMPLATE_PLAINTEXT = """Return-Path: <whatever-2a840@postmaster.twitter.com>
To: {to}
Received: by mail1.openerp.com (Postfix, from userid 10002)
id 5DF9ABFB2A; Fri, 10 Aug 2012 16:16:39 +0200 (CEST)
From: Sylvie Lelitre <sylvie.lelitre@agrolait.com>
Subject: {subject}
MIME-Version: 1.0
Content-Type: text/plain
Date: Fri, 10 Aug 2012 14:16:26 +0000
Message-ID: {msg_id}
{extra}
Please call me as soon as possible this afternoon!
--
Sylvie
"""
MAIL_MULTIPART_MIXED = """Return-Path: <ignasse.carambar@gmail.com>
X-Original-To: raoul@grosbedon.fr
Delivered-To: raoul@grosbedon.fr
Received: by mail1.grosbedon.com (Postfix, from userid 10002)
id E8166BFACA; Fri, 23 Aug 2013 13:18:01 +0200 (CEST)
X-Spam-Checker-Version: SpamAssassin 3.3.1 (2010-03-16) on mail1.grosbedon.com
X-Spam-Level:
X-Spam-Status: No, score=-2.6 required=5.0 tests=BAYES_00,FREEMAIL_FROM,
HTML_MESSAGE,RCVD_IN_DNSWL_LOW autolearn=unavailable version=3.3.1
Received: from mail-ie0-f173.google.com (mail-ie0-f173.google.com [209.85.223.173])
by mail1.grosbedon.com (Postfix) with ESMTPS id 9BBD7BFAAA
for <raoul@openerp.fr>; Fri, 23 Aug 2013 13:17:55 +0200 (CEST)
Received: by mail-ie0-f173.google.com with SMTP id qd12so575130ieb.4
for <raoul@grosbedon.fr>; Fri, 23 Aug 2013 04:17:54 -0700 (PDT)
DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed;
d=gmail.com; s=20120113;
h=mime-version:date:message-id:subject:from:to:content-type;
bh=dMNHV52EC7GAa7+9a9tqwT9joy9z+1950J/3A6/M/hU=;
b=DGuv0VjegdSrEe36ADC8XZ9Inrb3Iu+3/52Bm+caltddXFH9yewTr0JkCRQaJgMwG9
qXTQgP8qu/VFEbCh6scu5ZgU1hknzlNCYr3LT+Ih7dAZVUEHUJdwjzUU1LFV95G2RaCd
/Lwff6CibuUvrA+0CBO7IRKW0Sn5j0mukYu8dbaKsm6ou6HqS8Nuj85fcXJfHSHp6Y9u
dmE8jBh3fHCHF/nAvU+8aBNSIzl1FGfiBYb2jCoapIuVFitKR4q5cuoodpkH9XqqtOdH
DG+YjEyi8L7uvdOfN16eMr7hfUkQei1yQgvGu9/5kXoHg9+Gx6VsZIycn4zoaXTV3Nhn
nu4g==
MIME-Version: 1.0
X-Received: by 10.50.124.65 with SMTP id mg1mr1144467igb.43.1377256674216;
Fri, 23 Aug 2013 04:17:54 -0700 (PDT)
Received: by 10.43.99.71 with HTTP; Fri, 23 Aug 2013 04:17:54 -0700 (PDT)
Date: Fri, 23 Aug 2013 13:17:54 +0200
Message-ID: <CAP76m_V4BY2F7DWHzwfjteyhW8L2LJswVshtmtVym+LUJ=rASQ@mail.gmail.com>
Subject: Test mail multipart/mixed
From: =?ISO-8859-1?Q?Raoul Grosbedon=E9e?= <ignasse.carambar@gmail.com>
To: Followers of ASUSTeK-Joseph-Walters <raoul@grosbedon.fr>
Content-Type: multipart/mixed; boundary=089e01536c4ed4d17204e49b8e96
--089e01536c4ed4d17204e49b8e96
Content-Type: multipart/alternative; boundary=089e01536c4ed4d16d04e49b8e94
--089e01536c4ed4d16d04e49b8e94
Content-Type: text/plain; charset=ISO-8859-1
Should create a multipart/mixed: from gmail, *bold*, with attachment.
--
Marcel Boitempoils.
--089e01536c4ed4d16d04e49b8e94
Content-Type: text/html; charset=ISO-8859-1
<div dir="ltr">Should create a multipart/mixed: from gmail, <b>bold</b>, with attachment.<br clear="all"><div><br></div>-- <br>Marcel Boitempoils.</div>
--089e01536c4ed4d16d04e49b8e94--
--089e01536c4ed4d17204e49b8e96
Content-Type: text/plain; charset=US-ASCII; name="test.txt"
Content-Disposition: attachment; filename="test.txt"
Content-Transfer-Encoding: base64
X-Attachment-Id: f_hkpb27k00
dGVzdAo=
--089e01536c4ed4d17204e49b8e96--"""
class TestMailgateway(TestMail):
def test_00_message_parse(self):
""" Testing incoming emails parsing """
cr, uid = self.cr, self.uid
res = self.mail_thread.message_parse(cr, uid, MAIL_TEMPLATE_PLAINTEXT)
self.assertIn('Please call me as soon as possible this afternoon!', res.get('body', ''),
'message_parse: missing text in text/plain body after parsing')
res = self.mail_thread.message_parse(cr, uid, MAIL_TEMPLATE)
self.assertIn('<p>Please call me as soon as possible this afternoon!</p>', res.get('body', ''),
'message_parse: missing html in multipart/alternative body after parsing')
res = self.mail_thread.message_parse(cr, uid, MAIL_MULTIPART_MIXED)
self.assertNotIn('Should create a multipart/mixed: from gmail, *bold*, with attachment', res.get('body', ''),
'message_parse: text version should not be in body after parsing multipart/mixed')
self.assertIn('<div dir="ltr">Should create a multipart/mixed: from gmail, <b>bold</b>, with attachment.<br clear="all"><div><br></div>', res.get('body', ''),
'message_parse: html version should be in body after parsing multipart/mixed')
@mute_logger('openerp.addons.mail.mail_thread', 'openerp.osv.orm')
def test_10_message_process(self):
""" Testing incoming emails processing. """
cr, uid, user_raoul = self.cr, self.uid, self.user_raoul
def format_and_process(template, to='groups@example.com, other@gmail.com', subject='Frogs',
extra='', email_from='Sylvie Lelitre <test.sylvie.lelitre@agrolait.com>',
msg_id='<1198923581.41972151344608186760.JavaMail@agrolait.com>',
model=None):
self.assertEqual(self.mail_group.search(cr, uid, [('name', '=', subject)]), [])
mail = template.format(to=to, subject=subject, extra=extra, email_from=email_from, msg_id=msg_id)
self.mail_thread.message_process(cr, uid, model, mail)
return self.mail_group.search(cr, uid, [('name', '=', subject)])
# --------------------------------------------------
# Data creation
# --------------------------------------------------
# groups@.. will cause the creation of new mail groups
self.mail_group_model_id = self.ir_model.search(cr, uid, [('model', '=', 'mail.group')])[0]
alias_id = self.mail_alias.create(cr, uid, {
'alias_name': 'groups',
'alias_user_id': False,
'alias_model_id': self.mail_group_model_id,
'alias_parent_model_id': self.mail_group_model_id,
'alias_parent_thread_id': self.group_pigs_id,
'alias_contact': 'everyone'})
# --------------------------------------------------
# Test1: new record creation
# --------------------------------------------------
# Do: incoming mail from an unknown partner on an alias creates a new mail_group "frogs"
self._init_mock_build_email()
frog_groups = format_and_process(MAIL_TEMPLATE, to='groups@example.com, other@gmail.com')
sent_emails = self._build_email_kwargs_list
# Test: one group created by mailgateway administrator
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
res = self.mail_group.perm_read(cr, uid, [frog_group.id], details=False)
self.assertEqual(res[0].get('create_uid'), uid,
'message_process: group should have been created by uid as alias_user__id is False on the alias')
# Test: one message that is the incoming email
self.assertEqual(len(frog_group.message_ids), 1,
'message_process: newly created group should have the incoming email in message_ids')
msg = frog_group.message_ids[0]
self.assertEqual('Frogs', msg.subject,
'message_process: newly created group should have the incoming email as first message')
self.assertIn('Please call me as soon as possible this afternoon!', msg.body,
'message_process: newly created group should have the incoming email as first message')
self.assertEqual('email', msg.type,
'message_process: newly created group should have an email as first message')
self.assertEqual('Discussions', msg.subtype_id.name,
'message_process: newly created group should not have a log first message but an email')
# Test: message: unknown email address -> message has email_from, not author_id
self.assertFalse(msg.author_id,
'message_process: message on created group should not have an author_id')
self.assertIn('test.sylvie.lelitre@agrolait.com', msg.email_from,
'message_process: message on created group should have an email_from')
# Test: followers: nobody
self.assertEqual(len(frog_group.message_follower_ids), 0, 'message_process: newly create group should not have any follower')
# Test: sent emails: no-one
self.assertEqual(len(sent_emails), 0,
'message_process: should create emails without any follower added')
# Data: unlink group
frog_group.unlink()
# Do: incoming email from an unknown partner on a Partners only alias -> bounce
self._init_mock_build_email()
self.mail_alias.write(cr, uid, [alias_id], {'alias_contact': 'partners'})
frog_groups = format_and_process(MAIL_TEMPLATE, to='groups@example.com, other2@gmail.com')
# Test: no group created
self.assertTrue(len(frog_groups) == 0)
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Partners alias should send a bounce email')
self.assertIn('Frogs', sent_emails[0].get('subject'),
'message_process: bounce email on Partners alias should contain the original subject')
self.assertIn('test.sylvie.lelitre@agrolait.com', sent_emails[0].get('email_to'),
'message_process: bounce email on Partners alias should have original email sender as recipient')
# Do: incoming email from an unknown partner on a Followers only alias -> bounce
self._init_mock_build_email()
self.mail_alias.write(cr, uid, [alias_id], {'alias_contact': 'followers'})
frog_groups = format_and_process(MAIL_TEMPLATE, to='groups@example.com, other3@gmail.com')
# Test: no group created
self.assertTrue(len(frog_groups) == 0)
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Followers alias should send a bounce email')
self.assertIn('Frogs', sent_emails[0].get('subject'),
'message_process: bounce email on Followers alias should contain the original subject')
self.assertIn('test.sylvie.lelitre@agrolait.com', sent_emails[0].get('email_to'),
'message_process: bounce email on Followers alias should have original email sender as recipient')
# Do: incoming email from a known partner on a Partners alias -> ok (+ test on alias.user_id)
self.mail_alias.write(cr, uid, [alias_id], {'alias_user_id': self.user_raoul_id, 'alias_contact': 'partners'})
p1id = self.res_partner.create(cr, uid, {'name': 'Sylvie Lelitre', 'email': 'test.sylvie.lelitre@agrolait.com'})
p2id = self.res_partner.create(cr, uid, {'name': 'Other Poilvache', 'email': 'other4@gmail.com'})
self._init_mock_build_email()
frog_groups = format_and_process(MAIL_TEMPLATE, to='groups@example.com, other4@gmail.com')
sent_emails = self._build_email_kwargs_list
# Test: one group created by Raoul
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
res = self.mail_group.perm_read(cr, uid, [frog_group.id], details=False)
self.assertEqual(res[0].get('create_uid'), self.user_raoul_id,
'message_process: group should have been created by alias_user_id')
# Test: one message that is the incoming email
self.assertEqual(len(frog_group.message_ids), 1,
'message_process: newly created group should have the incoming email in message_ids')
msg = frog_group.message_ids[0]
# Test: message: author found
self.assertEqual(p1id, msg.author_id.id,
'message_process: message on created group should have Sylvie as author_id')
self.assertIn('Sylvie Lelitre <test.sylvie.lelitre@agrolait.com>', msg.email_from,
'message_process: message on created group should have have an email_from')
# Test: author (not recipient and not Raoul (as alias owner)) added as follower
frog_follower_ids = set([p.id for p in frog_group.message_follower_ids])
self.assertEqual(frog_follower_ids, set([p1id]),
'message_process: newly created group should have 1 follower (author, not creator, not recipients)')
# Test: sent emails: no-one, no bounce effet
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 0,
'message_process: should not bounce incoming emails')
# Data: unlink group
frog_group.unlink()
# Do: incoming email from a not follower Partner on a Followers only alias -> bounce
self._init_mock_build_email()
self.mail_alias.write(cr, uid, [alias_id], {'alias_user_id': False, 'alias_contact': 'followers'})
frog_groups = format_and_process(MAIL_TEMPLATE, to='groups@example.com, other5@gmail.com')
# Test: no group created
self.assertTrue(len(frog_groups) == 0)
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Partners alias should send a bounce email')
# Do: incoming email from a parent document follower on a Followers only alias -> ok
self._init_mock_build_email()
self.mail_group.message_subscribe(cr, uid, [self.group_pigs_id], [p1id])
frog_groups = format_and_process(MAIL_TEMPLATE, to='groups@example.com, other6@gmail.com')
# Test: one group created by Raoul (or Sylvie maybe, if we implement it)
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one message that is the incoming email
self.assertEqual(len(frog_group.message_ids), 1,
'message_process: newly created group should have the incoming email in message_ids')
# Test: author (and not recipient) added as follower
frog_follower_ids = set([p.id for p in frog_group.message_follower_ids])
self.assertEqual(frog_follower_ids, set([p1id]),
'message_process: newly created group should have 1 follower (author, not creator, not recipients)')
# Test: sent emails: no-one, no bounce effet
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 0,
'message_process: should not bounce incoming emails')
# --------------------------------------------------
# Test2: update-like alias
# --------------------------------------------------
# Do: Pigs alias is restricted, should bounce
self._init_mock_build_email()
self.mail_group.write(cr, uid, [frog_group.id], {'alias_name': 'frogs', 'alias_contact': 'followers', 'alias_force_thread_id': frog_group.id})
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='other4@gmail.com',
msg_id='<1198923581.41972151344608186760.JavaMail.diff1@agrolait.com>',
to='frogs@example.com>', subject='Re: news')
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: email bounced
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: incoming email on Followers alias should send a bounce email')
self.assertIn('Re: news', sent_emails[0].get('subject'),
'message_process: bounce email on Followers alias should contain the original subject')
# Do: Pigs alias is restricted, should accept Followers
self._init_mock_build_email()
self.mail_group.message_subscribe(cr, uid, [frog_group.id], [p2id])
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='other4@gmail.com',
msg_id='<1198923581.41972151344608186799.JavaMail.diff1@agrolait.com>',
to='frogs@example.com>', subject='Re: cats')
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one new message
self.assertEqual(len(frog_group.message_ids), 2, 'message_process: group should contain 2 messages after reply')
# Test: sent emails: 1 (Sylvie copy of the incoming email, but no bounce)
sent_emails = self._build_email_kwargs_list
self.assertEqual(len(sent_emails), 1,
'message_process: one email should have been generated')
self.assertIn('test.sylvie.lelitre@agrolait.com', sent_emails[0].get('email_to')[0],
'message_process: email should be sent to Sylvie')
self.mail_group.message_unsubscribe(cr, uid, [frog_group.id], [p2id])
# --------------------------------------------------
# Test3: discussion and replies
# --------------------------------------------------
# Do: even with a wrong destination, a reply should end up in the correct thread
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='other4@gmail.com',
msg_id='<1198923581.41972151344608186760.JavaMail.diff1@agrolait.com>',
to='erroneous@example.com>', subject='Re: news',
extra='In-Reply-To: <1198923581.41972151344608186799.JavaMail.diff1@agrolait.com>\n')
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one new message
self.assertEqual(len(frog_group.message_ids), 3, 'message_process: group should contain 3 messages after reply')
# Test: author (and not recipient) added as follower
frog_follower_ids = set([p.id for p in frog_group.message_follower_ids])
self.assertEqual(frog_follower_ids, set([p1id, p2id]),
'message_process: after reply, group should have 2 followers')
# Do: incoming email with ref holding model / res_id but that does not match any message in the thread: must raise since OpenERP saas-3
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE, email_from='other5@gmail.com',
to='noone@example.com', subject='spam',
extra='In-Reply-To: <12321321-openerp-%d-mail.group@example.com>' % frog_group.id,
msg_id='<1.1.JavaMail.new@agrolait.com>')
# When 6.1 messages are present, compat mode is available
# Create a fake 6.1 message
tmp_msg_id = self.mail_message.create(cr, uid, {'message_id': False, 'model': 'mail.group', 'res_id': frog_group.id})
# Do: compat mode accepts partial-matching emails
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='other5@gmail.com',
msg_id='<1.2.JavaMail.new@agrolait.com>',
to='noone@example.com>', subject='spam',
extra='In-Reply-To: <12321321-openerp-%d-mail.group@%s>' % (frog_group.id, socket.gethostname()))
self.mail_message.unlink(cr, uid, [tmp_msg_id])
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: one new message
self.assertEqual(len(frog_group.message_ids), 4, 'message_process: group should contain 4 messages after reply')
# 6.1 compat mode should not work if hostname does not match!
tmp_msg_id = self.mail_message.create(cr, uid, {'message_id': False, 'model': 'mail.group', 'res_id': frog_group.id})
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE, email_from='other5@gmail.com',
msg_id='<1.3.JavaMail.new@agrolait.com>',
to='noone@example.com>', subject='spam',
extra='In-Reply-To: <12321321-openerp-%d-mail.group@neighbor.com>' % frog_group.id)
self.mail_message.unlink(cr, uid, [tmp_msg_id])
# Do: due to some issue, same email goes back into the mailgateway
frog_groups = format_and_process(MAIL_TEMPLATE, email_from='other4@gmail.com',
msg_id='<1198923581.41972151344608186760.JavaMail.diff1@agrolait.com>',
subject='Re: news', extra='In-Reply-To: <1198923581.41972151344608186799.JavaMail.diff1@agrolait.com>\n')
# Test: no group 'Re: news' created, still only 1 Frogs group
self.assertEqual(len(frog_groups), 0,
'message_process: reply on Frogs should not have created a new group with new subject')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
self.assertEqual(len(frog_groups), 1,
'message_process: reply on Frogs should not have created a duplicate group with old subject')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: no new message
self.assertEqual(len(frog_group.message_ids), 4, 'message_process: message with already existing message_id should not have been duplicated')
# Test: message_id is still unique
msg_ids = self.mail_message.search(cr, uid, [('message_id', 'ilike', '<1198923581.41972151344608186760.JavaMail.diff1@agrolait.com>')])
self.assertEqual(len(msg_ids), 1,
'message_process: message with already existing message_id should not have been duplicated')
# --------------------------------------------------
# Test4: email_from and partner finding
# --------------------------------------------------
# Data: extra partner with Raoul's email -> test the 'better author finding'
extra_partner_id = self.res_partner.create(cr, uid, {'name': 'A-Raoul', 'email': 'test_raoul@email.com'})
# Do: post a new message, with a known partner -> duplicate emails -> partner
format_and_process(MAIL_TEMPLATE, email_from='Lombrik Lubrik <test_raoul@email.com>',
subject='Re: news (2)',
msg_id='<1198923581.41972151344608186760.JavaMail.new1@agrolait.com>',
extra='In-Reply-To: <1198923581.41972151344608186799.JavaMail.diff1@agrolait.com>')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: author is A-Raoul (only existing)
self.assertEqual(frog_group.message_ids[0].author_id.id, extra_partner_id,
'message_process: email_from -> author_id wrong')
# Do: post a new message, with a known partner -> duplicate emails -> user
frog_group.message_unsubscribe([extra_partner_id])
self.res_users.write(cr, uid, self.user_raoul_id, {'email': 'test_raoul@email.com'})
format_and_process(MAIL_TEMPLATE, email_from='Lombrik Lubrik <test_raoul@email.com>',
to='groups@example.com', subject='Re: news (3)',
msg_id='<1198923581.41972151344608186760.JavaMail.new2@agrolait.com>',
extra='In-Reply-To: <1198923581.41972151344608186799.JavaMail.diff1@agrolait.com>')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: author is Raoul (user), not A-Raoul
self.assertEqual(frog_group.message_ids[0].author_id.id, self.partner_raoul_id,
'message_process: email_from -> author_id wrong')
# Do: post a new message, with a known partner -> duplicate emails -> partner because is follower
frog_group.message_unsubscribe([self.partner_raoul_id])
frog_group.message_subscribe([extra_partner_id])
raoul_email = self.user_raoul.email
self.res_users.write(cr, uid, self.user_raoul_id, {'email': 'test_raoul@email.com'})
format_and_process(MAIL_TEMPLATE, email_from='Lombrik Lubrik <test_raoul@email.com>',
to='groups@example.com', subject='Re: news (3)',
msg_id='<1198923581.41972151344608186760.JavaMail.new3@agrolait.com>',
extra='In-Reply-To: <1198923581.41972151344608186799.JavaMail.diff1@agrolait.com>')
frog_groups = self.mail_group.search(cr, uid, [('name', '=', 'Frogs')])
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
# Test: author is Raoul (user), not A-Raoul
self.assertEqual(frog_group.message_ids[0].author_id.id, extra_partner_id,
'message_process: email_from -> author_id wrong')
self.res_users.write(cr, uid, self.user_raoul_id, {'email': raoul_email})
# --------------------------------------------------
# Test5: misc gateway features
# --------------------------------------------------
# Do: incoming email with model that does not accepts incoming emails must raise
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE,
to='noone@example.com', subject='spam', extra='', model='res.country',
msg_id='<1198923581.41972151344608186760.JavaMail.new4@agrolait.com>')
# Do: incoming email without model and without alias must raise
self.assertRaises(ValueError,
format_and_process,
MAIL_TEMPLATE,
to='noone@example.com', subject='spam', extra='',
msg_id='<1198923581.41972151344608186760.JavaMail.new5@agrolait.com>')
# Do: incoming email with model that accepting incoming emails as fallback
frog_groups = format_and_process(MAIL_TEMPLATE,
to='noone@example.com',
subject='Spammy', extra='', model='mail.group',
msg_id='<1198923581.41972151344608186760.JavaMail.new6@agrolait.com>')
self.assertEqual(len(frog_groups), 1,
'message_process: erroneous email but with a fallback model should have created a new mail.group')
# Do: incoming email in plaintext should be stored as html
frog_groups = format_and_process(MAIL_TEMPLATE_PLAINTEXT,
to='groups@example.com', subject='Frogs Return', extra='',
msg_id='<deadcafe.1337@smtp.agrolait.com>')
# Test: one group created with one message
self.assertEqual(len(frog_groups), 1, 'message_process: a new mail.group should have been created')
frog_group = self.mail_group.browse(cr, uid, frog_groups[0])
msg = frog_group.message_ids[0]
# Test: plain text content should be wrapped and stored as html
self.assertIn('<pre>\nPlease call me as soon as possible this afternoon!\n\n--\nSylvie\n</pre>', msg.body,
'message_process: plaintext incoming email incorrectly parsed')
@mute_logger('openerp.addons.mail.mail_thread', 'openerp.osv.orm')
def test_20_thread_parent_resolution(self):
""" Testing parent/child relationships are correctly established when processing incoming mails """
cr, uid = self.cr, self.uid
def format(template, to='Pretty Pigs <group+pigs@example.com>, other@gmail.com', subject='Re: 1',
extra='', email_from='Sylvie Lelitre <test.sylvie.lelitre@agrolait.com>',
msg_id='<1198923581.41972151344608186760.JavaMail@agrolait.com>'):
return template.format(to=to, subject=subject, extra=extra, email_from=email_from, msg_id=msg_id)
group_pigs = self.mail_group.browse(cr, uid, self.group_pigs_id)
msg1 = group_pigs.message_post(body='My Body', subject='1')
msg2 = group_pigs.message_post(body='My Body', subject='2')
msg1, msg2 = self.mail_message.browse(cr, uid, [msg1, msg2])
self.assertTrue(msg1.message_id, "message_process: new message should have a proper message_id")
# Reply to msg1, make sure the reply is properly attached using the various reply identification mechanisms
# 0. Direct alias match
reply_msg1 = format(MAIL_TEMPLATE, to='Pretty Pigs <group+pigs@example.com>',
extra='In-Reply-To: %s' % msg1.message_id,
msg_id='<1198923581.41972151344608186760.JavaMail.2@agrolait.com>')
self.mail_group.message_process(cr, uid, None, reply_msg1)
# 1. In-Reply-To header
reply_msg2 = format(MAIL_TEMPLATE, to='erroneous@example.com',
extra='In-Reply-To: %s' % msg1.message_id,
msg_id='<1198923581.41972151344608186760.JavaMail.3@agrolait.com>')
self.mail_group.message_process(cr, uid, None, reply_msg2)
# 2. References header
reply_msg3 = format(MAIL_TEMPLATE, to='erroneous@example.com',
extra='References: <2233@a.com>\r\n\t<3edss_dsa@b.com> %s' % msg1.message_id,
msg_id='<1198923581.41972151344608186760.JavaMail.4@agrolait.com>')
self.mail_group.message_process(cr, uid, None, reply_msg3)
# 3. Subject contains [<ID>] + model passed to message+process -> only attached to group, but not to mail (not in msg1.child_ids)
reply_msg4 = format(MAIL_TEMPLATE, to='erroneous@example.com',
extra='', subject='Re: [%s] 1' % self.group_pigs_id,
msg_id='<1198923581.41972151344608186760.JavaMail.5@agrolait.com>')
self.mail_group.message_process(cr, uid, 'mail.group', reply_msg4)
group_pigs.refresh()
msg1.refresh()
self.assertEqual(6, len(group_pigs.message_ids), 'message_process: group should contain 6 messages')
self.assertEqual(3, len(msg1.child_ids), 'message_process: msg1 should have 3 children now')
def test_30_private_discussion(self):
""" Testing private discussion between partners. """
cr, uid = self.cr, self.uid
def format(template, to='Pretty Pigs <group+pigs@example.com>, other@gmail.com', subject='Re: 1',
extra='', email_from='Sylvie Lelitre <test.sylvie.lelitre@agrolait.com>',
msg_id='<1198923581.41972151344608186760.JavaMail@agrolait.com>'):
return template.format(to=to, subject=subject, extra=extra, email_from=email_from, msg_id=msg_id)
# Do: Raoul writes to Bert and Administrator, with a thread_model in context that should not be taken into account
msg1_pids = [self.partner_admin_id, self.partner_bert_id]
msg1_id = self.mail_thread.message_post(
cr, self.user_raoul_id, False,
partner_ids=msg1_pids,
subtype='mail.mt_comment',
context={'thread_model': 'mail.group'}
)
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg1_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = msg1_pids
test_nids = msg1_pids
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients')
self.assertEqual(msg.model, False,
'message_post: private discussion: context key "thread_model" not correctly ignored when having no res_id')
# Test: message-id
self.assertIn('openerp-private', msg.message_id,
'message_post: private discussion: message-id should contain the private keyword')
# Do: Bert replies through mailgateway (is a customer)
reply_message = format(MAIL_TEMPLATE, to='not_important@mydomain.com',
email_from='bert@bert.fr',
extra='In-Reply-To: %s' % msg.message_id,
msg_id='<test30.JavaMail.0@agrolait.com>')
self.mail_thread.message_process(cr, uid, None, reply_message)
# Test: last mail_message created
msg2_id = self.mail_message.search(cr, uid, [], limit=1)[0]
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg2_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = [self.partner_admin_id, self.partner_raoul_id]
test_nids = test_pids
self.assertEqual(msg.author_id.id, self.partner_bert_id,
'message_post: private discussion: wrong author through mailgatewya based on email')
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients when replying')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients when replying')
# Do: Bert replies through chatter (is a customer)
msg3_id = self.mail_thread.message_post(
cr, uid, False,
author_id=self.partner_bert_id,
parent_id=msg1_id, subtype='mail.mt_comment')
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg3_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = [self.partner_admin_id, self.partner_raoul_id]
test_nids = test_pids
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients when replying')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients when replying')
# Do: Administrator replies
msg3_id = self.mail_thread.message_post(cr, uid, False, parent_id=msg3_id, subtype='mail.mt_comment')
# Test: message recipients
msg = self.mail_message.browse(cr, uid, msg3_id)
msg_pids = [p.id for p in msg.partner_ids]
msg_nids = [p.id for p in msg.notified_partner_ids]
test_pids = [self.partner_bert_id, self.partner_raoul_id]
test_nids = test_pids
self.assertEqual(set(msg_pids), set(test_pids),
'message_post: private discussion: incorrect recipients when replying')
self.assertEqual(set(msg_nids), set(test_nids),
'message_post: private discussion: incorrect notified recipients when replying')
| agpl-3.0 |
petewarden/tensorflow | tensorflow/python/kernel_tests/random/stateless_random_ops_test.py | 9 | 22220 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for stateless random ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from absl.testing import parameterized
import numpy as np
from tensorflow.python.compat import compat
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import stateless_random_ops as stateless
from tensorflow.python.platform import test
# Note that in theory each test will reset the eager context and may choose to
# hide some devices, so we shouldn't cache this transient info. Tests in this
# file don't make those config changes, so caching is fine. It provides a good
# speed-up.
_cached_device = None
def get_device():
global _cached_device
if _cached_device is not None:
return _cached_device
# Precedence from high to low
for device_type in ('XLA_GPU', 'GPU', 'XLA_CPU', 'CPU'):
devices = config.list_logical_devices(device_type)
if devices:
_cached_device = devices[0]
return _cached_device
raise ValueError('Cannot find any suitable device. Available devices: %s' %
config.list_logical_devices())
BEFORE_EXPIRE = (2020, 10, 24)
AFTER_EXPIRE = (2020, 10, 26)
def invert_philox(key, value):
"""Invert the Philox bijection."""
key = np.array(key, dtype=np.uint32)
value = np.array(value, dtype=np.uint32)
step = np.array([0x9E3779B9, 0xBB67AE85], dtype=np.uint32)
for n in range(10)[::-1]:
key0, key1 = key + n * step
v0 = value[3] * 0x991a7cdb & 0xffffffff
v2 = value[1] * 0x6d7cae67 & 0xffffffff
hi0 = v0 * 0xD2511F53 >> 32
hi1 = v2 * 0xCD9E8D57 >> 32
v1 = hi1 ^ value[0] ^ key0
v3 = hi0 ^ value[2] ^ key1
value = v0, v1, v2, v3
return np.array(value)
SEEDS = ((7, 17), (11, 5), (2, 3))
SEED_TYPES = [dtypes.int32, dtypes.int64]
def float_cases(shape_dtypes=(None,)):
cases = (
# Uniform distribution, with and without range
('uniform', stateless.stateless_random_uniform, random_ops.random_uniform,
{}),
('uniform2', stateless.stateless_random_uniform,
random_ops.random_uniform, dict(minval=2.2, maxval=7.1)),
# Normal distribution, with and without mean+stddev
('normal', stateless.stateless_random_normal, random_ops.random_normal,
{}),
('normal2', stateless.stateless_random_normal, random_ops.random_normal,
dict(mean=2, stddev=3)),
# Truncated normal distribution, with and without mean+stddev
('trnorm', stateless.stateless_truncated_normal,
random_ops.truncated_normal, {}),
('trnorm2', stateless.stateless_truncated_normal,
random_ops.truncated_normal, dict(mean=3, stddev=4)),
)
# Explicitly passing in params because capturing cell variable from loop is
# problematic in Python
def wrap(op, dtype, shape, shape_dtype, kwds, seed):
device_type = get_device().device_type
# Some dtypes are not supported on some devices
if (dtype == dtypes.float16 and device_type in ('XLA_GPU', 'XLA_CPU') or
dtype == dtypes.bfloat16 and device_type == 'GPU'):
dtype = dtypes.float32
shape_ = (constant_op.constant(shape, dtype=shape_dtype)
if shape_dtype is not None else shape)
return op(seed=seed, shape=shape_, dtype=dtype, **kwds)
def _name(a):
if hasattr(a, 'name'):
return a.name
else:
return a
for dtype in dtypes.float16, dtypes.bfloat16, dtypes.float32, dtypes.float64:
for shape_dtype in shape_dtypes:
for shape in (), (3,), (2, 5):
for name, stateless_op, stateful_op, kwds in cases:
yield (('%s_%s_%s_%s' %
(name, _name(dtype), shape, _name(shape_dtype))).replace(
' ', ''),
functools.partial(wrap, stateless_op, dtype, shape,
shape_dtype, kwds),
functools.partial(wrap, stateful_op, dtype, shape, shape_dtype,
kwds))
def int_cases(shape_dtypes=(None,), minval_maxval=None):
  """Yields (name, stateless_fn, stateful_fn) cases for integer uniform draws.

  `minval_maxval` is an iterable of (minval, maxval) bound pairs; it defaults
  to a single pair ((2, 11111),).  `shape_dtypes` selects whether the shape is
  passed as a plain tuple (None) or as a constant tensor of that dtype.
  """

  def make_op(op, minval, maxval, shape, shape_dtype, dtype, seed):
    # Optionally wrap the static shape in a constant tensor of `shape_dtype`.
    if shape_dtype is None:
      shape_arg = shape
    else:
      shape_arg = constant_op.constant(shape, dtype=shape_dtype)
    return op(seed=seed, shape=shape_arg, minval=minval, maxval=maxval,
              dtype=dtype)

  bounds = minval_maxval if minval_maxval is not None else ((2, 11111),)
  for low, high in bounds:
    for shape_dtype in shape_dtypes:
      for shape in (), (3,), (2, 5):
        for out_dtype in (dtypes.int32, dtypes.int64):
          stateless_fn = functools.partial(
              make_op, stateless.stateless_random_uniform, low, high, shape,
              shape_dtype, out_dtype)
          stateful_fn = functools.partial(
              make_op, random_ops.random_uniform, low, high, shape,
              shape_dtype, out_dtype)
          yield ('uniform_%s_%s' % (low, high), stateless_fn, stateful_fn)
def multinomial_cases():
  """Yields (name, stateless_fn, stateful_fn) cases for multinomial sampling."""
  num_samples = 10

  def make_op(op, logits, logits_dtype, output_dtype, seed):
    logits_t = constant_op.constant(logits, dtype=logits_dtype)
    return op(seed=seed, logits=logits_t, num_samples=num_samples,
              output_dtype=output_dtype)

  logits_options = ([[0.1, 0.25, 0.5, 0.15]],
                    [[0.5, 0.5], [0.8, 0.2], [0.25, 0.75]])
  for logits_dtype in (np.float16, np.float32, np.float64):
    for output_dtype in (dtypes.int32, dtypes.int64):
      for logits in logits_options:
        yield ('multinomial',
               functools.partial(make_op, stateless.stateless_multinomial,
                                 logits, logits_dtype, output_dtype),
               functools.partial(make_op, random_ops.multinomial,
                                 logits, logits_dtype, output_dtype))
def gamma_cases():
  """Yields (name, stateless_fn, stateful_fn) cases for gamma sampling."""

  def wrap(op, alpha, dtype, shape, seed):
    return op(seed=seed, shape=shape,
              alpha=constant_op.constant(alpha, dtype=dtype), dtype=dtype)

  for dtype in np.float16, np.float32, np.float64:
    for alpha in ([[.5, 1., 2.]], [[0.5, 0.5], [0.8, 0.2], [0.25, 0.75]]):
      # The stateless op is given the full output shape, while the stateful op
      # is given only the leading sample dimension (it appends alpha's shape).
      yield ('gamma',
             functools.partial(wrap, stateless.stateless_random_gamma, alpha,
                               dtype, (10,) + tuple(np.shape(alpha))),
             functools.partial(wrap, random_ops.random_gamma, alpha, dtype,
                               (10,)))
def poisson_cases():
  """Yields (name, stateless_fn, stateful_fn) cases for Poisson sampling."""

  def make_op(op, lam, lam_dtype, out_dtype, shape, seed):
    lam_t = constant_op.constant(lam_dtype(lam), dtype=lam_dtype)
    return op(seed=seed, shape=shape, lam=lam_t, dtype=out_dtype)

  dtype_choices = (np.float16, np.float32, np.float64, np.int32, np.int64)
  lam_options = ([[5.5, 1., 2.]], [[7.5, 10.5], [3.8, 8.2], [1.25, 9.75]])
  for lam_dtype in dtype_choices:
    for out_dtype in dtype_choices:
      for lam in lam_options:
        # The stateless op is given the full output shape; the stateful op
        # appends lam's shape to the requested sample shape itself.
        full_shape = (10,) + tuple(np.shape(lam))
        yield ('poisson',
               functools.partial(make_op, stateless.stateless_random_poisson,
                                 lam, lam_dtype, out_dtype, full_shape),
               functools.partial(make_op, random_ops.random_poisson,
                                 lam, lam_dtype, out_dtype, (10,)))
class StatelessOpsTest(test.TestCase, parameterized.TestCase):
  """Tests stateless random ops against stateful ops and for determinism."""

  def _test_match(self, case, seed):
    """Checks the stateless op in `case` matches its stateful twin for `seed`."""
    # Stateless ops should be the same as stateful ops on the first call
    # after seed scrambling.
    key = 0x3ec8f720, 0x02461e29
    preseed = invert_philox(key, (seed[0], 0, seed[1], 0)).astype(np.uint64)
    # Pack the four 32-bit lanes into two 64-bit values.
    preseed = preseed[::2] | preseed[1::2] << 32
    with ops.device(get_device().name):
      _, stateless_op, stateful_op = case
      random_seed.set_random_seed(seed[0])
      stateful = stateful_op(seed=seed[1])
      pure = stateless_op(seed=preseed)
      self.assertAllEqual(stateful, pure)

  def _test_match_stateless_cpu_gpu(self, case, seed):
    """Checks the stateless op in `case` agrees between CPU and this device."""
    # Stateless ops should produce the same result on CPUs and GPUs.
    _, stateless_op, _ = case
    with ops.device('CPU'):
      result_cpu = stateless_op(seed=seed)
    with ops.device(get_device().name):
      result_gpu = stateless_op(seed=seed)
    self.assertAllClose(result_cpu, result_gpu)

  def _test_old_and_new_stateless_match(self, case, seed):
    """Tests that the new stateless ops match the old stateless ones."""
    with ops.device(get_device().name):
      _, stateless_op, _ = case
      with compat.forward_compatibility_horizon(*BEFORE_EXPIRE):
        old = stateless_op(seed=seed)
      with compat.forward_compatibility_horizon(*AFTER_EXPIRE):
        new = stateless_op(seed=seed)
      self.assertAllClose(old, new)

  def _test_determinism(self, case, seed_type):
    """Checks outputs are equal iff seeds are equal (with a bfloat16 caveat)."""
    # Stateless values should be equal iff the seeds are equal (roughly)
    seeds = [(x, y) for x in range(5) for y in range(5)] * 3  # pylint: disable=g-complex-comprehension
    with self.test_session(), ops.device(get_device().name):
      _, stateless_op, _ = case
      if context.executing_eagerly():
        values = [
            (seed, stateless_op(seed=constant_op.constant(seed, seed_type)))
            for seed in seeds]
      else:
        # Have this branch because the above branch is too slow in graph
        # mode
        seed_t = array_ops.placeholder(seed_type, shape=[2])
        pure = stateless_op(seed=seed_t)
        values = [
            (seed, pure.eval(feed_dict={seed_t: seed})) for seed in seeds
        ]
      for s0, v0 in values:
        for s1, v1 in values:
          if dtypes.as_dtype(v0.dtype) != dtypes.bfloat16:
            self.assertEqual(s0 == s1, np.all(v0 == v1))
          elif s0 == s1:
            # Skip the s0 != s1 case because v0 and v1 can be either equal or
            # unequal in that case due to bfloat16's low precision
            self.assertAllEqual(v0, v1)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(float_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testMatchFloat(self, case, seed):
    """Stateless/stateful match for float distributions."""
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Skip on XLA because XLA kernels do not support int64 '
                    'seeds needed by this test.')
    self._test_match(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(int_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testMatchInt(self, case, seed):
    """Stateless/stateful match for integer uniform distributions."""
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Skip on XLA because XLA kernels do not support int64 '
                    'seeds needed by this test.')
    self._test_match(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(multinomial_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testMatchMultinomial(self, case, seed):
    """Stateless/stateful match for multinomial sampling."""
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking XLA kernel')
    self._test_match(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(gamma_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testMatchGamma(self, case, seed):
    """Stateless/stateful match for gamma sampling (CPU only)."""
    if get_device().device_type == 'GPU':
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking GPU kernel')
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking XLA kernel')
    self._test_match(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(gamma_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testStatelessGammaCpuGpuMatch(self, case, seed):
    """CPU/GPU agreement for stateless gamma sampling."""
    if get_device().device_type != 'GPU':
      # This test compares the numbers produced by the CPU and GPU kernel for
      # stateless_random_gamma.
      self.skipTest('This test requires GPU')
    self._test_match_stateless_cpu_gpu(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(poisson_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testMatchPoisson(self, case, seed):
    """Stateless/stateful match for Poisson sampling (CPU only)."""
    if get_device().device_type == 'GPU':
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking GPU kernel')
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking XLA kernel')
    self._test_match(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(float_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testOldAndNewStatelessMatchFloat(self, case, seed):
    """Old/new stateless op agreement for float distributions."""
    self._test_old_and_new_stateless_match(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed)  # pylint: disable=g-complex-comprehension
      for seed_id, seed in enumerate(SEEDS)
      for case_id, case in enumerate(
          int_cases(minval_maxval=((2, 11111), (None, None)))))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testOldAndNewStatelessMatchInt(self, case, seed):
    """Old/new stateless op agreement for integer uniforms."""
    self._test_old_and_new_stateless_match(case, seed)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type)  # pylint: disable=g-complex-comprehension
      for seed_type in SEED_TYPES
      for case_id, case in enumerate(
          float_cases(shape_dtypes=(dtypes.int32, dtypes.int64))))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testDeterminismFloat(self, case, seed_type):
    """Determinism of stateless float distributions across seed dtypes."""
    if seed_type == dtypes.int64 and get_device().device_type in ('XLA_GPU',
                                                                  'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest(
          'Skip on XLA because XLA kernels do not support int64 seeds.')
    self._test_determinism(case, seed_type)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type)  # pylint: disable=g-complex-comprehension
      for seed_type in SEED_TYPES
      for case_id, case in enumerate(
          int_cases(shape_dtypes=(dtypes.int32, dtypes.int64))))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testDeterminismInt(self, case, seed_type):
    """Determinism of stateless integer uniforms across seed dtypes."""
    if seed_type == dtypes.int64 and get_device().device_type in ('XLA_GPU',
                                                                  'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest(
          'Skip on XLA because XLA kernels do not support int64 seeds.')
    self._test_determinism(case, seed_type)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type)  # pylint: disable=g-complex-comprehension
      for seed_type in SEED_TYPES
      for case_id, case in enumerate(multinomial_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testDeterminismMultinomial(self, case, seed_type):
    """Determinism of stateless multinomial sampling."""
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking XLA kernel')
    self._test_determinism(case, seed_type)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type)  # pylint: disable=g-complex-comprehension
      for seed_type in SEED_TYPES
      for case_id, case in enumerate(gamma_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testDeterminismGamma(self, case, seed_type):
    """Determinism of stateless gamma sampling."""
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking XLA kernel')
    self._test_determinism(case, seed_type)

  @parameterized.named_parameters(
      ('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type)  # pylint: disable=g-complex-comprehension
      for seed_type in SEED_TYPES
      for case_id, case in enumerate(poisson_cases()))
  @test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
  def testDeterminismPoisson(self, case, seed_type):
    """Determinism of stateless Poisson sampling (CPU only)."""
    if get_device().device_type == 'GPU':
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking GPU kernel')
    if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
      # This test was passing before because soft placement silently picked the
      # CPU kernels.
      self.skipTest('Lacking XLA kernel')
    self._test_determinism(case, seed_type)

  @test_util.run_v2_only
  def testGetKeyCounterAlg(self):
    """Checks the shapes of the key/counter/alg derived from a seed."""
    seed = [1, 2]
    key, counter = gen_stateless_random_ops_v2.stateless_random_get_key_counter(
        seed)
    self.assertAllEqual(key.shape, [1])
    self.assertAllEqual(counter.shape, [2])
    alg = gen_stateless_random_ops_v2.stateless_random_get_alg()
    self.assertAllEqual(alg.shape, [])

  def assertDTypeEqual(self, a, b):
    """Asserts two dtype-like values resolve to the same DType."""
    self.assertEqual(dtypes.as_dtype(a), dtypes.as_dtype(b))

  def assertNoEqualPair(self, ls):
    """Asserts no two tensors in `ls` are element-wise fully equal."""
    for i in range(len(ls)):
      for j in range(i + 1, len(ls)):
        self.assertFalse(math_ops.reduce_all(ls[i] == ls[j]))

  @parameterized.parameters(['int32', 'int64'])
  @test_util.run_v2_only
  def testSplit(self, dtype):
    """Test for `split`."""
    seed = constant_op.constant([1, 2], dtype=dtype)
    new_seed = stateless.split(seed, 3)
    self.assertEqual(new_seed.shape, [3, 2])
    self.assertDTypeEqual(new_seed.dtype, dtype)
    # The derived seeds must differ from the input and from each other.
    self.assertNoEqualPair([seed] + array_ops.unstack(new_seed))

  @parameterized.parameters(['int32', 'int64'])
  @test_util.run_v2_only
  def testFoldIn(self, dtype):
    """Test for `fold_in`."""
    orig_seed = constant_op.constant([1, 2], dtype='int32')
    seed = stateless.fold_in(orig_seed, constant_op.constant(3, dtype=dtype))
    new_seeds = []
    new_seeds.append(seed)
    seed = stateless.fold_in(seed, constant_op.constant(4, dtype=dtype))
    new_seeds.append(seed)
    for s in new_seeds:
      self.assertEqual(s.shape, [2])
      self.assertDTypeEqual(s.dtype, dtype)
    # Folding in data must produce seeds distinct from the original and from
    # each other.
    self.assertNoEqualPair([math_ops.cast(orig_seed, dtype)] + new_seeds)

  @test_util.run_v2_only
  def testErrors(self):
    """Tests that proper errors are raised.
    """
    shape = [2, 3]
    # minval/maxval must be scalars; a tensor-valued bound is rejected at
    # function-tracing time.
    with self.assertRaisesWithPredicateMatch(
        ValueError,
        'minval must be a scalar; got a tensor of shape '):
      @def_function.function
      def f():
        stateless.stateless_random_uniform(
            shape=shape, seed=[1, 2], minval=array_ops.zeros(shape, 'int32'),
            maxval=100, dtype='int32')
      f()
    with self.assertRaisesWithPredicateMatch(
        ValueError,
        'maxval must be a scalar; got a tensor of shape '):
      @def_function.function
      def f2():
        stateless.stateless_random_uniform(
            shape=shape, seed=[1, 2], minval=0,
            maxval=array_ops.ones(shape, 'int32') * 100,
            dtype='int32')
      f2()
if __name__ == '__main__':
  # Disable soft placement so missing kernels fail loudly instead of silently
  # falling back to CPU, and enable XLA devices for the XLA test variants.
  config.set_soft_device_placement(False)
  context.context().enable_xla_devices()
  test.main()
| apache-2.0 |
qedi-r/home-assistant | tests/components/alexa/test_state_report.py | 2 | 5501 | """Test report state."""
from homeassistant.components.alexa import state_report
from . import TEST_URL, DEFAULT_CONFIG
async def test_report_state(hass, aioclient_mock):
    """Test proactive state reports."""
    aioclient_mock.post(TEST_URL, text="", status=202)

    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)

    hass.states.async_set(
        "binary_sensor.test_contact",
        "off",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    # To trigger event listener
    await hass.async_block_till_done()

    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls

    # mock_calls entries are (method, url, data, headers); index 2 is the body.
    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa"
    assert call_json["event"]["header"]["name"] == "ChangeReport"
    # The sensor's "off" state is reported as NOT_DETECTED to Alexa.
    assert (
        call_json["event"]["payload"]["change"]["properties"][0]["value"]
        == "NOT_DETECTED"
    )
    assert call_json["event"]["endpoint"]["endpointId"] == "binary_sensor#test_contact"
async def test_report_state_instance(hass, aioclient_mock):
    """Test proactive state reports with instance."""
    aioclient_mock.post(TEST_URL, text="", status=202)

    hass.states.async_set(
        "fan.test_fan",
        "off",
        {
            "friendly_name": "Test fan",
            "supported_features": 3,
            "speed": "off",
            "oscillating": False,
        },
    )

    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)

    hass.states.async_set(
        "fan.test_fan",
        "on",
        {
            "friendly_name": "Test fan",
            "supported_features": 3,
            "speed": "high",
            "oscillating": True,
        },
    )

    # To trigger event listener
    await hass.async_block_till_done()

    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls

    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa"
    assert call_json["event"]["header"]["name"] == "ChangeReport"

    # The oscillation toggle must carry its controller instance so Alexa can
    # distinguish it from other toggles on the same endpoint.
    change_reports = call_json["event"]["payload"]["change"]["properties"]
    for report in change_reports:
        if report["name"] == "toggleState":
            assert report["value"] == "ON"
            assert report["instance"] == "fan.oscillating"
            assert report["namespace"] == "Alexa.ToggleController"

    assert call_json["event"]["endpoint"]["endpointId"] == "fan#test_fan"
async def test_send_add_or_update_message(hass, aioclient_mock):
    """Test sending an AddOrUpdateReport message."""
    aioclient_mock.post(TEST_URL, text="")

    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    # "zwave.bla" has no Alexa endpoint, so only the contact sensor should
    # appear in the report (asserted below).
    await state_report.async_send_add_or_update_message(
        hass, DEFAULT_CONFIG, ["binary_sensor.test_contact", "zwave.bla"]
    )

    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls

    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa.Discovery"
    assert call_json["event"]["header"]["name"] == "AddOrUpdateReport"
    assert len(call_json["event"]["payload"]["endpoints"]) == 1
    assert (
        call_json["event"]["payload"]["endpoints"][0]["endpointId"]
        == "binary_sensor#test_contact"
    )
async def test_send_delete_message(hass, aioclient_mock):
    """Test sending a DeleteReport message."""
    aioclient_mock.post(TEST_URL, json={"data": "is irrelevant"})

    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    # "zwave.bla" has no Alexa endpoint, so only the contact sensor should
    # appear in the report (asserted below).
    await state_report.async_send_delete_message(
        hass, DEFAULT_CONFIG, ["binary_sensor.test_contact", "zwave.bla"]
    )

    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls

    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa.Discovery"
    assert call_json["event"]["header"]["name"] == "DeleteReport"
    assert len(call_json["event"]["payload"]["endpoints"]) == 1
    assert (
        call_json["event"]["payload"]["endpoints"][0]["endpointId"]
        == "binary_sensor#test_contact"
    )
async def test_doorbell_event(hass, aioclient_mock):
    """Test doorbell press reports."""
    aioclient_mock.post(TEST_URL, text="", status=202)

    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "off",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )

    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)

    # Transitioning the occupancy sensor to "on" should emit a DoorbellPress
    # event rather than a ChangeReport (asserted below).
    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "on",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )

    # To trigger event listener
    await hass.async_block_till_done()

    assert len(aioclient_mock.mock_calls) == 1
    call = aioclient_mock.mock_calls

    call_json = call[0][2]
    assert call_json["event"]["header"]["namespace"] == "Alexa.DoorbellEventSource"
    assert call_json["event"]["header"]["name"] == "DoorbellPress"
    assert call_json["event"]["payload"]["cause"]["type"] == "PHYSICAL_INTERACTION"
    assert call_json["event"]["endpoint"]["endpointId"] == "binary_sensor#test_doorbell"
| apache-2.0 |
uday1889/gensim | gensim/corpora/bleicorpus.py | 68 | 4496 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Blei's LDA-C format.
"""
from __future__ import with_statement
from os import path
import logging
from gensim import interfaces, utils
from gensim.corpora import IndexedCorpus
from six.moves import xrange
logger = logging.getLogger('gensim.corpora.bleicorpus')
class BleiCorpus(IndexedCorpus):
    """
    Corpus in Blei's LDA-C format.

    The corpus is represented as two files: one describing the documents, and another
    describing the mapping between words and their ids.

    Each document is one line::

      N fieldId1:fieldValue1 fieldId2:fieldValue2 ... fieldIdN:fieldValueN

    The vocabulary is a file with words, one word per line; word at line K has an
    implicit ``id=K``.
    """

    def __init__(self, fname, fname_vocab=None):
        """
        Initialize the corpus from a file.

        `fname_vocab` is the file with vocabulary; if not specified, it is
        searched for in several conventional locations, falling back to
        `fname.vocab`.  Raises `IOError` if no vocabulary file can be found.
        """
        IndexedCorpus.__init__(self, fname)
        logger.info("loading corpus from %s", fname)

        if fname_vocab is None:
            fname_base, _ = path.splitext(fname)
            fname_dir = path.dirname(fname)
            for fname_vocab in [
                        utils.smart_extension(fname, '.vocab'),
                        utils.smart_extension(fname, '/vocab.txt'),
                        utils.smart_extension(fname_base, '.vocab'),
                        utils.smart_extension(fname_dir, '/vocab.txt'),
                    ]:
                if path.exists(fname_vocab):
                    break
            else:
                raise IOError('BleiCorpus: could not find vocabulary file')

        self.fname = fname
        with utils.smart_open(fname_vocab) as fin:
            words = [utils.to_unicode(word).rstrip() for word in fin]
        # Word at line K of the vocabulary file gets the implicit id K.
        self.id2word = dict(enumerate(words))

    def __iter__(self):
        """
        Iterate over the corpus, returning one sparse vector at a time.
        """
        lineno = -1
        with utils.smart_open(self.fname) as fin:
            for lineno, line in enumerate(fin):
                yield self.line2doc(line)
        self.length = lineno + 1

    def line2doc(self, line):
        """Parse a single LDA-C line into a list of `(word_id, weight)` tuples.

        Raises `ValueError` if the leading field count does not match the
        number of `id:value` pairs on the line.
        """
        parts = utils.to_unicode(line).split()
        if int(parts[0]) != len(parts) - 1:
            raise ValueError("invalid format in %s: %s" % (self.fname, repr(line)))
        doc = [part.rsplit(':', 1) for part in parts[1:]]
        doc = [(int(p1), float(p2)) for p1, p2 in doc]
        return doc

    @staticmethod
    def save_corpus(fname, corpus, id2word=None, metadata=False):
        """
        Save a corpus in the LDA-C format.

        There are actually two files saved: `fname` and `fname.vocab`, where
        `fname.vocab` is the vocabulary file.

        This function is automatically called by `BleiCorpus.serialize`; don't
        call it directly, call `serialize` instead.

        Returns the list of byte offsets of each document, for later indexing.
        """
        if id2word is None:
            logger.info("no word id mapping provided; initializing from corpus")
            id2word = utils.dict_from_corpus(corpus)
            num_terms = len(id2word)
        else:
            # `dict.keys()` is a view in Python 3 and cannot be concatenated to
            # a list; materialize it and keep the [-1] sentinel so an empty
            # mapping yields num_terms == 0.
            num_terms = 1 + max(list(id2word.keys()) + [-1])

        logger.info("storing corpus in Blei's LDA-C format into %s", fname)
        with utils.smart_open(fname, 'wb') as fout:
            offsets = []
            for doc in corpus:
                doc = list(doc)
                offsets.append(fout.tell())
                # Near-zero weights are dropped, matching LDA-C's sparse format.
                parts = ["%i:%g" % p for p in doc if abs(p[1]) > 1e-7]
                fout.write(utils.to_utf8("%i %s\n" % (len(doc), ' '.join(parts))))

        # write out vocabulary, in a format compatible with Blei's topics.py script
        fname_vocab = utils.smart_extension(fname, '.vocab')
        logger.info("saving vocabulary of %i words to %s", num_terms, fname_vocab)
        with utils.smart_open(fname_vocab, 'wb') as fout:
            for featureid in xrange(num_terms):
                fout.write(utils.to_utf8("%s\n" % id2word.get(featureid, '---')))

        return offsets

    def docbyoffset(self, offset):
        """
        Return the document stored at file position `offset`.
        """
        with utils.smart_open(self.fname) as f:
            f.seek(offset)
            return self.line2doc(f.readline())
# endclass BleiCorpus
| gpl-3.0 |
sander76/home-assistant | tests/components/alexa/test_flash_briefings.py | 21 | 4685 | """The tests for the Alexa component."""
# pylint: disable=protected-access
import datetime
import pytest
from homeassistant.components import alexa
from homeassistant.components.alexa import const
from homeassistant.const import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED
from homeassistant.core import callback
from homeassistant.setup import async_setup_component
# Sample Alexa request identifiers used when building test payloads.
SESSION_ID = "amzn1.echo-api.session.0000000-0000-0000-0000-00000000000"
APPLICATION_ID = "amzn1.echo-sdk-ams.app.000000-d0ed-0000-ad00-000000d00ebe"
REQUEST_ID = "amzn1.echo-api.request.0000000-0000-0000-0000-00000000000"
# pylint: disable=invalid-name
# Service calls recorded by the mocked "test.alexa" service (see alexa_client).
calls = []
NPR_NEWS_MP3_URL = "https://pd.npr.org/anon.npr-mp3/npr/news/newscast.mp3"
@pytest.fixture
def alexa_client(loop, hass, hass_client):
    """Initialize a Home Assistant server for testing this module."""

    @callback
    def mock_service(call):
        # Record invocations so tests can inspect them via the module-level
        # `calls` list.
        calls.append(call)

    hass.services.async_register("test", "alexa", mock_service)
    assert loop.run_until_complete(
        async_setup_component(
            hass,
            alexa.DOMAIN,
            {
                # Key is here to verify we allow other keys in config too
                "homeassistant": {},
                "alexa": {
                    "flash_briefings": {
                        "password": "pass/abc",
                        "weather": [
                            {
                                "title": "Weekly forecast",
                                "text": "This week it will be sunny.",
                            },
                            {
                                "title": "Current conditions",
                                "text": "Currently it is 80 degrees fahrenheit.",
                            },
                        ],
                        "news_audio": {
                            "title": "NPR",
                            "audio": NPR_NEWS_MP3_URL,
                            "display_url": "https://npr.org",
                            "uid": "uuid",
                        },
                    }
                },
            },
        )
    )
    return loop.run_until_complete(hass_client())
def _flash_briefing_req(client, briefing_id, password="pass%2Fabc"):
if password is None:
return client.get(f"/api/alexa/flash_briefings/{briefing_id}")
return client.get(f"/api/alexa/flash_briefings/{briefing_id}?password={password}")
async def test_flash_briefing_invalid_id(alexa_client):
    """Test an invalid Flash Briefing ID."""
    req = await _flash_briefing_req(alexa_client, 10000)
    # Unknown briefing ids yield a 404 with an empty body.
    assert req.status == HTTP_NOT_FOUND
    text = await req.text()
    assert text == ""
async def test_flash_briefing_no_password(alexa_client):
    """Test for no Flash Briefing password."""
    req = await _flash_briefing_req(alexa_client, "weather", password=None)
    # Omitting the password query parameter is rejected with 401.
    assert req.status == HTTP_UNAUTHORIZED
    text = await req.text()
    assert text == ""
async def test_flash_briefing_invalid_password(alexa_client):
    """Test an invalid Flash Briefing password."""
    req = await _flash_briefing_req(alexa_client, "weather", password="wrongpass")
    # A wrong password is rejected with 401 and an empty body.
    assert req.status == HTTP_UNAUTHORIZED
    text = await req.text()
    assert text == ""
async def test_flash_briefing_request_for_password(alexa_client):
    """Test for "password" Flash Briefing."""
    # "password" is a config key, not a briefing id, so it must 404.
    req = await _flash_briefing_req(alexa_client, "password")
    assert req.status == HTTP_NOT_FOUND
    text = await req.text()
    assert text == ""
async def test_flash_briefing_date_from_str(alexa_client):
    """Test the response has a valid date parsed from string."""
    req = await _flash_briefing_req(alexa_client, "weather")
    assert req.status == 200
    data = await req.json()
    # The updateDate field must round-trip through the expected date format.
    assert isinstance(
        datetime.datetime.strptime(
            data[0].get(const.ATTR_UPDATE_DATE), const.DATE_FORMAT
        ),
        datetime.datetime,
    )
async def test_flash_briefing_valid(alexa_client):
    """Test the response is valid."""
    data = [
        {
            "titleText": "NPR",
            "redirectionURL": "https://npr.org",
            "streamUrl": NPR_NEWS_MP3_URL,
            "mainText": "",
            "uid": "uuid",
            "updateDate": "2016-10-10T19:51:42.0Z",
        }
    ]

    req = await _flash_briefing_req(alexa_client, "news_audio")
    assert req.status == 200
    json = await req.json()
    assert isinstance(
        datetime.datetime.strptime(
            json[0].get(const.ATTR_UPDATE_DATE), const.DATE_FORMAT
        ),
        datetime.datetime,
    )
    # updateDate is generated at request time, so drop it before comparing the
    # rest of the payload against the expected data.
    json[0].pop(const.ATTR_UPDATE_DATE)
    data[0].pop(const.ATTR_UPDATE_DATE)
    assert json == data
| apache-2.0 |
goanpeca/loghub | loghub/cli/label_creator.py | 2 | 1563 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (See LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Create github labels from a text file."""
# yapf: disable
# Standard library imports
import argparse
# Local imports
from loghub.cli.common import add_common_parser_args, parse_password_check_repo
from loghub.core.labels import process_labels
# yapf: enable
def main():
    """Main script."""
    # Entry point for the console script: parse args and run label processing.
    parse_arguments(skip=False)
def parse_arguments(skip=False):
    """Parse argument for label creator utility.

    When `skip` is True, only the parsed options are returned and no label
    processing is performed (useful for testing the parser).
    """
    # Get command-line arguments
    parser = argparse.ArgumentParser()
    parser = add_common_parser_args(parser)
    parser.add_argument(
        '-a',
        '--action',
        help='Action to take',
        type=str,
        choices=['get', 'update'],
        default='get',
        nargs='?')
    parser.add_argument(
        '-f',
        '--filename',
        help='File for storing labels',
        type=str,
        default='labels.txt')
    options = parser.parse_args()
    username = options.username
    password = parse_password_check_repo(options)

    if not skip:
        process_labels(
            username,
            password,
            options.token,
            options.action,
            options.repository,
            options.filename, )

    return options
# Allow running the module directly as a script.
if __name__ == '__main__':
    main()
| mit |
zhaozengguang/opencog | examples/visual_demos/visualise_importance_diffusion.py | 30 | 1911 | #!/usr/bin/python2.5
import sys
import os
import subprocess
import time
# the visualiser executable, make blank if you don't want it to be spawned
# automatically
ubigraph_server="/home/joel/src/UbiGraph-alpha-0.2.4-Linux64-Ubuntu-8.04/bin/ubigraph_server"
N = 10
def main():
    """Print the OpenCog shell commands that drive the visualisation demo."""
    #/sys.argv()
    #print "Spawning ubigraph server",
    #pid = subprocess.Popen([ubigraph_server])
    #if pid.returncode is not None:
    #    print ".. failed"
    #    sys.exit(1)
    #print ".. started"
    printHeader()
    makeNet(30)
    printTail()
def makeNet(N):
    """Emit scheme commands creating an N-row chain with one full row of
    ConceptNodes in the middle, linked vertically and horizontally."""
    for i in range(0,N):
        jN = 1
        # The middle row gets N columns; every other row has a single node.
        if i == N / 2:
            jN = N
        for j in range(0,jN):
            node_name = str(i) + "__" + str(j)
            print "(cog-new-node 'ConceptNode \"" + node_name + "\")"
            # connect to above row
            if i > 0 and j == 0:
                makeLink(str(i-1) + "__" + str(j),node_name)
            # connect to previous column's node
            if j > 0:
                makeLink(str(i) + "__" + str(j-1),node_name)
def makeLink(a,b):
    """Emit a scheme command creating a SymmetricHebbianLink between the
    ConceptNodes named `a` and `b` with a fixed (0.05, 0.8) truth value."""
    print "(cog-new-link 'SymmetricHebbianLink " + \
        "(cog-node 'ConceptNode \"" + a + "\") " + \
        "(cog-node 'ConceptNode \"" + b + "\") " + \
        "(cog-new-stv 0.05 0.8))"
def printHeader():
    """Emit the shell commands that load modules, start the attention agent,
    and open the scheme shell."""
    print """loadmodule opencog/ubigraph/libubigraph.so
ubigraph --compact
loadmodule opencog/dynamics/attention/libattention.so
agents-stop-loop
agents-start opencog::ImportanceDiffusionAgent
"""
    sys.stdout.flush()
    print "scm"
def printTail():
    """Close the scheme shell, then step the agents for 100 iterations,
    randomizing STI every 10th step, pausing one second between steps."""
    print "." # close scm shell
    sys.stdout.flush()
    time.sleep(10)
    for i in range(0,100):
        if i % 10 == 0:
            print "ubigraph-random-sti 5"
            print "ubigraph-update-sti"
        print """agents-step
ubigraph-update-sti
"""
        sys.stdout.flush()
        time.sleep(1)
# Run the demo when executed as a script.
if __name__ == '__main__':
    main()
| agpl-3.0 |
RudoCris/horizon | openstack_dashboard/dashboards/admin/instances/panel.py | 46 | 1091 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Instances(horizon.Panel):
    """Admin dashboard panel listing compute instances."""
    name = _("Instances")
    slug = 'instances'
    # Visible only to admins on deployments that expose the compute service.
    permissions = ('openstack.roles.admin', 'openstack.services.compute')


dashboard.Admin.register(Instances)
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.