input | target |
|---|---|
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def patkind(pattern, default=None):
"""If pattern is 'kind:pat' with a known kind, return kind."""
return _patsplit(pattern, default)[0] |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _patsplit(pattern, default):
"""Split a string into the optional pattern kind prefix and the actual
pattern."""
if ":" in pattern:
kind, pat = pattern.split(":", 1)
if kind in allpatternkinds:
return kind, pat
return default, pattern |
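A quick sketch of how patkind and _patsplit behave, assuming "re" and "glob" are members of allpatternkinds (that set is not shown in this excerpt); the calls and results below are illustrative only:
# illustrative calls; allpatternkinds is assumed to contain "re" and "glob"
patkind("re:.*\\.py$")            # -> "re"    known kind prefix is split off
patkind("README.txt")             # -> None    no prefix, so the default is returned
_patsplit("glob:*.py", None)      # -> ("glob", "*.py")
_patsplit("unknown:pat", None)    # -> (None, "unknown:pat")  unrecognized kind stays in the pattern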
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def peek():
return i < n and pat[i : i + 1] |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _regex(kind, pat, globsuffix):
"""Convert a (normalized) pattern of any kind into a regular expression.
globsuffix is appended to the regexp of globs."""
if not pat and kind in ("glob", "relpath"):
return ""
if kind == "re":
return pat
if kind in ("path", "relpath"):
if pat == ".":
return ""
return util.re.escape(pat) + "(?:/|$)"
if kind == "rootfilesin":
if pat == ".":
escaped = ""
else:
# Pattern is a directory name.
escaped = util.re.escape(pat) + "/"
# Anything after the pattern must be a non-directory.
return escaped + "[^/]+$"
if kind == "relglob":
return "(?:|.*/)" + _globre(pat) + globsuffix
if kind == "relre":
if pat.startswith("^"):
return pat
return ".*" + pat
return _globre(pat) + globsuffix |
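For illustration, the regexp fragments _regex produces for a few kinds, read directly off the branches above (util.re.escape is assumed to leave these plain names untouched; the glob cases are omitted because _globre is not part of this excerpt):
_regex("path", "foo/bar", "")      # -> "foo/bar(?:/|$)"  matches foo/bar and everything beneath it
_regex("rootfilesin", "foo", "")   # -> "foo/[^/]+$"      files directly inside foo/, no subdirectories
_regex("relre", "bar", "")         # -> ".*bar"           unrooted regexp gets a ".*" prefix
_regex("re", "^baz$", "")          # -> "^baz$"           passed through unchanged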
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def matchsubinclude(f):
for prefix, matcherargs in subincludes:
if f.startswith(prefix):
mf = submatchers.get(prefix)
if mf is None:
mf = match(*matcherargs)
submatchers[prefix] = mf
if mf(f[len(prefix) :]):
return True
return False |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _buildregexmatch(kindpats, globsuffix):
"""Build a match function from a list of kinds and kindpats,
return regexp string and a matcher function."""
try:
regex = "(?:%s)" % "|".join(
[_regex(k, p, globsuffix) for (k, p, s) in kindpats]
)
if len(regex) > 20000:
raise OverflowError
return regex, _rematcher(regex)
except OverflowError:
# We're using a Python with a tiny regex engine and we
# made it explode, so we'll divide the pattern list in two
# until it works
l = len(kindpats)
if l < 2:
raise
regexa, a = _buildregexmatch(kindpats[: l // 2], globsuffix)
regexb, b = _buildregexmatch(kindpats[l // 2 :], globsuffix)
return regex, lambda s: a(s) or b(s)
except re.error:
for k, p, s in kindpats:
try:
_rematcher("(?:%s)" % _regex(k, p, globsuffix))
except re.error:
if s:
raise error.Abort(_("%s: invalid pattern (%s): %s") % (s, k, p))
else:
raise error.Abort(_("invalid pattern (%s): %s") % (k, p))
raise error.Abort(_("invalid pattern")) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _patternrootsanddirs(kindpats):
"""Returns roots and directories corresponding to each pattern.
This calculates the roots and directories exactly matching the patterns and
returns a tuple of (roots, dirs) for each. It does not return other
directories which may also need to be considered, like the parent
directories.
"""
r = []
d = []
for kind, pat, source in kindpats:
if kind == "glob": # find the non-glob prefix
root = []
for p in pat.split("/"):
if "[" in p or "{" in p or "*" in p or "?" in p:
break
root.append(p)
r.append("/".join(root))
elif kind in ("relpath", "path"):
if pat == ".":
pat = ""
r.append(pat)
elif kind in ("rootfilesin",):
if pat == ".":
pat = ""
d.append(pat)
else: # relglob, re, relre
r.append("")
return r, d |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _roots(kindpats):
"""Returns root directories to match recursively from the given patterns."""
roots, dirs = _patternrootsanddirs(kindpats)
return roots |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _rootsanddirs(kindpats):
"""Returns roots and exact directories from patterns.
roots are directories to match recursively, whereas exact directories should
be matched non-recursively. The returned (roots, dirs) tuple will also
include directories that need to be implicitly considered as either, such as
parent directories.
>>> _rootsanddirs(
... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
... (b'glob', b'g*', b'')])
(['g/h', 'g/h', ''], ['', 'g'])
>>> _rootsanddirs(
... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
([], ['g/h', '', '', 'g'])
>>> _rootsanddirs(
... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
... (b'path', b'', b'')])
(['r', 'p/p', ''], ['', 'p'])
>>> _rootsanddirs(
... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
... (b'relre', b'rr', b'')])
(['', '', ''], [''])
"""
r, d = _patternrootsanddirs(kindpats)
# Append the parents as non-recursive/exact directories, since they must be
# scanned to get to either the roots or the other exact directories.
d.extend(sorted(util.dirs(d)))
d.extend(sorted(util.dirs(r)))
return r, d |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _explicitfiles(kindpats):
"""Returns the potential explicit filenames from the patterns.
>>> _explicitfiles([(b'path', b'foo/bar', b'')])
['foo/bar']
>>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
[]
"""
# Keep only the pattern kinds where one can specify filenames (vs only
# directory names).
filable = [kp for kp in kindpats if kp[0] not in ("rootfilesin",)]
return _roots(filable) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _prefix(kindpats):
"""Whether all the patterns match a prefix (i.e. recursively)"""
for kind, pat, source in kindpats:
if kind not in ("path", "relpath"):
return False
return True |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def readpatternfile(filepath, warn, sourceinfo=False):
"""parse a pattern file, returning a list of
patterns. These patterns should be given to compile()
to be validated and converted into a match function.
trailing white space is dropped.
the escape character is backslash.
comments start with #.
empty lines are skipped.
lines can be of the following formats:
syntax: regexp # defaults following lines to non-rooted regexps
syntax: glob # defaults following lines to non-rooted globs
re:pattern # non-rooted regular expression
glob:pattern # non-rooted glob
pattern # pattern of the current default type
if sourceinfo is set, returns a list of tuples:
(pattern, lineno, originalline). This is useful to debug ignore patterns.
"""
syntaxes = {
"re": "relre:",
"regexp": "relre:",
"glob": "relglob:",
"include": "include",
"subinclude": "subinclude",
}
syntax = "relre:"
patterns = []
fp = open(filepath, "rb")
for lineno, line in enumerate(util.iterfile(fp), start=1):
if "#" in line:
global _commentre
if not _commentre:
_commentre = util.re.compile(br"((?:^|[^\\])(?:\\\\)*)#.*")
# remove comments prefixed by an even number of escapes
m = _commentre.search(line)
if m:
line = line[: m.end(1)]
# fixup properly escaped comments that survived the above
line = line.replace("\\#", "#")
line = line.rstrip()
if not line:
continue
if line.startswith("syntax:"):
s = line[7:].strip()
try:
syntax = syntaxes[s]
except KeyError:
if warn:
warn(_("%s: ignoring invalid syntax '%s'\n") % (filepath, s))
continue
linesyntax = syntax
for s, rels in pycompat.iteritems(syntaxes):
if line.startswith(rels):
linesyntax = rels
line = line[len(rels) :]
break
elif line.startswith(s + ":"):
linesyntax = rels
line = line[len(s) + 1 :]
break
if sourceinfo:
patterns.append((linesyntax + line, lineno, line))
else:
patterns.append(linesyntax + line)
fp.close()
return patterns |
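A hypothetical pattern file in the syntax readpatternfile parses; each non-empty, non-comment line becomes one returned pattern, prefixed with the syntax in force at that point (relre: by default):
# comments and blank lines are skipped
syntax: glob
*.pyc
build/
syntax: regexp
\.orig$
re:^vendored/    # a per-line kind prefix overrides the current default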
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def sm_section(name: str) -> str:
""":return: section title used in .gitmodules configuration file"""
return f'submodule "{name}"' |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _ccode(self, printer):
return "fabs(%s)" % printer._print(self.args[0]) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def sm_name(section: str) -> str:
""":return: name of the submodule as parsed from the section name"""
section = section.strip()
return section[11:-1] |
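sm_section and sm_name are inverses; a quick round-trip sketch (the submodule name is made up):
sm_section("vendor/lib")             # -> 'submodule "vendor/lib"'
sm_name('submodule "vendor/lib"')    # -> 'vendor/lib'  (drops the 11-character 'submodule "' prefix and the closing quote)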
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_sqrt():
assert ccode(sqrt(x)) == "sqrt(x)"
assert ccode(x**0.5) == "sqrt(x)"
assert ccode(sqrt(x)) == "sqrt(x)" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
""":return: New branch/head instance"""
return git.Head(repo, git.Head.to_full_path(path)) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_Pow():
assert ccode(x**3) == "pow(x, 3)"
assert ccode(x**(y**3)) == "pow(x, pow(y, 3))"
assert ccode(1/(g(x)*3.5)**(x - y**x)/(x**2 + y)) == \
"pow(3.5*g(x), -x + pow(y, x))/(pow(x, 2) + y)"
assert ccode(x**-1.0) == '1.0/x'
assert ccode(x**Rational(2, 3)) == 'pow(x, 2.0L/3.0L)'
_cond_cfunc = [(lambda base, exp: exp.is_integer, "dpowi"),
(lambda base, exp: not exp.is_integer, "pow")]
assert ccode(x**3, user_functions={'Pow': _cond_cfunc}) == 'dpowi(x, 3)'
assert ccode(x**3.2, user_functions={'Pow': _cond_cfunc}) == 'pow(x, 3.2)' |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def find_first_remote_branch(remotes: Sequence['Remote'], branch_name: str) -> 'RemoteReference':
"""Find the remote branch matching the name of the given branch or raise InvalidGitRepositoryError"""
for remote in remotes:
try:
return remote.refs[branch_name]
except IndexError:
continue
# END exception handling
# END for remote
raise InvalidGitRepositoryError("Didn't find remote branch '%r' in any of the given remotes" % branch_name) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_constants_mathh():
assert ccode(exp(1)) == "M_E"
assert ccode(pi) == "M_PI"
assert ccode(oo) == "HUGE_VAL"
assert ccode(-oo) == "-HUGE_VAL" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def __init__(self, *args: Any, **kwargs: Any) -> None:
self._smref: Union['ReferenceType[Submodule]', None] = None
self._index = None
self._auto_write = True
super(SubmoduleConfigParser, self).__init__(*args, **kwargs) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_constants_other():
assert ccode(2*GoldenRatio) == "double const GoldenRatio = 1.61803398874989;\n2*GoldenRatio"
assert ccode(
2*Catalan) == "double const Catalan = 0.915965594177219;\n2*Catalan"
assert ccode(2*EulerGamma) == "double const EulerGamma = 0.577215664901533;\n2*EulerGamma" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def set_submodule(self, submodule: 'Submodule') -> None:
"""Set this instance's submodule. It must be called before
the first write operation begins"""
self._smref = weakref.ref(submodule) |
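A rough usage sketch of the parser around this method; the constructor arguments and the set_value call are assumptions for illustration, but the ordering is what the docstring requires: set_submodule must run before write, because write (further below) calls flush_to_index, which dereferences the weakref stored here.
# hypothetical flow; constructor signature assumed, not taken from this excerpt
parser = SubmoduleConfigParser(".gitmodules", read_only=False)   # assumed construction
parser.set_submodule(sm)                                         # must happen before the first write
parser.set_value(sm_section(sm.name), "url", "https://example.invalid/repo.git")
parser.write()                                                   # writes the file, then flush_to_index() stages it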
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_Rational():
assert ccode(Rational(3, 7)) == "3.0L/7.0L"
assert ccode(Rational(18, 9)) == "2"
assert ccode(Rational(3, -7)) == "-3.0L/7.0L"
assert ccode(Rational(-3, -7)) == "3.0L/7.0L"
assert ccode(x + Rational(3, 7)) == "x + 3.0L/7.0L"
assert ccode(Rational(3, 7)*x) == "(3.0L/7.0L)*x" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def flush_to_index(self) -> None:
"""Flush changes in our configuration file to the index"""
assert self._smref is not None
# should always have a file here
assert not isinstance(self._file_or_files, BytesIO)
sm = self._smref()
if sm is not None:
index = self._index
if index is None:
index = sm.repo.index
# END handle index
index.add([sm.k_modules_file], write=self._auto_write)
sm._clear_cache()
# END handle weakref |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_Integer():
assert ccode(Integer(67)) == "67"
assert ccode(Integer(-1)) == "-1" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def write(self) -> None: # type: ignore[override]
rval: None = super(SubmoduleConfigParser, self).write()
self.flush_to_index()
return rval |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_functions():
assert ccode(sin(x) ** cos(x)) == "pow(sin(x), cos(x))" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_inline_function():
x = symbols('x')
g = implemented_function('g', Lambda(x, 2*x))
assert ccode(g(x)) == "2*x"
g = implemented_function('g', Lambda(x, 2*x/Catalan))
assert ccode(
g(x)) == "double const Catalan = %s;\n2*x/Catalan" % Catalan.n()
A = IndexedBase('A')
i = Idx('i', symbols('n', integer=True))
g = implemented_function('g', Lambda(x, x*(1 + x)*(2 + x)))
assert ccode(g(A[i]), assign_to=A[i]) == (
"for (int i=0; i<n; i++){\n"
" A[i] = (A[i] + 1)*(A[i] + 2)*A[i];\n"
"}"
) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_exceptions():
assert ccode(ceiling(x)) == "ceil(x)"
assert ccode(Abs(x)) == "fabs(x)"
assert ccode(gamma(x)) == "tgamma(x)" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_user_functions():
x = symbols('x', integer=False)
n = symbols('n', integer=True)
custom_functions = {
"ceiling": "ceil",
"Abs": [(lambda x: not x.is_integer, "fabs"), (lambda x: x.is_integer, "abs")],
}
assert ccode(ceiling(x), user_functions=custom_functions) == "ceil(x)"
assert ccode(Abs(x), user_functions=custom_functions) == "fabs(x)"
assert ccode(Abs(n), user_functions=custom_functions) == "abs(n)" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_boolean():
assert ccode(x & y) == "x && y"
assert ccode(x | y) == "x || y"
assert ccode(~x) == "!x"
assert ccode(x & y & z) == "x && y && z"
assert ccode(x | y | z) == "x || y || z"
assert ccode((x & y) | z) == "z || x && y"
assert ccode((x | y) & z) == "z && (x || y)" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_Piecewise():
p = ccode(Piecewise((x, x < 1), (x**2, True)))
s = \ |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_Piecewise_deep():
p = ccode(2*Piecewise((x, x < 1), (x**2, True)))
s = \ |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_settings():
raises(TypeError, lambda: ccode(sin(x), method="garbage")) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_Indexed():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o = symbols('n m o', integer=True)
i, j, k = Idx('i', n), Idx('j', m), Idx('k', o)
p = CCodePrinter()
p._not_c = set()
x = IndexedBase('x')[j]
assert p._print_Indexed(x) == 'x[j]'
A = IndexedBase('A')[i, j]
assert p._print_Indexed(A) == 'A[%s]' % (m*i+j)
B = IndexedBase('B')[i, j, k]
assert p._print_Indexed(B) == 'B[%s]' % (i*o*m+j*o+k)
assert p._not_c == set() |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_Indexed_without_looking_for_contraction():
len_y = 5
y = IndexedBase('y', shape=(len_y,))
x = IndexedBase('x', shape=(len_y,))
Dy = IndexedBase('Dy', shape=(len_y-1,))
i = Idx('i', len_y-1)
e=Eq(Dy[i], (y[i+1]-y[i])/(x[i+1]-x[i]))
code0 = ccode(e.rhs, assign_to=e.lhs, contract=False)
assert code0 == 'Dy[i] = (y[%s] - y[i])/(x[%s] - x[i]);' % (i + 1, i + 1) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_loops_matrix_vector():
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' y[i] = x[j]*A[%s] + y[i];\n' % (i*n + j) +\
' }\n'
'}'
)
c = ccode(A[i, j]*x[j], assign_to=y[i])
assert c == s |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_dummy_loops():
# the following line could also be
# [Dummy(s, integer=True) for s in 'im']
# or [Dummy(integer=True) for s in 'im']
i, m = symbols('i m', integer=True, cls=Dummy)
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx(i, m)
expected = (
'for (int i_%(icount)i=0; i_%(icount)i<m_%(mcount)i; i_%(icount)i++){\n'
' y[i_%(icount)i] = x[i_%(icount)i];\n'
'}'
) % {'icount': i.label.dummy_index, 'mcount': m.dummy_index}
code = ccode(x[i], assign_to=y[i])
assert code == expected |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_loops_add():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
z = IndexedBase('z')
i = Idx('i', m)
j = Idx('j', n)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = x[i] + z[i];\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' y[i] = x[j]*A[%s] + y[i];\n' % (i*n + j) +\
' }\n'
'}'
)
c = ccode(A[i, j]*x[j] + x[i] + z[i], assign_to=y[i])
assert c == s |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_loops_multiple_contractions():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' for (int k=0; k<o; k++){\n'
' for (int l=0; l<p; l++){\n'
' y[i] = y[i] + b[%s]*a[%s];\n' % (j*o*p + k*p + l, i*n*o*p + j*o*p + k*p + l) +\
' }\n'
' }\n'
' }\n'
'}'
)
c = ccode(b[j, k, l]*a[i, j, k, l], assign_to=y[i])
assert c == s |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_ccode_loops_addfactor():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
c = IndexedBase('c')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' for (int k=0; k<o; k++){\n'
' for (int l=0; l<p; l++){\n'
' y[i] = (a[%s] + b[%s])*c[%s] + y[i];\n' % (i*n*o*p + j*o*p + k*p + l, i*n*o*p + j*o*p + k*p + l, j*o*p + k*p + l) +\
' }\n'
' }\n'
' }\n'
'}'
)
c = ccode((a[i, j, k, l] + b[i, j, k, l])*c[j, k, l], assign_to=y[i])
assert c == s |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = SqlVirtualMachineManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.availability_group_listeners = AvailabilityGroupListenersOperations(self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.sql_virtual_machine_groups = SqlVirtualMachineGroupsOperations(self._client, self._config, self._serialize, self._deserialize)
self.sql_virtual_machines = SqlVirtualMachinesOperations(self._client, self._config, self._serialize, self._deserialize) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _send_request(
self,
request: HttpRequest,
**kwargs: Any
) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | async def close(self) -> None:
await self._client.close() |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | async def __aenter__(self) -> "SqlVirtualMachineManagementClient":
await self._client.__aenter__()
return self |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def build_get_request(
resource_group_name: str,
managed_instance_name: str,
database_name: str,
query_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"managedInstanceName": _SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
"databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
"queryId": _SERIALIZER.url("query_id", query_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def index():
return render_template('index.html'), 200 |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def build_list_by_query_request(
resource_group_name: str,
managed_instance_name: str,
database_name: str,
query_id: str,
subscription_id: str,
*,
start_time: Optional[str] = None,
end_time: Optional[str] = None,
interval: Optional[Union[str, "_models.QueryTimeGrainType"]] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}/statistics')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"managedInstanceName": _SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
"databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
"queryId": _SERIALIZER.url("query_id", query_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if start_time is not None:
query_parameters['startTime'] = _SERIALIZER.query("start_time", start_time, 'str')
if end_time is not None:
query_parameters['endTime'] = _SERIALIZER.query("end_time", end_time, 'str')
if interval is not None:
query_parameters['interval'] = _SERIALIZER.query("interval", interval, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def app_settings():
app_settings = {'GRAPHITE_HOST': settings.GRAPHITE_HOST,
'OCULUS_HOST': settings.OCULUS_HOST,
'FULL_NAMESPACE': settings.FULL_NAMESPACE,
}
resp = json.dumps(app_settings)
return resp, 200 |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def data():
metric = request.args.get('metric', None)
try:
raw_series = REDIS_CONN.get(metric)
if not raw_series:
resp = json.dumps({'results': 'Error: No metric by that name'})
return resp, 404
else:
unpacker = Unpacker(use_list = False)
unpacker.feed(raw_series)
timeseries = [item[:2] for item in unpacker]
resp = json.dumps({'results': timeseries})
return resp, 200
except Exception as e:
error = "Error: " + e
resp = json.dumps({'results': error})
return resp, 500 |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def get(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
query_id: str,
**kwargs: Any
) -> "_models.ManagedInstanceQuery":
"""Get query by query id.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param managed_instance_name: The name of the managed instance.
:type managed_instance_name: str
:param database_name: The name of the database.
:type database_name: str
:param query_id:
:type query_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedInstanceQuery, or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.ManagedInstanceQuery
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedInstanceQuery"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {})) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = settings.LOG_PATH + '/webapp.log'
self.stderr_path = settings.LOG_PATH + '/webapp.log'
self.pidfile_path = settings.PID_PATH + '/webapp.pid'
self.pidfile_timeout = 5 |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def prepare_request(next_link=None):
if not next_link: |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def run(self):
logger.info('starting webapp')
logger.info('hosted at %s' % settings.WEBAPP_IP)
logger.info('running on port %d' % settings.WEBAPP_PORT)
app.run(settings.WEBAPP_IP, settings.WEBAPP_PORT) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def extract_data(pipeline_response):
deserialized = self._deserialize("ManagedInstanceQueryStatistics", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def idfun(x): return x |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_supertype(self):
self.assert_(isinstance(None, NullType))
self.assert_(isinstance(Optional('a'), NullType))
self.assert_(isinstance(NotPassed, NotPassedType))
self.assert_(isinstance(NotPassed, NullType)) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | async def _post_initial(
self,
body: "_models.CalculateExchangeRequest",
**kwargs: Any
) -> Optional["_models.CalculateExchangeOperationResultResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.CalculateExchangeOperationResultResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-10-01-preview"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._post_initial.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'CalculateExchangeRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CalculateExchangeOperationResultResponse', pipeline_response)
if response.status_code == 202:
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def myfunc(first, second=None, third=Optional(5), fourth=Optional(execute=list)):
#Equivalent: second = deoption(second, 5)
if isinstance(second, type(None)):
second = 5 |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def get_long_running_output(pipeline_response):
deserialized = self._deserialize('CalculateExchangeOperationResultResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def _option_suite(self, value):
opt = Optional(value)
self.assert_(isinstance(opt, Optional))
self.assert_(isinstance(deoption(opt), type(value)))
self.assertEqual(deoption(opt), value) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_optional(self):
self._option_suite('a')
self._option_suite(5) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_execute(self):
opt = Optional(None, execute=dict)
self.assertEqual(deoption(opt), {})
self.assertEqual(deoption(opt, execute=dict), {})
self.assertEqual(deoption(None, execute=dict), {}) |
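Taken together, the assertions in these tests imply the following Optional/deoption behaviour (a sketch inferred from the tests, not from the library's documentation):
deoption(Optional(5))                    # -> 5   unwraps the wrapped default
deoption(None, execute=dict)             # -> {}  None plus a factory yields a fresh value
deoption(Optional(None, execute=dict))   # -> {}  the factory travels with the Optional
Optional()                               # raises TypeError: needs a default or an execute factory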
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_optional_arguments(self):
self.assertEqual(self.myfunc('a'), self.expected)
self.assertEqual(self.myfunc('a', 5), self.expected)
self.assertEqual(self.myfunc('a', second=5), self.expected)
self.assertEqual(self.myfunc('a', 5, 5), self.expected)
self.assertEqual(self.myfunc('a', fourth=[]), self.expected) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_edges(self):
self.assertEqual(self.myfunc('a', third=None), self.expected) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_exceptions(self):
self.assert_(issubclass(DeoptionError, TypeError))
self.assertRaises(TypeError,
lambda: Optional()
)
self.assertRaises(TypeError,
lambda: Optional(NotPassed, NotPassed)
) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def list_query_results_for_management_group(
self,
management_group_name: str,
query_options: Optional["_models.QueryOptions"] = None,
**kwargs: Any
) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]:
"""Queries policy tracked resources under the management group.
:param management_group_name: Management group name.
:type management_group_name: str
:param query_options: Parameter group.
:type query_options: ~azure.mgmt.policyinsights.models.QueryOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {})) |
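The call returns an AsyncItemPaged, so results are consumed with async iteration; a sketch assuming the client exposes this operations group as policy_tracked_resources (the attribute name is an assumption, not taken from this excerpt):
# hypothetical consumption of the paged results
async for tracked in client.policy_tracked_resources.list_query_results_for_management_group(
        management_group_name="my-management-group"):
    print(tracked)  # items come from PolicyTrackedResourcesQueryResults.value, page by page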
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_management_group.metadata['url'] # type: ignore
path_format_arguments = {
'managementGroupsNamespace': self._serialize.url("management_groups_namespace", management_groups_namespace, 'str'),
'managementGroupName': self._serialize.url("management_group_name", management_group_name, 'str'),
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | async def extract_data(pipeline_response):
deserialized = self._deserialize('PolicyTrackedResourcesQueryResults', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.QueryFailure, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def list_query_results_for_subscription(
self,
query_options: Optional["_models.QueryOptions"] = None,
**kwargs: Any
) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]:
"""Queries policy tracked resources under the subscription.
:param query_options: Parameter group.
:type query_options: ~azure.mgmt.policyinsights.models.QueryOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {})) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def list_query_results_for_resource_group(
self,
resource_group_name: str,
query_options: Optional["_models.QueryOptions"] = None,
**kwargs: Any
) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]:
"""Queries policy tracked resources under the resource group.
:param resource_group_name: Resource group name.
:type resource_group_name: str
:param query_options: Parameter group.
:type query_options: ~azure.mgmt.policyinsights.models.QueryOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {})) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | async def extract_data(pipeline_response):
deserialized = self._deserialize('PolicyTrackedResourcesQueryResults', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.QueryFailure, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response |
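For orientation (hedged): prepare_request, extract_data and get_next are closures that the generated method typically hands to azure.core's pager rather than exposing to callers. The toy below illustrates that same protocol with stand-in callables so the control flow is visible; the fake pages and function names are assumptions, not SDK code.

import asyncio
from azure.core.async_paging import AsyncItemPaged, AsyncList

# Two fake "pages" keyed by continuation token (None means the first request).
PAGES = {None: (["a", "b"], "page-2"), "page-2": (["c"], None)}

async def fake_get_next(continuation_token=None):
    # Stand-in for the HTTP round trip performed by the real get_next above.
    return PAGES[continuation_token]

async def fake_extract_data(page):
    items, next_link = page
    # Same shape the real extract_data returns: (next_link, AsyncList(items)).
    return next_link, AsyncList(items)

async def demo():
    async for item in AsyncItemPaged(fake_get_next, fake_extract_data):
        print(item)  # a, b, c

asyncio.run(demo())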
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def list_query_results_for_resource(
self,
resource_id: str,
query_options: Optional["_models.QueryOptions"] = None,
**kwargs: Any
) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]:
"""Queries policy tracked resources under the resource.
:param resource_id: Resource ID.
:type resource_id: str
:param query_options: Parameter group.
:type query_options: ~azure.mgmt.policyinsights.models.QueryOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {})) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_query_results_for_resource.metadata['url'] # type: ignore
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if _top is not None:
query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0)
if _filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def create_channel(
cls,
host: str = "appengine.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this

service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
) |
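In practice the channel created above usually backs the generated Instances transport indirectly; a hedged sketch of the higher-level path, assuming the google-cloud-appengine-admin package (its module, class, and resource-name format are assumptions here):

from google.cloud import appengine_admin_v1

def list_version_instances(app_id: str, service: str, version: str) -> None:
    # Hedged sketch; relies on Application Default Credentials, mirroring
    # create_channel's fallback to the environment when no credentials are passed.
    client = appengine_admin_v1.InstancesClient()
    request = appengine_admin_v1.ListInstancesRequest(
        parent=f"apps/{app_id}/services/{service}/versions/{version}"
    )
    # The pager transparently walks every ListInstancesResponse page.
    for inst in client.list_instances(request=request):
        print(inst.name)

list_version_instances("<app-id>", "default", "<version-id>")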
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def operations_client(self) -> operations_v1.OperationsClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
return self._operations_client |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def list_instances(
self,
) -> Callable[[appengine.ListInstancesRequest], appengine.ListInstancesResponse]:
r"""Return a callable for the list instances method over gRPC.
Lists the instances of a version.
Tip: To aggregate details about instances over time, see the
`Stackdriver Monitoring
API <https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list>`__.
Returns:
Callable[[~.ListInstancesRequest],
~.ListInstancesResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_instances" not in self._stubs:
self._stubs["list_instances"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/ListInstances",
request_serializer=appengine.ListInstancesRequest.serialize,
response_deserializer=appengine.ListInstancesResponse.deserialize,
)
return self._stubs["list_instances"] |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def get_instance(
self,
) -> Callable[[appengine.GetInstanceRequest], instance.Instance]:
r"""Return a callable for the get instance method over gRPC.
Gets instance information.
Returns:
Callable[[~.GetInstanceRequest],
~.Instance]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_instance" not in self._stubs:
self._stubs["get_instance"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/GetInstance",
request_serializer=appengine.GetInstanceRequest.serialize,
response_deserializer=instance.Instance.deserialize,
)
return self._stubs["get_instance"] |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def delete_instance(
self,
) -> Callable[[appengine.DeleteInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the delete instance method over gRPC.
Stops a running instance.
The instance might be automatically recreated based on the
scaling settings of the version. For more information, see "How
Instances are Managed" (`standard
environment <https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed>`__
\| `flexible
environment <https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed>`__).
To ensure that instances are not re-created and avoid getting
billed, you can stop all instances within the target version by
changing the serving status of the version to ``STOPPED`` with
the
```apps.services.versions.patch`` <https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch>`__
method.
Returns:
Callable[[~.DeleteInstanceRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_instance" not in self._stubs:
self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/DeleteInstance",
request_serializer=appengine.DeleteInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_instance"] |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def debug_instance(
self,
) -> Callable[[appengine.DebugInstanceRequest], operations_pb2.Operation]:
r"""Return a callable for the debug instance method over gRPC.
Enables debugging on a VM instance. This allows you
to use the SSH command to connect to the virtual machine
where the instance lives. While in "debug mode", the
instance continues to serve live traffic. You should
delete the instance when you are done debugging and then
allow the system to take over and determine if another
instance should be started.
Only applicable for instances in App Engine flexible
environment.
Returns:
Callable[[~.DebugInstanceRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "debug_instance" not in self._stubs:
self._stubs["debug_instance"] = self.grpc_channel.unary_unary(
"/google.appengine.v1.Instances/DebugInstance",
request_serializer=appengine.DebugInstanceRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["debug_instance"] |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def close(self):
self.grpc_channel.close() |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def reset(cls):
cls.info = [
[
"Keyboard Control:",
" auto repeat: on key click percent: 0 LED mask: 00000002",
" XKB indicators:",
" 00: Caps Lock: off 01: Num Lock: on 02: Scroll Lock: off",
" 03: Compose: off 04: Kana: off 05: Sleep: off",
],
[
"Keyboard Control:",
" auto repeat: on key click percent: 0 LED mask: 00000002",
" XKB indicators:",
" 00: Caps Lock: on 01: Num Lock: on 02: Scroll Lock: off",
" 03: Compose: off 04: Kana: off 05: Sleep: off",
],
]
cls.index = 0
cls.is_error = False |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def call_process(cls, cmd):
if cls.is_error:
raise subprocess.CalledProcessError(-1, cmd=cmd, output="Couldn't call xset.")
if cmd[1:] == ["q"]:
track = cls.info[cls.index]
output = "\n".join(track)
return output |
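The mock above replays canned `xset q` output; as a hedged illustration (not libqtile's actual parser), the Caps/Num Lock states can be pulled out of that text with a small regex:

import re

def parse_xset_q(output: str) -> dict:
    # Finds e.g. "Caps Lock:   off" and "Num Lock:    on" in `xset q` output.
    return {name: state for name, state in
            re.findall(r"(Caps Lock|Num Lock):\s+(on|off)", output)}

sample = (
    "Keyboard Control:\n"
    "  auto repeat:  on    key click percent:  0    LED mask:  00000002\n"
    "XKB indicators:\n"
    "  00: Caps Lock:   off    01: Num Lock:    on     02: Scroll Lock: off\n"
)
print(parse_xset_q(sample))  # {'Caps Lock': 'off', 'Num Lock': 'on'}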
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def patched_cnli(monkeypatch):
MockCapsNumLockIndicator.reset()
monkeypatch.setattr(
"libqtile.widget.caps_num_lock_indicator.subprocess", MockCapsNumLockIndicator
)
monkeypatch.setattr(
"libqtile.widget.caps_num_lock_indicator.subprocess.CalledProcessError",
subprocess.CalledProcessError,
)
monkeypatch.setattr(
"libqtile.widget.caps_num_lock_indicator.base.ThreadPoolText.call_process",
MockCapsNumLockIndicator.call_process,
)
return caps_num_lock_indicator |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_cnli(fake_qtile, patched_cnli, fake_window):
widget = patched_cnli.CapsNumLockIndicator()
fakebar = FakeBar([widget], window=fake_window)
widget._configure(fake_qtile, fakebar)
text = widget.poll()
assert text == "Caps off Num on" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def test_cnli_caps_on(fake_qtile, patched_cnli, fake_window):
widget = patched_cnli.CapsNumLockIndicator()
# Simulate Caps on
MockCapsNumLockIndicator.index = 1
fakebar = FakeBar([widget], window=fake_window)
widget._configure(fake_qtile, fakebar)
text = widget.poll()
assert text == "Caps on Num on" |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def draw_axis(img, charuco_corners, charuco_ids, board):
vecs = np.load("./calib.npz") # I already calibrated the camera
mtx, dist, _, _ = [vecs[i] for i in ('mtx', 'dist', 'rvecs', 'tvecs')]
ret, rvec, tvec = cv2.aruco.estimatePoseCharucoBoard(
charuco_corners, charuco_ids, board, mtx, dist)
    if ret:
cv2.aruco.drawAxis(img, mtx, dist, rvec, tvec, 0.1) |
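draw_axis expects ./calib.npz to contain arrays under the keys mtx, dist, rvecs and tvecs; a hedged sketch of writing such a file (placeholder values stand in for a real cv2.calibrateCamera run):

import numpy as np

# Placeholder arrays; a real calibration would use cv2.calibrateCamera's outputs.
mtx = np.eye(3)
dist = np.zeros(5)
rvecs = np.zeros((1, 3))
tvecs = np.zeros((1, 3))
np.savez("./calib.npz", mtx=mtx, dist=dist, rvecs=rvecs, tvecs=tvecs)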
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def get_image(camera):
ret, img = camera.read()
return img |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def make_grayscale(img):
ret = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return ret |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def main():
camera = cv2.VideoCapture(0)
    img = get_image(camera)
    # Assumed setup: the original defines the marker dictionary and calibration
    # elsewhere; the dictionary choice below is a placeholder, and the calibration
    # is reloaded from the same calib.npz that draw_axis uses.
    aruco_dict = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
    vecs = np.load("./calib.npz")
    mtx, dist = vecs['mtx'], vecs['dist']
while True:
cv2.imshow('calibration', img)
cv2.waitKey(10)
img = get_image(camera)
gray = make_grayscale(img)
        # detectMarkers only needs the image and dictionary; the extra corners/ids
        # arguments were undefined on the first pass and raised a NameError.
        corners, ids, rejected = cv2.aruco.detectMarkers(gray, aruco_dict)
cv2.aruco.drawDetectedMarkers(img, corners, ids)
if ids is not None and corners is not None \
and len(ids) > 0 and len(ids) == len(corners):
diamond_corners, diamond_ids = \
cv2.aruco.detectCharucoDiamond(img, corners, ids,
0.05 / 0.03, cameraMatrix=mtx,
distCoeffs=dist)
cv2.aruco.drawDetectedDiamonds(img, diamond_corners, diamond_ids)
'''if diamond_ids is not None and len(diamond_ids) >= 4:
break'''
board = cv2.aruco.CharucoBoard_create(9, 6, 0.05, 0.03,
aruco_dict)
if diamond_corners is not None and diamond_ids is not None \
and len(diamond_corners) == len(diamond_ids):
count, char_corners, char_ids = \
cv2.aruco.interpolateCornersCharuco(diamond_corners,
diamond_ids, gray,
board)
if count >= 3:
draw_axis(img, char_corners, char_ids, board) |
def setUp(self):
itau_data = get_itau_data_from_file()
self.header_arquivo = itau_data['header_arquivo']
self.seg_p = itau_data['seg_p1']
self.seg_p_str = itau_data['seg_p1_str']
self.seg_q = itau_data['seg_q1']
self.seg_q_str = itau_data['seg_q1_str'] | def process_weight(sym, arg, aux):
for stride in RpnParam.anchor_generate.stride:
add_anchor_to_arg(
sym, arg, aux, RpnParam.anchor_generate.max_side,
stride, RpnParam.anchor_generate.scale,
RpnParam.anchor_generate.ratio) |