| text (string, lengths 4 to 1.02M) | meta (dict) |
|---|---|
from pokemon_catch_worker import PokemonCatchWorker
from seen_fort_worker import SeenFortWorker
from move_to_fort_worker import MoveToFortWorker
from initial_transfer_worker import InitialTransferWorker
from evolve_all_worker import EvolveAllWorker
|
{
"content_hash": "8225efe6cb985b653a62350452bcc637",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 57,
"avg_line_length": 49.8,
"alnum_prop": 0.8755020080321285,
"repo_name": "chadsaun/PokemonGo-Bot",
"id": "8f6653421c5f099cec07b92890c433f4224826f2",
"size": "274",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "pokemongo_bot/cell_workers/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Protocol Buffer",
"bytes": "43769"
},
{
"name": "Python",
"bytes": "99887"
},
{
"name": "Shell",
"bytes": "778"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
setup(
name='D-Link DDNS IP-Updater',
version='1.0',
packages=find_packages(),
url='https://github.com/philip-raschke/D-Link-DDNS-IP-Updater',
license='MIT',
author='Philip Raschke',
author_email='philip@raschke.cc',
description='A small script to update the public IP address to the D-Link DDNS web service.',
install_requires=['requests[security]', 'BeautifulSoup']
)
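# Note (not part of the original file): 'requests[security]' is a setuptools
# "extra" that, on older Python versions, additionally pulls in pyOpenSSL and
# related packages for better TLS/SNI support.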
|
{
"content_hash": "44171455909c28241132c7747bb62936",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 97,
"avg_line_length": 34.07142857142857,
"alnum_prop": 0.7044025157232704,
"repo_name": "philip-raschke/D-Link-DDNS-IP-Updater",
"id": "8dd1a4e7da1aa278d8c64a914f47d8a5f635f15b",
"size": "477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5027"
}
],
"symlink_target": ""
}
|
"""stringOps.py - Various String manipulation operations
by Giovanni Prinzivalli """
def fizz_buzz(length):
"""Prints all numbers between 1 and length, replacing multiples
of 3 with Fizz, multiples of 5 with Buzz, and multiples of 15 with FizzBuzz"""
arr = []
for i in range(1, length + 1):
if not i % 15:
arr.append("FizzBuzz")
elif not i % 3:
arr.append("Fizz")
elif not i % 5:
arr.append("Buzz")
else:
arr.append(i)
return arr
def reverse_string(text):
"""Reverses a string. Not rocket surgery."""
return text[::-1]
def pig_latin(text):
"""Later."""
pass
def count_vowels(text):
"""Counts the number of vowels used in a string. Reports by vowel."""
pass
def is_palindrome(text):
"""Checks if a string reads the same both ways"""
return text.lower() == text[::-1].lower()
def count_words(text):
"""Counts the number of words in a string. For added complexity, give a summary and read from a file."""
pass
if __name__ == "__main__":
print "Fizzbuzz list..."
print fizz_buzz(100)
print "String reverser"
print reverse_string("Hello!")
print "Pig latin converter..."
print "Psyche. Implementing later."
print "Palindrome checking..."
print is_palindrome("aha")
print is_palindrome("racecar")
print is_palindrome("aHA")
print is_palindrome("Racecar")
print is_palindrome("Magic schoolbus")
|
{
"content_hash": "72976f2d8df5ff82e98a7c05a8496bee",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 108,
"avg_line_length": 27.62962962962963,
"alnum_prop": 0.6166219839142091,
"repo_name": "Gprinziv/Projects",
"id": "f6fae94076c0c973b1ad9a6ddc5b28835326697b",
"size": "1492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/Text/stringOps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14429"
}
],
"symlink_target": ""
}
|
import logging
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError
_logger = logging.getLogger(__name__)
class Lead(models.Model):
_inherit = "crm.lead"
def _default_website_id(self):
return self.env.context.get("website_id") or self.env.user.backend_website_id.id
website_id = fields.Many2one(
"website", string="From Website", default=_default_website_id
)
company_id = fields.Many2one(
default=lambda self: self.env["res.company"]._company_default_get()
)
    @api.onchange("company_id")
    def _onchange_company_id(self):
        if self.company_id:
            return {
                "domain": {
                    "website_id": [("company_id", "in", self.company_id.ids)]
                }
            }
        return {"domain": {"website_id": []}}
@api.constrains("company_id", "website_id")
def _check_website_in_company(self):
for record in self:
if (
record.company_id
and record.website_id
and not record.website_id.company_id == record.company_id
):
raise ValidationError(_("Error! Website and Company are mismatched"))
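# For illustration (hypothetical values): with a company whose id is 1 set on
# the lead, _onchange_company_id returns
#     {"domain": {"website_id": [("company_id", "in", [1])]}}
# so the website dropdown only offers websites belonging to that company.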
|
{
"content_hash": "f2b35d01162ed0fff151c1613ae10c42",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 88,
"avg_line_length": 31.18421052631579,
"alnum_prop": 0.5949367088607594,
"repo_name": "it-projects-llc/website-addons",
"id": "e32c54bc5bb5a022ad654690a924860cc9dcae7f",
"size": "1314",
"binary": false,
"copies": "1",
"ref": "refs/heads/13.0",
"path": "website_multi_company_crm/models/crm_lead.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "38675"
},
{
"name": "HTML",
"bytes": "139628"
},
{
"name": "JavaScript",
"bytes": "355569"
},
{
"name": "Python",
"bytes": "223394"
}
],
"symlink_target": ""
}
|
from __future__ import division
from base64 import b16encode, b16decode
import copy
from itertools import izip
import time
from vistrails.core.modules.vistrails_module import Module, InvalidOutput, \
ModuleError, ModuleConnector, ModuleSuspended, ModuleWasSuspended
from vistrails.core.utils import xor, long2bytes
from fold import create_constant
try:
import hashlib
sha1_hash = hashlib.sha1
except ImportError:
import sha
sha1_hash = sha.new
class While(Module):
"""
The While Module runs a module over and over until the condition port
is false. Then, it returns the result.
"""
def update_upstream(self):
"""A modified version of the update_upstream method."""
# everything is the same except that we don't update the module on
# FunctionPort
suspended = []
was_suspended = None
for port_name, connector_list in self.inputPorts.iteritems():
if port_name == 'FunctionPort':
for connector in connector_list:
try:
connector.obj.update_upstream()
except ModuleWasSuspended, e:
was_suspended = e
except ModuleSuspended, e:
suspended.append(e)
else:
for connector in connector_list:
try:
connector.obj.update()
except ModuleWasSuspended, e:
was_suspended = e
except ModuleSuspended, e:
suspended.append(e)
if len(suspended) == 1:
raise suspended[0]
elif suspended:
raise ModuleSuspended(
self,
"multiple suspended upstream modules",
children=suspended)
elif was_suspended is not None:
raise was_suspended
for port_name, connectorList in list(self.inputPorts.items()):
if port_name != 'FunctionPort':
for connector in connectorList:
if connector.obj.get_output(connector.port) is \
InvalidOutput: # pragma: no cover
self.remove_input_connector(port_name, connector)
def compute(self):
name_output = self.get_input('OutputPort')
name_condition = self.force_get_input('ConditionPort')
name_state_input = self.force_get_input('StateInputPorts')
name_state_output = self.force_get_input('StateOutputPorts')
max_iterations = self.get_input('MaxIterations')
delay = self.force_get_input('Delay')
if (name_condition is None and
not self.has_input('MaxIterations')):
raise ModuleError(self,
"Please set MaxIterations or use ConditionPort")
if name_state_input or name_state_output:
if not name_state_input or not name_state_output:
raise ModuleError(self,
"Passing state between iterations requires "
"BOTH StateInputPorts and StateOutputPorts "
"to be set")
if len(name_state_input) != len(name_state_output):
raise ModuleError(self,
"StateInputPorts and StateOutputPorts need "
"to have the same number of ports "
"(got %d and %d)" % (len(name_state_input),
len(name_state_output)))
connectors = self.inputPorts.get('FunctionPort')
if len(connectors) != 1:
raise ModuleError(self,
"Multiple modules connected on FunctionPort")
module = copy.copy(connectors[0].obj)
state = None
loop = self.logging.begin_loop_execution(self, max_iterations)
for i in xrange(max_iterations):
if not self.upToDate:
module.upToDate = False
module.computed = False
# Set state on input ports
if i > 0 and name_state_input:
for value, input_port, output_port \
in izip(state, name_state_input, name_state_output):
if input_port in module.inputPorts:
del module.inputPorts[input_port]
new_connector = ModuleConnector(
create_constant(value), 'value',
module.output_specs.get(output_port, None))
module.set_input_port(input_port, new_connector)
# Affix a fake signature on the module
inputPort_hash = sha1_hash()
inputPort_hash.update(input_port)
module.signature = b16encode(xor(
b16decode(self.signature.upper()),
inputPort_hash.digest()))
loop.begin_iteration(module, i)
module.update() # might raise ModuleError, ModuleSuspended,
# ModuleHadError, ModuleWasSuspended
loop.end_iteration(module)
if name_condition is not None:
if name_condition not in module.outputPorts:
raise ModuleError(
module,
"Invalid output port: %s" % name_condition)
if not module.get_output(name_condition):
break
if delay and i+1 != max_iterations:
time.sleep(delay)
# Get state on output ports
if name_state_output:
state = [module.get_output(port) for port in name_state_output]
self.logging.update_progress(self, i * 1.0 / max_iterations)
loop.end_loop_execution()
if name_output not in module.outputPorts:
raise ModuleError(module,
"Invalid output port: %s" % name_output)
result = module.get_output(name_output)
self.set_output('Result', result)
class For(Module):
"""
The For Module runs a module with input from a range.
"""
def update_upstream(self):
"""A modified version of the update_upstream method."""
# everything is the same except that we don't update the module on
# FunctionPort
suspended = []
was_suspended = None
for port_name, connector_list in self.inputPorts.iteritems():
if port_name == 'FunctionPort':
for connector in connector_list:
try:
connector.obj.update_upstream()
except ModuleWasSuspended, e:
was_suspended = e
except ModuleSuspended, e:
suspended.append(e)
else:
for connector in connector_list:
try:
connector.obj.update()
except ModuleWasSuspended, e:
was_suspended = e
except ModuleSuspended, e:
suspended.append(e)
if len(suspended) == 1:
raise suspended[0]
elif suspended:
raise ModuleSuspended(
self,
"multiple suspended upstream modules",
children=suspended)
elif was_suspended is not None:
raise was_suspended
for port_name, connectorList in list(self.inputPorts.items()):
if port_name != 'FunctionPort':
for connector in connectorList:
if connector.obj.get_output(connector.port) is \
InvalidOutput: # pragma: no cover
                        self.remove_input_connector(port_name, connector)
def compute(self):
name_output = self.get_input('OutputPort') # or 'self'
name_input = self.force_get_input('InputPort') # or None
lower_bound = self.get_input('LowerBound') # or 0
higher_bound = self.get_input('HigherBound') # required
connectors = self.inputPorts.get('FunctionPort')
if len(connectors) != 1:
raise ModuleError(self,
"Multiple modules connected on FunctionPort")
outputs = []
suspended = []
loop = self.logging.begin_loop_execution(self,
higher_bound - lower_bound)
for i in xrange(lower_bound, higher_bound):
module = copy.copy(connectors[0].obj)
if not self.upToDate:
module.upToDate = False
module.computed = False
# Pass iteration number on input port
if name_input is not None:
if name_input in module.inputPorts:
del module.inputPorts[name_input]
new_connector = ModuleConnector(create_constant(i),
'value')
module.set_input_port(name_input, new_connector)
# Affix a fake signature on the module
inputPort_hash = sha1_hash()
inputPort_hash.update(name_input)
module.signature = b16encode(xor(
b16decode(self.signature.upper()),
long2bytes(i, 20),
inputPort_hash.digest()))
loop.begin_iteration(module, i)
try:
module.update()
except ModuleSuspended, e:
suspended.append(e)
loop.end_iteration(module)
continue
loop.end_iteration(module)
if name_output not in module.outputPorts:
raise ModuleError(module,
"Invalid output port: %s" % name_output)
outputs.append(module.get_output(name_output))
if suspended:
raise ModuleSuspended(
self,
"function module suspended in %d/%d iterations" % (
len(suspended), higher_bound - lower_bound),
children=suspended)
loop.end_loop_execution()
self.set_output('Result', outputs)
###############################################################################
import unittest
class TestWhile(unittest.TestCase):
def test_pythonsource(self):
import urllib2
source = ('o = i * 2\n'
"r = \"it's %d!!!\" % o\n"
'go_on = o < 100')
source = urllib2.quote(source)
from vistrails.tests.utils import execute, intercept_result
with intercept_result(While, 'Result') as results:
self.assertFalse(execute([
('PythonSource', 'org.vistrails.vistrails.basic', [
('source', [('String', source)]),
('i', [('Integer', '5')]),
]),
('While', 'org.vistrails.vistrails.control_flow', [
('ConditionPort', [('String', 'go_on')]),
('OutputPort', [('String', 'r')]),
('StateInputPorts', [('List', "['i']")]),
('StateOutputPorts', [('List', "['o']")]),
]),
],
[
(0, 'self', 1, 'FunctionPort'),
],
add_port_specs=[
(0, 'input', 'i',
'org.vistrails.vistrails.basic:Integer'),
(0, 'output', 'o',
'org.vistrails.vistrails.basic:Integer'),
(0, 'output', 'r',
'org.vistrails.vistrails.basic:String'),
(0, 'output', 'go_on',
'org.vistrails.vistrails.basic:Boolean'),
]))
self.assertEqual(results, ["it's 160!!!"])
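# A hypothetical For pipeline, mirroring the While test above (module and
# port names are taken from For.compute; the package id is assumed to match):
#     ('For', 'org.vistrails.vistrails.control_flow', [
#         ('LowerBound', [('Integer', '0')]),
#         ('HigherBound', [('Integer', '3')]),
#         ('InputPort', [('String', 'i')]),
#         ('OutputPort', [('String', 'o')]),
#     ])
# Each iteration copies the module on FunctionPort, feeds the loop index into
# 'i', and the 'Result' output collects the per-iteration values of 'o'.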
|
{
"content_hash": "9c1c2e8ffc5ea4207b3394a1023f3064",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 86,
"avg_line_length": 40.59405940594059,
"alnum_prop": 0.49902439024390244,
"repo_name": "hjanime/VisTrails",
"id": "27b7b2e237581ff9c0eb063272e4b84a268cb97f",
"size": "14213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vistrails/packages/controlflow/looping.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1421"
},
{
"name": "Inno Setup",
"bytes": "19550"
},
{
"name": "Makefile",
"bytes": "768"
},
{
"name": "Mako",
"bytes": "66613"
},
{
"name": "PHP",
"bytes": "49302"
},
{
"name": "Python",
"bytes": "19803915"
},
{
"name": "R",
"bytes": "782836"
},
{
"name": "Ruby",
"bytes": "875"
},
{
"name": "Shell",
"bytes": "35024"
},
{
"name": "TeX",
"bytes": "145333"
},
{
"name": "XSLT",
"bytes": "1090"
}
],
"symlink_target": ""
}
|
import platform
if platform.system() != 'Windows':
import PySide2.QtSvg # Must import this before fast due to conflicting symbols
import fast # Must import FAST before rest of pyside2
from PySide2.QtWidgets import *
from PySide2.QtCore import Qt
from shiboken2 import wrapInstance
from random import random
fast.downloadTestDataIfNotExists()
# Create FAST Pipeline and window
importer = fast.ImageFileImporter\
.create(fast.Config.getTestDataPath() + 'CT/CT-Abdomen.mhd')
smoothing = fast.GaussianSmoothing\
.create(stdDev=1.0)\
.connect(importer)
surfaceExtraction = fast.SurfaceExtraction\
.create(threshold=300)\
.connect(smoothing)
renderer = fast.TriangleRenderer.create()\
.connect(surfaceExtraction)
window = fast.SimpleWindow3D.create(width=1024, height=512)\
.connect(renderer)
# Get the underlying QtWidget of the FAST window and convert it to pyside2
mainWidget = wrapInstance(int(window.getWidget()), QWidget)
# Create GUI in Qt
layout = mainWidget.layout()
menuWidget = QWidget()
layout.addWidget(menuWidget)
menuLayout = QVBoxLayout()
menuWidget.setLayout(menuLayout)
menuLayout.setAlignment(Qt.AlignTop)
title = QLabel('<h3>Python GUI Example</h3>')
menuWidget.setFixedWidth(400)
menuLayout.addWidget(title)
# Threshold GUI
menuLayout.addWidget(QLabel('Threshold:'))
threshold_slider = QSlider(Qt.Horizontal)
threshold_slider.setRange(100, 500)
threshold_slider.setValue(300)
threshold_slider.setSingleStep(10)
# Connect slider to FAST
threshold_slider.valueChanged.connect(lambda x: surfaceExtraction.setThreshold(x))
menuLayout.addWidget(threshold_slider)
# Smoothing GUI
menuLayout.addWidget(QLabel('Smoothing:'))
smoothing_slider = QSlider(Qt.Horizontal)
smoothing_slider.setRange(1, 3)
smoothing_slider.setValue(1)
smoothing_slider.setSingleStep(1)
# Connect slider to FAST
smoothing_slider.valueChanged.connect(lambda x: smoothing.setStandardDeviation(x))
menuLayout.addWidget(smoothing_slider)
# Run everything!
window.run()
|
{
"content_hash": "b16fe712b013c66ea80b02734f7464eb",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 82,
"avg_line_length": 30.484848484848484,
"alnum_prop": 0.7818091451292246,
"repo_name": "smistad/FAST",
"id": "828cfe6c63cca97faa79ddd411f148021f332955",
"size": "2569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/FAST/Examples/Python/surface_extraction_gui_with_pyside2.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "365756"
},
{
"name": "C++",
"bytes": "3111908"
},
{
"name": "CMake",
"bytes": "210413"
},
{
"name": "GLSL",
"bytes": "16291"
},
{
"name": "Python",
"bytes": "38196"
},
{
"name": "SWIG",
"bytes": "13791"
}
],
"symlink_target": ""
}
|
"""
Create an argparser from a Schema.
"""
import argparse
import logging
from functools import singledispatch
from ..config import Config
from ..schema_section import SchemaSection
from ..times import DateTime, Date, Time, TimeDelta
from ..toolbox.unrepr import unrepr
from ..utils import get_validator_argument
from .. import validator as _validator
from .undefined import UNDEFINED
__author__ = "Simone Campagna"
__copyright__ = 'Copyright (c) 2015 Simone Campagna'
__license__ = 'Apache License Version 2.0'
__all__ = [
'argument_name',
'argument_dest',
'update_argparser',
'create_argparser',
'read_namespace',
]
LOG = logging.getLogger(__name__)
def _make_fqname(fqname):
"""Make an fqname tuple"""
if isinstance(fqname, tuple):
return fqname
elif fqname is None:
return ()
elif isinstance(fqname, str):
return tuple(fqname.split("."))
else:
raise TypeError("invalid fqname {!r}".format(fqname))
def argument_name(fqname):
"""Returns the argument name"""
return "--" + "-".join(_make_fqname(fqname))
def argument_dest(fqname):
"""Returns the argument dest"""
return "_".join(_make_fqname(fqname))
def _argdata(validator, defaults, fqname):
"""Creates standard args, kwargs for the parser.add_argument method"""
fqname = _make_fqname(fqname)
argname = argument_name(fqname)
args = (argname,)
dest = argument_dest(fqname)
has_doc, doc = get_validator_argument(validator, "doc")
if not has_doc:
doc = "set option {!r}".format(dest)
kwargs = {
"dest": dest,
"help": doc,
"metavar": fqname[-1].upper(),
"default": UNDEFINED,
}
if defaults:
has_default, default = get_validator_argument(validator, "default")
if has_default:
kwargs["default"] = default
return args, kwargs
def boolean(value):
"""Converts a string to boolean"""
value = value.lower()
if value in {'on', 'true'}:
return True
elif value in {'off', 'false'}:
return False
else:
try:
return bool(int(value))
except ValueError:
raise ValueError("invalid bool value {!r}".format(value)) from None
class _ArgType(object):
"""Standard argument type based on validator"""
# pylint: disable=too-few-public-methods
def __init__(self, validator, name):
self._validator = validator
self._name = name
self._gdict = {
'DateTime': DateTime,
'Date': Date,
'Time': Time,
'TimeDelta': TimeDelta,
}
def __call__(self, string):
try:
value = unrepr(string, globals_d=self._gdict)
return self._validator.validate(name=self._name, value=value, defined=True)
        except Exception:
LOG.exception("argument parsing error:")
raise ValueError(string)
def __repr__(self): # pragma: no cover
return type(self._validator).__name__
@singledispatch
def _argtype(validator, name):
"""Returns the argparse argument type"""
return _ArgType(validator, name)
@_argtype.register(_validator.Str)
@_argtype.register(_validator.StrChoice)
def _(validator, name): # flake8: noqa
# pylint: disable=function-redefined
# pylint: disable=unused-argument
"""Overrides argtype for Str validator"""
return str
@_argtype.register(_validator.DateTimeOption)
def _(validator, name): # flake8: noqa
# pylint: disable=function-redefined
# pylint: disable=unused-argument
"""Overrides argtype for Str validator"""
return DateTime
@_argtype.register(_validator.DateOption)
def _(validator, name): # flake8: noqa
# pylint: disable=function-redefined
# pylint: disable=unused-argument
"""Overrides argtype for Str validator"""
return Date
@_argtype.register(_validator.TimeOption)
def _(validator, name): # flake8: noqa
# pylint: disable=function-redefined
# pylint: disable=unused-argument
"""Overrides argtype for Str validator"""
return Time
@_argtype.register(_validator.TimeDeltaOption)
def _(validator, name): # flake8: noqa
# pylint: disable=function-redefined
# pylint: disable=unused-argument
"""Overrides argtype for Str validator"""
return TimeDelta
@_argtype.register(_validator.Bool)
def _(validator, name): # flake8: noqa
# pylint: disable=function-redefined
# pylint: disable=unused-argument
"""Overrides argtype for Bool validator"""
return boolean
@singledispatch
def _add_argument(validator, argparser, defaults, fqname):
# pylint: disable=unused-argument
"""Adds argument(s) to argparser based on the validator"""
pass # raise TypeError("cannot compile validator {!r}".format(validator))
@_add_argument.register(_validator.Int)
@_add_argument.register(_validator.IntList)
@_add_argument.register(_validator.IntTuple)
@_add_argument.register(_validator.Float)
@_add_argument.register(_validator.FloatList)
@_add_argument.register(_validator.FloatTuple)
@_add_argument.register(_validator.Str)
@_add_argument.register(_validator.StrList)
@_add_argument.register(_validator.StrTuple)
@_add_argument.register(_validator.Bool)
@_add_argument.register(_validator.BoolList)
@_add_argument.register(_validator.BoolTuple)
@_add_argument.register(_validator.DateTimeOption)
@_add_argument.register(_validator.DateOption)
@_add_argument.register(_validator.TimeOption)
@_add_argument.register(_validator.TimeDeltaOption)
def _(validator, argparser, defaults, fqname): # flake8: noqa
# pylint: disable=function-redefined
"""Standard _add_argument function"""
args, kwargs = _argdata(validator, defaults, fqname)
kwargs["type"] = _argtype(validator, name=kwargs["dest"])
argparser.add_argument(
*args,
**kwargs)
@_add_argument.register(_validator.IntChoice)
@_add_argument.register(_validator.FloatChoice)
@_add_argument.register(_validator.StrChoice)
@_add_argument.register(_validator.BoolChoice)
def _(validator, argparser, defaults, fqname): # flake8: noqa
# pylint: disable=function-redefined
"""Overridden _add_argument function for *Choice"""
args, kwargs = _argdata(validator, defaults, fqname)
kwargs["choices"] = get_validator_argument(validator, "choices")[1]
kwargs["type"] = _argtype(validator, name=kwargs["dest"])
argparser.add_argument(
*args,
**kwargs)
def update_argparser(schema, argparser, *, defaults=True, prefix=None, group_depth=1):
"""Update an argparse parser based on a schema
Parameters
----------
schema: :class:`SchemaSection`
the schema instance
argparser: :class:`argparse.ArgumentParser`
the argument parser
defaults: bool, optional
if True, set the validator default (defaults to True)
prefix: str, optional
a prefix for options (defaults to None)
group_depth: int, optional
maximum depth for argument grouping (defaults to 1)
"""
if not isinstance(schema, SchemaSection):
raise TypeError("{!r}: not a schema".format(schema))
fqname = _make_fqname(prefix)
return _impl_update_argparser(
schema=schema,
argparser=argparser,
defaults=defaults,
fqname=fqname,
group_depth=group_depth)
def _impl_update_argparser(schema, argparser, fqname, *, defaults=True, group_depth=1, group=None):
"""Update an argparse parser based on a schema (implementation)
Parameters
----------
schema: :class:`SchemaSection`
the schema instance
argparser: :class:`argparse.ArgumentParser`
the argument parser
fqname: tuple
fully qualified name
defaults: bool, optional
if True, set the validator default (defaults to True)
group_depth: int, optional
maximum depth for argument grouping (defaults to 1)
group: object, optional
argument group to be used
"""
subgroup_depth = group_depth
if group_depth == 0 and group is None: # pragma: no cover
group = argparser
if group_depth > 0:
subgroup_depth -= 1
for key, value in schema.items():
if key in {SchemaSection.__default_option__, SchemaSection.__default_section__}:
continue
elif isinstance(value, SchemaSection):
if group is None or group_depth != 0:
group_name = ".".join(fqname + (key,))
subgroup = argparser.add_argument_group("configuration options for package {!r}".format(group_name))
else:
subgroup = group
_impl_update_argparser(
value,
argparser,
defaults=defaults,
fqname=fqname + (key,),
group_depth=subgroup_depth,
group=subgroup)
else:
if group is None:
argument_group = argparser
else:
argument_group = group
_add_argument(value, argparser=argument_group, defaults=defaults, fqname=fqname + (key,))
def create_argparser(schema, *, defaults=True, prefix=None, group_depth=1, **kwargs):
r"""Creates an argparse parser based on a schema
Parameters
----------
schema: :class:`Schema`
the schema instance or source
argparser: :class:`argparse.ArgumentParser`
the argument parser
defaults: bool, optional
if True, set the validator default (defaults to True)
group_depth: int, optional
maximum depth for argument grouping (defaults to 1)
prefix: str
a prefix for options
\*\*kwargs: dict, optional
passed to the :class:`argparse.ArgumentParser` constructor
Returns
-------
:class:`argparse.ArgumentParser`
the argument parser
"""
argparser = argparse.ArgumentParser(**kwargs)
update_argparser(schema=schema, argparser=argparser, defaults=defaults, prefix=prefix, group_depth=group_depth)
return argparser
def read_namespace(schema, namespace, *, config=None, prefix=None, validate=True):
"""Read config data from a namespace
Parameters
----------
schema: :class:`Schema`
the schema instance or source
    namespace: :class:`argparse.Namespace`
        the namespace returned by the argument parser
config: :class:`Config`, optional
the config object to be modified and returned (defaults to None, which means
that a new empty Config is created)
prefix: str, optional
a prefix for options (defaults to None)
validate: bool, optional
if True performs validation
Returns
-------
tuple
a 2-tuple (config, validation_result); validation_result is None if validate == False
"""
if not isinstance(schema, SchemaSection):
raise TypeError("{!r}: not a Schema".format(schema))
if prefix:
if isinstance(prefix, str):
fqname = (prefix,)
else:
fqname = tuple(prefix)
else:
fqname = ()
if config is None:
config = Config()
_read_namespace_impl(schema=schema, namespace=namespace, config=config, fqname=fqname)
if validate:
validation_result = schema.validate(config)
else:
validation_result = None
return config, validation_result
def _read_namespace_impl(schema, namespace, *, config, fqname):
"""read_namespace implementation"""
for key, value in schema.items():
if key in {SchemaSection.__default_option__, SchemaSection.__default_section__}:
continue
elif isinstance(value, SchemaSection):
subconfig = config.setdefault(key, {})
_read_namespace_impl(schema=value, namespace=namespace, config=subconfig, fqname=fqname + (key,))
else:
dest = argument_dest(fqname + (key,))
            val = getattr(namespace, dest, UNDEFINED)
if val is not UNDEFINED:
config[key] = val
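# A hedged end-to-end sketch (assuming `schema` is a SchemaSection instance
# with an Int option "x" under section "alpha"; names here are hypothetical):
#     parser = create_argparser(schema, prog="demo")
#     namespace = parser.parse_args(["--alpha-x", "1"])
#     config, validation = read_namespace(schema, namespace)
#     # config["alpha"]["x"] == 1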
|
{
"content_hash": "e303ccca8c064c28bf4f9b3d57108261",
"timestamp": "",
"source": "github",
"line_count": 375,
"max_line_length": 116,
"avg_line_length": 31.944,
"alnum_prop": 0.6472994406878705,
"repo_name": "simone-campagna/zirkon",
"id": "805b8f70163492155080998d4e132f5f4ac7f9b8",
"size": "12587",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zirkon/toolbox/argparser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "833579"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.android.targets.android_target import AndroidTarget
from pants.base.exceptions import TargetDefinitionException
class AndroidResources(AndroidTarget):
"""Android resources used to generate R.java."""
def __init__(self,
resource_dir=None,
**kwargs):
"""
:param string resource_dir: path/to/directory containing Android resource files,
often named 'res'.
"""
super(AndroidResources, self).__init__(**kwargs)
address = kwargs['address']
try:
self.resource_dir = os.path.join(address.spec_path, resource_dir)
except AttributeError:
raise TargetDefinitionException(self, 'An android_resources target must specify a '
'\'resource_dir\' that contains the target\'s '
'resource files.')
def globs_relative_to_buildroot(self):
return {'globs': os.path.join(self.resource_dir, '**')}
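# A hypothetical BUILD file entry for this target type (pants BUILD files
# are Python; names below are illustrative only):
#     android_resources(
#         name='res',
#         resource_dir='res',
#     )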
|
{
"content_hash": "fcc76e034745f6e8990b9682fd0790be",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 93,
"avg_line_length": 37.8,
"alnum_prop": 0.6331569664902998,
"repo_name": "dturner-tw/pants",
"id": "44c5cecaa4fe85a877373aa7c45ddac7c70c2883",
"size": "1281",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/android/targets/android_resources.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "11538"
},
{
"name": "Cucumber",
"bytes": "919"
},
{
"name": "GAP",
"bytes": "2459"
},
{
"name": "Go",
"bytes": "1849"
},
{
"name": "HTML",
"bytes": "70358"
},
{
"name": "Java",
"bytes": "293253"
},
{
"name": "JavaScript",
"bytes": "31042"
},
{
"name": "Protocol Buffer",
"bytes": "3783"
},
{
"name": "Python",
"bytes": "4404984"
},
{
"name": "Scala",
"bytes": "85217"
},
{
"name": "Shell",
"bytes": "50774"
},
{
"name": "Thrift",
"bytes": "2919"
}
],
"symlink_target": ""
}
|
import random
from rally.plugins.common.scenarios.requests import utils
from rally.task.scenarios import base
class HttpRequests(utils.RequestScenario):
"""Benchmark scenarios for HTTP requests."""
@base.scenario()
def check_request(self, url, method, status_code, **kwargs):
"""Standard way to benchmark web services.
        This scenario makes a request and checks the response against the
        expected status code.
:param url: url for the Request object
:param method: method for the Request object
:param status_code: expected response code
:param kwargs: optional additional request parameters
"""
self._check_request(url, method, status_code, **kwargs)
@base.scenario()
def check_random_request(self, requests, status_code):
"""Benchmark the list of requests
This scenario takes random url from list of requests, and raises
exception if the response is not the expected response.
:param requests: List of request dicts
:param status_code: Expected Response Code it will
be used only if we doesn't specified it in request proper
"""
request = random.choice(requests)
request.setdefault("status_code", status_code)
self._check_request(**request)
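# A minimal sketch of the expected input (hypothetical URLs):
#     requests = [
#         {"url": "http://example.com", "method": "GET"},
#         {"url": "http://example.com/missing", "method": "GET",
#          "status_code": 404},
#     ]
# check_random_request(requests=requests, status_code=200) picks one dict at
# random; the first dict above would fall back to status_code=200.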
|
{
"content_hash": "4e5f37d010c6d8c14a394f86c9061c93",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 73,
"avg_line_length": 33.58974358974359,
"alnum_prop": 0.6778625954198473,
"repo_name": "pyKun/rally",
"id": "50591eeb945d452aa2ea9fcd9e7c5b321d9fb24c",
"size": "1883",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "rally/plugins/common/scenarios/requests/http_requests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "46737"
},
{
"name": "Python",
"bytes": "2443451"
},
{
"name": "Shell",
"bytes": "36795"
}
],
"symlink_target": ""
}
|
"""
@copyright Copyright (c) 2011 - 2016, Intel Corporation.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@file test_acl_samples.py
@summary Samples for ACL configuration.
@details
Following test cases are tested:
1. Add simple ACL configuration.
2. Add/Delete simple ACLs.
3. Simple ACL test with traffic.
4. ACL Statistics.
"""
import time
import pytest
from testlib import helpers
@pytest.mark.acl
@pytest.mark.simplified
class TestAclSamples(object):
"""
@description Suite for ACL testing
"""
def test_acl_configuration(self, env):
"""
@brief Add simple ACL configuration
@steps
        -# Create ACL Expression for Ethernet.dst=00:00:00:01:01:01.
-# Verify ACL Expression has been created.
-# Create "Drop" ACL Action.
-# Verify ACL Action has been created.
-# Create ACL Rule for created Expression and Action.
-# Verify ACL Rule has been created.
@endsteps
"""
# Create ACL Expression
self.suite_logger.debug("Create and Verify ACL Expression")
expressions = [(1, 'DstMac', 'FF:FF:FF:FF:FF:FF', '00:00:00:01:01:01'), ]
env.switch[1].ui.create_acl(expressions=expressions)
# Verify ACL Expression
expression = env.switch[1].ui.get_table_acl("ACLExpressions")[0]
assert expression['data'] == '00:00:00:01:01:01'
assert expression['mask'] == 'FF:FF:FF:FF:FF:FF'
assert expression['expressionId'] == 1
assert expression['field'] == 'DstMac'
# Create ACL Actions
self.suite_logger.debug("Create and Verify ACL Action")
actions = [(1, 'Drop', ''), ]
env.switch[1].ui.create_acl(actions=actions)
# Verify ACL Action
action = env.switch[1].ui.get_table_acl("ACLActions")[0]
assert action['action'] == 'Drop'
assert action['param'] == ''
assert action['actionId'] == 1
# Create ACL Rule
self.suite_logger.debug("Create and Verify ACL Rule")
rules = [(1, 1, 1, 'Ingress', 'Enabled', 0), ]
# Note: ACL Rule should be assigned to ports
env.switch[1].ui.create_acl(ports=[1, ], rules=rules)
# Verify ACL Rule
rule = env.switch[1].ui.get_table_acl("ACLRules")[0]
assert rule['ruleId'] == 1
assert rule['expressionId'] == 1
assert rule['actionId'] == 1
assert rule['stage'] == 'Ingress'
assert rule['enabled'] == 'Enabled'
assert rule['priority'] == 0
def test_delete_acl(self, env):
"""
@brief Add/Delete simple ACL
@steps
-# Configure 2 ACL Expressions.
-# Verify Expressions have been created.
-# Delete second ACL Expression.
-# Verify ACL has been deleted.
-# Configure 2 ACL Actions.
-# Verify Actions have been created.
-# Delete second ACL Action.
-# Verify ACL has been deleted.
-# Configure ACL Rule.
-# Verify Rule has been created.
-# Delete ACL Rule.
-# Verify ACL has been deleted.
@endsteps
"""
# Create ACL Expressions
self.suite_logger.debug("Create ACL Expressions")
expressions = [(1, 'DstMac', 'FF:FF:FF:FF:FF:FF', '00:00:00:01:01:01'),
(2, 'SrcMac', 'FF:FF:FF:FF:FF:FF', '00:00:00:02:02:02')]
env.switch[1].ui.create_acl(expressions=expressions)
# Verify ACL Expression
expressions_table = env.switch[1].ui.get_table_acl("ACLExpressions")
# Verify first expression has been added
expr_1 = {"expressionId": expressions[0][0],
"field": expressions[0][1],
"mask": expressions[0][2],
"data": expressions[0][3]
}
assert expr_1 in expressions_table, \
"Expression {0} was not added".format(expressions[0])
# Verify second expression has been added
expr_2 = {"expressionId": expressions[1][0],
"field": expressions[1][1],
"mask": expressions[1][2],
"data": expressions[1][3]
}
assert expr_2 in expressions_table,\
"Expression {0} was not added".format(expressions[1])
# Delete Expression
self.suite_logger.debug("Delete ACL Expression")
env.switch[1].ui.delete_acl(expression_ids=[(2, 'SrcMac'), ])
# Verify Expression has been deleted
expressions_table = env.switch[1].ui.get_table_acl("ACLExpressions")
assert expr_2 not in expressions_table, \
"Expression {0} was not deleted".format(expressions[1])
# Create ACL Actions
self.suite_logger.debug("Create ACL Actions")
actions = [(1, 'Drop', ''),
(2, 'Count', '')]
env.switch[1].ui.create_acl(actions=actions)
# Verify ACL Action
actions_table = env.switch[1].ui.get_table_acl("ACLActions")
# Verify first action has been added
act_1 = {"actionId": actions[0][0],
"action": actions[0][1],
"param": actions[0][2]
}
assert act_1 in actions_table, "Action {0} was not added".format(actions[0])
# Verify second action has been added
act_2 = {"actionId": actions[1][0],
"action": actions[1][1],
"param": actions[1][2]
}
assert act_2 in actions_table, "Action {0} was not added".format(actions[1])
# Delete Action
self.suite_logger.debug("Delete ACL Action")
env.switch[1].ui.delete_acl(action_ids=[(2, 'Count'), ])
# Verify Action has been deleted
actions_table = env.switch[1].ui.get_table_acl("ACLActions")
assert act_2 not in actions_table, "Action {0} was not deleted".format(actions[1])
# Create ACL Rule
self.suite_logger.debug("Create ACL Rule")
rules = [(1, 1, 1, 'Ingress', 'Enabled', 0), ]
env.switch[1].ui.create_acl(ports=[1, ], rules=rules)
# Verify ACL Rule has been added
rules_table = env.switch[1].ui.get_table_acl("ACLRules")
rule = {"ruleId": rules[0][0],
"expressionId": rules[0][1],
"actionId": rules[0][2],
"stage": rules[0][3],
"enabled": rules[0][4],
"priority": rules[0][5]
}
assert rule in rules_table, "Rule {0} was not added".format(rules[0])
# Delete Rule
self.suite_logger.debug("Delete ACL Rule")
env.switch[1].ui.delete_acl(ports=[1, ], rule_ids=[1, ])
# Verify Rule has been deleted
rules_table = env.switch[1].ui.get_table_acl("ACLRules")
assert rule not in rules_table, "Rule {0} was not deleted".format(rules[0])
def test_acl_traffic(self, env):
"""
@brief Simple ACL test with traffic
@steps
-# Configure ACL: drop all packets.
-# Configure ACL: allow packets with Ethernet.dst=00:00:00:01:01:01.
-# Create stream with Ethernet.dst=00:00:00:01:01:01.
-# Create stream with Ethernet.dst=00:00:00:03:03:03.
-# Send streams.
-# Verify first stream is flooded.
-# Verify second stream is discarded.
@endsteps
"""
# Get active ports: use four ports for test case
active_ports = env.get_ports([['tg1', 'sw1', 4], ])
device_ports = list(active_ports[('sw1', 'tg1')].values())
sniff_ports = list(active_ports[('tg1', 'sw1')].values())
        # Disable all ports and enable only the necessary ones:
helpers.set_all_ports_admin_disabled(env.switch)
helpers.set_ports_admin_enabled(env.switch, active_ports)
self.suite_logger.debug("Disable STP.")
env.switch[1].ui.configure_spanning_tree(enable='Disabled')
# Configure ACL: drop all packets;
# allow only packets with Ethernet.dst=00:00:00:01:01:01
# Configure ACL Expression in format (id, expression, mask, value)
self.suite_logger.debug("Create ACLs")
expressions = [(1, 'SrcMac', '00:00:00:00:00:00', '00:00:00:00:00:00'),
(2, 'DstMac', 'FF:FF:FF:FF:FF:FF', '00:00:00:01:01:01')]
# Configure ACL Action in format (id, action, params)
actions = [(1, 'Drop', ''), (2, 'Allow', '')]
# Configure ACL Rule in format
# (id, expression_id, action_id, stage, enabled, priority)
rules = [(1, 1, 1, 'Ingress', 'Enabled', 0), (2, 2, 2, 'Ingress', 'Enabled', 0)]
# Create ACLs on device's ports
try:
env.switch[1].ui.create_acl(ports=device_ports, expressions=expressions,
actions=actions, rules=rules)
except Exception as err:
# Exception in configuration
self.suite_logger.debug('ACL configuration failed: %s' % err)
pytest.fail('ACL configuration failed')
# Wait some time for proper switch behavior
time.sleep(1)
# Generate test traffic
packet_1 = ({"Ethernet": {"dst": "00:00:00:01:01:01", "src": "00:00:00:02:02:02"}},
{"IP": {}}, {"TCP": {}})
packet_2 = ({"Ethernet": {"dst": "00:00:00:03:03:03", "src": "00:00:00:04:04:04"}},
{"IP": {}}, {"TCP": {}})
# Send packets to the first port
stream_1 = env.tg[1].set_stream(packet_1, count=1, iface=sniff_ports[0])
stream_2 = env.tg[1].set_stream(packet_2, count=1, iface=sniff_ports[0])
streams = [stream_1, stream_2]
# Start capture
self.suite_logger.debug("Start the capture and send the test traffic")
env.tg[1].start_sniff(sniff_ports, sniffing_time=10)
# Send generated streams
env.tg[1].start_streams(streams)
# Stop capture
data = env.tg[1].stop_sniff(sniff_ports)
# Stop traffic
env.tg[1].stop_streams()
# Print captured data
helpers.print_sniffed_data_brief(data)
# Get packets from the captured data
self.suite_logger.debug("Verify traffic is processed according to the ACLs")
# Verify first packet is flooded
params_1 = [{"layer": "Ethernet", "field": 'dst', "value": '00:00:00:01:01:01'},
{"layer": "Ethernet", "field": 'src', "value": '00:00:00:02:02:02'}]
assert len(helpers.get_packet_from_the_port(sniff_port=sniff_ports[1],
params=params_1,
sniff_data=data,
tg=env.tg[1])) == 1, \
"Packet is not received"
assert len(helpers.get_packet_from_the_port(sniff_port=sniff_ports[2],
params=params_1,
sniff_data=data,
tg=env.tg[1])) == 1, \
"Packet is not received"
assert len(helpers.get_packet_from_the_port(sniff_port=sniff_ports[3],
params=params_1,
sniff_data=data,
tg=env.tg[1])) == 1, \
"Packet is not received"
# Verify second packet is dropped
params_1 = [{"layer": "Ethernet", "field": 'dst', "value": '00:00:00:03:03:03'},
{"layer": "Ethernet", "field": 'src', "value": '00:00:00:04:04:04'}]
assert len(helpers.get_packet_from_the_port(sniff_port=sniff_ports[1],
params=params_1,
sniff_data=data,
tg=env.tg[1])) == 0, \
"Packet is flooded"
assert len(helpers.get_packet_from_the_port(sniff_port=sniff_ports[2],
params=params_1,
sniff_data=data,
tg=env.tg[1])) == 0, \
"Packet is flooded"
assert len(helpers.get_packet_from_the_port(sniff_port=sniff_ports[3],
params=params_1,
sniff_data=data,
tg=env.tg[1])) == 0, \
"Packet is flooded"
def test_acl_statistics(self, env):
"""
@brief Simple ACL test for ACL Statistics
@steps
-# Configure ACL: drop all packets.
-# Configure ACL: allow packets with Ethernet.dst=00:00:00:01:01:01.
-# Create stream with Ethernet.dst=00:00:00:01:01:01.
-# Create stream with Ethernet.dst=00:00:00:03:03:03.
-# Send streams.
-# Get ACL Statistics.
-# Verify Statistics has been updated with correct values.
@endsteps
"""
# Get active ports: use four ports for test case
active_ports = env.get_ports([['tg1', 'sw1', 4], ])
device_ports = list(active_ports[('sw1', 'tg1')].values())
sniff_ports = list(active_ports[('tg1', 'sw1')].values())
        # Disable all ports and enable only the necessary ones:
helpers.set_all_ports_admin_disabled(env.switch)
helpers.set_ports_admin_enabled(env.switch, active_ports)
self.suite_logger.debug("Disable STP.")
env.switch[1].ui.configure_spanning_tree(enable='Disabled')
# Configure ACL: drop all packets;
# allow only packets with Ethernet.dst=00:00:00:01:01:01
self.suite_logger.debug("Create ACLs")
# Configure ACL Expression in format (id, expression, mask, value)
expressions = [(1, 'SrcMac', '00:00:00:00:00:00', '00:00:00:00:00:00'),
(2, 'DstMac', 'FF:FF:FF:FF:FF:FF', '00:00:00:01:01:01')]
# Configure ACL Action in format (id, action, params)
# Additional 'Count' action should be added in order to update ACL Statistics
actions = [(1, 'Drop', ''), (1, 'Count', ''), (2, 'Allow', ''), (2, 'Count', '')]
# Configure ACL Rule in format
# (id, expression_id, action_id, stage, enabled, priority)
rules = [(1, 1, 1, 'Ingress', 'Enabled', 0), (2, 2, 2, 'Ingress', 'Enabled', 0)]
# Create ACLs on device's ports
try:
env.switch[1].ui.create_acl(ports=device_ports, expressions=expressions,
actions=actions, rules=rules)
except Exception as err:
# Exception in configuration
self.suite_logger.debug('ACL configuration failed: %s' % err)
pytest.fail('ACL configuration failed')
# Wait some time for proper switch behavior
time.sleep(1)
# Generate test traffic
packet_1 = ({"Ethernet": {"dst": "00:00:00:01:01:01", "src": "00:00:00:02:02:02"}},
{"IP": {}}, {"TCP": {}})
packet_2 = ({"Ethernet": {"dst": "00:00:00:03:03:03", "src": "00:00:00:04:04:04"}},
{"IP": {}}, {"TCP": {}})
# Send packets to the first port
count_2 = 5
count_1 = 10
packet_size = 100
stream_1 = env.tg[1].set_stream(packet_1, count=count_2,
iface=sniff_ports[0], required_size=packet_size)
stream_2 = env.tg[1].set_stream(packet_2, count=count_1,
iface=sniff_ports[0], required_size=packet_size)
streams = [stream_1, stream_2]
self.suite_logger.debug("Start the capture and send the test traffic")
# Start capture
env.tg[1].start_sniff(sniff_ports, sniffing_time=10)
# Send generated streams
env.tg[1].start_streams(streams)
# Stop capture
env.tg[1].stop_sniff(sniff_ports)
# Stop traffic
env.tg[1].stop_streams()
self.suite_logger.debug("Verify ACl Statistics is updated "
"according to the created ACLs")
# Get ACL Statistics
statistics = env.switch[1].ui.get_table_acl("ACLStatistics")
# Get statistics for first ACL Rule
stat_1 = [x for x in statistics if x["ruleId"] == 1][0]
# Verify statistics
assert stat_1["matchPkts"] == count_1
assert stat_1["matchOctets"] == count_1 * packet_size
# Get statistics for second ACL Rule
stat_1 = [x for x in statistics if x["ruleId"] == 2][0]
# Verify statistics
assert stat_1["matchPkts"] == count_2
assert stat_1["matchOctets"] == count_2 * packet_size
|
{
"content_hash": "7d28e4cab4ef09d18f698ab1b47e8b61",
"timestamp": "",
"source": "github",
"line_count": 399,
"max_line_length": 91,
"avg_line_length": 43.70175438596491,
"alnum_prop": 0.5418363250559156,
"repo_name": "taf3/testcases",
"id": "4c0d691cc90de5d21ff0de1a2f11e3c5197a6b31",
"size": "17437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "l2/acl/test_acl_samples.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "250643"
}
],
"symlink_target": ""
}
|
from urllib.parse import urlparse
from .base import *
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
DATABASES = {}
DATABASES['default'] = env.db()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
ALLOWED_HOSTS = env('ALLOWED_HOSTS').split('|')
STATIC_ROOT = str(PROJECT_ROOT / 'static')
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
redis_url = urlparse(env('REDIS_URL', default='redis://localhost:6379'))
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '%s:%s' % (redis_url.hostname, redis_url.port),
'OPTIONS': {
'DB': 0,
'PASSWORD': redis_url.password,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'PICKLE_VERSION': 2,
},
},
}
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
MIDDLEWARE_CLASSES += (
'django.middleware.gzip.GZipMiddleware',
)
SECRET_KEY = env('SECRET_KEY')
# TODO:
# MEDIA_ROOT??
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_QUERYSTRING_AUTH = False
# put the cloudfront distro here
# AWS_S3_CUSTOM_DOMAIN = 'foo.cloudfront.net'
AWS_ACCESS_KEY_ID = env('AWS_ACCESS_KEY_ID', default='')
AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY', default='')
AWS_STORAGE_BUCKET_NAME = env('AWS_STORAGE_BUCKET_NAME', default='')
# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
TEMPLATES[0]['OPTIONS']['loaders'] = (
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
)
# GEOS_LIBRARY_PATH = '/app/.heroku/vendor/lib/libgeos_c.so'
# GDAL_LIBRARY_PATH = '/app/.heroku/vendor/lib/libgdal.so'
# SOCIAL_AUTH_FACEBOOK_KEY = env('SOCIAL_AUTH_FACEBOOK_KEY')
# SOCIAL_AUTH_FACEBOOK_SECRET = env('SOCIAL_AUTH_FACEBOOK_SECRET')
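# Expected environment (hypothetical values): ALLOWED_HOSTS is a
# '|'-separated list and REDIS_URL a standard redis URL, e.g.
#     ALLOWED_HOSTS=example.com|www.example.com
#     REDIS_URL=redis://:secret@redis-host:6379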
|
{
"content_hash": "58076728f166920d9033a2160ba504e4",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 79,
"avg_line_length": 30.107692307692307,
"alnum_prop": 0.6801226366888095,
"repo_name": "gregazevedo/gregazevedo",
"id": "eef257e8b03223e89fe64022af82fb028009d5ae",
"size": "1957",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gregazevedo/gregazevedo/settings/heroku.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7277"
},
{
"name": "HTML",
"bytes": "14764"
},
{
"name": "JavaScript",
"bytes": "5743"
},
{
"name": "Python",
"bytes": "49091"
},
{
"name": "Shell",
"bytes": "3423"
}
],
"symlink_target": ""
}
|
"""
Outputs the page count of the wiki.
@copyright: 2007 MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details
"""
Dependencies = ['namespace']
from MoinMoin import wikiutil
def macro_PageCount(macro, exists=None):
""" Return number of pages readable by current user
Return either an exact count (slow!) or fast count including deleted pages.
TODO: make macro syntax more sane
"""
request = macro.request
exists = wikiutil.get_unicode(request, exists, 'exists')
# Check input
only_existing = False
if exists == u'exists':
only_existing = True
elif exists:
raise ValueError("Wrong argument: %r" % exists)
count = request.rootpage.getPageCount(exists=only_existing)
return macro.formatter.text("%d" % count)
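# Typical wiki-page invocations (MoinMoin macro syntax):
#     <<PageCount>>          -- fast count, may include deleted pages
#     <<PageCount(exists)>>  -- exact count of existing pages only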
|
{
"content_hash": "11cd6e72590faf5e18bd72cfe6774ceb",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 79,
"avg_line_length": 27.766666666666666,
"alnum_prop": 0.6566626650660264,
"repo_name": "Glottotopia/aagd",
"id": "8d7e4cfd40d2f19c0ddf95d593427138b6b2496d",
"size": "863",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "moin/local/moin/MoinMoin/macro/PageCount.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "152885"
},
{
"name": "CSS",
"bytes": "454208"
},
{
"name": "ColdFusion",
"bytes": "438820"
},
{
"name": "HTML",
"bytes": "1998354"
},
{
"name": "Java",
"bytes": "510468"
},
{
"name": "JavaScript",
"bytes": "6505329"
},
{
"name": "Lasso",
"bytes": "72399"
},
{
"name": "Makefile",
"bytes": "10216"
},
{
"name": "PHP",
"bytes": "259528"
},
{
"name": "Perl",
"bytes": "137186"
},
{
"name": "Python",
"bytes": "13713475"
},
{
"name": "Shell",
"bytes": "346"
},
{
"name": "XSLT",
"bytes": "15970"
}
],
"symlink_target": ""
}
|
import asyncio
import datetime
import functools
import logging
import os
try:
import ssl
except ImportError:
ssl = None
import gunicorn.workers.base as base
from aiohttp.wsgi import WSGIServerHttpProtocol as OldWSGIServerHttpProtocol
class WSGIServerHttpProtocol(OldWSGIServerHttpProtocol):
def log_access(self, request, environ, response, time):
self.logger.access(response, request, environ, datetime.timedelta(0, 0, time))
class AiohttpWorker(base.Worker):
def __init__(self, *args, **kw): # pragma: no cover
super().__init__(*args, **kw)
cfg = self.cfg
if cfg.is_ssl:
self.ssl_context = self._create_ssl_context(cfg)
else:
self.ssl_context = None
self.servers = []
self.connections = {}
def init_process(self):
# create new event_loop after fork
asyncio.get_event_loop().close()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
super().init_process()
def run(self):
self._runner = asyncio.ensure_future(self._run(), loop=self.loop)
try:
self.loop.run_until_complete(self._runner)
finally:
self.loop.close()
def wrap_protocol(self, proto):
proto.connection_made = _wrp(
proto, proto.connection_made, self.connections)
proto.connection_lost = _wrp(
proto, proto.connection_lost, self.connections, False)
return proto
def factory(self, wsgi, addr):
# are we in debug level
is_debug = self.log.loglevel == logging.DEBUG
proto = WSGIServerHttpProtocol(
wsgi, readpayload=True,
loop=self.loop,
log=self.log,
debug=is_debug,
keep_alive=self.cfg.keepalive,
access_log=self.log.access_log,
access_log_format=self.cfg.access_log_format)
return self.wrap_protocol(proto)
def get_factory(self, sock, addr):
return functools.partial(self.factory, self.wsgi, addr)
@asyncio.coroutine
def close(self):
try:
if hasattr(self.wsgi, 'close'):
yield from self.wsgi.close()
except:
self.log.exception('Process shutdown exception')
@asyncio.coroutine
def _run(self):
for sock in self.sockets:
factory = self.get_factory(sock.sock, sock.cfg_addr)
self.servers.append(
(yield from self._create_server(factory, sock)))
# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive or self.connections:
self.notify()
if (self.alive and
pid == os.getpid() and self.ppid != os.getppid()):
self.log.info("Parent changed, shutting down: %s", self)
self.alive = False
# stop accepting requests
if not self.alive:
if self.servers:
self.log.info(
"Stopping server: %s, connections: %s",
pid, len(self.connections))
for server in self.servers:
server.close()
self.servers.clear()
# prepare connections for closing
for conn in self.connections.values():
if hasattr(conn, 'closing'):
conn.closing()
yield from asyncio.sleep(1.0, loop=self.loop)
except KeyboardInterrupt:
pass
if self.servers:
for server in self.servers:
server.close()
yield from self.close()
@asyncio.coroutine
def _create_server(self, factory, sock):
return self.loop.create_server(factory, sock=sock.sock,
ssl=self.ssl_context)
@staticmethod
def _create_ssl_context(cfg):
""" Creates SSLContext instance for usage in asyncio.create_server.
See ssl.SSLSocket.__init__ for more details.
"""
ctx = ssl.SSLContext(cfg.ssl_version)
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
ctx.verify_mode = cfg.cert_reqs
if cfg.ca_certs:
ctx.load_verify_locations(cfg.ca_certs)
if cfg.ciphers:
ctx.set_ciphers(cfg.ciphers)
return ctx
class _wrp:
def __init__(self, proto, meth, tracking, add=True):
self._proto = proto
self._id = id(proto)
self._meth = meth
self._tracking = tracking
self._add = add
def __call__(self, *args):
if self._add:
self._tracking[self._id] = self._proto
elif self._id in self._tracking:
del self._tracking[self._id]
conn = self._meth(*args)
return conn
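# This worker class is selected through gunicorn's -k/--worker-class flag,
# e.g. `gunicorn -k gaiohttp myapp:application` (the module path shown is
# hypothetical; `gaiohttp` is the alias gunicorn 19.x registers for it).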
|
{
"content_hash": "38f002df23170494f1fcebff4fcf41a2",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 86,
"avg_line_length": 30.374233128834355,
"alnum_prop": 0.5544334477883256,
"repo_name": "cloudera/hue",
"id": "fe378c351b9a4734f8d3c215ead69e31433aa465",
"size": "5079",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/gunicorn-19.9.0/gunicorn/workers/_gaiohttp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
}
|
import subprocess as sp
from ..utils import *
from .. import _HAS_MEDIAINFO
def mprobe(filename):
"""get metadata by using mediainfo
Checks the output of mediainfo on the desired video
file. Data is then parsed into a dictionary and
checked for video data. If no such video data exists,
an empty dictionary is returned.
Parameters
----------
filename : string
Path to the video file
Returns
-------
mediaDict : dict
Dictionary containing all header-based information
about the passed-in source video.
"""
assert _HAS_MEDIAINFO, "`mediainfo` not found in path. Is it installed?"
try:
# '-f' gets full output, and --Output=XML is xml formatted output
command = ["mediainfo", "-f", "--Output=XML", filename]
# simply get std output
xml = check_output(command)
d = xmltodictparser(xml)
assert "Mediainfo" in d
d = d["Mediainfo"]
assert "File" in d
d = d["File"]
assert "track" in d
unorderedtracks = d["track"]
# tracksbytype normalizes the input by key
tracksbytype = {}
        if isinstance(unorderedtracks, list):
for d in unorderedtracks:
assert "@type" in d
# can't have more than 1 key. If this case arises
# an issue should be made in the tracker for a fix.
assert d["@type"] not in tracksbytype
tracksbytype[d["@type"]] = d
else: # not list
assert "@type" in unorderedtracks
tracksbytype[unorderedtracks["@type"]] = unorderedtracks
return tracksbytype
    except Exception:
        # any parse or validation failure yields an empty metadata dict
        return {}
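# Illustrative usage sketch (hypothetical file path, and the "Width"/"Height"
# keys are assumptions about mediainfo's full output; requires the mediainfo
# binary on PATH):
#
#     tracks = mprobe("/path/to/video.mp4")
#     if "Video" in tracks:
#         print(tracks["Video"].get("Width"), tracks["Video"].get("Height"))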
|
{
"content_hash": "651c2f99daf08a514b6b625f2359e05b",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 76,
"avg_line_length": 27.64516129032258,
"alnum_prop": 0.5869311551925321,
"repo_name": "beyondmetis/scikit-video",
"id": "4f301294ee51f5eeb69b264a9d66d4d4ea7d4f12",
"size": "1714",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "skvideo/io/mprobe.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "171471"
},
{
"name": "Shell",
"bytes": "5690"
}
],
"symlink_target": ""
}
|
"""\
Worker
Provides classes and a decorator for using Redis as a task queue.
Run this script or start a TaskWorker instance to run background tasks.
Initial code can be found here: http://flask.pocoo.org/snippets/73/
"""
import logging
from threading import Thread
from pickle import dumps, loads
from uuid import uuid4
from redis import Redis, ConnectionError
from time import sleep
from traceback import format_exc
from flask import current_app
logger = logging.getLogger('worker')
def create_redis():
return Redis()
class TaskWorker(Thread):
"""A dedicated task worker that runs on a separate thread."""
def __init__(self, app=None, port=None, queue_key=None, rv_ttl=None, redis=None, worker_name=None, thread_name=None, debug=None):
Thread.__init__(self, name=thread_name)
self.daemon = True
self.queue_key = queue_key
self.port = port
self.rv_ttl = rv_ttl or 500
self.redis = redis or create_redis()
        # Fall back to the thread name when no explicit worker name was given
        self.worker_name = worker_name if worker_name is not None else thread_name
self.debug = app.debug if (debug is None and app) else (debug or False)
self._worker_prefix = (self.worker_name + ': ') if self.worker_name else ''
# Try getting the queue key from the specified or current Flask application
if not self.queue_key:
if not app and not current_app:
raise ValueError('Cannot connect to Redis since both queue_key and app were not provided and current_app is None.')
self.queue_key = (app or current_app).config.get('REDIS_QUEUE_KEY', None)
if not self.queue_key:
raise ValueError('Cannot connect to Redis since REDIS_QUEUE_KEY was not provided in the application config.')
# Connect to Redis for the first time so that connection exceptions happen in the caller thread
if not self.debug:
self.redis.ping()
def reset(self):
"""Resets the database to an empty task queue."""
try:
self.redis.flushdb()
except ConnectionError:
pass
def run(self):
"""Runs all current and future tasks."""
logger.info(' * %sRunning task worker ("%s")', self._worker_prefix, self.queue_key)
while True:
try:
self.run_task()
except ConnectionError:
logger.error(' * %sDisconnected, waiting for task queue...', self._worker_prefix)
while True:
try:
self.redis.ping()
sleep(1)
break
except ConnectionError:
pass
logger.info(' * %sConnected to task queue', self._worker_prefix)
            except Exception:
                logger.error(self._worker_prefix + format_exc())
def run_task(self):
"""Runs a single task."""
msg = self.redis.blpop(self.queue_key)
func, task_id, args, kwargs = loads(msg[1])
        arg_repr = ', '.join([repr(a) for a in args] + ['%s=%r' % kv for kv in kwargs.items()])
        logger.info('%sStarted task: %s(%s)', self._worker_prefix, func.__name__, arg_repr)
try:
rv = func(*args, **kwargs)
        except Exception as ex:
            rv = ex
logger.info('%s-> Completed: %s', self._worker_prefix, repr(rv))
if rv is not None:
self.redis.set(task_id, dumps(rv))
self.redis.expire(task_id, self.rv_ttl)
redis = create_redis()
class Task(object):
"""Represents an intermediate result."""
def __init__(self, task_id):
object.__init__(self)
self.id = task_id
self._value = None
@property
def return_value(self):
if self._value is None:
rv_encoded = redis.get(self.id)
if rv_encoded:
self._value = loads(rv_encoded)
return self._value
@property
def exists(self):
return self._value or redis.exists(self.id)
def delete(self):
redis.delete(self.id)
def delayable(f):
"""Marks a function as delayable by giving it the 'delay' and 'get_task' members."""
def delay(*args, **kwargs):
queue_key = current_app.config.get('REDIS_QUEUE_KEY', 'default')
task_id = '%s:result:%s' % (queue_key, str(uuid4()))
s = dumps((f, task_id, args, kwargs))
redis.set(task_id, '')
redis.rpush(queue_key, s)
return Task(task_id)
def get_task(task_id):
result = Task(task_id)
return result if result.exists else None
f.delay = delay
f.get_task = get_task
return f
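# Illustrative usage sketch (hypothetical function name; requires an active
# Flask application context so REDIS_QUEUE_KEY can be resolved):
#
#     @delayable
#     def add(x, y):
#         return x + y
#
#     task = add.delay(2, 3)      # enqueued; a running TaskWorker executes it
#     result = task.return_value  # None until the worker has stored the result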
# Run dedicated task worker
if __name__ == '__main__':
import sys
# Get queue key from app config
from hello_redis_tasks import app
queue_key = app.config.get('REDIS_QUEUE_KEY', 'default')
# It's easy to have multiple task worker instances
task_workers = []
try:
for i in range(5):
task_workers.append(TaskWorker(queue_key=queue_key, worker_name="Worker %d" % i))
except ConnectionError:
logger.error('Could not connect to Redis. Be sure Redis is running before starting the worker.')
sys.exit(1)
# Start each worker then wait forever, so we can receive KeyboardInterrupt to exit
    for w in task_workers:
        w.start()
    while any(w.is_alive() for w in task_workers):
sleep(1)
logger.info(' * Task worker stopped')
|
{
"content_hash": "482fa9c0938b4ab21d9524ed122db03b",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 147,
"avg_line_length": 35.21290322580645,
"alnum_prop": 0.600403078050568,
"repo_name": "vals/google_devnull",
"id": "a213c0273c812d84c9df32f08ddc30d3318cf14f",
"size": "5458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "worker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15305"
},
{
"name": "Ruby",
"bytes": "28"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/clothing/shared_clothing_dress_casual_09.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "dd78e9e64bb5009a2de28454a0638178",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 88,
"avg_line_length": 24.384615384615383,
"alnum_prop": 0.7003154574132492,
"repo_name": "anhstudios/swganh",
"id": "fb904a693fc0f2be30b7585c46be9e35a574ded1",
"size": "462",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/draft_schematic/clothing/shared_clothing_dress_casual_09.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
import os
import sys
import re
import subprocess
import requests
import simplejson
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
repo = 'getredash/redash'
def _github_request(method, path, params=None, headers=None):
if not path.startswith('https://api.github.com'):
url = "https://api.github.com/{}".format(path)
else:
url = path
if params is not None:
params = simplejson.dumps(params)
    response = requests.request(method, url, data=params, headers=headers, auth=auth)
return response
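# Illustrative call sketch (the path is hypothetical; auth is taken from the
# GITHUB_TOKEN environment variable above):
#
#     response = _github_request('get', 'repos/{}/releases/latest'.format(repo))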
def exception_from_error(message, response):
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))
def rc_tag_name(version):
return "v{}-rc".format(version)
def get_rc_release(version):
tag = rc_tag_name(version)
response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))
if response.status_code == 404:
return None
elif response.status_code == 200:
return response.json()
    raise exception_from_error("Unknown error while looking up RC release", response)
def create_release(version, commit_sha):
tag = rc_tag_name(version)
params = {
'tag_name': tag,
'name': "{} - RC".format(version),
'target_commitish': commit_sha,
'prerelease': True
}
response = _github_request('post', 'repos/{}/releases'.format(repo), params)
if response.status_code != 201:
raise exception_from_error("Failed creating new release", response)
return response.json()
def upload_asset(release, filepath):
upload_url = release['upload_url'].replace('{?name,label}', '')
filename = filepath.split('/')[-1]
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
if response.status_code != 201: # not 200/201/...
raise exception_from_error('Failed uploading asset', response)
return response
def remove_previous_builds(release):
for asset in release['assets']:
response = _github_request('delete', asset['url'])
if response.status_code != 204:
raise exception_from_error("Failed deleting asset", response)
def get_changelog(commit_sha):
latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
if latest_release.status_code != 200:
raise exception_from_error('Failed getting latest release', latest_release)
latest_release = latest_release.json()
previous_sha = latest_release['target_commitish']
args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
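    # Each emitted line looks like (illustrative values):
    #   "a1b2c3d|Merge pull request #123 from user/branch|Fix the bug|deadbee beefcaf"
    # i.e. short hash, subject, body and parent hashes separated by '|' and
    # wrapped in quotes, which the loop below strips and splits.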
log = subprocess.check_output(args)
changes = ["Changes since {}:".format(latest_release['name'])]
for line in log.split('\n'):
try:
sha, subject, body, parents = line[1:-1].split('|')
except ValueError:
continue
        try:
            pull_request = re.match(r"Merge pull request #(\d+)", subject).groups()[0]
            pull_request = " #{}".format(pull_request)
        except Exception:
            pull_request = ""
author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
return "\n".join(changes)
def update_release_commit_sha(release, commit_sha):
params = {
'target_commitish': commit_sha,
}
response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
if response.status_code != 200:
raise exception_from_error("Failed updating commit sha for existing release", response)
return response.json()
def update_release(version, build_filepath, commit_sha):
try:
release = get_rc_release(version)
if release:
release = update_release_commit_sha(release, commit_sha)
else:
release = create_release(version, commit_sha)
print("Using release id: {}".format(release['id']))
remove_previous_builds(release)
response = upload_asset(release, build_filepath)
changelog = get_changelog(commit_sha)
response = _github_request('patch', release['url'], {'body': changelog})
if response.status_code != 200:
raise exception_from_error("Failed updating release description", response)
except Exception as ex:
print(ex)
if __name__ == '__main__':
commit_sha = sys.argv[1]
version = sys.argv[2]
filepath = sys.argv[3]
# TODO: make sure running from git directory & remote = repo
update_release(version, filepath, commit_sha)
|
{
"content_hash": "e2b2b85f221dcc35666e8ab5316fdfbd",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 160,
"avg_line_length": 33.374149659863946,
"alnum_prop": 0.6300448430493274,
"repo_name": "alexanderlz/redash",
"id": "3d9b21c895228e83aaaacdc0d9686869f34fbb74",
"size": "4929",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bin/release_manager.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "192079"
},
{
"name": "Dockerfile",
"bytes": "839"
},
{
"name": "HTML",
"bytes": "143048"
},
{
"name": "JavaScript",
"bytes": "723822"
},
{
"name": "Makefile",
"bytes": "1231"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "956802"
},
{
"name": "Shell",
"bytes": "8517"
}
],
"symlink_target": ""
}
|
"""
Test some lldb command abbreviations.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ExecTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
@expectedFailureAll(archs=['i386'], bugnumber="rdar://28656532")
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://problem/34559552") # this exec test has problems on ios systems
@expectedFailureNetBSD
@skipIfAsan # rdar://problem/43756823
@skipIfWindows
def test_hitting_exec (self):
self.do_test(False)
@expectedFailureAll(archs=['i386'], bugnumber="rdar://28656532")
@expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://problem/34559552") # this exec test has problems on ios systems
@expectedFailureNetBSD
@skipIfAsan # rdar://problem/43756823
@skipIfWindows
def test_skipping_exec (self):
self.do_test(True)
def do_test(self, skip_exec):
self.build()
exe = self.getBuildArtifact("a.out")
secondprog = self.getBuildArtifact("secondprog")
# Create the target
target = self.dbg.CreateTarget(exe)
# Create any breakpoints we need
breakpoint1 = target.BreakpointCreateBySourceRegex(
'Set breakpoint 1 here', lldb.SBFileSpec("main.cpp", False))
self.assertTrue(breakpoint1, VALID_BREAKPOINT)
breakpoint2 = target.BreakpointCreateBySourceRegex(
'Set breakpoint 2 here', lldb.SBFileSpec("secondprog.cpp", False))
self.assertTrue(breakpoint2, VALID_BREAKPOINT)
# Launch the process
process = target.LaunchSimple(
None, None, self.get_process_working_directory())
self.assertTrue(process, PROCESS_IS_VALID)
if self.TraceOn():
self.runCmd("settings show target.process.stop-on-exec", check=False)
if skip_exec:
self.dbg.HandleCommand("settings set target.process.stop-on-exec false")
def cleanup():
self.runCmd("settings set target.process.stop-on-exec false",
check=False)
# Execute the cleanup function during test case tear down.
self.addTearDownHook(cleanup)
# The stop reason of the thread should be breakpoint.
self.assertTrue(process.GetState() == lldb.eStateStopped,
STOPPED_DUE_TO_BREAKPOINT)
threads = lldbutil.get_threads_stopped_at_breakpoint(
process, breakpoint1)
self.assertTrue(len(threads) == 1)
# We had a deadlock tearing down the TypeSystemMap on exec, but only if some
# expression had been evaluated. So make sure we do that here so the teardown
# is not trivial.
thread = threads[0]
value = thread.frames[0].EvaluateExpression("1 + 2")
self.assertTrue(
value.IsValid(),
"Expression evaluated successfully")
int_value = value.GetValueAsSigned()
self.assertTrue(int_value == 3, "Expression got the right result.")
# Run and we should stop due to exec
process.Continue()
if not skip_exec:
self.assertFalse(process.GetState() == lldb.eStateExited,
"Process should not have exited!")
self.assertTrue(process.GetState() == lldb.eStateStopped,
"Process should be stopped at __dyld_start")
threads = lldbutil.get_stopped_threads(
process, lldb.eStopReasonExec)
self.assertTrue(
len(threads) == 1,
"We got a thread stopped for exec.")
# Run and we should stop at breakpoint in main after exec
process.Continue()
threads = lldbutil.get_threads_stopped_at_breakpoint(
process, breakpoint2)
if self.TraceOn():
for t in process.threads:
print(t)
if t.GetStopReason() != lldb.eStopReasonBreakpoint:
self.runCmd("bt")
self.assertTrue(len(threads) == 1,
"Stopped at breakpoint in exec'ed process.")
|
{
"content_hash": "638472342b94ab5125d553e74b2534c5",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 152,
"avg_line_length": 38.1858407079646,
"alnum_prop": 0.6229432213209734,
"repo_name": "llvm-mirror/lldb",
"id": "019df21771371fd6e06254251a18d2ebba6526d9",
"size": "4315",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "packages/Python/lldbsuite/test/functionalities/exec/TestExec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "131618"
},
{
"name": "C",
"bytes": "195293"
},
{
"name": "C++",
"bytes": "23346708"
},
{
"name": "CMake",
"bytes": "167302"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "50396"
},
{
"name": "Objective-C",
"bytes": "106956"
},
{
"name": "Objective-C++",
"bytes": "24806"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "3669886"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
}
|
import os
import subprocess
from textwrap import dedent
from mock import patch, Mock
import pytest
from pretend import stub
import pip
from pip.exceptions import RequirementsFileParseError
from pip.download import PipSession
from pip.index import PackageFinder
from pip.req.req_install import InstallRequirement
from pip.req.req_file import (parse_requirements, process_line, join_lines,
ignore_comments, break_args_options)
@pytest.fixture
def session():
return PipSession()
@pytest.fixture
def finder(session):
return PackageFinder([], [], session=session)
@pytest.fixture
def options(session):
return stub(
isolated_mode=False, default_vcs=None, index_url='default_url',
skip_requirements_regex=False,
format_control=pip.index.FormatControl(set(), set()))
class TestIgnoreComments(object):
"""tests for `ignore_comment`"""
def test_strip_empty_line(self):
lines = ['req1', '', 'req2']
result = ignore_comments(lines)
assert list(result) == ['req1', 'req2']
def test_strip_comment(self):
lines = ['req1', '# comment', 'req2']
result = ignore_comments(lines)
assert list(result) == ['req1', 'req2']
class TestJoinLines(object):
"""tests for `join_lines`"""
def test_join_lines(self):
lines = dedent('''\
line 1
line 2:1 \\
line 2:2
line 3:1 \\
line 3:2 \\
line 3:3
line 4
''').splitlines()
expect = [
'line 1',
'line 2:1 line 2:2',
'line 3:1 line 3:2 line 3:3',
'line 4',
]
assert expect == list(join_lines(lines))
class TestProcessLine(object):
"""tests for `process_line`"""
def test_parser_error(self):
with pytest.raises(RequirementsFileParseError):
list(process_line("--bogus", "file", 1))
def test_only_one_req_per_line(self):
# pkg_resources raises the ValueError
with pytest.raises(ValueError):
list(process_line("req1 req2", "file", 1))
def test_yield_line_requirement(self):
line = 'SomeProject'
filename = 'filename'
comes_from = '-r %s (line %s)' % (filename, 1)
req = InstallRequirement.from_line(line, comes_from=comes_from)
assert repr(list(process_line(line, filename, 1))[0]) == repr(req)
def test_yield_line_constraint(self):
line = 'SomeProject'
filename = 'filename'
comes_from = '-c %s (line %s)' % (filename, 1)
req = InstallRequirement.from_line(
line, comes_from=comes_from, constraint=True)
found_req = list(process_line(line, filename, 1, constraint=True))[0]
assert repr(found_req) == repr(req)
assert found_req.constraint is True
def test_yield_line_requirement_with_spaces_in_specifier(self):
line = 'SomeProject >= 2'
filename = 'filename'
comes_from = '-r %s (line %s)' % (filename, 1)
req = InstallRequirement.from_line(line, comes_from=comes_from)
assert repr(list(process_line(line, filename, 1))[0]) == repr(req)
assert req.req.specs == [('>=', '2')]
def test_yield_editable_requirement(self):
url = 'git+https://url#egg=SomeProject'
line = '-e %s' % url
filename = 'filename'
comes_from = '-r %s (line %s)' % (filename, 1)
req = InstallRequirement.from_editable(url, comes_from=comes_from)
assert repr(list(process_line(line, filename, 1))[0]) == repr(req)
def test_yield_editable_constraint(self):
url = 'git+https://url#egg=SomeProject'
line = '-e %s' % url
filename = 'filename'
comes_from = '-c %s (line %s)' % (filename, 1)
req = InstallRequirement.from_editable(
url, comes_from=comes_from, constraint=True)
found_req = list(process_line(line, filename, 1, constraint=True))[0]
assert repr(found_req) == repr(req)
assert found_req.constraint is True
def test_nested_requirements_file(self, monkeypatch):
line = '-r another_file'
req = InstallRequirement.from_line('SomeProject')
import pip.req.req_file
def stub_parse_requirements(req_url, finder, comes_from, options,
session, wheel_cache, constraint):
return [(req, constraint)]
parse_requirements_stub = stub(call=stub_parse_requirements)
monkeypatch.setattr(pip.req.req_file, 'parse_requirements',
parse_requirements_stub.call)
assert list(process_line(line, 'filename', 1)) == [(req, False)]
def test_nested_constraints_file(self, monkeypatch):
line = '-c another_file'
req = InstallRequirement.from_line('SomeProject')
import pip.req.req_file
def stub_parse_requirements(req_url, finder, comes_from, options,
session, wheel_cache, constraint):
return [(req, constraint)]
parse_requirements_stub = stub(call=stub_parse_requirements)
monkeypatch.setattr(pip.req.req_file, 'parse_requirements',
parse_requirements_stub.call)
assert list(process_line(line, 'filename', 1)) == [(req, True)]
def test_options_on_a_requirement_line(self):
line = 'SomeProject --install-option=yo1 --install-option yo2 '\
'--global-option="yo3" --global-option "yo4"'
filename = 'filename'
req = list(process_line(line, filename, 1))[0]
assert req.options == {
'global_options': ['yo3', 'yo4'],
'install_options': ['yo1', 'yo2']}
def test_set_isolated(self, options):
line = 'SomeProject'
filename = 'filename'
options.isolated_mode = True
result = process_line(line, filename, 1, options=options)
assert list(result)[0].isolated
def test_set_default_vcs(self, options):
url = 'https://url#egg=SomeProject'
line = '-e %s' % url
filename = 'filename'
options.default_vcs = 'git'
result = process_line(line, filename, 1, options=options)
assert list(result)[0].link.url == 'git+' + url
def test_set_finder_no_index(self, finder):
list(process_line("--no-index", "file", 1, finder=finder))
assert finder.index_urls == []
def test_set_finder_index_url(self, finder):
list(process_line("--index-url=url", "file", 1, finder=finder))
assert finder.index_urls == ['url']
def test_set_finder_find_links(self, finder):
list(process_line("--find-links=url", "file", 1, finder=finder))
assert finder.find_links == ['url']
def test_set_finder_extra_index_urls(self, finder):
list(process_line("--extra-index-url=url", "file", 1, finder=finder))
assert finder.index_urls == ['url']
def test_set_finder_use_wheel(self, finder):
list(process_line("--use-wheel", "file", 1, finder=finder))
no_use_wheel_fmt = pip.index.FormatControl(set(), set())
assert finder.format_control == no_use_wheel_fmt
def test_set_finder_no_use_wheel(self, finder):
list(process_line("--no-use-wheel", "file", 1, finder=finder))
no_use_wheel_fmt = pip.index.FormatControl(set([':all:']), set())
assert finder.format_control == no_use_wheel_fmt
def test_set_finder_trusted_host(self, finder):
list(process_line("--trusted-host=url", "file", 1, finder=finder))
assert finder.secure_origins == [('*', 'url', '*')]
def test_noop_always_unzip(self, finder):
# noop, but confirm it can be set
list(process_line("--always-unzip", "file", 1, finder=finder))
def test_noop_finder_no_allow_unsafe(self, finder):
# noop, but confirm it can be set
list(process_line("--no-allow-insecure", "file", 1, finder=finder))
def test_set_finder_allow_all_prereleases(self, finder):
list(process_line("--pre", "file", 1, finder=finder))
assert finder.allow_all_prereleases
def test_relative_local_find_links(self, finder, monkeypatch):
"""
Test a relative find_links path is joined with the req file directory
"""
req_file = '/path/req_file.txt'
nested_link = '/path/rel_path'
exists_ = os.path.exists
def exists(path):
if path == nested_link:
return True
else:
                return exists_(path)
monkeypatch.setattr(os.path, 'exists', exists)
list(process_line("--find-links=rel_path", req_file, 1,
finder=finder))
assert finder.find_links == [nested_link]
def test_relative_http_nested_req_files(self, finder, monkeypatch):
"""
Test a relative nested req file path is joined with the req file url
"""
req_file = 'http://me.com/me/req_file.txt'
def parse(*args, **kwargs):
return iter([])
mock_parse = Mock()
mock_parse.side_effect = parse
monkeypatch.setattr(pip.req.req_file, 'parse_requirements', mock_parse)
list(process_line("-r reqs.txt", req_file, 1, finder=finder))
call = mock_parse.mock_calls[0]
assert call[1][0] == 'http://me.com/me/reqs.txt'
def test_relative_local_nested_req_files(self, finder, monkeypatch):
"""
Test a relative nested req file path is joined with the req file dir
"""
req_file = '/path/req_file.txt'
def parse(*args, **kwargs):
return iter([])
mock_parse = Mock()
mock_parse.side_effect = parse
monkeypatch.setattr(pip.req.req_file, 'parse_requirements', mock_parse)
list(process_line("-r reqs.txt", req_file, 1, finder=finder))
call = mock_parse.mock_calls[0]
assert call[1][0] == '/path/reqs.txt'
def test_absolute_local_nested_req_files(self, finder, monkeypatch):
"""
Test an absolute nested req file path
"""
req_file = '/path/req_file.txt'
def parse(*args, **kwargs):
return iter([])
mock_parse = Mock()
mock_parse.side_effect = parse
monkeypatch.setattr(pip.req.req_file, 'parse_requirements', mock_parse)
list(process_line("-r /other/reqs.txt", req_file, 1, finder=finder))
call = mock_parse.mock_calls[0]
assert call[1][0] == '/other/reqs.txt'
def test_absolute_http_nested_req_file_in_local(self, finder, monkeypatch):
"""
Test a nested req file url in a local req file
"""
req_file = '/path/req_file.txt'
def parse(*args, **kwargs):
return iter([])
mock_parse = Mock()
mock_parse.side_effect = parse
monkeypatch.setattr(pip.req.req_file, 'parse_requirements', mock_parse)
list(process_line("-r http://me.com/me/reqs.txt", req_file, 1,
finder=finder))
call = mock_parse.mock_calls[0]
assert call[1][0] == 'http://me.com/me/reqs.txt'
def test_set_finder_process_dependency_links(self, finder):
list(process_line(
"--process-dependency-links", "file", 1, finder=finder))
assert finder.process_dependency_links
class TestBreakOptionsArgs(object):
def test_no_args(self):
assert ('', '--option') == break_args_options('--option')
def test_no_options(self):
assert ('arg arg', '') == break_args_options('arg arg')
def test_args_short_options(self):
result = break_args_options('arg arg -s')
assert ('arg arg', '-s') == result
def test_args_long_options(self):
result = break_args_options('arg arg --long')
assert ('arg arg', '--long') == result
class TestOptionVariants(object):
# this suite is really just testing optparse, but added it anyway
def test_variant1(self, finder):
list(process_line("-i url", "file", 1, finder=finder))
assert finder.index_urls == ['url']
def test_variant2(self, finder):
list(process_line("-i 'url'", "file", 1, finder=finder))
assert finder.index_urls == ['url']
def test_variant3(self, finder):
list(process_line("--index-url=url", "file", 1, finder=finder))
assert finder.index_urls == ['url']
def test_variant4(self, finder):
list(process_line("--index-url url", "file", 1, finder=finder))
assert finder.index_urls == ['url']
def test_variant5(self, finder):
list(process_line("--index-url='url'", "file", 1, finder=finder))
assert finder.index_urls == ['url']
class TestParseRequirements(object):
"""tests for `parse_requirements`"""
@pytest.mark.network
def test_remote_reqs_parse(self):
"""
Test parsing a simple remote requirements file
"""
# this requirements file just contains a comment previously this has
# failed in py3: https://github.com/pypa/pip/issues/760
for req in parse_requirements(
'https://raw.githubusercontent.com/pypa/'
'pip-test-package/master/'
'tests/req_just_comment.txt', session=PipSession()):
pass
def test_multiple_appending_options(self, tmpdir, finder, options):
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("--extra-index-url url1 \n")
fp.write("--extra-index-url url2 ")
list(parse_requirements(tmpdir.join("req1.txt"), finder=finder,
session=PipSession(), options=options))
assert finder.index_urls == ['url1', 'url2']
def test_skip_regex(self, tmpdir, finder, options):
options.skip_requirements_regex = '.*Bad.*'
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("--extra-index-url Bad \n")
fp.write("--extra-index-url Good ")
list(parse_requirements(tmpdir.join("req1.txt"), finder=finder,
options=options, session=PipSession()))
assert finder.index_urls == ['Good']
def test_join_lines(self, tmpdir, finder):
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("--extra-index-url url1 \\\n--extra-index-url url2")
list(parse_requirements(tmpdir.join("req1.txt"), finder=finder,
session=PipSession()))
assert finder.index_urls == ['url1', 'url2']
def test_req_file_parse_no_only_binary(self, data, finder):
list(parse_requirements(
data.reqfiles.join("supported_options2.txt"), finder,
session=PipSession()))
expected = pip.index.FormatControl(set(['fred']), set(['wilma']))
assert finder.format_control == expected
def test_req_file_parse_comment_start_of_line(self, tmpdir, finder):
"""
Test parsing comments in a requirements file
"""
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("# Comment ")
reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder,
session=PipSession()))
assert not reqs
def test_req_file_parse_comment_end_of_line_with_url(self, tmpdir, finder):
"""
Test parsing comments in a requirements file
"""
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("https://example.com/foo.tar.gz # Comment ")
reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder,
session=PipSession()))
assert len(reqs) == 1
assert reqs[0].link.url == "https://example.com/foo.tar.gz"
def test_req_file_parse_egginfo_end_of_line_with_url(self, tmpdir, finder):
"""
Test parsing comments in a requirements file
"""
with open(tmpdir.join("req1.txt"), "w") as fp:
fp.write("https://example.com/foo.tar.gz#egg=wat")
reqs = list(parse_requirements(tmpdir.join("req1.txt"), finder,
session=PipSession()))
assert len(reqs) == 1
assert reqs[0].name == "wat"
def test_req_file_no_finder(self, tmpdir):
"""
Test parsing a requirements file without a finder
"""
with open(tmpdir.join("req.txt"), "w") as fp:
fp.write("""
--find-links https://example.com/
--index-url https://example.com/
--extra-index-url https://two.example.com/
--no-use-wheel
--no-index
""")
parse_requirements(tmpdir.join("req.txt"), session=PipSession())
def test_install_requirements_with_options(self, tmpdir, finder, session,
options):
global_option = '--dry-run'
install_option = '--prefix=/opt'
content = '''
--only-binary :all:
INITools==2.0 --global-option="{global_option}" \
--install-option "{install_option}"
'''.format(global_option=global_option, install_option=install_option)
req_path = tmpdir.join('requirements.txt')
with open(req_path, 'w') as fh:
fh.write(content)
req = next(parse_requirements(
req_path, finder=finder, options=options, session=session))
req.source_dir = os.curdir
with patch.object(subprocess, 'Popen') as popen:
popen.return_value.stdout.readline.return_value = ""
try:
req.install([])
            except Exception:
pass
call = popen.call_args_list[0][0][0]
assert call.index(install_option) > \
call.index('install') > \
call.index(global_option) > 0
assert options.format_control.no_binary == set([':all:'])
assert options.format_control.only_binary == set([])
|
{
"content_hash": "a60b379b8de5db20aac1526099bcb25d",
"timestamp": "",
"source": "github",
"line_count": 485,
"max_line_length": 79,
"avg_line_length": 36.90515463917526,
"alnum_prop": 0.5869042963294039,
"repo_name": "prasaianooz/pip",
"id": "1e3dbbbe5ce97397343152d6dda86975083b502c",
"size": "17899",
"binary": false,
"copies": "9",
"ref": "refs/heads/develop",
"path": "tests/unit/test_req_file.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1372"
},
{
"name": "Python",
"bytes": "2200383"
},
{
"name": "Shell",
"bytes": "2326"
}
],
"symlink_target": ""
}
|
from typing import Union, TypeVar, Mapping, Any, List as TList, Callable, Tuple, Type
from numbers import Number
from uuid import UUID
from types import FunctionType
from amino import Either, List, L, _, Right, Lists, Maybe, Path, Map, Boolean, do, Do, Try, Dat
from amino.json.encoder import Encoder, encode_json, json_object_with_type
from amino.json.data import JsonError, Json, JsonArray, JsonScalar, JsonObject, JsonNull
A = TypeVar('A')
B = TypeVar('B')
Sub = TypeVar('Sub', bound=Dat)
class ScalarEncoder(Encoder[Union[Number, str, None]], pred=L(issubclass)(_, (Number, str, type(None)))):
def encode(self, a: Union[Number, str, None]) -> Either[JsonError, Json]:
return Right(JsonScalar(a))
class MapEncoder(Encoder[List], pred=L(issubclass)(_, Mapping)):
def encode(self, a: Map[str, Any]) -> Either[JsonError, Json]:
return Map(a).traverse(encode_json, Either) / JsonObject
class ListEncoder(Encoder[List], pred=L(issubclass)(_, TList)):
def encode(self, a: TList) -> Either[JsonError, Json]:
return Lists.wrap(a).traverse(encode_json, Either) / JsonArray
class MaybeEncoder(Encoder[Maybe], tpe=Maybe):
def encode(self, a: Maybe[A]) -> Either[JsonError, Json]:
return a.map(encode_json) | (lambda: Right(JsonNull.cons()))
class EitherEncoder(Encoder[Either], tpe=Either):
@do(Either[JsonError, Json])
def encode(self, a: Either[B, A]) -> Do:
json = yield encode_json(a.value)
yield Right(json_object_with_type(Map(value=json), type(a)))
class UUIDEncoder(Encoder[UUID], tpe=UUID):
def encode(self, a: UUID) -> Either[JsonError, Json]:
return Right(JsonScalar(str(a)))
class PathEncoder(Encoder[Path], tpe=Path):
def encode(self, a: Path) -> Either[JsonError, Json]:
return Right(JsonScalar(str(a)))
class BooleanEncoder(Encoder[Boolean], tpe=Boolean):
def encode(self, a: Boolean) -> Either[JsonError, Json]:
return Right(JsonScalar(a.value))
@do(Either[JsonError, Json])
def encode_instance(a: A, tpe: type, module: str, names: list) -> Do:
mod_json = yield encode_json(module)
names_json = yield encode_json(names)
return json_object_with_type(Map(module=mod_json, names=names_json), tpe)
@do(Either[JsonError, Json])
def encode_instance_simple(data: A, tpe: type) -> Do:
mod = yield Try(lambda: data.__module__)
names = yield Try(lambda: data.__qualname__.split('.'))
yield encode_instance(data, tpe, mod, names)
class FunctionEncoder(Encoder[Callable], tpe=FunctionType):
@do(Either[JsonError, Json])
def encode(self, data: Callable) -> Do:
yield encode_instance_simple(data, Callable)
class TupleEncoder(Encoder[Tuple], tpe=tuple):
@do(Either[JsonError, Json])
def encode(self, data: Tuple) -> Do:
array = yield encode_json(Lists.wrap(data))
return json_object_with_type(Map(data=array), tuple)
class TypeEncoder(Encoder[Type], tpe=type):
def encode(self, data: Type) -> Either[JsonError, Json]:
return encode_instance_simple(data, Type)
class DatEncoder(Encoder, tpe=Dat):
@do(Either[JsonError, Map])
def encode(self, a: Sub) -> Do:
jsons = yield a._dat__values.traverse(encode_json, Either)
yield Right(json_object_with_type(Map(a._dat__names.zip(jsons)), type(a)))
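# Illustrative usage sketch (the Point class, its fields and the Dat['Point']
# subscript form are assumptions, not part of this module):
#
#     class Point(Dat['Point']):
#         def __init__(self, x: int, y: int) -> None:
#             self.x = x
#             self.y = y
#
#     encode_json(Point(1, 2))  # Right(JsonObject(...)) carrying x, y and the type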
__all__ = ('ListEncoder', 'ScalarEncoder', 'MaybeEncoder', 'UUIDEncoder', 'PathEncoder', 'MapEncoder', 'EitherEncoder',
'BooleanEncoder', 'encode_instance', 'FunctionEncoder', 'TupleEncoder', 'TypeEncoder', 'DatEncoder',)
|
{
"content_hash": "5af60e8e5214b40194bd54fb04f56ae0",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 119,
"avg_line_length": 32.74311926605505,
"alnum_prop": 0.6789016531241244,
"repo_name": "tek/amino",
"id": "a071d1b104cf15e0870baac207584347c2150932",
"size": "3569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "amino/json/encoders.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "341735"
}
],
"symlink_target": ""
}
|
""" Access and control log capturing. """
import logging
import re
from contextlib import contextmanager
from io import StringIO
from typing import AbstractSet
from typing import Dict
from typing import Generator
from typing import List
from typing import Mapping
from typing import Optional
import pytest
from _pytest import nodes
from _pytest.compat import nullcontext
from _pytest.config import _strtobool
from _pytest.config import Config
from _pytest.config import create_terminal_writer
from _pytest.pathlib import Path
DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s"
DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S"
_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m")
def _remove_ansi_escape_sequences(text):
return _ANSI_ESCAPE_SEQ.sub("", text)
class ColoredLevelFormatter(logging.Formatter):
"""
Colorize the %(levelname)..s part of the log format passed to __init__.
"""
LOGLEVEL_COLOROPTS = {
logging.CRITICAL: {"red"},
logging.ERROR: {"red", "bold"},
logging.WARNING: {"yellow"},
logging.WARN: {"yellow"},
logging.INFO: {"green"},
logging.DEBUG: {"purple"},
logging.NOTSET: set(),
} # type: Mapping[int, AbstractSet[str]]
LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*s)")
def __init__(self, terminalwriter, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._original_fmt = self._style._fmt
self._level_to_fmt_mapping = {} # type: Dict[int, str]
assert self._fmt is not None
levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt)
if not levelname_fmt_match:
return
levelname_fmt = levelname_fmt_match.group()
for level, color_opts in self.LOGLEVEL_COLOROPTS.items():
formatted_levelname = levelname_fmt % {
"levelname": logging.getLevelName(level)
}
# add ANSI escape sequences around the formatted levelname
color_kwargs = {name: True for name in color_opts}
colorized_formatted_levelname = terminalwriter.markup(
formatted_levelname, **color_kwargs
)
self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub(
colorized_formatted_levelname, self._fmt
)
def format(self, record):
fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt)
self._style._fmt = fmt
return super().format(record)
class PercentStyleMultiline(logging.PercentStyle):
"""A logging style with special support for multiline messages.
If the message of a record consists of multiple lines, this style
formats the message as if each line were logged separately.
"""
def __init__(self, fmt, auto_indent):
super().__init__(fmt)
self._auto_indent = self._get_auto_indent(auto_indent)
@staticmethod
def _update_message(record_dict, message):
tmp = record_dict.copy()
tmp["message"] = message
return tmp
@staticmethod
def _get_auto_indent(auto_indent_option) -> int:
"""Determines the current auto indentation setting
Specify auto indent behavior (on/off/fixed) by passing in
extra={"auto_indent": [value]} to the call to logging.log() or
using a --log-auto-indent [value] command line or the
log_auto_indent [value] config option.
Default behavior is auto-indent off.
Using the string "True" or "on" or the boolean True as the value
turns auto indent on, using the string "False" or "off" or the
boolean False or the int 0 turns it off, and specifying a
positive integer fixes the indentation position to the value
specified.
Any other values for the option are invalid, and will silently be
converted to the default.
:param any auto_indent_option: User specified option for indentation
from command line, config or extra kwarg. Accepts int, bool or str.
str option accepts the same range of values as boolean config options,
as well as positive integers represented in str form.
:returns: indentation value, which can be
-1 (automatically determine indentation) or
0 (auto-indent turned off) or
>0 (explicitly set indentation position).
"""
if type(auto_indent_option) is int:
return int(auto_indent_option)
elif type(auto_indent_option) is str:
try:
return int(auto_indent_option)
except ValueError:
pass
try:
if _strtobool(auto_indent_option):
return -1
except ValueError:
return 0
elif type(auto_indent_option) is bool:
if auto_indent_option:
return -1
return 0
def format(self, record):
if "\n" in record.message:
if hasattr(record, "auto_indent"):
# passed in from the "extra={}" kwarg on the call to logging.log()
auto_indent = self._get_auto_indent(record.auto_indent)
else:
auto_indent = self._auto_indent
if auto_indent:
lines = record.message.splitlines()
formatted = self._fmt % self._update_message(record.__dict__, lines[0])
if auto_indent < 0:
indentation = _remove_ansi_escape_sequences(formatted).find(
lines[0]
)
else:
# optimizes logging by allowing a fixed indentation
indentation = auto_indent
lines[0] = formatted
return ("\n" + " " * indentation).join(lines)
return self._fmt % record.__dict__
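# Illustrative behavior sketch for PercentStyleMultiline: with auto-indent
# enabled and the format "%(levelname)s %(message)s", a record whose message is
# "first line\nsecond line" renders roughly as
#   "INFO first line\n     second line"
# i.e. continuation lines are padded to start under the message's first character.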
def get_option_ini(config, *names):
for name in names:
ret = config.getoption(name) # 'default' arg won't work as expected
if ret is None:
ret = config.getini(name)
if ret:
return ret
def pytest_addoption(parser):
"""Add options to control log capturing."""
group = parser.getgroup("logging")
def add_option_ini(option, dest, default=None, type=None, **kwargs):
parser.addini(
dest, default=default, type=type, help="default value for " + option
)
group.addoption(option, dest=dest, **kwargs)
add_option_ini(
"--no-print-logs",
dest="log_print",
action="store_const",
const=False,
default=True,
type="bool",
help="disable printing caught logs on failed tests.",
)
add_option_ini(
"--log-level",
dest="log_level",
default=None,
metavar="LEVEL",
help=(
"level of messages to catch/display.\n"
"Not set by default, so it depends on the root/parent log handler's"
' effective level, where it is "WARNING" by default.'
),
)
add_option_ini(
"--log-format",
dest="log_format",
default=DEFAULT_LOG_FORMAT,
help="log format as used by the logging module.",
)
add_option_ini(
"--log-date-format",
dest="log_date_format",
default=DEFAULT_LOG_DATE_FORMAT,
help="log date format as used by the logging module.",
)
parser.addini(
"log_cli",
default=False,
type="bool",
help='enable log display during test run (also known as "live logging").',
)
add_option_ini(
"--log-cli-level", dest="log_cli_level", default=None, help="cli logging level."
)
add_option_ini(
"--log-cli-format",
dest="log_cli_format",
default=None,
help="log format as used by the logging module.",
)
add_option_ini(
"--log-cli-date-format",
dest="log_cli_date_format",
default=None,
help="log date format as used by the logging module.",
)
add_option_ini(
"--log-file",
dest="log_file",
default=None,
help="path to a file when logging will be written to.",
)
add_option_ini(
"--log-file-level",
dest="log_file_level",
default=None,
help="log file logging level.",
)
add_option_ini(
"--log-file-format",
dest="log_file_format",
default=DEFAULT_LOG_FORMAT,
help="log format as used by the logging module.",
)
add_option_ini(
"--log-file-date-format",
dest="log_file_date_format",
default=DEFAULT_LOG_DATE_FORMAT,
help="log date format as used by the logging module.",
)
add_option_ini(
"--log-auto-indent",
dest="log_auto_indent",
default=None,
help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.",
)
@contextmanager
def catching_logs(handler, formatter=None, level=None):
"""Context manager that prepares the whole logging machinery properly."""
root_logger = logging.getLogger()
if formatter is not None:
handler.setFormatter(formatter)
if level is not None:
handler.setLevel(level)
# Adding the same handler twice would confuse logging system.
# Just don't do that.
add_new_handler = handler not in root_logger.handlers
if add_new_handler:
root_logger.addHandler(handler)
if level is not None:
orig_level = root_logger.level
root_logger.setLevel(min(orig_level, level))
try:
yield handler
finally:
if level is not None:
root_logger.setLevel(orig_level)
if add_new_handler:
root_logger.removeHandler(handler)
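# Illustrative usage sketch (LogCaptureHandler is defined just below; the
# logger call is hypothetical):
#
#     with catching_logs(LogCaptureHandler(), level=logging.INFO) as handler:
#         logging.getLogger().info("hello")
#     assert handler.records  # the INFO record was captured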
class LogCaptureHandler(logging.StreamHandler):
"""A logging handler that stores log records and the log text."""
def __init__(self) -> None:
"""Creates a new log handler."""
logging.StreamHandler.__init__(self, StringIO())
self.records = [] # type: List[logging.LogRecord]
def emit(self, record: logging.LogRecord) -> None:
"""Keep the log records in a list in addition to the log text."""
self.records.append(record)
logging.StreamHandler.emit(self, record)
def reset(self) -> None:
self.records = []
self.stream = StringIO()
class LogCaptureFixture:
"""Provides access and control of log capturing."""
def __init__(self, item) -> None:
"""Creates a new funcarg."""
self._item = item
# dict of log name -> log level
self._initial_log_levels = {} # type: Dict[str, int]
def _finalize(self) -> None:
"""Finalizes the fixture.
This restores the log levels changed by :meth:`set_level`.
"""
# restore log levels
for logger_name, level in self._initial_log_levels.items():
logger = logging.getLogger(logger_name)
logger.setLevel(level)
@property
def handler(self) -> LogCaptureHandler:
"""
:rtype: LogCaptureHandler
"""
return self._item.catch_log_handler # type: ignore[no-any-return] # noqa: F723
def get_records(self, when: str) -> List[logging.LogRecord]:
"""
Get the logging records for one of the possible test phases.
:param str when:
Which test phase to obtain the records from. Valid values are: "setup", "call" and "teardown".
:rtype: List[logging.LogRecord]
:return: the list of captured records at the given stage
.. versionadded:: 3.4
"""
handler = self._item.catch_log_handlers.get(when)
if handler:
return handler.records # type: ignore[no-any-return] # noqa: F723
else:
return []
@property
def text(self):
"""Returns the formatted log text."""
return _remove_ansi_escape_sequences(self.handler.stream.getvalue())
@property
def records(self):
"""Returns the list of log records."""
return self.handler.records
@property
def record_tuples(self):
"""Returns a list of a stripped down version of log records intended
for use in assertion comparison.
The format of the tuple is:
(logger_name, log_level, message)
"""
return [(r.name, r.levelno, r.getMessage()) for r in self.records]
@property
def messages(self):
"""Returns a list of format-interpolated log messages.
Unlike 'records', which contains the format string and parameters for interpolation, log messages in this list
are all interpolated.
Unlike 'text', which contains the output from the handler, log messages in this list are unadorned with
levels, timestamps, etc, making exact comparisons more reliable.
Note that traceback or stack info (from :func:`logging.exception` or the `exc_info` or `stack_info` arguments
to the logging functions) is not included, as this is added by the formatter in the handler.
.. versionadded:: 3.7
"""
return [r.getMessage() for r in self.records]
def clear(self):
"""Reset the list of log records and the captured log text."""
self.handler.reset()
def set_level(self, level, logger=None):
"""Sets the level for capturing of logs. The level will be restored to its previous value at the end of
the test.
        :param int level: the level to set.
        :param str logger: the logger whose level to update. If not given, the root logger's level is updated.
.. versionchanged:: 3.4
The levels of the loggers changed by this function will be restored to their initial values at the
end of the test.
"""
logger_name = logger
logger = logging.getLogger(logger_name)
# save the original log-level to restore it during teardown
self._initial_log_levels.setdefault(logger_name, logger.level)
logger.setLevel(level)
@contextmanager
def at_level(self, level, logger=None):
"""Context manager that sets the level for capturing of logs. After the end of the 'with' statement the
level is restored to its original value.
        :param int level: the level to set.
        :param str logger: the logger whose level to update. If not given, the root logger's level is updated.
"""
logger = logging.getLogger(logger)
orig_level = logger.level
logger.setLevel(level)
try:
yield
finally:
logger.setLevel(orig_level)
@pytest.fixture
def caplog(request):
"""Access and control log capturing.
Captured logs are available through the following properties/methods::
* caplog.messages -> list of format-interpolated log messages
* caplog.text -> string containing formatted log output
* caplog.records -> list of logging.LogRecord instances
* caplog.record_tuples -> list of (logger_name, level, message) tuples
* caplog.clear() -> clear captured records and formatted log output string
"""
result = LogCaptureFixture(request.node)
yield result
result._finalize()
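# Illustrative usage sketch (test name and logger name are hypothetical):
#
#     def test_warning_logged(caplog):
#         logging.getLogger("app").warning("disk low")
#         assert ("app", logging.WARNING, "disk low") in caplog.record_tuples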
def get_log_level_for_setting(config: Config, *setting_names: str) -> Optional[int]:
for setting_name in setting_names:
log_level = config.getoption(setting_name)
if log_level is None:
log_level = config.getini(setting_name)
if log_level:
break
else:
return None
if isinstance(log_level, str):
log_level = log_level.upper()
try:
return int(getattr(logging, log_level, log_level))
except ValueError:
# Python logging does not recognise this as a logging level
raise pytest.UsageError(
"'{}' is not recognized as a logging level name for "
"'{}'. Please consider passing the "
"logging level num instead.".format(log_level, setting_name)
)
# run after terminalreporter/capturemanager are configured
@pytest.hookimpl(trylast=True)
def pytest_configure(config):
config.pluginmanager.register(LoggingPlugin(config), "logging-plugin")
class LoggingPlugin:
"""Attaches to the logging module and captures log messages for each test.
"""
def __init__(self, config: Config) -> None:
"""Creates a new plugin to capture log messages.
The formatter can be safely shared across all handlers so
create a single one for the entire test session here.
"""
self._config = config
self.print_logs = get_option_ini(config, "log_print")
if not self.print_logs:
from _pytest.warnings import _issue_warning_captured
from _pytest.deprecated import NO_PRINT_LOGS
_issue_warning_captured(NO_PRINT_LOGS, self._config.hook, stacklevel=2)
self.formatter = self._create_formatter(
get_option_ini(config, "log_format"),
get_option_ini(config, "log_date_format"),
get_option_ini(config, "log_auto_indent"),
)
self.log_level = get_log_level_for_setting(config, "log_level")
self.log_file_level = get_log_level_for_setting(config, "log_file_level")
self.log_file_format = get_option_ini(config, "log_file_format", "log_format")
self.log_file_date_format = get_option_ini(
config, "log_file_date_format", "log_date_format"
)
self.log_file_formatter = logging.Formatter(
self.log_file_format, datefmt=self.log_file_date_format
)
log_file = get_option_ini(config, "log_file")
if log_file:
self.log_file_handler = logging.FileHandler(
log_file, mode="w", encoding="UTF-8"
) # type: Optional[logging.FileHandler]
self.log_file_handler.setFormatter(self.log_file_formatter)
else:
self.log_file_handler = None
self.log_cli_handler = None
self.live_logs_context = lambda: nullcontext()
# Note that the lambda for the live_logs_context is needed because
# live_logs_context can otherwise not be entered multiple times due
# to limitations of contextlib.contextmanager.
if self._log_cli_enabled():
self._setup_cli_logging()
def _create_formatter(self, log_format, log_date_format, auto_indent):
# color option doesn't exist if terminal plugin is disabled
color = getattr(self._config.option, "color", "no")
if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search(
log_format
):
formatter = ColoredLevelFormatter(
create_terminal_writer(self._config), log_format, log_date_format
) # type: logging.Formatter
else:
formatter = logging.Formatter(log_format, log_date_format)
formatter._style = PercentStyleMultiline(
formatter._style._fmt, auto_indent=auto_indent
)
return formatter
def _setup_cli_logging(self):
config = self._config
terminal_reporter = config.pluginmanager.get_plugin("terminalreporter")
if terminal_reporter is None:
# terminal reporter is disabled e.g. by pytest-xdist.
return
capture_manager = config.pluginmanager.get_plugin("capturemanager")
# if capturemanager plugin is disabled, live logging still works.
log_cli_handler = _LiveLoggingStreamHandler(terminal_reporter, capture_manager)
log_cli_formatter = self._create_formatter(
get_option_ini(config, "log_cli_format", "log_format"),
get_option_ini(config, "log_cli_date_format", "log_date_format"),
get_option_ini(config, "log_auto_indent"),
)
log_cli_level = get_log_level_for_setting(config, "log_cli_level", "log_level")
self.log_cli_handler = log_cli_handler
self.live_logs_context = lambda: catching_logs(
log_cli_handler, formatter=log_cli_formatter, level=log_cli_level
)
def set_log_path(self, fname):
"""Public method, which can set filename parameter for
Logging.FileHandler(). Also creates parent directory if
it does not exist.
.. warning::
            Please consider this an experimental API.
"""
fname = Path(fname)
if not fname.is_absolute():
fname = Path(self._config.rootdir, fname)
if not fname.parent.exists():
fname.parent.mkdir(exist_ok=True, parents=True)
self.log_file_handler = logging.FileHandler(
str(fname), mode="w", encoding="UTF-8"
)
self.log_file_handler.setFormatter(self.log_file_formatter)
def _log_cli_enabled(self):
"""Return True if log_cli should be considered enabled, either explicitly
or because --log-cli-level was given in the command-line.
"""
return self._config.getoption(
"--log-cli-level"
) is not None or self._config.getini("log_cli")
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_collection(self) -> Generator[None, None, None]:
with self.live_logs_context():
if self.log_cli_handler:
self.log_cli_handler.set_when("collection")
if self.log_file_handler is not None:
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield
else:
yield
@contextmanager
def _runtest_for(self, item, when):
with self._runtest_for_main(item, when):
if self.log_file_handler is not None:
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield
else:
yield
@contextmanager
def _runtest_for_main(
self, item: nodes.Item, when: str
) -> Generator[None, None, None]:
"""Implements the internals of pytest_runtest_xxx() hook."""
with catching_logs(
LogCaptureHandler(), formatter=self.formatter, level=self.log_level
) as log_handler:
if self.log_cli_handler:
self.log_cli_handler.set_when(when)
if item is None:
yield # run the test
return
if not hasattr(item, "catch_log_handlers"):
item.catch_log_handlers = {} # type: ignore[attr-defined] # noqa: F821
item.catch_log_handlers[when] = log_handler # type: ignore[attr-defined] # noqa: F821
item.catch_log_handler = log_handler # type: ignore[attr-defined] # noqa: F821
try:
yield # run test
finally:
if when == "teardown":
del item.catch_log_handler # type: ignore[attr-defined] # noqa: F821
del item.catch_log_handlers # type: ignore[attr-defined] # noqa: F821
if self.print_logs:
# Add a captured log section to the report.
log = log_handler.stream.getvalue().strip()
item.add_report_section(when, "log", log)
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_setup(self, item):
with self._runtest_for(item, "setup"):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_call(self, item):
with self._runtest_for(item, "call"):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_teardown(self, item):
with self._runtest_for(item, "teardown"):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_logstart(self):
if self.log_cli_handler:
self.log_cli_handler.reset()
with self._runtest_for(None, "start"):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_logfinish(self):
with self._runtest_for(None, "finish"):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_logreport(self):
with self._runtest_for(None, "logreport"):
yield
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_sessionfinish(self):
with self.live_logs_context():
if self.log_cli_handler:
self.log_cli_handler.set_when("sessionfinish")
if self.log_file_handler is not None:
try:
with catching_logs(
self.log_file_handler, level=self.log_file_level
):
yield
finally:
# Close the FileHandler explicitly.
# (logging.shutdown might have lost the weakref?!)
self.log_file_handler.close()
else:
yield
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_sessionstart(self):
with self.live_logs_context():
if self.log_cli_handler:
self.log_cli_handler.set_when("sessionstart")
if self.log_file_handler is not None:
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield
else:
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_runtestloop(self, session):
"""Runs all collected test items."""
if session.config.option.collectonly:
yield
return
if self._log_cli_enabled() and self._config.getoption("verbose") < 1:
# setting verbose flag is needed to avoid messy test progress output
self._config.option.verbose = 1
with self.live_logs_context():
if self.log_file_handler is not None:
with catching_logs(self.log_file_handler, level=self.log_file_level):
yield # run all the tests
else:
yield # run all the tests
class _LiveLoggingStreamHandler(logging.StreamHandler):
"""
Custom StreamHandler used by the live logging feature: it will write a newline before the first log message
in each test.
    During live logging we must also explicitly disable stdout/stderr capturing,
    otherwise it will get captured and won't appear in the terminal.
"""
def __init__(self, terminal_reporter, capture_manager):
"""
:param _pytest.terminal.TerminalReporter terminal_reporter:
:param _pytest.capture.CaptureManager capture_manager:
"""
logging.StreamHandler.__init__(self, stream=terminal_reporter)
self.capture_manager = capture_manager
self.reset()
self.set_when(None)
self._test_outcome_written = False
def reset(self):
"""Reset the handler; should be called before the start of each test"""
self._first_record_emitted = False
def set_when(self, when):
"""Prepares for the given test phase (setup/call/teardown)"""
self._when = when
self._section_name_shown = False
if when == "start":
self._test_outcome_written = False
def emit(self, record):
ctx_manager = (
self.capture_manager.global_and_fixture_disabled()
if self.capture_manager
else nullcontext()
)
with ctx_manager:
if not self._first_record_emitted:
self.stream.write("\n")
self._first_record_emitted = True
elif self._when in ("teardown", "finish"):
if not self._test_outcome_written:
self._test_outcome_written = True
self.stream.write("\n")
if not self._section_name_shown and self._when:
self.stream.section("live log " + self._when, sep="-", bold=True)
self._section_name_shown = True
logging.StreamHandler.emit(self, record)
|
{
"content_hash": "57bd9b908981d3a5971145581adb1727",
"timestamp": "",
"source": "github",
"line_count": 787,
"max_line_length": 118,
"avg_line_length": 35.81829733163914,
"alnum_prop": 0.601546702614495,
"repo_name": "alfredodeza/pytest",
"id": "5e60a232172445f9b482faee7e94b1cb3794a081",
"size": "28189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/_pytest/logging.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "837013"
}
],
"symlink_target": ""
}
|
from auth import *
from database import *
from handler import *
from router import *
from settings import *
|
{
"content_hash": "8a85ea369f46a44249d68c413060b84b",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 22,
"avg_line_length": 21.6,
"alnum_prop": 0.7685185185185185,
"repo_name": "lusionx/sae-lxpy-1",
"id": "a712a4117b587c2acdb0d2d85b1a48114766e53a",
"size": "125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toolkit/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2135802"
},
{
"name": "JavaScript",
"bytes": "8222547"
},
{
"name": "Python",
"bytes": "426925"
},
{
"name": "Shell",
"bytes": "81"
}
],
"symlink_target": ""
}
|
import re
import threading
from neutron_lib import context as nl_context
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
import requests
from six.moves import html_parser
from networking_mlnx._i18n import _LI, _LE, _LW
from networking_mlnx.db import db
from networking_mlnx.journal import dependency_validations
from networking_mlnx.plugins.ml2.drivers.sdn import client
from networking_mlnx.plugins.ml2.drivers.sdn import constants as sdn_const
from networking_mlnx.plugins.ml2.drivers.sdn import exceptions as sdn_exc
from networking_mlnx.plugins.ml2.drivers.sdn import utils as sdn_utils
LOG = logging.getLogger(__name__)
def call_thread_on_end(func):
def new_func(obj, *args, **kwargs):
return_value = func(obj, *args, **kwargs)
obj.journal.set_sync_event()
return return_value
return new_func
def record(db_session, object_type, object_uuid, operation, data,
context=None):
db.create_pending_row(db_session, object_type, object_uuid, operation,
data)
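# Hypothetical usage sketch (ExampleDriver and create_network are illustrative
# names, not part of this module): a driver method wrapped with
# call_thread_on_end records a pending row and wakes the journal thread once
# the method returns.
#
#     class ExampleDriver(object):
#         def __init__(self):
#             self.journal = SdnJournalThread()
#
#         @call_thread_on_end
#         def create_network(self, context, network):
#             record(context.session, 'network', network['id'],
#                    sdn_const.POST, network)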
class SdnJournalThread(object):
"""Thread worker for the SDN Journal Database."""
def __init__(self):
self.client = client.SdnRestClient.create_client()
self._sync_timeout = cfg.CONF.sdn.sync_timeout
self._row_retry_count = cfg.CONF.sdn.retry_count
self.event = threading.Event()
self.lock = threading.Lock()
self._sync_thread = self.start_sync_thread()
self._start_sync_timer()
def start_sync_thread(self):
# Start the sync thread
LOG.debug("Starting a new sync thread")
sync_thread = threading.Thread(
name='sync',
target=self.run_sync_thread)
sync_thread.start()
return sync_thread
def set_sync_event(self):
# Prevent race when starting the timer
with self.lock:
LOG.debug("Resetting thread timer")
self._timer.cancel()
self._start_sync_timer()
self.event.set()
def _start_sync_timer(self):
self._timer = threading.Timer(self._sync_timeout,
self.set_sync_event)
self._timer.start()
def run_sync_thread(self, exit_after_run=False):
while True:
try:
self.event.wait()
self.event.clear()
context = nl_context.get_admin_context()
self._sync_pending_rows(context.session, exit_after_run)
self._sync_progress_rows(context.session)
LOG.debug("Clearing sync thread event")
if exit_after_run:
# Permanently waiting thread model breaks unit tests
# Adding this arg to exit here only for unit tests
break
except Exception:
# Catch exceptions to protect the thread while running
LOG.exception(_LE("Error on run_sync_thread"))
def _sync_pending_rows(self, session, exit_after_run):
while True:
LOG.debug("sync_pending_rows operation walking database")
row = db.get_oldest_pending_db_row_with_lock(session)
if not row:
LOG.debug("No rows to sync")
break
# Validate the operation
valid = dependency_validations.validate(session, row)
if not valid:
LOG.info(_LI("%(operation)s %(type)s %(uuid)s is not a "
"valid operation yet, skipping for now"),
{'operation': row.operation,
'type': row.object_type,
'uuid': row.object_uuid})
# Set row back to pending.
db.update_db_row_state(session, row, sdn_const.PENDING)
if exit_after_run:
break
continue
LOG.info(_LI("Syncing %(operation)s %(type)s %(uuid)s"),
{'operation': row.operation, 'type': row.object_type,
'uuid': row.object_uuid})
            # Sync this row to NEO.
urlpath = sdn_utils.strings_to_url(row.object_type)
if row.operation != sdn_const.POST:
urlpath = sdn_utils.strings_to_url(urlpath, row.object_uuid)
try:
client_operation_method = (
getattr(self.client, row.operation.lower()))
response = (
client_operation_method(
urlpath, jsonutils.loads(row.data)))
if response.status_code == requests.codes.not_implemented:
db.update_db_row_state(session, row, sdn_const.COMPLETED)
elif (response.status_code == requests.codes.not_found and
row.operation == sdn_const.DELETE):
db.update_db_row_state(session, row, sdn_const.COMPLETED)
else:
# update in progress and job_id
job_id = None
try:
try:
job_id = response.json()
except ValueError:
                            # Note(moshele): workaround for NEO, because
                            # for POST port requests it returns HTML
                            # and not JSON
parser = html_parser.HTMLParser()
parser.feed(response.text)
parser.handle_starttag('a', [])
url = parser.get_starttag_text()
match = re.match(
r'<a href="([a-zA-Z0-9\/]+)">', url)
if match:
job_id = match.group(1)
except Exception as e:
LOG.error(_LE("Failed to extract job_id %s"), e)
if job_id:
db.update_db_row_job_id(
session, row, job_id=job_id)
db.update_db_row_state(
session, row, sdn_const.MONITORING)
else:
LOG.warning(_LW("object %s has NULL job_id"),
row.object_uuid)
except (sdn_exc.SDNConnectionError, sdn_exc.SDNLoginError):
                # Log an error and increase the retry count. If the retry
                # count exceeds the limit, move the row to the failed state.
LOG.error(_LE("Cannot connect to the NEO Controller"))
db.update_pending_db_row_retry(session, row,
self._row_retry_count)
# Break out of the loop and retry with the next
# timer interval
break
def _sync_progress_rows(self, session):
        # 1. get all in-progress jobs
        # 2. get their status from NEO
        # 3. update the status if completed/failed
LOG.debug("sync_progress_rows operation walking database")
rows = db.get_all_monitoring_db_row_by_oldest(session)
if not rows:
LOG.debug("No rows to sync")
return
for row in rows:
try:
if row.job_id is None:
LOG.warning(_LW("object %s has NULL job_id"),
row.object_uuid)
continue
response = self.client.get(row.job_id.strip("/"))
if response:
try:
job_status = response.json().get('Status')
if job_status == 'Completed':
db.update_db_row_state(
session, row, sdn_const.COMPLETED)
continue
elif job_status in ("Pending", "Running"):
LOG.debug("NEO Job id %(job_id)s is %(status)s "
"continue monitoring",
{'job_id': row.job_id,
'status': job_status})
continue
else:
LOG.error(_LE("NEO Job id %(job_id)s, failed with"
" %(status)s"),
{'job_id': row.job_id,
'status': job_status})
db.update_db_row_state(
session, row, sdn_const.PENDING)
                    except (ValueError, AttributeError):
LOG.error(_LE("failed to extract response for job"
"id %s"), row.job_id)
                else:
                    LOG.error(_LE("NEO Job id %(job_id)s failed: no response "
                                  "from the NEO Controller"),
                              {'job_id': row.job_id})
                    db.update_db_row_state(session, row, sdn_const.PENDING)
except (sdn_exc.SDNConnectionError, sdn_exc.SDNLoginError):
                # Don't increase the retry count, just log an error
LOG.error(_LE("Cannot connect to the NEO Controller"))
db.update_db_row_state(session, row, sdn_const.PENDING)
# Break out of the loop and retry with the next
# timer interval
break
|
{
"content_hash": "57df3a0083022ccce0bdb97d7942410e",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 78,
"avg_line_length": 43.56363636363636,
"alnum_prop": 0.49561769616026713,
"repo_name": "openstack/networking-mlnx",
"id": "c83585ab3b2c12a8d997572034bd9a36f7c417be",
"size": "10226",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "networking_mlnx/journal/journal.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1025"
},
{
"name": "Python",
"bytes": "280379"
},
{
"name": "Shell",
"bytes": "10336"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
setup(name='riffa',
version='2.0',
description='RIFFA 2.0 Python Library',
author='Matt Jacobsen',
author_email='mdjacobs@cs.ucsd.edu',
url='http://cseweb.ucsd.edu/~mdjacobs',
platforms='Linux,Windows',
license='Other/Proprietary License',
py_modules=['riffa'],
long_description='''
Copyright (c) 2016, The Regents of the University of California All
rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of The Regents of the University of California
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL REGENTS OF THE
UNIVERSITY OF CALIFORNIA BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.'''
)
|
{
"content_hash": "d467128728d1787511aad24c928e868a",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 70,
"avg_line_length": 44.81818181818182,
"alnum_prop": 0.7565922920892495,
"repo_name": "drichmond/riffa",
"id": "85f506303b83ace220f105292414b78f8ec52901",
"size": "3932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "990"
},
{
"name": "C",
"bytes": "240052"
},
{
"name": "C++",
"bytes": "2324"
},
{
"name": "Inno Setup",
"bytes": "4440"
},
{
"name": "Java",
"bytes": "21822"
},
{
"name": "Makefile",
"bytes": "191126"
},
{
"name": "Matlab",
"bytes": "10791"
},
{
"name": "Objective-C",
"bytes": "6418"
},
{
"name": "Python",
"bytes": "11509"
},
{
"name": "Shell",
"bytes": "2152"
},
{
"name": "SystemVerilog",
"bytes": "56823"
},
{
"name": "Verilog",
"bytes": "1961548"
}
],
"symlink_target": ""
}
|
'''
Focus Behavior
==============
The :class:`~kivy.uix.behaviors.FocusBehavior`
`mixin <https://en.wikipedia.org/wiki/Mixin>`_ class provides
keyboard focus behavior. When combined with other
FocusBehavior widgets it allows one to cycle focus among them by pressing
tab. In addition, upon gaining focus, the instance will automatically
receive keyboard input.
Focus, very different from selection, is intimately tied with the keyboard;
each keyboard can focus on zero or one widgets, and each widget can only
have the focus of one keyboard. However, multiple keyboards can focus
simultaneously on different widgets. When escape is hit, the widget having
the focus of that keyboard will de-focus.
Managing focus
--------------
In essence, focus is implemented as a doubly linked list, where each
node holds a (weak) reference to the instance before it and after it,
as visualized when cycling through the nodes using tab (forward) or
shift+tab (backward). If a previous or next widget is not specified,
:attr:`focus_next` and :attr:`focus_previous` defaults to `None`. This
means that the :attr:`~kivy.uix.widget.Widget.children` list and
:attr:`parents <kivy.uix.widget.Widget.parent>` are
walked to find the next focusable widget, unless :attr:`focus_next` or
:attr:`focus_previous` is set to the `StopIteration` class, in which case
focus stops there.
For example, to cycle focus between :class:`~kivy.uix.button.Button`
elements of a :class:`~kivy.uix.gridlayout.GridLayout`::
class FocusButton(FocusBehavior, Button):
pass
grid = GridLayout(cols=4)
for i in range(40):
grid.add_widget(FocusButton(text=str(i)))
# clicking on a widget will activate focus, and tab can now be used
# to cycle through
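Focus links can also be wired explicitly; a small sketch (``btn1`` and
``btn2`` are assumed to be FocusBehavior widgets)::
    btn1.focus_next = btn2            # tab moves focus from btn1 to btn2
    btn2.focus_next = StopIteration   # tab stops cycling at btn2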
When using a software keyboard, typical on mobile and touch devices, the
keyboard display behavior is determined by the
:attr:`~kivy.core.window.WindowBase.softinput_mode` property. You can use
this property to ensure the focused widget is not covered or obscured by the
keyboard.
Initializing focus
------------------
Widgets need to be visible before they can receive focus. This means that
setting their *focus* property to True before they are visible will have no
effect. To initialize focus, you can use the 'on_parent' event::
from kivy.app import App
from kivy.uix.textinput import TextInput
class MyTextInput(TextInput):
def on_parent(self, widget, parent):
self.focus = True
class SampleApp(App):
def build(self):
return MyTextInput()
SampleApp().run()
If you are using a :class:`~kivy.uix.popup`, you can use the 'on_open' event.
For an overview of behaviors, please refer to the :mod:`~kivy.uix.behaviors`
documentation.
.. warning::
This code is still experimental, and its API is subject to change in a
future version.
'''
__all__ = ('FocusBehavior', )
from kivy.properties import OptionProperty, ObjectProperty, BooleanProperty, \
AliasProperty
from kivy.config import Config
from kivy.base import EventLoop
# When we are generating documentation, Config doesn't exist
_is_desktop = False
_keyboard_mode = 'system'
if Config:
_is_desktop = Config.getboolean('kivy', 'desktop')
_keyboard_mode = Config.get('kivy', 'keyboard_mode')
class FocusBehavior(object):
'''Provides keyboard focus behavior. When combined with other
FocusBehavior widgets it allows one to cycle focus among them by pressing
tab. Please see the
:mod:`focus behavior module documentation <kivy.uix.behaviors.focus>`
for more information.
.. versionadded:: 1.9.0
'''
_requested_keyboard = False
_keyboard = ObjectProperty(None, allownone=True)
_keyboards = {}
ignored_touch = []
'''A list of touches that should not be used to defocus. After on_touch_up,
every touch that is not in :attr:`ignored_touch` will defocus all the
focused widgets if the config keyboard mode is not multi. Touches on
focusable widgets that were used to focus are automatically added here.
Example usage::
class Unfocusable(Widget):
def on_touch_down(self, touch):
if self.collide_point(*touch.pos):
FocusBehavior.ignored_touch.append(touch)
Notice that you need to access this as a class, not an instance variable.
'''
def _set_keyboard(self, value):
focus = self.focus
keyboard = self._keyboard
keyboards = FocusBehavior._keyboards
if keyboard:
self.focus = False # this'll unbind
if self._keyboard: # remove assigned keyboard from dict
del keyboards[keyboard]
if value and value not in keyboards:
keyboards[value] = None
self._keyboard = value
self.focus = focus
def _get_keyboard(self):
return self._keyboard
keyboard = AliasProperty(_get_keyboard, _set_keyboard,
bind=('_keyboard', ))
'''The keyboard to bind to (or bound to the widget) when focused.
When None, a keyboard is requested and released whenever the widget comes
into and out of focus. If not None, it must be a keyboard, which gets
bound and unbound from the widget whenever it's in or out of focus. It is
useful only when more than one keyboard is available, so it is recommended
to be set to None when only one keyboard is available.
If more than one keyboard is available, whenever an instance gets focused
a new keyboard will be requested if None. Unless the other instances lose
focus (e.g. if tab was used), a new keyboard will appear. When this is
undesired, the keyboard property can be used. For example, if there are
two users with two keyboards, then each keyboard can be assigned to
different groups of instances of FocusBehavior, ensuring that within
each group, only one FocusBehavior will have focus, and will receive input
from the correct keyboard. See `keyboard_mode` in :mod:`~kivy.config` for
more information on the keyboard modes.
**Keyboard and focus behavior**
When using the keyboard, there are some important default behaviors you
should keep in mind.
* When Config's `keyboard_mode` is multi, each new touch is considered
a touch by a different user and will set the focus (if clicked on a
focusable) with a new keyboard. Already focused elements will not lose
their focus (even if an unfocusable widget is touched).
* If the keyboard property is set, that keyboard will be used when the
instance gets focused. If widgets with different keyboards are linked
through :attr:`focus_next` and :attr:`focus_previous`, then as they are
tabbed through, different keyboards will become active. Therefore,
typically it's undesirable to link instances which are assigned
different keyboards.
* When a widget has focus, setting its keyboard to None will remove its
keyboard, but the widget will then immediately try to get
another keyboard. In order to remove its keyboard, rather set its
:attr:`focus` to False.
* When using a software keyboard, typical on mobile and touch devices, the
keyboard display behavior is determined by the
:attr:`~kivy.core.window.WindowBase.softinput_mode` property. You can use
this property to ensure the focused widget is not covered or obscured.
:attr:`keyboard` is an :class:`~kivy.properties.AliasProperty` and defaults
to None.
    .. warning::
When assigning a keyboard, the keyboard must not be released while
it is still assigned to an instance. Similarly, the keyboard created
by the instance on focus and assigned to :attr:`keyboard` if None,
will be released by the instance when the instance loses focus.
Therefore, it is not safe to assign this keyboard to another instance's
:attr:`keyboard`.
'''
is_focusable = BooleanProperty(_is_desktop)
'''Whether the instance can become focused. If focused, it'll lose focus
when set to False.
:attr:`is_focusable` is a :class:`~kivy.properties.BooleanProperty` and
defaults to True on a desktop (i.e. `desktop` is True in
:mod:`~kivy.config`), False otherwise.
'''
focus = BooleanProperty(False)
'''Whether the instance currently has focus.
Setting it to True will bind to and/or request the keyboard, and input
will be forwarded to the instance. Setting it to False will unbind
and/or release the keyboard. For a given keyboard, only one widget can
have its focus, so focusing one will automatically unfocus the other
instance holding its focus.
When using a software keyboard, please refer to the
:attr:`~kivy.core.window.WindowBase.softinput_mode` property to determine
how the keyboard display is handled.
:attr:`focus` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
focused = focus
'''An alias of :attr:`focus`.
:attr:`focused` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
.. warning::
:attr:`focused` is an alias of :attr:`focus` and will be removed in
2.0.0.
'''
def _set_on_focus_next(self, instance, value):
        ''' If changing this code, ensure the following does not become an
        infinite loop:
widget.focus_next = widget
widget.focus_previous = widget
widget.focus_previous = widget2
'''
next = self._old_focus_next
if next is value: # prevent infinite loop
return
if isinstance(next, FocusBehavior):
next.focus_previous = None
self._old_focus_next = value
if value is None or value is StopIteration:
return
if not isinstance(value, FocusBehavior):
raise ValueError('focus_next accepts only objects based on'
' FocusBehavior, or the `StopIteration` class.')
value.focus_previous = self
focus_next = ObjectProperty(None, allownone=True)
'''The :class:`FocusBehavior` instance to acquire focus when
tab is pressed and this instance has focus, if not `None` or
`StopIteration`.
When tab is pressed, focus cycles through all the :class:`FocusBehavior`
widgets that are linked through :attr:`focus_next` and are focusable. If
:attr:`focus_next` is `None`, it instead walks the children lists to find
the next focusable widget. Finally, if :attr:`focus_next` is
the `StopIteration` class, focus won't move forward, but end here.
    .. note::
Setting :attr:`focus_next` automatically sets :attr:`focus_previous`
of the other instance to point to this instance, if not None or
`StopIteration`. Similarly, if it wasn't None or `StopIteration`, it
also sets the :attr:`focus_previous` property of the instance
previously in :attr:`focus_next` to `None`. Therefore, it is only
required to set one of the :attr:`focus_previous` or
:attr:`focus_next` links since the other side will be set
automatically.
:attr:`focus_next` is an :class:`~kivy.properties.ObjectProperty` and
defaults to `None`.
'''
def _set_on_focus_previous(self, instance, value):
prev = self._old_focus_previous
if prev is value:
return
if isinstance(prev, FocusBehavior):
prev.focus_next = None
self._old_focus_previous = value
if value is None or value is StopIteration:
return
if not isinstance(value, FocusBehavior):
            raise ValueError('focus_previous accepts only objects based '
                             'on FocusBehavior, or the `StopIteration` class.')
value.focus_next = self
focus_previous = ObjectProperty(None, allownone=True)
'''The :class:`FocusBehavior` instance to acquire focus when
shift+tab is pressed on this instance, if not None or `StopIteration`.
When shift+tab is pressed, focus cycles through all the
:class:`FocusBehavior` widgets that are linked through
:attr:`focus_previous` and are focusable. If :attr:`focus_previous` is
`None`, it instead walks the children tree to find the
previous focusable widget. Finally, if :attr:`focus_previous` is the
`StopIteration` class, focus won't move backward, but end here.
    .. note::
Setting :attr:`focus_previous` automatically sets :attr:`focus_next`
of the other instance to point to this instance, if not None or
`StopIteration`. Similarly, if it wasn't None or `StopIteration`, it
also sets the :attr:`focus_next` property of the instance previously in
:attr:`focus_previous` to `None`. Therefore, it is only required
to set one of the :attr:`focus_previous` or :attr:`focus_next`
links since the other side will be set automatically.
:attr:`focus_previous` is an :class:`~kivy.properties.ObjectProperty` and
defaults to `None`.
'''
keyboard_mode = OptionProperty('auto', options=('auto', 'managed'))
'''Determines how the keyboard visibility should be managed. 'auto' will
result in the standard behaviour of showing/hiding on focus. 'managed'
requires setting the keyboard visibility manually, or calling the helper
functions :meth:`show_keyboard` and :meth:`hide_keyboard`.
:attr:`keyboard_mode` is an :class:`~kivy.properties.OptionsProperty` and
defaults to 'auto'. Can be one of 'auto' or 'managed'.
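    A hypothetical sketch of managed mode (``widget`` is assumed to be a
    focused FocusBehavior instance)::
        widget.keyboard_mode = 'managed'
        widget.show_keyboard()   # request and bind the keyboard explicitly
        widget.hide_keyboard()   # and release it again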
'''
input_type = OptionProperty('text', options=('text', 'number', 'url',
'mail', 'datetime', 'tel',
'address'))
'''The kind of input keyboard to request.
.. versionadded:: 1.8.0
:attr:`input_type` is an :class:`~kivy.properties.OptionsProperty` and
defaults to 'text'. Can be one of 'text', 'number', 'url', 'mail',
'datetime', 'tel' or 'address'.
'''
unfocus_on_touch = BooleanProperty(_keyboard_mode not in
('multi', 'systemandmulti'))
    '''Whether an instance should lose focus when clicked outside the instance.
    When a user clicks on a widget that is focus aware and shares the same
    keyboard as this widget (which, in the case of only one keyboard, means
    all focus aware widgets), then as the other widget gains focus, this
    widget loses focus. In addition to that, if this property is `True`,
    clicking on any widget other than this widget will remove focus from this
    widget.
:attr:`unfocus_on_touch` is a :class:`~kivy.properties.BooleanProperty` and
defaults to `False` if the `keyboard_mode` in :attr:`~kivy.config.Config`
is `'multi'` or `'systemandmulti'`, otherwise it defaults to `True`.
'''
def __init__(self, **kwargs):
self._old_focus_next = None
self._old_focus_previous = None
super(FocusBehavior, self).__init__(**kwargs)
self._keyboard_mode = _keyboard_mode
fbind = self.fbind
fbind('focus', self._on_focus)
fbind('disabled', self._on_focusable)
fbind('is_focusable', self._on_focusable)
fbind('focus_next', self._set_on_focus_next)
fbind('focus_previous', self._set_on_focus_previous)
def _on_focusable(self, instance, value):
if self.disabled or not self.is_focusable:
self.focus = False
def _on_focus(self, instance, value, *largs):
if self.is_focusable and self.keyboard_mode == 'auto':
if value:
self._bind_keyboard()
else:
self._unbind_keyboard()
def _ensure_keyboard(self):
if self._keyboard is None:
self._requested_keyboard = True
keyboard = self._keyboard =\
EventLoop.window.request_keyboard(
self._keyboard_released, self, input_type=self.input_type)
keyboards = FocusBehavior._keyboards
if keyboard not in keyboards:
keyboards[keyboard] = None
def _bind_keyboard(self):
self._ensure_keyboard()
keyboard = self._keyboard
if not keyboard or self.disabled or not self.is_focusable:
self.focus = False
return
keyboards = FocusBehavior._keyboards
old_focus = keyboards[keyboard] # keyboard should be in dict
if old_focus:
old_focus.focus = False
# keyboard shouldn't have been released here, see keyboard warning
keyboards[keyboard] = self
keyboard.bind(on_key_down=self.keyboard_on_key_down,
on_key_up=self.keyboard_on_key_up,
on_textinput=self.keyboard_on_textinput)
def _unbind_keyboard(self):
keyboard = self._keyboard
if keyboard:
keyboard.unbind(on_key_down=self.keyboard_on_key_down,
on_key_up=self.keyboard_on_key_up,
on_textinput=self.keyboard_on_textinput)
if self._requested_keyboard:
keyboard.release()
self._keyboard = None
self._requested_keyboard = False
del FocusBehavior._keyboards[keyboard]
else:
FocusBehavior._keyboards[keyboard] = None
def keyboard_on_textinput(self, window, text):
pass
def _keyboard_released(self):
self.focus = False
def on_touch_down(self, touch):
if not self.collide_point(*touch.pos):
return
if (not self.disabled and self.is_focusable and
('button' not in touch.profile or
not touch.button.startswith('scroll'))):
self.focus = True
FocusBehavior.ignored_touch.append(touch)
return super(FocusBehavior, self).on_touch_down(touch)
@staticmethod
def _handle_post_on_touch_up(touch):
''' Called by window after each touch has finished.
'''
touches = FocusBehavior.ignored_touch
if touch in touches:
touches.remove(touch)
return
if 'button' in touch.profile and touch.button in\
('scrollup', 'scrolldown', 'scrollleft', 'scrollright'):
return
for focusable in list(FocusBehavior._keyboards.values()):
if focusable is None or not focusable.unfocus_on_touch:
continue
focusable.focus = False
def _get_focus_next(self, focus_dir):
current = self
        walk_tree = 'walk' if focus_dir == 'focus_next' else 'walk_reverse'
while 1:
# if we hit a focusable, walk through focus_xxx
while getattr(current, focus_dir) is not None:
current = getattr(current, focus_dir)
if current is self or current is StopIteration:
return None # make sure we don't loop forever
if current.is_focusable and not current.disabled:
return current
# hit unfocusable, walk widget tree
itr = getattr(current, walk_tree)(loopback=True)
            if focus_dir == 'focus_next':
next(itr) # current is returned first when walking forward
for current in itr:
if isinstance(current, FocusBehavior):
break
# why did we stop
if isinstance(current, FocusBehavior):
if current is self:
return None
if current.is_focusable and not current.disabled:
return current
else:
return None
def get_focus_next(self):
'''Returns the next focusable widget using either :attr:`focus_next`
or the :attr:`children` similar to the order when tabbing forwards
with the ``tab`` key.
'''
return self._get_focus_next('focus_next')
def get_focus_previous(self):
'''Returns the previous focusable widget using either
:attr:`focus_previous` or the :attr:`children` similar to the
order when ``tab`` + ``shift`` key are triggered together.
'''
return self._get_focus_next('focus_previous')
def keyboard_on_key_down(self, window, keycode, text, modifiers):
'''The method bound to the keyboard when the instance has focus.
When the instance becomes focused, this method is bound to the
keyboard and will be called for every input press. The parameters are
the same as :meth:`kivy.core.window.WindowBase.on_key_down`.
When overwriting the method in the derived widget, super should be
called to enable tab cycling. If the derived widget wishes to use tab
for its own purposes, it can call super after it has processed the
character (if it does not wish to consume the tab).
Similar to other keyboard functions, it should return True if the
key was consumed.
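        A hypothetical override that handles its own key first (``MyWidget``
        is an assumed subclass)::
            def keyboard_on_key_down(self, window, keycode, text, modifiers):
                if keycode[1] == 'spacebar':
                    return True  # consumed; do not cycle focus
                return super(MyWidget, self).keyboard_on_key_down(
                    window, keycode, text, modifiers)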
'''
if keycode[1] == 'tab': # deal with cycle
if ['shift'] == modifiers:
next = self.get_focus_previous()
else:
next = self.get_focus_next()
if next:
self.focus = False
next.focus = True
return True
return False
def keyboard_on_key_up(self, window, keycode):
'''The method bound to the keyboard when the instance has focus.
When the instance becomes focused, this method is bound to the
keyboard and will be called for every input release. The parameters are
the same as :meth:`kivy.core.window.WindowBase.on_key_up`.
When overwriting the method in the derived widget, super should be
called to enable de-focusing on escape. If the derived widget wishes
to use escape for its own purposes, it can call super after it has
processed the character (if it does not wish to consume the escape).
See :meth:`keyboard_on_key_down`
'''
if keycode[1] == 'escape':
self.focus = False
return True
return False
def show_keyboard(self):
'''
Convenience function to show the keyboard in managed mode.
'''
if self.keyboard_mode == 'managed':
self._bind_keyboard()
def hide_keyboard(self):
'''
Convenience function to hide the keyboard in managed mode.
'''
if self.keyboard_mode == 'managed':
self._unbind_keyboard()
|
{
"content_hash": "fbccb071f88e02e0b1d2a6d1802bc6d2",
"timestamp": "",
"source": "github",
"line_count": 563,
"max_line_length": 79,
"avg_line_length": 40.262877442273535,
"alnum_prop": 0.649594141521087,
"repo_name": "kived/kivy",
"id": "9108941ee78dc3387825f6039dcc205956b99bf8",
"size": "22668",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "kivy/uix/behaviors/focus.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "321734"
},
{
"name": "C++",
"bytes": "3551"
},
{
"name": "Emacs Lisp",
"bytes": "9671"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "Makefile",
"bytes": "4084"
},
{
"name": "Objective-C",
"bytes": "14779"
},
{
"name": "Python",
"bytes": "3856810"
},
{
"name": "Vim script",
"bytes": "1123"
}
],
"symlink_target": ""
}
|
from datapackage_pipelines.wrapper import process
from datapackage_pipelines_fiscal.processors.consts import ID_COLUMN_NAME
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
row[ID_COLUMN_NAME] = row_index + 1
return row
def modify_datapackage(dp, *_):
dp['resources'][0]['schema']['fields'].insert(0, {
'name': ID_COLUMN_NAME,
'type': 'integer'
})
dp['resources'][0]['schema']['primaryKey'].insert(0, ID_COLUMN_NAME)
return dp
process(modify_datapackage=modify_datapackage,
process_row=process_row)
|
{
"content_hash": "2cb15657f0730bdb6c4af2f54291e88d",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 73,
"avg_line_length": 26.52173913043478,
"alnum_prop": 0.6442622950819672,
"repo_name": "openspending/datapackage-pipelines-fiscal",
"id": "27c19f7129c29a6e4b6ec0d1f9f2f042b1681c22",
"size": "610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datapackage_pipelines_fiscal/processors/helpers/enumerate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "422"
},
{
"name": "Python",
"bytes": "40314"
}
],
"symlink_target": ""
}
|
"""Functions used by multiple converter files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.core.framework import graph_pb2 as _graph_pb2
from tensorflow.core.protobuf import config_pb2 as _config_pb2
from tensorflow.core.protobuf import meta_graph_pb2 as _meta_graph_pb2
from tensorflow.lite.python.op_hint import convert_op_hints_to_stubs
from tensorflow.lite.python.op_hint import find_all_hinted_output_nodes
from tensorflow.lite.toco import types_pb2 as _types_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_util as tf_graph_util
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.training.saver import export_meta_graph as _export_meta_graph
# Map of tf.dtypes to TFLite types_flag_pb2.
_MAP_TF_TO_TFLITE_TYPES = {
dtypes.float32: _types_pb2.FLOAT,
dtypes.float16: _types_pb2.FLOAT16,
dtypes.int32: _types_pb2.INT32,
dtypes.int64: _types_pb2.INT64,
dtypes.string: _types_pb2.STRING,
dtypes.uint8: _types_pb2.QUANTIZED_UINT8,
dtypes.int8: _types_pb2.INT8,
dtypes.complex64: _types_pb2.COMPLEX64
}
_LOWER_USING_SWITCH_MERGE = "_lower_using_switch_merge"
def convert_dtype_to_tflite_type(tf_dtype):
"""Converts tf.dtype to TFLite proto type.
Args:
tf_dtype: tf.dtype
Raises:
ValueError: Unsupported tf.dtype.
Returns:
types_flag_pb2.
"""
result = _MAP_TF_TO_TFLITE_TYPES.get(tf_dtype)
if result is None:
raise ValueError("Unsupported tf.dtype {0}".format(tf_dtype))
return result
def get_tensor_name(tensor):
"""Returns name of the input tensor.
Args:
tensor: tf.Tensor
Returns:
str
"""
parts = tensor.name.split(":")
if len(parts) > 2:
raise ValueError("Tensor name invalid. Expect 0 or 1 colon, got {0}".format(
len(parts) - 1))
  # To be consistent with the tensor naming scheme in tensorflow, we need to
  # drop the ':0' suffix for the first tensor.
if len(parts) > 1 and parts[1] != "0":
return tensor.name
return parts[0]
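# For example (illustrative only): a tensor named "input:0" is reported as
# "input", while "lstm:1" keeps its output index and stays "lstm:1".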
def get_tensors_from_tensor_names(graph, tensor_names):
"""Gets the Tensors associated with the `tensor_names` in the provided graph.
Args:
graph: TensorFlow Graph.
tensor_names: List of strings that represent names of tensors in the graph.
Returns:
A list of Tensor objects in the same order the names are provided.
Raises:
ValueError:
tensor_names contains an invalid tensor name.
"""
# Get the list of all of the tensors.
tensor_name_to_tensor = {}
for op in graph.get_operations():
for tensor in op.values():
tensor_name_to_tensor[get_tensor_name(tensor)] = tensor
# Get the tensors associated with tensor_names.
tensors = []
invalid_tensors = []
for name in tensor_names:
tensor = tensor_name_to_tensor.get(name)
if tensor is None:
invalid_tensors.append(name)
else:
tensors.append(tensor)
# Throw ValueError if any user input names are not valid tensors.
if invalid_tensors:
raise ValueError("Invalid tensors '{}' were found.".format(
",".join(invalid_tensors)))
return tensors
def set_tensor_shapes(tensors, shapes):
"""Sets Tensor shape for each tensor if the shape is defined.
Args:
tensors: TensorFlow ops.Tensor.
    shapes: Dict of strings representing input tensor names to list of
      integers representing input shapes (e.g., {"foo": [1, 16, 16, 3]}).
Raises:
ValueError:
`shapes` contains an invalid tensor.
`shapes` contains an invalid shape for a valid tensor.
"""
if shapes:
tensor_names_to_tensor = {
get_tensor_name(tensor): tensor for tensor in tensors
}
for name, shape in shapes.items():
if name not in tensor_names_to_tensor:
raise ValueError("Invalid tensor \'{}\' found in tensor shapes "
"map.".format(name))
if shape is not None:
tensor = tensor_names_to_tensor[name]
try:
tensor.set_shape(shape)
except ValueError as error:
message = ("The shape of tensor '{0}' cannot be changed from {1} to "
"{2}. {3}".format(name, tensor.shape, shape, str(error)))
raise ValueError(message)
def get_grappler_config(optimizers_list):
"""Creates a tf.compat.v1.ConfigProto for configuring Grappler.
Args:
optimizers_list: List of strings that represents the list of optimizers.
Returns:
tf.ConfigProto.
"""
config = _config_pb2.ConfigProto()
rewrite_options = config.graph_options.rewrite_options
for optimizer in optimizers_list:
rewrite_options.optimizers.append(optimizer)
return config
def run_graph_optimizations(graph_def,
input_arrays,
output_arrays,
config,
graph=None):
"""Apply standard TensorFlow optimizations to the graph_def.
Args:
graph_def: Frozen GraphDef to be optimized.
input_arrays: List of arrays that are considered inputs of the graph.
output_arrays: List of arrays that are considered outputs of the graph.
config: tf.ConfigProto.
graph: TensorFlow Graph. Required when Eager mode is enabled. (default None)
Returns:
A new, optimized GraphDef.
"""
meta_graph = _export_meta_graph(graph_def=graph_def, graph=graph)
# We need to add a collection called 'train_op' so that grappler
# knows what the outputs are.
fetch_collection = _meta_graph_pb2.CollectionDef()
for array in input_arrays + output_arrays:
fetch_collection.node_list.value.append(array.name)
meta_graph.collection_def["train_op"].CopyFrom(fetch_collection)
return tf_optimizer.OptimizeGraph(config, meta_graph)
def _remove_lower_using_switch_merge(graph_def):
"""Remove '_lower_using_switch_merge' attributes from the given graph.
Args:
graph_def: GraphDef to be optimized.
Returns:
A new GraphDef that with no '_lower_using_switch_merge' attribute.
"""
out = _graph_pb2.GraphDef()
out.library.CopyFrom(graph_def.library)
out.versions.CopyFrom(graph_def.versions)
for node in graph_def.node:
new_node = copy.deepcopy(node)
if new_node.op == "While":
new_node.attr[_LOWER_USING_SWITCH_MERGE].b = False
out.node.extend([new_node])
return out
def _convert_op_hints_if_present(sess, graph_def, output_tensors,
hinted_outputs_nodes):
if is_frozen_graph(sess):
raise ValueError("Try to convert op hints, needs unfrozen graph.")
output_arrays = [get_tensor_name(tensor) for tensor in output_tensors]
graph_def = tf_graph_util.convert_variables_to_constants(
sess, graph_def, output_arrays + hinted_outputs_nodes)
graph_def = convert_op_hints_to_stubs(graph_def=graph_def)
graph_def = tf_graph_util.remove_training_nodes(graph_def)
return graph_def
def freeze_graph(sess, input_tensors, output_tensors):
"""Returns a frozen GraphDef.
  Runs a Grappler "function" pass to inline functions in the graph, then
  freezes the graph by converting Variables to constants if it is not already
  frozen; otherwise the optimized GraphDef is returned as-is.
  If OpHints are present, the OpHint graph is converted instead.
Args:
sess: TensorFlow Session.
input_tensors: List of input tensors.
output_tensors: List of output tensors (only .name is used from this).
Returns:
Frozen GraphDef.
"""
# Runs a Grappler pass in order to inline any functions in the graph.
  # Aside from inlining simple functions, Grappler will also try to lower the
  # while loop into a switch-merge representation, which is undesired for
  # OpHints, so we simply remove those attributes to prevent Grappler from
  # doing so.
graph_def = _remove_lower_using_switch_merge(sess.graph_def)
config = get_grappler_config(["function"])
graph_def = run_graph_optimizations(
graph_def, input_tensors, output_tensors, config, graph=sess.graph)
# If ophints are present, just convert them.
hinted_outputs_nodes = find_all_hinted_output_nodes(sess)
if hinted_outputs_nodes:
return _convert_op_hints_if_present(sess, graph_def, output_tensors,
hinted_outputs_nodes)
if not is_frozen_graph(sess):
output_arrays = [get_tensor_name(tensor) for tensor in output_tensors]
return tf_graph_util.convert_variables_to_constants(sess, graph_def,
output_arrays)
else:
return sess.graph_def
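# Illustrative usage (hypothetical names; `sess` is a tf.compat.v1.Session
# whose graph defines the tensors `inp` and `out`):
#
#     frozen_graph_def = freeze_graph(sess, [inp], [out])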
def is_frozen_graph(sess):
"""Determines if the graph is frozen.
Determines if a graph has previously been frozen by checking for any
operations of type Variable*. If variables are found, the graph is not frozen.
Args:
sess: TensorFlow Session.
Returns:
Bool.
"""
for op in sess.graph.get_operations():
if op.type.startswith("Variable") or op.type.endswith("VariableOp"):
return False
return True
|
{
"content_hash": "4d87b6c4213e8b4929c2fdf21be3ba4c",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 84,
"avg_line_length": 33.0989010989011,
"alnum_prop": 0.6893536963258079,
"repo_name": "alsrgv/tensorflow",
"id": "4b8f2f76610934848fca517a47a9e287e533994a",
"size": "9725",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/lite/python/util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3568"
},
{
"name": "Batchfile",
"bytes": "15317"
},
{
"name": "C",
"bytes": "755360"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "68001148"
},
{
"name": "CMake",
"bytes": "204596"
},
{
"name": "Dockerfile",
"bytes": "73602"
},
{
"name": "Go",
"bytes": "1627121"
},
{
"name": "HTML",
"bytes": "4680118"
},
{
"name": "Java",
"bytes": "842866"
},
{
"name": "Jupyter Notebook",
"bytes": "1665584"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "101157"
},
{
"name": "Objective-C",
"bytes": "104061"
},
{
"name": "Objective-C++",
"bytes": "175222"
},
{
"name": "PHP",
"bytes": "17570"
},
{
"name": "Pascal",
"bytes": "3239"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "48843099"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "4733"
},
{
"name": "Shell",
"bytes": "488241"
},
{
"name": "Smarty",
"bytes": "27495"
},
{
"name": "Swift",
"bytes": "56155"
},
{
"name": "TSQL",
"bytes": "921"
}
],
"symlink_target": ""
}
|
__author__ = 'Axion'
def _check_duplicates_in_path():
    """Check the $PATH environment variable for duplicates."""
    import os
    path = os.environ['PATH']
    path_list = path.split(":")
    print('duplicates: {} items'.format(len(path_list) - len(set(path_list))))
    # Build a new path that keeps only the first occurrence of each part,
    # preserving the original order.
    seen = set()
    unique_parts = []
    for part in path_list:
        if part not in seen:
            seen.add(part)
            unique_parts.append(part)
    new_path = ":".join(unique_parts)
    print(new_path)
# The current (07/04/14) default path, for reference:
# /Library/Frameworks/Python.framework/Versions/2.7/bin:/Library/Frameworks/Python.framework/Versions/3.4/bin:/opt/local/bin:/opt/local/sbin:/usr/local/bin:/usr/local/sbin:/Library/Frameworks/Python.framework/Versions/2.7/bin:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/local/git/bin
if __name__ == '__main__':
_check_duplicates_in_path()
|
{
"content_hash": "68c0209158808f9e417b6f753953cf99",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 293,
"avg_line_length": 31.61764705882353,
"alnum_prop": 0.6344186046511628,
"repo_name": "devinbarry/yellow-worktracker",
"id": "2081828cd5cbb15e2172b8cbccd0ab40537ac35e",
"size": "1075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "environ_path.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14382"
}
],
"symlink_target": ""
}
|
import os
import locale
import time
from i3pystatus import IntervalModule
class Clock(IntervalModule):
"""
This class shows a clock.
format can be passed in four different ways:
- single string, no timezone, just the strftime-format
- one two-tuple, first is the format, second the timezone
- list of strings - no timezones
- list of two tuples, first is the format, second is timezone
Use mousewheel to cycle between formats.
For complete time format specification see:
::
man strftime
All available timezones are located in directory:
::
/usr/share/zoneinfo/
.. rubric:: Format examples
::
# one format, local timezone
format = '%a %b %-d %b %X'
# multiple formats, local timezone
format = [ '%a %b %-d %b %X', '%X' ]
# one format, specified timezone
format = ('%a %b %-d %b %X', 'Europe/Bratislava')
# multiple formats, specified timezones
format = [ ('%a %b %-d %b %X', 'America/New_York'), ('%X', 'Etc/GMT+9') ]
"""
settings = (
("format", "`None` means to use the default, locale-dependent format."),
("color", "RGB hexadecimal code color specifier, default to #ffffff"),
)
format = None
color = "#ffffff"
interval = 1
current_format_id = 0
on_upscroll = ["scroll_format", 1]
on_downscroll = ["scroll_format", -1]
def init(self):
env_lang = os.environ.get('LC_TIME', None)
if env_lang is None:
env_lang = os.environ.get('LANG', None)
if env_lang is not None:
if env_lang.find('.') != -1:
lang = tuple(env_lang.split('.', 1))
else:
lang = (env_lang, None)
else:
lang = (None, None)
if lang != locale.getlocale(locale.LC_TIME):
# affects time.strftime() in whole program
locale.setlocale(locale.LC_TIME, lang)
if self.format is None:
if lang[0] == 'en_US':
# MDY format - United States of America
self.format = ["%a %b %-d %X"]
else:
# DMY format - almost all other countries
self.format = ["%a %-d %b %X"]
elif isinstance(self.format, str) or isinstance(self.format, tuple):
self.format = [self.format]
self.format = self.expand_formats(self.format)
@staticmethod
def expand_formats(formats):
def expand_format(format_):
if isinstance(format_, tuple):
# check if timezone exists (man tzset)
if len(format_) > 1 and os.path.isfile('/usr/share/zoneinfo/' + format_[1]):
return (format_[0], format_[1])
else:
return (format_[0], time.tzname[0])
return (format_, time.tzname[0])
return [expand_format(format_) for format_ in formats]
def run(self):
# set timezone
        if time.tzname[0] != self.format[self.current_format_id][1]:
os.environ.putenv('TZ', self.format[self.current_format_id][1])
time.tzset()
self.output = {
"full_text": time.strftime(self.format[self.current_format_id][0]),
"color": self.color,
"urgent": False,
}
def scroll_format(self, step=1):
self.current_format_id = (self.current_format_id + step) % len(self.format)
|
{
"content_hash": "5b22fe6088adb99c72d883b239689a36",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 92,
"avg_line_length": 30.191304347826087,
"alnum_prop": 0.5532834101382489,
"repo_name": "claria/i3pystatus",
"id": "bb0780faa7ce8021d2c6e7ddfb44ba141e2a3c6f",
"size": "3519",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "i3pystatus/clock.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "208292"
},
{
"name": "Shell",
"bytes": "721"
}
],
"symlink_target": ""
}
|
from random import shuffle
from vol import Vol
from net import Net
from trainers import Trainer
from sklearn.datasets import load_iris
iris_data = None
network = None
sgd = None
N_TRAIN = 120
def load_data():
    global iris_data
    data = load_iris()
    xs = data.data
    ys = data.target
    inputs = [ Vol(list(row)) for row in xs ]
    labels = list(ys)
    iris_data = zip(inputs, labels)
    # The iris dataset is ordered by class; shuffle so the train/test
    # split below sees all three classes on both sides.
    shuffle(iris_data)
    print 'Data loaded...'
def start():
global network, sgd
layers = []
layers.append({'type': 'input', 'out_sx': 1, 'out_sy': 1, 'out_depth': 4})
layers.append({'type': 'softmax', 'num_classes': 3}) #svm works too
print 'Layers made...'
network = Net(layers)
print 'Net made...'
print network
sgd = Trainer(network, {'momentum': 0.1, 'l2_decay': 0.001})
print 'Trainer made...'
print sgd
def train():
global iris_data, sgd
print 'In training...'
print 'k', 'time\t\t ', 'loss\t ', 'training accuracy'
print '----------------------------------------------------'
for x, y in iris_data[:N_TRAIN]:
stats = sgd.train(x, y)
print stats['k'], stats['time'], stats['loss'], stats['accuracy']
def test():
global iris_data, network
print 'In testing...'
right = 0
for x, y in iris_data[N_TRAIN:]:
network.forward(x)
right += network.getPrediction() == y
accuracy = float(right) / (150 - N_TRAIN) * 100
print accuracy
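# A minimal runner mirroring the pipeline above; nothing beyond this file's
# own functions is assumed.
if __name__ == '__main__':
    load_data()
    start()
    train()
    test()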
|
{
"content_hash": "5bbffcfc8696fb42e05d55d970af49fa",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 78,
"avg_line_length": 23.508196721311474,
"alnum_prop": 0.5627615062761506,
"repo_name": "benglard/ConvNetPy",
"id": "3179939dbb88183401df41aae8fc35f9ded6de8e",
"size": "1459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/iris.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "151042"
}
],
"symlink_target": ""
}
|
import time
import smbus
import math
bus = smbus.SMBus(1)
class MS5607:
"""
http://www.parallaxinc.com/sites/default/files/downloads/29124-APPNote_520_C_code.pdf
http://www.parallaxinc.com/sites/default/files/downloads/29124-MS5607-02BA03-Datasheet.pdf
Offset for humidity to provide better precision
"""
DEVICE_ADDRESS = 0x76
_CMD_RESET = 0x1E
_CMD_ADC_READ = 0x00
_CMD_PROM_RD = 0xA0
_CMD_ADC_CONV = 0x40
_CMD_ADC_D1 = 0x00
_CMD_ADC_D2 = 0x10
_CMD_ADC_256 = 0x00
_CMD_ADC_512 = 0x02
_CMD_ADC_1024 = 0x04
_CMD_ADC_2048 = 0x06
_CMD_ADC_4096 = 0x08
def __init__(self):
self.resetSensor()
self.coefficients = self.readCoefficients()
# Some utility methods
def read16U(self, register1, register2):
bytes = bus.read_i2c_block_data(self.DEVICE_ADDRESS, register1, 2)
return (bytes[0] << 8) + (bytes[1])
def read24U(self, register):
bytes = bus.read_i2c_block_data(self.DEVICE_ADDRESS, register, 3)
return (bytes[0] << 16) + (bytes[1] << 8) + bytes[2]
def hectoPascalToInHg(self, milliBar):
return milliBar * 29.5333727 / 100000
def inHgToHectoPascal(self, inHg):
return 100 * 1000 * inHg / 29.5333727
def getImperialAltitude(self, currentMilliBar, baseMilliBar):
return (1 - math.pow(currentMilliBar / baseMilliBar, .190284)) * 145366.45
def getMetricAltitude(self, currentMilliBar, baseMilliBar):
return 0.3048 * self.getImperialAltitude(currentMilliBar, baseMilliBar)
# Commands
def resetSensor(self):
bus.write_byte(self.DEVICE_ADDRESS, self._CMD_RESET)
time.sleep(0.003) # wait for the reset sequence timing
def readCoefficient(self, i):
return self.read16U(self._CMD_PROM_RD + 2 * i, self._CMD_PROM_RD + 2 * i + 1)
def readCoefficients(self):
coefficients = [0] * 6
for i in range(6):
coefficients[i] = self.readCoefficient(i + 1)
return coefficients
def readAdc(self, cmd):
# set conversion mode
bus.write_byte(self.DEVICE_ADDRESS, self._CMD_ADC_CONV + cmd)
sleepTime = {self._CMD_ADC_256: 0.0009, self._CMD_ADC_512: 0.003, self._CMD_ADC_1024: 0.004, self._CMD_ADC_2048: 0.006, self._CMD_ADC_4096: 0.010}
time.sleep(sleepTime[cmd & 0x0f])
return self.read24U(self._CMD_ADC_READ)
def getDigitalPressure(self):
return self.readAdc(self._CMD_ADC_D1 + self._CMD_ADC_4096)
def getDigitalTemperature(self):
return self.readAdc(self._CMD_ADC_D2 + self._CMD_ADC_4096)
def getTemperature(self):
dT = self.getDigitalTemperature() - self.coefficients[4] * math.pow(2, 8)
return (2000 + dT * self.coefficients[5] / math.pow(2, 23)) / 100
def convertPressureTemperature(self, pressure, temperature):
# Calculate 1st order pressure and temperature
dT = temperature - self.coefficients[4] * 256
# Offset at actual temperature
off = self.coefficients[1] * 4 + ((float(dT) / 2048) * (float(self.coefficients[3]) / 1024))
# Sensitivity at actual temperature
sens = self.coefficients[0] * 2 + ((float(dT) / 4096) * (float(self.coefficients[2]) / 1024))
# Temperature compensated pressure
press = (float(pressure) / 2048) * (float(sens) / 1024) - off
return press
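# Minimal usage sketch. Assumptions: the sensor is wired on I2C bus 1 at
# address 0x76 (as configured above) and 1013.25 mbar is used as the baseline
# sea-level pressure for the altitude example.
if __name__ == '__main__':
    sensor = MS5607()
    raw_pressure = sensor.getDigitalPressure()
    raw_temperature = sensor.getDigitalTemperature()
    # convertPressureTemperature() follows the datasheet scaling and yields
    # pressure in hundredths of a millibar.
    pressure_mbar = sensor.convertPressureTemperature(
        raw_pressure, raw_temperature) / 100.0
    print('temperature: %.2f C' % sensor.getTemperature())
    print('pressure: %.2f mbar' % pressure_mbar)
    print('altitude: %.1f m' % sensor.getMetricAltitude(pressure_mbar, 1013.25))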
|
{
"content_hash": "434b92377d6ee33672063dcaa76871f1",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 154,
"avg_line_length": 41.75308641975309,
"alnum_prop": 0.6404494382022472,
"repo_name": "llinear/MS5607",
"id": "e39b193e7b3fe254714a0a515b6ec12ba93f88be",
"size": "3382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MS5607.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3219"
}
],
"symlink_target": ""
}
|
INDEX_DIR = "Films.index"
import sys
import os
import re
import lucene
from java.io import File
from org.apache.lucene.analysis.standard import StandardAnalyzer
from org.apache.lucene.queryparser.classic import QueryParser
from org.apache.lucene.index import IndexReader, IndexWriter, IndexWriterConfig, Term
from org.apache.lucene.search import IndexSearcher, BooleanQuery, BooleanClause, NumericRangeQuery
from org.apache.lucene.store import SimpleFSDirectory
from org.apache.lucene.util import Version
from org.apache.lucene.document import Field, StringField
"""
This script will try to map Netflix ids to IMDB ones.
Mismatches between the IMDB and Netflix collections are ignored.
A film titles file is required.
Lucene index will be updated with netflix ids.
"""
def do_mapping(line):
regex = re.match(r"(?P<netflix_id>[0-9]+),(?P<year>([0-9]+)|NULL),(?P<title>.+)", line)
if not regex:
raise ValueError(line)
netflix_id = int(regex.group("netflix_id"))
title = QueryParser.escape(regex.group("title"))
query1 = QueryParser(Version.LUCENE_CURRENT, "title", analyzer).parse(title)
year = regex.group("year")
if year == "NULL":
scoreDocs = searcher.search(query1, 1).scoreDocs
else:
year = int(year)
query2 = NumericRangeQuery.newIntRange("year", year, year, True, True)
        booleanQuery = BooleanQuery()
        booleanQuery.add(query1, BooleanClause.Occur.MUST)
        booleanQuery.add(query2, BooleanClause.Occur.MUST)
scoreDocs = searcher.search(booleanQuery, 1).scoreDocs
if scoreDocs:
if scoreDocs[0].score > 1.5:
doc = searcher.doc(scoreDocs[0].doc)
doc_id = doc.getField("id").stringValue()
doc.add(StringField("netflix_id", str(netflix_id), Field.Store.YES))
writer.updateDocument(Term("id", doc_id), doc)
if __name__ == '__main__':
lucene.initVM()
if len(sys.argv) < 2:
print "{0} <titles_file>".format(sys.argv[0])
sys.exit(0)
base_dir = os.path.abspath(os.path.curdir)
index_file = os.path.join(base_dir, INDEX_DIR)
store = SimpleFSDirectory(File(index_file))
analyzer = StandardAnalyzer(Version.LUCENE_CURRENT)
reader = IndexReader.open(store)
searcher = IndexSearcher(reader)
writer_config = IndexWriterConfig(Version.LUCENE_CURRENT, analyzer)
writer_config.setOpenMode(IndexWriterConfig.OpenMode.APPEND)
writer = IndexWriter(store, writer_config)
with open(sys.argv[1], 'r') as titles:
for line in titles:
do_mapping(line)
writer.commit()
|
{
"content_hash": "d3d2d51350c73f8e9c3472ba69e749a6",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 98,
"avg_line_length": 32.160493827160494,
"alnum_prop": 0.6898272552783109,
"repo_name": "dvalcarce/filmyou-web",
"id": "5d9b3273593ef84e0d558db818a7b75adad4d2f6",
"size": "2654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/map_netflix_imdb.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6664"
},
{
"name": "JavaScript",
"bytes": "16164"
},
{
"name": "PHP",
"bytes": "969"
},
{
"name": "Python",
"bytes": "84226"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2011, 2012, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
@author Stephen Dawson-Haggerty <stevedh@eecs.berkeley.edu>
"""
import os
import urllib2
import urlparse
from binascii import hexlify
import hashlib
import json
try:
from BeautifulSoup import BeautifulSoup as bs
except ImportError:
bs = None
urllib2.install_opener(urllib2.build_opener())
CACHEDIR='cache'
def load_http(url, cache=False, auth=None, data=None, as_fp=False, verbose=False):
name = hashlib.md5()
name.update(url)
cachename = os.path.join(CACHEDIR, hexlify(name.digest()))
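    # cache entries are keyed by the hex MD5 digest of the URL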
    if os.access(cachename, os.W_OK | os.R_OK) and cache and not data:
        if as_fp:
            return open(cachename, 'r')
        with open(cachename, 'r') as fp:
            return fp.read()
    else:
        try:
            if auth is not None:
                mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
                url_p = urlparse.urlparse(url)
                mgr.add_password(None, url_p.netloc, auth[0], auth[1])
                handler = urllib2.HTTPBasicAuthHandler(mgr)
                opener = urllib2.build_opener(handler)
                req = urllib2.Request(url, data=data)
                pagefp = opener.open(req, timeout=15)
            else:
                pagefp = urllib2.urlopen(url, timeout=10)
            if as_fp:
                return pagefp
            body = pagefp.read()
            pagefp.close()
        except Exception, e:
            print e
            return None
        # cache the fetched body so later calls can read from disk;
        # only plain GET requests (no POST data) are cached
        if cache and not data:
            if not os.access(CACHEDIR, os.W_OK):
                os.makedirs(CACHEDIR)
            with open(cachename, 'w') as cachefp:
                cachefp.write(body)
        return body
def load_html(url, **kwargs):
if bs:
return bs(load_http(url, **kwargs))
else:
raise NotImplementedError("Install BeautifulSoup to enable load_html")
def get(urls, **kwargs):
parser = kwargs.pop('parser', json.loads)
v = map(lambda x: load_http(x, **kwargs), urls)
return zip(urls, map(parser, v))
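# Example usage (hypothetical URLs):
#   pages = get(['http://example.com/a.json', 'http://example.com/b.json'])
#   # -> [(url, parsed_json), ...]; pass parser=... to override json.loads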
|
{
"content_hash": "0591726f60434751deebf14634628aa5",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 82,
"avg_line_length": 35.21649484536083,
"alnum_prop": 0.6595433255269321,
"repo_name": "SoftwareDefinedBuildings/smap",
"id": "b345d3ffa5e15748e089e59e2afdc2b8789cdcb5",
"size": "3416",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/smap/iface/http/httputils.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "325117"
},
{
"name": "HTML",
"bytes": "9642"
},
{
"name": "Java",
"bytes": "47918"
},
{
"name": "Lua",
"bytes": "9058"
},
{
"name": "Makefile",
"bytes": "5715"
},
{
"name": "Python",
"bytes": "1641521"
},
{
"name": "R",
"bytes": "23461"
},
{
"name": "Shell",
"bytes": "1273"
},
{
"name": "TeX",
"bytes": "40212"
},
{
"name": "XSLT",
"bytes": "5081"
}
],
"symlink_target": ""
}
|
import markdown
from django.utils import timezone
from flask import request
from api.caching.tasks import ban_url
from osf.models import Guid
from framework.postcommit_tasks.handlers import enqueue_postcommit_task
from modularodm import Q
from website import settings
from addons.base.signals import file_updated
from osf.models import FileNode, TrashedFileNode
from osf.models import Comment
from website.notifications.constants import PROVIDERS
from website.notifications.emails import notify, notify_mentions
from website.project.decorators import must_be_contributor_or_public
from osf.models import Node
from website.project.signals import comment_added, mention_added
@file_updated.connect
def update_file_guid_referent(self, node, event_type, payload, user=None):
if event_type == 'addon_file_moved' or event_type == 'addon_file_renamed':
source = payload['source']
destination = payload['destination']
source_node = Node.load(source['node']['_id'])
destination_node = node
file_guids = FileNode.resolve_class(source['provider'], FileNode.ANY).get_file_guids(
materialized_path=source['materialized'] if source['provider'] != 'osfstorage' else source['path'],
provider=source['provider'],
node=source_node)
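        # Add-ons that address files by immutable IDs keep the same referent
        # across renames (and same-provider moves within one node), so the
        # guid does not need to be repointed in the two cases below.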
if event_type == 'addon_file_renamed' and source['provider'] in settings.ADDONS_BASED_ON_IDS:
return
if event_type == 'addon_file_moved' and (source['provider'] == destination['provider'] and
source['provider'] in settings.ADDONS_BASED_ON_IDS) and source_node == destination_node:
return
for guid in file_guids:
obj = Guid.load(guid)
if source_node != destination_node and Comment.find(Q('root_target._id', 'eq', guid)).count() != 0:
update_comment_node(guid, source_node, destination_node)
if source['provider'] != destination['provider'] or source['provider'] != 'osfstorage':
old_file = FileNode.load(obj.referent._id)
obj.referent = create_new_file(obj, source, destination, destination_node)
obj.save()
if old_file and not TrashedFileNode.load(old_file._id):
old_file.delete()
def create_new_file(obj, source, destination, destination_node):
# TODO: Remove when materialized paths are fixed in the payload returned from waterbutler
if not source['materialized'].startswith('/'):
source['materialized'] = '/' + source['materialized']
if not destination['materialized'].startswith('/'):
destination['materialized'] = '/' + destination['materialized']
if not source['path'].endswith('/'):
data = dict(destination)
new_file = FileNode.resolve_class(destination['provider'], FileNode.FILE).get_or_create(destination_node, destination['path'])
if destination['provider'] != 'osfstorage':
new_file.update(revision=None, data=data)
else:
new_file = find_and_create_file_from_metadata(destination.get('children', []), source, destination, destination_node, obj)
if not new_file:
if source['provider'] == 'box':
new_path = obj.referent.path
else:
new_path = obj.referent.materialized_path.replace(source['materialized'], destination['materialized'])
new_file = FileNode.resolve_class(destination['provider'], FileNode.FILE).get_or_create(destination_node, new_path)
new_file.name = new_path.split('/')[-1]
new_file.materialized_path = new_path
new_file.save()
return new_file
def find_and_create_file_from_metadata(children, source, destination, destination_node, obj):
""" Given a Guid obj, recursively search for the metadata of its referent (a file obj)
in the waterbutler response. If found, create a new addon FileNode with that metadata
and return the new file.
"""
for item in children:
# TODO: Remove when materialized paths are fixed in the payload returned from waterbutler
if not item['materialized'].startswith('/'):
item['materialized'] = '/' + item['materialized']
if item['kind'] == 'folder':
return find_and_create_file_from_metadata(item.get('children', []), source, destination, destination_node, obj)
elif item['kind'] == 'file' and item['materialized'].replace(destination['materialized'], source['materialized']) == obj.referent.materialized_path:
data = dict(item)
new_file = FileNode.resolve_class(destination['provider'], FileNode.FILE).get_or_create(destination_node, item['path'])
if destination['provider'] != 'osfstorage':
new_file.update(revision=None, data=data)
return new_file
def update_comment_node(root_target_id, source_node, destination_node):
Comment.objects.filter(root_target___id=root_target_id).invalidated_update(node=destination_node)
source_node.save()
destination_node.save()
def render_email_markdown(content):
return markdown.markdown(content, ['del_ins', 'markdown.extensions.tables', 'markdown.extensions.fenced_code'])
@comment_added.connect
def send_comment_added_notification(comment, auth):
node = comment.node
target = comment.target
context = dict(
gravatar_url=auth.user.profile_image_url(),
content=render_email_markdown(comment.content),
page_type=comment.get_comment_page_type(),
page_title=comment.get_comment_page_title(),
provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '',
target_user=target.referent.user if is_reply(target) else None,
parent_comment=target.referent.content if is_reply(target) else '',
url=comment.get_comment_page_url()
)
time_now = timezone.now()
sent_subscribers = notify(
event='comments',
user=auth.user,
node=node,
timestamp=time_now,
**context
)
if is_reply(target):
if target.referent.user and target.referent.user not in sent_subscribers:
notify(
event='global_comment_replies',
user=auth.user,
node=node,
timestamp=time_now,
**context
)
@mention_added.connect
def send_mention_added_notification(comment, new_mentions, auth):
node = comment.node
target = comment.target
context = dict(
gravatar_url=auth.user.profile_image_url(),
content=render_email_markdown(comment.content),
page_type='file' if comment.page == Comment.FILES else node.project_or_component,
page_title=comment.root_target.referent.name if comment.page == Comment.FILES else '',
provider=PROVIDERS[comment.root_target.referent.provider] if comment.page == Comment.FILES else '',
target_user=target.referent.user if is_reply(target) else None,
parent_comment=target.referent.content if is_reply(target) else '',
new_mentions=new_mentions,
url=comment.get_comment_page_url()
)
time_now = timezone.now()
notify_mentions(
event='global_mentions',
user=auth.user,
node=node,
timestamp=time_now,
**context
)
def is_reply(target):
return isinstance(target.referent, Comment)
def _update_comments_timestamp(auth, node, page=Comment.OVERVIEW, root_id=None):
if node.is_contributor(auth.user):
enqueue_postcommit_task(ban_url, (node, ), {}, celery=False, once_per_request=True)
if root_id is not None:
guid_obj = Guid.load(root_id)
if guid_obj is not None:
enqueue_postcommit_task(ban_url, (guid_obj.referent, ), {}, celery=False, once_per_request=True)
# update node timestamp
if page == Comment.OVERVIEW:
root_id = node._id
auth.user.comments_viewed_timestamp[root_id] = timezone.now()
auth.user.save()
return {root_id: auth.user.comments_viewed_timestamp[root_id].isoformat()}
else:
return {}
@must_be_contributor_or_public
def update_comments_timestamp(auth, node, **kwargs):
timestamp_info = request.get_json()
page = timestamp_info.get('page')
root_id = timestamp_info.get('rootId')
return _update_comments_timestamp(auth, node, page, root_id)
|
{
"content_hash": "a966a729c517b990e68f2ac9dcb6bed1",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 156,
"avg_line_length": 43.69072164948454,
"alnum_prop": 0.654672015101463,
"repo_name": "acshi/osf.io",
"id": "e5a3df1bf38eb57b478cf31d8fc06b043dc62054",
"size": "8501",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "website/project/views/comment.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "176516"
},
{
"name": "HTML",
"bytes": "181969"
},
{
"name": "JavaScript",
"bytes": "2017102"
},
{
"name": "Mako",
"bytes": "756427"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "8555915"
},
{
"name": "Shell",
"bytes": "436"
}
],
"symlink_target": ""
}
|
import time
from neutronclient.common import exceptions as neutron_client_exc
from oslo.config import cfg
import six
from nova.compute import flavors
from nova import conductor
from nova import context
from nova.db import base
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova.network import neutronv2
from nova.network.neutronv2 import constants
from nova.network.security_group import openstack_driver
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
neutron_opts = [
cfg.StrOpt('neutron_url',
default='http://127.0.0.1:9696',
help='URL for connecting to neutron'),
cfg.IntOpt('neutron_url_timeout',
default=30,
help='Timeout value for connecting to neutron in seconds'),
cfg.StrOpt('neutron_admin_username',
help='Username for connecting to neutron in admin context'),
cfg.StrOpt('neutron_admin_password',
help='Password for connecting to neutron in admin context',
secret=True),
cfg.StrOpt('neutron_admin_tenant_name',
help='Tenant name for connecting to neutron in admin context'),
cfg.StrOpt('neutron_region_name',
help='Region name for connecting to neutron in admin context'),
cfg.StrOpt('neutron_admin_auth_url',
default='http://localhost:5000/v2.0',
help='Authorization URL for connecting to neutron in admin '
'context'),
cfg.BoolOpt('neutron_api_insecure',
default=False,
help='If set, ignore any SSL validation issues'),
cfg.StrOpt('neutron_auth_strategy',
default='keystone',
help='Authorization strategy for connecting to '
'neutron in admin context'),
# TODO(berrange) temporary hack until Neutron can pass over the
# name of the OVS bridge it is configured with
cfg.StrOpt('neutron_ovs_bridge',
default='br-int',
help='Name of Integration Bridge used by Open vSwitch'),
cfg.IntOpt('neutron_extension_sync_interval',
default=600,
help='Number of seconds before querying neutron for'
' extensions'),
cfg.StrOpt('neutron_ca_certificates_file',
help='Location of CA certificates file to use for '
'neutron client requests.'),
]
CONF = cfg.CONF
CONF.register_opts(neutron_opts)
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('flat_injected', 'nova.network.manager')
LOG = logging.getLogger(__name__)
refresh_cache = network_api.refresh_cache
update_instance_info_cache = network_api.update_instance_cache_with_nw_info
class API(base.Base):
"""API for interacting with the neutron 2.x API."""
def __init__(self):
super(API, self).__init__()
self.last_neutron_extension_sync = None
self.extensions = {}
self.conductor_api = conductor.API()
self.security_group_api = (
openstack_driver.get_openstack_security_group_driver())
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures."""
def _get_available_networks(self, context, project_id,
net_ids=None, neutron=None):
"""Return a network list available for the tenant.
The list contains networks owned by the tenant and public networks.
If net_ids specified, it searches networks with requested IDs only.
"""
if not neutron:
neutron = neutronv2.get_client(context)
if net_ids:
# If user has specified to attach instance only to specific
# networks then only add these to **search_opts. This search will
# also include 'shared' networks.
search_opts = {'id': net_ids}
nets = neutron.list_networks(**search_opts).get('networks', [])
else:
# (1) Retrieve non-public network list owned by the tenant.
search_opts = {'tenant_id': project_id, 'shared': False}
nets = neutron.list_networks(**search_opts).get('networks', [])
# (2) Retrieve public network list.
search_opts = {'shared': True}
nets += neutron.list_networks(**search_opts).get('networks', [])
_ensure_requested_network_ordering(
lambda x: x['id'],
nets,
net_ids)
return nets
def _create_port(self, port_client, instance, network_id, port_req_body,
fixed_ip=None, security_group_ids=None,
available_macs=None, dhcp_opts=None):
"""Attempts to create a port for the instance on the given network.
:param port_client: The client to use to create the port.
:param instance: Create the port for the given instance.
:param network_id: Create the port on the given network.
:param port_req_body: Pre-populated port request. Should have the
device_id, device_owner, and any required neutron extension values.
:param fixed_ip: Optional fixed IP to use from the given network.
:param security_group_ids: Optional list of security group IDs to
apply to the port.
:param available_macs: Optional set of available MAC addresses to use.
:param dhcp_opts: Optional DHCP options.
:returns: ID of the created port.
:raises PortLimitExceeded: If neutron fails with an OverQuota error.
"""
try:
if fixed_ip:
port_req_body['port']['fixed_ips'] = [{'ip_address': fixed_ip}]
port_req_body['port']['network_id'] = network_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = instance['project_id']
if security_group_ids:
port_req_body['port']['security_groups'] = security_group_ids
if available_macs is not None:
if not available_macs:
raise exception.PortNotFree(
instance=instance['display_name'])
mac_address = available_macs.pop()
port_req_body['port']['mac_address'] = mac_address
if dhcp_opts is not None:
port_req_body['port']['extra_dhcp_opts'] = dhcp_opts
port_id = port_client.create_port(port_req_body)['port']['id']
LOG.debug(_('Successfully created port: %s') % port_id,
instance=instance)
return port_id
except neutron_client_exc.NeutronClientException as e:
# NOTE(mriedem): OverQuota in neutron is a 409
if e.status_code == 409:
LOG.warning(_('Neutron error: quota exceeded'))
raise exception.PortLimitExceeded()
with excutils.save_and_reraise_exception():
LOG.exception(_('Neutron error creating port on network %s'),
network_id, instance=instance)
def allocate_for_instance(self, context, instance, **kwargs):
"""Allocate network resources for the instance.
:param requested_networks: optional value containing
network_id, fixed_ip, and port_id
:param security_groups: security groups to allocate for instance
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
NB: NeutronV2 currently assigns hypervisor supplied MAC addresses
to arbitrary networks, which requires openflow switches to
function correctly if more than one network is being used with
the bare metal hypervisor (which is the only one known to limit
MAC addresses).
:param dhcp_options: None or a set of key/value pairs that should
determine the DHCP BOOTP response, eg. for PXE booting an instance
configured with the baremetal hypervisor. It is expected that these
are already formatted for the quantum v2 api.
See nova/virt/driver.py:dhcp_options_for_instance for an example.
"""
hypervisor_macs = kwargs.get('macs', None)
available_macs = None
if hypervisor_macs is not None:
# Make a copy we can mutate: records macs that have not been used
# to create a port on a network. If we find a mac with a
# pre-allocated port we also remove it from this set.
available_macs = set(hypervisor_macs)
neutron = neutronv2.get_client(context)
LOG.debug(_('allocate_for_instance() for %s'),
instance['display_name'])
if not instance['project_id']:
msg = _('empty project id for instance %s')
raise exception.InvalidInput(
reason=msg % instance['display_name'])
requested_networks = kwargs.get('requested_networks')
dhcp_opts = kwargs.get('dhcp_options', None)
ports = {}
fixed_ips = {}
net_ids = []
if requested_networks:
for network_id, fixed_ip, port_id in requested_networks:
if port_id:
port = neutron.show_port(port_id)['port']
if port.get('device_id'):
raise exception.PortInUse(port_id=port_id)
if hypervisor_macs is not None:
if port['mac_address'] not in hypervisor_macs:
raise exception.PortNotUsable(port_id=port_id,
instance=instance['display_name'])
else:
# Don't try to use this MAC if we need to create a
# port on the fly later. Identical MACs may be
# configured by users into multiple ports so we
# discard rather than popping.
available_macs.discard(port['mac_address'])
network_id = port['network_id']
ports[network_id] = port
elif fixed_ip and network_id:
fixed_ips[network_id] = fixed_ip
if network_id:
net_ids.append(network_id)
nets = self._get_available_networks(context, instance['project_id'],
net_ids)
if not nets:
LOG.warn(_("No network configured!"), instance=instance)
return network_model.NetworkInfo([])
security_groups = kwargs.get('security_groups', [])
security_group_ids = []
# TODO(arosen) Should optimize more to do direct query for security
# group if len(security_groups) == 1
if len(security_groups):
search_opts = {'tenant_id': instance['project_id']}
user_security_groups = neutron.list_security_groups(
**search_opts).get('security_groups')
for security_group in security_groups:
name_match = None
uuid_match = None
for user_security_group in user_security_groups:
if user_security_group['name'] == security_group:
if name_match:
raise exception.NoUniqueMatch(
_("Multiple security groups found matching"
" '%s'. Use an ID to be more specific.") %
security_group)
name_match = user_security_group['id']
if user_security_group['id'] == security_group:
uuid_match = user_security_group['id']
                # If a user names a security group the same as another
                # security group's uuid, the name takes priority.
if not name_match and not uuid_match:
raise exception.SecurityGroupNotFound(
security_group_id=security_group)
elif name_match:
security_group_ids.append(name_match)
elif uuid_match:
security_group_ids.append(uuid_match)
touched_port_ids = []
created_port_ids = []
for network in nets:
# If security groups are requested on an instance then the
            # network must have a subnet associated with it. Some plugins
# implement the port-security extension which requires
# 'port_security_enabled' to be True for security groups.
# That is why True is returned if 'port_security_enabled'
# is not found.
if (security_groups and not (
network['subnets']
and network.get('port_security_enabled', True))):
raise exception.SecurityGroupCannotBeApplied()
network_id = network['id']
zone = 'compute:%s' % instance['availability_zone']
port_req_body = {'port': {'device_id': instance['uuid'],
'device_owner': zone}}
try:
port = ports.get(network_id)
self._populate_neutron_extension_values(instance,
port_req_body)
# Requires admin creds to set port bindings
port_client = (neutron if not
self._has_port_binding_extension() else
neutronv2.get_client(context, admin=True))
if port:
port_client.update_port(port['id'], port_req_body)
touched_port_ids.append(port['id'])
else:
created_port_ids.append(self._create_port(
port_client, instance, network_id,
port_req_body, fixed_ips.get(network_id),
security_group_ids, available_macs, dhcp_opts))
except Exception:
with excutils.save_and_reraise_exception():
for port_id in touched_port_ids:
try:
port_req_body = {'port': {'device_id': None}}
# Requires admin creds to set port bindings
if self._has_port_binding_extension():
port_req_body['port']['binding:host_id'] = None
port_client = neutronv2.get_client(
context, admin=True)
else:
port_client = neutron
port_client.update_port(port_id, port_req_body)
except Exception:
msg = _("Failed to update port %s")
LOG.exception(msg, port_id)
for port_id in created_port_ids:
try:
neutron.delete_port(port_id)
except Exception:
msg = _("Failed to delete port %s")
LOG.exception(msg, port_id)
nw_info = self.get_instance_nw_info(context, instance, networks=nets)
# NOTE(danms): Only return info about ports we created in this run.
# In the initial allocation case, this will be everything we created,
# and in later runs will only be what was created that time. Thus,
# this only affects the attach case, not the original use for this
# method.
return network_model.NetworkInfo([port for port in nw_info
if port['id'] in created_port_ids +
touched_port_ids])
def _refresh_neutron_extensions_cache(self):
"""Refresh the neutron extensions cache when necessary."""
if (not self.last_neutron_extension_sync or
((time.time() - self.last_neutron_extension_sync)
>= CONF.neutron_extension_sync_interval)):
neutron = neutronv2.get_client(context.get_admin_context(),
admin=True)
extensions_list = neutron.list_extensions()['extensions']
self.last_neutron_extension_sync = time.time()
self.extensions.clear()
self.extensions = dict((ext['name'], ext)
for ext in extensions_list)
def _has_port_binding_extension(self, refresh_cache=False):
if refresh_cache:
self._refresh_neutron_extensions_cache()
return constants.PORTBINDING_EXT in self.extensions
def _populate_neutron_extension_values(self, instance, port_req_body):
"""Populate neutron extension values for the instance.
If the extension contains nvp-qos then get the rxtx_factor.
"""
self._refresh_neutron_extensions_cache()
if 'nvp-qos' in self.extensions:
flavor = flavors.extract_flavor(instance)
rxtx_factor = flavor.get('rxtx_factor')
port_req_body['port']['rxtx_factor'] = rxtx_factor
if self._has_port_binding_extension():
port_req_body['port']['binding:host_id'] = instance.get('host')
def deallocate_for_instance(self, context, instance, **kwargs):
"""Deallocate all network resources related to the instance."""
LOG.debug(_('deallocate_for_instance() for %s'),
instance['display_name'])
search_opts = {'device_id': instance['uuid']}
neutron = neutronv2.get_client(context)
data = neutron.list_ports(**search_opts)
ports = [port['id'] for port in data.get('ports', [])]
requested_networks = kwargs.get('requested_networks') or {}
ports_to_skip = [port_id for nets, fips, port_id in requested_networks]
ports = set(ports) - set(ports_to_skip)
for port in ports:
try:
neutron.delete_port(port)
except neutronv2.exceptions.NeutronClientException as e:
if e.status_code == 404:
LOG.warning(_("Port %s does not exist"), port)
else:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to delete neutron port %s"),
port)
def allocate_port_for_instance(self, context, instance, port_id,
network_id=None, requested_ip=None):
"""Allocate a port for the instance."""
return self.allocate_for_instance(context, instance,
requested_networks=[(network_id, requested_ip, port_id)])
def deallocate_port_for_instance(self, context, instance, port_id):
"""Remove a specified port from the instance.
Return network information for the instance
"""
try:
neutronv2.get_client(context).delete_port(port_id)
except Exception:
LOG.exception(_("Failed to delete neutron port %s") %
port_id)
return self.get_instance_nw_info(context, instance)
def list_ports(self, context, **search_opts):
"""List ports for the client based on search options."""
return neutronv2.get_client(context).list_ports(**search_opts)
def show_port(self, context, port_id):
"""Return the port for the client given the port id."""
return neutronv2.get_client(context).show_port(port_id)
@refresh_cache
def get_instance_nw_info(self, context, instance, networks=None,
use_slave=False):
"""Return network information for specified instance
and update cache.
"""
# NOTE(geekinutah): It would be nice if use_slave had us call
# special APIs that pummeled slaves instead of
# the master. For now we just ignore this arg.
result = self._get_instance_nw_info(context, instance, networks)
return result
def _get_instance_nw_info(self, context, instance, networks=None):
# keep this caching-free version of the get_instance_nw_info method
# because it is used by the caching logic itself.
LOG.debug(_('get_instance_nw_info() for %s'), instance['display_name'])
nw_info = self._build_network_info_model(context, instance, networks)
return network_model.NetworkInfo.hydrate(nw_info)
@refresh_cache
def add_fixed_ip_to_instance(self, context, instance, network_id):
"""Add a fixed ip to the instance from specified network."""
search_opts = {'network_id': network_id}
data = neutronv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
if not ipam_subnets:
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'network_id': network_id}
data = neutronv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
for subnet in ipam_subnets:
fixed_ips = p['fixed_ips']
fixed_ips.append({'subnet_id': subnet['id']})
port_req_body = {'port': {'fixed_ips': fixed_ips}}
try:
neutronv2.get_client(context).update_port(p['id'],
port_req_body)
return
except Exception as ex:
msg = _("Unable to update port %(portid)s on subnet "
"%(subnet_id)s with failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'],
'subnet_id': subnet['id'],
'exception': ex})
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
@refresh_cache
def remove_fixed_ip_from_instance(self, context, instance, address):
"""Remove a fixed ip from the instance."""
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'fixed_ips': 'ip_address=%s' % address}
data = neutronv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
fixed_ips = p['fixed_ips']
new_fixed_ips = []
for fixed_ip in fixed_ips:
if fixed_ip['ip_address'] != address:
new_fixed_ips.append(fixed_ip)
port_req_body = {'port': {'fixed_ips': new_fixed_ips}}
try:
neutronv2.get_client(context).update_port(p['id'],
port_req_body)
except Exception as ex:
msg = _("Unable to update port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'], 'exception': ex})
return
raise exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=instance['uuid'], ip=address)
def validate_networks(self, context, requested_networks, num_instances):
"""Validate that the tenant can use the requested networks.
        Return the number of instances that can be successfully allocated
with the requested network configuration.
"""
LOG.debug(_('validate_networks() for %s'),
requested_networks)
neutron = neutronv2.get_client(context)
ports_needed_per_instance = 0
if not requested_networks:
nets = self._get_available_networks(context, context.project_id,
neutron=neutron)
if len(nets) > 1:
# Attaching to more than one network by default doesn't
# make sense, as the order will be arbitrary and the guest OS
# won't know which to configure
msg = _("Multiple possible networks found, use a Network "
"ID to be more specific.")
raise exception.NetworkAmbiguous(msg)
else:
ports_needed_per_instance = 1
else:
net_ids = []
for (net_id, _i, port_id) in requested_networks:
if port_id:
try:
port = neutron.show_port(port_id).get('port')
except neutronv2.exceptions.NeutronClientException as e:
if e.status_code == 404:
port = None
else:
with excutils.save_and_reraise_exception():
LOG.exception(_("Failed to access port %s"),
port_id)
if not port:
raise exception.PortNotFound(port_id=port_id)
if port.get('device_id', None):
raise exception.PortInUse(port_id=port_id)
net_id = port['network_id']
else:
ports_needed_per_instance += 1
if net_id in net_ids:
raise exception.NetworkDuplicated(network_id=net_id)
net_ids.append(net_id)
# Now check to see if all requested networks exist
nets = self._get_available_networks(context,
context.project_id, net_ids,
neutron=neutron)
if len(nets) != len(net_ids):
                requested_netid_set = set(net_ids)
                returned_netid_set = set([net['id'] for net in nets])
                lostid_set = requested_netid_set - returned_netid_set
                id_str = ', '.join(lostid_set)
raise exception.NetworkNotFound(network_id=id_str)
# Note(PhilD): Ideally Nova would create all required ports as part of
# network validation, but port creation requires some details
# from the hypervisor. So we just check the quota and return
# how many of the requested number of instances can be created
ports = neutron.list_ports(tenant_id=context.project_id)['ports']
quotas = neutron.show_quota(tenant_id=context.project_id)['quota']
if quotas.get('port') == -1:
# Unlimited Port Quota
return num_instances
else:
free_ports = quotas.get('port') - len(ports)
ports_needed = ports_needed_per_instance * num_instances
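            # e.g. a port quota of 10 with 4 ports in use leaves
            # free_ports = 6; at 2 ports per instance, 3 instances fit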
if free_ports >= ports_needed:
return num_instances
else:
return free_ports // ports_needed_per_instance
def _get_instance_uuids_by_ip(self, context, address):
"""Retrieve instance uuids associated with the given ip address.
:returns: A list of dicts containing the uuids keyed by 'instance_uuid'
e.g. [{'instance_uuid': uuid}, ...]
"""
search_opts = {"fixed_ips": 'ip_address=%s' % address}
data = neutronv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
return [{'instance_uuid': port['device_id']} for port in ports
if port['device_id']]
def get_instance_uuids_by_ip_filter(self, context, filters):
"""Return a list of dicts in the form of
[{'instance_uuid': uuid}] that matched the ip filter.
"""
# filters['ip'] is composed as '^%s$' % fixed_ip.replace('.', '\\.')
ip = filters.get('ip')
# we remove ^$\ in the ip filer
if ip[0] == '^':
ip = ip[1:]
if ip[-1] == '$':
ip = ip[:-1]
ip = ip.replace('\\.', '.')
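        # e.g. '^10\\.0\\.0\\.2$' becomes '10.0.0.2'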
return self._get_instance_uuids_by_ip(context, ip)
def _get_port_id_by_fixed_address(self, client,
instance, address):
"""Return port_id from a fixed address."""
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone}
data = client.list_ports(**search_opts)
ports = data['ports']
port_id = None
for p in ports:
for ip in p['fixed_ips']:
if ip['ip_address'] == address:
port_id = p['id']
break
if not port_id:
raise exception.FixedIpNotFoundForAddress(address=address)
return port_id
@refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associate a floating ip with a fixed ip."""
        # Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could
        # not find why this parameter exists.
client = neutronv2.get_client(context)
port_id = self._get_port_id_by_fixed_address(client, instance,
fixed_address)
fip = self._get_floating_ip_by_address(client, floating_address)
param = {'port_id': port_id,
'fixed_ip_address': fixed_address}
client.update_floatingip(fip['id'], {'floatingip': param})
if fip['port_id']:
port = client.show_port(fip['port_id'])['port']
orig_instance_uuid = port['device_id']
msg_dict = dict(address=floating_address,
instance_id=orig_instance_uuid)
LOG.info(_('re-assign floating IP %(address)s from '
'instance %(instance_id)s') % msg_dict)
orig_instance = self.db.instance_get_by_uuid(context,
orig_instance_uuid)
# purge cached nw info for the original instance
update_instance_info_cache(self, context, orig_instance)
def get_all(self, context):
"""Get all networks for client."""
client = neutronv2.get_client(context)
networks = client.list_networks().get('networks')
for network in networks:
network['label'] = network['name']
return networks
def get(self, context, network_uuid):
"""Get specific network for client."""
client = neutronv2.get_client(context)
network = client.show_network(network_uuid).get('network') or {}
network['label'] = network['name']
return network
def delete(self, context, network_uuid):
"""Delete a network for client."""
raise NotImplementedError()
def disassociate(self, context, network_uuid):
"""Disassociate a network for client."""
raise NotImplementedError()
def get_fixed_ip(self, context, id):
"""Get a fixed ip from the id."""
raise NotImplementedError()
def get_fixed_ip_by_address(self, context, address):
"""Return instance uuids given an address."""
uuid_maps = self._get_instance_uuids_by_ip(context, address)
if len(uuid_maps) == 1:
return uuid_maps[0]
elif not uuid_maps:
raise exception.FixedIpNotFoundForAddress(address=address)
else:
raise exception.FixedIpAssociatedWithMultipleInstances(
address=address)
def _setup_net_dict(self, client, network_id):
if not network_id:
return {}
pool = client.show_network(network_id)['network']
return {pool['id']: pool}
def _setup_port_dict(self, client, port_id):
if not port_id:
return {}
port = client.show_port(port_id)['port']
return {port['id']: port}
def _setup_pools_dict(self, client):
pools = self._get_floating_ip_pools(client)
return dict([(i['id'], i) for i in pools])
def _setup_ports_dict(self, client, project_id=None):
search_opts = {'tenant_id': project_id} if project_id else {}
ports = client.list_ports(**search_opts)['ports']
return dict([(p['id'], p) for p in ports])
def get_floating_ip(self, context, id):
"""Return floating ip object given the floating ip id."""
client = neutronv2.get_client(context)
try:
fip = client.show_floatingip(id)['floatingip']
except neutronv2.exceptions.NeutronClientException as e:
if e.status_code == 404:
raise exception.FloatingIpNotFound(id=id)
else:
with excutils.save_and_reraise_exception():
LOG.exception(_('Unable to access floating IP %s'), id)
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def _get_floating_ip_pools(self, client, project_id=None):
search_opts = {constants.NET_EXTERNAL: True}
if project_id:
search_opts.update({'tenant_id': project_id})
data = client.list_networks(**search_opts)
return data['networks']
def get_floating_ip_pools(self, context):
"""Return floating ip pools."""
client = neutronv2.get_client(context)
pools = self._get_floating_ip_pools(client)
return [{'name': n['name'] or n['id']} for n in pools]
def _format_floating_ip_model(self, fip, pool_dict, port_dict):
pool = pool_dict[fip['floating_network_id']]
result = {'id': fip['id'],
'address': fip['floating_ip_address'],
'pool': pool['name'] or pool['id'],
'project_id': fip['tenant_id'],
# In Neutron v2, an exact fixed_ip_id does not exist.
'fixed_ip_id': fip['port_id'],
}
# In Neutron v2 API fixed_ip_address and instance uuid
# (= device_id) are known here, so pass it as a result.
result['fixed_ip'] = {'address': fip['fixed_ip_address']}
if fip['port_id']:
instance_uuid = port_dict[fip['port_id']]['device_id']
result['instance'] = {'uuid': instance_uuid}
else:
result['instance'] = None
return result
def get_floating_ip_by_address(self, context, address):
"""Return a floating ip given an address."""
client = neutronv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def get_floating_ips_by_project(self, context):
client = neutronv2.get_client(context)
project_id = context.project_id
fips = client.list_floatingips(tenant_id=project_id)['floatingips']
pool_dict = self._setup_pools_dict(client)
port_dict = self._setup_ports_dict(client, project_id)
return [self._format_floating_ip_model(fip, pool_dict, port_dict)
for fip in fips]
def get_floating_ips_by_fixed_address(self, context, fixed_address):
return []
def get_instance_id_by_floating_address(self, context, address):
"""Return the instance id a floating ip's fixed ip is allocated to."""
client = neutronv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if not fip['port_id']:
return None
port = client.show_port(fip['port_id'])['port']
return port['device_id']
def get_vifs_by_instance(self, context, instance):
raise NotImplementedError()
def get_vif_by_mac_address(self, context, mac_address):
raise NotImplementedError()
def _get_floating_ip_pool_id_by_name_or_id(self, client, name_or_id):
search_opts = {constants.NET_EXTERNAL: True, 'fields': 'id'}
if uuidutils.is_uuid_like(name_or_id):
search_opts.update({'id': name_or_id})
else:
search_opts.update({'name': name_or_id})
data = client.list_networks(**search_opts)
nets = data['networks']
if len(nets) == 1:
return nets[0]['id']
elif len(nets) == 0:
raise exception.FloatingIpPoolNotFound()
else:
msg = (_("Multiple floating IP pools matches found for name '%s'")
% name_or_id)
raise exception.NovaException(message=msg)
def allocate_floating_ip(self, context, pool=None):
"""Add a floating ip to a project from a pool."""
client = neutronv2.get_client(context)
pool = pool or CONF.default_floating_pool
pool_id = self._get_floating_ip_pool_id_by_name_or_id(client, pool)
# TODO(amotoki): handle exception during create_floatingip()
# At this timing it is ensured that a network for pool exists.
# quota error may be returned.
param = {'floatingip': {'floating_network_id': pool_id}}
fip = client.create_floatingip(param)
return fip['floatingip']['floating_ip_address']
def _get_floating_ip_by_address(self, client, address):
"""Get floatingip from floating ip address."""
if not address:
raise exception.FloatingIpNotFoundForAddress(address=address)
data = client.list_floatingips(floating_ip_address=address)
fips = data['floatingips']
if len(fips) == 0:
raise exception.FloatingIpNotFoundForAddress(address=address)
elif len(fips) > 1:
raise exception.FloatingIpMultipleFoundForAddress(address=address)
return fips[0]
def _get_floating_ips_by_fixed_and_port(self, client, fixed_ip, port):
"""Get floatingips from fixed ip and port."""
try:
data = client.list_floatingips(fixed_ip_address=fixed_ip,
port_id=port)
# If a neutron plugin does not implement the L3 API a 404 from
# list_floatingips will be raised.
except neutronv2.exceptions.NeutronClientException as e:
if e.status_code == 404:
return []
with excutils.save_and_reraise_exception():
LOG.exception(_('Unable to access floating IP %(fixed_ip)s '
                                'for port %(port_id)s'),
{'fixed_ip': fixed_ip, 'port_id': port})
return data['floatingips']
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Remove a floating ip with the given address from a project."""
# Note(amotoki): We cannot handle a case where multiple pools
# have overlapping IP address range. In this case we cannot use
# 'address' as a unique key.
# This is a limitation of the current nova.
        # Note(amotoki): We cannot handle a case where multiple pools
        # have overlapping IP address range. In this case we cannot use
        # 'address' as a unique key.
client = neutronv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if fip['port_id']:
raise exception.FloatingIpAssociated(address=address)
client.delete_floatingip(fip['id'])
@refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociate a floating ip from the instance."""
        # Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could
        # not find why this parameter exists.
client = neutronv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
client.update_floatingip(fip['id'], {'floatingip': {'port_id': None}})
def migrate_instance_start(self, context, instance, migration):
"""Start to migrate the network of an instance."""
# NOTE(wenjianhn): just pass to make migrate instance doesn't
# raise for now.
pass
def migrate_instance_finish(self, context, instance, migration):
"""Finish migrating the network of an instance."""
if not self._has_port_binding_extension(refresh_cache=True):
return
neutron = neutronv2.get_client(context, admin=True)
search_opts = {'device_id': instance['uuid'],
'tenant_id': instance['project_id']}
data = neutron.list_ports(**search_opts)
ports = data['ports']
for p in ports:
port_req_body = {'port': {'binding:host_id':
migration['dest_compute']}}
try:
neutron.update_port(p['id'], port_req_body)
except Exception:
with excutils.save_and_reraise_exception():
msg = _("Unable to update host of port %s")
LOG.exception(msg, p['id'])
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force add a network to the project."""
raise NotImplementedError()
def _nw_info_get_ips(self, client, port):
network_IPs = []
for fixed_ip in port['fixed_ips']:
fixed = network_model.FixedIP(address=fixed_ip['ip_address'])
floats = self._get_floating_ips_by_fixed_and_port(
client, fixed_ip['ip_address'], port['id'])
for ip in floats:
fip = network_model.IP(address=ip['floating_ip_address'],
type='floating')
fixed.add_floating_ip(fip)
network_IPs.append(fixed)
return network_IPs
def _nw_info_get_subnets(self, context, port, network_IPs):
subnets = self._get_subnets_from_port(context, port)
for subnet in subnets:
subnet['ips'] = [fixed_ip for fixed_ip in network_IPs
if fixed_ip.is_in_subnet(subnet)]
return subnets
def _nw_info_build_network(self, port, networks, subnets):
network_name = None
for net in networks:
if port['network_id'] == net['id']:
network_name = net['name']
tenant_id = net['tenant_id']
break
else:
tenant_id = port['tenant_id']
LOG.warning(_("Network %(id)s not matched with the tenants "
"network! The ports tenant %(tenant_id)s will be "
"used."),
{'id': port['network_id'], 'tenant_id': tenant_id})
bridge = None
ovs_interfaceid = None
# Network model metadata
should_create_bridge = None
vif_type = port.get('binding:vif_type')
# TODO(berrange) Neutron should pass the bridge name
# in another binding metadata field
if vif_type == network_model.VIF_TYPE_OVS:
bridge = CONF.neutron_ovs_bridge
ovs_interfaceid = port['id']
elif vif_type == network_model.VIF_TYPE_BRIDGE:
bridge = "brq" + port['network_id']
should_create_bridge = True
if bridge is not None:
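            # device names are length-limited on Linux, so clip the
            # bridge name to NIC_NAME_LEN characters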
bridge = bridge[:network_model.NIC_NAME_LEN]
network = network_model.Network(
id=port['network_id'],
bridge=bridge,
injected=CONF.flat_injected,
label=network_name,
tenant_id=tenant_id
)
network['subnets'] = subnets
port_profile = port.get('binding:profile')
if port_profile:
physical_network = port_profile.get('physical_network')
if physical_network:
network['physical_network'] = physical_network
if should_create_bridge is not None:
network['should_create_bridge'] = should_create_bridge
return network, ovs_interfaceid
def _build_network_info_model(self, context, instance, networks=None):
# Note(arosen): on interface-attach networks only contains the
# network that the interface is being attached to.
search_opts = {'tenant_id': instance['project_id'],
'device_id': instance['uuid'], }
client = neutronv2.get_client(context, admin=True)
data = client.list_ports(**search_opts)
ports = data.get('ports', [])
nw_info = network_model.NetworkInfo()
# Unfortunately, this is sometimes in unicode and sometimes not
if isinstance(instance['info_cache']['network_info'], six.text_type):
ifaces = jsonutils.loads(instance['info_cache']['network_info'])
else:
ifaces = instance['info_cache']['network_info']
if networks is None:
net_ids = [iface['network']['id'] for iface in ifaces]
networks = self._get_available_networks(context,
instance['project_id'],
net_ids)
# ensure ports are in preferred network order, and filter out
# those not attached to one of the provided list of networks
else:
# Include existing interfaces so they are not removed from the db.
# Needed when interfaces are added to existing instances.
for iface in ifaces:
nw_info.append(network_model.VIF(
id=iface['id'],
address=iface['address'],
network=iface['network'],
type=iface['type'],
ovs_interfaceid=iface['ovs_interfaceid'],
devname=iface['devname']))
net_ids = [n['id'] for n in networks]
ports = [port for port in ports if port['network_id'] in net_ids]
_ensure_requested_network_ordering(lambda x: x['network_id'],
ports, net_ids)
for port in ports:
network_IPs = self._nw_info_get_ips(client, port)
subnets = self._nw_info_get_subnets(context, port, network_IPs)
devname = "tap" + port['id']
devname = devname[:network_model.NIC_NAME_LEN]
network, ovs_interfaceid = self._nw_info_build_network(port,
networks,
subnets)
nw_info.append(network_model.VIF(
id=port['id'],
address=port['mac_address'],
network=network,
type=port.get('binding:vif_type'),
ovs_interfaceid=ovs_interfaceid,
devname=devname))
return nw_info
def _get_subnets_from_port(self, context, port):
"""Return the subnets for a given port."""
fixed_ips = port['fixed_ips']
# No fixed_ips for the port means there is no subnet associated
# with the network the port is created on.
# Since list_subnets(id=[]) returns all subnets visible for the
        # current tenant, returned subnets may contain subnets which are not
# related to the port. To avoid this, the method returns here.
if not fixed_ips:
return []
search_opts = {'id': [ip['subnet_id'] for ip in fixed_ips]}
data = neutronv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
subnets = []
for subnet in ipam_subnets:
subnet_dict = {'cidr': subnet['cidr'],
'gateway': network_model.IP(
address=subnet['gateway_ip'],
type='gateway'),
}
# attempt to populate DHCP server field
search_opts = {'network_id': subnet['network_id'],
'device_owner': 'network:dhcp'}
data = neutronv2.get_client(context).list_ports(**search_opts)
dhcp_ports = data.get('ports', [])
for p in dhcp_ports:
for ip_pair in p['fixed_ips']:
if ip_pair['subnet_id'] == subnet['id']:
subnet_dict['dhcp_server'] = ip_pair['ip_address']
break
subnet_object = network_model.Subnet(**subnet_dict)
for dns in subnet.get('dns_nameservers', []):
subnet_object.add_dns(
network_model.IP(address=dns, type='dns'))
# TODO(gongysh) get the routes for this subnet
subnets.append(subnet_object)
return subnets
def get_dns_domains(self, context):
"""Return a list of available dns domains.
These can be used to create DNS entries for floating ips.
"""
raise NotImplementedError()
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
raise NotImplementedError()
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
raise NotImplementedError()
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain."""
raise NotImplementedError()
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain."""
raise NotImplementedError()
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
raise NotImplementedError()
def create_public_dns_domain(self, context, domain, project=None):
"""Create a private DNS domain with optional nova project."""
raise NotImplementedError()
def _ensure_requested_network_ordering(accessor, unordered, preferred):
"""Sort a list with respect to the preferred network ordering."""
if preferred:
unordered.sort(key=lambda i: preferred.index(accessor(i)))
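# e.g. given nets with ids ['net-a', 'net-b'] and preferred ['net-b', 'net-a'],
# _ensure_requested_network_ordering(lambda n: n['id'], nets, preferred)
# sorts nets in place so 'net-b' comes first.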
|
{
"content_hash": "687a3ea72c0369ad7c084f8b42887916",
"timestamp": "",
"source": "github",
"line_count": 1142,
"max_line_length": 79,
"avg_line_length": 45.175131348511385,
"alnum_prop": 0.5607482070168638,
"repo_name": "petrutlucian94/nova_dev",
"id": "edd66b36ddbcb12e71e3d88a07f7ed9345c326f6",
"size": "52264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/network/neutronv2/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13338689"
},
{
"name": "Shell",
"bytes": "16180"
}
],
"symlink_target": ""
}
|
import datetime
from django.utils import timezone
from django.test import TestCase
from django.core.urlresolvers import reverse
from polls.models import Poll
def create_poll(question, days):
"""
Creates a poll with the given `question` published the given number of
`days` offset to now (negative for polls published in the past,
positive for polls that have yet to be published).
"""
return Poll.objects.create(
question=question,
pub_date=timezone.now() + datetime.timedelta(days=days)
)
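# e.g. create_poll("Past poll.", days=-30) backdates the poll by 30 days,
# while days=30 schedules it 30 days in the future.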
class PollViewTests(TestCase):
def test_index_view_with_no_polls(self):
"""
If no polls exist, an appropriate message should be displayed.
"""
response = self.client.get(reverse('polls:index'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "No polls are available.")
self.assertQuerysetEqual(response.context['latest_poll_list'], [])
def test_index_view_with_a_past_poll(self):
"""
Polls with a pub_date in the past should be displayed on the index page.
"""
create_poll(question="Past poll.", days=-30)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_poll_list'],
['<Poll: Past poll.>']
)
def test_index_view_with_a_future_poll(self):
"""
Polls with a pub_date in the future should not be displayed on the
index page.
"""
create_poll(question="Future poll.", days=30)
response = self.client.get(reverse('polls:index'))
self.assertContains(
response,
"No polls are available.",
status_code=200
)
self.assertQuerysetEqual(response.context['latest_poll_list'], [])
def test_index_view_with_future_poll_and_past_poll(self):
"""
Even if both past and future polls exist, only past polls should be
displayed.
"""
create_poll(question="Past poll.", days=-30)
create_poll(question="Future poll.", days=30)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_poll_list'],
['<Poll: Past poll.>']
)
def test_index_view_with_two_past_polls(self):
"""
The polls index page may display multiple polls.
"""
create_poll(question="Past poll 1.", days=-30)
create_poll(question="Past poll 2.", days=-5)
response = self.client.get(reverse('polls:index'))
self.assertQuerysetEqual(
response.context['latest_poll_list'],
['<Poll: Past poll 2.>', '<Poll: Past poll 1.>']
)
class PollIndexDetailTests(TestCase):
def test_detail_view_with_a_future_poll(self):
"""
The detail view of a poll with a pub_date in the future should
return a 404 not found.
"""
future_poll = create_poll(question='Future poll.', days=5)
response = self.client.get(
reverse('polls:detail', args=(future_poll.id,))
)
self.assertEqual(response.status_code, 404)
def test_detail_view_with_a_past_poll(self):
"""
The detail view of a poll with a pub_date in the past should display
the poll's question.
"""
past_poll = create_poll(question='Past Poll.', days=-5)
response = self.client.get(
reverse('polls:detail', args=(past_poll.id,))
)
self.assertContains(response, past_poll.question, status_code=200)
class PollMethodTests(TestCase):
def test_was_published_recently_with_future_poll(self):
"""
was_published_recently() should return False for polls whose
pub_date is in the future
"""
future_poll = Poll(
pub_date=timezone.now() + datetime.timedelta(days=30)
)
self.assertEqual(future_poll.was_published_recently(), False)
def test_was_published_recently_with_old_poll(self):
"""
was_published_recently() should return False for polls whose pub_date
is older than 1 day
"""
old_poll = Poll(pub_date=timezone.now() - datetime.timedelta(days=30))
self.assertEqual(old_poll.was_published_recently(), False)
def test_was_published_recently_with_recent_poll(self):
"""
was_published_recently() should return True for polls whose pub_date
is within the last day
"""
recent_poll = Poll(
pub_date=timezone.now() - datetime.timedelta(hours=1)
)
self.assertEqual(recent_poll.was_published_recently(), True)
|
{
"content_hash": "792c19985443cb9121ac279f72257cc5",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 80,
"avg_line_length": 34.85294117647059,
"alnum_prop": 0.6135021097046414,
"repo_name": "pinax-archives/django-discover-slowest-tests-runner",
"id": "3bc82b010afe17df09f1527bfbdcd0a4b586d88a",
"size": "4740",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "_example/polls/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1114"
},
{
"name": "Makefile",
"bytes": "195"
},
{
"name": "Python",
"bytes": "15798"
}
],
"symlink_target": ""
}
|
import fitbit
from stravalib import Client, unithelper
from datetime import datetime, timedelta
from pytz import timezone
from config import *
debug = False
METERS_IN_A_MILE = 1609.34
pacific_timezone = timezone('America/Los_Angeles')
#mmf = MapMyFitness(api_key=MMF_CLIENT_KEY, access_token=MMF_ACCESS_TOKEN)
strava = Client( access_token=STRAVA_ACCESS_TOKEN)
fitbit_client = fitbit.Fitbit( FITBIT_CLIENT_KEY, FITBIT_CLIENT_SECRET, user_key=FITBIT_USER_KEY, user_secret=FITBIT_USER_SECRET)
# Only grab the last 10 activities
activities = strava.get_activities( limit = 10 )
#for activity in activities:
# print activity.type
# Iterate through all valid activity types
#for activity_id in MMF_BIKE_ACTIVITY_TYPES:
# workouts = workouts + mmf.workout.search( user=MMF_USER_ID, activity_type=activity_id, started_after=started_after )
for activity in activities:
start_datetime = activity.start_date_local
start_date = start_datetime.strftime( '%Y-%m-%d' )
start_time = start_datetime.strftime( '%H:%M' )
duration_milliseconds = int( 1000 * activity.moving_time.total_seconds() )
distance = unithelper.miles( activity.distance ).num
dupe = False
activity_information = {
'startTime' : start_time,
'durationMillis': duration_milliseconds,
'date' : start_date,
'distance' : distance
}
	# If it's a Ride in Strava we have a direct mapping to Fitbit; otherwise we need to use a custom activity type
if activity.type == 'Ride':
activity_information['activityId'] = FITBIT_BIKE_ACTIVITY_ID
elif activity.type == 'Hike':
activity_information['activityId'] = FITBIT_HIKE_ACTIVITY_ID
else:
continue
# Make sure we didn't already log this activity
for fitbit_activity in fitbit_client.activities( date = start_date )['activities']:
if start_time == fitbit_activity['startTime'] and duration_milliseconds == fitbit_activity['duration']:
dupe = True
break
# Log the activity in FitBit if it's not a duplicate
if not dupe:
fitbit_client.log_activity( activity_information )
print "Created an activity record in FitBit for the workout named: " + activity.name
# Otherwise, skip
else:
if debug:
print 'Activity record for the workout named "'+ activity.name + '" already exists in FitBit!'
print "Fitbit raw data"
print fitbit_activity['startTime']
print start_date
print fitbit_activity['duration']
print fitbit_activity['distance']
if debug:
print "Strava raw data"
print start_time
print start_date
print duration_milliseconds
print distance
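# The wildcard import above pulls credentials and IDs from a local config.py.
# Inferred from the names used in this script (treat this as an assumption,
# not an authoritative spec), config.py must define:
#
#     STRAVA_ACCESS_TOKEN
#     FITBIT_CLIENT_KEY, FITBIT_CLIENT_SECRET
#     FITBIT_USER_KEY, FITBIT_USER_SECRET
#     FITBIT_BIKE_ACTIVITY_ID, FITBIT_HIKE_ACTIVITY_ID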
|
{
"content_hash": "453b55d1354217cff8c5dd41244bf5a4",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 129,
"avg_line_length": 32.39506172839506,
"alnum_prop": 0.7115091463414634,
"repo_name": "erikgranlund/mmr_to_fitbit_sync",
"id": "1d514cd3048ab4d7cb1b779b3ea820542d02d463",
"size": "2624",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sync_strava_and_fitbit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11243"
}
],
"symlink_target": ""
}
|
import ast
import json
import hashlib
import urllib
import base64
from django.test import TestCase
from django.conf import settings
from django.core.urlresolvers import reverse
from lrs import models, views
class ActivityProfileTests(TestCase):
test_activityId1 = 'act:act-1'
test_activityId2 = 'act:act-2'
test_activityId3 = 'act:act-3'
other_activityId = 'act:act-other'
content_type = "application/json"
testprofileId1 = "http://profile.test.id/test/1"
testprofileId2 = "http://profile.test.id/test/2"
testprofileId3 = "http://profile.test.id/test/3"
otherprofileId1 = "http://profile.test.id/other/1"
@classmethod
def setUpClass(cls):
print "\n%s" % __name__
def setUp(self):
self.username = "tester"
self.email = "test@tester.com"
self.password = "test"
self.auth = "Basic %s" % base64.b64encode("%s:%s" % (self.username, self.password))
form = {'username':self.username, 'email': self.email,'password':self.password,'password2':self.password}
self.client.post(reverse(views.register),form, X_Experience_API_Version=settings.XAPI_VERSION)
self.testparams1 = {"profileId": self.testprofileId1, "activityId": self.test_activityId1}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams1))
self.testprofile1 = {"test":"put profile 1","obj":{"activity":"test"}}
self.put1 = self.client.put(path, json.dumps(self.testprofile1), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.testparams2 = {"profileId": self.testprofileId2, "activityId": self.test_activityId2}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams2))
self.testprofile2 = {"test":"put profile 2","obj":{"activity":"test"}}
self.put2 = self.client.put(path, json.dumps(self.testprofile2), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.testparams3 = {"profileId": self.testprofileId3, "activityId": self.test_activityId3}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams3))
self.testprofile3 = {"test":"put profile 3","obj":{"activity":"test"}}
self.put3 = self.client.put(path, json.dumps(self.testprofile3), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.testparams4 = {"profileId": self.otherprofileId1, "activityId": self.other_activityId}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams4))
self.otherprofile1 = {"test":"put profile other","obj":{"activity":"other"}}
self.put4 = self.client.put(path, json.dumps(self.otherprofile1), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.testparams5 = {"profileId": self.otherprofileId1, "activityId": self.test_activityId1}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams5))
self.anotherprofile1 = {"test":"put another profile 1","obj":{"activity":"other"}}
self.put5 = self.client.put(path, json.dumps(self.anotherprofile1), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def tearDown(self):
self.client.delete(reverse(views.activity_profile), self.testparams1, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.client.delete(reverse(views.activity_profile), self.testparams2, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.client.delete(reverse(views.activity_profile), self.testparams3, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.client.delete(reverse(views.activity_profile), self.testparams4, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.client.delete(reverse(views.activity_profile), self.testparams5, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_put(self):
#Test the puts
self.assertEqual(self.put1.status_code, 204)
self.assertEqual(self.put2.status_code, 204)
self.assertEqual(self.put3.status_code, 204)
self.assertEqual(self.put4.status_code, 204)
self.assertEqual(self.put5.status_code, 204)
#Make sure profiles have correct activities
self.assertEqual(models.ActivityProfile.objects.filter(profileId=self.testprofileId1)[0].activityId, self.test_activityId1)
self.assertEqual(models.ActivityProfile.objects.filter(profileId=self.testprofileId2)[0].activityId, self.test_activityId2)
self.assertEqual(models.ActivityProfile.objects.filter(profileId=self.testprofileId3)[0].activityId, self.test_activityId3)
def test_put_no_params(self):
put = self.client.put(reverse(views.activity_profile) ,content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEquals(put.content, 'Error -- activity_profile - method = PUT, but activityId parameter missing..')
def test_put_no_activityId(self):
put = self.client.put(reverse(views.activity_profile), {'profileId':'10'},content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEquals(put.content, 'Error -- activity_profile - method = PUT, but activityId parameter missing..')
def test_put_no_profileId(self):
testparams = {'activityId':'act:act:act'}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(testparams))
put = self.client.put(path, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEquals(put.content, 'Error -- activity_profile - method = PUT, but profileId parameter missing..')
def test_put_etag_missing_on_change(self):
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams1))
profile = {"test":"error - trying to put new profile w/o etag header","obj":{"activity":"test"}}
response = self.client.put(path, profile, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(response.status_code, 409)
self.assertIn('If-Match and If-None-Match headers were missing', response.content)
r = self.client.get(reverse(views.activity_profile), self.testparams1, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], self.testprofile1['test'])
self.assertEqual(robj['obj']['activity'], self.testprofile1['obj']['activity'])
def test_put_etag_right_on_change(self):
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams1))
profile = {"test":"good - trying to put new profile w/ etag header","obj":{"activity":"act:test"}}
thehash = '"%s"' % hashlib.sha1(json.dumps(self.testprofile1)).hexdigest()
response = self.client.put(path, json.dumps(profile), content_type=self.content_type, If_Match=thehash, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(response.status_code, 204)
r = self.client.get(reverse(views.activity_profile), self.testparams1, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.content, json.dumps(profile))
def test_put_etag_wrong_on_change(self):
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams1))
profile = {"test":"error - trying to put new profile w/ wrong etag value","obj":{"activity":"act:test"}}
thehash = '"%s"' % hashlib.sha1('%s' % 'wrong hash').hexdigest()
response = self.client.put(path, profile, content_type=self.content_type, If_Match=thehash, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(response.status_code, 412)
self.assertIn('No resources matched', response.content)
r = self.client.get(reverse(views.activity_profile), self.testparams1, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], self.testprofile1['test'])
self.assertEqual(robj['obj']['activity'], self.testprofile1['obj']['activity'])
def test_put_etag_if_none_match_good(self):
params = {"profileId": 'http://etag.nomatch.good', "activityId": self.test_activityId1}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
profile = {"test":"good - trying to put new profile w/ if none match etag header","obj":{"activity":"act:test"}}
response = self.client.put(path, json.dumps(profile), content_type=self.content_type, if_none_match='*', Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(response.status_code, 204)
r = self.client.get(reverse(views.activity_profile), params, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], profile['test'])
self.assertEqual(robj['obj']['activity'], profile['obj']['activity'])
r = self.client.delete(reverse(views.activity_profile), params, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_put_etag_if_none_match_bad(self):
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams1))
profile = {"test":"error - trying to put new profile w/ if none match etag but one exists","obj":{"activity":"act:test"}}
response = self.client.put(path, profile, content_type=self.content_type, If_None_Match='*', Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(response.status_code, 412)
self.assertEqual(response.content, 'Resource detected')
r = self.client.get(reverse(views.activity_profile), self.testparams1, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], self.testprofile1['test'])
self.assertEqual(robj['obj']['activity'], self.testprofile1['obj']['activity'])
def test_get_activity_only(self):
response = self.client.get(reverse(views.activity_profile), {'activityId':self.test_activityId2}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(response.status_code, 200)
self.assertContains(response, self.testprofileId2)
params = {'activityId': self.test_activityId2, 'profileId': self.testprofileId2}
self.client.delete(reverse(views.activity_profile), params, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_get_activity_profileId(self):
response = self.client.get(reverse(views.activity_profile), {'activityId':self.test_activityId1,'profileId':self.testprofileId1},
X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(response.status_code, 200)
robj = ast.literal_eval(response.content)
self.assertEqual(robj['test'], self.testprofile1['test'])
self.assertEqual(robj['obj']['activity'], self.testprofile1['obj']['activity'])
resp_hash = hashlib.sha1(response.content).hexdigest()
self.assertEqual(response['etag'], '"%s"' % resp_hash)
params = {'activityId': self.test_activityId1, 'profileId': self.testprofileId1}
self.client.delete(reverse(views.activity_profile), params, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_get_activity_profileId_no_auth(self):
response = self.client.get(reverse(views.activity_profile), {'activityId':self.test_activityId1,'profileId':self.testprofileId1}, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(response.status_code, 400)
def test_get_activity_profileId_activity_dne(self):
response = self.client.get(reverse(views.activity_profile), {'activityId':'http://actID','profileId':self.testprofileId1}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(response.status_code, 404)
def test_get_activity_since_tz(self):
actid = "test:activity"
profid = "test://test/tz"
st = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tom@adlnet.gov"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/assess","display": {"en-US":"assessed"}},
"object":{'objectType':'Activity', 'id': actid}})
st_post = self.client.post(reverse(views.statements), st, content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(st_post.status_code, 200)
params = {"profileId": profid, "activityId": actid}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"test":"timezone since","obj":{"activity":"other"}}
r = self.client.put(path, json.dumps(prof), content_type=self.content_type, updated="2012-11-11T12:00:00+00:00", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(r.status_code, 204)
since = "2012-11-11T12:00:00-02:00"
response = self.client.get(reverse(views.activity_profile), {'activityId': actid,'since':since}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(response.status_code, 200)
self.assertNotIn(profid, response.content)
params = {"activityId": actid, "profileId": profid}
self.client.delete(reverse(views.activity_profile), params, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_get_activity_bad_since(self):
actid = "test:activity"
profid = "test://test/tz"
st = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tom@adlnet.gov"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/assess","display": {"en-US":"assessed"}},
"object":{'objectType':'Activity', 'id': actid}})
st_post = self.client.post(reverse(views.statements), st, content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(st_post.status_code, 200)
params = {"profileId": profid, "activityId": actid}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"test":"timezone since","obj":{"activity":"other"}}
r = self.client.put(path, json.dumps(prof), content_type=self.content_type, updated="2012-11-11T12:00:00+00:00", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(r.status_code, 204)
since = "2012-11-1112:00:00-02:00"
response = self.client.get(reverse(views.activity_profile), {'activityId': actid,'since':since}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.content, "Since parameter was not a valid ISO8601 timestamp")
params = {"activityId": actid, "profileId": profid}
self.client.delete(reverse(views.activity_profile), params, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_get_no_activityId_with_profileId(self):
response = self.client.get(reverse(views.activity_profile), {'profileId': self.testprofileId3}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.content, 'Error -- activity_profile - method = GET, but activityId parameter missing..')
def test_get_no_activityId_with_since(self):
since = "2012-07-01T13:30:00+04:00"
response = self.client.get(reverse(views.activity_profile), {'since':since}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.content, 'Error -- activity_profile - method = GET, but activityId parameter missing..')
def test_delete(self):
response = self.client.delete(reverse(views.activity_profile), {'activityId':self.other_activityId, 'profileId':self.otherprofileId1}, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
def test_cors_put(self):
profileid = 'http://test.cors.put'
activityid = 'act:test_cors_put-activity'
testparams1 = {"profileId": profileid, "activityId": activityid}
content = {"test":"put profile 1","obj":{"activity":"act:test"}}
params = "profileId=%s&activityId=%s&Authorization=%s&content=%s&X-Experience-API-Version=1.0" % (profileid, activityid,self.auth,content)
        path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode({"method":"PUT"}))
st = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tom@adlnet.gov"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/assess","display": {"en-US":"assessed"}},
"object":{'objectType':'Activity', 'id': activityid}})
st_post = self.client.post(reverse(views.statements), st, content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(st_post.status_code, 200)
thedata = urllib.quote_plus(params)
put1 = self.client.post(path, thedata, content_type="application/x-www-form-urlencoded")
self.assertEqual(put1.status_code, 204)
get1 = self.client.get(reverse(views.activity_profile), testparams1, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get1.status_code, 200)
        c = ast.literal_eval(get1.content)  # ast is already imported at module level
self.assertEqual(c['test'], content['test'])
self.client.delete(reverse(views.activity_profile), testparams1, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_cors_put_etag(self):
pid = 'http://ie.cors.etag/test'
aid = 'act:ie.cors.etag/test'
st = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tom@adlnet.gov"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/assess","display": {"en-US":"assessed"}},
"object":{'objectType':'Activity', 'id': aid}})
st_post = self.client.post(reverse(views.statements), st, content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(st_post.status_code, 200)
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(self.testparams1))
tp = {"test":"put example profile for test_cors_put_etag","obj":{"activity":"this should be replaced -- ie cors post/put"}}
thehash = '"%s"' % hashlib.sha1(json.dumps(self.testprofile1)).hexdigest()
put1 = self.client.put(path, tp, content_type=self.content_type, If_Match=thehash, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(put1.status_code, 204)
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode({"method":"PUT"}))
content = {"test":"good - trying to put new profile w/ etag header - IE cors","obj":{"activity":"test IE cors etag"}}
thehash = '"%s"' % hashlib.sha1('%s' % tp).hexdigest()
thedata = "profileId=%s&activityId=%s&If-Match=%s&Authorization=%s&Content-Type=application/x-www-form-urlencoded&content=%s&X-Experience-API-Version=1.0.0" % (pid, aid, thehash, self.auth, content)
response = self.client.post(path, thedata, content_type="application/x-www-form-urlencoded")
self.assertEqual(response.status_code, 204)
r = self.client.get(reverse(views.activity_profile), {'activityId': aid, 'profileId': pid}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(r.status_code, 200)
        c = ast.literal_eval(r.content)  # ast is already imported at module level
self.assertEqual(c['test'], content['test'])
self.client.delete(reverse(views.activity_profile), {'activityId': aid, 'profileId': pid}, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_tetris_snafu(self):
params = {"profileId": "http://test.tetris/", "activityId": "act:tetris.snafu"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
profile = {"test":"put profile 1","obj":{"activity":"test"}}
st = json.dumps({"actor":{"objectType":"Agent","mbox": "mailto:tom@adlnet.gov"},
"verb":{"id": "http://adlnet.gov/expapi/verbs/assess","display": {"en-US":"assessed"}},
"object":{'objectType':'Activity', 'id': "act:tetris.snafu"}})
st_post = self.client.post(reverse(views.statements), st, content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(st_post.status_code, 200)
p_r = self.client.put(path, json.dumps(profile), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(p_r.status_code, 204)
r = self.client.get(reverse(views.activity_profile), {'activityId': "act:tetris.snafu", 'profileId': "http://test.tetris/"}, X_Experience_API_Version=settings.XAPI_VERSION, Authorization=self.auth)
self.assertEqual(r.status_code, 200)
self.assertEqual(r['Content-Type'], self.content_type)
self.assertIn("\"", r.content)
self.client.delete(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_post_new_profile(self):
params = {"profileId": "prof:test_post_new_profile", "activityId": "act:test.post.new.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"test":"post new profile","obj":{"activity":"act:test.post.new.prof"}}
post = self.client.post(path, json.dumps(prof), content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
self.assertEqual(ast.literal_eval(get.content), prof)
self.assertEqual(get.get('etag'), '"%s"' % hashlib.sha1(get.content).hexdigest())
self.client.delete(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_post_blank_profile(self):
params = {"profileId": "prof:test_post_new_profile", "activityId": "act:test.post.new.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = ""
post = self.client.post(path, prof, content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 400)
self.assertEqual(post.content, 'No body in request')
def test_post_update_profile(self):
params = {"profileId": "prof:test_post_update_profile", "activityId": "act:test.post.update.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"test":"post updated profile","obj":{"activity":"act:test.post.update.prof"}}
post = self.client.post(path, json.dumps(prof), content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
what = ast.literal_eval(get.content)
self.assertEqual(what, prof)
etag = '"%s"' % hashlib.sha1(get.content).hexdigest()
self.assertEqual(get.get('etag'), etag)
params = {"profileId": "prof:test_post_update_profile", "activityId": "act:test.post.update.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"obj":{"activity":"act:test.post.update.prof_changed", "new":"thing"}, "added":"yes"}
post = self.client.post(path, json.dumps(prof), content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 409)
post = self.client.post(path, json.dumps(prof), content_type="application/json", If_Match=etag, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
ret_json = ast.literal_eval(get.content)
self.assertEqual(ret_json['added'], prof['added'])
self.assertEqual(ret_json['test'], "post updated profile")
self.assertEqual(ret_json['obj']['activity'], prof['obj']['activity'])
self.assertEqual(ret_json['obj']['new'], prof['obj']['new'])
self.assertEqual(get.get('etag'), '"%s"' % hashlib.sha1(get.content).hexdigest())
self.client.delete(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_post_and_put_profile(self):
params = {"profileId": "prof:test_post_and_put_profile", "activityId": "act:test.post.put.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"test":"post and put profile","obj":{"activity":"act:test.post.put.prof"}}
post = self.client.post(path, json.dumps(prof), content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
self.assertEqual(ast.literal_eval(get.content), prof)
self.assertEqual(get.get('etag'), '"%s"' % hashlib.sha1(get.content).hexdigest())
params = {"profileId": "prof:test_post_and_put_profile", "activityId": "act:test.post.put.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"wipe":"new data"}
thehash = get.get('etag')
put = self.client.put(path, json.dumps(prof), content_type="application/json", If_Match=thehash, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(put.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
self.assertEqual(ast.literal_eval(get.content), prof)
etag = '"%s"' % hashlib.sha1(get.content).hexdigest()
self.assertEqual(get.get('etag'), etag)
params = {"profileId": "prof:test_post_and_put_profile", "activityId": "act:test.post.put.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
prof = {"test":"post updated profile","obj":{"activity":"act:test.post.update.prof_changed", "new":"thing"}, "added":"yes"}
post = self.client.post(path, json.dumps(prof), content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 409)
post = self.client.post(path, json.dumps(prof), content_type="application/json", If_Match=etag, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
ret_json = ast.literal_eval(get.content)
self.assertEqual(ret_json['wipe'], "new data")
self.assertEqual(ret_json['added'], prof['added'])
self.assertEqual(ret_json['test'], prof['test'])
self.assertEqual(ret_json['obj']['activity'], prof['obj']['activity'])
self.assertEqual(ret_json['obj']['new'], prof['obj']['new'])
self.assertEqual(get.get('etag'), '"%s"' % hashlib.sha1(get.content).hexdigest())
self.client.delete(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
def test_put_wrong_activityId(self):
params = {'activityId':'foo','profileId':'10'}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
put = self.client.put(path, '{test:body}', content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEquals(put.content, 'activityId param for activity profile with value foo was not a valid URI')
def test_current_tetris(self):
params = {"profileId":"profile:highscores","activityId":"act:adlnet.gov/JsTetris_TCAPI"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
put = self.client.put(path, '[{"actor":{"name":"tom","mbox":"mailto:tom@tom.com"},"score":802335,"date":"2013-07-26T13:42:13.465Z"},{"actor":{"name":"tom","mbox":"mailto:tom@tom.com"},"score":159482,"date":"2013-07-26T13:49:14.011Z"},{"actor":{"name":"lou","mbox":"mailto:l@l.com"},"score":86690,"date":"2013-07-26T13:27:29.083Z"},{"actor":{"name":"tom","mbox":"mailto:tom@tom.com"},"score":15504,"date":"2013-07-26T13:27:30.763Z"},{"actor":{"name":"tom","mbox":"mailto:tom@tom.com"},"score":1982,"date":"2013-07-26T13:29:46.067Z"},{"actor":{"name":"unknown","mbox":"mailto:unknown@example.com"},"score":348,"date":"2013-07-26T13:51:08.043Z"}]', content_type="application/json", Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(put.status_code, 204)
theget = self.client.get(path, Authorization=self.auth, X_Experience_API_Version="1.0")
self.assertEqual(theget['ETag'], '"d4827d99a5cc3510d3847baa341ba5a3b477fdfc"')
def test_json_merge(self):
prof = '{"test": { "goal": "ensure proper json parse", "attempt": 1, "result": null } }'
params = {"profileId": "prof:test_json_merge", "activityId": "act:test.json.merge.prof"}
path = '%s?%s' % (reverse(views.activity_profile), urllib.urlencode(params))
post = self.client.post(path, prof, content_type="application/json", If_None_Match='*', Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
returned = json.loads(get.content)
sent = json.loads(prof)
self.assertEqual(returned['test']['goal'], sent['test']['goal'])
self.assertEqual(returned['test']['attempt'], sent['test']['attempt'])
self.assertEqual(returned['test']['result'], sent['test']['result'])
etag = '"%s"' % hashlib.sha1(get.content).hexdigest()
self.assertEqual(get.get('etag'), etag)
sent['test']['result'] = True
sent['test']['attempt'] = sent['test']['attempt'] + 1
prof = json.dumps(sent)
post = self.client.post(path, prof, content_type="application/json", If_Match=etag, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(post.status_code, 204)
get = self.client.get(path, Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
self.assertEqual(get.status_code, 200)
returned = json.loads(get.content)
sent = json.loads(prof)
self.assertEqual(returned['test']['goal'], sent['test']['goal'])
self.assertEqual(returned['test']['attempt'], sent['test']['attempt'])
self.assertEqual(returned['test']['result'], sent['test']['result'])
etag = '"%s"' % hashlib.sha1(get.content).hexdigest()
self.assertEqual(get.get('etag'), etag)
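# A note on the ETag convention exercised throughout the tests above
# (inferred from the repeated assertions; this helper is hypothetical and
# not used by the LRS code itself): the server is expected to return a
# quoted SHA-1 hex digest of the stored document body.
def expected_etag(content):
    """Hypothetical helper mirroring the '"<sha1-hexdigest>"' pattern above."""
    return '"%s"' % hashlib.sha1(content).hexdigest()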
|
{
"content_hash": "c1e8cc5e08270df96253873ce2055a88",
"timestamp": "",
"source": "github",
"line_count": 492,
"max_line_length": 759,
"avg_line_length": 68.15447154471545,
"alnum_prop": 0.6772635094834785,
"repo_name": "diagonalwalnut/Experience",
"id": "040bda4faaf74a4f5b89d4a621131687105f3434",
"size": "33532",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lrs/tests/ActivityProfileTests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5459"
},
{
"name": "HTML",
"bytes": "40109"
},
{
"name": "Python",
"bytes": "1327153"
}
],
"symlink_target": ""
}
|
"""
==============================
The :mod:`array_split` Package
==============================
.. currentmodule:: array_split
Python package for splitting a :obj:`numpy.ndarray` (or just an array shape)
into a number of sub-arrays.
The two main functions are:
:func:`array_split.array_split`
Similar to :func:`numpy.array_split`, returns a list of
sub-array *views* of the input :obj:`numpy.ndarray`.
Can split along multiple axes and has more splitting
criteria (parameters) than :func:`numpy.array_split`.
:func:`array_split.shape_split`
    Instead of taking an :obj:`numpy.ndarray` as an argument, it
takes the array *shape* and returns tuples of :obj:`slice`
objects which indicate the extents of the sub-arrays.
These two functions use an instance of the :obj:`array_split.ShapeSplitter` class
which contains the bulk of the *split* implementation.
Instances of :obj:`array_split.ShapeSplitter` also
maintain state related to the computed split.
Splitting of multi-dimensional arrays can be performed according to several criteria:
* Per-axis indices indicating the *cut* positions.
* Per-axis number of sub-arrays.
* Total number of sub-arrays (with optional per-axis *number of sections* constraints).
* Specific sub-array shape.
* Specification of *halo* (*ghost*) elements for sub-arrays.
* Arbitrary *start index* for the shape to be partitioned.
* Maximum number of bytes for a sub-array with constraints:
- sub-arrays are an even multiple of a specified sub-tile shape
- upper limit on the per-axis sub-array shape
The usage documentation is given in the :ref:`array_split-examples` section.
Classes and Functions
=====================
.. autosummary::
:toctree: generated/
shape_split - Splits a shape and returns :obj:`numpy.ndarray` of :obj:`slice` elements.
array_split - Equivalent to :func:`numpy.array_split`.
ShapeSplitter - Array shape splitting class.
Attributes
==========
.. autodata:: ARRAY_BOUNDS
.. autodata:: NO_BOUNDS
"""
from __future__ import absolute_import
from .license import license as _license, copyright as _copyright, version as _version
from . import split # noqa: E402,F401
from .split import array_split, shape_split, ShapeSplitter # noqa: E402,F401
__author__ = "Shane J. Latham"
__license__ = _license()
__copyright__ = _copyright()
__version__ = _version()
#: See :data:`array_split.split.ARRAY_BOUNDS`
ARRAY_BOUNDS = split.ARRAY_BOUNDS
#: See :data:`array_split.split.NO_BOUNDS`
NO_BOUNDS = split.NO_BOUNDS
__all__ = [s for s in dir() if not s.startswith('_')]
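# A minimal usage sketch of the two entry points documented above. It is
# kept in comments so that importing this package stays side-effect free;
# the `axis` keyword (per-axis number of sections) follows the package
# docs and should be treated as an assumption here:
#
#     import numpy as np
#     from array_split import array_split, shape_split
#
#     ary = np.arange(4 * 6).reshape(4, 6)
#     sub_arys = array_split(ary, axis=[2, 2])      # list of 4 sub-array views
#     slices = shape_split(ary.shape, axis=[2, 2])  # array of slice tuples
#     assert np.all(ary[slices[0, 0]] == sub_arys[0])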
|
{
"content_hash": "a4806d6a508c48d0ab97ce833d29e41d",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 90,
"avg_line_length": 32.8625,
"alnum_prop": 0.6899961962723469,
"repo_name": "array-split/array_split",
"id": "9bed6513d4d3c98ecec372922e7ba360eb599056",
"size": "2629",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "array_split/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "148484"
}
],
"symlink_target": ""
}
|
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class CosOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsCosOptions(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = CosOptions()
x.Init(buf, n + offset)
return x
@classmethod
def CosOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
# CosOptions
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
def CosOptionsStart(builder): builder.StartObject(0)
def CosOptionsEnd(builder): return builder.EndObject()
class CosOptionsT(object):
# CosOptionsT
def __init__(self):
pass
@classmethod
def InitFromBuf(cls, buf, pos):
cosOptions = CosOptions()
cosOptions.Init(buf, pos)
return cls.InitFromObj(cosOptions)
@classmethod
def InitFromObj(cls, cosOptions):
x = CosOptionsT()
x._UnPack(cosOptions)
return x
# CosOptionsT
def _UnPack(self, cosOptions):
if cosOptions is None:
return
# CosOptionsT
def Pack(self, builder):
CosOptionsStart(builder)
cosOptions = CosOptionsEnd(builder)
return cosOptions
|
{
"content_hash": "6e73066f33b1663757191800a1f998d0",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 114,
"avg_line_length": 26.037037037037038,
"alnum_prop": 0.6529160739687055,
"repo_name": "google-research/falken",
"id": "584442be62cb4eca61dc89218f39c5249c3c1fb1",
"size": "2074",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "service/generated_flatbuffers/tflite/CosOptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "27651"
},
{
"name": "C#",
"bytes": "673937"
},
{
"name": "C++",
"bytes": "1250409"
},
{
"name": "CMake",
"bytes": "133649"
},
{
"name": "Java",
"bytes": "6034"
},
{
"name": "JavaScript",
"bytes": "112279"
},
{
"name": "Objective-C++",
"bytes": "4177"
},
{
"name": "Python",
"bytes": "1666229"
},
{
"name": "SWIG",
"bytes": "27937"
},
{
"name": "ShaderLab",
"bytes": "1473"
},
{
"name": "Shell",
"bytes": "8257"
}
],
"symlink_target": ""
}
|
import numpy as np
import sys
from collections import defaultdict, namedtuple
from operator import itemgetter
def parse_proj(scores, gold=None):
'''
Parse using Eisner's algorithm.
'''
nr, nc = np.shape(scores)
if nr != nc:
        raise ValueError("scores must be a square matrix with nw+1 rows")
N = nr - 1 # Number of words (excluding root).
# Initialize CKY table.
complete = np.zeros([N+1, N+1, 2]) # s, t, direction (right=1).
incomplete = np.zeros([N+1, N+1, 2]) # s, t, direction (right=1).
complete_backtrack = -np.ones([N+1, N+1, 2], dtype=int) # s, t, direction (right=1).
incomplete_backtrack = -np.ones([N+1, N+1, 2], dtype=int) # s, t, direction (right=1).
incomplete[0, :, 0] -= np.inf
# Loop from smaller items to larger items.
for k in xrange(1,N+1):
for s in xrange(N-k+1):
t = s+k
# First, create incomplete items.
# left tree
incomplete_vals0 = complete[s, s:t, 1] + complete[(s+1):(t+1), t, 0] + scores[t, s] + (0.0 if gold is not None and gold[s]==t else 1.0)
incomplete[s, t, 0] = np.max(incomplete_vals0)
incomplete_backtrack[s, t, 0] = s + np.argmax(incomplete_vals0)
# right tree
incomplete_vals1 = complete[s, s:t, 1] + complete[(s+1):(t+1), t, 0] + scores[s, t] + (0.0 if gold is not None and gold[t]==s else 1.0)
incomplete[s, t, 1] = np.max(incomplete_vals1)
incomplete_backtrack[s, t, 1] = s + np.argmax(incomplete_vals1)
# Second, create complete items.
# left tree
complete_vals0 = complete[s, s:t, 0] + incomplete[s:t, t, 0]
complete[s, t, 0] = np.max(complete_vals0)
complete_backtrack[s, t, 0] = s + np.argmax(complete_vals0)
# right tree
complete_vals1 = incomplete[s, (s+1):(t+1), 1] + complete[(s+1):(t+1), t, 1]
complete[s, t, 1] = np.max(complete_vals1)
complete_backtrack[s, t, 1] = s + 1 + np.argmax(complete_vals1)
    value = complete[0][N][1]  # score of the best (cost-augmented) parse; kept for inspection
    heads = [-1 for _ in range(N+1)]  # heads[0] stays -1: the root has no head
    backtrack_eisner(incomplete_backtrack, complete_backtrack, 0, N, 1, 1, heads)
    value_proj = 0.0  # raw score of the recovered projective tree (for debugging)
    for m in xrange(1, N+1):
        h = heads[m]
        value_proj += scores[h, m]
    return heads
def backtrack_eisner(incomplete_backtrack, complete_backtrack, s, t, direction, complete, heads):
'''
Backtracking step in Eisner's algorithm.
- incomplete_backtrack is a (NW+1)-by-(NW+1) numpy array indexed by a start position,
an end position, and a direction flag (0 means left, 1 means right). This array contains
the arg-maxes of each step in the Eisner algorithm when building *incomplete* spans.
- complete_backtrack is a (NW+1)-by-(NW+1) numpy array indexed by a start position,
an end position, and a direction flag (0 means left, 1 means right). This array contains
the arg-maxes of each step in the Eisner algorithm when building *complete* spans.
- s is the current start of the span
- t is the current end of the span
- direction is 0 (left attachment) or 1 (right attachment)
- complete is 1 if the current span is complete, and 0 otherwise
    - heads is a (NW+1)-sized list (or numpy array) of integers which is a placeholder
    for storing the head of each word.
'''
if s == t:
return
if complete:
r = complete_backtrack[s][t][direction]
if direction == 0:
backtrack_eisner(incomplete_backtrack, complete_backtrack, s, r, 0, 1, heads)
backtrack_eisner(incomplete_backtrack, complete_backtrack, r, t, 0, 0, heads)
return
else:
backtrack_eisner(incomplete_backtrack, complete_backtrack, s, r, 1, 0, heads)
backtrack_eisner(incomplete_backtrack, complete_backtrack, r, t, 1, 1, heads)
return
else:
r = incomplete_backtrack[s][t][direction]
if direction == 0:
heads[s] = t
backtrack_eisner(incomplete_backtrack, complete_backtrack, s, r, 1, 1, heads)
backtrack_eisner(incomplete_backtrack, complete_backtrack, r+1, t, 0, 1, heads)
return
else:
heads[t] = s
backtrack_eisner(incomplete_backtrack, complete_backtrack, s, r, 1, 1, heads)
backtrack_eisner(incomplete_backtrack, complete_backtrack, r+1, t, 0, 1, heads)
return
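# Minimal usage sketch (not part of the original module): scores[h, m] is
# the weight of the arc head h -> modifier m, with index 0 reserved for the
# artificial root, so a 3-word sentence needs a 4x4 matrix. heads[0] stays
# -1 because the root has no head.
if __name__ == '__main__':
    np.random.seed(0)
    demo_scores = np.random.rand(4, 4)  # root + 3 words
    print(parse_proj(demo_scores))      # e.g. [-1, h1, h2, h3]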
|
{
"content_hash": "49bb3b3c8fc35056ea7f222c9b843e7d",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 147,
"avg_line_length": 44.11764705882353,
"alnum_prop": 0.6015555555555555,
"repo_name": "elikip/bist-parser",
"id": "96205e8e0b6ef52adbeaa2354b4eb9c986001d7d",
"size": "4669",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bmstparser/src/decoder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "99308"
},
{
"name": "Python",
"bytes": "106226"
}
],
"symlink_target": ""
}
|
""" Generate a fake catalog of open cluster stars. """
# Assumptions:
# (0) Every cluster has a unique chemical fingerprint.
# (1) There is a underlying, single latent factor common to all abundances.
# (2) The abundances in every cluster star will have some intrinsic variance.
# (3) The abundances will have some uncertainty in their measurements, and
# uncertainty is homoscedastic.
# (4) The number of stars in each cluster can be different.
# (5) The cluster abundances are taken from some mean metallicity from the
# work of Chen et al. (2003): http://adsabs.harvard.edu/abs/2003AJ....125.1397C
import pickle
import numpy as np
from astropy.table import Table
random_seed = 1
np.random.seed(random_seed)
oc_mean_properties = Table.read("Chen-et-al-2003-AJ-125-1397-table1.fits")
# Adjustable parameters #
# --------------------- #
elements_measured = ("Fe", "Ti", "Ca", "Si")
elements_measured = "ABCDEFGHIJKLMNO"
def draw_number_of_stars_in_cluster(mean_cluster_abundance=None):
return np.random.randint(1, 250)
means = np.mean(oc_mean_properties["__Fe_H_"]) \
+ np.random.normal(0, 0.1, size=len(elements_measured))
specific_sigmas = np.clip(
np.abs(np.random.normal(0, 0.01, size=len(elements_measured))),
0.005, 0.05)
cluster_dispersion = 0.01
magnitude_of_factor_load = 0.1
# --------------------- #
K = len(elements_measured)
L = 1 # number of latent factors.
# Generate the factor_loads (a_k)
factor_loads = np.random.uniform(
-abs(magnitude_of_factor_load),
+abs(magnitude_of_factor_load),
size=(L, K))
data = []
cluster_variates = []
cluster_factor_scores = []
for i, open_cluster in enumerate(oc_mean_properties):
mean_cluster_abundance = open_cluster["__Fe_H_"]
# How many stars will we draw from this cluster.
N_cluster = draw_number_of_stars_in_cluster(mean_cluster_abundance)
# Determine the cluster fingerprint.
# factor_scores (v_n)
factor_scores = np.random.normal(0, 1.) * np.ones((N_cluster, 1))
#+ np.random.normal(1, cluster_dispersion,
# size=(N_cluster, 1))
# variates (r_{nk})
variates = np.random.normal(0, 1, size=(N_cluster, K))
stellar_abundances = means \
+ np.dot(factor_scores, factor_loads) \
+ specific_sigmas * variates
# TODO: add measurement noise.
"""
_ = ax.plot(np.mean(means + np.dot(factor_scores, factor_loads), axis=0),
lw=2)
ax.plot(stellar_abundances.T, c=_[0].get_color(), zorder=-1, alpha=0.1)
"""
# Generate rows for the faux catalog.
rows = np.vstack([
i * np.ones(N_cluster, dtype=int).T,
stellar_abundances.T
]).T
data.extend(rows)
cluster_variates.append(variates)
cluster_factor_scores.append(factor_scores)
# Generate a faux-catalog.
names = ["star_id", "cluster_id"]
names.extend(["{}_h".format(el).upper() for el in elements_measured])
# Collect the data together, and add a star_id column.
data = np.vstack(data)
N = len(data)
data = np.hstack([np.atleast_2d(np.arange(N)).T, data])
stars = Table(data, names=names)
for column_name in ("star_id", "cluster_id"):
stars[column_name] = np.array(stars[column_name], dtype=int)
stars.write("catalog.fits", overwrite=True)
# Save the generated parameters so we can compare against them later.
generated_parameters = dict(
random_seed=random_seed,
means=means,
variates=np.vstack(cluster_variates),
factor_loads=factor_loads,
factor_scores=np.vstack(cluster_factor_scores),
specific_sigmas=specific_sigmas
)
with open("generated-parameters.pkl", "wb") as fp:
pickle.dump(generated_parameters, fp, -1)
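# Optional sanity check (illustrative; not required for catalog generation):
# confirm the stored abundances reproduce the generative model
#   abundances = means + factor_scores . factor_loads + specific_sigmas * variates
# exactly, since no measurement noise has been added yet (see TODO above).
check = Table.read("catalog.fits")
abundance_columns = [n for n in check.colnames if n.endswith("_H")]
stored = np.vstack([check[n] for n in abundance_columns]).T
reconstructed = generated_parameters["means"] \
    + np.dot(generated_parameters["factor_scores"],
             generated_parameters["factor_loads"]) \
    + generated_parameters["specific_sigmas"] * generated_parameters["variates"]
assert np.allclose(stored, reconstructed)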
|
{
"content_hash": "73210932796ac83e9bd294423bc9beb5",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 83,
"avg_line_length": 27.14814814814815,
"alnum_prop": 0.6638472032742155,
"repo_name": "andycasey/snob",
"id": "2d621c13359ca71d2c76a2a5816ce617fd846edf",
"size": "3666",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "articles/chemical-tagging-gmm/experiments/slf-clustering-fake-ocs/data/generate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2904054"
},
{
"name": "MAXScript",
"bytes": "15074"
},
{
"name": "Makefile",
"bytes": "2199"
},
{
"name": "Python",
"bytes": "489030"
},
{
"name": "Shell",
"bytes": "2964"
},
{
"name": "TeX",
"bytes": "490057"
}
],
"symlink_target": ""
}
|
import pydoc
import os, sys
# This script generates mkdocs friendly Markdown documentation from a python package.
# It is based on the following blog post by Christian Medina
# https://medium.com/python-pandemonium/python-introspection-with-the-inspect-module-2c85d5aa5a48#.twcmlyack
# https://gist.github.com/dvirsky/30ffbd3c7d8f37d4831b30671b681c24
module_header = "# Package {} Documentation\n"
class_header = "## Class {}"
function_header = "### {}"
def getmarkdown(module):
output = [ module_header.format(module.__name__) ]
if module.__doc__:
output.append(module.__doc__)
output.extend(getclasses(module))
output.extend(getfunctions(module))
return "\n".join((str(x) for x in output))
def getclasses(item, depth=0):
output = []
for cl in pydoc.inspect.getmembers(item, pydoc.inspect.isclass):
# Make sure we are only getting classes in this file
if depth == 0:
if item.__name__ != cl[1].__module__:
continue
        # Ignore bogus stuff; anything that starts with an underscore is
        # considered private and is not documented.
        if cl[0] == "__class__" or cl[0].startswith("_"):
            continue
output.append(class_header.format(cl[0]))
# Get the docstring
output.append(pydoc.inspect.getdoc(cl[1]))
# Get the functions
output.extend(getfunctions(cl[1]))
# Recurse into any subclasses
output.extend(getclasses(cl[1], depth+1))
output.append('\n')
return output
def getfunctions(item):
output = []
at_end = []
for func in pydoc.inspect.getmembers(item, pydoc.inspect.isfunction):
out = output
if func[0].startswith('_') and func[0] != '__init__':
out = at_end
out.append(function_header.format(func[0].replace('_', '\\_')))
# Get the signature
out.append ('```py\n')
out.append('def {}{}\n'.format(func[0], pydoc.inspect.formatargspec(*pydoc.inspect.getfullargspec(func[1]))))
out.append ('```\n')
# get the docstring
if pydoc.inspect.getdoc(func[1]):
out.append('\n')
out.append(pydoc.inspect.getdoc(func[1]))
out.append('\n')
return output + at_end
def generatedocs(module, filename):
try:
sys.path.insert(0, os.getcwd() + '/..')
# Attempt import
mod = pydoc.safeimport(module)
        if mod is None:
            print("Module not found")
            return
# Module imported correctly, let's create the docs
with open(filename, 'w') as f:
f.write(getmarkdown(mod))
except pydoc.ErrorDuringImport as e:
print("Error while trying to import " + module)
# if __name__ == '__main__':
generatedocs('tockloader.main', 'main.md')
generatedocs('tockloader.tockloader', 'tockloader.md')
generatedocs('tockloader.board_interface', 'board_interface.md')
generatedocs('tockloader.bootloader_serial', 'bootloader_serial.md')
generatedocs('tockloader.jlinkexe', 'jlinkexe.md')
generatedocs('tockloader.openocd', 'openocd.md')
generatedocs('tockloader.app', 'app.md')
generatedocs('tockloader.tab', 'tab.md')
generatedocs('tockloader.app_installed', 'app_installed.md')
generatedocs('tockloader.app_tab', 'app_tab.md')
generatedocs('tockloader.app_padding', 'app_padding.md')
generatedocs('tockloader.tbfh', 'tbfh.md')
generatedocs('tockloader.exceptions', 'exceptions.md')
generatedocs('tockloader.helpers', 'helpers.md')
# Make index from readme
with open('../README.md') as infile:
with open('index.md', 'w') as outfile:
outfile.write(infile.read())
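# Usage sketch (an assumption based on the relative paths above): run this
# script from the docs/ directory of the repository, e.g.
#
#     cd docs && python generate_docs.py
#
# so that sys.path picks up the package from '..' and '../README.md'
# resolves for the index page.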
|
{
"content_hash": "295cb1ec0b35205f2e817bf5dee5ce4d",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 117,
"avg_line_length": 34.056603773584904,
"alnum_prop": 0.6426592797783933,
"repo_name": "helena-project/tockloader",
"id": "4e919795861efeff26a01124120b79a3613810bb",
"size": "3634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/generate_docs.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "88367"
}
],
"symlink_target": ""
}
|
from beritest_tools import BaseBERITestCase
class test_raw_bne_lt_back(BaseBERITestCase):
def test_before_bne(self):
self.assertRegisterEqual(self.MIPS.a0, 1, "instruction before backward bne missed")
def test_bne_branch_delay(self):
self.assertRegisterEqual(self.MIPS.a1, 2, "instruction in branch-delay slot missed")
def test_bne_skipped(self):
self.assertRegisterNotEqual(self.MIPS.a2, 3, "branch didn't happen")
def test_bne_target(self):
self.assertRegisterEqual(self.MIPS.a3, 4, "instruction at branch target didn't run")
|
{
"content_hash": "538139c5a87ddb64d22f80082ac89389",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 92,
"avg_line_length": 41.42857142857143,
"alnum_prop": 0.7224137931034482,
"repo_name": "8l/beri",
"id": "573ea48b1c9ec5968638281c96c0a0743d11b5bb",
"size": "1720",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cheritest/trunk/tests/branch/test_raw_bne_lt_back.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1629022"
},
{
"name": "Bluespec",
"bytes": "2336405"
},
{
"name": "C",
"bytes": "1058899"
},
{
"name": "C++",
"bytes": "1864"
},
{
"name": "Groff",
"bytes": "14381"
},
{
"name": "Haskell",
"bytes": "11711"
},
{
"name": "Lex",
"bytes": "2894"
},
{
"name": "Makefile",
"bytes": "242450"
},
{
"name": "Mathematica",
"bytes": "291"
},
{
"name": "Objective-C",
"bytes": "2387"
},
{
"name": "OpenEdge ABL",
"bytes": "568"
},
{
"name": "Perl",
"bytes": "19159"
},
{
"name": "Python",
"bytes": "1491002"
},
{
"name": "Shell",
"bytes": "91130"
},
{
"name": "SystemVerilog",
"bytes": "12058"
},
{
"name": "Tcl",
"bytes": "132818"
},
{
"name": "TeX",
"bytes": "4996"
},
{
"name": "Verilog",
"bytes": "125674"
},
{
"name": "Yacc",
"bytes": "5871"
}
],
"symlink_target": ""
}
|
""" List and set StaSh configuration options """
import sys
import argparse
_stash = globals()['_stash']
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('name', nargs='?', help='variable name')
ap.add_argument('value', nargs='?', type=int, help='variable value')
ap.add_argument('-l', '--list', action='store_true',
help='list all config variables and their values')
ns = ap.parse_args(args)
config = {
'py_traceback': _stash.runtime,
'py_pdb': _stash.runtime,
'input_encoding_utf8': _stash.runtime,
'ipython_style_history_search': _stash.runtime,
}
if ns.list:
for name in sorted(config.keys()):
print '%s=%s' % (name, config[name].__dict__[name])
else:
try:
if ns.name is not None and ns.value is not None:
config[ns.name].__dict__[ns.name] = ns.value
elif ns.name is not None:
print '%s=%s' % (ns.name, config[ns.name].__dict__[ns.name])
else:
ap.print_help()
except KeyError:
print '%s: invalid config option name' % ns.name
sys.exit(1)
if __name__ == '__main__':
main(sys.argv[1:])
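# Usage sketch (illustrative; option names come from the config dict above):
#
#     stashconf -l              # list every option and its value
#     stashconf py_traceback    # print one option's current value
#     stashconf py_traceback 1  # set an option (values are integers)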
|
{
"content_hash": "da9820e5f70e86817aaaf23a03f44af9",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 76,
"avg_line_length": 29.69047619047619,
"alnum_prop": 0.5445068163592622,
"repo_name": "jsbain/stash",
"id": "e57a12bcec5c4a5e7748c784553310f187f052ad",
"size": "1247",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bin/stashconf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "402187"
},
{
"name": "Shell",
"bytes": "1648"
}
],
"symlink_target": ""
}
|
from django.conf.urls import *
from corehq.apps.styleguide.views import *
urlpatterns = patterns('corehq.apps.styleguide.views',
url(r'^$', MainStyleGuideView.as_view(), name=MainStyleGuideView.urlname),
url(r'^forms/$', FormsStyleGuideView.as_view(), name=FormsStyleGuideView.urlname),
)
|
{
"content_hash": "03ad38466c32a97836c520d6e374975e",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 86,
"avg_line_length": 37.375,
"alnum_prop": 0.7491638795986622,
"repo_name": "SEL-Columbia/commcare-hq",
"id": "ec41e9d2db9c6ca4a1d1e877e4d65f6ee95fcf52",
"size": "299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/styleguide/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "768322"
},
{
"name": "JavaScript",
"bytes": "2647080"
},
{
"name": "Python",
"bytes": "7806659"
},
{
"name": "Shell",
"bytes": "28569"
}
],
"symlink_target": ""
}
|
import argparse
import tempfile
import os
import shutil
import serial
import serial.tools.list_ports as list_ports
import sys
import unittest
from kubos import init, target, build, flash, clean
from kubos.test.utils import get_arg_list, KubosTestCase
class SDKIntegrationTest(KubosTestCase):
uart_read_size = 8
expected_output = 'echo, x='
project_name = 'project-test'
init_args = argparse.Namespace(proj_name=[project_name],
subcommand_name='init')
build_args = argparse.Namespace(subcommand_name='build')
flash_args = argparse.Namespace(subcommand_name='flash')
clean_args = argparse.Namespace(subcommand_name='clean')
def setUp(self):
self.base_dir = os.getcwd()
self.test_dir = os.path.join(self.base_dir, self.project_name)
os.chdir(self.base_dir)
    def test_integration(self):
        # Loop variable named hw_target so it does not shadow the imported
        # `target` module, which run_build() below relies on.
        for hw_target in self.target_list:
            '''
            This is where the specific usb ports would be powered on and any
            other hardware set up for our specific CI config would happen.
            '''
            self.run_build(hw_target)
def run_build(self, hw_target):
target_args = argparse.Namespace(subcommand_name='target',
target=hw_target)
init.execCommand(self.init_args, [])
os.chdir(self.test_dir)
target.execCommand(target_args, [])
build.execCommand(self.build_args, [])
flash.execCommand(self.flash_args, [])
if not self.ignore_uart:
output = self.get_uart_output()
self.assertEqual(self.expected_output, output)
clean.execCommand(self.clean_args, [])
def get_uart_output(self):
port = self.get_port()
uart = serial.Serial(port=port, baudrate=115200)
return uart.read(self.uart_read_size)
def get_port(self):
devs = list_ports.comports()
for dev in devs:
if 'usb' in dev.device:
return dev.device
        print "No Serial device found. Are you sure it's connected?"
sys.exit(1)
def tearDown(self):
os.chdir(self.base_dir)
shutil.rmtree(self.test_dir)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-i', action='store_true', help='Skip checking UART output from the target board after flashing')
parser.add_argument('--targets', default='stm32f407-disco-gcc', nargs='*', help='enter a list of targets to run the test against')
args = parser.parse_args()
arg_dict = vars(args)
target_list = arg_dict['targets']
if type(target_list) is str:
target_list = [target_list]
SDKIntegrationTest.target_list = target_list
if arg_dict['i']:
SDKIntegrationTest.ignore_uart = True
else:
SDKIntegrationTest.ignore_uart = False
sys.argv[1:] = list() #unittest is fussy about having extra command line arguments
unittest.main()
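# Illustrative invocations (only the default target name below is taken from
# this file; any other target is an assumption about the local hardware):
#   python test.py                                   # default stm32f407-disco-gcc
#   python test.py -i --targets stm32f407-disco-gcc  # skip the UART check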
|
{
"content_hash": "d40c55211d5a425d5c6b1e5ce48319e9",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 134,
"avg_line_length": 33.51685393258427,
"alnum_prop": 0.6325846463291988,
"repo_name": "kyleparrott/kubos-sdk",
"id": "a10a1f759912f8c9c30cbb85a08ef7ff26da5bc1",
"size": "2998",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubos/test/integration/test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "91637"
},
{
"name": "Shell",
"bytes": "1124"
},
{
"name": "Tcl",
"bytes": "1687"
}
],
"symlink_target": ""
}
|
from __future__ import braces
|
{
"content_hash": "2aebb3fc8ecfa8dea10fb36085127e10",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 29,
"avg_line_length": 30,
"alnum_prop": 0.7333333333333333,
"repo_name": "iamwrm/coding",
"id": "1b01e42e4b213337d89b6b90d55dfdb1d8ea7642",
"size": "30",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/t2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "163687"
},
{
"name": "C++",
"bytes": "78151"
},
{
"name": "Dockerfile",
"bytes": "427"
},
{
"name": "Emacs Lisp",
"bytes": "18535"
},
{
"name": "JavaScript",
"bytes": "381"
},
{
"name": "Limbo",
"bytes": "38"
},
{
"name": "M",
"bytes": "777"
},
{
"name": "Makefile",
"bytes": "133"
},
{
"name": "Matlab",
"bytes": "70101"
},
{
"name": "Mercury",
"bytes": "4"
},
{
"name": "Python",
"bytes": "8628"
},
{
"name": "Shell",
"bytes": "648"
},
{
"name": "TeX",
"bytes": "28087"
}
],
"symlink_target": ""
}
|
"""
This module contains a CloudTasksHook,
which allows you to connect to the Google Cloud Tasks service
and perform operations on queues and tasks.
"""
from __future__ import annotations
from typing import Sequence
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
from google.api_core.retry import Retry
from google.cloud.tasks_v2 import CloudTasksClient
from google.cloud.tasks_v2.types import Queue, Task
from google.protobuf.field_mask_pb2 import FieldMask
from airflow.exceptions import AirflowException
from airflow.providers.google.common.consts import CLIENT_INFO
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
class CloudTasksHook(GoogleBaseHook):
"""
Hook for Google Cloud Tasks APIs. Cloud Tasks allows developers to manage
the execution of background work in their applications.
All the methods in the hook where project_id is used must be called with
keyword arguments rather than positional.
:param gcp_conn_id: The connection ID to use when fetching connection info.
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account.
"""
def __init__(
self,
gcp_conn_id: str = "google_cloud_default",
delegate_to: str | None = None,
impersonation_chain: str | Sequence[str] | None = None,
) -> None:
super().__init__(
gcp_conn_id=gcp_conn_id,
delegate_to=delegate_to,
impersonation_chain=impersonation_chain,
)
self._client: CloudTasksClient | None = None
def get_conn(self) -> CloudTasksClient:
"""
Provides a client for interacting with the Google Cloud Tasks API.
:return: Google Cloud Tasks API Client
"""
if self._client is None:
self._client = CloudTasksClient(credentials=self.get_credentials(), client_info=CLIENT_INFO)
return self._client
@GoogleBaseHook.fallback_to_default_project_id
def create_queue(
self,
location: str,
task_queue: dict | Queue,
project_id: str = PROVIDE_PROJECT_ID,
queue_name: str | None = None,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Queue:
"""
Creates a queue in Cloud Tasks.
:param location: The location name in which the queue will be created.
:param task_queue: The task queue to create.
Queue's name cannot be the same as an existing queue.
If a dict is provided, it must be of the same form as the protobuf message Queue.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param queue_name: (Optional) The queue's name.
If provided, it will be used to construct the full queue path.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
if queue_name:
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
if isinstance(task_queue, Queue):
task_queue.name = full_queue_name
elif isinstance(task_queue, dict):
task_queue["name"] = full_queue_name
else:
raise AirflowException("Unable to set queue_name.")
full_location_path = f"projects/{project_id}/locations/{location}"
return client.create_queue(
request={"parent": full_location_path, "queue": task_queue},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def update_queue(
self,
task_queue: Queue,
project_id: str = PROVIDE_PROJECT_ID,
location: str | None = None,
queue_name: str | None = None,
update_mask: FieldMask | None = None,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Queue:
"""
Updates a queue in Cloud Tasks.
:param task_queue: The task queue to update.
This method creates the queue if it does not exist and updates the queue if
it does exist. The queue's name must be specified.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param location: (Optional) The location name in which the queue will be updated.
If provided, it will be used to construct the full queue path.
:param queue_name: (Optional) The queue's name.
If provided, it will be used to construct the full queue path.
        :param update_mask: A mask used to specify which fields of the queue are being updated.
If empty, then all fields will be updated.
If a dict is provided, it must be of the same form as the protobuf message.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
if queue_name and location:
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
if isinstance(task_queue, Queue):
task_queue.name = full_queue_name
elif isinstance(task_queue, dict):
task_queue["name"] = full_queue_name
else:
raise AirflowException("Unable to set queue_name.")
return client.update_queue(
request={"queue": task_queue, "update_mask": update_mask},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def get_queue(
self,
location: str,
queue_name: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Queue:
"""
Gets a queue from Cloud Tasks.
:param location: The location name in which the queue was created.
:param queue_name: The queue's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
return client.get_queue(
request={"name": full_queue_name},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def list_queues(
self,
location: str,
project_id: str = PROVIDE_PROJECT_ID,
results_filter: str | None = None,
page_size: int | None = None,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> list[Queue]:
"""
Lists queues from Cloud Tasks.
:param location: The location name in which the queues were created.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param results_filter: (Optional) Filter used to specify a subset of queues.
:param page_size: (Optional) The maximum number of resources contained in the
underlying API response.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_location_path = f"projects/{project_id}/locations/{location}"
queues = client.list_queues(
request={"parent": full_location_path, "filter": results_filter, "page_size": page_size},
retry=retry,
timeout=timeout,
metadata=metadata,
)
return list(queues)
@GoogleBaseHook.fallback_to_default_project_id
def delete_queue(
self,
location: str,
queue_name: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> None:
"""
Deletes a queue from Cloud Tasks, even if it has tasks in it.
:param location: The location name in which the queue will be deleted.
:param queue_name: The queue's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
client.delete_queue(
request={"name": full_queue_name},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def purge_queue(
self,
location: str,
queue_name: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Queue:
"""
Purges a queue by deleting all of its tasks from Cloud Tasks.
:param location: The location name in which the queue will be purged.
:param queue_name: The queue's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
return client.purge_queue(
request={"name": full_queue_name},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def pause_queue(
self,
location: str,
queue_name: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Queue:
"""
Pauses a queue in Cloud Tasks.
:param location: The location name in which the queue will be paused.
:param queue_name: The queue's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
return client.pause_queue(
request={"name": full_queue_name},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def resume_queue(
self,
location: str,
queue_name: str,
project_id: str = PROVIDE_PROJECT_ID,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Queue:
"""
Resumes a queue in Cloud Tasks.
:param location: The location name in which the queue will be resumed.
:param queue_name: The queue's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
return client.resume_queue(
request={"name": full_queue_name},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def create_task(
self,
location: str,
queue_name: str,
task: dict | Task,
project_id: str = PROVIDE_PROJECT_ID,
task_name: str | None = None,
response_view: Task.View | None = None,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Task:
"""
Creates a task in Cloud Tasks.
:param location: The location name in which the task will be created.
:param queue_name: The queue's name.
:param task: The task to add.
If a dict is provided, it must be of the same form as the protobuf message Task.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param task_name: (Optional) The task's name.
If provided, it will be used to construct the full task path.
:param response_view: (Optional) This field specifies which subset of the Task will
be returned.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
if task_name:
full_task_name = (
f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
)
if isinstance(task, Task):
task.name = full_task_name
elif isinstance(task, dict):
task["name"] = full_task_name
else:
raise AirflowException("Unable to set task_name.")
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
return client.create_task(
request={"parent": full_queue_name, "task": task, "response_view": response_view},
retry=retry,
timeout=timeout,
metadata=metadata,
)
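    # Illustrative task payload for create_task (an assumption for exposition;
    # the field names follow the google.cloud.tasks_v2 Task message, and the
    # URL and names below are placeholders):
    #
    #     task = {
    #         "http_request": {
    #             "http_method": "POST",
    #             "url": "https://example.com/handler",
    #             "body": b"hello",
    #         }
    #     }
    #     hook.create_task(location="us-central1", queue_name="example-queue",
    #                      task=task, project_id="example-project")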
@GoogleBaseHook.fallback_to_default_project_id
def get_task(
self,
location: str,
queue_name: str,
task_name: str,
project_id: str = PROVIDE_PROJECT_ID,
response_view: Task.View | None = None,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Task:
"""
Gets a task from Cloud Tasks.
:param location: The location name in which the task was created.
:param queue_name: The queue's name.
:param task_name: The task's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param response_view: (Optional) This field specifies which subset of the Task will
be returned.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
return client.get_task(
request={"name": full_task_name, "response_view": response_view},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def list_tasks(
self,
location: str,
queue_name: str,
        project_id: str = PROVIDE_PROJECT_ID,
response_view: Task.View | None = None,
page_size: int | None = None,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> list[Task]:
"""
Lists the tasks in Cloud Tasks.
:param location: The location name in which the tasks were created.
:param queue_name: The queue's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param response_view: (Optional) This field specifies which subset of the Task will
be returned.
:param page_size: (Optional) The maximum number of resources contained in the
underlying API response.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
tasks = client.list_tasks(
request={"parent": full_queue_name, "response_view": response_view, "page_size": page_size},
retry=retry,
timeout=timeout,
metadata=metadata,
)
return list(tasks)
@GoogleBaseHook.fallback_to_default_project_id
def delete_task(
self,
location: str,
queue_name: str,
task_name: str,
        project_id: str = PROVIDE_PROJECT_ID,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> None:
"""
Deletes a task from Cloud Tasks.
:param location: The location name in which the task will be deleted.
:param queue_name: The queue's name.
:param task_name: The task's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
client.delete_task(
request={"name": full_task_name},
retry=retry,
timeout=timeout,
metadata=metadata,
)
@GoogleBaseHook.fallback_to_default_project_id
def run_task(
self,
location: str,
queue_name: str,
task_name: str,
        project_id: str = PROVIDE_PROJECT_ID,
response_view: Task.View | None = None,
retry: Retry | _MethodDefault = DEFAULT,
timeout: float | None = None,
metadata: Sequence[tuple[str, str]] = (),
) -> Task:
"""
        Forces a task to run in Cloud Tasks.
:param location: The location name in which the task was created.
:param queue_name: The queue's name.
:param task_name: The task's name.
:param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:param response_view: (Optional) This field specifies which subset of the Task will
be returned.
:param retry: (Optional) A retry object used to retry requests.
If None is specified, requests will not be retried.
:param timeout: (Optional) The amount of time, in seconds, to wait for the request
to complete. Note that if retry is specified, the timeout applies to each
individual attempt.
:param metadata: (Optional) Additional metadata that is provided to the method.
"""
client = self.get_conn()
full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}"
return client.run_task(
request={"name": full_task_name, "response_view": response_view},
retry=retry,
timeout=timeout,
metadata=metadata,
)
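if __name__ == "__main__":
    # Illustrative usage sketch, not a definitive recipe: it assumes a
    # configured "google_cloud_default" Airflow connection with Cloud Tasks
    # permissions, and the project/location/queue names are placeholders.
    hook = CloudTasksHook()
    created = hook.create_queue(
        location="us-central1",
        task_queue=Queue(),
        project_id="example-project",
        queue_name="example-queue",
    )
    print(created.name)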
|
{
"content_hash": "7754e5aaaa5726917fabd4beedec3ff7",
"timestamp": "",
"source": "github",
"line_count": 577,
"max_line_length": 108,
"avg_line_length": 44.69844020797227,
"alnum_prop": 0.6244426350277229,
"repo_name": "nathanielvarona/airflow",
"id": "6d685c0506aaf6433ace48156744960fc82ffd9c",
"size": "26578",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "airflow/providers/google/cloud/hooks/tasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25980"
},
{
"name": "Dockerfile",
"bytes": "70681"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "173025"
},
{
"name": "JavaScript",
"bytes": "142848"
},
{
"name": "Jinja",
"bytes": "38895"
},
{
"name": "Jupyter Notebook",
"bytes": "5482"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "23169682"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "211967"
},
{
"name": "TypeScript",
"bytes": "484556"
}
],
"symlink_target": ""
}
|
import base64
import hashlib
import hmac
import requests
import sys
import time
import uuid
API_VERSION = "2010-12-30"
API_ENDPOINT = "https://cloudapi.atlantic.net/"
class AtlanticError(Exception):
def __init__(self, message):
self.message = message
def __str__(self):
return str(self.message)
class AtlanticBase(object):
def __init__(self, access_key, private_key):
self.api_version = API_VERSION
self.api_endpoint = API_ENDPOINT
self.access_key = access_key
self.private_key = private_key
self.format = "json"
def _generate_signature(self, time_since_epoch, random_uuid):
"""
This method generates a signature.
        All requests are required to include a signature: a base64-encoded
        HMAC-SHA256 digest computed over the request timestamp concatenated
        with a random GUID, keyed with your API private key.
"""
string_to_sign = "%s%s" % (time_since_epoch, random_uuid)
m = hmac.new(key=self.private_key, msg=string_to_sign, digestmod=hashlib.sha256)
signature = base64.b64encode(m.digest())
return signature
def request(self, params):
"""
This method creates a request and calls the Atlantic.net API.
"""
random_uuid = uuid.uuid4()
time_since_epoch = int(time.time())
signature = self._generate_signature(time_since_epoch, random_uuid)
required_params = {
"Format": self.format,
"Version": self.api_version,
"ACSAccessKeyId": self.access_key,
"Timestamp": str(time_since_epoch),
"Rndguid": str(random_uuid),
"Signature": signature
}
required_params.update(params)
response = requests.get(self.api_endpoint, params=required_params)
response_json = response.json()
if "error" in response_json:
message = response_json["error"]["message"]
raise AtlanticError(message)
return response_json
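if __name__ == "__main__":
    # Illustrative usage sketch: the key values are placeholders, and
    # "list-instances" is assumed to be a valid API Action; check the
    # Atlantic.net API docs before relying on it.
    client = AtlanticBase("YOUR_ACCESS_KEY", "YOUR_PRIVATE_KEY")
    try:
        print(client.request({"Action": "list-instances"}))
    except AtlanticError as err:
        print(err)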
|
{
"content_hash": "15f9f8c340e3c7cce372eb8fb51df3d8",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 88,
"avg_line_length": 33.61290322580645,
"alnum_prop": 0.6238003838771593,
"repo_name": "kbrebanov/atlantic-python",
"id": "41d6aa6a0380594f85887674c2cf00e70b931a17",
"size": "2084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atlantic/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7721"
}
],
"symlink_target": ""
}
|
import ffgame.classes.warrior
warrior = ffgame.classes.warrior.Warrior("test")
def test_init():
assert warrior.get_name() == "test"
|
{
"content_hash": "96e2eb5d61a7ba0d4435064f27b793bd",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 48,
"avg_line_length": 17.5,
"alnum_prop": 0.7071428571428572,
"repo_name": "FR13ND5/full-fight-game",
"id": "3dbd6b684cc3026ccf8cec3b2978c4da51163630",
"size": "140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ffgame/test/classes/test_warrior.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9523"
}
],
"symlink_target": ""
}
|
import logging
from oslo_config import cfg
from keystoneclient import _discover
from keystoneclient.auth.identity.generic import base
from keystoneclient.auth.identity import v2
from keystoneclient.auth.identity import v3
from keystoneclient import utils
LOG = logging.getLogger(__name__)
def get_options():
return [
cfg.StrOpt('user-id', help='User id'),
cfg.StrOpt('user-name', dest='username', help='Username',
deprecated_name='username'),
cfg.StrOpt('user-domain-id', help="User's domain id"),
cfg.StrOpt('user-domain-name', help="User's domain name"),
cfg.StrOpt('password', help="User's password"),
]
class Password(base.BaseGenericPlugin):
"""A common user/password authentication plugin.
:param string username: Username for authentication.
:param string user_id: User ID for authentication.
:param string password: Password for authentication.
:param string user_domain_id: User's domain ID for authentication.
:param string user_domain_name: User's domain name for authentication.
"""
@utils.positional()
def __init__(self, auth_url, username=None, user_id=None, password=None,
user_domain_id=None, user_domain_name=None, **kwargs):
super(Password, self).__init__(auth_url=auth_url, **kwargs)
self._username = username
self._user_id = user_id
self._password = password
self._user_domain_id = user_domain_id
self._user_domain_name = user_domain_name
def create_plugin(self, session, version, url, raw_status=None):
if _discover.version_match((2,), version):
if self._user_domain_id or self._user_domain_name:
# If you specify any domain parameters it won't work so quit.
return None
return v2.Password(auth_url=url,
user_id=self._user_id,
username=self._username,
password=self._password,
**self._v2_params)
elif _discover.version_match((3,), version):
return v3.Password(auth_url=url,
user_id=self._user_id,
username=self._username,
user_domain_id=self._user_domain_id,
user_domain_name=self._user_domain_name,
password=self._password,
**self._v3_params)
@classmethod
def get_options(cls):
options = super(Password, cls).get_options()
options.extend(get_options())
return options
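# Illustrative usage sketch (the endpoint URL and credentials below are
# placeholders, not values taken from this module): construct the generic
# plugin and let version discovery pick v2 or v3.
#
#     from keystoneclient import session
#     auth = Password(auth_url='http://keystone.example.com:5000',
#                     username='demo',
#                     password='secret',
#                     user_domain_name='Default')
#     sess = session.Session(auth=auth)
#     token = sess.get_token()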
|
{
"content_hash": "22a5c81307f65bb1a859075feb043f6e",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 77,
"avg_line_length": 37.541666666666664,
"alnum_prop": 0.5878653348131706,
"repo_name": "ging/python-keystoneclient",
"id": "6790fe22c3d6dbef9c1f8883ead0ef5242970fed",
"size": "3249",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keystoneclient/auth/identity/generic/password.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1480821"
},
{
"name": "Shell",
"bytes": "7148"
}
],
"symlink_target": ""
}
|
import requests
import json
import bs4
from bs4 import BeautifulSoup
import urllib2
import lxml
import os
####I will try to get a list of the relevant URLs, then 'for loop' through them so I don't have to write a different script for each debate.####
##Saving home page as text file.##
res = requests.get('http://www.presidency.ucsb.edu/debates.php')
print type(res)
print res.raise_for_status()
print len(res.text)
print (res.text[:250])
home = open('DebateTextHomePage.txt', 'wb')
for chunk in res.iter_content(1000000):
home.write(chunk)
home.close()
##Opening newly created home page text file.##
homepage = open('DebateTextHomePage.txt')
homepageHTML = bs4.BeautifulSoup(homepage.read(), "html.parser")
##Defining and executing function to retrieve all of the home page's URLs.##
urllist = []
def geturl(x):
global urllist
for link in x.find_all('a'):
urllist.append(link.get('href'))
geturl(homepageHTML)
print urllist
##Now that I have my URL list, I want to find the specific elements I need so that I can extract them.##
print len(urllist)
a = urllist.index("http://www.presidency.ucsb.edu/ws/index.php?pid=116995") #Last Democratic debate link. Output is 40.#
b = urllist.index("http://www.presidency.ucsb.edu/ws/index.php?pid=110903") #First Democratic debate link. Output is 48.#
c = urllist.index("http://www.presidency.ucsb.edu/ws/index.php?pid=115148") #Last Republican debate link. Output is 49.#
d = urllist.index("http://www.presidency.ucsb.edu/ws/index.php?pid=110757") #First Republican debate link. Output is 67.#
print a,b,c,d
## Slicing out the Democratic and Republican debate URL lists; below, each Republican debate paragraph is written to its own file.
DEMdebates2016list = urllist[a:b+1]
REPUBdebates2016list = urllist[c:d+1]
# print DEMdebates2016list
# print REPUBdebates2016list
arry = []
x = 1
for i in REPUBdebates2016list:
soup = BeautifulSoup(urllib2.urlopen(i), "lxml")
for tag in soup.find_all('p'):
arry.append(tag)
print len(arry)
print 'First'
print arry[:2]
print 'Last'
print arry[-2:]
for line in arry:
fname = "repub_%s.txt" % (x)
outpath = os.path.abspath(fname)
with open(outpath, 'w') as f:
f.write(line.text.encode('utf-8') + '\n')
x+=1
print 'Done'
|
{
"content_hash": "e95d5c5d4b6f1349fa292672e5814242",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 144,
"avg_line_length": 31.971830985915492,
"alnum_prop": 0.6925110132158591,
"repo_name": "pplatzman/RedBlue-Classifier",
"id": "c9ab6669907371d4de3092e3e7fb0f9697ecd98a",
"size": "2270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rep_parse_UPDATED.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4715195"
},
{
"name": "Python",
"bytes": "15475"
}
],
"symlink_target": ""
}
|
from abc import ABC, abstractmethod
from itertools import islice
import numpy as np
from numpy.linalg import inv
import scipy.stats as ss
class BaseLikelihood(ABC):
"""
    This is an abstract class to serve as a template for users who want to
    add new models for online Bayesian changepoint detection.
    Make sure to override the abstract methods with the desired behaviour;
    otherwise you will get an error.
    update_theta takes **kwargs so the timestep iteration (t) can be passed in.
    To use the time step, add this to your update_theta function:
timestep = kwargs['t']
"""
@abstractmethod
def pdf(self, data: np.array):
raise NotImplementedError(
"PDF is not defined. Please define in separate class to override this function."
)
@abstractmethod
def update_theta(self, data: np.array, **kwargs):
raise NotImplementedError(
"Update theta is not defined. Please define in separate class to override this function."
)
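# Illustrative example subclass (an assumption for exposition, not part of
# the original API): exponentially distributed data with a conjugate
# Gamma(alpha, beta) prior on the rate. The posterior predictive is a Lomax
# distribution, and the conjugate update is alpha + 1, beta + x.
class ExponentialGamma(BaseLikelihood):
    def __init__(self, alpha: float = 1.0, beta: float = 1.0):
        # Element 0 holds the prior; the vectors grow with each update,
        # matching the convention of the classes below.
        self.alpha0 = self.alpha = np.array([alpha])
        self.beta0 = self.beta = np.array([beta])

    def pdf(self, data: np.array):
        return ss.lomax.pdf(x=data, c=self.alpha, scale=self.beta)

    def update_theta(self, data: np.array, **kwargs):
        self.alpha = np.concatenate([self.alpha0, self.alpha + 1.0])
        self.beta = np.concatenate([self.beta0, self.beta + data])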
class MultivariateT(BaseLikelihood):
def __init__(
self,
dims: int = 1,
dof: int = 0,
kappa: int = 1,
mu: float = -1,
scale: float = -1,
):
"""
Create a new predictor using the multivariate student T distribution as the posterior predictive.
This implies a multivariate Gaussian distribution on the data, a Wishart prior on the precision,
and a Gaussian prior on the mean.
Implementation based on Haines, T.S., Gaussian Conjugate Prior Cheat Sheet.
:param dof: The degrees of freedom on the prior distribution of the precision (inverse covariance)
:param kappa: The number of observations we've already seen
:param mu: The mean of the prior distribution on the mean
:param scale: The mean of the prior distribution on the precision
:param dims: The number of variables
"""
# We default to the minimum possible degrees of freedom, which is 1 greater than the dimensionality
if dof == 0:
dof = dims + 1
# The default mean is all 0s
if mu == -1:
mu = [0] * dims
else:
mu = [mu] * dims
# The default covariance is the identity matrix. The scale is the inverse of that, which is also the identity
if scale == -1:
scale = np.identity(dims)
        else:
            # Scale the identity matrix by the supplied value; np.identity(scale)
            # would build a scale x scale matrix, which is not a valid
            # covariance scale here.
            scale = np.identity(dims) * scale
# Track time
self.t = 0
# The dimensionality of the dataset (number of variables)
self.dims = dims
# Each parameter is a vector of size 1 x t, where t is time. Therefore each vector grows with each update.
self.dof = np.array([dof])
self.kappa = np.array([kappa])
self.mu = np.array([mu])
self.scale = np.array([scale])
def pdf(self, data: np.array):
"""
        Returns the probability of the observed data under the current and historical parameters.
        Parameters:
            data - the datapoints to be evaluated (shape: 1 x D vector)
"""
self.t += 1
t_dof = self.dof - self.dims + 1
expanded = np.expand_dims((self.kappa * t_dof) / (self.kappa + 1), (1, 2))
ret = np.empty(self.t)
try:
# This can't be vectorised due to https://github.com/scipy/scipy/issues/13450
for i, (df, loc, shape) in islice(
enumerate(zip(t_dof, self.mu, inv(expanded * self.scale))), self.t
):
                ret[i] = ss.multivariate_t.pdf(x=data, df=df, loc=loc, shape=shape)
except AttributeError:
raise Exception(
"You need scipy 1.6.0 or greater to use the multivariate t distribution"
)
return ret
def update_theta(self, data: np.array, **kwargs):
"""
        Performs a Bayesian update on the prior parameters, given data.
        Parameters:
            data - the datapoints to be evaluated (shape: 1 x D vector)
"""
centered = data - self.mu
# We simultaneously update each parameter in the vector, because following figure 1c of the BOCD paper, each
# parameter for a given t, r is derived from the same parameter for t-1, r-1
# Then, we add the prior back in as the first element
self.scale = np.concatenate(
[
self.scale[:1],
inv(
inv(self.scale)
+ np.expand_dims(self.kappa / (self.kappa + 1), (1, 2))
* (np.expand_dims(centered, 2) @ np.expand_dims(centered, 1))
),
]
)
self.mu = np.concatenate(
[
self.mu[:1],
(np.expand_dims(self.kappa, 1) * self.mu + data)
/ np.expand_dims(self.kappa + 1, 1),
]
)
self.dof = np.concatenate([self.dof[:1], self.dof + 1])
self.kappa = np.concatenate([self.kappa[:1], self.kappa + 1])
class StudentT(BaseLikelihood):
def __init__(
self, alpha: float = 0.1, beta: float = 0.1, kappa: float = 1, mu: float = 0
):
"""
        Normal-Gamma conjugate model: the normal likelihood's posterior
        predictive is the Student's t distribution returned by pdf().
        https://en.wikipedia.org/wiki/Normal-gamma_distribution
        Parameters:
            alpha - alpha in the gamma distribution prior
            beta - beta in the gamma distribution prior
            mu - mean of the normal prior
            kappa - pseudo-observation count scaling the normal prior's precision
"""
self.alpha0 = self.alpha = np.array([alpha])
self.beta0 = self.beta = np.array([beta])
self.kappa0 = self.kappa = np.array([kappa])
self.mu0 = self.mu = np.array([mu])
def pdf(self, data: np.array):
"""
        Return the pdf of the posterior predictive t distribution.
        Parameters:
            data - the datapoints to be evaluated (shape: 1 x D vector)
"""
return ss.t.pdf(
x=data,
df=2 * self.alpha,
loc=self.mu,
scale=np.sqrt(self.beta * (self.kappa + 1) / (self.alpha * self.kappa)),
)
def update_theta(self, data: np.array, **kwargs):
"""
        Performs a Bayesian update on the prior parameters, given data.
        Parameters:
            data - the datapoints to be evaluated (shape: 1 x D vector)
"""
muT0 = np.concatenate(
(self.mu0, (self.kappa * self.mu + data) / (self.kappa + 1))
)
kappaT0 = np.concatenate((self.kappa0, self.kappa + 1.0))
alphaT0 = np.concatenate((self.alpha0, self.alpha + 0.5))
betaT0 = np.concatenate(
(
self.beta0,
self.beta
+ (self.kappa * (data - self.mu) ** 2) / (2.0 * (self.kappa + 1.0)),
)
)
self.mu = muT0
self.kappa = kappaT0
self.alpha = alphaT0
self.beta = betaT0
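if __name__ == "__main__":
    # Illustrative usage sketch with synthetic data: query the posterior
    # predictive before each Bayesian update, as an online changepoint
    # detector would.
    likelihood = StudentT(alpha=0.1, beta=0.01, kappa=1, mu=0)
    for t, x in enumerate(np.random.normal(size=10)):
        predictive = likelihood.pdf(np.array([x]))
        likelihood.update_theta(np.array([x]), t=t)
        print(t, predictive)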
|
{
"content_hash": "b5c395908367de0b73be451a4cd45a0b",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 117,
"avg_line_length": 36.73684210526316,
"alnum_prop": 0.5693409742120343,
"repo_name": "hildensia/bayesian_changepoint_detection",
"id": "02cab2e7f4c747d434ebddcbdeeac035798509f0",
"size": "6980",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bayesian_changepoint_detection/online_likelihoods.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "648623"
},
{
"name": "Python",
"bytes": "22673"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(
self, plotly_name="color", parent_name="histogram2dcontour.marker", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "data"),
**kwargs
)
|
{
"content_hash": "b60a48f9da9ad12055c8cf0cc3a76ff5",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 84,
"avg_line_length": 33.57142857142857,
"alnum_prop": 0.6,
"repo_name": "plotly/python-api",
"id": "3974bdfc3e7e78afa3f2d9ca4d81b0a4f6362be3",
"size": "470",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/histogram2dcontour/marker/_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
"""
Unit tests for the rebuildinitrd module
"""
import subprocess
import sys
import unittest
import mock
import moduletests.src.rebuildinitrd
try:
# Python 2.x
from cStringIO import StringIO
except ImportError:
# Python 3.x
from io import StringIO
if sys.hexversion >= 0x3040000:
# contextlib.redirect_stdout was introduced in Python 3.4
import contextlib
else:
# contextlib2 is a backport of contextlib from Python 3.5 and is compatible with Python2/3
import contextlib2 as contextlib
class Testrebuildinitrd(unittest.TestCase):
def setUp(self):
self.output = StringIO()
def tearDown(self):
self.output.close()
@mock.patch("moduletests.src.rebuildinitrd.open", mock.mock_open(read_data="/boot"))
@mock.patch("subprocess.check_output", return_value="stuff")
def test_mount_boot(self, check_output_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.mount_boot())
self.assertTrue(check_output_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.open", mock.mock_open(read_data=""))
def test_mount_boot_emptyfstab(self):
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.rebuildinitrd.mount_boot())
self.assertEqual(self.output.getvalue(), "[WARN] No /boot in /etc/fstab and /boot empty. Cannot proceed\n")
@mock.patch("moduletests.src.rebuildinitrd.open", side_effect=IOError("test"))
def test_mount_boot_exception(self, open_mock):
with contextlib.redirect_stdout(self.output):
with self.assertRaises(IOError) as ex:
moduletests.src.rebuildinitrd.mount_boot()
self.assertEqual(self.output.getvalue(), "[WARN] /boot empty. Cannot proceed.\n")
self.assertEqual(str(ex.exception), "test")
self.assertTrue(open_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.os.listdir",
return_value=["initramfs-4.9.32-15.41.amzn1.x86_64.img", "garbage"])
def test_get_initrd(self, os_listdir_mock):
self.assertEqual(moduletests.src.rebuildinitrd.get_initrd(), ["initramfs-4.9.32-15.41.amzn1.x86_64.img"])
self.assertTrue(os_listdir_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.os.listdir",
return_value=["garbage"])
def test_get_initrd_empty(self, os_listdir_mock):
with self.assertRaises(ValueError) as ve:
moduletests.src.rebuildinitrd.get_initrd()
self.assertEqual(str(ve.exception), "initrd list is empty! Did not find any initrd files!")
self.assertTrue(os_listdir_mock.called)
def test_rebuild_invalid_distro(self):
with self.assertRaises(ValueError) as assert_obj:
moduletests.src.rebuildinitrd.rebuild("invalid", dict(), "/test/path")
self.assertEqual(str(assert_obj.exception), "[FAILURE] unsupported distribution: invalid")
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=["initramfs-4.9.76-3.78.amzn1.x86_64.img",
"initramfs-4.9.77-31.58.amzn1.x86_64.img"])
@mock.patch("moduletests.src.rebuildinitrd.backup", return_value=True)
@mock.patch("subprocess.check_output", return_value="stuff")
def test_rebuild_alami_success(self, check_output_mock, backup_mock, get_initrd_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.rebuild("alami", dict(), "/test/path"))
self.assertTrue(self.output.getvalue().endswith("Creating new initial ramdisk for 4.9.77-31.58.amzn1.x86_64\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=["initramfs-4.9.76-3.78.amzn2.x86_64.img",
"initramfs-4.9.77-31.58.amzn2.x86_64.img"])
@mock.patch("moduletests.src.rebuildinitrd.backup", return_value=True)
@mock.patch("subprocess.check_output", return_value="stuff")
def test_rebuild_alami2_success(self, check_output_mock, backup_mock, get_initrd_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.rebuild("alami2", dict(), "/test/path"))
self.assertTrue(self.output.getvalue().endswith("Creating new initial ramdisk for 4.9.77-31.58.amzn2.x86_64\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=["initramfs-3.10.0-514.el7.x86_64.img"])
@mock.patch("moduletests.src.rebuildinitrd.backup", return_value=True)
@mock.patch("subprocess.check_output", return_value="stuff")
def test_rebuild_rhel_success(self, check_output_mock, backup_mock, get_initrd_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.rebuild("rhel", dict(), "/test/path"))
self.assertTrue(self.output.getvalue().endswith("Creating new initial ramdisk for 3.10.0-514.el7.x86_64\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=["initrd.img-4.4.0-1031-aws"])
@mock.patch("moduletests.src.rebuildinitrd.backup", return_value=True)
@mock.patch("subprocess.check_output", return_value="stuff")
def test_rebuild_ubuntu_success(self, check_output_mock, backup_mock, get_initrd_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.rebuild("ubuntu", dict(), "/test/path"))
self.assertTrue(self.output.getvalue().endswith("Creating new initial ramdisk for 4.4.0-1031-aws\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=["initrd-4.4.59-92.17-default"])
@mock.patch("moduletests.src.rebuildinitrd.backup", return_value=True)
@mock.patch("subprocess.check_output", return_value="stuff")
def test_rebuild_suse_success(self, check_output_mock, backup_mock, get_initrd_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.rebuild("suse", dict(), "/test/path"))
self.assertTrue(self.output.getvalue().endswith("Creating new initial ramdisk for 4.4.59-92.17-default\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=[])
def test_rebuild_alami_get_initrd_failure(self, get_initrd_mock):
with self.assertRaises(Exception) as assert_obj:
moduletests.src.rebuildinitrd.rebuild("alami", dict(), "/test/path")
self.assertEqual(str(assert_obj.exception), "[FAILURE] Failed to find initial ramdisk!")
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=["initramfs-4.9.32-15.41.amzn1.x86_64.img"])
@mock.patch("moduletests.src.rebuildinitrd.backup", side_effect=IOError)
def test_rebuild_alami_backup_failure(self, backup_mock, get_initrd_mock):
with contextlib.redirect_stdout(self.output):
self.assertRaises(Exception, moduletests.src.rebuildinitrd.rebuild, "alami", dict(), "/test/path")
self.assertTrue(self.output.getvalue().endswith("[WARN] Backup of initial ramdisk failed.\n"))
self.assertTrue(backup_mock.called)
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_initrd", return_value=["initramfs-4.9.32-15.41.amzn1.x86_64.img"])
@mock.patch("moduletests.src.rebuildinitrd.backup", return_value=True)
@mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(1, "call"))
def test_rebuild_alami_subprocess_failure(self, check_output_mock, backup_mock, get_initrd_mock):
with contextlib.redirect_stdout(self.output):
self.assertRaises(subprocess.CalledProcessError,
moduletests.src.rebuildinitrd.rebuild, "alami", dict(), "/test/path")
self.assertTrue(self.output.getvalue().endswith("[WARN] Rebuild of initial ramdisk failed.\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(get_initrd_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_config_dict")
@mock.patch("moduletests.src.rebuildinitrd.os.listdir", return_value=False)
@mock.patch("moduletests.src.rebuildinitrd.mount_boot", return_value=True)
@mock.patch("moduletests.src.rebuildinitrd.rebuild", return_value=True)
def test_run_mount_alami(self, rebuild_mock, mount_boot_mock, os_listdir_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True,
"DISTRO": "alami"}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.run())
self.assertTrue("[SUCCESS] initial ramdisk rebuilt" in self.output.getvalue())
self.assertTrue(rebuild_mock.called)
self.assertTrue(mount_boot_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_config_dict")
@mock.patch("moduletests.src.rebuildinitrd.os.listdir", return_value=True)
@mock.patch("moduletests.src.rebuildinitrd.rebuild", return_value=True)
def test_run_nomount_alami(self, rebuild_mock, os_listdir_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True,
"DISTRO": "alami"}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.run())
self.assertTrue("[SUCCESS] initial ramdisk rebuilt" in self.output.getvalue())
self.assertTrue(rebuild_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_config_dict")
@mock.patch("moduletests.src.rebuildinitrd.os.listdir", return_value=True)
@mock.patch("moduletests.src.rebuildinitrd.rebuild", return_value=True)
def test_run_nomount_ubuntu(self, rebuild_mock, os_listdir_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True,
"DISTRO": "ubuntu"}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.run())
self.assertTrue("[SUCCESS] initial ramdisk rebuilt" in self.output.getvalue())
self.assertTrue(rebuild_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_config_dict")
@mock.patch("moduletests.src.rebuildinitrd.os.listdir", return_value=True)
@mock.patch("moduletests.src.rebuildinitrd.rebuild", return_value=True)
def test_run_nomount_rhel(self, rebuild_mock, os_listdir_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True,
"DISTRO": "rhel"}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.run())
self.assertTrue("[SUCCESS] initial ramdisk rebuilt" in self.output.getvalue())
self.assertTrue(rebuild_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_config_dict")
@mock.patch("moduletests.src.rebuildinitrd.os.listdir", return_value=True)
@mock.patch("moduletests.src.rebuildinitrd.rebuild", return_value=True)
def test_run_nomount_suse(self, rebuild_mock, os_listdir_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True,
"DISTRO": "suse"}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.rebuildinitrd.run())
self.assertTrue("[SUCCESS] initial ramdisk rebuilt" in self.output.getvalue())
self.assertTrue(rebuild_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_config_dict")
@mock.patch("moduletests.src.rebuildinitrd.os.listdir", return_value=True)
@mock.patch("moduletests.src.rebuildinitrd.rebuild", side_effect=IOError("test"))
def test_run_nomount_alami_exception(self, rebuild_mock, os_listdir_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True,
"DISTRO": "alami"}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.rebuildinitrd.run())
self.assertTrue("/boot has contents\ntest\n"
"[WARN] module generated an exception and exited abnormally. "
"Review the logs to determine the cause of the issue.\n"
in self.output.getvalue())
self.assertTrue(rebuild_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.rebuildinitrd.get_config_dict")
@mock.patch("moduletests.src.rebuildinitrd.os.listdir", return_value=True)
@mock.patch("moduletests.src.rebuildinitrd.rebuild", side_effect=IOError("test"))
@mock.patch("moduletests.src.rebuildinitrd.restore", return_value=True)
def test_run_nomount_alami_exception_restore(self, restore_mock, rebuild_mock, os_listdir_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": {"some file": "test"},
"REMEDIATE": True,
"SUDO": True,
"DISTRO": "alami"}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.rebuildinitrd.run())
self.assertTrue("/boot has contents\ntest\n"
"[WARN] module generated an exception and exited abnormally. "
"Review the logs to determine the cause of the issue.\n"
in self.output.getvalue())
self.assertTrue(restore_mock.called)
self.assertTrue(rebuild_mock.called)
self.assertTrue(os_listdir_mock.called)
self.assertTrue(config_mock.called)
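if __name__ == "__main__":
    # Minimal runner guard so the module can also be executed directly; the
    # suite is normally driven by an external unittest runner.
    unittest.main()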
|
{
"content_hash": "9abf77d799e091953bdc1aa1565fd35e",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 120,
"avg_line_length": 57.62152777777778,
"alnum_prop": 0.636456764085568,
"repo_name": "gregbdunn/aws-ec2rescue-linux",
"id": "e3987de9daf761c3fff95d12e50db142ee71c782",
"size": "17167",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tools/moduletests/unit/test_rebuildinitrd.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "701"
},
{
"name": "Makefile",
"bytes": "5044"
},
{
"name": "Python",
"bytes": "4595518"
},
{
"name": "Shell",
"bytes": "5229"
}
],
"symlink_target": ""
}
|
import unittest
from stix.test import EntityTestCase
from stix.common import Names
class NamesTests(EntityTestCase, unittest.TestCase):
klass = Names
_full_dict = [
"foo",
"bar",
{'value': 'User Data Loss', 'xsi:type': 'stixVocabs:IncidentEffectVocab-1.0'},
]
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "ea5f503800badb82f065a9fed10e6c13",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 86,
"avg_line_length": 20.470588235294116,
"alnum_prop": 0.6235632183908046,
"repo_name": "chriskiehl/python-stix",
"id": "2c7b9d2be0c8aa29b6e9f7fc1a79c1f1342d98ef",
"size": "453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stix/test/common/names_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1610823"
}
],
"symlink_target": ""
}
|
import unittest, sys
sys.path.extend(['.','..','../..','py'])
import string
import h2o2 as h2o
import h2o_cmd, h2o_import as h2i, h2o_browse as h2b
from h2o_test import find_file, dump_json, verboseprint
expectedZeros = [0, 4914, 656, 24603, 38665, 124, 13, 5, 1338, 51, 320216, 551128, 327648, 544044, 577981,
573487, 576189, 568616, 579415, 574437, 580907, 580833, 579865, 548378, 568602, 551041,
563581, 580413, 581009, 578167, 577590, 579113, 576991, 571753, 580174, 547639, 523260,
559734, 580538, 578423, 579926, 580066, 465765, 550842, 555346, 528493, 535858, 579401,
579121, 580893, 580714, 565439, 567206, 572262, 0]
DO_2X_SRC = False
DO_TEST_BAD_COLNAME = False
DO_TEST_BAD_COL_LENGTH = False
DO_IMPORT_PARSE = True
# Pick one covtype variant; the second assignment intentionally overrides the first.
SINGLE_CSVFILENAME = 'covtype.data.sorted'
SINGLE_CSVFILENAME = 'covtype.data'
def assertEqualMsg(a, b): assert a == b, "%s %s" % (a, b)
def parseKeyIndexedCheck(frames_result, multiplyExpected, expectedColumnNames):
# get the name of the frame?
print ""
frame = frames_result['frames'][0]
rows = frame['rows']
columns = frame['columns']
for i,c in enumerate(columns):
label = c['label']
stype = c['type']
missing = c['missing_count']
zeros = c['zero_count']
domain = c['domain']
print "column: %s label: %s type: %s missing: %s zeros: %s domain: %s" %\
(i,label,stype,missing,zeros,domain)
# files are concats of covtype. so multiply expected
assertEqualMsg(zeros, expectedZeros[i] * multiplyExpected)
assertEqualMsg(label, expectedColumnNames[i])
assertEqualMsg(stype,"int")
assertEqualMsg(missing, 0)
assertEqualMsg(domain, None)
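# Hedged illustration (not part of the original test): the minimal frames_result
# shape that parseKeyIndexedCheck walks -- one frame whose 'columns' entries carry
# label/type/missing_count/zero_count/domain. The single made-up column below is
# chosen so that parseKeyIndexedCheck(_example_frames_result(), 1, ['abcd012345_1'])
# would pass against expectedZeros[0] == 0.
def _example_frames_result():
    return {
        'frames': [{
            'rows': 581012,
            'columns': [
                {'label': 'abcd012345_1', 'type': 'int',
                 'missing_count': 0, 'zero_count': 0, 'domain': None},
            ],
        }]
    }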
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
h2o.init()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_parse_covtype(self):
tryList = [
(['covtype.data', 'covtype.shuffled.data', 'covtype.sorted.data'], 3, 30),
]
for (csvFilenameList, multiplyExpected, timeoutSecs) in tryList:
            # h2o-dev doesn't take ../.. style relative paths, so make find_file return an absolute path
a_node = h2o.nodes[0]
# import_result = a_node.import_files(path=find_file("smalldata/logreg/prostate.csv"))
importFolderPath = "/home/0xdiag/datasets/standard"
# keep a list of the keys you import, to feed to parse
kList = []
for csvFilename in csvFilenameList:
csvPathname = importFolderPath + "/" + csvFilename
if not DO_IMPORT_PARSE:
import_result = a_node.import_files(path=csvPathname)
                    k = import_result['keys'][0]
                    frames_result = a_node.frames(key=k, row_count=5, timeoutSecs=timeoutSecs)
                    kList.append(k)
# print "frames_result from the first import_result key", dump_json(frames_result)
print "I think I imported these keys:", kList
# what happens if I put the kList in twice? can it touch the same source file without lock issues?
if DO_2X_SRC:
kList2 = kList + kList
multiplyExpected = 2 * multiplyExpected
else:
kList2 = kList
# try passing column names also.
# questions to try
# what if you pass missing (,,)
# what if you pass too many, too few, or some with same name?
# let's try all the characters
basename = string.printable
            # remove the ',' from the string (strings are immutable, so translate()
            # returns a new string with every listed character deleted)
            # other characters may be illegal in column names too: [ ] ' and the double quote
if DO_TEST_BAD_COLNAME:
basename = basename.translate(None, ",[]!#$%&'()*+-./:;<=>?@\^_`{|}~" + '"')
else:
basename = "abcd012345"
colLength = 1 if DO_TEST_BAD_COL_LENGTH else 55
expectedColumnNames = map(lambda x: basename + "_" + str(x+1), range(colLength))
# need to quote each column name in the string passed
column_names = "[" + ",".join(map((lambda x: "'" + x + "'"), expectedColumnNames)) + "]"
kwargs = {
'column_names': column_names,
'intermediateResults': False,
}
print kwargs
if DO_IMPORT_PARSE:
multiplyExpected = 1
csvPathname = importFolderPath + "/" + SINGLE_CSVFILENAME
parse_result = h2i.import_parse(path=csvPathname, timeoutSecs=timeoutSecs, **kwargs)
else:
parse_result = a_node.parse(key=kList2, timeoutSecs=timeoutSecs, **kwargs)
k = parse_result['frames'][0]['key']['name']
# print "parse_result:", dump_json(parse_result)
frames_result = a_node.frames(key=k, row_count=5)
# print "frames_result from the first parse_result key", dump_json(frames_result)
# we doubled the keyList, from what was in tryList
parseKeyIndexedCheck(frames_result, multiplyExpected, expectedColumnNames)
if __name__ == '__main__':
h2o.unit_main()
|
{
"content_hash": "ffc6c530cc774452bfe4e40b78412324",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 110,
"avg_line_length": 39.74626865671642,
"alnum_prop": 0.5906871948929778,
"repo_name": "bikash/h2o-dev",
"id": "7015c3ee65a3159fe18a1b93ae4af41cdaee6f10",
"size": "5326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py2/testdir_single_jvm/test_import2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "35527"
},
{
"name": "CoffeeScript",
"bytes": "254380"
},
{
"name": "Emacs Lisp",
"bytes": "8775"
},
{
"name": "Groovy",
"bytes": "17053"
},
{
"name": "HTML",
"bytes": "614833"
},
{
"name": "Java",
"bytes": "4393249"
},
{
"name": "JavaScript",
"bytes": "83136"
},
{
"name": "Makefile",
"bytes": "25333"
},
{
"name": "Python",
"bytes": "1481182"
},
{
"name": "R",
"bytes": "1090098"
},
{
"name": "Rebol",
"bytes": "3310"
},
{
"name": "Scala",
"bytes": "13626"
},
{
"name": "Shell",
"bytes": "40984"
}
],
"symlink_target": ""
}
|
import flask
import os
from werkzeug.utils import secure_filename
from donut.modules.uploads import blueprint, helpers
@blueprint.route('/lib/<path:url>')
def display(url):
'''
Displays the webpages that have been created by users.
'''
page = helpers.read_page(url.replace(' ', '_'))
    if page is None:
return flask.abort(404)
return flask.render_template(
'page.html',
title=url.replace('_', ' '),
permissions=helpers.check_upload_permission())
@blueprint.route('/uploads/_send_page')
def get_page():
'''
    Sends the page to the frontend.
'''
url = flask.request.args.get('url')
return helpers.read_page(url.replace(' ', '_'))
@blueprint.route('/uploads', methods=['GET'])
def uploads():
'''
Serves the webpage that allows a user to upload a file.
'''
if helpers.check_upload_permission():
return flask.render_template('uploads.html')
else:
flask.abort(403)
@blueprint.route('/uploads/_upload_file', methods=['POST'])
def upload_file():
'''
Handles the uploading of the file
'''
if 'file' not in flask.request.files:
flask.abort(500)
file = flask.request.files['file']
filename = secure_filename(file.filename)
uploads = os.path.join(flask.current_app.root_path,
flask.current_app.config['UPLOAD_FOLDER'])
if helpers.check_upload_permission():
helpers.remove_link(filename)
file.save(os.path.join(uploads, filename))
return flask.jsonify({
'url':
flask.url_for('uploads.uploaded_file', filename=filename)
})
else:
flask.abort(403)
@blueprint.route('/uploads/_check_valid_file', methods=['POST'])
def check_file():
'''
    Checks that the file exists, has a valid extension, and
    is smaller than 10 MB (a hedged sketch of one possible
    validator implementation appears at the end of this module)
'''
if 'file' not in flask.request.files:
return flask.jsonify({'error': 'No file selected'})
if helpers.check_upload_permission():
file = flask.request.files['file']
return flask.jsonify({'error': helpers.check_valid_file(file)})
else:
return flask.abort(403)
@blueprint.route('/lib/uploaded_file/<filename>', methods=['GET'])
def uploaded_file(filename):
'''
Serves the actual uploaded file.
'''
uploads = os.path.join(flask.current_app.root_path,
flask.current_app.config['UPLOAD_FOLDER'])
return flask.send_from_directory(uploads, filename, as_attachment=False)
@blueprint.route('/uploads/_delete')
def delete_uploaded_file():
"""
    Endpoint for deleting an upload
"""
filename = flask.request.args.get('filename')
    if filename is not None and helpers.check_upload_permission():
helpers.remove_link(filename)
return flask.redirect(flask.url_for('uploads.uploaded_list'))
@blueprint.route('/uploads/uploaded_list', methods=['GET'])
def uploaded_list():
'''
Shows the list of uploaded files
'''
links = helpers.get_links()
return flask.render_template(
'uploaded_list.html',
delete_file_endpoint='uploads.delete_uploaded_file',
links=links,
permissions=helpers.check_upload_permission())
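# Hedged sketch (not the actual helpers module): one plausible shape for the
# helpers.check_valid_file validator described in check_file above. It should
# return an error string for a missing upload, a disallowed extension, or a
# file over 10 MB, and None otherwise. The extension whitelist, the stream
# size probe, and the function name below are illustrative assumptions only.
_ALLOWED_EXTENSIONS = {'pdf', 'png', 'jpg', 'jpeg', 'gif', 'txt'}  # assumed set
_MAX_UPLOAD_BYTES = 10 * 1024 * 1024  # the 10 MB limit from the docstring
def _check_valid_file_sketch(file):
    """Return an error message, or None if the upload looks acceptable."""
    if not file or not file.filename:
        return 'No file selected'
    if '.' not in file.filename:
        return 'Invalid file type'
    if file.filename.rsplit('.', 1)[-1].lower() not in _ALLOWED_EXTENSIONS:
        return 'Invalid file type'
    # Werkzeug's FileStorage exposes a seekable stream, so the size can be
    # measured without reading the whole upload into memory.
    file.stream.seek(0, os.SEEK_END)
    size = file.stream.tell()
    file.stream.seek(0)
    if size > _MAX_UPLOAD_BYTES:
        return 'File too large'
    return None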
|
{
"content_hash": "eed3f05c4ddefb94f501bd5afccb3f16",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 76,
"avg_line_length": 29.568807339449542,
"alnum_prop": 0.6341917468197331,
"repo_name": "ASCIT/donut",
"id": "99bb7bfb4644b5a78a06bd768c5f456ee8c21291",
"size": "3223",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "donut/modules/uploads/routes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "26535"
},
{
"name": "HTML",
"bytes": "210555"
},
{
"name": "JavaScript",
"bytes": "59942"
},
{
"name": "Makefile",
"bytes": "504"
},
{
"name": "Python",
"bytes": "451356"
}
],
"symlink_target": ""
}
|
import django
from django.conf import settings
from django.core.urlresolvers import reverse
from django.forms import widgets
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.api import cinder
from openstack_dashboard.dashboards.project.volumes \
.volumes import tables
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
VOLUME_INDEX_URL = reverse('horizon:project:volumes:index')
class VolumeViewTests(test.TestCase):
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume(self):
volume = self.cinder_volumes.first()
volume_type = self.volume_types.first()
az = self.cinder_availability_zones.first().zoneName
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'type': volume_type.name,
'size': 50,
'snapshot_source': '',
'availability_zone': az}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
formData['type'],
metadata={},
snapshot_id=None,
image_id=None,
availability_zone=formData['availability_zone'],
source_volid=None)\
.AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
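    # A note on the pattern repeated throughout this class: @test.create_stubs
    # swaps the listed attributes for mox mocks, the calls made before
    # self.mox.ReplayAll() record the expected invocations and their canned
    # return values, ReplayAll() switches the mocks into replay mode, and the
    # test harness verifies on teardown that every recorded call was made.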
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_dropdown(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'no_source_type',
'snapshot_source': self.cinder_volume_snapshots.first().id,
'image_source': self.images.first().id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=None,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_get',
'volume_get',
'volume_type_list'),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.cinder_volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.volume_get(IsA(http.HttpRequest), snapshot.volume_id).\
AndReturn(self.cinder_volumes.first())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=snapshot.id,
image_id=None,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
# get snapshot from url
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post("?".join([url,
"snapshot_id=" + str(snapshot.id)]),
formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_get',
'volume_list',
'volume_type_list',
'availability_zone_list',
'volume_snapshot_get',
'volume_snapshot_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_volume(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A copy of a volume',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'volume_source',
'volume_source': volume.id}
cinder.volume_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volumes.list())
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_get(IsA(http.HttpRequest),
volume.id).AndReturn(self.cinder_volumes.first())
cinder.extension_supported(IsA(http.HttpRequest),
'AvailabilityZones').AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=None,
availability_zone=None,
source_volid=volume.id).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
redirect_url = reverse('horizon:project:volumes:index')
res = self.client.post(url, formData)
self.assertNoFormErrors(res)
self.assertMessageCount(info=1)
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_snapshot_get',
'volume_get',
'volume_list',
'volume_type_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot_dropdown(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.cinder_volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'snapshot_source',
'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=snapshot.id,
image_id=None,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
# get snapshot from dropdown list
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_snapshot_get',
'volume_type_list',
'volume_get'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot_invalid_size(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.cinder_volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 20, 'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.volume_get(IsA(http.HttpRequest), snapshot.volume_id).\
AndReturn(self.cinder_volumes.first())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post("?".join([url,
"snapshot_id=" + str(snapshot.id)]),
formData, follow=True)
self.assertEqual(res.redirect_chain, [])
self.assertFormError(res, 'form', None,
"The volume size cannot be less than the "
"snapshot size (40GB)")
@test.create_stubs({cinder: ('volume_create',
'volume_type_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_get',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_image(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 200,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
image = self.images.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 40,
'type': '',
'image_source': image.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
api.glance.image_get(IsA(http.HttpRequest),
str(image.id)).AndReturn(image)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=image.id,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
# get image from url
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post("?".join([url,
"image_id=" + str(image.id)]),
formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_type_list',
'volume_list',
'volume_snapshot_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_get',
'image_list_detailed'),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_image_dropdown(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 200,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
image = self.images.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 30,
'type': '',
'volume_source_type': 'image_source',
'snapshot_source': self.cinder_volume_snapshots.first().id,
'image_source': image.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)) \
.AndReturn(usage_limit)
api.glance.image_get(IsA(http.HttpRequest),
str(image.id)).AndReturn(image)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=image.id,
availability_zone=None,
source_volid=None).AndReturn(volume)
self.mox.ReplayAll()
# get image from dropdown list
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_type_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_get',
'image_list_detailed'),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_image_under_image_size(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
image = self.images.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 1, 'image_source': image.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
api.glance.image_get(IsA(http.HttpRequest),
str(image.id)).AndReturn(image)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post("?".join([url,
"image_id=" + str(image.id)]),
formData, follow=True)
self.assertEqual(res.redirect_chain, [])
# in django 1.6 filesizeformat replaces all spaces with
# non-breaking space characters
if django.VERSION >= (1, 6):
msg = (u"The volume size cannot be less than the "
u"image size (20.0\xa0GB)")
else:
msg = (u"The volume size cannot be less than the "
u"image size (20.0 GB)")
self.assertFormError(res, 'form', None, msg)
@test.create_stubs({cinder: ('volume_type_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_get',
'image_list_detailed'),
quotas: ('tenant_limit_usages',)})
def _test_create_volume_from_image_under_image_min_disk_size(self, image):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 5, 'image_source': image.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
api.glance.image_get(IsA(http.HttpRequest),
str(image.id)).AndReturn(image)
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post("?".join([url,
"image_id=" + str(image.id)]),
formData, follow=True)
self.assertEqual(res.redirect_chain, [])
self.assertFormError(res, 'form', None,
"The volume size cannot be less than the "
"image minimum disk size (30GB)")
def test_create_volume_from_image_under_image_min_disk_size(self):
image = self.images.get(name="protected_images")
image.min_disk = 30
self._test_create_volume_from_image_under_image_min_disk_size(image)
def test_create_volume_from_image_under_image_property_min_disk_size(self):
image = self.images.get(name="protected_images")
image.min_disk = 0
image.properties['min_disk'] = 30
self._test_create_volume_from_image_under_image_min_disk_size(image)
@test.create_stubs({cinder: ('volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_gb_used_over_alloted_quota(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 80,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'This Volume Is Huge!',
'description': u'This is a volume that is just too big!',
'method': u'CreateForm',
'size': 5000}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
expected_error = [u'A volume of 5000GB cannot be created as you only'
' have 20GB of your quota available.']
self.assertEqual(res.context['form'].errors['__all__'], expected_error)
@test.create_stubs({cinder: ('volume_snapshot_list',
'volume_type_list',
'volume_list',
'availability_zone_list',
'extension_supported'),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_number_over_alloted_quota(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': len(self.cinder_volumes.list())}
formData = {'name': u'Too Many...',
'description': u'We have no volumes left!',
'method': u'CreateForm',
'size': 10}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_list(IsA(
http.HttpRequest)).AndReturn(self.cinder_volumes.list())
cinder.extension_supported(IsA(http.HttpRequest), 'AvailabilityZones')\
.AndReturn(True)
cinder.availability_zone_list(IsA(http.HttpRequest)).AndReturn(
self.cinder_availability_zones.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:create')
res = self.client.post(url, formData)
expected_error = [u'You are already using all of your available'
' volumes.']
self.assertEqual(res.context['form'].errors['__all__'], expected_error)
@test.create_stubs({cinder: ('volume_list',
'volume_snapshot_list',
'volume_delete',),
api.nova: ('server_list',),
quotas: ('tenant_quota_usages',)})
def test_delete_volume(self):
volumes = self.cinder_volumes.list()
volume = self.cinder_volumes.first()
formData = {'action':
'volumes__delete__%s' % volume.id}
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(volumes)
cinder.volume_delete(IsA(http.HttpRequest), volume.id)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.cinder_volume_snapshots.list())
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(volumes)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
cinder.volume_list(IsA(http.HttpRequest)).AndReturn(volumes)
quotas.tenant_quota_usages(IsA(http.HttpRequest)).MultipleTimes().\
AndReturn(self.quota_usages.first())
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:index')
res = self.client.post(url, formData, follow=True)
self.assertIn("Scheduled deletion of Volume: Volume name",
[m.message for m in res.context['messages']])
@test.create_stubs({cinder: ('volume_list',
'volume_snapshot_list',
'volume_delete',),
api.nova: ('server_list',),
quotas: ('tenant_quota_usages',)})
def test_delete_volume_error_existing_snapshot(self):
volume = self.cinder_volumes.first()
volumes = self.cinder_volumes.list()
formData = {'action':
'volumes__delete__%s' % volume.id}
exc = self.exceptions.cinder.__class__(400,
"error: dependent snapshots")
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(volumes)
cinder.volume_delete(IsA(http.HttpRequest), volume.id).\
AndRaise(exc)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(volumes)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
cinder.volume_snapshot_list(IsA(http.HttpRequest))\
.AndReturn(self.cinder_volume_snapshots.list())
cinder.volume_list(IsA(http.HttpRequest)).AndReturn(volumes)
quotas.tenant_quota_usages(IsA(http.HttpRequest)).MultipleTimes().\
AndReturn(self.quota_usages.first())
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:index')
res = self.client.post(url, formData, follow=True)
self.assertEqual(list(res.context['messages'])[0].message,
u'Unable to delete volume "%s". '
u'One or more snapshots depend on it.' %
volume.name)
@test.create_stubs({cinder: ('volume_get',), api.nova: ('server_list',)})
def test_edit_attachments(self):
PREV = settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point']
settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point'] = True
volume = self.cinder_volumes.first()
servers = [s for s in self.servers.list()
if s.tenant_id == self.request.user.tenant_id]
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
api.nova.server_list(IsA(http.HttpRequest)).AndReturn([servers, False])
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:attach',
args=[volume.id])
res = self.client.get(url)
# Asserting length of 2 accounts for the one instance option,
# and the one 'Choose Instance' option.
form = res.context['form']
self.assertEqual(len(form.fields['instance']._choices),
2)
self.assertEqual(res.status_code, 200)
self.assertTrue(isinstance(form.fields['device'].widget,
widgets.TextInput))
settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point'] = PREV
@test.create_stubs({cinder: ('volume_get',), api.nova: ('server_list',)})
def test_edit_attachments_cannot_set_mount_point(self):
volume = self.cinder_volumes.first()
servers = [s for s in self.servers.list()
if s.tenant_id == self.request.user.tenant_id]
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
api.nova.server_list(IsA(http.HttpRequest)).AndReturn([servers, False])
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:attach',
args=[volume.id])
res = self.client.get(url)
# Assert the device field is hidden.
form = res.context['form']
self.assertTrue(isinstance(form.fields['device'].widget,
widgets.HiddenInput))
@test.create_stubs({cinder: ('volume_get',),
api.nova: ('server_get', 'server_list',),
quotas: ('tenant_quota_usages',)})
def test_edit_attachments_attached_volume(self):
servers = [s for s in self.servers.list()
if s.tenant_id == self.request.user.tenant_id]
server = servers[0]
volume = self.cinder_volumes.list()[0]
cinder.volume_get(IsA(http.HttpRequest), volume.id) \
.AndReturn(volume)
api.nova.server_list(IsA(http.HttpRequest)) \
.AndReturn([servers, False])
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:attach',
args=[volume.id])
res = self.client.get(url)
self.assertEqual(res.context['form'].fields['instance']._choices[0][1],
"Select an instance")
self.assertEqual(len(res.context['form'].fields['instance'].choices),
2)
self.assertEqual(res.context['form'].fields['instance']._choices[1][0],
server.id)
self.assertEqual(res.status_code, 200)
@test.create_stubs({cinder: ('volume_list',
'volume_snapshot_list'),
api.nova: ('server_list',),
quotas: ('tenant_quota_usages',)})
def test_create_button_disabled_when_quota_exceeded(self):
quota_usages = self.quota_usages.first()
quota_usages['volumes']['available'] = 0
volumes = self.cinder_volumes.list()
cinder.volume_list(IsA(http.HttpRequest), search_opts=None)\
.AndReturn(volumes)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None)\
.AndReturn([self.servers.list(), False])
cinder.volume_snapshot_list(IsA(http.HttpRequest))\
.AndReturn(self.cinder_volume_snapshots.list())
cinder.volume_list(IsA(http.HttpRequest)).AndReturn(volumes)
quotas.tenant_quota_usages(IsA(http.HttpRequest))\
.MultipleTimes().AndReturn(quota_usages)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:project:volumes:index'))
self.assertTemplateUsed(res, 'project/volumes/index.html')
volumes = res.context['volumes_table'].data
self.assertItemsEqual(volumes, self.cinder_volumes.list())
create_link = tables.CreateVolume()
url = create_link.get_link_url()
classes = list(create_link.get_default_classes())\
+ list(create_link.classes)
link_name = "%s (%s)" % (unicode(create_link.verbose_name),
"Quota exceeded")
expected_string = "<a href='%s' title='%s' class='%s disabled' "\
"id='volumes__action_create'>%s</a>" \
% (url, link_name, " ".join(classes), link_name)
self.assertContains(res, expected_string, html=True,
msg_prefix="The create button is not disabled")
@test.create_stubs({cinder: ('volume_get',),
api.nova: ('server_get',)})
def test_detail_view(self):
volume = self.cinder_volumes.first()
server = self.servers.first()
volume.attachments = [{"server_id": server.id}]
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
api.nova.server_get(IsA(http.HttpRequest), server.id).AndReturn(server)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:detail',
args=[volume.id])
res = self.client.get(url)
self.assertContains(res, "<h2>Volume Details: Volume name</h2>",
1, 200)
self.assertContains(res, "<dd>Volume name</dd>", 1, 200)
self.assertContains(res, "<dd>%s</dd>" % volume.id, 1, 200)
self.assertContains(res, "<dd>Available</dd>", 1, 200)
self.assertContains(res, "<dd>40 GB</dd>", 1, 200)
self.assertContains(res,
("<a href=\"/project/instances/1/\">%s</a>"
% server.name),
1,
200)
self.assertNoMessages()
@test.create_stubs({cinder: ('volume_get',)})
def test_get_data(self):
volume = self.cinder_volumes.get(name='v2_volume')
volume._apiresource.name = ""
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:index') + \
"?action=row_update&table=volumes&obj_id=" + volume.id
res = self.client.get(url, {},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(res.status_code, 200)
self.assertEqual(volume.name, volume.id)
@test.create_stubs({cinder: ('volume_get',)})
def test_detail_view_with_exception(self):
volume = self.cinder_volumes.first()
server = self.servers.first()
volume.attachments = [{"server_id": server.id}]
cinder.volume_get(IsA(http.HttpRequest), volume.id).\
AndRaise(self.exceptions.cinder)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:detail',
args=[volume.id])
res = self.client.get(url)
self.assertRedirectsNoFollow(res, VOLUME_INDEX_URL)
@test.create_stubs({cinder: ('volume_update',
'volume_get',)})
def test_update_volume(self):
volume = self.cinder_volumes.get(name="my_volume")
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
cinder.volume_update(IsA(http.HttpRequest),
volume.id,
volume.name,
volume.description)
self.mox.ReplayAll()
formData = {'method': 'UpdateForm',
'name': volume.name,
'description': volume.description}
url = reverse('horizon:project:volumes:volumes:update',
args=[volume.id])
res = self.client.post(url, formData)
self.assertRedirectsNoFollow(res, VOLUME_INDEX_URL)
@test.create_stubs({cinder: ('volume_get',
'volume_extend')})
def test_extend_volume(self):
volume = self.cinder_volumes.first()
formData = {'name': u'A Volume I Am Making',
'orig_size': volume.size,
'new_size': 100}
cinder.volume_get(IsA(http.HttpRequest), volume.id).\
AndReturn(self.cinder_volumes.first())
cinder.volume_extend(IsA(http.HttpRequest),
volume.id,
formData['new_size']).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:extend',
args=[volume.id])
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_get',),
quotas: ('tenant_limit_usages',)})
def test_extend_volume_with_wrong_size(self):
volume = self.cinder_volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.cinder_volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'orig_size': volume.size,
'new_size': 10}
cinder.volume_get(IsA(http.HttpRequest), volume.id).\
AndReturn(self.cinder_volumes.first())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:volumes:extend',
args=[volume.id])
res = self.client.post(url, formData)
self.assertFormError(res, 'form', None,
"New size for extend must be greater than "
"current size.")
|
{
"content_hash": "49b500d8ebcdb39c204e65d2de6ba028",
"timestamp": "",
"source": "github",
"line_count": 1017,
"max_line_length": 79,
"avg_line_length": 46.98623402163225,
"alnum_prop": 0.5121690907188449,
"repo_name": "shhui/horizon",
"id": "f4d8200ba4f5bba0c2d9acea3c71f33914edb392",
"size": "48594",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/volumes/volumes/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "193319"
},
{
"name": "HTML",
"bytes": "300323"
},
{
"name": "JavaScript",
"bytes": "739933"
},
{
"name": "Makefile",
"bytes": "588"
},
{
"name": "Python",
"bytes": "3412846"
},
{
"name": "Shell",
"bytes": "19297"
}
],
"symlink_target": ""
}
|
import flakytests
from webkitpy.common.checkout.scm.scm_mock import MockSCM
from webkitpy.layout_tests.layout_package import bot_test_expectations
from webkitpy.tool.commands.commandtest import CommandsTest
from webkitpy.tool.mocktool import MockTool, MockOptions
from webkitpy.layout_tests.port import builders
class FakeBotTestExpectations(object):
def expectation_lines(self, only_ignore_very_flaky=False):
return []
class FakeBotTestExpectationsFactory(object):
FAILURE_MAP = {"A": "AUDIO", "C": "CRASH", "F": "TEXT", "I": "IMAGE", "O": "MISSING",
"N": "NO DATA", "P": "PASS", "T": "TIMEOUT", "Y": "NOTRUN", "X": "SKIP",
"Z": "IMAGE+TEXT", "K": "LEAK"}
def _expectations_from_test_data(self, builder, test_data):
test_data[bot_test_expectations.ResultsJSON.FAILURE_MAP_KEY] = self.FAILURE_MAP
json_dict = {
builder: test_data,
}
results = bot_test_expectations.ResultsJSON(builder, json_dict)
return bot_test_expectations.BotTestExpectations(results, builders._exact_matches[builder]["specifiers"])
def expectations_for_builder(self, builder):
if builder == 'foo-builder':
return self._expectations_from_test_data(builder, {
'tests': {
'pass.html': {'results': [[2, 'FFFP']], 'expected': 'PASS'},
}
})
if builder == 'bar-builder':
return self._expectations_from_test_data(builder, {
'tests': {
'pass.html': {'results': [[2, 'TTTP']], 'expected': 'PASS'},
}
})
return FakeBotTestExpectations()
class FlakyTestsTest(CommandsTest):
def test_merge_lines(self):
command = flakytests.FlakyTests()
factory = FakeBotTestExpectationsFactory()
old_builders = builders._exact_matches
builders._exact_matches = {
"foo-builder": {"port_name": "dummy-port", "specifiers": ['Linux', 'Release']},
"bar-builder": {"port_name": "dummy-port", "specifiers": ['Mac', 'Debug']},
}
try:
lines = command._collect_expectation_lines(['foo-builder', 'bar-builder'], factory)
self.assertEqual(len(lines), 1)
self.assertEqual(lines[0].expectations, ['TEXT', 'TIMEOUT', 'PASS'])
self.assertEqual(lines[0].specifiers, ['Mac', 'Linux'])
finally:
builders._exact_matches = old_builders
def test_integration(self):
command = flakytests.FlakyTests()
tool = MockTool()
command.expectations_factory = FakeBotTestExpectationsFactory
options = MockOptions(upload=True)
expected_stdout = '''
Manually add bug numbers for these and then put the lines in LayoutTests/TestExpectations.
TODO(ojan): Write a script to file/assign the bugs then create a bot to do this automatically.
'''
self.assert_execute_outputs(command, options=options, tool=tool, expected_stdout=expected_stdout)
|
{
"content_hash": "bb38882ac34c05458a640bf82dd9e529",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 113,
"avg_line_length": 39.57142857142857,
"alnum_prop": 0.6179849031834591,
"repo_name": "Pluto-tv/blink-crosswalk",
"id": "172e5ba97f248d56456262124c818396792cacc7",
"size": "3210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tools/Scripts/webkitpy/tool/commands/flakytests_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "1835"
},
{
"name": "Assembly",
"bytes": "14768"
},
{
"name": "Batchfile",
"bytes": "35"
},
{
"name": "C",
"bytes": "128002"
},
{
"name": "C++",
"bytes": "45337051"
},
{
"name": "CSS",
"bytes": "596289"
},
{
"name": "CoffeeScript",
"bytes": "163"
},
{
"name": "GLSL",
"bytes": "11578"
},
{
"name": "Groff",
"bytes": "28067"
},
{
"name": "HTML",
"bytes": "64824312"
},
{
"name": "Java",
"bytes": "109377"
},
{
"name": "JavaScript",
"bytes": "25099309"
},
{
"name": "Objective-C",
"bytes": "45096"
},
{
"name": "Objective-C++",
"bytes": "302371"
},
{
"name": "PHP",
"bytes": "220636"
},
{
"name": "Perl",
"bytes": "115958"
},
{
"name": "Python",
"bytes": "3879209"
},
{
"name": "Ruby",
"bytes": "73952"
},
{
"name": "Shell",
"bytes": "10282"
},
{
"name": "XSLT",
"bytes": "50203"
},
{
"name": "Yacc",
"bytes": "10148"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from collections import defaultdict
import emoji
class SpoofPhone():
"""This class provides methods to spoof tools to spoof SMS messages to a Shawk client"""
def __init__(self, mock_IMAP_instance, sender):
self.imap = mock_IMAP_instance
self.sender = sender
def send(self, messages, time=None, sender=None, emojize=False):
"""Spoof a simple SMS"""
# Handle arguments
if not time:
time = datetime.utcnow()
if not sender:
sender = self.sender
if not isinstance(messages, list):
messages = [messages]
if emojize:
messages = [emoji.emojize(message, use_aliases=True) for message in messages]
# Spoof IMAP's fetch return value
self.imap.fetch.return_value = defaultdict(dict)
message_counter = 1
for message in messages:
self.imap.fetch.return_value[message_counter] = {
b'BODY[TEXT]': str.encode(message),
b'BODY[HEADER.FIELDS (FROM)]': str.encode(sender.get_address()),
b'INTERNALDATE': str.encode(str(time))
}
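# Hedged usage sketch (illustrative, not part of the original module): drives
# SpoofPhone with unittest.mock.MagicMock standing in for both the IMAP client
# and a sender object exposing get_address(); the address string is made up.
if __name__ == '__main__':
    from unittest.mock import MagicMock
    mock_imap = MagicMock()
    sender = MagicMock()
    sender.get_address.return_value = '5551234567@txt.example.com'
    phone = SpoofPhone(mock_imap, sender)
    phone.send('Hello :smile:', emojize=True)
    # The spoofed message is now retrievable through the mocked IMAP fetch.
    print(mock_imap.fetch.return_value[1][b'BODY[TEXT]'])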
|
{
"content_hash": "ab476f7078fc043a7e9e84b7eba07c7b",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 92,
"avg_line_length": 35.484848484848484,
"alnum_prop": 0.605465414175918,
"repo_name": "hawkins/Shawk",
"id": "b52855b2121a19ed26a109f644a3051774839571",
"size": "1172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/SpoofPhone.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "134160"
},
{
"name": "HTML",
"bytes": "9908"
},
{
"name": "JavaScript",
"bytes": "13762"
},
{
"name": "Python",
"bytes": "50054"
}
],
"symlink_target": ""
}
|
{
'\n\nThank you!': 'Obrigado!',
'\n\nWe will wait and let you know when your payment is confirmed.': 'Nós iremos esperar e te avisaremos quando seu pagamento for confirmado.',
'\n- %s from %s to %s': '- %s de %s a %s',
'\nAmount: R$%.2f': 'Total: R$%.2f',
"\nSomething happened and we couldn't verify your payment.\n": 'Alguma coisa aconteceu e não conseguimos verificar seu pagamento.',
'\nThank you for your purchase!': 'Agradecemos pela sua compra!',
'\nThank you!': 'Obrigado!',
'\nThank you.': 'Obrigado.',
'\nThe total amount was R$%.2f.': 'O valor total foi R$%.2f.',
'\nWe will wait and let you know when your payment is confirmed.\n': 'Nós iremos esperar e te avisaremos quando seu pagamento for confirmado.',
'\nYou can check your payment history after login in to your profile.': 'Você pode verificar se seu pagamento foi confirmado no seu histórico após fazer login no seu perfil.',
'!langcode!': 'pt-br',
'!langname!': 'Português (do Brasil)',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "campo1=\'novovalor\'". Você não pode atualizar ou apagar os resultados de um JOIN',
'%02d/%02d': '%02d/%02d',
'%B %d, %Y': '%d de %B de %Y',
'%d%% OFF': '%d%% MENOS',
'%d/%d': '%d/%d',
'%m-%d-%Y': '%d-%m-%Y',
'%s %%{row} deleted': '%s linhas apagadas',
'%s %%{row} updated': '%s linhas atualizadas',
'%s Certificate': 'Certificado de %s',
'%s of %s': '%s de %s',
'%s selected': '%s selecionado',
'%Y-%m-%d': '%d-%m-%Y',
'%Y-%m-%d %H:%M:%S': '%d-%m-%Y %H:%M:%S',
'- %s from %s to %s': '- %s de %s a %s',
'- %s from %s to %s\n': '- %s de %s a %s',
'- Web applications com web2py from 2015-10-01 to 2015-10-31\n': '',
'- Web applications com web2py from 2016-02-01 to 2016-02-29\n': '- Web applications com web2py from 2016-02-01 to 2016-02-29\r\n',
'16% OFF': '',
'90% OFF': '90% OFF',
'?': '?',
'@markmin\x01An error occured, please [[reload %s]] the page': 'Ocorreu um erro, por favor [[reload %s]] a página',
'About': 'Sobre',
'Access': 'Acesse',
'Access Control': 'Controle de Acesso',
'Access the /appadmin to make at least one teacher user:': 'Acesse o /appadmin para criar pelo menos um professor.',
'Actions': 'Ações',
'Add more': 'Adicionar mais',
'Administrative interface': 'Interface administrativa',
'Administrative Interface': 'Interface Administrativa',
'Ajax Recipes': 'Receitas de Ajax',
'All certificates sent!': 'Todos os certificados enviados!',
'All Classes': 'Todas as Turmas',
'Alternative A': 'Alternativa A',
'Alternative B': 'Alternativa B',
'Alternative C': 'Alternativa C',
'Alternative D': 'Alternativa D',
'Amount': 'Total',
'Amount: R$%.2f': 'Total: R$%.2f',
'Amount: R$%.2f\n': 'Total: R$%.2f',
'Amount: R$150.00\n': '',
'and enroll!': 'e inscreva-se!',
'and go to': 'e vá para',
'Announcements': 'Anúncios',
'appadmin is disabled because insecure channel': 'Administração desativada porque o canal não é seguro',
'Apply changes': 'Aplicar mudanças',
'Are you sure you want to delete this object?': 'Você está certo que deseja apagar este objeto?',
'Available Databases and Tables': 'Bancos de dados e tabelas disponíveis',
'Available Until': 'Disponível Até',
'Banner': 'Banner',
'Body': 'Conteúdo',
'Buy Now': 'Pagar Agora',
'Buy this book': 'Compre o livro',
'Cache': 'Cache',
'cache': 'cache',
'Cache Cleared': 'Cache limpo',
'Cache Keys': 'Chaves de cache',
'Calendar': 'Calendário',
'Cannot be empty': 'Não pode ser vazio',
'Certificates': 'Certificados',
'Change password': 'Alterar senha',
'change password': 'modificar senha',
'Change Password': 'Alterar Senha',
'Check to delete': 'Marque para apagar',
'Class %s': 'Turma %s',
'Class Id': 'Id da Turma',
'Classes': 'Turmas',
'Clear CACHE?': 'Limpar CACHE?',
'Clear DISK': 'Limpar DISCO',
'Clear RAM': 'Limpar memória RAM',
'Client IP': 'IP do cliente',
'Closed': 'Fechado',
'Community': 'Comunidade',
'Components and Plugins': 'Componentes e Plugins',
'Confirm Password': 'Confirmar Senha',
'Confirmation Time': 'Horário da Confirmação',
'Confirmed': 'Confirmado',
'Contact': 'Contato',
'Continue Shopping': 'Continue Comprando',
'Controller': 'Controlador',
'Copyright': 'Copyright',
'Correct Alternative': 'Alternativa Correta',
'Course': 'Curso',
'Course Announcements': 'Anúncios do Curso',
'Course Id': 'Id do Curso',
"Course's end": 'Fim do Curso',
"Course's start": 'Início do Curso',
'Courses': 'Cursos',
'Current request': 'Requisição atual',
'Current response': 'Resposta atual',
'Current session': 'Sessão atual',
'customize me!': 'Personalize-me!',
'DASHBOARD': 'DASHBOARD',
'Dashboard': 'Dashboard',
'data uploaded': 'dados enviados',
'Database': 'banco de dados',
'Database %s select': 'Selecionar banco de dados %s',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'Date': 'Data',
'db': 'bd',
'DB Model': 'Modelo BD',
'Delete': 'Delete',
'delete': 'delete',
'Delete:': 'Apagar:',
'Demo': 'Demo',
'Denied': 'Negado',
'Deployment Recipes': 'Receitas de deploy',
'Description': 'Descrição',
'design': 'projeto',
'Details': 'Detalhes',
'Discount': 'Desconto',
'DISK': 'DISK',
'Disk Cache Keys': 'Chaves do Cache de Disco',
'Disk Cleared': 'Disco Limpo',
'Documentation': 'Documentação',
"Don't know what to do?": 'Não sabe o que fazer?',
'done!': 'concluído!',
'Download': 'Download',
'E-mail': 'E-mail',
'Edit': 'Editar',
'Edit current record': 'Editar o registro atual',
'edit profile': 'editar perfil',
'Edit This App': 'Editar esta aplicação',
'Email and SMS': 'Email e SMS',
'End': 'Fim',
'End date': 'Data de término',
'End Date': 'Data de Término',
'Enroll now!': 'Inscreva-se agora!',
'Enter a number between %(min)g and %(max)g': 'Insira um número entre %(min)g e %(max)g',
'Enter an integer between %(min)g and %(max)g': 'Informe um valor inteiro entre %(min)g e %(max)g',
'Enter an integer greater than or equal to %(min)g': 'Insira um número maior que ou igual a %(min)g',
'Enter the auth_membership table and associate your new user to the "Teacher" group': 'Entre na tabela auth_membership e associe seu novo usuário ao grupo "Teacher"',
'Enter the auth_user table and create a new record': 'Entre na tabela auth_user e crie um novo registro',
'Enter with your teacher user and create your course, classes and lessons': 'Entre com seu usário de professor e crie seu curso, suas turmas e aulas',
'Errors': 'Erros',
'Erros no formulário!': 'Erros no formulário!',
'export as csv file': 'exportar como um arquivo csv',
'FAQ': 'Perguntas frequentes',
'file': '',
'file ## download': 'file ',
'First name': 'Nome',
'First, import a template and signature!': 'Primeiro, importe uma imagem de fundo e uma assinatura!',
'Form has errors!': 'O formulário possui erros!',
'Forms and Validators': 'Formulários e Validadores',
'Forum': 'Fórum',
'FREE': 'GRÁTIS',
'Free Applications': 'Aplicações gratuitas',
'from %s to %s': 'de %s a %s',
'from 01 de October de 2015 to 31 de October de 2015': '',
'FULL!': 'CHEIO!',
'Generate Certificate': 'Gerar Certificado',
'Graph Model': 'Graph Model',
'Group ID': 'ID do Grupo',
'Groups': 'Grupos',
'has satisfactorily completed the course': 'concluiu satisfatoriamente o curso',
'Hello World': 'Olá Mundo',
'Home': 'Início',
'hours': 'horas',
'How did you get here?': 'Como você chegou aqui?',
'Icon': 'Ícone',
'If you want to test, just': 'Se você quiser testar, apenas',
"If you're sure you paid the order, please contact us. Otherwise, try to pay again later.": 'Se você tem certeza que pagou o pedido, por favor entre em contato conosco. Caso contrário, tente pagar novamente mais tarde.',
"If you're sure you paid the order, please contact us. Otherwise, try to pay again later.\n": 'Se você tem certeza que pagou o pedido, por favor entre em contato. Do contrário, tente pagar novamente mais tarde.',
'import': 'importar',
'Import/Export': 'Importar/Exportar',
'in a total of %d hours.': 'em um total de %d horas.',
'In Progress': 'Em Progresso',
'Index': 'Início',
'insert new': 'inserir novo',
'insert new %s': 'inserir novo %s',
'Insufficient privileges': 'Privilégios insuficientes',
'Interested? Submit your email below to be notified for the next open class.': 'Interessado? Insira seu email abaixo para ser notificado para a próxima turma aberta.',
'Interests': 'Interesses',
'Internal State': 'Estado Interno',
'Introduction': 'Introdução',
'Invalid email': 'Email inválido',
'Invalid login': 'Login inválido',
'Invalid Query': 'Consulta Inválida',
'invalid request': 'requisição inválida',
'Key': 'Chave',
'Last name': 'Sobrenome',
'Layout': 'Layout',
'Layout Plugins': 'Plugins de Layout',
'Layouts': 'Layouts',
'Lesson': 'Aula',
'Lesson Id': 'Id da Aula',
'Lesson scheduled for:': 'Aula agendada para:',
'Lesson Type': 'Tipo da Aula',
'Limit date:': 'Data limite:',
'limited to': 'limitada a',
'Live chat': 'Chat ao vivo',
'Live Chat': 'Chat ao vivo',
'Log In': 'Log In',
'Log Out': 'Log Out',
'Logged in': 'Logou',
'Logged out': 'Deslogou-se',
'Login': 'Autentique-se',
'login': 'Entrar',
'logout': 'Sair',
'Logout': 'Logout',
'Lost Password': 'Esqueceu sua senha?',
'Lost password?': 'Perdeu a senha?',
'lost password?': 'esqueceu sua senha?',
'Main Menu': 'Menu Principal',
'Manage %(action)s': 'Gerenciar %(action)s',
'Manage Access Control': 'Manage Access Control',
'Manage Cache': 'Gerenciar Cache',
'Manage courses': 'Gerenciar cursos',
'Max Students': 'Max Alunos',
'Max. Students': 'Max. Alunos',
'Memberships': 'Memberships',
'Menu Model': 'Modelo de Menu',
'Module': 'Módulo',
'Modules': 'Módulos',
'My Calendar': 'Meu Calendário',
'My Certificates': 'Meus Certificados',
'My Courses': 'Meus Cursos',
'My courses': 'Meus cursos',
'My Sites': 'Meus sites',
'My Work': 'Meu Serviço',
'Name': 'Nome',
'New': 'Novo',
'New announcement': 'Novo anúncio',
'New announcement on %s class': 'Novo anúncio na turma %s',
'New Class': 'Nova turma',
'New Course': 'Novo curso',
'New lesson': 'Nova aula',
'New module': 'Novo módulo',
'New password': 'Nova senha',
'New Record': 'Novo Registro',
'new record inserted': 'novo registro inserido',
'New Topic': 'Novo tópico',
'New topic': 'Novo tópico',
'next %s rows': 'Próximas %s linhas',
'next 100 rows': 'próximas 100 linhas',
'No announcements yet!': 'Sem anúncios ainda!',
'No databases in this application': 'Não há bancos de dados nesta aplicação',
'Not Authorized': 'Não Autorizado',
"Now, you won't be able to see the lessons anymore. But the forum, announcements and other resources are still available.": 'Agora você não conseguirá ver as aulas mais. Porém o fórum, os anúncios e outros recursos ainda estão disponíveis.',
'Object or table name': 'Nome do objeto ou da tabela',
'October 1th': '',
'Old password': 'Senha antiga',
'Online examples': 'Exemplos online',
'Open classes': 'Turmas abertas',
'Open Enrollment': 'Inscrições Abertas',
'or import from csv file': 'ou importar de um arquivo csv',
'Order': 'Pedido',
'Order Date': 'Data do Pedido',
'Order date': 'Data do Pedido',
'Order details': 'Detalhes do Pedido',
'Order Id': 'Id do Pedido',
'Order Nº': 'Nº do Pedido',
'Origin': 'Origem',
'Other Plugins': 'Outros Plugins',
'Other Recipes': 'Outras Receitas',
'Overview': 'Visão Geral',
'Owner': 'Criador',
'Password': 'Senha',
'Payment completed! Congratulations for your purchase!': 'Pagamento completo! Parabéns pela sua compra!',
'Payment confirmed!': 'Pagamento confirmado!',
'Payment History': 'Histórico de Pagamento',
'Pending': 'Pendente',
'Pending Id': 'Id da Pendência',
'Permission': 'Permissão',
'Permissions': 'Permissões',
'Place': 'Posição',
'Please, select which type of lesson you want to create.': 'Por favor, selecione qual tipo de aula você gostaria de criar.',
'Plugins': 'Plugins',
'Post': 'Postagem',
'Powered by': 'Desenvolvido com',
'Preface': 'Prefácio',
'Preview': 'Visualizar',
'previous %s rows': 'anteriores %s linhas',
'previous 100 rows': '100 linhas anteriores',
'Price': 'Preço',
'Products': 'Produtos',
'Professor': 'Professor',
'Profile': 'Perfil',
'pygraphviz library not found': 'pygraphviz library not found',
'Python': 'Python',
'Query:': 'Consulta:',
'Question': 'Pergunta',
'Quick Examples': 'Exemplos rápidos',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Recipes': 'Receitas',
'Record': 'Registro',
'Record Created': 'Registro Criado',
'record does not exist': 'registro não existe',
'Record ID': 'ID do Registro',
'Record id': 'id do registro',
'register': 'Registre-se',
'Register': 'Registre-se',
'register a normal user': 'registre um usuário comum',
'Registered On': 'Registrado Em',
'Registration identifier': 'Idenficador de registro',
'Registration key': 'Chave de registro',
'Release Date': 'Data de Início',
'Remember me (for 30 days)': 'Lembrar de mim (por 30 dias)',
'Replies': 'Respostas',
'Reply this user': 'Responder esse usuário',
'Reset Password key': 'Resetar chave de senha',
'Resources': 'Recursos',
'Role': 'Papel',
'Roles': 'Funções',
'Rows in Table': 'Linhas na tabela',
'Rows selected': 'Linhas selecionadas',
'said': 'disse',
'Save model as...': 'Salvar modelo como...',
'Schedule Date': 'Agendar data',
'Schedule event': 'Agendar evento',
'See more': 'Ver mais',
'Semantic': 'Semântico',
'Send to Students': 'Enviar para Estudantes',
'Services': 'Serviços',
'Settings': 'Configurações',
'Shopping Cart': 'Carrinho de Compras',
'Short Description': 'Descrição Curta',
'Sign Up': 'Registrar',
'Signature': 'Assinatura',
'Size of cache:': 'Tamanho do cache:',
"Something happened and we couldn't verify your payment.": 'Algo aconteceu e nós não conseguimos verificar seu pagamento.',
'Something went wrong!': 'Alguma coisa deu errado!',
'Sorry! Something bad happened!': 'Desculpe! Alguma coisa ruim aconteceu!',
'Start': 'Início',
'Start date': 'Data de Início',
'Start Date': 'Data de Início',
'Starting on': 'Começando em',
'state': 'estado',
'Statistics': 'Estatísticas',
'Status': 'Status',
'Student': 'Aluno',
'students max': 'alunos max',
'Stylesheet': 'Folha de estilo',
'Submit': 'Enviar',
'submit': 'enviar',
'Success!': 'Sucesso!',
'Support': 'Suporte',
'Sure you want to delete this object?': 'Está certo(a) que deseja apagar este objeto?',
'Table': 'Tabela',
'Table name': 'Nome da tabela',
'Take a look at our Courses': 'Dê uma olhada nos nossos Cursos',
'Template': 'Template',
'Text': 'Texto',
'Thank you for your purchase!': 'Obrigado pela sua compra!',
'Thank you!': 'Obrigado!',
'Thank you.': 'Obrigado.',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'Uma "consulta" é uma condição como "db.tabela1.campo1==\'valor\'". Expressões como "db.tabela1.campo1==db.tabela2.campo2" resultam em um JOIN SQL.',
'The Core': 'The Core',
'The following classes have a limit date of conclusion:': 'As seguintes turmas possuem uma data limite de conclusão:',
'The output of the file is a dictionary that was rendered by the view %s': 'A saída do arquivo é um dicionário que foi apresentado pela visão %s',
'The total amount was R$%.2f.': 'O valor total foi R$%.2f.',
'The total amount was R$%.2f.\n': 'O valor total foi R$%.2f.',
'The Views': 'As views',
'There is a new announcement on %s class.': 'Há um novo anúncio na turma %s.',
'There was a problem with this video!': 'Houve um problema com esse vídeo!',
'There was a problem with your payment!': 'Houve um problema com seu pagamento!',
'There was a problem with your payment!\n': 'Houve um problema com o seu pagamento!',
'These people are interested in your course %s': 'Estas pessoas estão interessadas no seu curso %s',
'This App': 'Esta aplicação',
'This class has a limit date for conclusion.': 'Esta turma tem uma data limite de conclusão.',
'This class reached the limit date': 'Esta turma atingiu a data limite',
'This course is already on your shopping cart!': 'Esse curso já está no seu carrinho de compras!',
'This email already has an account': 'Este email já tem uma conta',
'This is a copy of the scaffolding application': 'Isto é uma cópia da aplicação modelo',
'This is to certify that': 'Certifico para os devidos fins que',
"This means that, after the limit date, you won't be able to see the lessons anymore. Forum, announcements and other resources will still be available.": 'Isso significa que, após a data limite, você não será capaz de ver as aulas mais. Fórum, anúncios e outros recursos ainda estarão disponíveis.',
'Time in Cache (h:m:s)': 'Tempo em Cache (h:m:s)',
'Timestamp': 'Timestamp',
'Title': 'Título',
'to finish your payment.': 'para finalizar seu pagamento.',
'Token': 'Token',
'Total': 'Total',
'Total Hours': 'Horas Totais',
'Traceback': 'Traceback',
'Twitter': 'Twitter',
'unable to parse csv file': 'não foi possível analisar arquivo csv',
'Update:': 'Atualizar:',
'Upload Video': 'Upload do Vídeo',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, e ~(...) para NOT para construir consultas mais complexas.',
'User': 'Usuário',
'User Area - Courses': 'Área do Usuário - Cursos',
'User avatar': 'Avatar do Usuário',
'User Class': 'Turma do Usuário',
'User Id': 'Id do Usuário',
'User ID': 'ID do Usuário',
'User Lesson': 'Aula do Usuário',
'User Voice': 'Opinião dos usuários',
'Users': 'Usuários',
'Value not in database': 'Valor não está no banco de dados',
'Verify Password': 'Verificar senha',
'Video': 'Vídeo',
'Video file': 'Arquivo de Vídeo',
'Video URL': 'URL do Vídeo',
'Videos': 'Vídeos',
'View': 'Visualização',
'View course': 'Ver curso',
'Wait a few seconds.': 'Aguarde alguns segundos.',
"We couldn't find any video here. Please, alert your instructor about this problem!": 'Não conseguimos encontrar nenhum vídeo aqui. Por favor, informe seu instrutor sobre este problema!',
'We just confirmed your payment for order number %s.': 'Nós acabamos de confirmar o seu pagamento pelo pedido %s.',
'We just confirmed your payment for order number %s.\n': 'Acabamos de confirmar seu pagamento para o pedido número %s.',
'We just received your order number %s:': 'Acabamos de receber o seu pedido de número %s:',
'We just received your order number %s:\n': 'Acabamos de receber seu pedido de número %s:',
'We just received your order number bec36ec6:\n': 'We just received your order number bec36ec6:\r\n',
'We just received your order number e760809e:\n': '',
'We received your order!': 'Recebemos seu pedido!',
'We will wait and let you know when your payment is confirmed.': 'Vamos aguardar e avisá-lo quando seu pagamento for confirmado.',
'Web applications com web2py Certificate': 'Web applications com web2py Certificate',
'Web2py': 'Web2py',
'Welcome': 'Bem-vindo',
'Welcome %s': 'Bem-vindo %s',
'Welcome to web2py': 'Bem-vindo ao web2py',
'Welcome to web2py!': 'Bem-vindo ao web2py!',
'Which called the function %s located in the file %s': 'Que chamou a função %s localizada no arquivo %s',
"Why don't you follow this steps to start making your courses?": 'Por que não segue estes passos para começar a criar seus cursos?',
'Working...': 'Trabalhando...',
'You are already enrolled in this class to %s, so we removed it from your shopping cart.': 'Você já está inscrito nesta turma para o curso %s, então nós removemos ela do seu carrinho de compras.',
'You are already on the list for this course!': 'Você já está na lista para este curso!',
'You are already on this class!': 'Você já está nesta turma!',
'You are already registered!': 'Você já está registrado!',
'You are successfully running web2py': 'Você está executando o web2py com sucesso',
'You are successfully running web2py.': 'Você está executando o web2py com sucesso.',
'You can access it here: %s': 'Você pode acessá-lo aqui: %s',
'You can check your payment history after login in to your profile.': 'Você pode verificar o seu histórico de pagamentos após fazer o login no seu perfil.',
'You can check your payment history after login in to your profile.\n': 'Você pode checar seu histórico de pagamento após logar no seu perfil.',
'You can modify this application and adapt it to your needs': 'Você pode modificar esta aplicação e adaptá-la às suas necessidades',
'You have nothing in your shopping cart yet!': 'Você não tem nada no seu carrinho de compras ainda!',
'You visited the url %s': 'Você acessou a url %s',
"You're beeing redirected to a secure enviroment on Paypal": 'Você está sendo redirecionado para um ambiente seguro do Paypal',
'Your browser does not support the video tag.': 'O seu browser não suporta a tag de vídeo.',
'Your Certificate of Conclusion of %s is attached to this email. For more info, contact your teacher.\n\nCongratulations!': 'Seu Certificado de Conclusão do curso %s está anexado a este email. Para mais informações, entre em contato com o seu professor. Parabéns!',
'Your Certificate of Conclusion of Web applications com web2py is attached to this email. For more info, contact your teacher.\n\nCongratulations!': 'Your Certificate of Conclusion of Web applications com web2py is attached to this email. For more info, contact your teacher.\r\n\r\nCongratulations!',
'Your email': 'Seu email',
}
|
{
"content_hash": "c40063b1a425288da76d7f8e29d5a683",
"timestamp": "",
"source": "github",
"line_count": 448,
"max_line_length": 301,
"avg_line_length": 46.933035714285715,
"alnum_prop": 0.6977551602777513,
"repo_name": "juliarizza/web2courses",
"id": "5a88b0e4623fd78df99e4d1d64f2aa6923b02e3a",
"size": "21299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "languages/pt-br.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "201176"
},
{
"name": "HTML",
"bytes": "465615"
},
{
"name": "JavaScript",
"bytes": "47418"
},
{
"name": "PHP",
"bytes": "4548"
},
{
"name": "Python",
"bytes": "684729"
}
],
"symlink_target": ""
}
|
"""A test to verify an implementation of the Face layer of RPC Framework."""
import abc
import unittest
import six
from grpc.framework.face import interfaces
from tests.unit.framework.common import test_constants
from tests.unit.framework.face.testing import callback as testing_callback
from tests.unit.framework.face.testing import control
from tests.unit.framework.face.testing import coverage
from tests.unit.framework.face.testing import digest
from tests.unit.framework.face.testing import stock_service
from tests.unit.framework.face.testing import test_case
class EventInvocationSynchronousEventServiceTestCase(
six.with_metaclass(abc.ABCMeta,
test_case.FaceTestCase, coverage.FullCoverage)):
"""A test of the Face layer of RPC Framework.
Concrete subclasses must also extend unittest.TestCase.
"""
def setUp(self):
"""See unittest.TestCase.setUp for full specification.
Overriding implementations must call this implementation.
"""
self.control = control.PauseFailControl()
self.digest = digest.digest(
stock_service.STOCK_TEST_SERVICE, self.control, None)
self.stub, self.memo = self.set_up_implementation(
self.digest.name, self.digest.methods,
self.digest.event_method_implementations, None)
def tearDown(self):
"""See unittest.TestCase.tearDown for full specification.
Overriding implementations must call this implementation.
"""
self.tear_down_implementation(self.memo)
def testSuccessfulUnaryRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
self.stub.event_value_in_value_out(
name, request, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
callback.block_until_terminated()
response = callback.response()
test_messages.verify(request, response, self)
def testSuccessfulUnaryRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
self.stub.event_value_in_stream_out(
name, request, callback, callback.abort,
test_constants.SHORT_TIMEOUT)
callback.block_until_terminated()
responses = callback.responses()
test_messages.verify(request, responses, self)
def testSuccessfulStreamRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = testing_callback.Callback()
unused_call, request_consumer = self.stub.event_stream_in_value_out(
name, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
for request in requests:
request_consumer.consume(request)
request_consumer.terminate()
callback.block_until_terminated()
response = callback.response()
test_messages.verify(requests, response, self)
def testSuccessfulStreamRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_stream_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = testing_callback.Callback()
unused_call, request_consumer = self.stub.event_stream_in_stream_out(
name, callback, callback.abort, test_constants.SHORT_TIMEOUT)
for request in requests:
request_consumer.consume(request)
request_consumer.terminate()
callback.block_until_terminated()
responses = callback.responses()
test_messages.verify(requests, responses, self)
def testSequentialInvocations(self):
# pylint: disable=cell-var-from-loop
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
first_request = test_messages.request()
second_request = test_messages.request()
first_callback = testing_callback.Callback()
second_callback = testing_callback.Callback()
def make_second_invocation(first_response):
first_callback.complete(first_response)
self.stub.event_value_in_value_out(
name, second_request, second_callback.complete,
second_callback.abort, test_constants.SHORT_TIMEOUT)
self.stub.event_value_in_value_out(
name, first_request, make_second_invocation, first_callback.abort,
test_constants.SHORT_TIMEOUT)
second_callback.block_until_terminated()
first_response = first_callback.response()
second_response = second_callback.response()
test_messages.verify(first_request, first_response, self)
test_messages.verify(second_request, second_response, self)
def testExpiredUnaryRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
with self.control.pause():
self.stub.event_value_in_value_out(
name, request, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.EXPIRED, callback.abortion())
def testExpiredUnaryRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
with self.control.pause():
self.stub.event_value_in_stream_out(
name, request, callback, callback.abort,
test_constants.SHORT_TIMEOUT)
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.EXPIRED, callback.abortion())
def testExpiredStreamRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_unary_messages_sequences)):
for unused_test_messages in test_messages_sequence:
callback = testing_callback.Callback()
self.stub.event_stream_in_value_out(
name, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.EXPIRED, callback.abortion())
def testExpiredStreamRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_stream_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = testing_callback.Callback()
unused_call, request_consumer = self.stub.event_stream_in_stream_out(
name, callback, callback.abort, test_constants.SHORT_TIMEOUT)
for request in requests:
request_consumer.consume(request)
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.EXPIRED, callback.abortion())
def testFailedUnaryRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
with self.control.fail():
self.stub.event_value_in_value_out(
name, request, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.SERVICER_FAILURE,
callback.abortion())
def testFailedUnaryRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
with self.control.fail():
self.stub.event_value_in_stream_out(
name, request, callback, callback.abort,
test_constants.SHORT_TIMEOUT)
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.SERVICER_FAILURE,
callback.abortion())
def testFailedStreamRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = testing_callback.Callback()
with self.control.fail():
unused_call, request_consumer = self.stub.event_stream_in_value_out(
name, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
for request in requests:
request_consumer.consume(request)
request_consumer.terminate()
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.SERVICER_FAILURE,
callback.abortion())
def testFailedStreamRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_stream_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = testing_callback.Callback()
with self.control.fail():
unused_call, request_consumer = self.stub.event_stream_in_stream_out(
name, callback, callback.abort, test_constants.SHORT_TIMEOUT)
for request in requests:
request_consumer.consume(request)
request_consumer.terminate()
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.SERVICER_FAILURE, callback.abortion())
def testParallelInvocations(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
first_request = test_messages.request()
first_callback = testing_callback.Callback()
second_request = test_messages.request()
second_callback = testing_callback.Callback()
self.stub.event_value_in_value_out(
name, first_request, first_callback.complete, first_callback.abort,
test_constants.SHORT_TIMEOUT)
self.stub.event_value_in_value_out(
name, second_request, second_callback.complete,
second_callback.abort, test_constants.SHORT_TIMEOUT)
first_callback.block_until_terminated()
second_callback.block_until_terminated()
first_response = first_callback.response()
second_response = second_callback.response()
test_messages.verify(first_request, first_response, self)
test_messages.verify(second_request, second_response, self)
@unittest.skip('TODO(nathaniel): implement.')
def testWaitingForSomeButNotAllParallelInvocations(self):
raise NotImplementedError()
def testCancelledUnaryRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
with self.control.pause():
call = self.stub.event_value_in_value_out(
name, request, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
call.cancel()
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.CANCELLED, callback.abortion())
def testCancelledUnaryRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = testing_callback.Callback()
call = self.stub.event_value_in_stream_out(
name, request, callback, callback.abort,
test_constants.SHORT_TIMEOUT)
call.cancel()
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.CANCELLED, callback.abortion())
def testCancelledStreamRequestUnaryResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = testing_callback.Callback()
call, request_consumer = self.stub.event_stream_in_value_out(
name, callback.complete, callback.abort,
test_constants.SHORT_TIMEOUT)
for request in requests:
request_consumer.consume(request)
call.cancel()
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.CANCELLED, callback.abortion())
def testCancelledStreamRequestStreamResponse(self):
for name, test_messages_sequence in (
six.iteritems(self.digest.stream_stream_messages_sequences)):
for unused_test_messages in test_messages_sequence:
callback = testing_callback.Callback()
call, unused_request_consumer = self.stub.event_stream_in_stream_out(
name, callback, callback.abort, test_constants.SHORT_TIMEOUT)
call.cancel()
callback.block_until_terminated()
self.assertEqual(interfaces.Abortion.CANCELLED, callback.abortion())
|
{
"content_hash": "2371aa8eba9e02bfb1db1657b7267065",
"timestamp": "",
"source": "github",
"line_count": 349,
"max_line_length": 83,
"avg_line_length": 40.713467048710605,
"alnum_prop": 0.6880146386093321,
"repo_name": "miselin/grpc",
"id": "98b61e492c9f340aaaa7a959dd0b62c1631ff335",
"size": "15738",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/python/grpcio/tests/unit/framework/face/testing/event_invocation_synchronous_event_service_test_case.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "12656"
},
{
"name": "C",
"bytes": "4911267"
},
{
"name": "C#",
"bytes": "1107955"
},
{
"name": "C++",
"bytes": "1361012"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "JavaScript",
"bytes": "282482"
},
{
"name": "M4",
"bytes": "34062"
},
{
"name": "Makefile",
"bytes": "591839"
},
{
"name": "Objective-C",
"bytes": "274724"
},
{
"name": "PHP",
"bytes": "128996"
},
{
"name": "Protocol Buffer",
"bytes": "102293"
},
{
"name": "Python",
"bytes": "1586857"
},
{
"name": "Ruby",
"bytes": "498986"
},
{
"name": "Shell",
"bytes": "49986"
},
{
"name": "Swift",
"bytes": "5279"
}
],
"symlink_target": ""
}
|
import os
import subprocess
from collections import namedtuple
from .tools import is_exe, NotExecutableError
# factory class for cd_hit class returned values
Results = namedtuple("Results", "command fastaout clusters " +
"stdout stderr")
class Cd_hit_Error(Exception):
"""Exception raised when cd_hit fails"""
def __init__(self, message):
self.message = message
class Cd_hit(object):
"""Class for working with cd_hit"""
def __init__(self, exe_path):
"""Instantiate with location of executable"""
if not is_exe(exe_path):
msg = "{0} is not an executable".format(exe_path)
raise NotExecutableError(msg)
self._exe_path = exe_path
def run(self, fasta_in, threads, threshold, outdir, prefix, dry_run=False):
"""Run cd_hit to cluster passed fasta files
- fasta_in - fasta file to be clustered
- threshold - threshold to cluster at
- threads - number of threads for cd_hit to use
- outdir - output directory for clustering output
- prefix - file prefix for cd_hit output
- dry_run - if True, returns cmd-line but does not run
        Returns a Results namedtuple holding the command line, the output
        filenames and the STDOUT/STDERR captured from the cd_hit run.
"""
self.__build_cmd(fasta_in, threads, threshold, outdir, prefix)
if dry_run:
return(self._cmd)
pipe = subprocess.run(self._cmd, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=True)
results = Results(self._cmd, *self._outfnames, pipe.stdout,
pipe.stderr)
return results
def __build_cmd(self, fasta_in, threads, threshold, outdir, prefix):
"""Build a command-line for cd_hit_est.
cd_hit takes a path to an output directory PLUS the prefix of the
files to write, such that
-o a/b/cdefg
writes files
a/b/cdefg
a/b/cdefg.clstr
and so on.
        -d is added so the output clusters write out sequence names up to
        500 letters long; the default chops these at 20, which is too short.
        -M 0 is added to allow unlimited memory - not a problem for
        small jobs. If jobs are big, we will have to alter this.
"""
        # expected output files: <outdir>/<prefix>.fasta and <outdir>/<prefix>.clstr
self._outfnames = [os.path.join(outdir, prefix) + suffix for suffix in
('.fasta', '.clstr')]
cmd = ["cd-hit-est",
"-i", fasta_in,
"-o", os.path.join(outdir, prefix),
"-T {0}".format(threads),
"-M", "0",
"-c", str(threshold),
"-d", "500"]
self._cmd = ' '.join(cmd)
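# Hedged usage sketch: the cd-hit-est path and the input fasta are assumed
# values, not part of this module; dry_run=True only builds and returns the
# command line, so nothing is executed.
if __name__ == '__main__':
    clusterer = Cd_hit("/usr/local/bin/cd-hit-est")
    cmd = clusterer.run("reads.fasta", threads=4, threshold=0.99,
                        outdir="clusters", prefix="reads_0.99", dry_run=True)
    print(cmd)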
|
{
"content_hash": "2f17455fa76322767b599f705cb554fa",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 79,
"avg_line_length": 33.45348837209303,
"alnum_prop": 0.5630865484880083,
"repo_name": "widdowquinn/THAPBI-pycits",
"id": "dc16fc1f9490d92088e56e8472a3a4c5362af77d",
"size": "3265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycits/cd_hit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "469746"
},
{
"name": "Python",
"bytes": "199487"
},
{
"name": "UnrealScript",
"bytes": "31869"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import uuid
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from .utils import update_glance
@python_2_unicode_compatible
class Integration(models.Model):
SCOPE_SEND_NOTIFICATIONS = 'send_notification'
SCOPES = [(SCOPE_SEND_NOTIFICATIONS, SCOPE_SEND_NOTIFICATIONS)]
id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True)
name = models.CharField(max_length=255)
description = models.TextField()
key = models.CharField(max_length=50, blank=True, null=True)
homepage_url = models.URLField()
url = models.URLField()
scopes = models.CharField(max_length=50, choices=SCOPES)
room_installable = models.BooleanField(default=True)
globally_installable = models.BooleanField(default=False)
@property
def capabilities_url(self):
return '{}/capabilities'.format(self.get_url())
@property
def installed_url(self):
return '{}/installed'.format(self.get_url())
def get_key(self):
return self.key or self.id
def get_url(self):
return '{}/{}'.format(self.url, str(self.id))
def __str__(self):
return self.name
@python_2_unicode_compatible
class WebPanel(models.Model):
TYPE_SIDEBAR = 'sidebar'
TYPES = [(TYPE_SIDEBAR, TYPE_SIDEBAR)]
id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True)
integration = models.ForeignKey(Integration, related_name='panels')
panel_type = models.CharField(max_length=50, choices=TYPES)
key = models.CharField(max_length=50, blank=True, null=True)
name = models.CharField(max_length=255)
url = models.URLField()
location = models.CharField(max_length=255,
default='hipchat.sidebar.right')
icon_url = models.URLField(blank=True, null=True)
icon_url_2x = models.URLField(blank=True, null=True)
def get_key(self):
return self.key or self.id
def get_url(self):
return '{}/{}/{}'.format(self.integration.get_url(),
self.panel_type,
str(self.id))
def __str__(self):
return "{} @ {}".format(self.name, self.integration)
@python_2_unicode_compatible
class Glance(models.Model):
id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True)
integration = models.ForeignKey(Integration, related_name='glances')
key = models.CharField(max_length=50, blank=True, null=True)
name = models.CharField(max_length=255)
url = models.URLField()
target = models.CharField(max_length=255, blank=True, null=True)
icon_url = models.URLField(blank=True, null=True)
icon_url_2x = models.URLField(blank=True, null=True)
def get_key(self):
return self.key or self.id
def get_url(self):
return '{}/glance/{}'.format(self.integration.get_url(), str(self.id))
def update_label(self, installation, new_label):
"""
Updates glance label.
"""
return update_glance(
installation=installation,
glace=self,
new_label=new_label
)
def __str__(self):
return "{} @ {}".format(self.name, self.integration)
@python_2_unicode_compatible
class Webhook(models.Model):
EVENT_ROOM_MESSAGE = 'room_message'
EVENTS = [(EVENT_ROOM_MESSAGE, EVENT_ROOM_MESSAGE)]
id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True)
integration = models.ForeignKey(Integration, related_name='webhooks')
event = models.CharField(max_length=50, choices=EVENTS)
name = models.CharField(max_length=255)
url = models.URLField()
    pattern = models.CharField(max_length=255, blank=True, null=True)
    # `key` mirrors the other add-on models above; get_key() below reads it.
    key = models.CharField(max_length=50, blank=True, null=True)
def get_key(self):
return self.key or self.id
def get_url(self):
return '{}/webhook/{}'.format(self.integration.get_url(), str(self.id))
def __str__(self):
return "{} for {} @ {}".format(self.name, self.event, self.integration)
@python_2_unicode_compatible
class Installation(models.Model):
id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True)
integration = models.ForeignKey(Integration,
related_name='installations',
blank=True,
null=True)
capabilities_url = models.URLField()
room_id = models.IntegerField()
group_id = models.IntegerField()
oauth_id = models.UUIDField()
oauth_secret = models.CharField(max_length=255)
uninstalled = models.DateTimeField(blank=True, null=True)
def __str__(self):
return str(self.room_id)
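# Hedged usage sketch (assumes a configured Django project; all values are
# illustrative, not part of this module):
#
#     integration = Integration.objects.create(
#         name="Example Addon", description="Demo",
#         homepage_url="https://example.com",
#         url="https://addon.example.com/hipchat",
#         scopes=Integration.SCOPE_SEND_NOTIFICATIONS)
#     integration.capabilities_url
#     # -> "https://addon.example.com/hipchat/<integration-uuid>/capabilities"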
|
{
"content_hash": "fedfa59838b2f37e77754ac1e671ef9f",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 79,
"avg_line_length": 33.0979020979021,
"alnum_prop": 0.6465244031269808,
"repo_name": "the-it-dude/django_rest_hipchat",
"id": "30c1607ab37c6863ad4d1277dce231d2045dbb1b",
"size": "4733",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_rest_hipchat/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2161"
},
{
"name": "Python",
"bytes": "22293"
}
],
"symlink_target": ""
}
|
from books.model.VendorPayment import VendorPayment
from books.model.VendorPaymentList import VendorPaymentList
from books.model.Bill import Bill
class VendorPaymentsParser:
"""This class is used to parse the json response for Vendor payments."""
def get_list(self, resp):
"""This method parses the given response and returns vendor payments
list object.
Args:
resp(dict): Dictionary containing json object for vendor payments
list.
Returns:
instance: Vendor payments list object.
"""
vendor_payments_list = VendorPaymentList()
for value in resp['vendorpayments']:
vendor_payment = VendorPayment()
vendor_payment.set_payment_id(value['payment_id'])
vendor_payment.set_vendor_id(value['vendor_id'])
vendor_payment.set_vendor_name(value['vendor_name'])
vendor_payment.set_payment_mode(value['payment_mode'])
vendor_payment.set_description(value['description'])
vendor_payment.set_date(value['date'])
vendor_payment.set_reference_number(value['reference_number'])
vendor_payment.set_exchange_rate(value['exchange_rate'])
vendor_payment.set_amount(value['amount'])
vendor_payment.set_paid_through_account_id(value[\
'paid_through_account_id'])
vendor_payment.set_paid_through_account_name(value[\
'paid_through_account_name'])
vendor_payment.set_balance(value['balance'])
vendor_payments_list.set_vendor_payments(vendor_payment)
return vendor_payments_list
def get_vendor_payment(self, resp):
"""This method is used to parse the given response and returns vendor
payments object.
Args:
resp(dict): Dictionary containing json object for vendor payments.
Returns:
instance: Vendor payments object.
"""
vendor_payment_obj = VendorPayment()
vendor_payment = resp['vendorpayment']
vendor_payment_obj.set_payment_id(vendor_payment['payment_id'])
vendor_payment_obj.set_vendor_id(vendor_payment['vendor_id'])
vendor_payment_obj.set_vendor_name(vendor_payment['vendor_name'])
vendor_payment_obj.set_payment_mode(vendor_payment['payment_mode'])
vendor_payment_obj.set_description(vendor_payment['description'])
vendor_payment_obj.set_date(vendor_payment['date'])
vendor_payment_obj.set_reference_number(vendor_payment[\
'reference_number'])
vendor_payment_obj.set_exchange_rate(vendor_payment['exchange_rate'])
vendor_payment_obj.set_amount(vendor_payment['amount'])
vendor_payment_obj.set_currency_symbol(vendor_payment[\
'currency_symbol'])
vendor_payment_obj.set_paid_through_account_id(vendor_payment[\
'paid_through_account_id'])
vendor_payment_obj.set_paid_through_account_name(vendor_payment[\
'paid_through_account_name'])
for value in vendor_payment['bills']:
bill = Bill()
bill.set_bill_number(value['bill_number'])
bill.set_bill_payment_id(value['bill_payment_id'])
bill.set_bill_id(value['bill_id'])
bill.set_total(value['total'])
bill.set_balance(value['balance'])
bill.set_amount_applied(value['amount_applied'])
bill.set_date(value['date'])
bill.set_due_date(value['due_date'])
vendor_payment_obj.set_bills(bill)
return vendor_payment_obj
def get_message(self, resp):
"""This message parses the given response and returns message string.
Args:
resp(dict): Response containing json object for message.
Returns:
str: Success message.
"""
return resp['message']
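# Hedged usage sketch: a minimal hand-built response; real payloads come from
# the Zoho Books API and carry every key the parsers above read.
if __name__ == '__main__':
    parser = VendorPaymentsParser()
    print(parser.get_message({'message': 'The payment has been deleted.'}))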
|
{
"content_hash": "fab341a30398822d53a894694bc7c642",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 78,
"avg_line_length": 42.12903225806452,
"alnum_prop": 0.6311893823379275,
"repo_name": "zoho/books-python-wrappers",
"id": "9ae76acaf2deefb8986e240d3f0f71c2a66aad2e",
"size": "3926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "books/parser/VendorPaymentsParser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1012377"
}
],
"symlink_target": ""
}
|
from rest_framework.routers import Route, DynamicRoute, SimpleRouter
class JobRouter(SimpleRouter):
'''
A dedicated router for UWS services. The main difference is that a POST on an instance
maps to update, not PUT. Also actions are removed.
'''
routes = [
Route(
url=r'^{prefix}$',
mapping={
'get': 'list',
'post': 'create'
},
name='{basename}-list',
detail=False,
initkwargs={'suffix': 'List'}
),
Route(
url=r'^{prefix}/{lookup}$',
mapping={
'get': 'retrieve',
'post': 'update',
'delete': 'destroy'
},
name='{basename}-detail',
detail=True,
initkwargs={'suffix': 'Instance'}
),
DynamicRoute(
url=r'^{prefix}/{lookup}/{url_path}$',
name='{basename}-{url_name}',
detail=True,
initkwargs={}
)
]
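# Hedged usage sketch: 'JobViewSet' is a hypothetical ViewSet; only the router
# wiring itself is standard DRF API.
#
#     router = JobRouter(trailing_slash=False)
#     router.register(r'jobs', JobViewSet, basename='job')
#     urlpatterns = router.urls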
|
{
"content_hash": "35a36967146421da7ae3fd112a36ad55",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 90,
"avg_line_length": 27.394736842105264,
"alnum_prop": 0.4553314121037464,
"repo_name": "aipescience/django-daiquiri",
"id": "e0aa7c05c479c5ba2bc50b36bd7ca4c1ac55dd50",
"size": "1041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "daiquiri/jobs/routers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "28598"
},
{
"name": "HTML",
"bytes": "236579"
},
{
"name": "JavaScript",
"bytes": "97087"
},
{
"name": "Python",
"bytes": "602159"
}
],
"symlink_target": ""
}
|
"""
steamgames.py
Copyright 2013, Nate Collings
http://github.com/naiyt/steamgames
A small API wrapper intended to retrieve all available store information
for either a specific appid, or all appids.
Thanks for this SteamDB.info blog post for the idea on the best way to do this:
http://steamdb.info/blog/5/
This should give you all of the same info that, say, SteamCalculator's script does
(https://github.com/spezifanta/SteamCalculator-Scripts), but it's much more efficient and
can return much more info.
"""
import urllib2
import urllib
import json
import logging
from time import sleep
class Base:
"""Base class mainly for opening urls and chunking data"""
def _retry(self, url, time, retries):
"""If a url is unavaible, retries it "retries" number of times, with "time" space between tries"""
print "{} was unreachable, _retrying {} number of times".format(url, retries)
for num in range(retries):
try:
return urllib2.urlopen(url)
except:
sleep(time)
logging.warning("Couldn't reach {} after {} retries. Moving to next.".format(url, retries))
def _open_url(self, url):
try:
return urllib2.urlopen(url)
        # HTTPError is a subclass of URLError, so it must be caught first.
        except urllib2.HTTPError as e:
            logging.warning('HTTPError = {}'.format(str(e.code)))
            return self._retry(url, 5, 5)
        except urllib2.URLError as e:
            logging.warning('URLError = {}'.format(str(e.reason)))
            exit()
except ValueError:
logging.error('Not a proper url: {}'.format(url))
exit()
def _chunks(self, params, number):
"""Breaks a list into a set of equally sized chunked lists, with remaining entries in last list"""
for i in xrange(0, len(params), number):
yield params[i:i+number]
class Games(Base):
"""
Class used to retrieve either all Game objects, or specific ones given a list of appids.
Example:
games = Games()
all_games = games.get_all('US') # Get a generator with all game info
# Get a generator for just the appids you specify
some_games = games.get_appids_info([123,1245])
"""
def __init__(self,num=None):
"""
Num is how many we can check against the Steam API per iteration. Defaults to 200,
as I've had good success querying the API for 200 appids at a time.
"""
if num is None:
self.num = 200
else:
self.num = num
# appids_to_names is a dict mapping appid -> game names
# names_to_appids is a dict mapping names -> appids
# Probably not necessary to have both
self.appids_to_names, self.names_to_appids = None, None
def _create_url(self, appids, cc):
"""Given a list of appids, creates an API url to retrieve them"""
appids = [str(x) for x in appids]
list_of_ids = ','.join(appids)
data = {'appids': list_of_ids, 'cc': cc, 'l': 'english', 'v': '1'}
url_vals = urllib.urlencode(data)
return "http://store.steampowered.com/api/appdetails/?{}".format(url_vals)
def _get_urls(self, appids, cc):
"""Returns urls for all of appids"""
list_of_ids = list(self._chunks(appids,self.num))
all_urls = []
for x in list_of_ids:
all_urls.append(self._create_url(x, cc))
return all_urls
def get_all(self, cc):
"""
A generator that returns all games currently in the Steam Store as Game objects.
        This wraps around _get_games_from so that, even though we have separate urls for
        the games, calling this method gives you a single generator that yields
        one game object at a time.
"""
if self.appids_to_names is None or self.names_to_appids is None:
self.appids_to_names, self.names_to_appids = self.get_ids_and_names()
print "Chunking data..."
urls = self._get_urls(self.appids_to_names.keys(), cc)
for url in urls:
curr_games = self._get_games_from(url)
for game in curr_games:
yield game
def _get_games_from(self, url):
"""
        This generator actually creates the Game objects, and can be called from
        any method that passes it a url to create Game objects from.
"""
page = json.loads(self._open_url(url).read())
for appid in page:
game = Game(page[appid], appid)
if game.success:
yield game
def get_info_for(self, appids, cc):
"""Given a list of appids, returns their Game objects"""
urls = self._get_urls(appids, cc)
for url in urls:
print "Opening a new page of games..."
curr_games = self._get_games_from(url)
for game in curr_games:
yield game
def get_ids_and_names(self):
"""Returns all appids in the store as a dictionary mapping appid to game_name"""
url = self._open_url("http://api.steampowered.com/ISteamApps/GetAppList/v2")
url_info = json.loads(url.read())
all_ids = {}
all_names = {}
for app in url_info['applist']['apps']:
all_ids[app['appid']] = app['name']
all_names[app['name']] = app['appid']
return all_ids, all_names
def get_id(self, game_name):
"""Given an appid, returns the game name"""
if game_name in self.names_to_appids:
return self.names_to_appids[game_name]
def get_name(self, appid):
"""Given a game name returns its appid"""
if game_name in self.appids_to_names:
return self.appids_to_names[appid]
class Game(Base):
"""
The actual Game() object -- really this is just a wrapper around the base
json response from Steam, that makes it a bit easier to go through the data.
"""
def __init__(self, game_json, appid):
"""
        Currently, this just sets member variables for the various values
that the game object should have. Not all of these exist on all
appids, so there's some defaults whenever there is a key error.
I'll admit this looks kind of nasty, but it works. Perhaps someone
would be willing to make this look a bit better/more Pythonic?
"""
self.appid = appid
if 'success' in game_json:
self.success = game_json['success']
if self.success:
self.store_url = self._calc_store_url(self.appid)
data = game_json['data']
self.type = data['type']
self.description = data['detailed_description']
try:
self.name = data['name']
except KeyError:
self.name = "No Name"
try:
self.supported_languages = data['supported_languages']
except KeyError:
self.supported_languages = None
self.header_image = data['header_image']
self.website = data['website']
try:
self.currency = data['price_overview']['currency']
self.price = self._calc_price(data['price_overview']['initial'])
self.discounted_price = self._calc_price(data['price_overview']['final'])
self.discount_percent = data['price_overview']['discount_percent']
except KeyError:
self.currency = None
self.price = 0
self.discounted_price = 0
self.discount_percent = 0
try:
self.packages = data['packages']
except KeyError:
self.packages = None
self.platforms = data['platforms']
try:
self.categories = data['categories']
except KeyError:
self.categories = None
else:
print "Error! Can't read the game for {}".format(appid)
def _calc_price(self, amount):
"""Prices from the API are represented by cents -- convert to dollars"""
return float(amount) / 100.0
def _calc_store_url(self, appid):
return "http://store.steampowered.com/app/{}".format(appid)
|
{
"content_hash": "98bf58c88f1e0721d3acc1d8c8c31846",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 100,
"avg_line_length": 30.44871794871795,
"alnum_prop": 0.6819649122807018,
"repo_name": "naiyt/steamplaytime",
"id": "c16e77d65c21def50c04ccd053d990688cbfad2e",
"size": "7125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/SteamAPI/steamgames.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "84094"
},
{
"name": "JavaScript",
"bytes": "57269"
},
{
"name": "Python",
"bytes": "76015"
}
],
"symlink_target": ""
}
|
import flask
import werkzeug.exceptions
from digits.webapp import app, scheduler, autodoc
from digits.utils.routing import request_wants_json
import images.views
import images as dataset_images
from digits.base_workspace import *
NAMESPACE = '/datasets/'
@app.route(NAMESPACE + '<job_id>.json', methods=['GET'])
@app.route(NAMESPACE + '<job_id>', methods=['GET'])
@autodoc(['datasets', 'api'])
def datasets_show(job_id):
"""
Show a DatasetJob
Returns JSON when requested:
{id, name, directory, status}
"""
workspace = get_workspace_details(flask.request.url)
job = scheduler.get_job(job_id)
if job is None:
raise werkzeug.exceptions.NotFound('Job not found')
if request_wants_json():
return flask.jsonify(job.json_dict(True))
else:
if isinstance(job, dataset_images.ImageClassificationDatasetJob):
return dataset_images.classification.views.show(job, workspace)
elif isinstance(job, dataset_images.FeatureExtractionDatasetJob):
return dataset_images.extraction.views.show(job, workspace)
else:
raise werkzeug.exceptions.BadRequest('Invalid job type')
@app.route(NAMESPACE + 'summary', methods=['GET'])
@autodoc('datasets')
def dataset_summary():
"""
Return a short HTML summary of a DatasetJob
"""
workspace = get_workspace_details(flask.request.url)
job = scheduler.get_job(flask.request.args['job_id'])
if job is None:
raise werkzeug.exceptions.NotFound('Job not found')
    return flask.render_template('datasets/summary.html', dataset=job, workspace=workspace)
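# Hedged usage sketch: with the Flask app serving on localhost:5000, the JSON
# branch of datasets_show can be exercised like this (job id is illustrative):
#
#     curl -H 'Accept: application/json' \
#         http://localhost:5000/datasets/20160107-123456-abcd.json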
|
{
"content_hash": "c94b7c8507b13c15e8ebeb1c36c887b5",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 93,
"avg_line_length": 33.204081632653065,
"alnum_prop": 0.693300553165335,
"repo_name": "DESHRAJ/DIGITS",
"id": "be9c670a8ecf6d43bf7be9c38565e4686e93619b",
"size": "1697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "digits/dataset/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "9286"
},
{
"name": "CoffeeScript",
"bytes": "3296"
},
{
"name": "Groff",
"bytes": "94371300"
},
{
"name": "HTML",
"bytes": "305108"
},
{
"name": "JavaScript",
"bytes": "1172380"
},
{
"name": "Python",
"bytes": "450498"
},
{
"name": "Shell",
"bytes": "1462"
}
],
"symlink_target": ""
}
|
"""PyGotham user profiles."""
from flask import (
Blueprint, flash, g, redirect, render_template, request, url_for
)
from flask_login import current_user
from flask_security import login_required
from pygotham.core import db
from pygotham.frontend import route
from pygotham.models import Talk, Volunteer
__all__ = ('blueprint',)
blueprint = Blueprint(
'profile',
__name__,
subdomain='<event_slug>',
url_prefix='/profile',
)
@route(blueprint, '/dashboard/')
@login_required
def dashboard():
"""Return the user's dashboard."""
# TODO: Optionally, old proposals should be shown in a read-only mode.
talks = Talk.query.current.filter(Talk.user == current_user)
return render_template(
'profile/dashboard.html', talks=talks)
@route(blueprint, '/settings/', methods=('GET', 'POST'))
@login_required
def settings():
"""Return the user's settings."""
# TODO: How should this be handled? Should a speaker's bio be stored
# as a snapshot from event to event? It could be stored as part of a
# talks.models.Presentation.
from pygotham.forms import ProfileForm
form = ProfileForm(request.form, obj=current_user)
if form.validate_on_submit():
form.populate_obj(current_user)
db.session.commit()
flash('Your profile has been updated.', 'success')
return redirect(url_for('profile.settings'))
return render_template('profile/settings.html', form=form)
@route(blueprint, '/unvolunteer/')
@login_required
def unvolunteer():
"""Remove a user from being a volunteer."""
if current_user.is_volunteer:
volunteer = Volunteer.query.current.filter(
Volunteer.user == current_user).first()
db.session.delete(volunteer)
db.session.commit()
flash("We're sorry to see you change your mind!")
return redirect(url_for('profile.dashboard'))
@route(blueprint, '/volunteer/')
@login_required
def volunteer():
"""Sign up a user as a volunteer."""
if not current_user.is_volunteer:
volunteer = Volunteer(user=current_user, event=g.current_event)
db.session.add(volunteer)
db.session.commit()
flash('Thanks for volunteering!')
return redirect(url_for('profile.dashboard'))
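# Hedged registration sketch: this blueprint routes on an event subdomain, so
# the app needs SERVER_NAME configured; the value below is illustrative.
#
#     app.config['SERVER_NAME'] = 'pygotham.example'
#     app.register_blueprint(blueprint)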
|
{
"content_hash": "0176f824cb31dc8cdc7d8c5e4ac17453",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 74,
"avg_line_length": 29.763157894736842,
"alnum_prop": 0.6777188328912467,
"repo_name": "PyGotham/pygotham",
"id": "0c05332dfdbe3d2976d617838021fd298bcf788e",
"size": "2262",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pygotham/frontend/profile.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "56204"
},
{
"name": "HTML",
"bytes": "37411"
},
{
"name": "JavaScript",
"bytes": "116"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "113486"
},
{
"name": "Ruby",
"bytes": "1526"
},
{
"name": "Shell",
"bytes": "587"
}
],
"symlink_target": ""
}
|
from daapserver.utils import parse_byte_range
import shutil
import gevent
import gevent.queue
def stream_from_remote(lock, remote_fd, target_file, chunk_size=32768,
on_cache=None):
"""
Spawn a greenlet to download and cache a file, while simultaniously stream
data to the receiver. An additional greenlet is spawned to handle the file
download and caching. Every time another block (of interest, depending on
start and stop) is available, it will be written to the queue. The streamer
blocks until a block of interest is available.
    :param lock: Lock held while downloading and caching the file.
    :param file remote_fd: File descriptor of remote file to stream.
:param str target_file: Path to target file name. Must be writeable.
:param int chunk_size: Chunk size to use when reading remote.
:param callable on_cache: Callback method to invoke when streaming is done.
"""
temp_file = "%s.temp" % target_file
queue = gevent.queue.Queue()
def _downloader():
exhausted = False
bytes_read = 0
with open(temp_file, "wb") as local_fd:
try:
while True:
chunk = remote_fd.read(chunk_size)
if not chunk:
exhausted = True
break
local_fd.write(chunk)
bytes_read += len(chunk)
# Yield in the form of (chunk_begin, chunk_end, chunk)
yield bytes_read - len(chunk), bytes_read, chunk
finally:
# Make sure the remaining bytes are read from remote and
# written to disk.
if not exhausted:
while True:
chunk = remote_fd.read(chunk_size)
if not chunk:
break
local_fd.write(chunk)
bytes_read += len(chunk)
# Move the temp file to the target file. On the same disk, this
# should be an atomic operation.
shutil.move(temp_file, target_file)
# Mark done, for the on_cache
exhausted = True
# Invoke callback, if fully exhausted
if exhausted and on_cache:
on_cache(bytes_read)
def _cacher(begin, end):
put = False
# Hack (1)
old_owner, lock._owner = lock._owner, gevent.getcurrent()
with lock:
try:
for chunk_begin, chunk_end, chunk in _downloader():
# Ensure that the chunk we have downloaded is a chunk that
# we are interested in. For instance, we may need the
                    # middle part of a song, but this means that the
                    # beginning should be downloaded (and saved to file) first.
if (chunk_begin <= begin < chunk_end) or \
(chunk_begin <= end < chunk_end):
put = not put
if put:
queue.put((chunk_begin, chunk_end, chunk))
finally:
# Make sure the streamer stops
queue.put(StopIteration)
# Hack (2)
lock._owner = old_owner
def _streamer(byte_range=None):
begin, end = parse_byte_range(byte_range)
# Spawn the download greenlet.
greenlet = gevent.spawn(_cacher, begin, end)
try:
put = False
# At this point, the '_cacher' greenlet is already running and
# should have downloaded (part of) the file. The part that we are
# interested in will be put in the queue.
for chunk_begin, chunk_end, chunk in queue:
if (chunk_begin <= begin < chunk_end) or \
(chunk_begin <= end < chunk_end):
put = not put
if put:
i = max(0, begin - chunk_begin)
j = min(len(chunk), end - chunk_begin)
yield chunk[i:j]
finally:
# Make sure the greenlet gets killed when this iterator is closed.
greenlet.kill()
return _streamer
def stream_from_file(lock, fd, file_size, on_start=None, on_finish=None):
"""
Create an iterator that streams a file partially or all at once.
"""
def _streamer(byte_range=None):
begin, end = parse_byte_range(byte_range, max_byte=file_size)
try:
if on_start:
on_start()
with lock:
fd.seek(begin)
chunk = fd.read(end - begin)
yield chunk
finally:
if on_finish:
on_finish()
return _streamer
def stream_from_buffer(lock, data, file_size, chunk_size=32768, on_start=None,
on_finish=None):
"""
"""
def _streamer(byte_range=None):
begin, end = parse_byte_range(byte_range, max_byte=file_size)
# Yield data in chunks
try:
if on_start:
on_start()
while True:
with lock:
chunk = data[begin:min(end, begin + chunk_size)]
# Send the data
yield chunk
# Increment offset
begin += len(chunk)
# Stop when the end has been reached
if begin >= end:
break
finally:
if on_finish:
on_finish()
return _streamer
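# Hedged usage sketch: streams an in-memory buffer in 4-byte chunks. Any
# context-manager lock works here; passing byte_range=None assumes
# parse_byte_range maps None to the full (0, file_size) range.
if __name__ == "__main__":
    import threading
    streamer = stream_from_buffer(threading.Lock(), b"abcdefghij", 10,
                                  chunk_size=4)
    print(b"".join(streamer()))  # b'abcdefghij'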
|
{
"content_hash": "8da4c93dfe07182b52b01a953d2f4636",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 79,
"avg_line_length": 31.581920903954803,
"alnum_prop": 0.5146690518783542,
"repo_name": "ties/SubDaap",
"id": "6a39ee4ca1f7ea8ce1c9afb9641d995c795b9cfe",
"size": "5590",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "subdaap/stream.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "9188"
},
{
"name": "Python",
"bytes": "119924"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class TextValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(self, plotly_name="text", parent_name="box", **kwargs):
super(TextValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
{
"content_hash": "c57a29057e777d1299bc57896706be11",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 72,
"avg_line_length": 36.84615384615385,
"alnum_prop": 0.5887265135699373,
"repo_name": "plotly/python-api",
"id": "fe17f2cc81406cc887fe22f6de08d9c4d29b6c55",
"size": "479",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/box/_text.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
import functools
import parsy
from public import public
import ibis.expr.datatypes.core as dt
from ibis.common.exceptions import IbisTypeError
from ibis.common.parsing import (
COLON,
COMMA,
FIELD,
LANGLE,
LPAREN,
NUMBER,
PRECISION,
RANGLE,
RAW_NUMBER,
RAW_STRING,
RPAREN,
SCALE,
SEMICOLON,
spaceless,
spaceless_string,
)
@public
@functools.lru_cache(maxsize=100)
def parse(text: str) -> dt.DataType:
"""Parse a type from a [`str`][str] `text`.
The default `maxsize` parameter for caching is chosen to cache the most
commonly used types--there are about 30--along with some capacity for less
common but repeatedly-used complex types.
Parameters
----------
text
The type string to parse
Examples
--------
Parse an array type from a string
>>> import ibis
>>> import ibis.expr.datatypes as dt
>>> dt.parse("array<int64>")
Array(value_type=Int64(nullable=True), nullable=True)
You can avoid parsing altogether by constructing objects directly
>>> import ibis
>>> import ibis.expr.datatypes as dt
>>> ty = dt.parse("array<int64>")
>>> ty == dt.Array(dt.int64)
True
"""
srid = NUMBER
geotype = spaceless_string("geography") | spaceless_string("geometry")
@parsy.generate
def srid_geotype():
yield SEMICOLON
sr = yield srid
yield COLON
gt = yield geotype
return (gt, sr)
@parsy.generate
def geotype_part():
yield COLON
gt = yield geotype
return (gt, None)
@parsy.generate
def srid_part():
yield SEMICOLON
sr = yield srid
return (None, sr)
def geotype_parser(name, type):
name_parser = spaceless_string(name)
geosubtype_parser = srid_geotype | geotype_part | srid_part
@parsy.generate
def parser():
yield name_parser
sr_gt = yield geosubtype_parser.optional()
return type(*sr_gt) if sr_gt is not None else type()
return parser
primitive = (
spaceless_string("boolean").result(dt.boolean) # docprecated
| spaceless_string("bool").result(dt.boolean)
| spaceless_string("int8").result(dt.int8)
| spaceless_string("int16").result(dt.int16)
| spaceless_string("int32").result(dt.int32)
| spaceless_string("int64").result(dt.int64)
| spaceless_string("uint8").result(dt.uint8)
| spaceless_string("uint16").result(dt.uint16)
| spaceless_string("uint32").result(dt.uint32)
| spaceless_string("uint64").result(dt.uint64)
| spaceless_string("halffloat").result(dt.float16) # docprecated
| spaceless_string("double").result(dt.float64) # docprecated
| spaceless_string("float16").result(dt.float16)
| spaceless_string("float32").result(dt.float32)
| spaceless_string("float64").result(dt.float64)
| spaceless_string("float").result(dt.float64)
| spaceless_string("string").result(dt.string)
| spaceless_string("binary").result(dt.binary) # docprecated
| spaceless_string("bytes").result(dt.binary)
| spaceless_string("timestamp").result(dt.Timestamp())
| spaceless_string("time").result(dt.time)
| spaceless_string("date").result(dt.date)
| spaceless_string("category").result(dt.category)
| spaceless_string("geometry").result(dt.GeoSpatial(geotype='geometry'))
| spaceless_string("geography").result(dt.GeoSpatial(geotype='geography'))
| spaceless_string("null").result(dt.null)
| geotype_parser("linestring", dt.LineString)
| geotype_parser("polygon", dt.Polygon)
| geotype_parser("point", dt.Point)
| geotype_parser("multilinestring", dt.MultiLineString)
| geotype_parser("multipolygon", dt.MultiPolygon)
| geotype_parser("multipoint", dt.MultiPoint)
)
@parsy.generate
def varchar_or_char():
yield spaceless_string("varchar", "char").then(
LPAREN.then(RAW_NUMBER).skip(RPAREN).optional()
)
return dt.String()
@parsy.generate
def decimal():
yield spaceless_string("decimal")
precision, scale = (
yield LPAREN.then(
parsy.seq(spaceless(PRECISION).skip(COMMA), spaceless(SCALE))
)
.skip(RPAREN)
.optional()
) or (None, None)
return dt.Decimal(precision=precision, scale=scale)
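    # Added note: "decimal(38, 9)" parses to Decimal(precision=38, scale=9),
    # while a bare "decimal" falls through .optional() above and yields
    # Decimal(precision=None, scale=None).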
@parsy.generate
def parened_string():
yield LPAREN
s = yield RAW_STRING
yield RPAREN
return s
@parsy.generate
def timestamp():
yield spaceless_string("timestamp")
tz = yield parened_string
return dt.Timestamp(tz)
@parsy.generate
def angle_type():
yield LANGLE
value_type = yield ty
yield RANGLE
return value_type
@parsy.generate
def interval():
yield spaceless_string("interval")
value_type = yield angle_type.optional()
unit = yield parened_string.optional()
return dt.Interval(
value_type=value_type,
unit=unit if unit is not None else "s",
)
@parsy.generate
def array():
yield spaceless_string("array")
value_type = yield angle_type
return dt.Array(value_type)
@parsy.generate
def set():
yield spaceless_string("set")
value_type = yield angle_type
return dt.Set(value_type)
@parsy.generate
def map():
yield spaceless_string("map")
yield LANGLE
key_type = yield primitive
yield COMMA
value_type = yield ty
yield RANGLE
return dt.Map(key_type, value_type)
spaceless_field = spaceless(FIELD)
@parsy.generate
def struct():
yield spaceless_string("struct")
yield LANGLE
field_names_types = yield (
parsy.seq(spaceless_field.skip(COLON), ty)
.combine(lambda field, ty: (field, ty))
.sep_by(COMMA)
)
yield RANGLE
return dt.Struct.from_tuples(field_names_types)
@parsy.generate
def nullable():
yield spaceless_string("!")
parsed_ty = yield ty
return parsed_ty(nullable=False)
ty = (
nullable
| timestamp
| primitive
| decimal
| varchar_or_char
| interval
| array
| set
| map
| struct
| spaceless_string("jsonb").result(dt.jsonb)
| spaceless_string("json").result(dt.json)
| spaceless_string("uuid").result(dt.uuid)
| spaceless_string("macaddr").result(dt.macaddr)
| spaceless_string("inet").result(dt.inet)
| spaceless_string("geography").result(dt.geography)
| spaceless_string("geometry").result(dt.geometry)
| spaceless_string("int").result(dt.int64)
| spaceless_string("str").result(dt.string)
)
return ty.parse(text)
@dt.dtype.register(str)
def from_string(value: str) -> dt.DataType:
try:
return parse(value)
except SyntaxError:
raise IbisTypeError(f'{value!r} cannot be parsed as a datatype')
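if __name__ == "__main__":
    # Minimal smoke test (an added sketch, not part of the original module):
    # exercise the combinators above on a nested type and on the "!"
    # non-nullable prefix, comparing against direct construction as in the
    # parse() docstring. The dt.Int64(nullable=False) spelling is an
    # assumption based on the repr shown in that docstring.
    assert parse("map<string, array<int64>>") == dt.Map(dt.string, dt.Array(dt.int64))
    assert parse("!int64") == dt.Int64(nullable=False)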
|
{
"content_hash": "795d8f232c85f54c2cb4a7789284a63d",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 82,
"avg_line_length": 29.14,
"alnum_prop": 0.6024708304735759,
"repo_name": "ibis-project/ibis",
"id": "9ef758aa131647150fe0b7bb0b0e1e4e5bb1026a",
"size": "7285",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ibis/expr/datatypes/parse.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "44931"
},
{
"name": "CMake",
"bytes": "1862"
},
{
"name": "Dockerfile",
"bytes": "70"
},
{
"name": "JavaScript",
"bytes": "2713"
},
{
"name": "Nix",
"bytes": "11917"
},
{
"name": "Python",
"bytes": "2958224"
},
{
"name": "Shell",
"bytes": "3167"
}
],
"symlink_target": ""
}
|
"""
Created on 18 Feb 2017
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
"""
from scs_core.data.datetime import LocalizedDatetime
from scs_core.sample.climate_sample import ClimateSample
from scs_core.sampler.sampler import Sampler
# --------------------------------------------------------------------------------------------------------------------
class ClimateSampler(Sampler):
"""
classdocs
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, runner, tag, sht, barometer=None, altitude=None):
"""
Constructor
"""
Sampler.__init__(self, runner)
self.__tag = tag
self.__sht = sht
self.__barometer = barometer
self.__altitude = altitude
# ----------------------------------------------------------------------------------------------------------------
def reset(self):
Sampler.reset(self)
self.__sht.reset()
def sample(self):
sht_sample = self.__sht.sample()
# TODO: get the altitude from GPS if necessary
if self.__barometer:
barometer_sample = self.__barometer.sample(altitude=self.__altitude, include_temp=False)
else:
barometer_sample = None
recorded = LocalizedDatetime.now().utc() # after sampling, so that we can monitor resource contention
return ClimateSample(self.__tag, recorded, sht_sample, barometer_sample)
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "ClimateSampler:{runner:%s, tag:%s, sht:%s, barometer:%s, altitude:%s}" % \
(self.runner, self.__tag, self.__sht, self.__barometer, self.__altitude)
|
{
"content_hash": "152a12dc0ca1e8781873a065ca9f66fe",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 118,
"avg_line_length": 29.761904761904763,
"alnum_prop": 0.45653333333333335,
"repo_name": "south-coast-science/scs_dev",
"id": "63e782b36bfd5602d0bcc594b89599c7f559fb68",
"size": "1875",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/scs_dev/sampler/climate_sampler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "264120"
},
{
"name": "Shell",
"bytes": "2059"
}
],
"symlink_target": ""
}
|
"""Mock implementations of OpenFlow vendor extension interfaces.
"""
import struct
from zope import interface
from openfaucet import ofaction
from openfaucet import ofproto
MOCK_VENDOR_ID = 0x4242
class MockVendorAction(ofaction.vendor_action(
'MockVendorAction', MOCK_VENDOR_ID, '!2xL', ('dummy',))):
subtype = 0x1664
class MockVendorHandler(object):
"""A mock vendor extension implementation.
    All callbacks made to this object are recorded as tuples in the
    calls_made attribute.
"""
interface.implements(ofproto.IOpenflowVendorHandler)
vendor_id = MOCK_VENDOR_ID
def __init__(self):
# A list of tuples representing calls.
self.calls_made = []
def connection_made(self):
pass
def connection_lost(self, reason):
pass
def handle_vendor_message(self, msg_length, xid, buf):
bytes = buf.read_bytes(msg_length - 12) # Consume the remaining bytes.
self.calls_made.append(('handle_vendor_message', msg_length, xid,
bytes))
def serialize_vendor_action(self, action):
self.calls_made.append(('serialize_vendor_action', action))
subtype = action.subtype
if subtype != MockVendorAction.subtype:
raise ValueError('wrong vendor action subtype', subtype)
header = struct.pack('!H', subtype)
return (header, action.serialize())
def deserialize_vendor_action(self, action_length, buf):
subtype = buf.unpack('!H')[0]
if subtype != MockVendorAction.subtype:
raise ValueError('wrong vendor action subtype', subtype)
a = MockVendorAction.deserialize(buf)
self.calls_made.append(('deserialize_vendor_action', action_length, a))
return a
def handle_vendor_stats_request(self, msg_length, xid, buf):
bytes = buf.read_bytes(msg_length - 16) # Consume the remaining bytes.
self.calls_made.append(('handle_vendor_stats_request', msg_length,
xid, bytes))
def handle_vendor_stats_reply(self, msg_length, xid, buf, reply_more):
bytes = buf.read_bytes(msg_length - 16) # Consume the remaining bytes.
self.calls_made.append(('handle_vendor_stats_reply', msg_length,
xid, bytes, reply_more))
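if __name__ == '__main__':
    # Minimal usage sketch (added, not part of the original module): record a
    # serialize round-trip in calls_made. Constructing the action with a
    # keyword argument assumes the class generated by ofaction.vendor_action
    # behaves like a namedtuple.
    handler = MockVendorHandler()
    action = MockVendorAction(dummy=0x12345678)
    header, body = handler.serialize_vendor_action(action)
    assert header == struct.pack('!H', MockVendorAction.subtype)
    assert handler.calls_made == [('serialize_vendor_action', action)]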
|
{
"content_hash": "5105b46cea7a462ac19a8ef147eb235a",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 79,
"avg_line_length": 32.774647887323944,
"alnum_prop": 0.6454662655779975,
"repo_name": "rlenglet/openfaucet",
"id": "5122030aa1126a07cd0ba4cee584c122c3489694",
"size": "2903",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/openfaucet/mock_vendor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "473090"
},
{
"name": "Shell",
"bytes": "616"
}
],
"symlink_target": ""
}
|
import os
import pytest
from keras.layers import *
from keras.models import Model
from zoo.models.textmatching import KNRM
from test.zoo.pipeline.utils.test_utils import ZooTestCase
np.random.seed(1337) # for reproducibility
resource_path = os.path.join(os.path.split(__file__)[0], "../../resources")
glove_path = os.path.join(resource_path, "glove.6B/glove.6B.50d.txt")
class TestKNRM(ZooTestCase):
# Model definition from MatchZoo rewritten in Keras 1.2.2
@staticmethod
def keras_knrm(text1_length, text2_length, vocab_size, embed_size,
kernel_num=21, sigma=0.1, exact_sigma=0.001):
def kernel_layer(mu, sigma):
def kernel(x):
return K.tf.exp(-0.5 * (x - mu) * (x - mu) / sigma / sigma)
return Activation(kernel)
query = Input(name='query', shape=(text1_length, ))
doc = Input(name='doc', shape=(text2_length, ))
embedding = Embedding(vocab_size, embed_size, name="embedding")
q_embed = embedding(query)
d_embed = embedding(doc)
mm = merge([q_embed, d_embed], mode="dot", dot_axes=[2, 2])
KM = []
for i in range(kernel_num):
mu = 1. / (kernel_num - 1) + (2. * i) / (kernel_num - 1) - 1.0
sigma = sigma
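            # Added note: with kernel_num=21 this schedule places soft kernels
            # at mu = -0.95, -0.85, ..., 0.95 (step 0.1); the final iteration
            # overshoots to mu = 1.05 and is clamped below to an exact-match
            # kernel at mu = 1.0 with the much tighter exact_sigma.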
if mu > 1.0:
sigma = exact_sigma
mu = 1.0
mm_exp = kernel_layer(mu, sigma)(mm)
mm_doc_sum = Lambda(lambda x: K.tf.reduce_sum(x, 2))(mm_exp)
mm_log = Activation(K.tf.log1p)(mm_doc_sum)
mm_sum = Lambda(lambda x: K.tf.reduce_sum(x, 1))(mm_log)
KM.append(mm_sum)
Phi = Lambda(lambda x: K.tf.stack(x, 1))(KM)
out_ = Dense(1, init="uniform", activation="sigmoid", name="dense")(Phi)
model = Model([query, doc], out_)
return model
def test_with_keras(self):
kmodel = self.keras_knrm(5, 10, 22, 50)
input_data = np.random.randint(20, size=(4, 15))
koutput = kmodel.predict([input_data[:, :5], input_data[:, 5:]])
kweights = kmodel.get_weights()
bweights = [kweights[0], np.transpose(kweights[1]), kweights[2]]
model = KNRM(5, 10, glove_path, target_mode="classification")
model.set_weights(bweights)
output = model.forward(input_data)
self.assert_allclose(output, koutput)
def test_forward_backward(self):
model = KNRM(15, 60, glove_path, word_index={"is": 1, "to": 2, "the": 3, "for": 4},
kernel_num=10, sigma=0.15, exact_sigma=1e-4)
input_data = np.random.randint(5, size=(1, 75))
self.assert_forward_backward(model, input_data)
def test_save_load(self):
model = KNRM(5, 10, glove_path)
input_data = np.random.randint(20, size=(3, 15))
self.assert_zoo_model_save_load(model, input_data)
if __name__ == "__main__":
pytest.main([__file__])
|
{
"content_hash": "dd45c5db547667c13b50ab9e05272002",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 91,
"avg_line_length": 38.31578947368421,
"alnum_prop": 0.5789835164835165,
"repo_name": "intel-analytics/analytics-zoo",
"id": "9b6e02f9c75e212e224216e24f6fd68454ef5c9b",
"size": "3503",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyzoo/test/zoo/models/textmatching/test_knrm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "73165"
},
{
"name": "Groovy",
"bytes": "1613"
},
{
"name": "Java",
"bytes": "209136"
},
{
"name": "Jupyter Notebook",
"bytes": "24437284"
},
{
"name": "Makefile",
"bytes": "11724"
},
{
"name": "PureBasic",
"bytes": "593"
},
{
"name": "Python",
"bytes": "4085490"
},
{
"name": "RobotFramework",
"bytes": "17467"
},
{
"name": "Scala",
"bytes": "3562801"
},
{
"name": "Shell",
"bytes": "413512"
}
],
"symlink_target": ""
}
|
import datetime
from operator import itemgetter
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from unittest import mock
import orjson
from django.db import IntegrityError
from django.utils.timezone import now as timezone_now
from zerver.actions.message_delete import do_delete_messages
from zerver.actions.message_edit import (
check_update_message,
do_update_message,
get_mentions_for_message_updates,
)
from zerver.actions.reactions import do_add_reaction
from zerver.actions.realm_settings import do_change_realm_plan_type, do_set_realm_property
from zerver.actions.streams import do_change_stream_post_policy, do_deactivate_stream
from zerver.actions.users import do_change_user_role
from zerver.lib.message import MessageDict, has_message_access, messages_for_ids, truncate_topic
from zerver.lib.test_classes import ZulipTestCase, get_topic_messages
from zerver.lib.test_helpers import cache_tries_captured, queries_captured
from zerver.lib.topic import RESOLVED_TOPIC_PREFIX, TOPIC_NAME
from zerver.lib.user_topics import (
get_topic_mutes,
get_users_muting_topic,
set_topic_mutes,
topic_is_muted,
)
from zerver.lib.utils import assert_is_not_none
from zerver.models import (
MAX_TOPIC_NAME_LENGTH,
Message,
Realm,
Stream,
UserMessage,
UserProfile,
get_realm,
get_stream,
)
if TYPE_CHECKING:
from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
class EditMessageTestCase(ZulipTestCase):
def check_topic(self, msg_id: int, topic_name: str) -> None:
msg = Message.objects.get(id=msg_id)
self.assertEqual(msg.topic_name(), topic_name)
def check_message(self, msg_id: int, topic_name: str, content: str) -> None:
# Make sure we saved the message correctly to the DB.
msg = Message.objects.get(id=msg_id)
self.assertEqual(msg.topic_name(), topic_name)
self.assertEqual(msg.content, content)
"""
We assume our caller just edited a message.
Next, we will make sure we properly cached the messages. We still have
to do a query to hydrate recipient info, but we won't need to hit the
zerver_message table.
"""
with queries_captured(keep_cache_warm=True) as queries:
(fetch_message_dict,) = messages_for_ids(
message_ids=[msg.id],
user_message_flags={msg_id: []},
search_fields={},
apply_markdown=False,
client_gravatar=False,
allow_edit_history=True,
)
self.assert_length(queries, 1)
for query in queries:
self.assertNotIn("message", query["sql"])
self.assertEqual(
fetch_message_dict[TOPIC_NAME],
msg.topic_name(),
)
self.assertEqual(
fetch_message_dict["content"],
msg.content,
)
self.assertEqual(
fetch_message_dict["sender_id"],
msg.sender_id,
)
if msg.edit_history:
self.assertEqual(
fetch_message_dict["edit_history"],
orjson.loads(msg.edit_history),
)
def prepare_move_topics(
self,
user_email: str,
old_stream: str,
new_stream: str,
topic: str,
language: Optional[str] = None,
) -> Tuple[UserProfile, Stream, Stream, int, int]:
user_profile = self.example_user(user_email)
if language is not None:
user_profile.default_language = language
user_profile.save(update_fields=["default_language"])
self.login(user_email)
stream = self.make_stream(old_stream)
stream_to = self.make_stream(new_stream)
self.subscribe(user_profile, stream.name)
self.subscribe(user_profile, stream_to.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name=topic, content="First"
)
msg_id_lt = self.send_stream_message(
user_profile, stream.name, topic_name=topic, content="Second"
)
self.send_stream_message(user_profile, stream.name, topic_name=topic, content="third")
return (user_profile, stream, stream_to, msg_id, msg_id_lt)
class EditMessagePayloadTest(EditMessageTestCase):
def test_edit_message_no_changes(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{},
)
self.assert_json_error(result, "Nothing to change")
def test_move_message_cant_move_private_message(self) -> None:
hamlet = self.example_user("hamlet")
self.login("hamlet")
cordelia = self.example_user("cordelia")
msg_id = self.send_personal_message(hamlet, cordelia)
verona = get_stream("Verona", hamlet.realm)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": verona.id,
},
)
self.assert_json_error(result, "Private messages cannot be moved to streams.")
def test_private_message_edit_topic(self) -> None:
hamlet = self.example_user("hamlet")
self.login("hamlet")
cordelia = self.example_user("cordelia")
msg_id = self.send_personal_message(hamlet, cordelia)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "Should not exist",
},
)
self.assert_json_error(result, "Private messages cannot have topics.")
def test_propagate_invalid(self) -> None:
self.login("hamlet")
id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
result = self.client_patch(
"/json/messages/" + str(id1),
{
"topic": "edited",
"propagate_mode": "invalid",
},
)
self.assert_json_error(result, "Invalid propagate_mode")
self.check_topic(id1, topic_name="topic1")
result = self.client_patch(
"/json/messages/" + str(id1),
{
"content": "edited",
"propagate_mode": "change_all",
},
)
self.assert_json_error(result, "Invalid propagate_mode without topic edit")
self.check_topic(id1, topic_name="topic1")
def test_edit_message_no_topic(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": " ",
},
)
self.assert_json_error(result, "Topic can't be empty!")
def test_edit_message_invalid_topic(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "editing\nfun",
},
)
self.assert_json_error(result, "Invalid character in topic, at position 8!")
def test_move_message_to_stream_with_content(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"content": "Not allowed",
},
)
self.assert_json_error(result, "Cannot change message content while changing stream")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 0)
# Right now, we prevent users from editing widgets.
def test_edit_submessage(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="/poll Games?\nYES\nNO",
)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"content": "/poll Games?\nYES\nNO\nMaybe",
},
)
self.assert_json_error(result, "Widgets cannot be edited.")
class EditMessageTest(EditMessageTestCase):
def test_query_count_on_to_dict_uncached(self) -> None:
# `to_dict_uncached` method is used by the mechanisms
# tested in this class. Hence, its performance is tested here.
# Generate 2 messages
user = self.example_user("hamlet")
realm = user.realm
self.login_user(user)
stream_name = "public_stream"
self.subscribe(user, stream_name)
message_ids = []
message_ids.append(self.send_stream_message(user, stream_name, "Message one"))
user_2 = self.example_user("cordelia")
self.subscribe(user_2, stream_name)
message_ids.append(self.send_stream_message(user_2, stream_name, "Message two"))
self.subscribe(self.notification_bot(realm), stream_name)
message_ids.append(
self.send_stream_message(self.notification_bot(realm), stream_name, "Message three")
)
messages = [
Message.objects.select_related().get(id=message_id) for message_id in message_ids
]
# Check number of queries performed
# 1 query for realm_id per message = 3
# 1 query each for reactions & submessage for all messages = 2
with self.assert_database_query_count(5):
MessageDict.to_dict_uncached(messages)
realm_id = 2 # Fetched from stream object
# Check number of queries performed with realm_id
# 1 query each for reactions & submessage for all messages = 2
with self.assert_database_query_count(2):
MessageDict.to_dict_uncached(messages, realm_id)
def test_save_message(self) -> None:
"""This is also tested by a client test, but here we can verify
the cache against the database"""
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"content": "after edit",
},
)
self.assert_json_success(result)
self.check_message(msg_id, topic_name="editing", content="after edit")
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"topic": "edited",
},
)
self.assert_json_success(result)
self.check_topic(msg_id, topic_name="edited")
def test_fetch_message_from_id(self) -> None:
self.login("hamlet")
msg_id = self.send_personal_message(
from_user=self.example_user("hamlet"),
to_user=self.example_user("cordelia"),
content="Personal message",
)
result = self.client_get("/json/messages/" + str(msg_id))
response_dict = self.assert_json_success(result)
self.assertEqual(response_dict["raw_content"], "Personal message")
self.assertEqual(response_dict["message"]["id"], msg_id)
self.assertEqual(response_dict["message"]["flags"], [])
# Send message to web-public stream where hamlet is not subscribed.
# This will test case of user having no `UserMessage` but having access
# to message.
web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
self.subscribe(self.example_user("cordelia"), web_public_stream.name)
web_public_stream_msg_id = self.send_stream_message(
self.example_user("cordelia"), web_public_stream.name, content="web-public message"
)
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
response_dict = self.assert_json_success(result)
self.assertEqual(response_dict["raw_content"], "web-public message")
self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)
self.assertEqual(response_dict["message"]["flags"], ["read", "historical"])
# Spectator should be able to fetch message in web-public stream.
self.logout()
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
response_dict = self.assert_json_success(result)
self.assertEqual(response_dict["raw_content"], "web-public message")
self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)
# Verify default is apply_markdown=True
self.assertEqual(response_dict["message"]["content"], "<p>web-public message</p>")
# Verify apply_markdown=False works correctly.
result = self.client_get(
"/json/messages/" + str(web_public_stream_msg_id), {"apply_markdown": "false"}
)
response_dict = self.assert_json_success(result)
self.assertEqual(response_dict["raw_content"], "web-public message")
self.assertEqual(response_dict["message"]["content"], "web-public message")
with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", status_code=401
)
# Test error cases
self.login("hamlet")
result = self.client_get("/json/messages/999999")
self.assert_json_error(result, "Invalid message(s)")
self.login("cordelia")
result = self.client_get(f"/json/messages/{msg_id}")
self.assert_json_success(result)
self.login("othello")
result = self.client_get(f"/json/messages/{msg_id}")
self.assert_json_error(result, "Invalid message(s)")
def test_fetch_raw_message_spectator(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
self.subscribe(user_profile, web_public_stream.name)
web_public_stream_msg_id = self.send_stream_message(
user_profile, web_public_stream.name, content="web-public message"
)
non_web_public_stream = self.make_stream("non-web-public-stream")
self.subscribe(user_profile, non_web_public_stream.name)
non_web_public_stream_msg_id = self.send_stream_message(
user_profile, non_web_public_stream.name, content="non-web-public message"
)
# Generate a private message to use in verification.
private_message_id = self.send_personal_message(user_profile, user_profile)
invalid_message_id = private_message_id + 1000
self.logout()
# Confirm WEB_PUBLIC_STREAMS_ENABLED is enforced.
with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
do_set_realm_property(
user_profile.realm, "enable_spectator_access", False, acting_user=None
)
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
do_set_realm_property(user_profile.realm, "enable_spectator_access", True, acting_user=None)
# Verify success with web-public stream and default SELF_HOSTED plan type.
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
response_dict = self.assert_json_success(result)
self.assertEqual(response_dict["raw_content"], "web-public message")
self.assertEqual(response_dict["message"]["flags"], ["read"])
# Verify LIMITED plan type does not allow web-public access.
do_change_realm_plan_type(user_profile.realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
do_set_realm_property(user_profile.realm, "enable_spectator_access", True, acting_user=None)
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
# Verify works with STANDARD_FREE plan type too.
do_change_realm_plan_type(
user_profile.realm, Realm.PLAN_TYPE_STANDARD_FREE, acting_user=None
)
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
response_dict = self.assert_json_success(result)
self.assertEqual(response_dict["raw_content"], "web-public message")
# Verify private messages are rejected.
result = self.client_get("/json/messages/" + str(private_message_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
# Verify an actual public stream is required.
result = self.client_get("/json/messages/" + str(non_web_public_stream_msg_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
# Verify invalid message IDs are rejected with the same error message.
result = self.client_get("/json/messages/" + str(invalid_message_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
# Verify deactivated streams are rejected. This may change in the future.
do_deactivate_stream(web_public_stream, acting_user=None)
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
self.assert_json_error(
result, "Not logged in: API authentication or user session required", 401
)
def test_fetch_raw_message_stream_wrong_realm(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream("public_stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="test"
)
result = self.client_get(f"/json/messages/{msg_id}")
self.assert_json_success(result)
mit_user = self.mit_user("sipbtest")
self.login_user(mit_user)
result = self.client_get(f"/json/messages/{msg_id}", subdomain="zephyr")
self.assert_json_error(result, "Invalid message(s)")
def test_fetch_raw_message_private_stream(self) -> None:
user_profile = self.example_user("hamlet")
self.login_user(user_profile)
stream = self.make_stream("private_stream", invite_only=True)
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="test"
)
result = self.client_get(f"/json/messages/{msg_id}")
self.assert_json_success(result)
self.login("othello")
result = self.client_get(f"/json/messages/{msg_id}")
self.assert_json_error(result, "Invalid message(s)")
def test_edit_message_no_permission(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("iago"), "Denmark", topic_name="editing", content="before edit"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"content": "content after edit",
},
)
self.assert_json_error(result, "You don't have permission to edit this message")
def test_edit_message_no_content(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"content": " ",
},
)
self.assert_json_success(result)
content = Message.objects.filter(id=msg_id).values_list("content", flat=True)[0]
self.assertEqual(content, "(deleted)")
def test_edit_message_history_disabled(self) -> None:
user_profile = self.example_user("hamlet")
do_set_realm_property(user_profile.realm, "allow_edit_history", False, acting_user=None)
self.login("hamlet")
# Single-line edit
msg_id_1 = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="content before edit",
)
new_content_1 = "content after edit"
result_1 = self.client_patch(
f"/json/messages/{msg_id_1}",
{
"content": new_content_1,
},
)
self.assert_json_success(result_1)
result = self.client_get(f"/json/messages/{msg_id_1}/history")
self.assert_json_error(result, "Message edit history is disabled in this organization")
# Now verify that if we fetch the message directly, there's no
# edit history data attached.
messages_result = self.client_get(
"/json/messages", {"anchor": msg_id_1, "num_before": 0, "num_after": 10}
)
self.assert_json_success(messages_result)
json_messages = orjson.loads(messages_result.content)
for msg in json_messages["messages"]:
self.assertNotIn("edit_history", msg)
def test_edit_message_history(self) -> None:
self.login("hamlet")
# Single-line edit
msg_id_1 = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="content before edit",
)
new_content_1 = "content after edit"
result_1 = self.client_patch(
f"/json/messages/{msg_id_1}",
{
"content": new_content_1,
},
)
self.assert_json_success(result_1)
message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
json_response_1 = orjson.loads(message_edit_history_1.content)
message_history_1 = json_response_1["message_history"]
# Check content of message after edit.
self.assertEqual(message_history_1[0]["rendered_content"], "<p>content before edit</p>")
self.assertEqual(message_history_1[1]["rendered_content"], "<p>content after edit</p>")
self.assertEqual(
message_history_1[1]["content_html_diff"],
(
"<div><p>content "
'<span class="highlight_text_inserted">after</span> '
'<span class="highlight_text_deleted">before</span>'
" edit</p></div>"
),
)
# Check content of message before edit.
self.assertEqual(
message_history_1[1]["prev_rendered_content"], "<p>content before edit</p>"
)
# Edits on new lines
msg_id_2 = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="content before edit, line 1\n\ncontent before edit, line 3",
)
new_content_2 = (
"content before edit, line 1\n"
"content after edit, line 2\n"
"content before edit, line 3"
)
result_2 = self.client_patch(
f"/json/messages/{msg_id_2}",
{
"content": new_content_2,
},
)
self.assert_json_success(result_2)
message_edit_history_2 = self.client_get(f"/json/messages/{msg_id_2}/history")
json_response_2 = orjson.loads(message_edit_history_2.content)
message_history_2 = json_response_2["message_history"]
self.assertEqual(
message_history_2[0]["rendered_content"],
"<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
)
self.assertEqual(
message_history_2[1]["rendered_content"],
(
"<p>content before edit, line 1<br>\n"
"content after edit, line 2<br>\n"
"content before edit, line 3</p>"
),
)
self.assertEqual(
message_history_2[1]["content_html_diff"],
(
"<div><p>content before edit, line 1<br> "
'content <span class="highlight_text_inserted">after edit, line 2<br> '
"content</span> before edit, line 3</p></div>"
),
)
self.assertEqual(
message_history_2[1]["prev_rendered_content"],
"<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
)
def test_empty_message_edit(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="We will edit this to render as empty.",
)
        # Edit the rendered content manually to simulate a rendering bug
message = Message.objects.get(id=msg_id)
message.rendered_content = ""
message.save(update_fields=["rendered_content"])
self.assert_json_success(
self.client_patch(
"/json/messages/" + str(msg_id),
{
"content": "We will edit this to also render as empty.",
},
)
)
        # Tweak it again to simulate a rendering bug
message = Message.objects.get(id=msg_id)
message.rendered_content = ""
message.save(update_fields=["rendered_content"])
history = self.client_get("/json/messages/" + str(msg_id) + "/history")
message_history = orjson.loads(history.content)["message_history"]
self.assertEqual(message_history[0]["rendered_content"], "")
self.assertEqual(message_history[1]["rendered_content"], "")
self.assertEqual(message_history[1]["content_html_diff"], "<div></div>")
def test_edit_link(self) -> None:
# Link editing
self.login("hamlet")
msg_id_1 = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="Here is a link to [zulip](www.zulip.org).",
)
new_content_1 = "Here is a link to [zulip](www.zulipchat.com)."
result_1 = self.client_patch(
f"/json/messages/{msg_id_1}",
{
"content": new_content_1,
},
)
self.assert_json_success(result_1)
message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
json_response_1 = orjson.loads(message_edit_history_1.content)
message_history_1 = json_response_1["message_history"]
# Check content of message after edit.
self.assertEqual(
message_history_1[0]["rendered_content"],
"<p>Here is a link to " '<a href="http://www.zulip.org">zulip</a>.</p>',
)
self.assertEqual(
message_history_1[1]["rendered_content"],
"<p>Here is a link to " '<a href="http://www.zulipchat.com">zulip</a>.</p>',
)
self.assertEqual(
message_history_1[1]["content_html_diff"],
(
'<div><p>Here is a link to <a href="http://www.zulipchat.com"'
">zulip "
'<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
'</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
"</span> </a></p></div>"
),
)
def test_edit_history_unedited(self) -> None:
self.login("hamlet")
msg_id = self.send_stream_message(
self.example_user("hamlet"),
"Denmark",
topic_name="editing",
content="This message has not been edited.",
)
result = self.client_get(f"/json/messages/{msg_id}/history")
message_history = self.assert_json_success(result)["message_history"]
self.assert_length(message_history, 1)
def test_mentions_for_message_updates(self) -> None:
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
self.login_user(hamlet)
self.subscribe(hamlet, "Denmark")
self.subscribe(cordelia, "Denmark")
msg_id = self.send_stream_message(
hamlet, "Denmark", content="@**Cordelia, Lear's daughter**"
)
mention_user_ids = get_mentions_for_message_updates(msg_id)
self.assertEqual(mention_user_ids, {cordelia.id})
def test_edit_cases(self) -> None:
"""This test verifies the accuracy of construction of Zulip's edit
history data structures."""
self.login("hamlet")
hamlet = self.example_user("hamlet")
stream_1 = self.make_stream("stream 1")
stream_2 = self.make_stream("stream 2")
stream_3 = self.make_stream("stream 3")
self.subscribe(hamlet, stream_1.name)
self.subscribe(hamlet, stream_2.name)
self.subscribe(hamlet, stream_3.name)
msg_id = self.send_stream_message(
self.example_user("hamlet"), "stream 1", topic_name="topic 1", content="content 1"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"content": "content 2",
},
)
self.assert_json_success(result)
history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
self.assertEqual(history[0]["prev_content"], "content 1")
self.assertEqual(history[0]["user_id"], hamlet.id)
self.assertEqual(
set(history[0].keys()),
{
"timestamp",
"prev_content",
"user_id",
"prev_rendered_content",
"prev_rendered_content_version",
},
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"topic": "topic 2",
},
)
self.assert_json_success(result)
history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
self.assertEqual(history[0]["prev_topic"], "topic 1")
self.assertEqual(history[0]["topic"], "topic 2")
self.assertEqual(history[0]["user_id"], hamlet.id)
self.assertEqual(
set(history[0].keys()),
{"timestamp", "prev_topic", "topic", "user_id"},
)
self.login("iago")
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": stream_2.id,
},
)
self.assert_json_success(result)
history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
self.assertEqual(history[0]["prev_stream"], stream_1.id)
self.assertEqual(history[0]["stream"], stream_2.id)
self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
self.assertEqual(set(history[0].keys()), {"timestamp", "prev_stream", "stream", "user_id"})
self.login("hamlet")
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"content": "content 3",
"topic": "topic 3",
},
)
self.assert_json_success(result)
history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
self.assertEqual(history[0]["prev_content"], "content 2")
self.assertEqual(history[0]["prev_topic"], "topic 2")
self.assertEqual(history[0]["topic"], "topic 3")
self.assertEqual(history[0]["user_id"], hamlet.id)
self.assertEqual(
set(history[0].keys()),
{
"timestamp",
"prev_topic",
"topic",
"prev_content",
"user_id",
"prev_rendered_content",
"prev_rendered_content_version",
},
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"content": "content 4",
},
)
self.assert_json_success(result)
history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
self.assertEqual(history[0]["prev_content"], "content 3")
self.assertEqual(history[0]["user_id"], hamlet.id)
self.login("iago")
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"topic": "topic 4",
"stream_id": stream_3.id,
},
)
self.assert_json_success(result)
history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
self.assertEqual(history[0]["prev_topic"], "topic 3")
self.assertEqual(history[0]["topic"], "topic 4")
self.assertEqual(history[0]["prev_stream"], stream_2.id)
self.assertEqual(history[0]["stream"], stream_3.id)
self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
self.assertEqual(
set(history[0].keys()),
{
"timestamp",
"prev_topic",
"topic",
"prev_stream",
"stream",
"user_id",
},
)
# Now, we verify that all of the edits stored in the message.edit_history
# have the correct data structure
history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
self.assertEqual(history[0]["prev_topic"], "topic 3")
self.assertEqual(history[0]["topic"], "topic 4")
self.assertEqual(history[0]["stream"], stream_3.id)
self.assertEqual(history[0]["prev_stream"], stream_2.id)
self.assertEqual(history[1]["prev_content"], "content 3")
self.assertEqual(history[2]["prev_topic"], "topic 2")
self.assertEqual(history[2]["topic"], "topic 3")
self.assertEqual(history[2]["prev_content"], "content 2")
self.assertEqual(history[3]["stream"], stream_2.id)
self.assertEqual(history[3]["prev_stream"], stream_1.id)
self.assertEqual(history[4]["prev_topic"], "topic 1")
self.assertEqual(history[4]["topic"], "topic 2")
self.assertEqual(history[5]["prev_content"], "content 1")
# Now, we verify that the edit history data sent back has the
# correct filled-out fields
message_edit_history = self.client_get(f"/json/messages/{msg_id}/history")
json_response = orjson.loads(message_edit_history.content)
# We reverse the message history view output so that the IDs line up with the above.
message_history = list(reversed(json_response["message_history"]))
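        # Added note: after reversing, index 0 is the most recent edit and
        # index 6 is the original message. Indices 0, 2, 4 carry topic edits,
        # indices 1, 2, 5 carry content edits, and indices 0, 3 carry stream
        # moves, matching the six edits performed above.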
i = 0
for entry in message_history:
expected_entries = {"content", "rendered_content", "topic", "timestamp", "user_id"}
if i in {0, 2, 4}:
expected_entries.add("prev_topic")
expected_entries.add("topic")
if i in {1, 2, 5}:
expected_entries.add("prev_content")
expected_entries.add("prev_rendered_content")
expected_entries.add("content_html_diff")
if i in {0, 3}:
expected_entries.add("prev_stream")
expected_entries.add("stream")
i += 1
self.assertEqual(expected_entries, set(entry.keys()))
self.assert_length(message_history, 7)
self.assertEqual(message_history[0]["topic"], "topic 4")
self.assertEqual(message_history[0]["prev_topic"], "topic 3")
self.assertEqual(message_history[0]["stream"], stream_3.id)
self.assertEqual(message_history[0]["prev_stream"], stream_2.id)
self.assertEqual(message_history[0]["content"], "content 4")
self.assertEqual(message_history[1]["topic"], "topic 3")
self.assertEqual(message_history[1]["content"], "content 4")
self.assertEqual(message_history[1]["prev_content"], "content 3")
self.assertEqual(message_history[2]["topic"], "topic 3")
self.assertEqual(message_history[2]["prev_topic"], "topic 2")
self.assertEqual(message_history[2]["content"], "content 3")
self.assertEqual(message_history[2]["prev_content"], "content 2")
self.assertEqual(message_history[3]["topic"], "topic 2")
self.assertEqual(message_history[3]["stream"], stream_2.id)
self.assertEqual(message_history[3]["prev_stream"], stream_1.id)
self.assertEqual(message_history[3]["content"], "content 2")
self.assertEqual(message_history[4]["topic"], "topic 2")
self.assertEqual(message_history[4]["prev_topic"], "topic 1")
self.assertEqual(message_history[4]["content"], "content 2")
self.assertEqual(message_history[5]["topic"], "topic 1")
self.assertEqual(message_history[5]["content"], "content 2")
self.assertEqual(message_history[5]["prev_content"], "content 1")
self.assertEqual(message_history[6]["content"], "content 1")
self.assertEqual(message_history[6]["topic"], "topic 1")
def test_edit_message_content_limit(self) -> None:
def set_message_editing_params(
allow_message_editing: bool,
message_content_edit_limit_seconds: Union[int, str],
edit_topic_policy: int,
) -> None:
result = self.client_patch(
"/json/realm",
{
"allow_message_editing": orjson.dumps(allow_message_editing).decode(),
"message_content_edit_limit_seconds": orjson.dumps(
message_content_edit_limit_seconds
).decode(),
"edit_topic_policy": edit_topic_policy,
},
)
self.assert_json_success(result)
def do_edit_message_assert_success(
id_: int, unique_str: str, topic_only: bool = False
) -> None:
new_topic = "topic" + unique_str
new_content = "content" + unique_str
params_dict = {"topic": new_topic}
if not topic_only:
params_dict["content"] = new_content
result = self.client_patch(f"/json/messages/{id_}", params_dict)
self.assert_json_success(result)
if topic_only:
self.check_topic(id_, topic_name=new_topic)
else:
self.check_message(id_, topic_name=new_topic, content=new_content)
def do_edit_message_assert_error(
id_: int, unique_str: str, error: str, topic_only: bool = False
) -> None:
message = Message.objects.get(id=id_)
old_topic = message.topic_name()
old_content = message.content
new_topic = "topic" + unique_str
new_content = "content" + unique_str
params_dict = {"topic": new_topic}
if not topic_only:
params_dict["content"] = new_content
result = self.client_patch(f"/json/messages/{id_}", params_dict)
message = Message.objects.get(id=id_)
self.assert_json_error(result, error)
msg = Message.objects.get(id=id_)
self.assertEqual(msg.topic_name(), old_topic)
self.assertEqual(msg.content, old_content)
self.login("iago")
# send a message in the past
id_ = self.send_stream_message(
self.example_user("iago"), "Denmark", content="content", topic_name="topic"
)
message = Message.objects.get(id=id_)
message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
message.save()
# test the various possible message editing settings
# high enough time limit, all edits allowed
set_message_editing_params(True, 240, Realm.POLICY_ADMINS_ONLY)
do_edit_message_assert_success(id_, "A")
# out of time, only topic editing allowed
set_message_editing_params(True, 120, Realm.POLICY_ADMINS_ONLY)
do_edit_message_assert_success(id_, "B", True)
do_edit_message_assert_error(id_, "C", "The time limit for editing this message has passed")
# infinite time, all edits allowed
set_message_editing_params(True, "unlimited", Realm.POLICY_ADMINS_ONLY)
do_edit_message_assert_success(id_, "D")
# without allow_message_editing, nothing is allowed
set_message_editing_params(False, 240, Realm.POLICY_ADMINS_ONLY)
do_edit_message_assert_error(
id_, "E", "Your organization has turned off message editing", True
)
set_message_editing_params(False, 120, Realm.POLICY_ADMINS_ONLY)
do_edit_message_assert_error(
id_, "F", "Your organization has turned off message editing", True
)
set_message_editing_params(False, "unlimited", Realm.POLICY_ADMINS_ONLY)
do_edit_message_assert_error(
id_, "G", "Your organization has turned off message editing", True
)
def test_edit_topic_policy(self) -> None:
def set_message_editing_params(
allow_message_editing: bool,
message_content_edit_limit_seconds: Union[int, str],
edit_topic_policy: int,
) -> None:
self.login("iago")
result = self.client_patch(
"/json/realm",
{
"allow_message_editing": orjson.dumps(allow_message_editing).decode(),
"message_content_edit_limit_seconds": orjson.dumps(
message_content_edit_limit_seconds
).decode(),
"edit_topic_policy": edit_topic_policy,
},
)
self.assert_json_success(result)
def do_edit_message_assert_success(id_: int, unique_str: str, acting_user: str) -> None:
self.login(acting_user)
new_topic = "topic" + unique_str
params_dict = {"topic": new_topic}
result = self.client_patch(f"/json/messages/{id_}", params_dict)
self.assert_json_success(result)
self.check_topic(id_, topic_name=new_topic)
def do_edit_message_assert_error(
id_: int, unique_str: str, error: str, acting_user: str
) -> None:
self.login(acting_user)
message = Message.objects.get(id=id_)
old_topic = message.topic_name()
old_content = message.content
new_topic = "topic" + unique_str
params_dict = {"topic": new_topic}
result = self.client_patch(f"/json/messages/{id_}", params_dict)
message = Message.objects.get(id=id_)
self.assert_json_error(result, error)
msg = Message.objects.get(id=id_)
self.assertEqual(msg.topic_name(), old_topic)
self.assertEqual(msg.content, old_content)
# send a message in the past
id_ = self.send_stream_message(
self.example_user("hamlet"), "Denmark", content="content", topic_name="topic"
)
message = Message.objects.get(id=id_)
message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
message.save()
# Guest user must be subscribed to the stream to access the message.
polonius = self.example_user("polonius")
self.subscribe(polonius, "Denmark")
# any user can edit the topic of a message
set_message_editing_params(True, "unlimited", Realm.POLICY_EVERYONE)
do_edit_message_assert_success(id_, "A", "polonius")
# only members can edit topic of a message
set_message_editing_params(True, "unlimited", Realm.POLICY_MEMBERS_ONLY)
do_edit_message_assert_error(
id_, "B", "You don't have permission to edit this message", "polonius"
)
do_edit_message_assert_success(id_, "B", "cordelia")
# only full members can edit topic of a message
set_message_editing_params(True, "unlimited", Realm.POLICY_FULL_MEMBERS_ONLY)
cordelia = self.example_user("cordelia")
do_set_realm_property(cordelia.realm, "waiting_period_threshold", 10, acting_user=None)
cordelia.date_joined = timezone_now() - datetime.timedelta(days=9)
cordelia.save()
do_edit_message_assert_error(
id_, "C", "You don't have permission to edit this message", "cordelia"
)
cordelia.date_joined = timezone_now() - datetime.timedelta(days=11)
cordelia.save()
do_edit_message_assert_success(id_, "C", "cordelia")
# only moderators can edit topic of a message
set_message_editing_params(True, "unlimited", Realm.POLICY_MODERATORS_ONLY)
do_edit_message_assert_error(
id_, "D", "You don't have permission to edit this message", "cordelia"
)
do_edit_message_assert_success(id_, "D", "shiva")
# only admins can edit the topics of messages
set_message_editing_params(True, "unlimited", Realm.POLICY_ADMINS_ONLY)
do_edit_message_assert_error(
id_, "E", "You don't have permission to edit this message", "shiva"
)
do_edit_message_assert_success(id_, "E", "iago")
# users cannot edit topics if allow_message_editing is False
set_message_editing_params(False, "unlimited", Realm.POLICY_EVERYONE)
do_edit_message_assert_error(
id_, "D", "Your organization has turned off message editing", "cordelia"
)
        # users who are not at least moderators cannot edit topics sent > 72 hrs ago
message.date_sent = message.date_sent - datetime.timedelta(seconds=290000)
message.save()
set_message_editing_params(True, "unlimited", Realm.POLICY_EVERYONE)
do_edit_message_assert_success(id_, "E", "iago")
do_edit_message_assert_success(id_, "F", "shiva")
do_edit_message_assert_error(
id_, "G", "The time limit for editing this message's topic has passed", "cordelia"
)
# anyone should be able to edit "no topic" indefinitely
message.set_topic_name("(no topic)")
message.save()
do_edit_message_assert_success(id_, "D", "cordelia")
@mock.patch("zerver.actions.message_edit.send_event")
def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
stream_name = "Macbeth"
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
self.make_stream(stream_name, history_public_to_subscribers=True)
self.subscribe(hamlet, stream_name)
self.login_user(hamlet)
message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")
self.login_user(cordelia)
self.subscribe(cordelia, stream_name)
message = Message.objects.get(id=message_id)
def do_update_message_topic_success(
user_profile: UserProfile,
message: Message,
topic_name: str,
users_to_be_notified: List[Dict[str, Any]],
) -> None:
do_update_message(
user_profile=user_profile,
target_message=message,
new_stream=None,
topic_name=topic_name,
propagate_mode="change_later",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
rendering_result=None,
prior_mention_user_ids=set(),
mention_data=None,
)
mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)
# Returns the users that need to be notified when a message topic is changed
def notify(user_id: int) -> Dict[str, Any]:
um = UserMessage.objects.get(message=message_id)
if um.user_profile_id == user_id:
return {
"id": user_id,
"flags": um.flags_list(),
}
else:
return {
"id": user_id,
"flags": ["read"],
}
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        # Edit the topic of a message sent before Cordelia subscribed to the stream
do_update_message_topic_success(
cordelia, message, "Othello eats apple", users_to_be_notified
)
# If Cordelia is long-term idle, she doesn't get a notification.
cordelia.long_term_idle = True
cordelia.save()
users_to_be_notified = list(map(notify, [hamlet.id]))
do_update_message_topic_success(
cordelia, message, "Another topic idle", users_to_be_notified
)
cordelia.long_term_idle = False
cordelia.save()
        # Even if Hamlet unsubscribes from the stream, he should be notified when the topic is changed
# because he has a UserMessage row.
self.unsubscribe(hamlet, stream_name)
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)
# Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
# the message topic. Cordelia won't receive any updates when a message on that stream is
# changed because she is not a subscriber and doesn't have a UserMessage row.
self.subscribe(hamlet, stream_name)
self.unsubscribe(cordelia, stream_name)
self.login_user(hamlet)
users_to_be_notified = list(map(notify, [hamlet.id]))
do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)
@mock.patch("zerver.actions.message_edit.send_event")
def test_edit_muted_topic(self, mock_send_event: mock.MagicMock) -> None:
stream_name = "Stream 123"
stream = self.make_stream(stream_name)
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
aaron = self.example_user("aaron")
self.subscribe(hamlet, stream_name)
self.login_user(hamlet)
message_id = self.send_stream_message(
hamlet, stream_name, topic_name="Topic1", content="Hello World"
)
self.subscribe(cordelia, stream_name)
self.login_user(cordelia)
self.subscribe(aaron, stream_name)
self.login_user(aaron)
already_muted_topic = "Already muted topic"
muted_topics = [
[stream_name, "Topic1"],
[stream_name, "Topic2"],
[stream_name, already_muted_topic],
]
set_topic_mutes(hamlet, muted_topics)
set_topic_mutes(cordelia, muted_topics)
# Returns the users that need to be notified when a message topic is changed
def notify(user_id: int) -> Dict[str, Any]:
um = UserMessage.objects.get(message=message_id)
if um.user_profile_id == user_id:
return {
"id": user_id,
"flags": um.flags_list(),
}
else:
return {
"id": user_id,
"flags": ["read"],
}
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id, aaron.id]))
change_all_topic_name = "Topic 1 edited"
# This code path adds 9 (1 + 4/user with muted topics) + 1 to
# the number of database queries for moving a topic.
with self.assert_database_query_count(19):
check_update_message(
user_profile=hamlet,
message_id=message_id,
stream_id=None,
topic_name=change_all_topic_name,
propagate_mode="change_all",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
for muting_user in get_users_muting_topic(stream.id, change_all_topic_name):
for user in users_to_be_notified:
if muting_user.id == user["id"]:
user["muted_topics"] = get_topic_mutes(muting_user)
break
self.assertFalse(topic_is_muted(hamlet, stream.id, "Topic1"))
self.assertFalse(topic_is_muted(cordelia, stream.id, "Topic1"))
self.assertFalse(topic_is_muted(aaron, stream.id, "Topic1"))
self.assertTrue(topic_is_muted(hamlet, stream.id, "Topic2"))
self.assertTrue(topic_is_muted(cordelia, stream.id, "Topic2"))
self.assertFalse(topic_is_muted(aaron, stream.id, "Topic2"))
self.assertTrue(topic_is_muted(hamlet, stream.id, change_all_topic_name))
self.assertTrue(topic_is_muted(cordelia, stream.id, change_all_topic_name))
self.assertFalse(topic_is_muted(aaron, stream.id, change_all_topic_name))
change_later_topic_name = "Topic 1 edited again"
check_update_message(
user_profile=hamlet,
message_id=message_id,
stream_id=None,
topic_name=change_later_topic_name,
propagate_mode="change_later",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
self.assertFalse(topic_is_muted(hamlet, stream.id, change_all_topic_name))
self.assertTrue(topic_is_muted(hamlet, stream.id, change_later_topic_name))
# Make sure we safely handle the case of the new topic being already muted.
check_update_message(
user_profile=hamlet,
message_id=message_id,
stream_id=None,
topic_name=already_muted_topic,
propagate_mode="change_all",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
self.assertFalse(topic_is_muted(hamlet, stream.id, change_later_topic_name))
self.assertTrue(topic_is_muted(hamlet, stream.id, already_muted_topic))
change_one_topic_name = "Topic 1 edited change_one"
check_update_message(
user_profile=hamlet,
message_id=message_id,
stream_id=None,
topic_name=change_one_topic_name,
propagate_mode="change_one",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
self.assertTrue(topic_is_muted(hamlet, stream.id, change_one_topic_name))
self.assertFalse(topic_is_muted(hamlet, stream.id, change_later_topic_name))
# Move topic between two public streams.
desdemona = self.example_user("desdemona")
message_id = self.send_stream_message(
hamlet, stream_name, topic_name="New topic", content="Hello World"
)
new_public_stream = self.make_stream("New public stream")
self.subscribe(desdemona, new_public_stream.name)
self.login_user(desdemona)
muted_topics = [
[stream_name, "New topic"],
]
set_topic_mutes(desdemona, muted_topics)
set_topic_mutes(cordelia, muted_topics)
with self.assert_database_query_count(31):
check_update_message(
user_profile=desdemona,
message_id=message_id,
stream_id=new_public_stream.id,
propagate_mode="change_all",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
self.assertFalse(topic_is_muted(desdemona, stream.id, "New topic"))
self.assertFalse(topic_is_muted(cordelia, stream.id, "New topic"))
self.assertFalse(topic_is_muted(aaron, stream.id, "New topic"))
self.assertTrue(topic_is_muted(desdemona, new_public_stream.id, "New topic"))
self.assertTrue(topic_is_muted(cordelia, new_public_stream.id, "New topic"))
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "New topic"))
# Move topic to a private stream.
message_id = self.send_stream_message(
hamlet, stream_name, topic_name="New topic", content="Hello World"
)
new_private_stream = self.make_stream("New private stream", invite_only=True)
self.subscribe(desdemona, new_private_stream.name)
self.login_user(desdemona)
muted_topics = [
[stream_name, "New topic"],
]
set_topic_mutes(desdemona, muted_topics)
set_topic_mutes(cordelia, muted_topics)
with self.assert_database_query_count(33):
check_update_message(
user_profile=desdemona,
message_id=message_id,
stream_id=new_private_stream.id,
propagate_mode="change_all",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
# Cordelia is not subscribed to the private stream, so
# Cordelia should have had the topic unmuted, while Desdemona
# should have had her muted topic record moved.
self.assertFalse(topic_is_muted(desdemona, stream.id, "New topic"))
self.assertFalse(topic_is_muted(cordelia, stream.id, "New topic"))
self.assertFalse(topic_is_muted(aaron, stream.id, "New topic"))
self.assertTrue(topic_is_muted(desdemona, new_private_stream.id, "New topic"))
self.assertFalse(topic_is_muted(cordelia, new_private_stream.id, "New topic"))
self.assertFalse(topic_is_muted(aaron, new_private_stream.id, "New topic"))
# Move topic between two public streams with change in topic name.
desdemona = self.example_user("desdemona")
message_id = self.send_stream_message(
hamlet, stream_name, topic_name="New topic 2", content="Hello World"
)
self.login_user(desdemona)
muted_topics = [
[stream_name, "New topic 2"],
]
set_topic_mutes(desdemona, muted_topics)
set_topic_mutes(cordelia, muted_topics)
with self.assert_database_query_count(31):
check_update_message(
user_profile=desdemona,
message_id=message_id,
stream_id=new_public_stream.id,
topic_name="changed topic name",
propagate_mode="change_all",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
self.assertFalse(topic_is_muted(desdemona, stream.id, "New topic 2"))
self.assertFalse(topic_is_muted(cordelia, stream.id, "New topic 2"))
self.assertFalse(topic_is_muted(aaron, stream.id, "New topic 2"))
self.assertTrue(topic_is_muted(desdemona, new_public_stream.id, "changed topic name"))
self.assertTrue(topic_is_muted(cordelia, new_public_stream.id, "changed topic name"))
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "changed topic name"))
# Moving only half the messages doesn't move MutedTopic records.
second_message_id = self.send_stream_message(
hamlet, stream_name, topic_name="changed topic name", content="Second message"
)
with self.assert_database_query_count(25):
check_update_message(
user_profile=desdemona,
message_id=second_message_id,
stream_id=new_public_stream.id,
topic_name="final topic name",
propagate_mode="change_later",
send_notification_to_old_thread=False,
send_notification_to_new_thread=False,
content=None,
)
self.assertTrue(topic_is_muted(desdemona, new_public_stream.id, "changed topic name"))
self.assertTrue(topic_is_muted(cordelia, new_public_stream.id, "changed topic name"))
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "changed topic name"))
self.assertFalse(topic_is_muted(desdemona, new_public_stream.id, "final topic name"))
self.assertFalse(topic_is_muted(cordelia, new_public_stream.id, "final topic name"))
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "final topic name"))
@mock.patch("zerver.actions.message_edit.send_event")
def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
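# Verify that editing a message to add a wildcard mention sets
# wildcard_mention_user_ids on the resulting update_message event and
# notifies all subscribers with the wildcard_mentioned flag.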
stream_name = "Macbeth"
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
self.make_stream(stream_name, history_public_to_subscribers=True)
self.subscribe(hamlet, stream_name)
self.subscribe(cordelia, stream_name)
self.login_user(hamlet)
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
def notify(user_id: int) -> Dict[str, Any]:
return {
"id": user_id,
"flags": ["wildcard_mentioned"],
}
users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
result = self.client_patch(
f"/json/messages/{message_id}",
{
"content": "Hello @**everyone**",
},
)
self.assert_json_success(result)
# Extract the send_event call where event type is 'update_message'.
# Here we assert wildcard_mention_user_ids has been set properly.
called = False
for call_args in mock_send_event.call_args_list:
(arg_realm, arg_event, arg_notified_users) = call_args[0]
if arg_event["type"] == "update_message":
self.assertEqual(arg_event["type"], "update_message")
self.assertEqual(arg_event["wildcard_mention_user_ids"], [cordelia.id, hamlet.id])
self.assertEqual(
sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
)
called = True
self.assertTrue(called)
def test_wildcard_mention_restrictions_when_editing(self) -> None:
cordelia = self.example_user("cordelia")
shiva = self.example_user("shiva")
self.login("cordelia")
stream_name = "Macbeth"
self.make_stream(stream_name, history_public_to_subscribers=True)
self.subscribe(cordelia, stream_name)
self.subscribe(shiva, stream_name)
message_id = self.send_stream_message(cordelia, stream_name, "Hello everyone")
realm = cordelia.realm
do_set_realm_property(
realm,
"wildcard_mention_policy",
Realm.WILDCARD_MENTION_POLICY_MODERATORS,
acting_user=None,
)
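# The mocked subscriber counts below (17 and 14) are chosen to fall on
# either side of the large-stream threshold, so the moderators-only
# policy should only restrict wildcard mentions in the larger stream.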
with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
result = self.client_patch(
"/json/messages/" + str(message_id),
{
"content": "Hello @**everyone**",
},
)
self.assert_json_error(
result, "You do not have permission to use wildcard mentions in this stream."
)
with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=14):
result = self.client_patch(
"/json/messages/" + str(message_id),
{
"content": "Hello @**everyone**",
},
)
self.assert_json_success(result)
self.login("shiva")
message_id = self.send_stream_message(shiva, stream_name, "Hi everyone")
with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
result = self.client_patch(
"/json/messages/" + str(message_id),
{
"content": "Hello @**everyone**",
},
)
self.assert_json_success(result)
def test_topic_edit_history_saved_in_all_message(self) -> None:
self.login("hamlet")
id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
id2 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
def verify_edit_history(new_topic: str, len_edit_history: int) -> None:
for msg_id in [id1, id2, id5]:
msg = Message.objects.get(id=msg_id)
self.assertEqual(
new_topic,
msg.topic_name(),
)
# Since edit history is being generated by do_update_message,
# its contents can vary over time; so, to keep this test
# future-proof, we only verify its length.
self.assert_length(
orjson.loads(assert_is_not_none(msg.edit_history)), len_edit_history
)
for msg_id in [id3, id4]:
msg = Message.objects.get(id=msg_id)
self.assertEqual(msg.edit_history, None)
new_topic = "edited"
result = self.client_patch(
f"/json/messages/{id1}",
{
"topic": new_topic,
"propagate_mode": "change_later",
},
)
self.assert_json_success(result)
verify_edit_history(new_topic, 1)
new_topic = "edited2"
result = self.client_patch(
f"/json/messages/{id1}",
{
"topic": new_topic,
"propagate_mode": "change_later",
},
)
self.assert_json_success(result)
verify_edit_history(new_topic, 2)
def test_topic_and_content_edit(self) -> None:
self.login("hamlet")
id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", "message 1", "topic")
id2 = self.send_stream_message(self.example_user("iago"), "Denmark", "message 2", "topic")
id3 = self.send_stream_message(self.example_user("hamlet"), "Denmark", "message 3", "topic")
new_topic = "edited"
result = self.client_patch(
"/json/messages/" + str(id1),
{
"topic": new_topic,
"propagate_mode": "change_later",
"content": "edited message",
},
)
self.assert_json_success(result)
# Only id1's edit history should record the content change;
# the topic change should be present in all the messages.
msg1 = Message.objects.get(id=id1)
msg2 = Message.objects.get(id=id2)
msg3 = Message.objects.get(id=id3)
msg1_edit_history = orjson.loads(assert_is_not_none(msg1.edit_history))
self.assertTrue("prev_content" in msg1_edit_history[0].keys())
for msg in [msg2, msg3]:
self.assertFalse(
"prev_content" in orjson.loads(assert_is_not_none(msg.edit_history))[0].keys()
)
for msg in [msg1, msg2, msg3]:
self.assertEqual(
new_topic,
msg.topic_name(),
)
self.assert_length(orjson.loads(assert_is_not_none(msg.edit_history)), 1)
def test_propagate_topic_forward(self) -> None:
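# change_later should move the edited message and every later message
# in the same stream and topic; messages in other streams (id3) or
# other topics (id4) must be left untouched.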
self.login("hamlet")
id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
id2 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
result = self.client_patch(
f"/json/messages/{id1}",
{
"topic": "edited",
"propagate_mode": "change_later",
},
)
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
def test_propagate_all_topics(self) -> None:
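# change_all should move every message in the stream and topic,
# including id1, which was sent before the edited message (id2).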
self.login("hamlet")
id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
id2 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
id6 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic3")
result = self.client_patch(
f"/json/messages/{id2}",
{
"topic": "edited",
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topic1")
self.check_topic(id4, topic_name="topic2")
self.check_topic(id5, topic_name="edited")
self.check_topic(id6, topic_name="topic3")
def test_propagate_all_topics_with_different_uppercase_letters(self) -> None:
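# Topic matching for propagation is case-insensitive, but scoped to
# the message's stream: id3 lives in Verona and must keep its topic.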
self.login("hamlet")
id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
id2 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="Topic1")
id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topiC1")
id4 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="toPic1")
result = self.client_patch(
f"/json/messages/{id2}",
{
"topic": "edited",
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
self.check_topic(id1, topic_name="edited")
self.check_topic(id2, topic_name="edited")
self.check_topic(id3, topic_name="topiC1")
self.check_topic(id4, topic_name="edited")
def test_move_message_to_stream(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
"iago",
"test move stream",
"new stream",
"test",
# Set the user's translation language to German to test that
# it is overridden by the realm's default language.
"de",
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"send_notification_to_old_thread": "true",
},
HTTP_ACCEPT_LANGUAGE="de",
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 1)
self.assertEqual(
messages[0].content,
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 4)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_move_message_realm_admin_cant_move_to_another_realm(self) -> None:
user_profile = self.example_user("iago")
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
self.login("iago")
lear_realm = get_realm("lear")
new_stream = self.make_stream("new", lear_realm)
msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_error(result, "Invalid stream ID")
def test_move_message_realm_admin_cant_move_to_private_stream_without_subscription(
self,
) -> None:
user_profile = self.example_user("iago")
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
self.login("iago")
new_stream = self.make_stream("new", invite_only=True)
msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_error(result, "Invalid stream ID")
def test_move_message_realm_admin_cant_move_from_private_stream_without_subscription(
self,
) -> None:
user_profile = self.example_user("iago")
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
self.login("iago")
self.make_stream("privatestream", invite_only=True)
self.subscribe(user_profile, "privatestream")
msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")
self.unsubscribe(user_profile, "privatestream")
verona = get_stream("Verona", user_profile.realm)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": verona.id,
"propagate_mode": "change_all",
},
)
self.assert_json_error(
result,
"You don't have permission to move this message due to missing access to its stream",
)
def test_move_message_from_private_stream_message_access_checks(
self,
) -> None:
hamlet = self.example_user("hamlet")
user_profile = self.example_user("iago")
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
self.login("iago")
private_stream = self.make_stream(
"privatestream", invite_only=True, history_public_to_subscribers=False
)
self.subscribe(hamlet, "privatestream")
original_msg_id = self.send_stream_message(hamlet, "privatestream", topic_name="test123")
self.subscribe(user_profile, "privatestream")
new_msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")
# Now we unsub and hamlet sends a new message (we won't have access to it even after re-subbing!)
self.unsubscribe(user_profile, "privatestream")
new_inaccessible_msg_id = self.send_stream_message(
hamlet, "privatestream", topic_name="test123"
)
# Re-subscribe and send another message:
self.subscribe(user_profile, "privatestream")
newest_msg_id = self.send_stream_message(
user_profile, "privatestream", topic_name="test123"
)
verona = get_stream("Verona", user_profile.realm)
result = self.client_patch(
"/json/messages/" + str(new_msg_id),
{
"stream_id": verona.id,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
self.assertEqual(Message.objects.get(id=new_msg_id).recipient_id, verona.recipient_id)
self.assertEqual(Message.objects.get(id=newest_msg_id).recipient_id, verona.recipient_id)
# The original message and the new, inaccessible message weren't moved,
# because user_profile doesn't have access to them.
self.assertEqual(
Message.objects.get(id=original_msg_id).recipient_id, private_stream.recipient_id
)
self.assertEqual(
Message.objects.get(id=new_inaccessible_msg_id).recipient_id,
private_stream.recipient_id,
)
def test_move_message_to_stream_change_later(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
f"/json/messages/{msg_id_later}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_later",
"send_notification_to_old_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 2)
self.assertEqual(messages[0].id, msg_id)
self.assertEqual(
messages[1].content,
f"2 messages were moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 3)
self.assertEqual(messages[0].id, msg_id_later)
self.assertEqual(
messages[2].content,
f"2 messages were moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_move_message_to_stream_change_later_all_moved(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_later",
"send_notification_to_old_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 1)
self.assertEqual(
messages[0].content,
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 4)
self.assertEqual(messages[0].id, msg_id)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_move_message_to_stream_change_one(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
"/json/messages/" + str(msg_id_later),
{
"stream_id": new_stream.id,
"propagate_mode": "change_one",
"send_notification_to_old_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 3)
self.assertEqual(messages[0].id, msg_id)
self.assertEqual(
messages[2].content,
f"A message was moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 2)
self.assertEqual(messages[0].id, msg_id_later)
self.assertEqual(
messages[1].content,
f"A message was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_move_message_to_stream_change_all(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
"/json/messages/" + str(msg_id_later),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"send_notification_to_old_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 1)
self.assertEqual(
messages[0].content,
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 4)
self.assertEqual(messages[0].id, msg_id)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_move_message_between_streams_policy_setting(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_1", "new_stream_1", "test"
)
def check_move_message_according_to_policy(role: int, expect_fail: bool = False) -> None:
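# Helper: assign the given role to the acting user, attempt a
# change_all move, and verify the outcome by counting the messages
# left in the old topic and present in the new one.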
do_change_user_role(user_profile, role, acting_user=None)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
},
)
if expect_fail:
self.assert_json_error(result, "You don't have permission to move this message")
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 0)
else:
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 4)
# Check moving messages when policy is Realm.POLICY_ADMINS_ONLY.
do_set_realm_property(
user_profile.realm,
"move_messages_between_streams_policy",
Realm.POLICY_ADMINS_ONLY,
acting_user=None,
)
check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR, expect_fail=True)
check_move_message_according_to_policy(UserProfile.ROLE_REALM_ADMINISTRATOR)
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_2", "new_stream_2", "test"
)
# Check moving messages when policy is Realm.POLICY_MODERATORS_ONLY.
do_set_realm_property(
user_profile.realm,
"move_messages_between_streams_policy",
Realm.POLICY_MODERATORS_ONLY,
acting_user=None,
)
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)
check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR)
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_3", "new_stream_3", "test"
)
# Check moving messages when policy is Realm.POLICY_FULL_MEMBERS_ONLY.
do_set_realm_property(
user_profile.realm,
"move_messages_between_streams_policy",
Realm.POLICY_FULL_MEMBERS_ONLY,
acting_user=None,
)
do_set_realm_property(
user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
)
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)
do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_4", "new_stream_4", "test"
)
# Check moving messages when policy is Realm.POLICY_MEMBERS_ONLY.
do_set_realm_property(
user_profile.realm,
"move_messages_between_streams_policy",
Realm.POLICY_MEMBERS_ONLY,
acting_user=None,
)
check_move_message_according_to_policy(UserProfile.ROLE_GUEST, expect_fail=True)
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)
def test_move_message_to_stream_based_on_stream_post_policy(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_1", "new_stream_1", "test"
)
do_set_realm_property(
user_profile.realm,
"move_messages_between_streams_policy",
Realm.POLICY_MEMBERS_ONLY,
acting_user=None,
)
def check_move_message_to_stream(role: int, error_msg: Optional[str] = None) -> None:
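# Helper: assign the given role, attempt a change_all move, and assert
# either the expected error or a successful move of all messages.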
do_change_user_role(user_profile, role, acting_user=None)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
},
)
if error_msg is not None:
self.assert_json_error(result, error_msg)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 3)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 0)
else:
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 4)
# Check when stream_post_policy is STREAM_POST_POLICY_ADMINS.
do_change_stream_post_policy(
new_stream, Stream.STREAM_POST_POLICY_ADMINS, acting_user=user_profile
)
error_msg = "Only organization administrators can send to this stream."
check_move_message_to_stream(UserProfile.ROLE_MODERATOR, error_msg)
check_move_message_to_stream(UserProfile.ROLE_REALM_ADMINISTRATOR)
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_2", "new_stream_2", "test"
)
# Check when stream_post_policy is STREAM_POST_POLICY_MODERATORS.
do_change_stream_post_policy(
new_stream, Stream.STREAM_POST_POLICY_MODERATORS, acting_user=user_profile
)
error_msg = "Only organization administrators and moderators can send to this stream."
check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)
check_move_message_to_stream(UserProfile.ROLE_MODERATOR)
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_3", "new_stream_3", "test"
)
# Check when stream_post_policy is STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS.
do_change_stream_post_policy(
new_stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS, acting_user=user_profile
)
error_msg = "New members cannot send to this stream."
do_set_realm_property(
user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
)
check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)
do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
check_move_message_to_stream(UserProfile.ROLE_MEMBER)
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_4", "new_stream_4", "test"
)
# Check when stream_post_policy is STREAM_POST_POLICY_EVERYONE.
# Even in this case, guests are not allowed: we never allow guests to
# move messages between streams, so the stream_post_policy of the new
# stream does not matter for them.
do_change_stream_post_policy(
new_stream, Stream.STREAM_POST_POLICY_EVERYONE, acting_user=user_profile
)
do_set_realm_property(
user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
)
check_move_message_to_stream(
UserProfile.ROLE_GUEST, "You don't have permission to move this message"
)
check_move_message_to_stream(UserProfile.ROLE_MEMBER)
def test_move_message_to_stream_with_topic_editing_not_allowed(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"othello", "old_stream_1", "new_stream_1", "test"
)
realm = user_profile.realm
realm.edit_topic_policy = Realm.POLICY_ADMINS_ONLY
realm.save()
self.login("cordelia")
do_set_realm_property(
user_profile.realm,
"move_messages_between_streams_policy",
Realm.POLICY_MEMBERS_ONLY,
acting_user=None,
)
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"topic": "new topic",
},
)
self.assert_json_error(result, "You don't have permission to edit this message")
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 4)
def test_move_message_to_stream_and_topic(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
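# Moving both the stream and the topic in a single request is the most
# expensive code path, so pin the database query count and cache
# fetches to catch performance regressions.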
with self.assert_database_query_count(53), cache_tries_captured() as cache_tries:
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"propagate_mode": "change_all",
"send_notification_to_old_thread": "true",
"stream_id": new_stream.id,
"topic": "new topic",
},
)
self.assert_length(cache_tries, 13)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 1)
self.assertEqual(
messages[0].content,
f"This topic was moved to #**new stream>new topic** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, new_stream, "new topic")
self.assert_length(messages, 4)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
)
self.assert_json_success(result)
def test_inaccessible_msg_after_stream_change(self) -> None:
"""Simulates the case where message is moved to a stream where user is not a subscribed"""
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
guest_user = self.example_user("polonius")
non_guest_user = self.example_user("hamlet")
self.subscribe(guest_user, old_stream.name)
self.subscribe(non_guest_user, old_stream.name)
msg_id_to_test_access = self.send_stream_message(
user_profile, old_stream.name, topic_name="test", content="fourth"
)
self.assertEqual(
has_message_access(
guest_user, Message.objects.get(id=msg_id_to_test_access), has_user_message=False
),
True,
)
self.assertEqual(
has_message_access(
guest_user,
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
stream=old_stream,
),
True,
)
self.assertEqual(
has_message_access(
non_guest_user,
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
),
True,
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"topic": "new topic",
},
)
self.assert_json_success(result)
self.assertEqual(
has_message_access(
guest_user,
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
),
False,
)
self.assertEqual(
has_message_access(
non_guest_user,
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
),
True,
)
self.assertEqual(
# If the guest user were subscribed to the new stream,
# they'd have access; has_message_access does not validate
# the is_subscribed parameter.
has_message_access(
guest_user,
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
stream=new_stream,
is_subscribed=True,
),
True,
)
self.assertEqual(
has_message_access(
guest_user,
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
stream=new_stream,
),
False,
)
with self.assertRaises(AssertionError):
# Raises an AssertionError if you pass a stream that doesn't match the message.
has_message_access(
guest_user,
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
stream=old_stream,
)
self.assertEqual(
UserMessage.objects.filter(
user_profile_id=non_guest_user.id,
message_id=msg_id_to_test_access,
).count(),
0,
)
self.assertEqual(
has_message_access(
self.example_user("iago"),
Message.objects.get(id=msg_id_to_test_access),
has_user_message=False,
),
True,
)
def test_no_notify_move_message_to_stream(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"send_notification_to_old_thread": "false",
"send_notification_to_new_thread": "false",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 3)
def test_notify_new_thread_move_message_to_stream(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"send_notification_to_old_thread": "false",
"send_notification_to_new_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 4)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_notify_old_thread_move_message_to_stream(self) -> None:
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
"iago", "test move stream", "new stream", "test"
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
"send_notification_to_old_thread": "true",
"send_notification_to_new_thread": "false",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, old_stream, "test")
self.assert_length(messages, 1)
self.assertEqual(
messages[0].content,
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, new_stream, "test")
self.assert_length(messages, 3)
def test_notify_new_topic(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_all",
"send_notification_to_old_thread": "false",
"send_notification_to_new_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 4)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_notify_old_topic(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_all",
"send_notification_to_old_thread": "true",
"send_notification_to_new_thread": "false",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 1)
self.assertEqual(
messages[0].content,
f"This topic was moved to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 3)
def test_notify_both_topics(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_all",
"send_notification_to_old_thread": "true",
"send_notification_to_new_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 1)
self.assertEqual(
messages[0].content,
f"This topic was moved to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 4)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_notify_no_topic(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_all",
"send_notification_to_old_thread": "false",
"send_notification_to_new_thread": "false",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 3)
def test_notify_new_topics_after_message_move(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_one",
"send_notification_to_old_thread": "false",
"send_notification_to_new_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 2)
self.assertEqual(messages[0].content, "Second")
self.assertEqual(messages[1].content, "Third")
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 2)
self.assertEqual(messages[0].content, "First")
self.assertEqual(
messages[1].content,
f"A message was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_notify_old_topics_after_message_move(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_one",
"send_notification_to_old_thread": "true",
"send_notification_to_new_thread": "false",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 3)
self.assertEqual(messages[0].content, "Second")
self.assertEqual(messages[1].content, "Third")
self.assertEqual(
messages[2].content,
f"A message was moved from this topic to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 1)
self.assertEqual(messages[0].content, "First")
def test_notify_both_topics_after_message_move(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_one",
"send_notification_to_old_thread": "true",
"send_notification_to_new_thread": "true",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 3)
self.assertEqual(messages[0].content, "Second")
self.assertEqual(messages[1].content, "Third")
self.assertEqual(
messages[2].content,
f"A message was moved from this topic to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
)
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 2)
self.assertEqual(messages[0].content, "First")
self.assertEqual(
messages[1].content,
f"A message was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
)
def test_notify_no_topic_after_message_move(self) -> None:
user_profile = self.example_user("iago")
self.login("iago")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name="test", content="First"
)
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": "edited",
"propagate_mode": "change_one",
"send_notification_to_old_thread": "false",
"send_notification_to_new_thread": "false",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, "test")
self.assert_length(messages, 2)
self.assertEqual(messages[0].content, "Second")
self.assertEqual(messages[1].content, "Third")
messages = get_topic_messages(user_profile, stream, "edited")
self.assert_length(messages, 1)
self.assertEqual(messages[0].content, "First")
def test_notify_resolve_topic_long_name(self) -> None:
user_profile = self.example_user("hamlet")
self.login("hamlet")
stream = self.make_stream("public stream")
self.subscribe(user_profile, stream.name)
# Marking topics with a long name as resolved causes the new topic name to be truncated.
# We want to avoid having code paths believing that the topic is "moved" instead of
# "resolved" in this edge case.
topic_name = "a" * MAX_TOPIC_NAME_LENGTH
msg_id = self.send_stream_message(
user_profile, stream.name, topic_name=topic_name, content="First"
)
resolved_topic = RESOLVED_TOPIC_PREFIX + topic_name
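# Prepending RESOLVED_TOPIC_PREFIX pushes the name past
# MAX_TOPIC_NAME_LENGTH, so the stored topic should be the truncated
# form computed by truncate_topic below.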
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": resolved_topic,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
new_topic_name = truncate_topic(resolved_topic)
messages = get_topic_messages(user_profile, stream, new_topic_name)
self.assert_length(messages, 2)
self.assertEqual(messages[0].content, "First")
self.assertEqual(
messages[1].content,
f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as resolved.",
)
# Note that we are removing the prefix from the already truncated topic,
# so unresolved_topic_name will not be the same as the original topic_name
unresolved_topic_name = new_topic_name.replace(RESOLVED_TOPIC_PREFIX, "")
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": unresolved_topic_name,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, unresolved_topic_name)
self.assert_length(messages, 3)
self.assertEqual(
messages[2].content,
f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as unresolved.",
)
def test_notify_resolve_and_move_topic(self) -> None:
user_profile = self.example_user("hamlet")
self.login("hamlet")
stream = self.make_stream("public stream")
topic = "test"
self.subscribe(user_profile, stream.name)
# Resolve a topic normally first
msg_id = self.send_stream_message(user_profile, stream.name, "foo", topic_name=topic)
resolved_topic = RESOLVED_TOPIC_PREFIX + topic
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": resolved_topic,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, resolved_topic)
self.assert_length(messages, 2)
self.assertEqual(
messages[1].content,
f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as resolved.",
)
# Test unresolving a topic while moving it (✔ test -> bar)
new_topic = "bar"
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": new_topic,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, new_topic)
self.assert_length(messages, 4)
self.assertEqual(
messages[2].content,
f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as unresolved.",
)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**public stream>✔ test** by @_**{user_profile.full_name}|{user_profile.id}**.",
)
# Now test moving the topic while also resolving it (bar -> ✔ baz)
new_resolved_topic = RESOLVED_TOPIC_PREFIX + "baz"
result = self.client_patch(
"/json/messages/" + str(msg_id),
{
"topic": new_resolved_topic,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
messages = get_topic_messages(user_profile, stream, new_resolved_topic)
self.assert_length(messages, 6)
self.assertEqual(
messages[4].content,
f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as resolved.",
)
self.assertEqual(
messages[5].content,
f"This topic was moved here from #**public stream>{new_topic}** by @_**{user_profile.full_name}|{user_profile.id}**.",
)
def parameterized_test_move_message_involving_private_stream(
self,
from_invite_only: bool,
history_public_to_subscribers: bool,
user_messages_created: bool,
to_invite_only: bool = True,
) -> None:
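# Shared body for the test_move_message_from_* cases below: move a
# message between streams with the given privacy/history settings and
# check whether UserMessage rows are created for the user gaining
# access and removed for the user losing it.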
admin_user = self.example_user("iago")
user_losing_access = self.example_user("cordelia")
user_gaining_access = self.example_user("hamlet")
self.login("iago")
old_stream = self.make_stream("test move stream", invite_only=from_invite_only)
new_stream = self.make_stream(
"new stream",
invite_only=to_invite_only,
history_public_to_subscribers=history_public_to_subscribers,
)
self.subscribe(admin_user, old_stream.name)
self.subscribe(user_losing_access, old_stream.name)
self.subscribe(admin_user, new_stream.name)
self.subscribe(user_gaining_access, new_stream.name)
msg_id = self.send_stream_message(
admin_user, old_stream.name, topic_name="test", content="First"
)
self.send_stream_message(admin_user, old_stream.name, topic_name="test", content="Second")
self.assertEqual(
UserMessage.objects.filter(
user_profile_id=user_losing_access.id,
message_id=msg_id,
).count(),
1,
)
self.assertEqual(
UserMessage.objects.filter(
user_profile_id=user_gaining_access.id,
message_id=msg_id,
).count(),
0,
)
result = self.client_patch(
f"/json/messages/{msg_id}",
{
"stream_id": new_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
messages = get_topic_messages(admin_user, old_stream, "test")
self.assert_length(messages, 0)
messages = get_topic_messages(admin_user, new_stream, "test")
self.assert_length(messages, 3)
self.assertEqual(
UserMessage.objects.filter(
user_profile_id=user_losing_access.id,
message_id=msg_id,
).count(),
0,
)
# When the history is shared, UserMessage is not created for the user but the user
# can see the message.
self.assertEqual(
UserMessage.objects.filter(
user_profile_id=user_gaining_access.id,
message_id=msg_id,
).count(),
1 if user_messages_created else 0,
)
def test_move_message_from_public_to_private_stream_not_shared_history(self) -> None:
self.parameterized_test_move_message_involving_private_stream(
from_invite_only=False,
history_public_to_subscribers=False,
user_messages_created=True,
)
def test_move_message_from_public_to_private_stream_shared_history(self) -> None:
self.parameterized_test_move_message_involving_private_stream(
from_invite_only=False,
history_public_to_subscribers=True,
user_messages_created=False,
)
def test_move_message_from_private_to_private_stream_not_shared_history(self) -> None:
self.parameterized_test_move_message_involving_private_stream(
from_invite_only=True,
history_public_to_subscribers=False,
user_messages_created=True,
)
def test_move_message_from_private_to_private_stream_shared_history(self) -> None:
self.parameterized_test_move_message_involving_private_stream(
from_invite_only=True,
history_public_to_subscribers=True,
user_messages_created=False,
)
def test_move_message_from_private_to_public(self) -> None:
self.parameterized_test_move_message_involving_private_stream(
from_invite_only=True,
history_public_to_subscribers=True,
user_messages_created=False,
to_invite_only=False,
)
def test_can_move_messages_between_streams(self) -> None:
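# check_has_permission_policies (from the shared test framework) is
# expected to exercise every value of the policy setting against
# validation_func across the standard set of user roles.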
def validation_func(user_profile: UserProfile) -> bool:
user_profile.refresh_from_db()
return user_profile.can_move_messages_between_streams()
self.check_has_permission_policies("move_messages_between_streams_policy", validation_func)
def test_mark_topic_as_resolved(self) -> None:
self.login("iago")
admin_user = self.example_user("iago")
hamlet = self.example_user("hamlet")
cordelia = self.example_user("cordelia")
aaron = self.example_user("aaron")
# Set the user's translation language to German to test that
# it is overridden by the realm's default language.
admin_user.default_language = "de"
admin_user.save()
stream = self.make_stream("new")
self.subscribe(admin_user, stream.name)
self.subscribe(hamlet, stream.name)
self.subscribe(cordelia, stream.name)
self.subscribe(aaron, stream.name)
original_topic = "topic 1"
id1 = self.send_stream_message(hamlet, "new", topic_name=original_topic)
id2 = self.send_stream_message(admin_user, "new", topic_name=original_topic)
msg1 = Message.objects.get(id=id1)
do_add_reaction(aaron, msg1, "tada", "1f389", "unicode_emoji")
# Check that we don't incorrectly send "unresolve topic"
# notifications when asked to preserve the current topic.
result = self.client_patch(
"/json/messages/" + str(id1),
{
"topic": original_topic,
"propagate_mode": "change_all",
},
)
self.assert_json_error(result, "Nothing to change")
resolved_topic = RESOLVED_TOPIC_PREFIX + original_topic
result = self.client_patch(
"/json/messages/" + str(id1),
{
"topic": resolved_topic,
"propagate_mode": "change_all",
},
HTTP_ACCEPT_LANGUAGE="de",
)
self.assert_json_success(result)
for msg_id in [id1, id2]:
msg = Message.objects.get(id=msg_id)
self.assertEqual(
resolved_topic,
msg.topic_name(),
)
messages = get_topic_messages(admin_user, stream, resolved_topic)
self.assert_length(messages, 3)
self.assertEqual(
messages[2].content,
f"@_**Iago|{admin_user.id}** has marked this topic as resolved.",
)
# Check that the topic resolved notification message is unread only for participants.
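# UserMessage.where_unread() presumably emits raw SQL over the flags
# bitfield, hence the .extra(where=[...]) escape hatch here.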
assert (
UserMessage.objects.filter(
user_profile__in=[admin_user, hamlet, aaron], message__id=messages[2].id
)
.extra(where=[UserMessage.where_unread()])
.count()
== 3
)
assert (
UserMessage.objects.filter(user_profile=cordelia, message__id=messages[2].id)
.extra(where=[UserMessage.where_unread()])
.count()
== 0
)
# Now move to a weird state and confirm we get the normal topic moved message.
weird_topic = "✔ ✔✔" + original_topic
result = self.client_patch(
"/json/messages/" + str(id1),
{
"topic": weird_topic,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
for msg_id in [id1, id2]:
msg = Message.objects.get(id=msg_id)
self.assertEqual(
weird_topic,
msg.topic_name(),
)
messages = get_topic_messages(admin_user, stream, weird_topic)
self.assert_length(messages, 4)
self.assertEqual(
messages[2].content,
f"@_**Iago|{admin_user.id}** has marked this topic as resolved.",
)
self.assertEqual(
messages[3].content,
f"This topic was moved here from #**new>✔ topic 1** by @_**Iago|{admin_user.id}**.",
)
unresolved_topic = original_topic
result = self.client_patch(
"/json/messages/" + str(id1),
{
"topic": unresolved_topic,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
for msg_id in [id1, id2]:
msg = Message.objects.get(id=msg_id)
self.assertEqual(
unresolved_topic,
msg.topic_name(),
)
messages = get_topic_messages(admin_user, stream, unresolved_topic)
self.assert_length(messages, 5)
self.assertEqual(
messages[2].content, f"@_**Iago|{admin_user.id}** has marked this topic as resolved."
)
self.assertEqual(
messages[4].content,
f"@_**Iago|{admin_user.id}** has marked this topic as unresolved.",
)
# Check that the topic unresolved notification message is unread only for participants.
assert (
UserMessage.objects.filter(
user_profile__in=[admin_user, hamlet, aaron], message__id=messages[4].id
)
.extra(where=[UserMessage.where_unread()])
.count()
== 3
)
assert (
UserMessage.objects.filter(user_profile=cordelia, message__id=messages[4].id)
.extra(where=[UserMessage.where_unread()])
.count()
== 0
)
# Now move to another stream while resolving the topic and
# check the notifications.
final_stream = self.make_stream("final")
self.subscribe(admin_user, final_stream.name)
result = self.client_patch(
"/json/messages/" + str(id1),
{
"topic": resolved_topic,
"stream_id": final_stream.id,
"propagate_mode": "change_all",
},
)
self.assert_json_success(result)
for msg_id in [id1, id2]:
msg = Message.objects.get(id=msg_id)
self.assertEqual(
resolved_topic,
msg.topic_name(),
)
messages = get_topic_messages(admin_user, final_stream, resolved_topic)
# TODO: This should be 7 -- but currently we never trigger
# resolve-topic notifications when moving the stream, even if
# the resolve-topic state is changed at that time.
self.assert_length(messages, 6)
self.assertEqual(
messages[5].content,
f"This topic was moved here from #**new>topic 1** by @_**Iago|{admin_user.id}**.",
)
class DeleteMessageTest(ZulipTestCase):
def test_delete_message_invalid_request_format(self) -> None:
self.login("iago")
hamlet = self.example_user("hamlet")
msg_id = self.send_stream_message(hamlet, "Denmark")
result = self.client_delete(f"/json/messages/{msg_id + 1}", {"message_id": msg_id})
self.assert_json_error(result, "Invalid message(s)")
result = self.client_delete(f"/json/messages/{msg_id}")
self.assert_json_success(result)
def test_delete_message_by_user(self) -> None:
def set_message_deleting_params(
delete_own_message_policy: int, message_content_delete_limit_seconds: Union[int, str]
) -> None:
self.login("iago")
result = self.client_patch(
"/json/realm",
{
"delete_own_message_policy": delete_own_message_policy,
"message_content_delete_limit_seconds": orjson.dumps(
message_content_delete_limit_seconds
).decode(),
},
)
self.assert_json_success(result)
def test_delete_message_by_admin(msg_id: int) -> "TestHttpResponse":
self.login("iago")
result = self.client_delete(f"/json/messages/{msg_id}")
return result
def test_delete_message_by_owner(msg_id: int) -> "TestHttpResponse":
self.login("hamlet")
result = self.client_delete(f"/json/messages/{msg_id}")
return result
def test_delete_message_by_other_user(msg_id: int) -> "TestHttpResponse":
self.login("cordelia")
result = self.client_delete(f"/json/messages/{msg_id}")
return result
# Test when message deleting is not allowed (the default).
set_message_deleting_params(Realm.POLICY_ADMINS_ONLY, "unlimited")
hamlet = self.example_user("hamlet")
self.login_user(hamlet)
msg_id = self.send_stream_message(hamlet, "Denmark")
result = test_delete_message_by_owner(msg_id=msg_id)
self.assert_json_error(result, "You don't have permission to delete this message")
result = test_delete_message_by_other_user(msg_id=msg_id)
self.assert_json_error(result, "You don't have permission to delete this message")
result = test_delete_message_by_admin(msg_id=msg_id)
self.assert_json_success(result)
# Test if message deleting is allowed.
        # Test if time limit is None (no limit).
set_message_deleting_params(Realm.POLICY_EVERYONE, "unlimited")
msg_id = self.send_stream_message(hamlet, "Denmark")
message = Message.objects.get(id=msg_id)
message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
message.save()
result = test_delete_message_by_other_user(msg_id=msg_id)
self.assert_json_error(result, "You don't have permission to delete this message")
result = test_delete_message_by_owner(msg_id=msg_id)
self.assert_json_success(result)
# Test if time limit is non-zero.
set_message_deleting_params(Realm.POLICY_EVERYONE, 240)
msg_id_1 = self.send_stream_message(hamlet, "Denmark")
message = Message.objects.get(id=msg_id_1)
message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
message.save()
msg_id_2 = self.send_stream_message(hamlet, "Denmark")
message = Message.objects.get(id=msg_id_2)
message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
message.save()
result = test_delete_message_by_other_user(msg_id=msg_id_1)
self.assert_json_error(result, "You don't have permission to delete this message")
result = test_delete_message_by_owner(msg_id=msg_id_1)
self.assert_json_success(result)
result = test_delete_message_by_owner(msg_id=msg_id_2)
self.assert_json_error(result, "The time limit for deleting this message has passed")
# No limit for admin.
result = test_delete_message_by_admin(msg_id=msg_id_2)
self.assert_json_success(result)
# Test multiple delete requests with no latency issues
msg_id = self.send_stream_message(hamlet, "Denmark")
result = test_delete_message_by_owner(msg_id=msg_id)
self.assert_json_success(result)
result = test_delete_message_by_owner(msg_id=msg_id)
self.assert_json_error(result, "Invalid message(s)")
# Test handling of 500 error caused by multiple delete requests due to latency.
# see issue #11219.
with mock.patch("zerver.views.message_edit.do_delete_messages") as m, mock.patch(
"zerver.views.message_edit.validate_can_delete_message", return_value=None
), mock.patch("zerver.views.message_edit.access_message", return_value=(None, None)):
m.side_effect = IntegrityError()
result = test_delete_message_by_owner(msg_id=msg_id)
self.assert_json_error(result, "Message already deleted")
m.side_effect = Message.DoesNotExist()
result = test_delete_message_by_owner(msg_id=msg_id)
self.assert_json_error(result, "Message already deleted")
def test_delete_message_according_to_delete_own_message_policy(self) -> None:
def check_delete_message_by_sender(
sender_name: str, error_msg: Optional[str] = None
) -> None:
sender = self.example_user(sender_name)
msg_id = self.send_stream_message(sender, "Verona")
self.login_user(sender)
result = self.client_delete(f"/json/messages/{msg_id}")
if error_msg is None:
self.assert_json_success(result)
else:
self.assert_json_error(result, error_msg)
realm = get_realm("zulip")
do_set_realm_property(
realm, "delete_own_message_policy", Realm.POLICY_ADMINS_ONLY, acting_user=None
)
check_delete_message_by_sender("shiva", "You don't have permission to delete this message")
check_delete_message_by_sender("iago")
do_set_realm_property(
realm, "delete_own_message_policy", Realm.POLICY_MODERATORS_ONLY, acting_user=None
)
check_delete_message_by_sender(
"cordelia", "You don't have permission to delete this message"
)
check_delete_message_by_sender("shiva")
do_set_realm_property(
realm, "delete_own_message_policy", Realm.POLICY_MEMBERS_ONLY, acting_user=None
)
check_delete_message_by_sender(
"polonius", "You don't have permission to delete this message"
)
check_delete_message_by_sender("cordelia")
do_set_realm_property(
realm, "delete_own_message_policy", Realm.POLICY_FULL_MEMBERS_ONLY, acting_user=None
)
do_set_realm_property(realm, "waiting_period_threshold", 10, acting_user=None)
cordelia = self.example_user("cordelia")
cordelia.date_joined = timezone_now() - datetime.timedelta(days=9)
cordelia.save()
check_delete_message_by_sender(
"cordelia", "You don't have permission to delete this message"
)
cordelia.date_joined = timezone_now() - datetime.timedelta(days=11)
cordelia.save()
check_delete_message_by_sender("cordelia")
do_set_realm_property(
realm, "delete_own_message_policy", Realm.POLICY_EVERYONE, acting_user=None
)
check_delete_message_by_sender("cordelia")
check_delete_message_by_sender("polonius")
def test_delete_event_sent_after_transaction_commits(self) -> None:
"""
Tests that `send_event` is hooked to `transaction.on_commit`. This is important, because
we don't want to end up holding locks on message rows for too long if the event queue runs
into a problem.
"""
hamlet = self.example_user("hamlet")
self.send_stream_message(hamlet, "Denmark")
message = self.get_last_message()
with self.tornado_redirected_to_list([], expected_num_events=1):
with mock.patch("zerver.actions.message_edit.send_event") as m:
m.side_effect = AssertionError(
"Events should be sent only after the transaction commits."
)
do_delete_messages(hamlet.realm, [message])
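# A minimal sketch (not Zulip's implementation; names here are hypothetical)
# of the pattern the test above verifies: deferring event delivery with
# transaction.on_commit so that row locks are released before the event
# queue is contacted.
def _sketch_delete_with_deferred_event(realm, messages, send_event_fn):
    from django.db import transaction
    with transaction.atomic():
        ids = [m.id for m in messages]
        # ... message rows would be deleted here, while locks are held ...
        event = {"type": "delete_message", "message_ids": ids}
        # The callback runs only after COMMIT, outside the locked section.
        transaction.on_commit(lambda: send_event_fn(realm, event, []))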
|
{
"content_hash": "f8f9873e13916b6836cdc96262f4a91e",
"timestamp": "",
"source": "github",
"line_count": 3339,
"max_line_length": 130,
"avg_line_length": 40.74094040131776,
"alnum_prop": 0.5804210712027875,
"repo_name": "zulip/zulip",
"id": "87621ce598fd4ee9f6fe823e12c0ba645d13794a",
"size": "136048",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "zerver/tests/test_message_edit.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "509211"
},
{
"name": "Dockerfile",
"bytes": "4219"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "696430"
},
{
"name": "Handlebars",
"bytes": "384277"
},
{
"name": "JavaScript",
"bytes": "4098367"
},
{
"name": "Perl",
"bytes": "10163"
},
{
"name": "Puppet",
"bytes": "112433"
},
{
"name": "Python",
"bytes": "10336945"
},
{
"name": "Ruby",
"bytes": "3166"
},
{
"name": "Shell",
"bytes": "147162"
},
{
"name": "TypeScript",
"bytes": "286785"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, division, absolute_import  # , print_function
class kvstore(object):
def __init__(self, db):
self.db = db
def __getitem__(self, k):
return self.db._config_value_get(k)
def __setitem__(self, k,v):
self.db._config_value_set(k,v)
def __delitem__(self, k):
self.db._config_del(k)
class DatabaseBase(object):
def __init__(self, workerid):
self.ID = workerid
# dictionary interface for config
self.CONF = kvstore(self)
# database ID
self.dbid = self.CONF['databaseid']
if self.dbid is None:
import os, base64
code = base64.b32encode( os.urandom(5) )
self.dbid = code.decode("ascii").rstrip("=")
self.CONF['databaseid'] = self.dbid
def close(self):
pass
    # config interface
def _config_value_get(self, key):
pass
def _config_value_set(self, key, val):
pass
def _config_del(self, key):
pass
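# Minimal in-memory subclass, for illustration only (not part of Kasaya),
# showing how the kvstore/DatabaseBase pair is meant to be used: CONF gives
# dict-style access backed by the three _config_* hooks.
class _DictDatabase(DatabaseBase):
    def __init__(self, workerid):
        self._store = {}  # must exist before DatabaseBase reads 'databaseid'
        super(_DictDatabase, self).__init__(workerid)
    def _config_value_get(self, key):
        return self._store.get(key)
    def _config_value_set(self, key, val):
        self._store[key] = val
    def _config_del(self, key):
        self._store.pop(key, None)
# db = _DictDatabase("worker-1")
# db.CONF['poll_interval'] = 5   # dict-style write via kvstore
# assert db.CONF['poll_interval'] == 5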
|
{
"content_hash": "0131ce302bc8907354bc1206ffb36cb2",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 83,
"avg_line_length": 22.02127659574468,
"alnum_prop": 0.561352657004831,
"repo_name": "AYAtechnologies/Kasaya-esb",
"id": "c4c505a2d12f449b3eb3bfa29817d64526b8502d",
"size": "1050",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kasaya/workers/asyncd/db/base.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "254516"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: vultr_block_storage_facts
short_description: Gather facts about the Vultr block storage volumes available.
description:
- Gather facts about block storage volumes available in Vultr.
version_added: "2.7"
author: "Yanis Guenane (@Spredzy)"
deprecated:
removed_in: "2.12"
why: Transformed into an info module.
alternative: Use M(vultr_block_storage_info) instead.
extends_documentation_fragment: vultr
'''
EXAMPLES = r'''
- name: Gather Vultr block storage volumes facts
local_action:
module: vultr_block_storage_facts
- name: Print the gathered facts
debug:
var: ansible_facts.vultr_block_storage_facts
'''
RETURN = r'''
---
vultr_api:
description: Response from Vultr API with a few additions/modification
returned: success
type: complex
contains:
api_account:
description: Account used in the ini file to select the key
returned: success
type: str
sample: default
api_timeout:
description: Timeout used for the API requests
returned: success
type: int
sample: 60
api_retries:
description: Amount of max retries for the API requests
returned: success
type: int
sample: 5
api_retry_max_delay:
description: Exponential backoff delay in seconds between retries up to this max delay value.
returned: success
type: int
sample: 12
version_added: '2.9'
api_endpoint:
description: Endpoint used for the API requests
returned: success
type: str
sample: "https://api.vultr.com"
vultr_block_storage_facts:
description: Response from Vultr API
returned: success
type: complex
contains:
"vultr_block_storage_facts": [
{
"attached_to_id": null,
"cost_per_month": 1.0,
"date_created": "2018-07-24 12:59:59",
"id": 17332323,
"name": "ansible-test-volume",
"region": "New Jersey",
"size": 10,
"status": "active"
}
]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vultr import (
Vultr,
vultr_argument_spec,
)
class AnsibleVultrBlockStorageFacts(Vultr):
def __init__(self, module):
super(AnsibleVultrBlockStorageFacts, self).__init__(module, "vultr_block_storage_facts")
self.returns = {
'attached_to_SUBID': dict(key='attached_to_id'),
'cost_per_month': dict(convert_to='float'),
'date_created': dict(),
'SUBID': dict(key='id'),
'label': dict(key='name'),
'DCID': dict(key='region', transform=self._get_region_name),
'size_gb': dict(key='size', convert_to='int'),
'status': dict()
}
def _get_region_name(self, region):
return self.get_region(region, 'DCID').get('name')
def get_block_storage_volumes(self):
return self.api_query(path="/v1/block/list")
def main():
argument_spec = vultr_argument_spec()
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
volume_facts = AnsibleVultrBlockStorageFacts(module)
result = volume_facts.get_result(volume_facts.get_block_storage_volumes())
ansible_facts = {
'vultr_block_storage_facts': result['vultr_block_storage_facts']
}
module.exit_json(ansible_facts=ansible_facts, **result)
if __name__ == '__main__':
main()
|
{
"content_hash": "516c32471adbe73d3e677c6c9828f30b",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 99,
"avg_line_length": 27.78030303030303,
"alnum_prop": 0.6353967821107173,
"repo_name": "thaim/ansible",
"id": "2da66d1697651e18821451ef13ef80b7ae125b27",
"size": "3835",
"binary": false,
"copies": "7",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/cloud/vultr/_vultr_block_storage_facts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from permuta import *
import permstruct
import permstruct.dag
from permstruct import *
from permstruct.dag import taylor_dag
import sys
# -- Example from Kuszmaul paper -- #
# STATUS ================================================ >
task = '2314_2413_3412'
patts = [ Permutation([ int(c) for c in p ]) for p in task.split('_') ]
struct(patts)
# struct(patts, size = 4, verify_bound = 10, ask_verify_higher = True)
|
{
"content_hash": "27723415ac00aeb045fc9e21b58d8cac",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 71,
"avg_line_length": 25.444444444444443,
"alnum_prop": 0.631004366812227,
"repo_name": "PermutaTriangle/PermStruct",
"id": "bba27e6b7af07c6aff08774fdc3e5e809d5d011d",
"size": "458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/classical_3x4/2314_2413_3412.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Haskell",
"bytes": "891"
},
{
"name": "Makefile",
"bytes": "381"
},
{
"name": "Python",
"bytes": "898912"
}
],
"symlink_target": ""
}
|
from pyperf import perf_counter
from synapse.util.caches.lrucache import LruCache
async def main(reactor, loops):
"""
Benchmark `loops` number of insertions into LruCache without eviction.
"""
cache = LruCache(loops)
start = perf_counter()
for i in range(loops):
cache[i] = True
end = perf_counter() - start
return end
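# Companion sketch (not part of the synmark suite): sizing the cache below
# `loops` exercises the eviction path as well, for comparison with the
# no-eviction numbers above.
async def main_with_eviction(reactor, loops):
    cache = LruCache(max(loops // 2, 1))
    start = perf_counter()
    for i in range(loops):
        cache[i] = True  # the second half of the inserts evict old entries
    end = perf_counter() - start
    return end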
|
{
"content_hash": "de80d10a8f673889f8423b9419cd85c5",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 19.263157894736842,
"alnum_prop": 0.6584699453551912,
"repo_name": "matrix-org/synapse",
"id": "9b4a4241493f89d7710be8849128f107e4b94b54",
"size": "963",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "synmark/suites/lrucache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7229"
},
{
"name": "Dockerfile",
"bytes": "9316"
},
{
"name": "Gherkin",
"bytes": "441"
},
{
"name": "HTML",
"bytes": "66000"
},
{
"name": "JavaScript",
"bytes": "15635"
},
{
"name": "Jinja",
"bytes": "7687"
},
{
"name": "Lua",
"bytes": "241"
},
{
"name": "Perl",
"bytes": "28191"
},
{
"name": "Python",
"bytes": "10632037"
},
{
"name": "Rust",
"bytes": "57034"
},
{
"name": "Shell",
"bytes": "53124"
}
],
"symlink_target": ""
}
|
from .models import get_environment, get_predictive_model, get_item_selector, get_active_environment_info, \
Answer, Item, PracticeContext, PracticeSet,\
get_filter, get_mastery_trashold, get_time_for_knowledge_overview, \
get_forbidden_items
from django.db import transaction, connection
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import ensure_csrf_cookie
from lazysignup.decorators import allow_lazy_user
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from proso.django.cache import cache_page_conditional
from proso.django.enrichment import enrich_json_objects_by_object_type
from proso.django.request import is_time_overridden, get_time, get_language, load_query_json
from proso.django.response import render, render_json, BadRequestException
from proso.list import flatten
from proso.time import timer
from proso_common.models import get_config
from proso_user.models import get_user_id
from random import sample
import json
import logging
import proso.svg
import proso_common.views
from django.conf import settings
LOGGER = logging.getLogger('django.request')
@cache_page_conditional(
condition=lambda request, args, kwargs: 'stats' not in request.GET and kwargs['object_class'] not in [PracticeSet])
def show_one(request, object_class, id):
return proso_common.views.show_one(
request, enrich_json_objects_by_object_type, object_class, id, template='models_json.html')
@cache_page_conditional(
condition=lambda request, args, kwargs: 'stats' not in request.GET and kwargs['object_class'] not in [PracticeSet])
def show_more(request, object_class, should_cache=True):
to_json_kwargs = {}
def _load_objects(request, object_class):
objs = object_class.objects
if hasattr(objs, 'prepare_related'):
objs = objs.prepare_related()
db_filter = proso_common.views.get_db_filter(request)
objs = objs.all() if db_filter is None else objs.filter(**db_filter)
if object_class == PracticeSet:
user_id = get_user_id(request, allow_override=True)
objs = objs.filter(answer__user_id=user_id).order_by('-id')
return objs
return proso_common.views.show_more(
request, enrich_json_objects_by_object_type, _load_objects, object_class,
should_cache=should_cache, template='models_json.html', to_json_kwargs=to_json_kwargs)
@allow_lazy_user
def status(request):
user_id = get_user_id(request)
time = get_time(request)
environment = get_environment()
if is_time_overridden(request):
environment.shift_time(time)
return render_json(request, {
'object_type': 'status',
'number_of_answers': environment.number_of_answers(user=user_id),
'number_of_correct_answers': environment.number_of_correct_answers(user=user_id),
'environment_info': get_active_environment_info(),
}, template='models_json.html')
@cache_page_conditional(condition=lambda request, args, kwargs: 'stats' not in request.GET, cache='file' if 'file' in settings.CACHES else None)
def to_practice(request):
practice_filter = get_filter(request)
item_ids = Item.objects.filter_all_reachable_leaves(practice_filter, get_language(request))
if len(item_ids) == 0:
return render_json(request, {
'error': _('There is no item for the given filter to practice.'),
'error_type': 'empty_practice'
}, status=404, template='models_json.html')
result = [Item.objects.item_id_to_json(item_id) for item_id in item_ids]
return render_json(request, result, template='models_json.html', help_text=to_practice.__doc__)
@cache_page(60 * 60 * 24 * 7)
def to_practice_counts(request):
"""
Get number of items available to practice.
filters: -- use this or body
json as in BODY
language:
language of the items
BODY
json in following format:
{
"#identifier": [] -- custom identifier (str) and filter
...
}
"""
data = None
if request.method == "POST":
data = json.loads(request.body.decode("utf-8"))["filters"]
if "filters" in request.GET:
data = load_query_json(request.GET, "filters")
if data is None or len(data) == 0:
return render_json(request, {}, template='models_json.html', help_text=to_practice_counts.__doc__)
language = get_language(request)
timer('to_practice_counts')
filter_names, filter_filters = list(zip(*sorted(data.items())))
reachable_leaves = Item.objects.filter_all_reachable_leaves_many(filter_filters, language)
response = {
group_id: {
'filter': data[group_id],
'number_of_items': len(items),
}
for group_id, items in zip(filter_names, reachable_leaves)
}
LOGGER.debug("to_practice_counts - getting items in groups took %s seconds", (timer('to_practice_counts')))
return render_json(request, response, template='models_json.html', help_text=to_practice_counts.__doc__)
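# Example "filters" payload for to_practice_counts (identifiers are
# illustrative, not from a real deployment): each key is a caller-chosen
# label mapped to a filter -- a list of lists of item identifiers.
_EXAMPLE_TO_PRACTICE_COUNTS_BODY = {
    "filters": {
        "category/geography": [["category/world"]],
        "category/all": [[]],
    }
}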
@allow_lazy_user
def answers(request):
limit = min(int(request.GET.get('limit', 10)), 1000)
user_id = get_user_id(request)
item_ids = Item.objects.filter_all_reachable_leaves(get_filter(request), get_language(request))
found_answers = Answer.objects.answers(Answer.objects.filter(item_asked_id__in=item_ids, user_id=user_id).order_by('-id').values_list('id', flat=True)[:limit])
return render_json(request, found_answers, template='models_json.html', help_text=answers.__doc__)
def practice_image(request):
user_id = get_user_id(request)
limit = min(int(request.GET.get('limit', 10)), 100)
item_ids = Item.objects.filter_all_reachable_leaves(get_filter(request), get_language(request))
answers = Answer.objects.filter(user_id=user_id).filter(item_asked_id__in=item_ids).order_by('-id')[:limit]
predictive_model = get_predictive_model()
environment = get_environment()
predictions = predictive_model.predict_more_items(environment, user=-1, items=item_ids, time=get_time_for_knowledge_overview(request))
items_in_order = list(zip(*sorted(zip(predictions, item_ids), reverse=True)))[1] if len(item_ids) > 1 else []
item_prediction = dict(list(zip(item_ids, predictions)))
item_position = dict(list(zip(items_in_order, list(range(len(item_ids))))))
svg = proso.svg.Printer()
answers = sorted(list(answers), key=lambda a: a.id)
SQUARE_SIZE = 10
OFFSET_X = SQUARE_SIZE
OFFSET_Y = SQUARE_SIZE * 3
for i, item in enumerate(items_in_order):
svg.print_square(OFFSET_X + SQUARE_SIZE * i, OFFSET_Y - SQUARE_SIZE, SQUARE_SIZE, int(255 * item_prediction[item]))
for i, answer in enumerate(answers):
for j in range(len(items_in_order)):
svg.print_square(OFFSET_X + SQUARE_SIZE * j, OFFSET_Y + SQUARE_SIZE * i, SQUARE_SIZE, 255, border_color=200)
color = 'green' if answer.item_asked_id == answer.item_answered_id else 'red'
svg.print_square(
OFFSET_X + SQUARE_SIZE * item_position[answer.item_asked_id],
OFFSET_Y + SQUARE_SIZE * i, SQUARE_SIZE, color, border_color=0)
svg.print_text(OFFSET_X + SQUARE_SIZE * (len(items_in_order) + 1), OFFSET_Y + SQUARE_SIZE * i + 0.8 * SQUARE_SIZE, answer.time.strftime('%H:%M:%S %Y-%m-%d'), font_size=10)
return HttpResponse(str(svg), content_type="image/svg+xml")
def answers_per_month(request):
try:
from pylab import rcParams
import matplotlib.pyplot as plt
import pandas
import seaborn as sns
except ImportError:
return HttpResponse('Can not import python packages for analysis.', status=503)
categories = load_query_json(request.GET, "categories", "[]")
translated = Item.objects.translate_identifiers(categories, get_language(request))
translated_inverted = {item: name for name, item in translated.items()}
children = pandas.DataFrame([
{'item': item, 'category': translated_inverted[category]}
for category, items in Item.objects.get_reachable_children(
list(translated.values()), get_language(request)
).items()
for item in items
])
with connection.cursor() as cursor:
cursor.execute(
'''
SELECT item_id, date_part('month', time), COUNT(1)
FROM proso_models_answer
GROUP BY 1, 2
'''
)
data = []
for item, month, answers in cursor:
data.append({
'item': item,
'month': month,
'answers': answers,
})
data = pandas.DataFrame(data)
if len(children) == 0:
data['category'] = data['item'].apply(lambda i: 'category/all')
else:
data = pandas.merge(data, children, on='item', how='inner')
if 'percentage' in request.GET:
def _percentage(group):
total = group['answers'].sum()
return group.groupby('category').apply(lambda g: 100 * g['answers'].sum() / total).reset_index().rename(columns={0: 'answers'})
data = data.groupby('month').apply(_percentage).reset_index()
def _apply(group):
group['answers_cumsum'] = group['answers'].cumsum()
return group
data = data.sort_values(by=['category'], ascending=False).groupby('month').apply(_apply)
data['month'] = data['month'].astype(int)
sns.set(style='white')
rcParams['figure.figsize'] = 15, 10
palette = sns.color_palette("hls", max(5, len(categories)))
fig = plt.figure()
for i, category in enumerate(sorted(data['category'].unique())):
item_data = data[data['category'] == category]
sns.barplot(
x='month',
y='answers_cumsum',
data=item_data,
label=category.split('/')[1],
color=palette[i % len(palette)],
ci=None
)
plt.ylabel('Answers' + (' (%)' if 'percentage' in request.GET else ''))
plt.xlabel('Month')
plt.title('Answers per Month')
if 'percentage' in request.GET:
plt.ylim(0, 100)
plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))
response = HttpResponse(content_type='image/png')
canvas = FigureCanvas(fig)
canvas.print_png(response)
return response
@ensure_csrf_cookie
@allow_lazy_user
@transaction.atomic
def answer(request):
"""
Save the answer.
GET parameters:
html:
turn on the HTML version of the API
BODY
json in following format:
{
"answer": #answer, -- for one answer
"answers": [#answer, #answer, #answer ...] -- for multiple answers
}
answer = {
"answer_class": str, -- class of answer to save (e.g., flashcard_answer)
"response_time": int, -- response time in milliseconds
"meta": "str" -- optional information
"time_gap": int -- waiting time in frontend in seconds
        ...                       -- other fields depending on answer type
(see from_json method of Django model class)
}
"""
if request.method == 'GET':
return render(request, 'models_answer.html', {}, help_text=answer.__doc__)
elif request.method == 'POST':
practice_filter = get_filter(request)
practice_context = PracticeContext.objects.from_content(practice_filter)
saved_answers = _save_answers(request, practice_context, True)
return render_json(request, saved_answers, status=200, template='models_answer.html')
else:
        return HttpResponseBadRequest("method {} is not allowed".format(request.method))
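# Example body for the answer endpoint above (values are illustrative; extra
# fields depend on the concrete answer_class -- see its from_json method).
_EXAMPLE_ANSWER_BODY = {
    "answer": {
        "answer_class": "flashcard_answer",
        "response_time": 1500,  # milliseconds
        "time_gap": 2,          # seconds spent idle in the frontend
        "meta": "example",
    }
}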
@ensure_csrf_cookie
@allow_lazy_user
def user_stats(request):
"""
Get user statistics for selected groups of items
time:
time in format '%Y-%m-%d_%H:%M:%S' used for practicing
user:
        identifier of the user (only for staff users)
username:
username of user (only for users with public profile)
filters: -- use this or body
json as in BODY
mastered:
        use the model to compute the number of mastered items - can be slow
language:
language of the items
BODY
json in following format:
{
"#identifier": [] -- custom identifier (str) and filter
...
}
"""
timer('user_stats')
response = {}
data = None
if request.method == "POST":
data = json.loads(request.body.decode("utf-8"))["filters"]
if "filters" in request.GET:
data = load_query_json(request.GET, "filters")
if data is None:
return render_json(request, {}, template='models_user_stats.html', help_text=user_stats.__doc__)
environment = get_environment()
if is_time_overridden(request):
environment.shift_time(get_time(request))
user_id = get_user_id(request)
language = get_language(request)
filter_names, filter_filters = list(zip(*sorted(data.items())))
reachable_leaves = Item.objects.filter_all_reachable_leaves_many(filter_filters, language)
all_leaves = sorted(list(set(flatten(reachable_leaves))))
answers = environment.number_of_answers_more_items(all_leaves, user_id)
correct_answers = environment.number_of_correct_answers_more_items(all_leaves, user_id)
if request.GET.get("mastered"):
timer('user_stats_mastered')
mastery_threshold = get_mastery_trashold()
predictions = Item.objects.predict_for_overview(environment, user_id, all_leaves)
mastered = dict(list(zip(all_leaves, [p >= mastery_threshold for p in predictions])))
LOGGER.debug("user_stats - getting predictions for items took %s seconds", (timer('user_stats_mastered')))
for identifier, items in zip(filter_names, reachable_leaves):
if len(items) == 0:
response[identifier] = {
"filter": data[identifier],
"number_of_items": 0,
}
else:
response[identifier] = {
"filter": data[identifier],
"number_of_items": len(items),
"number_of_practiced_items": sum(answers[i] > 0 for i in items),
"number_of_answers": sum(answers[i] for i in items),
"number_of_correct_answers": sum(correct_answers[i] for i in items),
}
if request.GET.get("mastered"):
response[identifier]["number_of_mastered_items"]= sum(mastered[i] for i in items)
return render_json(request, response, template='models_user_stats.html', help_text=user_stats.__doc__)
@ensure_csrf_cookie
@allow_lazy_user
@transaction.atomic
def practice(request):
"""
Return the given number of questions to practice adaptively. In case of
POST request, try to save the answer(s).
GET parameters:
filter:
list of lists of identifiers (may be prefixed by minus sign to
mark complement)
language:
language (str) of items
avoid:
list of item ids to avoid
limit:
number of returned questions (default 10, maximum 100)
time:
time in format '%Y-%m-%d_%H:%M:%S' used for practicing
user:
        identifier for the practicing user (only for staff users)
stats:
turn on the enrichment of the objects by some statistics
html:
turn on the HTML version of the API
BODY:
see answer resource
"""
if request.user.id is None: # Google Bot
return render_json(request, {
'error': _('There is no user available for the practice.'),
'error_type': 'user_undefined'
}, status=400, template='models_json.html')
limit = min(int(request.GET.get('limit', 10)), 100)
# prepare
user = get_user_id(request)
time = get_time(request)
avoid = load_query_json(request.GET, "avoid", "[]")
practice_filter = get_filter(request)
practice_context = PracticeContext.objects.from_content(practice_filter)
environment = get_environment()
item_selector = get_item_selector()
if is_time_overridden(request):
environment.shift_time(time)
# save answers
if request.method == 'POST':
_save_answers(request, practice_context, False)
elif request.method == 'GET':
PracticeSet.objects.filter(answer__user_id=request.user.id).update(finished=True)
if limit > 0:
item_ids = Item.objects.filter_all_reachable_leaves(practice_filter, get_language(request), forbidden_identifiers=get_forbidden_items())
item_ids = list(set(item_ids) - set(avoid))
limit_size = get_config('proso_models', 'practice.limit_item_set_size_to_select_from', default=None)
if limit_size is not None and limit_size < len(item_ids):
item_ids = sample(item_ids, limit_size)
if len(item_ids) == 0:
return render_json(request, {
'error': _('There is no item for the given filter to practice.'),
'error_type': 'empty_practice'
}, status=404, template='models_json.html')
selected_items, meta = item_selector.select(environment, user, item_ids, time, practice_context.id, limit, items_in_queue=len(avoid))
result = []
for item, item_meta in zip(selected_items, meta):
question = {
'object_type': 'question',
'payload': Item.objects.item_id_to_json(item),
}
if item_meta is not None:
question['meta'] = item_meta
result.append(question)
else:
result = []
return render_json(request, result, template='models_json.html', help_text=practice.__doc__)
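# Example query parameters for the practice endpoint above (values are
# illustrative): "filter" and "avoid" are JSON-encoded, matching
# load_query_json/get_filter.
_EXAMPLE_PRACTICE_QUERY = {
    "filter": '[["category/world"]]',  # list of lists of identifiers
    "limit": "5",
    "avoid": "[123]",                  # item ids already queued client-side
}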
def _get_answers(request):
data = json.loads(request.body.decode("utf-8"))
if "answer" in data:
answers = [data["answer"]]
elif "answers" in data:
answers = data["answers"]
else:
raise BadRequestException("Answer(s) not found")
return answers
def _save_answers(request, practice_context, finish_practice_set):
timer('_save_answers')
json_objects = _get_answers(request)
answers = []
    last_answers = Answer.objects.prefetch_related('practice_set').filter(user_id=request.user.id).order_by('-id')[:1]
    # Start a new practice set when there is no previous answer, the practice
    # context changed, or the previous set is already finished; otherwise keep
    # appending answers to the current set.
    if len(last_answers) == 0 or last_answers[0].context_id != practice_context.id or last_answers[0].practice_set is None or last_answers[0].practice_set.finished:
        if len(last_answers) > 0 and last_answers[0].context_id != practice_context.id:
            PracticeSet.objects.filter(answer__user_id=request.user.id).update(finished=True)
        practice_set = PracticeSet.objects.create()
    else:
        practice_set = last_answers[0].practice_set
    # The answer endpoint closes the set immediately; the practice endpoint
    # keeps it open for follow-up answers.
    if finish_practice_set:
        practice_set.finished = True
        practice_set.save()
for json_object in json_objects:
if 'answer_class' not in json_object:
raise BadRequestException('The answer does not contain key "answer_class".')
answer_class = Answer.objects.answer_class(json_object['answer_class'])
answers.append(answer_class.objects.from_json(json_object, practice_context, practice_set, request.user.id))
LOGGER.debug("saving of %s answers took %s seconds", len(answers), timer('_save_answers'))
return answers
|
{
"content_hash": "d51cee0302316fcf0cf7ba2dab6b4037",
"timestamp": "",
"source": "github",
"line_count": 463,
"max_line_length": 179,
"avg_line_length": 42.25269978401728,
"alnum_prop": 0.6385012523641568,
"repo_name": "adaptive-learning/proso-apps",
"id": "6e14bac00cdd956fc28a6c9ae535031b681adc82",
"size": "19563",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "proso_models/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4739"
},
{
"name": "HTML",
"bytes": "35781"
},
{
"name": "JavaScript",
"bytes": "865"
},
{
"name": "Makefile",
"bytes": "4125"
},
{
"name": "Python",
"bytes": "645104"
}
],
"symlink_target": ""
}
|
class IntegerCondition(object):
    '''
    A condition over an integer attribute: identified by a condition name and
    parameterized with a single value, attached to a parent concept.
    '''
def __init__(self, parent, conditionName, parameter):
self.parent = parent
self.conditionName = conditionName
self.parameter = parameter
|
{
"content_hash": "ada636860dae7667da81d34e39142bc2",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 57,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.6271929824561403,
"repo_name": "theshammy/GenAn",
"id": "f450919a77e20baf027a619034b6920abf592730",
"size": "228",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/concepts/integer_condition.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2122"
},
{
"name": "HTML",
"bytes": "19036"
},
{
"name": "JavaScript",
"bytes": "15499"
},
{
"name": "Python",
"bytes": "62881"
}
],
"symlink_target": ""
}
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import unittest
from datetime import datetime
try:
import cx_Oracle
except ImportError:
cx_Oracle = None
import numpy
from airflow.hooks.oracle_hook import OracleHook
from airflow.models import Connection
from tests.compat import mock
@unittest.skipIf(cx_Oracle is None, 'cx_Oracle package not present')
class TestOracleHookConn(unittest.TestCase):
def setUp(self):
super().setUp()
self.connection = Connection(
login='login',
password='password',
host='host',
port=1521
)
self.db_hook = OracleHook()
self.db_hook.get_connection = mock.Mock()
self.db_hook.get_connection.return_value = self.connection
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_host(self, mock_connect):
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['user'], 'login')
self.assertEqual(kwargs['password'], 'password')
self.assertEqual(kwargs['dsn'], 'host')
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_sid(self, mock_connect):
dsn_sid = {'dsn': 'dsn', 'sid': 'sid'}
self.connection.extra = json.dumps(dsn_sid)
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['dsn'],
cx_Oracle.makedsn(dsn_sid['dsn'],
self.connection.port, dsn_sid['sid']))
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_service_name(self, mock_connect):
dsn_service_name = {'dsn': 'dsn', 'service_name': 'service_name'}
self.connection.extra = json.dumps(dsn_service_name)
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['dsn'], cx_Oracle.makedsn(
dsn_service_name['dsn'], self.connection.port,
service_name=dsn_service_name['service_name']))
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_encoding_without_nencoding(self, mock_connect):
self.connection.extra = json.dumps({'encoding': 'UTF-8'})
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['encoding'], 'UTF-8')
self.assertEqual(kwargs['nencoding'], 'UTF-8')
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_encoding_with_nencoding(self, mock_connect):
self.connection.extra = json.dumps({'encoding': 'UTF-8', 'nencoding': 'gb2312'})
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['encoding'], 'UTF-8')
self.assertEqual(kwargs['nencoding'], 'gb2312')
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_nencoding(self, mock_connect):
self.connection.extra = json.dumps({'nencoding': 'UTF-8'})
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertNotIn('encoding', kwargs)
self.assertEqual(kwargs['nencoding'], 'UTF-8')
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_mode(self, mock_connect):
mode = {
'sysdba': cx_Oracle.SYSDBA,
'sysasm': cx_Oracle.SYSASM,
'sysoper': cx_Oracle.SYSOPER,
'sysbkp': cx_Oracle.SYSBKP,
'sysdgd': cx_Oracle.SYSDGD,
'syskmt': cx_Oracle.SYSKMT,
}
first = True
for m in mode:
self.connection.extra = json.dumps({'mode': m})
self.db_hook.get_conn()
if first:
assert mock_connect.call_count == 1
first = False
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['mode'], mode.get(m))
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_threaded(self, mock_connect):
self.connection.extra = json.dumps({'threaded': True})
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['threaded'], True)
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_events(self, mock_connect):
self.connection.extra = json.dumps({'events': True})
self.db_hook.get_conn()
assert mock_connect.call_count == 1
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['events'], True)
@mock.patch('airflow.hooks.oracle_hook.cx_Oracle.connect')
def test_get_conn_purity(self, mock_connect):
purity = {
'new': cx_Oracle.ATTR_PURITY_NEW,
'self': cx_Oracle.ATTR_PURITY_SELF,
'default': cx_Oracle.ATTR_PURITY_DEFAULT
}
first = True
for p in purity:
self.connection.extra = json.dumps({'purity': p})
self.db_hook.get_conn()
if first:
assert mock_connect.call_count == 1
first = False
args, kwargs = mock_connect.call_args
self.assertEqual(args, ())
self.assertEqual(kwargs['purity'], purity.get(p))
@unittest.skipIf(cx_Oracle is None, 'cx_Oracle package not present')
class TestOracleHook(unittest.TestCase):
def setUp(self):
super().setUp()
self.cur = mock.MagicMock()
self.conn = mock.MagicMock()
self.conn.cursor.return_value = self.cur
conn = self.conn
class UnitTestOracleHook(OracleHook):
conn_name_attr = 'test_conn_id'
def get_conn(self):
return conn
self.db_hook = UnitTestOracleHook()
def test_run_without_parameters(self):
sql = 'SQL'
self.db_hook.run(sql)
self.cur.execute.assert_called_once_with(sql)
assert self.conn.commit.called
def test_run_with_parameters(self):
sql = 'SQL'
param = ('p1', 'p2')
self.db_hook.run(sql, parameters=param)
self.cur.execute.assert_called_once_with(sql, param)
assert self.conn.commit.called
def test_insert_rows_with_fields(self):
rows = [("'basestr_with_quote", None, numpy.NAN,
numpy.datetime64('2019-01-24T01:02:03'),
datetime(2019, 1, 24), 1, 10.24, 'str')]
target_fields = ['basestring', 'none', 'numpy_nan', 'numpy_datetime64',
'datetime', 'int', 'float', 'str']
self.db_hook.insert_rows('table', rows, target_fields)
self.cur.execute.assert_called_once_with(
"INSERT /*+ APPEND */ INTO table "
"(basestring, none, numpy_nan, numpy_datetime64, datetime, int, float, str) "
"VALUES ('''basestr_with_quote',NULL,NULL,'2019-01-24T01:02:03',"
"to_date('2019-01-24 00:00:00','YYYY-MM-DD HH24:MI:SS'),1,10.24,'str')")
def test_insert_rows_without_fields(self):
rows = [("'basestr_with_quote", None, numpy.NAN,
numpy.datetime64('2019-01-24T01:02:03'),
datetime(2019, 1, 24), 1, 10.24, 'str')]
self.db_hook.insert_rows('table', rows)
self.cur.execute.assert_called_once_with(
"INSERT /*+ APPEND */ INTO table "
" VALUES ('''basestr_with_quote',NULL,NULL,'2019-01-24T01:02:03',"
"to_date('2019-01-24 00:00:00','YYYY-MM-DD HH24:MI:SS'),1,10.24,'str')")
def test_bulk_insert_rows_with_fields(self):
rows = [(1, 2, 3), (4, 5, 6), (7, 8, 9)]
target_fields = ['col1', 'col2', 'col3']
self.db_hook.bulk_insert_rows('table', rows, target_fields)
self.cur.prepare.assert_called_once_with(
"insert into table (col1, col2, col3) values (:1, :2, :3)")
self.cur.executemany.assert_called_once_with(None, rows)
def test_bulk_insert_rows_with_commit_every(self):
rows = [(1, 2, 3), (4, 5, 6), (7, 8, 9)]
target_fields = ['col1', 'col2', 'col3']
self.db_hook.bulk_insert_rows('table', rows, target_fields, commit_every=2)
self.cur.prepare.assert_called_with(
"insert into table (col1, col2, col3) values (:1, :2, :3)")
self.cur.executemany.assert_called_with(None, rows[2:])
def test_bulk_insert_rows_without_fields(self):
rows = [(1, 2, 3), (4, 5, 6), (7, 8, 9)]
self.db_hook.bulk_insert_rows('table', rows)
self.cur.prepare.assert_called_once_with(
"insert into table values (:1, :2, :3)")
self.cur.executemany.assert_called_once_with(None, rows)
def test_bulk_insert_rows_no_rows(self):
rows = []
self.assertRaises(ValueError, self.db_hook.bulk_insert_rows, 'table', rows)
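# For reference, a sketch of the Connection "extra" JSON exercised by the
# TestOracleHookConn cases above (values are illustrative):
_EXAMPLE_ORACLE_EXTRA = json.dumps({
    "dsn": "dbhost.example.com",
    "service_name": "orclpdb1",
    "encoding": "UTF-8",
    "mode": "sysdba",
    "threaded": True,
})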
|
{
"content_hash": "7028983982d8f742bc02f3aedde6893f",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 89,
"avg_line_length": 40.50592885375494,
"alnum_prop": 0.60724043715847,
"repo_name": "r39132/airflow",
"id": "209ccb63b6e0f341ba0e323a418707dcaa6008d1",
"size": "10248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/hooks/test_oracle_hook.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "4111"
},
{
"name": "HTML",
"bytes": "128531"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5928206"
},
{
"name": "Shell",
"bytes": "41869"
}
],
"symlink_target": ""
}
|
"""Simple object oriented graphics library
The library is designed to make it very easy for novice programmers to
experiment with computer graphics in an object oriented fashion. It is
written by John Zelle for use with the book "Python Programming: An
Introduction to Computer Science" (Franklin, Beedle & Associates).
LICENSE: This is open-source software released under the terms of the
GPL (http://www.gnu.org/licenses/gpl.html).
PLATFORMS: The package is a wrapper around Tkinter and should run on
any platform where Tkinter is available.
INSTALLATION: Put this file somewhere where Python can see it.
OVERVIEW: There are two kinds of objects in the library. The CanvasFrame
class implements a window where drawing can be done and various
GraphicsObjects are provided that can be drawn into a CanvasFrame. As a
simple example, here is a complete program to draw a circle of radius
10 centered in a 100x100 window:
--------------------------------------------------------------------
from graphics import *
def main():
    win = CanvasFrame(tk.Tk(), 100, 100)
c = Circle(Point(50,50), 10)
c.draw(win)
    win.getMouse() # Pause to view result
main()
--------------------------------------------------------------------
CanvasFrame objects support coordinate transformation through the
setCoords method and pointer-based input through getMouse.
The library provides the following graphical objects:
Point
Line
Circle
Oval
Rectangle
Polygon
Text
Entry (for text-based input)
Image
Various attributes of graphical objects can be set such as
outline-color, fill-color and line-width. Graphical objects also
support moving and hiding for animation effects.
The library also provides a very simple class for pixel-based image
manipulation, Pixmap. A pixmap can be loaded from a file and displayed
using an Image object. Both getPixel and setPixel methods are provided
for manipulating the image.
DOCUMENTATION: For complete documentation, see Chapter 5 of "Python
Programming: An Introduction to Computer Science" by John Zelle,
published by Franklin, Beedle & Associates. Also see
http://mcsp.wartburg.edu/zelle/python for a quick reference"""
# Version 3.5 5/10/09
# Removed all the threading crap and cleaned up the _root stuff
#
# Version 3.4 10/16/07
# Fixed GraphicsError to avoid "exploded" error messages.
# Version 3.3 8/8/06
# Added checkMouse method to CanvasFrame
# Version 3.2.2 5/30/05
# Cleaned up handling of exceptions in Tk thread. The graphics package
# now raises an exception if attempt is made to communicate with
# a dead Tk thread.
# Version 3.2.1 5/22/05
# Added shutdown function for tk thread to eliminate race-condition
# error "chatter" when main thread terminates
# Renamed various private globals with _
# Version 3.2 5/4/05
# Added Pixmap object for simple image manipulation.
# Version 3.1 4/13/05
# Improved the Tk thread communication so that most Tk calls
#     do not have to wait for synchronization with the Tk thread.
# (see _tkCall and _tkExec)
# Version 3.0 12/30/04
# Implemented Tk event loop in separate thread. Should now work
# interactively with IDLE. Undocumented autoflush feature is
# no longer necessary. Its default is now False (off). It may
# be removed in a future version.
# Better handling of errors regarding operations on windows that
# have been closed.
#     Addition of an isClosed method to the CanvasFrame class.
# Version 2.2 8/26/04
# Fixed cloning bug reported by Joseph Oldham.
# Now implements deep copy of config info.
# Version 2.1 1/15/04
# Added autoflush option to CanvasFrame. When True (default) updates on
# the window are done after each action. This makes some graphics
# intensive programs sluggish. Turning off autoflush causes updates
# to happen during idle periods or when flush is called.
# Version 2.0
# Updated Documentation
# Made Polygon accept a list of Points in constructor
# Made all drawing functions call TK update for easier animations
# and to make the overall package work better with
# Python 2.3 and IDLE 1.0 under Windows (still some issues).
# Removed vestigial turtle graphics.
# Added ability to configure font for Entry objects (analogous to Text)
# Added setTextColor for Text as an alias of setFill
# Changed to class-style exceptions
# Fixed cloning of Text objects
# Version 1.6
# Fixed Entry so StringVar uses _root as master, solves weird
# interaction with shell in Idle
# Fixed bug in setCoords. X and Y coordinates can increase in
# "non-intuitive" direction.
# Tweaked wm_protocol so window is not resizable and kill box closes.
# Version 1.5
# Fixed bug in Entry. Can now define entry before creating a
# CanvasFrame. All CanvasFrames are now toplevel windows and share
# a fixed root (called _root).
# Version 1.4
# Fixed Garbage collection of Tkinter images bug.
#     Added ability to set text attributes.
# Added Entry boxes.
import time, os, sys
import Tkinter
tk = Tkinter
##########################################################################
# Module Exceptions
import exceptions
class GraphicsError(exceptions.Exception):
"""Generic error class for graphics module exceptions."""
#def __init__(self, *args):
#self.args=args
pass
OBJ_ALREADY_DRAWN = "Object currently drawn"
UNSUPPORTED_METHOD = "Object doesn't support operation"
BAD_OPTION = "Illegal option value"
############################################################################
# Graphics classes start here
class CanvasFrame(tk.Frame):
"""A CanvasFrame is a frame for displaying graphics."""
def __init__(self, parent, width=200, height=200):
tk.Frame.__init__(self, parent)
self.parent = parent
self.canvas = tk.Canvas(parent, width = width, height = height)
self.canvas.pack()
parent.resizable(0,0)
self.foreground = "black"
self.items = []
self.mouseX = None
self.mouseY = None
self.canvas.bind("<Button-1>", self._onClick)
self.height = height
self.width = width
self._mouseCallback = None
self._keyboardCallback = None
self.trans = None
self.closed = False
parent.lift()
def __checkOpen(self):
if self.closed:
raise GraphicsError, "window is closed"
def setBackground(self, color):
"""Set background color of the window"""
self.__checkOpen()
self.canvas.config(bg=color)
def setCoords(self, x1, y1, x2, y2):
"""Set coordinates of window to run from (x1,y1) in the
lower-left corner to (x2,y2) in the upper-right corner."""
self.trans = Transform(self.width, self.height, x1, y1, x2, y2)
def close(self):
if self.closed: return
self.__close_help()
def __close_help(self):
"""Close the window"""
self.closed = True
self.parent.destroy()
def isClosed(self):
return self.closed
def plot(self, x, y, color="black"):
"""Set pixel (x,y) to the given color"""
self.__checkOpen()
xs,ys = self.toScreen(x,y)
        self.canvas.create_line(xs,ys,xs+1,ys, fill=color)
def plotPixel(self, x, y, color="black"):
"""Set pixel raw (independent of window coordinates) pixel
(x,y) to color"""
self.__checkOpen()
        self.canvas.create_line(x,y,x+1,y, fill=color)
def flush(self):
"""Update drawing to the window"""
self.__checkOpen()
self.update_idletasks()
def getMouse(self):
"""Wait for mouse click and return Point object representing
the click"""
self.mouseX = None
self.mouseY = None
while self.mouseX == None or self.mouseY == None:
self.update()
if self.isClosed(): raise GraphicsError, "getMouse in closed window"
time.sleep(.1) # give up thread
x,y = self.toWorld(self.mouseX, self.mouseY)
self.mouseX = None
self.mouseY = None
return Point(x,y)
def checkMouse(self):
"""Return last mouse click or None if mouse has
not been clicked since last call"""
if self.isClosed():
raise GraphicsError, "checkMouse in closed window"
self.update()
if self.mouseX != None and self.mouseY != None:
x,y = self.toWorld(self.mouseX, self.mouseY)
self.mouseX = None
self.mouseY = None
return Point(x,y)
else:
return None
def getHeight(self):
"""Return the height of the window"""
return self.height
def getWidth(self):
"""Return the width of the window"""
return self.width
def toScreen(self, x, y):
trans = self.trans
if trans:
return self.trans.screen(x,y)
else:
return x,y
def toWorld(self, x, y):
trans = self.trans
if trans:
return self.trans.world(x,y)
else:
return x,y
def setMouseHandler(self, func):
self._mouseCallback = func
def _onClick(self, e):
self.mouseX = e.x
self.mouseY = e.y
if self._mouseCallback:
self._mouseCallback(Point(e.x, e.y))
class Transform:
"""Internal class for 2-D coordinate transformations"""
def __init__(self, w, h, xlow, ylow, xhigh, yhigh):
# w, h are width and height of window
# (xlow,ylow) coordinates of lower-left [raw (0,h-1)]
# (xhigh,yhigh) coordinates of upper-right [raw (w-1,0)]
xspan = (xhigh-xlow)
yspan = (yhigh-ylow)
self.xbase = xlow
self.ybase = yhigh
self.xscale = xspan/float(w-1)
self.yscale = yspan/float(h-1)
def screen(self,x,y):
# Returns x,y in screen (actually window) coordinates
xs = (x-self.xbase) / self.xscale
ys = (self.ybase-y) / self.yscale
return int(xs+0.5),int(ys+0.5)
def world(self,xs,ys):
# Returns xs,ys in world coordinates
x = xs*self.xscale + self.xbase
y = self.ybase - ys*self.yscale
return x,y
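# Worked example: Transform(200, 200, 0, 0, 10, 10) -- i.e. a 200x200 window
# after setCoords(0, 0, 10, 10) -- gives xscale = yscale = 10/199. The world
# point (5, 5) maps to screen pixel (100, 100) after rounding, and screen
# (0, 0) maps back to world (0, 10), the upper-left corner.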
# Default values for various item configuration options. Only a subset of
# keys may be present in the configuration dictionary for a given item
DEFAULT_CONFIG = {"fill":"",
"outline":"black",
"width":"1",
"arrow":"none",
"text":"",
"justify":"center",
"font": ("helvetica", 12, "normal")}
class GraphicsObject:
"""Generic base class for all of the drawable objects"""
# A subclass of GraphicsObject should override _draw and
# and _move methods.
def __init__(self, options):
# options is a list of strings indicating which options are
# legal for this object.
# When an object is drawn, canvas is set to the CanvasFrame(canvas)
# object where it is drawn and id is the TK identifier of the
# drawn shape.
self.canvas_frame = None
self.id = None
# config is the dictionary of configuration options for the widget.
config = {}
for option in options:
config[option] = DEFAULT_CONFIG[option]
self.config = config
def setFill(self, color):
"""Set interior color to color"""
self._reconfig("fill", color)
def setOutline(self, color):
"""Set outline color to color"""
self._reconfig("outline", color)
def setWidth(self, width):
"""Set line weight to width"""
self._reconfig("width", width)
def draw(self, canvas_frame):
"""Draw the object in CanvasFrame, which should be a CanvasFrame
object. A GraphicsObject may only be drawn into one
window. Raises an error if attempt made to draw an object that
is already visible."""
if self.canvas_frame and not self.canvas_frame.isClosed(): raise GraphicsError, OBJ_ALREADY_DRAWN
if canvas_frame.isClosed(): raise GraphicsError, "Can't draw to closed window"
self.canvas_frame = canvas_frame
self.id = self._draw(canvas_frame, self.config)
def undraw(self):
"""Undraw the object (i.e. hide it). Returns silently if the
object is not currently drawn."""
if not self.canvas_frame: return
if not self.canvas_frame.isClosed():
self.canvas_frame.canvas.delete(self.id)
self.canvas_frame = None
self.id = None
def move(self, dx, dy):
"""move object dx units in x direction and dy units in y
direction"""
self._move(dx,dy)
canvas_frame = self.canvas_frame
if canvas_frame and not canvas_frame.isClosed():
trans = canvas_frame.trans
if trans:
x = dx/ trans.xscale
y = -dy / trans.yscale
else:
x = dx
y = dy
self.canvas_frame.canvas.move(self.id, x, y)
def _reconfig(self, option, setting):
# Internal method for changing configuration of the object
# Raises an error if the option does not exist in the config
# dictionary for this object
if not self.config.has_key(option):
raise GraphicsError, UNSUPPORTED_METHOD
options = self.config
options[option] = setting
if self.canvas_frame and not self.canvas_frame.isClosed():
self.canvas_frame.canvas.itemconfig(self.id, options)
def _draw(self, canvas_frame, options):
"""draws appropriate figure on canvas with options provided
Returns Tk id of item drawn"""
pass # must override in subclass
def _move(self, dx, dy):
"""updates internal state of object to move it dx,dy units"""
pass # must override in subclass
class Point(GraphicsObject):
def __init__(self, x, y):
GraphicsObject.__init__(self, ["outline", "fill"])
self.setFill = self.setOutline
self.x = x
self.y = y
def _draw(self, canvas_frame, options):
x,y = canvas_frame.toScreen(self.x,self.y)
return canvas_frame.canvas.create_rectangle(x,y,x+1,y+1,options)
def _move(self, dx, dy):
self.x = self.x + dx
self.y = self.y + dy
def clone(self):
other = Point(self.x,self.y)
other.config = self.config.copy()
return other
def getX(self): return self.x
def getY(self): return self.y
class _BBox(GraphicsObject):
# Internal base class for objects represented by bounding box
# (opposite corners) Line segment is a degenerate case.
def __init__(self, p1, p2, options=["outline","width","fill"]):
GraphicsObject.__init__(self, options)
self.p1 = p1.clone()
self.p2 = p2.clone()
def _move(self, dx, dy):
self.p1.x = self.p1.x + dx
self.p1.y = self.p1.y + dy
self.p2.x = self.p2.x + dx
self.p2.y = self.p2.y + dy
def getP1(self): return self.p1.clone()
def getP2(self): return self.p2.clone()
def getCenter(self):
p1 = self.p1
p2 = self.p2
return Point((p1.x+p2.x)/2.0, (p1.y+p2.y)/2.0)
class Rectangle(_BBox):
def __init__(self, p1, p2):
_BBox.__init__(self, p1, p2)
def _draw(self, canvas_frame, options):
p1 = self.p1
p2 = self.p2
x1,y1 = canvas_frame.toScreen(p1.x,p1.y)
x2,y2 = canvas_frame.toScreen(p2.x,p2.y)
return canvas_frame.canvas.create_rectangle(x1,y1,x2,y2,options)
def clone(self):
other = Rectangle(self.p1, self.p2)
other.config = self.config.copy()
return other
class Oval(_BBox):
def __init__(self, p1, p2):
_BBox.__init__(self, p1, p2)
def clone(self):
other = Oval(self.p1, self.p2)
other.config = self.config.copy()
return other
def _draw(self, canvas_frame, options):
p1 = self.p1
p2 = self.p2
x1,y1 = canvas_frame.toScreen(p1.x,p1.y)
x2,y2 = canvas_frame.toScreen(p2.x,p2.y)
return canvas_frame.canvas.create_oval(x1,y1,x2,y2,options)
class Circle(Oval):
def __init__(self, center, radius):
p1 = Point(center.x-radius, center.y-radius)
p2 = Point(center.x+radius, center.y+radius)
Oval.__init__(self, p1, p2)
self.radius = radius
def clone(self):
other = Circle(self.getCenter(), self.radius)
other.config = self.config.copy()
return other
def getRadius(self):
return self.radius
class Line(_BBox):
def __init__(self, p1, p2):
_BBox.__init__(self, p1, p2, ["arrow","fill","width"])
self.setFill(DEFAULT_CONFIG['outline'])
self.setOutline = self.setFill
def clone(self):
other = Line(self.p1, self.p2)
other.config = self.config.copy()
return other
def _draw(self, canvas_frame, options):
p1 = self.p1
p2 = self.p2
x1,y1 = canvas_frame.toScreen(p1.x,p1.y)
x2,y2 = canvas_frame.toScreen(p2.x,p2.y)
return canvas_frame.canvas.create_line(x1,y1,x2,y2,options)
def setArrow(self, option):
if not option in ["first","last","both","none"]:
raise GraphicsError, BAD_OPTION
self._reconfig("arrow", option)
class Polygon(GraphicsObject):
def __init__(self, *points):
# if points passed as a list, extract it
        if len(points) == 1 and type(points[0]) == type([]):
points = points[0]
self.points = map(Point.clone, points)
GraphicsObject.__init__(self, ["outline", "width", "fill"])
def clone(self):
other = apply(Polygon, self.points)
other.config = self.config.copy()
return other
def getPoints(self):
return map(Point.clone, self.points)
def _move(self, dx, dy):
for p in self.points:
p.move(dx,dy)
def _draw(self, canvas_frame, options):
args = [canvas_frame.canvas]
for p in self.points:
x,y = canvas_frame.toScreen(p.x,p.y)
args.append(x)
args.append(y)
args.append(options)
return apply(tk.Canvas.create_polygon, args)
class Text(GraphicsObject):
def __init__(self, p, text):
GraphicsObject.__init__(self, ["justify","fill","text","font"])
self.setText(text)
self.anchor = p.clone()
self.setFill(DEFAULT_CONFIG['outline'])
self.setOutline = self.setFill
def _draw(self, canvas_frame, options):
p = self.anchor
x,y = canvas_frame.toScreen(p.x,p.y)
return canvas_frame.canvas.create_text(x,y,options)
def _move(self, dx, dy):
self.anchor.move(dx,dy)
def clone(self):
other = Text(self.anchor, self.config['text'])
other.config = self.config.copy()
return other
def setText(self,text):
self._reconfig("text", text)
def getText(self):
return self.config["text"]
def getAnchor(self):
return self.anchor.clone()
def setFace(self, face):
if face in ['helvetica','arial','courier','times roman']:
f,s,b = self.config['font']
self._reconfig("font",(face,s,b))
else:
raise GraphicsError, BAD_OPTION
def setSize(self, size):
if 5 <= size <= 72:
f,s,b = self.config['font']
self._reconfig("font", (f,size,b))
else:
raise GraphicsError, BAD_OPTION
def setStyle(self, style):
if style in ['bold','normal','italic', 'bold italic']:
f,s,b = self.config['font']
self._reconfig("font", (f,s,style))
else:
raise GraphicsError, BAD_OPTION
def setTextColor(self, color):
#self.config['fg'] = color;
self.setFill(color)
class Entry(GraphicsObject):
def __init__(self, canvas_frame, p, width):
        GraphicsObject.__init__(self, [])
        self.canvas_frame = canvas_frame  # kept so clone() can rebuild the widget
        self.anchor = p.clone()
#print self.anchor
self.width = width
self.text = tk.StringVar(canvas_frame.parent)
self.text.set("")
self.fill = "gray"
self.color = "black"
self.font = DEFAULT_CONFIG['font']
self.entry = None
def _draw(self, canvas_frame, options):
p = self.anchor
x,y = canvas_frame.toScreen(p.x,p.y)
frm = tk.Frame(canvas_frame.parent)
self.entry = tk.Entry(frm,
width=self.width,
textvariable=self.text,
bg = self.fill,
fg = self.color,
font=self.font)
self.entry.pack()
#self.setFill(self.fill)
return canvas_frame.canvas.create_window(x,y,window=frm)
def getText(self):
return self.text.get()
def _move(self, dx, dy):
self.anchor.move(dx,dy)
def getAnchor(self):
return self.anchor.clone()
    def clone(self):
        other = Entry(self.canvas_frame, self.anchor, self.width)
        return self.__clone_help(other)
    def __clone_help(self, other):
        other.config = self.config.copy()
        other.text.set(self.text.get())
        other.fill = self.fill
        return other
def setText(self, t):
self.text.set(t)
def setFill(self, color):
self.fill = color
if self.entry:
self.entry.config(bg=color)
def _setFontComponent(self, which, value):
font = list(self.font)
font[which] = value
self.font = tuple(font)
if self.entry:
self.entry.config(font=self.font)
def setFace(self, face):
if face in ['helvetica','arial','courier','times roman']:
self._setFontComponent(0, face)
else:
raise GraphicsError, BAD_OPTION
def setSize(self, size):
if 5 <= size <= 36:
self._setFontComponent(1,size)
else:
raise GraphicsError, BAD_OPTION
def setStyle(self, style):
if style in ['bold','normal','italic', 'bold italic']:
self._setFontComponent(2,style)
else:
raise GraphicsError, BAD_OPTION
def setTextColor(self, color):
self.color=color
if self.entry:
self.entry.config(fg=color)
class Image(GraphicsObject):
idCount = 0
imageCache = {} # tk photoimages go here to avoid GC while drawn
def __init__(self, p, pixmap):
GraphicsObject.__init__(self, [])
self.anchor = p.clone()
self.imageId = Image.idCount
Image.idCount = Image.idCount + 1
if type(pixmap) == type(""):
self.img = tk.PhotoImage(file=pixmap, master=_root)
else:
self.img = pixmap.image
def _draw(self, canvas_frame, options):
p = self.anchor
x,y = canvas_frame.toScreen(p.x,p.y)
self.imageCache[self.imageId] = self.img # save a reference
return canvas_frame.canvas.create_image(x,y,image=self.img)
def _move(self, dx, dy):
self.anchor.move(dx,dy)
def undraw(self):
del self.imageCache[self.imageId] # allow gc of tk photoimage
GraphicsObject.undraw(self)
def getAnchor(self):
return self.anchor.clone()
def clone(self):
imgCopy = Pixmap(self.img.copy())
other = Image(self.anchor, imgCopy)
other.config = self.config.copy()
return other
class Pixmap:
"""Pixmap represents an image as a 2D array of color values.
A Pixmap can be made from a file (gif or ppm):
pic = Pixmap("myPicture.gif")
or initialized to a given size (initially transparent):
pic = Pixmap(512, 512)
"""
def __init__(self, *args):
if len(args) == 1: # a file name or pixmap
if type(args[0]) == type(""):
self.image = tk.PhotoImage( file=args[0], master=_root)
else:
self.image = args[0]
else: # arguments are width and height
width, height = args
self.image = tk.PhotoImage( master=_root,
width=width, height=height)
def getWidth(self):
"""Returns the width of the image in pixels"""
return self.image.width()
def getHeight(self):
"""Returns the height of the image in pixels"""
return self.image.height()
def getPixel(self, x, y):
"""Returns a list [r,g,b] with the RGB color values for pixel (x,y)
r,g,b are in range(256)
"""
value = self.image.get( x,y)
if type(value) == int:
return [value, value, value]
else:
return map(int, value.split())
def setPixel(self, x, y, (r,g,b)):
"""Sets pixel (x,y) to the color given by RGB values r, g, and b.
r,g,b should be in range(256)
"""
self.image.put( "{%s}"%color_rgb(r,g,b), (x, y))
def clone(self):
"""Returns a copy of this Pixmap"""
return Pixmap(self.image.copy())
def save(self, filename):
"""Saves the pixmap image to filename.
        The format for the saved image is determined from the filename extension.
"""
path, name = os.path.split(filename)
ext = name.split(".")[-1]
self.image.write( filename, format=ext)
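# A minimal usage sketch for Pixmap (not part of the original module; the
# file names below are assumptions): read a GIF, invert every pixel, save.
#     pic = Pixmap("myPicture.gif")
#     for x in range(pic.getWidth()):
#         for y in range(pic.getHeight()):
#             r, g, b = pic.getPixel(x, y)
#             pic.setPixel(x, y, (255 - r, 255 - g, 255 - b))
#     pic.save("inverted.gif")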
def color_rgb(r,g,b):
"""r,g,b are intensities of red, green, and blue in range(256)
Returns color specifier string for the resulting color"""
return "#%02x%02x%02x" % (r,g,b)
class GraphWin(CanvasFrame):
def __init__(self, title, width=200, height=200):
self.root = tk.Tk()
self.root.title(title)
CanvasFrame.__init__(self, self.root, width, height)
self.root.protocol("WM_DELETE_WINDOW", self.__close_help)
self.root.config(bg = "dark gray")
def mainloop(self):
self.root.mainloop()
def __close_help(self):
"""Close the window"""
self.root.destroy()
class Window(tk.Tk):
def __init__(self, title):
tk.Tk.__init__(self)
self.title(title)
self.config(bg = "dark gray")
self.protocol("WM_DELETE_WINDOW", self.__close_help)
def __close_help(self):
"""Close the window"""
self.destroy()
def test():
#win = CanvasFrame(_root)
win = GraphWin("Test")
win.setCoords(0,0,10,10)
t = Text(Point(5,5), "Centered Text")
t.draw(win)
p = Polygon(Point(1,1), Point(5,3), Point(2,7))
p.draw(win)
e = Entry(win, Point(5,6), 10)
e.draw(win)
win.getMouse()
p.setFill("red")
p.setOutline("blue")
p.setWidth(2)
s = ""
for pt in p.getPoints():
s = s + "(%0.1f,%0.1f) " % (pt.getX(), pt.getY())
t.setText(e.getText())
e.setFill("green")
e.setText("Spam!")
e.move(2,0)
win.getMouse()
p.move(2,3)
s = ""
for pt in p.getPoints():
s = s + "(%0.1f,%0.1f) " % (pt.getX(), pt.getY())
t.setText(s)
win.getMouse()
p.undraw()
e.undraw()
t.setStyle("bold")
win.getMouse()
t.setStyle("normal")
win.getMouse()
t.setStyle("italic")
win.getMouse()
t.setStyle("bold italic")
win.getMouse()
t.setSize(14)
win.getMouse()
t.setFace("arial")
t.setSize(20)
win.getMouse()
win.close()
win.mainloop()
if __name__ == "__main__":
test()
|
{
"content_hash": "745d8949fd5c0fa8a217a6c1e20ea226",
"timestamp": "",
"source": "github",
"line_count": 896,
"max_line_length": 105,
"avg_line_length": 31.733258928571427,
"alnum_prop": 0.577744170506102,
"repo_name": "inconceivable/tetris",
"id": "c95566da72f13e3d70d605e717cdde5e8c6ce55d",
"size": "28447",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "graphics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51038"
}
],
"symlink_target": ""
}
|
"""Evaluate the ensemble of 8 CTC models (Librispeech corpus)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from os.path import join, abspath
import sys
import yaml
import argparse
sys.path.append(abspath('../../../'))
from experiments.librispeech.data.load_dataset_ctc import Dataset
from experiments.librispeech.evaluation.eval_ensemble4_ctc import do_eval_cer
from utils.io.labels.character import Idx2char, Char2idx
from utils.evaluation.edit_distance import compute_cer, compute_wer, wer_align
from models.ctc.decoders.beam_search_decoder import BeamSearchDecoder
parser = argparse.ArgumentParser()
parser.add_argument('--result_save_path', type=str, default=None,
help='path to save results of ensemble')
parser.add_argument('--model1_path', type=str,
help='path to the 1st model to evaluate')
parser.add_argument('--model2_path', type=str,
help='path to the 2nd model to evaluate')
parser.add_argument('--model3_path', type=str,
help='path to the 3rd model to evaluate')
parser.add_argument('--model4_path', type=str,
help='path to the 4th model to evaluate')
parser.add_argument('--model5_path', type=str,
help='path to the 5th model to evaluate')
parser.add_argument('--model6_path', type=str,
help='path to the 6th model to evaluate')
parser.add_argument('--model7_path', type=str,
help='path to the 7th model to evaluate')
parser.add_argument('--model8_path', type=str,
help='path to the 8th model to evaluate')
parser.add_argument('--epoch_model1', type=int, default=-1,
help='the epoch of 1st model to restore')
parser.add_argument('--epoch_model2', type=int, default=-1,
help='the epoch of 2nd model to restore')
parser.add_argument('--epoch_model3', type=int, default=-1,
help='the epoch of 3rd model to restore')
parser.add_argument('--epoch_model4', type=int, default=-1,
help='the epoch of 4th model to restore')
parser.add_argument('--epoch_model5', type=int, default=-1,
help='the epoch of 5th model to restore')
parser.add_argument('--epoch_model6', type=int, default=-1,
help='the epoch of 6th model to restore')
parser.add_argument('--epoch_model7', type=int, default=-1,
help='the epoch of 7th model to restore')
parser.add_argument('--epoch_model8', type=int, default=-1,
help='the epoch of 8th model to restore')
parser.add_argument('--beam_width', type=int, default=20,
help='beam_width (int, optional): beam width for beam search.' +
                    ' 1 disables beam search, which means greedy decoding.')
parser.add_argument('--temperature_infer', type=int, default=1,
help='temperature parameter in the inference stage')
def do_eval(save_paths, params, beam_width, temperature_infer,
result_save_path):
"""Evaluate the model.
Args:
        save_paths (list): paths to the saved models in the ensemble
        params (dict): A dictionary of parameters
        beam_width (int): beam width for beam search.
            1 disables beam search, which means greedy decoding.
        temperature_infer (int): temperature in the inference stage
        result_save_path (string, optional): where to save the decoding log
"""
if 'temp1' in save_paths[0]:
temperature_train = 1
elif 'temp2' in save_paths[0]:
temperature_train = 2
    else:
        raise ValueError(
            "Could not infer the training temperature ('temp1'/'temp2') "
            "from the save path.")
if result_save_path is not None:
sys.stdout = open(join(result_save_path,
'8models_traintemp' + str(temperature_train) +
'_inftemp' + str(temperature_infer) + '.log'), 'w')
print('=' * 30)
print(' frame stack %d' % int(params['num_stack']))
print(' beam width: %d' % beam_width)
print(' ensemble: %d' % len(save_paths))
print(' temperature (training): %d' % temperature_train)
print(' temperature (inference): %d' % temperature_infer)
print('=' * 30)
# Load dataset
test_clean_data = Dataset(
data_type='test_clean', train_data_size=params['train_data_size'],
label_type=params['label_type'],
batch_size=1, splice=params['splice'],
num_stack=params['num_stack'], num_skip=params['num_skip'],
sort_utt=True)
test_other_data = Dataset(
data_type='test_other', train_data_size=params['train_data_size'],
label_type=params['label_type'],
batch_size=1, splice=params['splice'],
num_stack=params['num_stack'], num_skip=params['num_skip'],
sort_utt=True)
print('Test Data Evaluation:')
cer_clean_test, wer_clean_test = do_eval_cer(
save_paths=save_paths,
dataset=test_clean_data,
data_type='test_clean',
label_type=params['label_type'],
num_classes=params['num_classes'] + 1,
beam_width=beam_width,
temperature_infer=temperature_infer,
is_test=True,
progressbar=True)
print(' CER (clean): %f %%' % (cer_clean_test * 100))
print(' WER (clean): %f %%' % (wer_clean_test * 100))
cer_other_test, wer_other_test = do_eval_cer(
save_paths=save_paths,
dataset=test_other_data,
data_type='test_other',
label_type=params['label_type'],
num_classes=params['num_classes'] + 1,
beam_width=beam_width,
temperature_infer=temperature_infer,
is_test=True,
progressbar=True)
print(' CER (other): %f %%' % (cer_other_test * 100))
print(' WER (other): %f %%' % (wer_other_test * 100))
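# A hedged invocation sketch (all paths below are placeholders, not taken
# from any real experiment):
#     python eval_ensemble8_ctc.py \
#         --model1_path /path/to/ctc_temp1_run1 ... --model8_path /path/to/ctc_temp1_run8 \
#         --beam_width 20 --temperature_infer 2 \
#         --result_save_path /path/to/results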
def main():
args = parser.parse_args()
# Load config file
with open(join(args.model1_path, 'config.yml'), "r") as f:
config = yaml.load(f)
params = config['param']
# Except for a blank class
if params['label_type'] == 'character':
params['num_classes'] = 28
    else:
        raise TypeError("Unsupported label type: %s" % params['label_type'])
save_paths = [args.model1_path, args.model2_path,
args.model3_path, args.model4_path,
args.model5_path, args.model6_path,
args.model7_path, args.model8_path]
do_eval(save_paths=save_paths, params=params,
beam_width=args.beam_width,
temperature_infer=args.temperature_infer,
result_save_path=args.result_save_path)
if __name__ == '__main__':
    main()
|
{
"content_hash": "82f7d2aaf461d4074814ceb676cd5617",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 84,
"avg_line_length": 39.892857142857146,
"alnum_prop": 0.6119068934646374,
"repo_name": "hirofumi0810/tensorflow_end2end_speech_recognition",
"id": "e6dc044dbc933e427b91aab0bc5b1f1fa4dc6c14",
"size": "6750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/librispeech/evaluation/eval_ensemble8_ctc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "535815"
},
{
"name": "Shell",
"bytes": "2247"
}
],
"symlink_target": ""
}
|
import math
from PyEngine3D.Utilities import *
def always_pass(*args):
return False
def cone_sphere_culling_actor(camera, actor):
to_actor = actor.transform.pos - camera.transform.pos
dist = length(to_actor)
if 0.0 < dist:
to_actor /= dist
rad = math.acos(np.dot(to_actor, -camera.transform.front)) - camera.half_cone
projected_dist = dist * math.sin(rad)
radius = actor.model.mesh.radius * max(actor.transform.scale)
if 0.0 < rad and radius < projected_dist:
return True
elif HALF_PI < rad and radius < dist:
return True
return False
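# Hedged worked example for the cone-sphere test above (numbers invented):
# with the actor 10 units away and its centre 0.2 rad outside the view cone,
# the projected distance is 10 * sin(0.2) ~= 1.99, so any bounding radius
# below ~1.99 keeps the sphere fully outside and the actor is culled.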
def view_frustum_culling_geometry(camera, light, actor, geometry):
to_geometry = np.array([geometry.boundCenter[0], geometry.boundCenter[1], geometry.boundCenter[2], 1.0])
to_geometry = np.dot(to_geometry, actor.transform.matrix)[0:3] - camera.transform.pos
max_scale = max(actor.transform.scale)
if 1 < actor.instance_count:
# instancing
radius = geometry.radius * max_scale * actor.instance_radius_scale
radius += actor.instance_radius_offset * max_scale
else:
radius = geometry.radius * max_scale
for i in range(4):
d = np.dot(camera.frustum_vectors[i], to_geometry)
if radius < d:
return True
return False
def shadow_culling(camera, light, actor, geometry):
if 1 < actor.instance_count:
# instancing
scale = actor.instance_radius_scale
offset = actor.instance_radius_offset
boundMin = geometry.boundMin * scale - offset
boundMax = geometry.boundMax * scale + offset
else:
boundMin = geometry.boundMin.copy()
boundMax = geometry.boundMax.copy()
shadow_matrix = np.dot(actor.transform.matrix, light.shadow_view_projection)
boundMin[...] = np.dot(np.array([boundMin[0], boundMin[1], boundMin[2], 1.0], dtype=np.float32), shadow_matrix)[: 3]
boundMax[...] = np.dot(np.array([boundMax[0], boundMax[1], boundMax[2], 1.0], dtype=np.float32), shadow_matrix)[: 3]
minimum = np.minimum(boundMin, boundMax)
maximum = np.maximum(boundMin, boundMax)
if any(x < -1.0 for x in maximum) or any(1.0 < x for x in minimum):
return True
return False
def gather_render_infos(culling_func, camera, light, actor_list, solid_render_infos, translucent_render_infos):
for actor in actor_list:
for geometry in actor.get_geometries():
if culling_func(camera, light, actor, geometry):
continue
material_instance = actor.get_material_instance(geometry.index)
render_info = RenderInfo()
render_info.actor = actor
render_info.geometry = geometry
render_info.material = material_instance.material if material_instance else None
render_info.material_instance = material_instance
            if material_instance is not None and material_instance.is_translucent():
if translucent_render_infos is not None:
translucent_render_infos.append(render_info)
elif solid_render_infos is not None:
solid_render_infos.append(render_info)
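# A minimal usage sketch (names are illustrative, not from this module):
#     solid, translucent = [], []
#     gather_render_infos(view_frustum_culling_geometry, camera, None,
#                         scene_actors, solid, translucent)
#     # solid/translucent now hold the RenderInfo entries that survived culling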
class RenderInfo:
def __init__(self):
self.actor = None
self.geometry = None
self.material = None
self.material_instance = None
|
{
"content_hash": "7d654fff64f75acace41d39b6938e04f",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 120,
"avg_line_length": 36.141304347826086,
"alnum_prop": 0.6442105263157895,
"repo_name": "ubuntunux/GuineaPig",
"id": "a1bffa75a7067eefa91744eeba1a1720115dc7f2",
"size": "3325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PyEngine3D/Render/RenderInfo.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "GLSL",
"bytes": "6355"
},
{
"name": "Python",
"bytes": "216821"
}
],
"symlink_target": ""
}
|
import mock
import fixtures
from oslo_config import cfg
from oslo_log import log as logging
from nova import test
from nova.tests.functional.test_servers import ServersTestBase
from nova.tests.unit import fake_network
from nova.tests.unit.virt.libvirt import fake_libvirt_utils
from nova.tests.unit.virt.libvirt import fakelibvirt
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class NumaHostInfo(fakelibvirt.HostInfo):
def __init__(self, **kwargs):
super(NumaHostInfo, self).__init__(**kwargs)
self.numa_mempages_list = []
def get_numa_topology(self):
if self.numa_topology:
return self.numa_topology
topology = self._gen_numa_topology(self.cpu_nodes, self.cpu_sockets,
self.cpu_cores, self.cpu_threads,
self.kB_mem)
self.numa_topology = topology
# update number of active cpus
cpu_count = len(topology.cells) * len(topology.cells[0].cpus)
self.cpus = cpu_count - len(self.disabled_cpus_list)
return topology
def set_custom_numa_toplogy(self, topology):
self.numa_topology = topology
class NUMAServersTest(ServersTestBase):
def setUp(self):
super(NUMAServersTest, self).setUp()
# Replace libvirt with fakelibvirt
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.libvirt.driver.libvirt_utils',
fake_libvirt_utils))
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.libvirt.driver.libvirt',
fakelibvirt))
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.libvirt.host.libvirt',
fakelibvirt))
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.libvirt.guest.libvirt',
fakelibvirt))
self.useFixture(fakelibvirt.FakeLibvirtFixture())
def _setup_compute_service(self):
pass
def _setup_scheduler_service(self):
self.flags(compute_driver='nova.virt.libvirt.LibvirtDriver')
self.flags(scheduler_driver='nova.scheduler.'
'filter_scheduler.FilterScheduler')
self.flags(scheduler_default_filters=CONF.scheduler_default_filters
+ ['NUMATopologyFilter'])
return self.start_service('scheduler')
def _run_build_test(self, flavor_id, filter_mock, end_status='ACTIVE'):
self.compute = self.start_service('compute', host='test_compute0')
fake_network.set_stub_network_methods(self.stubs)
# Create server
good_server = self._build_server(flavor_id)
post = {'server': good_server}
created_server = self.api.post_server(post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Validate that the server has been created
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
# It should also be in the all-servers list
servers = self.api.get_servers()
server_ids = [s['id'] for s in servers]
self.assertIn(created_server_id, server_ids)
# Validate that NUMATopologyFilter has been called
self.assertTrue(filter_mock.called)
found_server = self._wait_for_state_change(found_server, 'BUILD')
self.assertEqual(end_status, found_server['status'])
self._delete_server(created_server_id)
def _get_topology_filter_spy(self):
host_manager = self.scheduler.manager.driver.host_manager
numa_filter_class = host_manager.filter_cls_map['NUMATopologyFilter']
host_pass_mock = mock.Mock(wraps=numa_filter_class().host_passes)
return host_pass_mock
@mock.patch('nova.virt.libvirt.LibvirtDriver._create_image')
def test_create_server_with_numa_topology(self, img_mock):
host_info = NumaHostInfo(cpu_nodes=2, cpu_sockets=1, cpu_cores=2,
cpu_threads=2, kB_mem=15740000)
fake_connection = fakelibvirt.Connection('qemu:///system',
version=1002007,
hv_version=2001000,
host_info=host_info)
# Create a flavor
extra_spec = {'hw:numa_nodes': '2'}
flavor_id = self._create_flavor(extra_spec=extra_spec)
host_pass_mock = self._get_topology_filter_spy()
with test.nested(
mock.patch('nova.virt.libvirt.host.Host.get_connection',
return_value=fake_connection),
mock.patch('nova.scheduler.filters'
'.numa_topology_filter.NUMATopologyFilter.host_passes',
side_effect=host_pass_mock)) as (conn_mock,
filter_mock):
self._run_build_test(flavor_id, filter_mock)
@mock.patch('nova.virt.libvirt.LibvirtDriver._create_image')
def test_create_server_with_numa_fails(self, img_mock):
host_info = NumaHostInfo(cpu_nodes=1, cpu_sockets=1, cpu_cores=2,
kB_mem=15740000)
fake_connection = fakelibvirt.Connection('qemu:///system',
version=1002007,
host_info=host_info)
# Create a flavor
extra_spec = {'hw:numa_nodes': '2'}
flavor_id = self._create_flavor(extra_spec=extra_spec)
host_pass_mock = self._get_topology_filter_spy()
with test.nested(
mock.patch('nova.virt.libvirt.host.Host.get_connection',
return_value=fake_connection),
mock.patch('nova.scheduler.filters'
'.numa_topology_filter.NUMATopologyFilter.host_passes',
side_effect=host_pass_mock)) as (conn_mock,
filter_mock):
self._run_build_test(flavor_id, filter_mock, end_status='ERROR')
|
{
"content_hash": "d496f2f9f9942c98dd6e71d225ca716b",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 78,
"avg_line_length": 39.6258064516129,
"alnum_prop": 0.5955714750895473,
"repo_name": "raildo/nova",
"id": "3aea72f04bdab372651809fc038a7b3a0bc0a4c3",
"size": "6774",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "nova/tests/functional/libvirt/test_numa_servers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16814792"
},
{
"name": "Shell",
"bytes": "20716"
},
{
"name": "Smarty",
"bytes": "351433"
}
],
"symlink_target": ""
}
|
from google.cloud import vision_v1p3beta1
def sample_delete_product_set():
# Create a client
client = vision_v1p3beta1.ProductSearchClient()
# Initialize request argument(s)
request = vision_v1p3beta1.DeleteProductSetRequest(
name="name_value",
)
# Make the request
client.delete_product_set(request=request)
# [END vision_v1p3beta1_generated_ProductSearch_DeleteProductSet_sync]
|
{
"content_hash": "4b37584174a863f165711cd2add130e5",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 70,
"avg_line_length": 24.823529411764707,
"alnum_prop": 0.7298578199052133,
"repo_name": "googleapis/python-vision",
"id": "1bacb76328ac8e894f331dfd75674255a42ea87d",
"size": "1818",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/vision_v1p3beta1_generated_product_search_delete_product_set_sync.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "3254393"
},
{
"name": "Shell",
"bytes": "30660"
}
],
"symlink_target": ""
}
|
import urllib2
from urllib import urlencode
try:
import simplejson as json
except ImportError:
import json
import sys, re
import datetime, logging
import dateutil.parser
import pycomicvine.error
import collections
_API_URL = "https://www.comicvine.com/api/"
_cached_resources = {}
api_key = ""
def str_to_datetime(value):
try:
return dateutil.parser.parse(value)
except ValueError:
return value
class AttributeDefinition(object):
def __init__(self, target, start_type = None):
def _to_int(value):
try:
new_value = value.replace(',','')
return int(new_value)
except ValueError:
return value
self._start_type = start_type
if target == datetime.datetime or target == 'datetime':
self._target = str_to_datetime
self._target_name = 'datetime'
elif target == int or target == 'int':
self._target = _to_int
self._target_name = 'int'
elif callable(target):
if not isinstance(start_type, type) and \
not (isinstance(start_type, collections.Iterable) and
all(isinstance(t, type) for t in start_type)):
raise pycomicvine.error.IllegalArquementException(
"A start type needs to be defined"
)
self._target = target
self._target_name = 'callable'
elif target == 'keep':
self._target = lambda value: value
self._target_name = 'keep'
else:
self._target = None
self._target_name = target
def convert(self, value):
if self._target_name == 'callable' and \
isinstance(value, self._start_type):
return self._target(value)
elif self._target != None:
if value == None:
pass
elif isinstance(value, basestring):
return self._target(value)
else:
target = getattr(sys.modules[__name__], self._target_name)
if issubclass(target, _SingularResource):
if isinstance(value, dict):
value = target(do_not_download=True, **value)
else:
return value
elif issubclass(target, _ListResource):
if isinstance(value, list):
value = target(value)
elif value == None:
value = target([])
else:
return value
else:
raise pycomicvine.error.NotConvertableError(
"Error in convertion '"+str(value)+"' => "+\
self._target_name
)
return value
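# Hedged examples of AttributeDefinition.convert (values are illustrative):
#     AttributeDefinition(int).convert("1,234")              # -> 1234
#     AttributeDefinition('datetime').convert("2012-01-31")  # -> datetime(2012, 1, 31, 0, 0)
#     AttributeDefinition('keep').convert("as-is")           # -> "as-is"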
class _Resource(object):
class _Response:
def __init__(
self,
error,
limit,
offset,
number_of_page_results,
number_of_total_results,
status_code,
results,
version = None
):
self.error = error
self.limit = limit
self.offset = offset
self.number_of_page_results = number_of_page_results
self.number_of_total_results = number_of_total_results
self.status_code = status_code
if results != None:
self.results = results
else:
self.results = []
    def __init__(self, *args, **kwargs):
        raise NotImplementedError()
    def _request_object(self, baseurl, **params):
        raise NotImplementedError()
@classmethod
def _ensure_resource_url(type):
if not '_resource_url' in type.__dict__:
resource_type = Types.snakify_type_name(type)
type._resource_url = _API_URL + resource_type + "/"
@classmethod
def _request(type, baseurl, **params):
if 'api_key' not in params:
if len(api_key) == 0:
raise pycomicvine.error.InvalidAPIKeyError(
"Invalid API Key"
)
params['api_key'] = api_key
if 'field_list' in params and params['field_list'] != None:
if not isinstance(params['field_list'], basestring):
field_list = ""
try:
for field_name in params['field_list']:
field_list += str(field_name)+","
except TypeError, e:
raise pycomicvine.error.IllegalArquementException(
"'field_list' must be iterable"
)
params['field_list'] = field_list
if re.search(
"([a-z_]*,)*id(,[a-z_]*)*", params['field_list']
) == None:
params['field_list'] = "id," + params['field_list']
timeout = None
if 'timeout' in params:
            if params['timeout'] != None:
timeout = int(params['timeout'])
del params['timeout']
params['format'] = 'json'
params = urlencode(params)
url = baseurl+"?"+params
logging.getLogger(__name__).debug("Calling "+url)
if timeout == None:
response_raw = json.loads(urllib2.urlopen(url).read())
else:
response_raw = json.loads(urllib2.urlopen(
url,
timeout=timeout
).read())
response = type._Response(**response_raw)
if response.status_code != 1:
raise pycomicvine.error.EXCEPTION_MAPPING.get(
response.status_code,
pycomicvine.error.UnknownStatusError
)(response.error)
if 'aliases' in response.results and \
isinstance(response.results['aliases'], basestring):
response.results['aliases'] = response.results[
'aliases'
].split('\n')
return response
class _SingularResource(_Resource):
def __new__(
type,
id,
all = False,
field_list = [],
do_not_download = False,
**kwargs
):
resource_type = kwargs.get(
'resource_type',
type
)
try:
type_id = Types()[resource_type]['id']
except KeyError:
            raise pycomicvine.error.InvalidResourceError(
                resource_type
            )
type._ensure_resource_url()
key = "{0:d}-{1:d}".format(type_id, id)
obj = _cached_resources.get(key)
if obj == None:
obj = object.__new__(type)
_cached_resources[key] = obj
return obj
def __init__(
self,
id,
all = False,
field_list = [],
do_not_download = False,
**kwargs
):
if '_ready' not in self.__dict__:
self._ready = True
try:
type_id = Types()[type(self)]['id']
except KeyError:
raise pycomicvine.error.InvalidResourceError(
"Resource type '{0!s}' does not exist.".format(
type(self)
)
)
self._detail_url = type(self)._resource_url + \
"{0:d}-{1:d}/".format(type_id, id)
self._fields = {'id': id}
if not do_not_download:
if all:
self._fields.update(self._request_object().results)
else:
self._fields.update(self._request_object(
field_list
).results)
if 'field_list' in kwargs:
del kwargs['field_list']
self._fields.update(kwargs)
elif 'field_list' in kwargs:
if not all or not do_not_download:
self._fields.update(self._request_object(
kwargs['field_list']
).results)
if all and not do_not_download:
if 'timeout' in kwargs:
self._fields.update(self._request_object(
timeout=kwargs['timeout']
).results)
def _request_object(self, field_list = None, timeout = None):
if field_list == None:
return type(self)._request(
self._detail_url,
timeout=timeout
)
else:
return type(self)._request(
self._detail_url,
field_list=field_list,
timeout=timeout
)
def __getattribute__(self, name):
def _object_attribute(name):
return object.__getattribute__(self, name)
def _parse_attribute(name):
fields = _object_attribute('_fields')
value = _object_attribute('_fields')[name]
if name in filter(
lambda x: not x.startswith('_'),
type(self).__dict__
):
definition = type(self).__dict__[name]
fields[name] = definition.convert(value)
return fields[name]
name = _object_attribute('_fix_api_error')(name)
try:
if name not in [
'__class__',
'__dict__',
'__member__',
'__methods__',
'_request_object'
] and name not in self.__dict__:
if name in _object_attribute('_fields'):
return _parse_attribute(name)
else:
self._fields.update(
_object_attribute('_request_object')(
[name]
).results
)
return _parse_attribute(name)
except KeyError:
pass
return _object_attribute(name)
def _fix_api_error(self, name):
return name
def __str__(self):
if 'name' in self._fields:
return str(self.name.encode(
'ascii',
'backslashreplace'
)) + " ["+str(self.id)+"]"
else:
return "["+str(self.id)+"]"
def __unicode__(self):
if 'name' in self._fields:
return unicode(self.name.encode(
'ascii',
'backslashreplace'
)) + u" ["+unicode(self.id)+u"]"
else:
return u"["+unicode(self.id)+u"]"
def __repr__(self):
return u"<"+unicode(type(self).__name__)+u": "+unicode(self)\
+u">"
class _ListResource(_Resource):
def _request_object(self, **params):
type(self)._ensure_resource_url()
if isinstance(self, Search):
if 'offset' in params:
limit = params['limit'] or self._limit
                params['page'] = params['offset']/limit + 1
del params['offset']
elif 'page' not in params:
params['page'] = 1
return type(self)._request(type(self)._resource_url, **params)
def __init__(self, init_list = None, **kwargs):
if init_list != None:
if len(kwargs) > 0:
raise TypeError(
"If 'init_list' is given it is the only "+
"allowed argument"
)
self._results = init_list
self._total = len(init_list)
self._limit = len(init_list)
else:
response = self._request_object(**kwargs)
self._results = response.offset*[None] + response.results
self._total = response.number_of_total_results
self._limit = response.limit
if 'limit' in kwargs:
del kwargs['limit']
if 'offset' in kwargs:
del kwargs['offset']
self._args = kwargs
def __len__(self):
return self._total
def __getitem__(self, index):
if isinstance(index, slice):
start = index.start or 0
stop = index.stop or self._total
step = index.step or 1
else:
start = index
stop = index+1
step = 1
if start < 0 or start >= self._total or \
stop > self._total:
raise IndexError('Index out of range')
if len(self._results) < stop or \
None in self._results[start:stop:step]:
for i in range(start, stop, 100):
response = self._request_object(
limit=self._limit,
offset=i,
**self._args
)
if response.number_of_page_results != len(response.results):
logging.getLogger(__name__).warning("number of page results wrong (%d != %d) ",
response.number_of_page_results, len(response.results))
if isinstance(self, Search):
end_result = response.offset + response.number_of_page_results
if len(self._results) < end_result:
self._results.extend(
[None] * (end_result - len(self._results)))
for j in range(response.offset,
response.offset+len(response.results)):
self._results[j] = response.results[j-response.offset]
else:
for j in range(
len(self._results),
i+response.number_of_page_results
):
self._results.append(None)
for j in range(i, i+response.number_of_page_results):
self._results[j] = response.results[j-i]
if isinstance(self._results[index], list):
if not isinstance(index, slice) and len(self._results[index]) == 0:
self._results[index] = None
return None
for i in range(start, stop, step):
if isinstance(self._results[i], dict):
self._parse_result(i)
elif isinstance(self._results[i], list) and \
len(self._results[i]) == 0:
self._results[i] = None
elif isinstance(self._results[index], dict):
self._parse_result(index)
return self._results[index]
def __iter__(self):
for index in xrange(self._total):
yield self[index]
def __str__(self):
return str(unicode(self))
def __unicode__(self):
if len(self) == 0:
return u"[]"
string = u"["
for element in self:
string += unicode(element)+u", "
string = string[:-2]+u"]"
return string
def __repr__(self):
if len(self) == 0:
return unicode(type(self).__name__)+u"[]"
string = unicode(type(self).__name__)+u"["
for element in self:
string += repr(element)+u","
string = string[:-1]+u"]"
return string
def _parse_result(self, index):
if type(self) != Types:
type_dict = Types()
if isinstance(self, Search):
self._results[index] = type_dict[
self._results[index]['resource_type']
]['singular_resource_class'](
do_not_download=True,
**self._results[index]
)
else:
self._results[index] = type_dict[type(self)][
'singular_resource_class'
](do_not_download=True, **self._results[index])
class _SortableListResource(_ListResource):
def __init__(self, init_list = None, sort = None, **kwargs):
if sort != None:
if isinstance(sort, basestring):
if ':' not in sort:
sort += ":asc"
else:
try:
sort = str(sort[0])+":"+str(sort[1])
except KeyError:
if 'field' not in sort:
raise pycomicvine.error.IllegalArquementException(
"Argument 'sort' must contain item 'field'"
)
if 'direction' in sort:
sort = sort['field']+":"+str(sort['direction'])
else:
sort = sort['field']+":asc"
kwargs['sort'] = sort
super(_SortableListResource, self).__init__(init_list, **kwargs)
@classmethod
def search(type, query, **kwargs):
if 'resources' in kwargs:
del kwargs['resources']
types = Types()
resource_type = types[
Types.snakify_type_name(type)
]['detail_resource_name']
return Search(query=query, resources=resource_type, **kwargs)
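# Hedged examples of accepted 'sort' values (normalised form shown on the
# right; the field names are illustrative):
#     Issues(sort="cover_date")                            # -> "cover_date:asc"
#     Issues(sort=("cover_date", "desc"))                  # -> "cover_date:desc"
#     Issues(sort={'field': 'name', 'direction': 'desc'})  # -> "name:desc"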
class Character(_SingularResource):
api_detail_url = AttributeDefinition('keep')
birth = AttributeDefinition(datetime.datetime)
character_enemies = AttributeDefinition('Characters')
character_friends = AttributeDefinition('Characters')
count_of_issue_appearances = AttributeDefinition('keep')
creators = AttributeDefinition('People')
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_appeared_in_issue = AttributeDefinition('Issue')
gender = AttributeDefinition(
lambda value: u'\u2642' if value == 1 else
u'\u2640' if value == 2 else u'\u26a7',
int
)
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issue_credits = AttributeDefinition('Issues')
issues_died_in = AttributeDefinition('Issues')
movies = AttributeDefinition('Movies')
name = AttributeDefinition('keep')
origin = AttributeDefinition('Origin')
powers = AttributeDefinition('Powers')
publisher = AttributeDefinition('Publisher')
real_name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
story_arc_credits = AttributeDefinition('StoryArcs')
team_enemies = AttributeDefinition('Teams')
team_friends = AttributeDefinition('Teams')
teams = AttributeDefinition('Teams')
volume_credits = AttributeDefinition('Volumes')
class Characters(_SortableListResource):
pass
class Chat(_SingularResource):
api_detail_url = AttributeDefinition('keep')
channel_name = AttributeDefinition('keep')
deck = AttributeDefinition('keep')
image = AttributeDefinition('keep')
password = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
title = AttributeDefinition('keep')
class Chats(_SortableListResource):
@classmethod
def search(type, query, **kwargs):
return None
class Concept(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
count_of_isssue_appearances = AttributeDefinition('keep')
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_appeared_in_issue = AttributeDefinition('Issue')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issue_credits = AttributeDefinition('Issues')
movies = AttributeDefinition('Movies')
name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
start_year = AttributeDefinition(int)
volume_credits = AttributeDefinition('Volumes')
def _fix_api_error(self, name):
if name == 'count_of_issue_appearances':
return 'count_of_isssue_appearances'
return super(Concept, self)._fix_api_error(name)
class Concepts(_SortableListResource):
pass
class Episode(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
character_credits = AttributeDefinition('Characters')
characters_died_in = AttributeDefinition('Characters')
concept_credits = AttributeDefinition('Concepts')
air_date = AttributeDefinition(datetime.datetime)
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_appearance_characters = AttributeDefinition('Characters')
first_appearance_concepts = AttributeDefinition('Concepts')
first_appearance_locations = AttributeDefinition('Locations')
first_appearance_objects = AttributeDefinition('Objects')
first_appearance_storyarcs = AttributeDefinition('StoryArcs')
first_appearance_teams = AttributeDefinition('Teams')
has_staff_review = AttributeDefinition('keep')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
episode_number = AttributeDefinition('keep')
location_credits = AttributeDefinition('Locations')
name = AttributeDefinition('keep')
object_credits = AttributeDefinition('Objects')
person_credits = AttributeDefinition('People')
site_detail_url = AttributeDefinition('keep')
story_arc_credits = AttributeDefinition('StoryArcs')
    team_credits = AttributeDefinition('Teams')
series = AttributeDefinition('Series')
class Episodes(_SortableListResource):
pass
class Issue(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
character_credits = AttributeDefinition('Characters')
characters_died_in = AttributeDefinition('Characters')
concept_credits = AttributeDefinition('Concepts')
cover_date = AttributeDefinition(datetime.datetime)
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_appearance_characters = AttributeDefinition('Characters')
first_appearance_concepts = AttributeDefinition('Concepts')
first_appearance_locations = AttributeDefinition('Locations')
first_appearance_objects = AttributeDefinition('Objects')
first_appearance_storyarcs = AttributeDefinition('StoryArcs')
first_appearance_teams = AttributeDefinition('Teams')
has_staff_review = AttributeDefinition('keep')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issue_number = AttributeDefinition(int)
location_credits = AttributeDefinition('Locations')
name = AttributeDefinition('keep')
object_credits = AttributeDefinition('Objects')
person_credits = AttributeDefinition('People')
site_detail_url = AttributeDefinition('keep')
store_date = AttributeDefinition(datetime.datetime)
story_arc_credits = AttributeDefinition('StoryArcs')
team_credits = AttributeDefinition('Teams')
teams_disbanded_in = AttributeDefinition('Teams')
volume = AttributeDefinition('Volume')
def __unicode__(self):
string = u""
if 'name' in self._fields and self._fields['name'] != None:
string += unicode(self.name.encode(
'ascii',
'backslashreplace'
))+u" "
if 'issue_number' in self._fields:
string += u"#"+unicode(self.issue_number)+u" "
else:
if 'issue_number' in self._fields:
string += u"#"+unicode(self.issue_number)+u" "
if 'volume' in self._fields:
string = unicode(self.volume.name.encode(
'ascii',
'backslashreplace'
))+u" "+string
return string + u"["+unicode(self.id)+u"]"
def _fix_api_error(self, name):
if name == 'disbanded_teams':
return 'teams_disbanded_in'
if name == 'team_disbanded_in':
return 'teams_disbanded_in'
return super(Issue, self)._fix_api_error(name)
class Issues(_SortableListResource):
pass
class Location(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
count_of_issue_appearances = AttributeDefinition(int)
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_appeared_in_issue = AttributeDefinition('Issue')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issue_credits = AttributeDefinition('Issues')
movies = AttributeDefinition('Movies')
name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
start_year = AttributeDefinition(int)
story_arc_credits = AttributeDefinition('StoryArcs')
volume_credits = AttributeDefinition('Volumes')
class Locations(_SortableListResource):
pass
class Movie(_SingularResource):
api_detail_url = AttributeDefinition('keep')
box_office_revenue = AttributeDefinition(int)
budget = AttributeDefinition(int)
characters = AttributeDefinition('Characters')
concepts = AttributeDefinition('Concepts')
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
    distributor = AttributeDefinition('keep')
has_staff_review = AttributeDefinition('keep')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
locations = AttributeDefinition('Locations')
name = AttributeDefinition('keep')
producers = AttributeDefinition('People')
rating = AttributeDefinition('keep')
release_date = AttributeDefinition(datetime.datetime)
    runtime = AttributeDefinition(
        lambda value: int(str(value).split(':')[0]) * 60 + \
                int(str(value).split(':')[1]) \
                if ':' in str(value) \
                else int(value),
        basestring
    )
site_detail_url = AttributeDefinition('keep')
studios = AttributeDefinition('keep')
teams = AttributeDefinition('Teams')
objects = AttributeDefinition('Objects')
total_revenue = AttributeDefinition(int)
writers = AttributeDefinition('People')
def _fix_api_error(self, name):
if name == 'things':
return 'objects'
return super(Movie, self)._fix_api_error(name)
class Movies(_SortableListResource):
pass
class Object(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
count_of_issue_appearances = AttributeDefinition(int)
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_appeared_in_issue = AttributeDefinition('Issue')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issue_credits = AttributeDefinition('Issues')
movies = AttributeDefinition('Movies')
name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
start_year = AttributeDefinition(int)
story_arc_credits = AttributeDefinition('StoryArcs')
volume_credits = AttributeDefinition('Volumes')
class Objects(_SortableListResource):
pass
class Origin(_SingularResource):
api_detail_url = AttributeDefinition('keep')
character_set = AttributeDefinition('keep')
id = AttributeDefinition('keep')
name = AttributeDefinition('keep')
profiles = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
class Origins(_SortableListResource):
@classmethod
def search(type, query, **kwargs):
return None
class Person(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
birth = AttributeDefinition(datetime.datetime)
count_of_isssue_appearances = AttributeDefinition('keep')
country = AttributeDefinition('keep')
created_characters = AttributeDefinition('Characters')
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
death = AttributeDefinition(
lambda value: str_to_datetime(value.get('date')) if
isinstance(value, dict) else
str_to_datetime(value),
(dict, basestring)
)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
email = AttributeDefinition('keep')
gender = AttributeDefinition(
lambda value: u'\u2642' if value == 1 else
u'\u2640' if value == 2 else u'\u26a7',
int
)
hometown = AttributeDefinition('keep')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issues = AttributeDefinition('Issues')
name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
story_arc_credits = AttributeDefinition('StoryArcs')
volume_credits = AttributeDefinition('Volumes')
website = AttributeDefinition('keep')
def _fix_api_error(self, name):
if name == 'count_of_issue_appearances':
return 'count_of_isssue_appearances'
if name == 'issue_credits':
return 'issues'
return super(Person, self)._fix_api_error(name)
class People(_SortableListResource):
pass
class Power(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
characters = AttributeDefinition('Characters')
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
description = AttributeDefinition('keep')
id = AttributeDefinition('keep')
name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
class Powers(_SortableListResource):
@classmethod
def search(type, query, **kwargs):
return None
class Promo(_SingularResource):
api_detail_url = AttributeDefinition('keep')
date_added = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
link = AttributeDefinition('keep')
name = AttributeDefinition('keep')
class Promos(_SortableListResource):
@classmethod
def search(type, query, **kwargs):
return None
class Publisher(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
characters = AttributeDefinition('Characters')
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
location_address = AttributeDefinition('keep')
location_city = AttributeDefinition('keep')
location_state = AttributeDefinition('keep')
name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
story_arcs = AttributeDefinition('StoryArcs')
teams = AttributeDefinition('Teams')
volumes = AttributeDefinition('Volumes')
class Publishers(_SortableListResource):
pass
class Search(_ListResource):
def __init__(self, query, **kwargs):
super(Search, self).__init__(query=query, **kwargs)
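# A minimal usage sketch from user code (the API key and query below are
# placeholders):
#     import pycomicvine
#     pycomicvine.api_key = "YOUR_API_KEY"
#     results = pycomicvine.Search("Batman", resources="volume")
#     first = results[0]   # indexing lazily triggers the paged API request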
class Series(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
character_credits = AttributeDefinition('Characters')
count_of_episodes = AttributeDefinition('keep')
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_episode = AttributeDefinition('Episode')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
last_episode = AttributeDefinition('Episode')
location_credits = AttributeDefinition('Locations')
name = AttributeDefinition('keep')
publisher = AttributeDefinition('Publisher')
site_detail_url = AttributeDefinition('keep')
start_year = AttributeDefinition('keep')
class SeriesList(_ListResource):
pass
class StoryArc(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
count_of_isssue_appearances = AttributeDefinition(int)
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_appeared_in_issue = AttributeDefinition('Issue')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issues = AttributeDefinition('Issues')
movies = AttributeDefinition('Movies')
name = AttributeDefinition('keep')
publisher = AttributeDefinition('Publisher')
site_detail_url = AttributeDefinition('keep')
def _fix_api_error(self, name):
if name == 'count_of_issue_appearances':
return 'count_of_isssue_appearances'
return super(StoryArc, self)._fix_api_error(name)
class StoryArcs(_SortableListResource):
pass
class Team(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
character_enemies = AttributeDefinition('Characters')
character_friends = AttributeDefinition('Characters')
characters = AttributeDefinition('Characters')
count_of_isssue_appearances = AttributeDefinition(int)
count_of_team_members = AttributeDefinition(int)
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
disbanded_in_issues = AttributeDefinition('Issues')
first_appeared_in_issue = AttributeDefinition('Issue')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issue_credits = AttributeDefinition('Issues')
isssues_disbanded_in = AttributeDefinition('Issues')
movies = AttributeDefinition('Movies')
name = AttributeDefinition('keep')
publisher = AttributeDefinition('Publisher')
site_detail_url = AttributeDefinition('keep')
story_arc_credits = AttributeDefinition('StoryArcs')
volume_credits = AttributeDefinition('Volumes')
def _fix_api_error(self, name):
if name == 'count_of_issue_appearances':
return 'count_of_isssue_appearances'
if name == 'issues_disbanded_in':
return 'isssues_disbanded_in'
return super(Team, self)._fix_api_error(name)
class Teams(_SortableListResource):
pass
class Types(_ListResource):
def __new__(type):
if not '_instance' in type.__dict__:
type._instance = object.__new__(type)
return type._instance
def __init__(self):
if not '_ready' in dir(self):
super(Types, self).__init__()
self._mapping = {}
for type in self:
type['singular_resource_class'] = getattr(
sys.modules[__name__],
Types._camilify_type_name(
type['detail_resource_name']),
UnknownResource
)
self._mapping[type['detail_resource_name']] = type
self._mapping[type['list_resource_name']] = type
self._ready = True
def __getitem__(self, key):
if isinstance(key, (int, long, slice)):
return super(Types, self).__getitem__(key)
if isinstance(key, type):
return self._mapping[self.snakify_type_name(key)]
return self._mapping[key]
@staticmethod
def snakify_type_name(type):
return re.sub(r'([A-Z]+)',r"_\1", type.__name__)[1:].lower()
@staticmethod
def _camilify_type_name(string):
string = string[0].upper() + string[1:].lower()
camel_string = ""
next_upper = False
for c in string:
if c == "_":
next_upper = True
continue
if next_upper:
camel_string += c.upper()
next_upper = False
else:
camel_string += c
return camel_string
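# Hedged examples of the two name conversions above:
#     Types.snakify_type_name(StoryArc)       # -> "story_arc"
#     Types._camilify_type_name("story_arc")  # -> "StoryArc"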
class UnknownResource(_Resource):
pass
class Video(_SingularResource):
api_detail_url = AttributeDefinition('keep')
deck = AttributeDefinition('keep')
high_url = AttributeDefinition('keep')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
length_seconds = AttributeDefinition('keep')
low_url = AttributeDefinition('keep')
name = AttributeDefinition('keep')
publish_date = AttributeDefinition(datetime.datetime)
site_detail_url = AttributeDefinition('keep')
url = AttributeDefinition('keep')
user = AttributeDefinition('keep')
class Videos(_SortableListResource):
pass
class VideoType(_SingularResource):
api_detail_url = AttributeDefinition('keep')
deck = AttributeDefinition('keep')
id = AttributeDefinition('keep')
name = AttributeDefinition('keep')
site_detail_url = AttributeDefinition('keep')
class VideoTypes(_SortableListResource):
@classmethod
def search(type, query, **kwargs):
return None
class Volume(_SingularResource):
aliases = AttributeDefinition('keep')
api_detail_url = AttributeDefinition('keep')
characters = AttributeDefinition('Characters')
concepts = AttributeDefinition('Concepts')
count_of_issues = AttributeDefinition(int)
date_added = AttributeDefinition(datetime.datetime)
date_last_updated = AttributeDefinition(datetime.datetime)
deck = AttributeDefinition('keep')
description = AttributeDefinition('keep')
first_issue = AttributeDefinition('Issue')
id = AttributeDefinition('keep')
image = AttributeDefinition('keep')
issues = AttributeDefinition('Issues')
last_issue = AttributeDefinition('Issue')
locations = AttributeDefinition('Locations')
name = AttributeDefinition('keep')
objects = AttributeDefinition('Objects')
people = AttributeDefinition('People')
publisher = AttributeDefinition('Publisher')
site_detail_url = AttributeDefinition('keep')
start_year = AttributeDefinition(int)
def _fix_api_error(self, name):
if name == 'character_credits':
return 'characters'
if name == 'concept_credits':
return 'concepts'
if name == 'location_credits':
return 'locations'
if name == 'object_credits':
return 'objects'
if name == 'person_credits':
return 'people'
return super(Volume, self)._fix_api_error(name)
class Volumes(_SortableListResource):
pass
|
{
"content_hash": "1872d4c9b322537f6915c9f4bb9f25c8",
"timestamp": "",
"source": "github",
"line_count": 1047,
"max_line_length": 99,
"avg_line_length": 38.10124164278892,
"alnum_prop": 0.587661686553695,
"repo_name": "authmillenon/pycomicvine",
"id": "7d1d449beb8410759d7e945996c7bee58991ede2",
"size": "41004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycomicvine/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "95514"
}
],
"symlink_target": ""
}
|
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
from urllib.parse import parse_qs, urljoin, urlparse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from .._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
resource_group_name: str, cluster_name: str, application_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"),
"applicationName": _SERIALIZER.url("application_name", application_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_or_update_request(
resource_group_name: str, cluster_name: str, application_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"),
"applicationName": _SERIALIZER.url("application_name", application_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_update_request(
resource_group_name: str, cluster_name: str, application_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"),
"applicationName": _SERIALIZER.url("application_name", application_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str, cluster_name: str, application_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"),
"applicationName": _SERIALIZER.url("application_name", application_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_request(resource_group_name: str, cluster_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01-preview")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class ApplicationsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.servicefabricmanagedclusters.ServiceFabricManagedClustersManagementClient`'s
:attr:`applications` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def get(
self, resource_group_name: str, cluster_name: str, application_name: str, **kwargs: Any
) -> _models.ApplicationResource:
"""Gets a Service Fabric managed application resource.
Get a Service Fabric managed application resource created or in the process of being created in
the Service Fabric cluster resource.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationResource or the result of cls(response)
:rtype: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationResource]
request = build_get_request(
resource_group_name=resource_group_name,
cluster_name=cluster_name,
application_name=application_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorModel, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("ApplicationResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}"} # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
cluster_name: str,
application_name: str,
parameters: Union[_models.ApplicationResource, IO],
**kwargs: Any
) -> _models.ApplicationResource:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationResource]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ApplicationResource")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
cluster_name=cluster_name,
application_name=application_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorModel, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("ApplicationResource", pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize("ApplicationResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}"} # type: ignore
@overload
def begin_create_or_update(
self,
resource_group_name: str,
cluster_name: str,
application_name: str,
parameters: _models.ApplicationResource,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ApplicationResource]:
"""Creates or updates a Service Fabric managed application resource.
Create or update a Service Fabric managed application resource with the specified name.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
:param parameters: The application resource. Required.
:type parameters: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ApplicationResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update(
self,
resource_group_name: str,
cluster_name: str,
application_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ApplicationResource]:
"""Creates or updates a Service Fabric managed application resource.
Create or update a Service Fabric managed application resource with the specified name.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
:param parameters: The application resource. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ApplicationResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
cluster_name: str,
application_name: str,
parameters: Union[_models.ApplicationResource, IO],
**kwargs: Any
) -> LROPoller[_models.ApplicationResource]:
"""Creates or updates a Service Fabric managed application resource.
Create or update a Service Fabric managed application resource with the specified name.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
        :param parameters: The application resource. Is either a model type or an IO type. Required.
:type parameters: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ApplicationResource or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationResource]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial( # type: ignore
resource_group_name=resource_group_name,
cluster_name=cluster_name,
application_name=application_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("ApplicationResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(
PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}"} # type: ignore
@overload
def update(
self,
resource_group_name: str,
cluster_name: str,
application_name: str,
parameters: _models.ApplicationUpdateParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ApplicationResource:
"""Updates the tags of an application resource of a given managed cluster.
Updates the tags of an application resource of a given managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
:param parameters: The application resource updated tags. Required.
:type parameters: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationUpdateParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationResource or the result of cls(response)
:rtype: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def update(
self,
resource_group_name: str,
cluster_name: str,
application_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.ApplicationResource:
"""Updates the tags of an application resource of a given managed cluster.
Updates the tags of an application resource of a given managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
:param parameters: The application resource updated tags. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationResource or the result of cls(response)
:rtype: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def update(
self,
resource_group_name: str,
cluster_name: str,
application_name: str,
parameters: Union[_models.ApplicationUpdateParameters, IO],
**kwargs: Any
) -> _models.ApplicationResource:
"""Updates the tags of an application resource of a given managed cluster.
Updates the tags of an application resource of a given managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
        :param parameters: The application resource updated tags. Is either a model type or an IO type.
Required.
:type parameters: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationUpdateParameters
or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationResource or the result of cls(response)
:rtype: ~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationResource]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ApplicationUpdateParameters")
request = build_update_request(
resource_group_name=resource_group_name,
cluster_name=cluster_name,
application_name=application_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorModel, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("ApplicationResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}"} # type: ignore
def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, cluster_name: str, application_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
cluster_name=cluster_name,
application_name=application_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorModel, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}"} # type: ignore
@distributed_trace
def begin_delete(
self, resource_group_name: str, cluster_name: str, application_name: str, **kwargs: Any
) -> LROPoller[None]:
"""Deletes a Service Fabric managed application resource.
Delete a Service Fabric managed application resource with the specified name.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:param application_name: The name of the application resource. Required.
:type application_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
cluster_name=cluster_name,
application_name=application_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(
PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications/{applicationName}"} # type: ignore
@distributed_trace
def list(
self, resource_group_name: str, cluster_name: str, **kwargs: Any
) -> Iterable["_models.ApplicationResource"]:
"""Gets the list of managed application resources created in the specified Service Fabric cluster
resource.
Gets all managed application resources created or in the process of being created in the
Service Fabric cluster resource.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param cluster_name: The name of the cluster resource. Required.
:type cluster_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ApplicationResource or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.servicefabricmanagedclusters.models.ApplicationResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.ApplicationResourceList]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
cluster_name=cluster_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ApplicationResourceList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorModel, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceFabric/managedclusters/{clusterName}/applications"} # type: ignore
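# A minimal usage sketch (illustrative only; the credential and every resource
# name below are placeholders, not values taken from this module):
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.servicefabricmanagedclusters import ServiceFabricManagedClustersManagementClient
#     client = ServiceFabricManagedClustersManagementClient(DefaultAzureCredential(), "<subscription-id>")
#     for app in client.applications.list("<resource-group>", "<cluster-name>"):
#         print(app.name)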
|
{
"content_hash": "be514e9b870897a853318324a36a30c9",
"timestamp": "",
"source": "github",
"line_count": 876,
"max_line_length": 229,
"avg_line_length": 47.317351598173516,
"alnum_prop": 0.6571531966224367,
"repo_name": "Azure/azure-sdk-for-python",
"id": "9c981b52316c714cdef1a1830506be801b749664",
"size": "41950",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/servicefabricmanagedclusters/azure-mgmt-servicefabricmanagedclusters/azure/mgmt/servicefabricmanagedclusters/operations/_applications_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
""" ib.ext.cfg.EWrapper -> config module for EWrapper.java.
"""
modulePreamble = [
'from ib.ext.AnyWrapper import AnyWrapper',
]
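# Lines listed in modulePreamble are emitted verbatim at the top of the
# generated ib/ext/EWrapper.py module (assumption based on java2python's
# config conventions).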
|
{
"content_hash": "10ae7a3fef663b0b1a3cbca360e744ed",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 59,
"avg_line_length": 23,
"alnum_prop": 0.6666666666666666,
"repo_name": "lockdm/ibpy",
"id": "e5afc27d44c09c3f768333dbecc6c4181695c3d7",
"size": "184",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "ib/ext/cfg/EWrapper.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "3370"
},
{
"name": "Python",
"bytes": "220551"
}
],
"symlink_target": ""
}
|
"""Adapter layer to distributed job manager server api
"""
import os
import os.path
import logging
import datetime
from fabric.api import *
from fabric.operations import put
import fabric.network
from file_locator import FileLocator
import utils
from dist_job_mgr.client import get_local_connection
from dist_job_mgr.version import VERSION
import dist_job_mgr.common as common
class DjmAdapterError(Exception):
pass
logger = logging.getLogger(__name__)
def get_djm_connection():
fl = FileLocator()
return get_local_connection(fl.get_djm_server_dir())
class DjmJob(object):
def __init__(self, c, job_id, nodes):
self.c = c
self.job_id = job_id
self.nodes = nodes
self.nodes_by_name = {}
for node in nodes:
if node["private_ip"]!=None:
ip_address = node["private_ip"]
else:
ip_address = node["public_ip"]
if not ip_address:
raise Exception("Neither public ip address nor private ip address specified for node %s, need to specify at least one" % node["name"])
node["datablox_ip_address"] = ip_address
self.nodes_by_name[node["name"]] = node
        self.nodes_except_master = [node["name"] for node in self.nodes
                                    if node["name"] != "master"]
# set up the fabric nodes
env.hosts = [node["name"] for node in self.nodes]
env.roledefs['workers'] = self.nodes_except_master
for node in nodes:
env.hostdefs[node["name"]] = "%s@%s" % (node["os_username"],
node["contact_address"])
logger.debug("Node %s defined as %s" % (node["name"],
env.hostdefs[node["name"]]))
if "master" in env.hosts:
env.roledefs['master'] = ['master',]
else:
env.roledefs['master'] = []
def has_node(self, node_name):
        return node_name in self.nodes_by_name
def get_node(self, node_name):
return self.nodes_by_name[node_name]
def stop_job(self, successful=True, msg=None):
if successful:
self.c.stop_job(self.job_id,
common.JobStatus.JOB_SUCCESSFUL,
comment=msg)
logger.debug("Stopped job %s, status=JOB_SUCESSFUL" % self.job_id)
else:
self.c.stop_job(self.job_id,
common.JobStatus.JOB_FAILED,
comment=msg)
logger.debug("Stopped job %s, status=FAILED" % self.job_id)
fabric.network.disconnect_all()
@task
@parallel
@roles("workers")
def setup_worker_node(reuse_existing_installs):
logger.info("setup_worker_node: reuse_existing_installs = %s" % reuse_existing_installs)
fl = FileLocator()
dist_path = fl.get_engage_distribution_file()
# todo: don't copy engage if existing install can be reused
setup_script = os.path.join(fl.get_sw_packages_dir(), "setup_caretaker.sh")
put(setup_script, "~/setup_caretaker.sh")
run("chmod 755 ~/setup_caretaker.sh")
if reuse_existing_installs:
run("~/setup_caretaker.sh --reuse-existing-install")
else:
put(dist_path, "~/" + os.path.basename(dist_path))
run("~/setup_caretaker.sh")
def start_job_and_get_nodes(node_list, config_file_name, total_nodes=None,
reuse_existing_installs=True):
"""Given a node list and optional number of nodes, try to get the
requested nodes and start a job.
"""
if not total_nodes:
total_nodes = len(node_list)
    if total_nodes < 1:
raise DjmAdapterError("Must have at least one node")
c = get_djm_connection()
# make sure there aren't any dead jobs laying around
c.cleanup_dead_coordinators()
pool = None
for node_name in node_list:
n = c.find_node_by_name(node_name)
if not n:
raise DjmAdapterError("Node '%s' not defined" % node_name)
if n["pool"]:
            if pool and pool != n["pool"]:
raise DjmAdapterError("Cannot take nodes from both pool %s and pool %s"%
(pool, n["pool"]))
pool = n["pool"]
start_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
j = c.start_job(config_file_name, common.JobType.ONE_TIME_JOB,
total_nodes, "Datablox job started %s" % start_time,
node_pool_name=pool, requested_nodes=node_list)
logger.info("Started DJM job %s" % j)
try:
fl = FileLocator()
allocated_nodes = c.query_nodes(job_id=j)
djm_job = DjmJob(c, j, allocated_nodes)
logger.info("Setting up nodes")
# for all the non-master nodes, we setup the caretaker
nodes_except_master = djm_job.nodes_except_master
        if nodes_except_master:
execute(setup_worker_node, reuse_existing_installs)
# make sure the master node has the caretaker running
if djm_job.has_node("master"):
utils.run_svcctl(fl, ["start", "all"])
return djm_job
except KeyboardInterrupt:
logger.exception("Got keyboard interrupt in node initialization")
c.stop_job(j, common.JobStatus.JOB_FAILED,
comment="Got keyboard interrupt in node initialization")
raise
    except Exception as e:
logger.exception("DJM problem in node initialization: %s" % e)
c.stop_job(j, common.JobStatus.JOB_FAILED,
comment="DJM problem in node initialization: %s" % e)
raise
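# Illustrative usage (node names and config path are hypothetical):
#     djm_job = start_job_and_get_nodes(["master", "worker1"], "/path/to/config.json")
#     try:
#         ...  # run the datablox job across djm_job.nodes
#         djm_job.stop_job(successful=True)
#     except Exception as e:
#         djm_job.stop_job(successful=False, msg=str(e))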
|
{
"content_hash": "7691239629fcdce28cf56c073bbf7c09",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 150,
"avg_line_length": 38.755102040816325,
"alnum_prop": 0.5875021941372652,
"repo_name": "mpi-sws-rse/datablox",
"id": "252501fc1fca149903a98d071d1ae837c502ddaa",
"size": "5697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "engage/adapter_pkg/datablox_engage_adapter/djm_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "7801"
},
{
"name": "Python",
"bytes": "378676"
},
{
"name": "Shell",
"bytes": "7299"
}
],
"symlink_target": ""
}
|
'''
baseline:
after: true
before: false
counts: 120
detector: H1
mass: 34.2
settling_time: 15.0
default_fits: nominal
equilibration:
eqtime: 0.0
inlet: R
inlet_delay: 3
outlet: O
use_extraction_eqtime: true
multicollect:
counts: 0
detector: H1
isotope: Ar40
peakcenter:
after: true
before: false
detector: H1
detectors:
- H1
- AX
- CDD
isotope: Ar40
peakhop:
hops_name: multihops
use_peak_hop: true
'''
ACTIVE_DETECTORS = ('H2', 'H1', 'AX', 'L1', 'L2', 'CDD')
NCYCLES = 2
def main():
info('unknown measurement script')
activate_detectors(*ACTIVE_DETECTORS)
if mx.peakcenter.before:
        peak_center(detector=mx.peakcenter.detector, isotope=mx.peakcenter.isotope)
position_magnet(mx.multicollect.isotope, detector=mx.multicollect.detector)
'''
Equilibrate is non-blocking so use a sniff or sleep as a placeholder
e.g sniff(<equilibration_time>) or sleep(<equilibration_time>)
'''
if mx.equilibration.use_extraction_eqtime:
eqt = eqtime
else:
eqt = mx.equilibration.eqtime
equilibrate(eqtime=eqt, inlet=mx.equilibration.inlet, outlet=mx.equilibration.outlet,
delay=mx.equilibration.inlet_delay)
set_time_zero(0)
# sniff the gas during equilibration
sniff(eqt)
    hops = load_hops('hops/{}.txt'.format(mx.peakhop.hops_name))
define_hops(hops)
set_fits()
set_baseline_fits()
if mx.baseline.before:
        baselines(ncounts=mx.baseline.counts, mass=mx.baseline.mass, detector=mx.baseline.detector,
                  settling_time=mx.baseline.settling_time)
# multicollect on active detectors
# multicollect(ncounts=MULTICOLLECT_COUNTS, integration_time=1)
peak_hop(ncycles=NCYCLES, hops=hops)
if mx.baseline.after:
# necessary if peak hopping
define_detectors('Ar40','H1')
        baselines(ncounts=mx.baseline.counts, mass=mx.baseline.mass, detector=mx.baseline.detector,
                  settling_time=mx.baseline.settling_time)
if mx.peakcenter.after:
activate_detectors(*mx.peakcenter.detectors, **{'peak_center':True})
peak_center(detector=mx.peakcenter.detector, isotope=mx.peakcenter.isotope)
if use_cdd_warming:
gosub('warm_cdd', argv=(mx.equilibration.outlet,))
info('finished measure script')
|
{
"content_hash": "8f3d925c342903fcebf487cb5dd88514",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 98,
"avg_line_length": 25.032258064516128,
"alnum_prop": 0.6786941580756014,
"repo_name": "USGSDenverPychron/pychron",
"id": "614000f7a11a53ddd9f1d0d3e2cf8894624e26df",
"size": "2342",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "docs/user_guide/operation/scripts/examples/argus/measurement/jan_peak_hop_multi.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
}
|
"""Runtime support code for executables created by Subpar.
1. Third-party modules require some PYTHONPATH manipulation.
2. Python can natively import python modules from a zip archive, but
C extension modules require some help.
3. Resources stored in a .par file may need to be exposed as OS-level
files instead of Python File objects.
We hook into the pkg_resources module, if present, to achieve 2 and 3.
Limitations:
A. Retrieving resources from packages
It should be possible to do this:
fn = pkg_resources.resource_filename('mypackage', 'myfile')
But instead one must do
fn = pkg_resources.resource_filename(
              pkg_resources.Requirement.parse('mypackage'),
'myfile')
B. Extraction dir
You should explicitly set the default extraction directory, via
`pkg_resources.set_extraction_path(my_directory)`, since the default
is not safe. For example:
tmpdir = tempfile.mkdtemp()
     pkg_resources.set_extraction_path(tmpdir)
You should arrange for that directory to be deleted at some point.
Note that pkg_resources.cleanup_resources() is an unimplemented no-op,
so use something else. For example:
atexit.register(lambda: shutil.rmtree(tmpdir, ignore_errors=True))
"""
import atexit
import os
import pkgutil
import shutil
import sys
import tempfile
import warnings
import zipfile
import zipimport
def _log(msg):
"""Print a debugging message in the same format as python -vv output"""
if sys.flags.verbose:
sys.stderr.write(msg)
sys.stderr.write('\n')
def _find_archive():
"""Find the path to the currently executing .par file
We don't handle the case where prefix is non-empty.
"""
main = sys.modules.get('__main__')
if not main:
_log('# __main__ module not found')
return None
    main_loader = getattr(main, '__loader__', None)
if not main_loader:
_log('# __main__.__loader__ not set')
return None
    prefix = getattr(main_loader, 'prefix', None)
if prefix != '':
_log('# unexpected prefix for __main__.__loader__ is %s' %
main_loader.prefix)
return None
    archive_path = getattr(main_loader, 'archive', None)
if not archive_path:
_log('# missing archive for __main__.__loader__')
return None
return archive_path
def _extract_files(archive_path):
"""Extract the contents of this .par file to disk.
This creates a temporary directory, and registers an atexit
handler to clean that directory on program exit. Extraction and
cleanup will potentially use significant time and disk space.
Returns:
Directory where contents were extracted to.
"""
extract_dir = tempfile.mkdtemp()
def _extract_files_cleanup():
shutil.rmtree(extract_dir, ignore_errors=True)
atexit.register(_extract_files_cleanup)
_log('# extracting %s to %s' % (archive_path, extract_dir))
zip_file = zipfile.ZipFile(archive_path, mode='r')
zip_file.extractall(extract_dir)
zip_file.close()
return extract_dir
def _version_check_pkg_resources(pkg_resources):
"""Check that pkg_resources supports the APIs we need."""
# Check that pkg_resources is new enough.
#
# Determining the version of an arbitrarily old version of
# pkg_resources is tough, since it doesn't have a version literal,
# and the accompanying setuptools package computes its version
# dynamically from metadata that might not exist. Also setuptools
# might not exist, especially in the case of the pip-vendored copy
# of pkg_resources.
#
# We do a feature detection instead. We examine
# pkg_resources.WorkingSet.add, and see if it has at least the
# third default argument ('replace').
try:
if sys.version_info[0] < 3:
defaults = pkg_resources.WorkingSet.add.im_func.func_defaults
else:
defaults = pkg_resources.WorkingSet.add.__defaults__
return len(defaults) >= 3
except AttributeError:
return False
def _setup_pkg_resources(pkg_resources_name):
"""Setup hooks into the `pkg_resources` module
This enables the pkg_resources module to find metadata from wheels
that have been included in this .par file.
The functions and classes here are scoped to this function, since
    we might have multiple pkg_resources modules, or none.
"""
try:
__import__(pkg_resources_name)
pkg_resources = sys.modules.get(pkg_resources_name)
if pkg_resources is None:
return
except ImportError:
# Skip setup
return
if not _version_check_pkg_resources(pkg_resources):
# Skip setup
return
class DistInfoMetadata(pkg_resources.EggMetadata):
"""Metadata provider for zip files containing .dist-info
In find_dist_info_in_zip(), we call
metadata.resource_listdir(directory_name). However, it doesn't
work with EggMetadata, because _zipinfo_name() expects the
        directory name to end with a /, while metadata._listdir() expects
        the directory to _not_ end with a /.
Therefore this class exists.
"""
def _zipinfo_name(self, fspath):
"""Overrides EggMetadata._zipinfo_name"""
# Convert a virtual filename (full path to file) into a
# zipfile subpath usable with the zipimport directory
# cache for our target archive
fspath = fspath.rstrip(os.sep)
if fspath == self.loader.archive:
return ''
if fspath.startswith(self.zip_pre):
return fspath[len(self.zip_pre):]
raise AssertionError(
"%s is not a subpath of %s" % (fspath, self.zip_pre)
)
def _parts(self, zip_path):
"""Overrides EggMetadata._parts"""
# Convert a zipfile subpath into an egg-relative path part
# list.
fspath = self.zip_pre + zip_path
if fspath == self.egg_root:
return []
if fspath.startswith(self.egg_root + os.sep):
return fspath[len(self.egg_root) + 1:].split(os.sep)
raise AssertionError(
"%s is not a subpath of %s" % (fspath, self.egg_root)
)
def find_dist_info_in_zip(importer, path_item, only=False):
"""Find dist-info style metadata in zip files.
importer: PEP 302-style Importer object
path_item (str): filename or pseudo-filename like:
/usr/somedirs/main.par
or
/usr/somedirs/main.par/pypi__portpicker_1_2_0
only (bool): We ignore the `only` flag because it's not clear
what it should actually do in this case.
Yields pkg_resources.Distribution objects
"""
metadata = DistInfoMetadata(importer)
for subitem in metadata.resource_listdir('/'):
basename, ext = os.path.splitext(subitem)
if ext.lower() == '.dist-info':
# Parse distribution name
match = pkg_resources.EGG_NAME(basename)
project_name = 'unknown'
if match:
project_name = match.group('name')
# Create metadata object
subpath = os.path.join(path_item, subitem)
submeta = DistInfoMetadata(
zipimport.zipimporter(path_item))
# Override pkg_resources defaults to avoid
# "resource_filename() only supported for .egg, not
# .zip" message
submeta.egg_name = project_name
submeta.egg_info = subpath
submeta.egg_root = path_item
dist = pkg_resources.Distribution.from_location(
path_item, subitem, submeta)
yield dist
def find_eggs_and_dist_info_in_zip(importer, path_item, only=False):
"""Chain together our finder and the standard pkg_resources finder
For simplicity, and since pkg_resources doesn't provide a public
interface to do so, we hardcode the chaining (find_eggs_in_zip).
"""
# Our finder
for dist in find_dist_info_in_zip(importer, path_item, only):
yield dist
# The standard pkg_resources finder
for dist in pkg_resources.find_eggs_in_zip(importer, path_item, only):
yield dist
return
# This overwrites the existing registered finder.
pkg_resources.register_finder(zipimport.zipimporter,
find_eggs_and_dist_info_in_zip)
# Note that the default WorkingSet has already been created, and
# there is no public interface to easily refresh/reload it that
# doesn't also have a "Don't use this" warning. So we manually
# add just the entries we know about to the existing WorkingSet.
for entry in sys.path:
importer = pkgutil.get_importer(entry)
if isinstance(importer, zipimport.zipimporter):
for dist in find_dist_info_in_zip(importer, entry, only=True):
if isinstance(dist._provider, DistInfoMetadata):
pkg_resources.working_set.add(dist, entry, insert=False,
replace=True)
def _initialize_import_path(import_roots, import_prefix):
"""Add extra entries to PYTHONPATH so that modules can be imported."""
# We try to match to order of Bazel's stub
full_roots = [
os.path.join(import_prefix, import_root)
for import_root in import_roots]
sys.path[1:1] = full_roots
_log('# adding %s to sys.path' % full_roots)
def setup(import_roots, zip_safe):
"""Initialize subpar run-time support
Args:
      import_roots (list): subdirs inside the .par file to add to the
module import path at runtime.
zip_safe (bool): If False, extract the .par file contents to a
temporary directory, and import everything from
that directory.
Returns:
True if setup was successful, else False
"""
archive_path = _find_archive()
if not archive_path:
warnings.warn('Failed to initialize .par file runtime support',
UserWarning)
return False
if os.path.abspath(sys.path[0]) != os.path.abspath(archive_path):
warnings.warn('Failed to initialize .par file runtime support. ' +
'archive_path was %r, sys.path was %r' % (
archive_path, sys.path),
UserWarning)
return False
# Extract files to disk if necessary
if not zip_safe:
extract_dir = _extract_files(archive_path)
# sys.path[0] is the name of the executing .par file. Point
# it to the extract directory instead, so that Python searches
# there for imports.
sys.path[0] = extract_dir
import_prefix = extract_dir
else: # Import directly from .par file
extract_dir = None
import_prefix = archive_path
# Initialize import path
_initialize_import_path(import_roots, import_prefix)
# Add hook for package metadata
_setup_pkg_resources('pkg_resources')
_setup_pkg_resources('pip._vendor.pkg_resources')
return True
|
{
"content_hash": "15d6b9edc6f8a7b447aa5253a723d6c5",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 78,
"avg_line_length": 36.06984126984127,
"alnum_prop": 0.6256820982221439,
"repo_name": "google/subpar",
"id": "e8ec253169ec51c620ef10bfa082bb40239d2f39",
"size": "11959",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtime/support.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "911"
},
{
"name": "Python",
"bytes": "86530"
},
{
"name": "Shell",
"bytes": "8952"
},
{
"name": "Starlark",
"bytes": "25116"
}
],
"symlink_target": ""
}
|
# -*- coding: utf-8 -*-
# ProjectEuler/src/python/problem290.py
#
# Digital Signature
# =================
# Published on Friday, 30th April 2010, 05:00 pm
#
# How many integers 0 <= n < 10^18 have the property that the sum of the digits
# of n equals the sum of the digits of 137n?
import projecteuler as pe
def main():
pass
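# A brute-force sketch for small bounds (illustrative only; the actual limit
# of 10**18 calls for a digit-DP over digit-sum state, not enumeration):
def digit_sum(n):
    return sum(int(d) for d in str(n))
def count_naive(limit):
    # Count n in [0, limit) with digit_sum(n) == digit_sum(137 * n).
    return sum(1 for n in range(limit) if digit_sum(n) == digit_sum(137 * n))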
if __name__ == "__main__":
main()
|
{
"content_hash": "68c1990611fd61aea91106e0e7f7e928",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 79,
"avg_line_length": 21.41176470588235,
"alnum_prop": 0.6263736263736264,
"repo_name": "olduvaihand/ProjectEuler",
"id": "1ed0cd2e65d4f91e2188463cf2370a0ebd1e6724",
"size": "366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/problem290.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "0"
},
{
"name": "Python",
"bytes": "422751"
}
],
"symlink_target": ""
}
|
class Event():
theme = ''
def __init__(self, theme, data):
self.theme = theme
self.data = data
EventBox = list()
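# Shared module-level queue: Publisher.sendEvent() appends Event objects here
# and Subscriber.process() drains the ones it has handlers for.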
class Publisher():
def __init__(self):
        self.register(EventBox)
def send(self, theme, data):
self.sendEvent(Event(theme, data))
def sendEvent(self, event):
self.box.append(event)
    def register(self, box):
self.box = box
class Subscriber():
def __init__(self):
self.boxes = list()
self.handlers = dict()
self.subscribe(EventBox)
def subscribe(self, box):
self.box = box
def register(self, eventTheme, fun):
self.handlers[eventTheme] = fun
def process(self):
dispatched = list()
for event in self.box:
if event.theme in self.handlers.keys():
handler = self.handlers[event.theme]
handler(event)
dispatched.append(event)
for e in dispatched:
self.box.remove(e)
if __name__ == '__main__':
p = Publisher()
s = Subscriber()
s.register('T', lambda e: print(e.data))
p.send(Event('T', 1))
p.send(Event('T', 4))
s.process()
p.send(Event('T', 5))
s.process()
|
{
"content_hash": "9942c99951acf74cf8dba3f90ccb3bf2",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 52,
"avg_line_length": 20.85,
"alnum_prop": 0.5331734612310152,
"repo_name": "anokata/pythonPetProjects",
"id": "6ac850a1ec8fa33da354168906a03fd5b48a102a",
"size": "1279",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "surgame/src/eventSystem.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6467"
},
{
"name": "HTML",
"bytes": "56632"
},
{
"name": "JavaScript",
"bytes": "603"
},
{
"name": "Makefile",
"bytes": "889"
},
{
"name": "Python",
"bytes": "840906"
},
{
"name": "Shell",
"bytes": "2407"
},
{
"name": "TSQL",
"bytes": "1299"
}
],
"symlink_target": ""
}
|
import json
import os
import datetime
import time
import random
from copy import deepcopy
from bson import ObjectId, json_util
from itertools import chain
from flask_admin import BaseView, expose
from flask_babel import gettext as _
from flask_login import current_user
from mongoengine.queryset import Q
from flask import request, current_app, redirect, jsonify, Response, \
Markup, flash, url_for, make_response
import application.models as Models
from application.controllers.admin import AdminView
from application.extensions import admin
import application.services.jobs as Jobs
from application.utils import Pagination, format_date
from configs.config import TEMPLATE_DIR
num_per_page = 50
delay_status_by_date = {
    'PAYMENT_RECEIVED': 3,
'PROCESSING': 1,
'SHIPPING': 5,
'PORT_ARRIVED': 4,
}
def to_json(lo):
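    # Flatten a Logistic document, its order, and its entries into a plain
    # dict that json_util can serialize for the admin UI.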
dt = {}
dt['id'] = str(lo.id)
dt['is_closed'] = lo.is_closed
dt['close_reason'] = lo.close_reason
dt['created_at'] = lo.created_at
dt['detail'] = lo.detail.to_mongo()
dt['detail']['partner'] = (lambda p: p and p.name)(lo.detail.partner)
dt['address'] = lo.order.address.to_json()
dt['order_id'] = lo.order.short_id
dt['logistic_provider'] = lo.order.logistic_provider
dt['entries'] = [entry_to_json(entry) for entry in lo.entries]
dt['estimated_weight'] = lo.estimated_weight
dt['returned_entries'] = [entry_to_json(entry) for entry in lo.returned_entries]
return dt
def entry_to_json(entry):
dt = {}
dt['id'] = str(entry.id)
dt['item'] = entry.item_snapshot.to_mongo()
dt['spec'] = entry.item_spec_snapshot.to_mongo()
    try:
        dt['item']['weight'] = entry.item_snapshot.weight
    except Exception:
        pass
    try:
        dt['item']['title_en'] = entry.item_snapshot.title_en
    except Exception:
        pass
dt['amount_usd'] = entry.amount_usd
dt['amount'] = entry.amount
dt['quantity'] = entry.quantity
dt['unit_price'] = entry.unit_price
dt['created_at'] = entry.created_at
dt['remark'] = entry.remark
dt['shipping_info'] = entry.shipping_info
return dt
def restruct_query(data):
    parse_date = lambda d: datetime.datetime.strptime(d, '%Y-%m-%dT%H:%M:%S.%fZ')
    status = data.get('status')
    query = {}
    for k,v in data.items():
        if v in [None, u"None", "", "null"]: continue
        if k[-3:] == '_no':
            query.update({'detail__%s'%k: v})
        elif k in ['status']:
            query.update({'detail__%s'%k: v})
        elif k == 'start':
            if status:
                date_field = Models.LogisticDetail.attr_by_log_stat[status]
                query.update({'detail__%s__gte' % date_field: parse_date(v)})
            else:
                query.update({'created_at__gte': parse_date(v)})
        elif k == 'end':
            if status:
                date_field = Models.LogisticDetail.attr_by_log_stat[status]
                query.update({'detail__%s__lt' % date_field: parse_date(v)})
            else:
                query.update({'created_at__lt': parse_date(v)})
elif k == 'query':
if v.startswith('MB'):
query.update({'detail__partner_tracking_no': v})
elif ObjectId.is_valid(v):
query.update({'id': v})
else:
query.update({'tracking_no': v})
elif k == 'partner':
partner = Models.Partner.objects(name=v).first()
query.update({'detail__partner': partner})
elif k == 'channel':
query.update({'detail__channel': v})
else:
query.update({'%s'%k: v})
return query
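# Minimal sketch of the mapping restruct_query performs; 'carrier_no'
# here is a hypothetical key used only to illustrate the '_no' rule:
#   restruct_query({'status': 'SHIPPING', 'carrier_no': 'XYZ'})
#   -> {'detail__status': 'SHIPPING', 'detail__carrier_no': 'XYZ'}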
class N(AdminView):
_permission = 'logistic'
@expose('/', methods = ['GET', 'POST', 'DELETE', 'PATCH'])
def index(self, status="ALL"):
def render_tpml(status):
return make_response(open(os.path.join(
TEMPLATE_DIR, 'admin/logistic/index.html')).read())
def render_json(lid):
return jsonify(message="OK")
return request.is_xhr and {
'GET': lambda f: render_json(f.get('id')),
}[request.method](request.form) or render_tpml(status)
@expose("/logistics", methods=["GET"])
def logistics(self):
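        # Pagination arrives as an HTTP Range header of the form
        # "<start>-<end>" (items) and is echoed back through the
        # Accept-Range / Content-Range response headers below.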
items_range = request.headers.get('Range', "0-9")
start, end = items_range.split('-')
per_page = int(end)-int(start)+1
query = restruct_query(request.args)
tracking_no = query.pop("tracking_no", "")
include_closed = query.get('include_closed') and query.pop('include_closed')
try:
if include_closed:
los = Models.Logistic.objects(**query)
else:
los = Models.Logistic.objects(is_closed=False, **query)
if tracking_no:
los = los.filter(Q(detail__us_tracking_no=tracking_no) | Q(detail__cn_tracking_no=tracking_no))
if request.args.get('status'):
los = los.order_by('detail__%s' %
Models.LogisticDetail.attr_by_log_stat[request.args.get('status')])
        except Exception:
            pass
if query.get('receiver'):
addrs = Models.Address.objects(receiver=query.get('receiver')).distinct('id')
orders = Models.Order.commodities(address__in=addrs)
los = list(chain.from_iterable(order.logistics for order in orders))
if query.get('order_id'):
orders = Models.Order.commodities(short_id=int(query.get('order_id')))
los = list(chain.from_iterable(order.logistics for order in orders))
try:
los_size = los.count()
        except Exception:
            los_size = len(los)
data = los[int(start): int(end)]
data = [to_json(l) for l in data]
resp = make_response(json_util.dumps(data), 200)
resp.headers['Accept-Range'] = 'items'
resp.headers['Content-Range'] = '%s-%s/%s'% (start, end, los_size)
resp.headers['Content-Type'] = 'application/json'
return resp
@expose("/logistics_delay/<status>/<delay_type>", methods=["GET"])
@expose("/logistics_delay/<status>/", methods=["GET"])
@expose("/logistics_delay/", methods=["GET"])
def logistics_delay(self, status=None, delay_type=None):
utcnow = datetime.datetime.utcnow()
if status:
items_range = request.headers.get('Range', "0-9")
start, end = items_range.split('-')
per_page = int(end)-int(start)+1
query = restruct_query(request.args)
tracking_no = query.pop("tracking_no", "")
date_field = Models.LogisticDetail.attr_by_log_stat[status]
delay_days = datetime.timedelta(days=delay_status_by_date[status])
query.update({
'detail__%s__lt' % date_field: utcnow - delay_days,
'detail__status': status,
})
los = Models.Logistic.objects(is_closed=False, **query).order_by('detail__%s' %
date_field)
if tracking_no:
los = los.filter(Q(detail__us_tracking_no=tracking_no) | Q(detail__cn_tracking_no=tracking_no))
if delay_type:
los = los.filter(detail__delay_details__reason__contains=delay_type)
data = los[int(start): int(end)]
data = [to_json(l) for l in data]
resp = make_response(json_util.dumps(data), 200)
resp.headers['Accept-Range'] = 'items'
resp.headers['Content-Range'] = '%s-%s/%s'% (start, end, los.count())
resp.headers['Content-Type'] = 'application/json'
return resp
data = {}
for status in ["PAYMENT_RECEIVED", 'PROCESSING', 'SHIPPING', "PORT_ARRIVED"]:
los = Models.Logistic.objects(is_closed=False)
date_field = Models.LogisticDetail.attr_by_log_stat[status]
delay_days = datetime.timedelta(days=delay_status_by_date[status])
query = {
'detail__%s__lt' % date_field: utcnow - delay_days,
'detail__status': status,
}
count = los.filter(**query).count()
data.update({status: count})
return jsonify(results=data)
@expose("/logistics_irregular/<process_status>/<irr_type>", methods=["GET"])
@expose("/logistics_irregular/<process_status>/", methods=["GET"])
@expose("/logistics_irregular", methods=["GET"])
def logistics_irregular(self, process_status=None, irr_type=None):
utcnow = datetime.datetime.utcnow()
if process_status:
items_range = request.headers.get('Range', "0-9")
start, end = items_range.split('-')
query = restruct_query(request.args)
tracking_no = query.pop('tracking_no', '')
los = Models.Logistic.objects(
detail__irregular_details__process_status=process_status,
**query).order_by('-detail.irregular_details.created_at')
if irr_type:
los = los.filter(detail__irregular_details__irr_type=irr_type).order_by('-detail.irregular_details.created_at')
if tracking_no:
los = los.filter(Q(detail__us_tracking_no=tracking_no) | Q(detail__cn_tracking_no=tracking_no))
data = los[int(start): int(end)]
data = [to_json(l) for l in data]
resp = make_response(json_util.dumps(data), 200)
resp.headers['Accept-Range'] = 'items'
resp.headers['Content-Range'] = '%s-%s/%s'% (start, end, los.count())
resp.headers['Content-Type'] = 'application/json'
return resp
data = {}
for status in ["WAITING_PROCESS", "PROCESSING", "PROCESSED"]:
los = Models.Logistic.objects(detail__irregular_details__process_status=status)
data.update({status: los.count()})
return jsonify(results=data)
@expose("/update", methods=["PUT"])
def update(self):
query = request.get_json()
dt = {}
for k,v in query.items():
if v in [None, u"None", "", "null"]: continue
if 'date' in k:
val = datetime.datetime.strptime(v, '%Y-%m-%d')
elif k.startswith('real'):
val = float(v)
elif k == 'partner':
val = Models.Partner.objects(name=v).first()
elif k == 'irregularity':
val = Models.LogisticIrregular(irr_at_status=v.get('status'),
irr_type=v.get('type'),
reason=v.get('reason'),
desc=v.get('desc'))
else:
val = v.strip()
dt.update({k:val})
try:
lo = Models.Logistic.objects.get(id=dt.pop('lid'))
lo.update_logistic(dt)
return jsonify(message="OK",
remarks=lo.detail.remarks,
delays=lo.detail.delay_details,
irregularities=lo.detail.irregular_details)
except Exception as e:
            return jsonify(message="Failed", desc=str(e))
@expose("/update_delay", methods=["PUT"])
def update_delay(self):
query = request.get_json()
try:
lo = Models.Logistic.objects.get(id=query['lid'])
delays = lo.detail.delay_details.filter(status=query['status'])
delays.update(is_done=query['is_done'])
lo.save()
return jsonify(message="OK")
except Exception as e:
            return jsonify(message="Failed", desc=str(e))
@expose("/update_irr_step", methods=["PUT"])
def update_irr_step(self):
query = request.get_json()
dt = {}
for k,v in query.items():
dt.update({k:v})
try:
lo = Models.Logistic.objects.get(id=dt['lid'])
irregular = lo.detail.irregular_details.filter(irr_type=dt['irr_type']).first()
irregular.steps = dt['solutions']
lo.save()
return jsonify(message="OK", irr_detail=irregular)
except Exception as e:
            return jsonify(message="Failed", desc=str(e))
@expose("/set_irr_done", methods=["PUT"])
def set_irr_done(self):
query = request.get_json()
dt = {}
for k,v in query.items():
dt.update({k:v})
try:
lo = Models.Logistic.objects.get(id=dt['lid'])
irregular = lo.detail.irregular_details.filter(irr_type=dt['irr_type']).first()
irregular.process_status = dt['process_status']
lo.save()
return jsonify(message="OK", irr_detail=irregular)
except Exception as e:
            return jsonify(message="Failed", desc=str(e))
@expose("/update_irr_remark", methods=["PUT"])
def update_irr_remark(self):
query = request.get_json()
dt = {}
for k,v in query.items():
dt.update({k:v})
try:
lo = Models.Logistic.objects.get(id=dt['lid'])
irregular = lo.detail.irregular_details.filter(irr_type=dt['irr_type']).first()
remark = Models.LogisticRemark(content=dt['irr_remark'], creator=current_user.name)
irregular.remarks.append(remark)
lo.save()
return jsonify(message="OK", irr_detail=irregular)
except Exception as e:
            return jsonify(message="Failed", desc=str(e))
@expose("/merge", methods=["POST"])
def merge(self):
lids = request.json.get('lids')
if not lids:
return jsonify(message="Failed", desc="error~~~")
los = [Models.Logistic.objects(id=lid).first() for lid in lids]
        if len(los) < 2:
            return jsonify(message="Failed", desc="please select at least 2 logistics")
start = 0
for index in range(len(los)-1):
if los[index+1].detail.cn_tracking_no != \
los[start].detail.cn_tracking_no or \
los[index+1].order != los[0].order:
return jsonify(message="Failed", desc="CTN and OrderID should be the same")
for index in range(len(los)-1):
            los[index+1].entries.extend(los[index].entries)
los[index].entries = []
los[index].save()
los[index].close(
'merged with %s' %
los[index+1].id, datetime.datetime.utcnow()
)
los[index+1].save()
if index+1 == len(los)-1:
comment = Models.LogisticRemark(
content=u"合并单", creator=current_user.name
)
los[index+1].detail.remarks.append(comment)
los[index+1].save()
return jsonify(message="OK", lid=str(los[index+1].id))
@expose("/split_entries", methods=["POST"])
def split_entries(self):
entries = request.json.get('selected')
if not entries:
return jsonify(message="Failed", desc="Please select entries!")
lids = []
entry_ids = []
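        # Each selected value is encoded as "entry_id:logistic_id".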
for l in entries:
c = l.split(':')
lids.append(c[1])
entry_ids.append(c[0])
los = [Models.Logistic.objects(id=lid).first() for lid in set(lids)]
e_lst = []
for i in entry_ids:
e = Models.OrderEntry.objects(id=str(i)).first()
e_lst.append(e)
        entries_groups = [[e for e in e_lst if e in lo.entries] for lo in los]
for lo, lst in zip(los, entries_groups):
lo.fork_by_entries([e.id for e in lst])
return jsonify(message="OK", oid=lo.order.short_id)
@expose('/split_quantity', methods=['POST'])
def split_quantity(self):
lid = request.json.get('lid')
eid = request.json.get('eid')
quantity = request.json.get('quantity')
lo = Models.Logistic.objects(id=lid).first()
entry = Models.OrderEntry.objects(id=eid).first()
        if entry and lo and entry.quantity > 1 and entry.quantity - int(quantity) >= 1:
entry.quantity -= int(quantity)
entry.update_snapshot()
entry.update_amount()
new_entry = deepcopy(entry)
new_entry.__class__ = Models.OrderEntry
new_entry.id = None
new_entry.quantity = int(quantity)
new_entry.update_snapshot()
new_entry.update_amount()
new_entry.save()
lo.entries.append(new_entry)
lo.save()
order = lo.order
order.entries.append(new_entry)
order.save()
else:
return jsonify(message="Failed", desc="quantity error~~~~~~")
return jsonify(message="OK", entries=[json.loads(json_util.dumps(entry_to_json(entry))) for entry in lo.entries])
@expose('/download', methods=["GET"])
def download(self):
FIELDS = [u"包裹ID", u'IMG No', u'CTN', u"下单日期", u"订单ID",u'订单短号', u'收件人', u'手机号', u'合作物流商', u'remark',u"下单备注", u"估重", u"渠道"]
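        # Note: generate() below yields more columns per row than FIELDS
        # names, so the trailing columns appear without headers in the
        # exported CSV.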
now = datetime.datetime.now()
status = request.args.get('status')
query = restruct_query(request.args)
delay_export = query.get('delay_export') and query.pop('delay_export')
delay_type = query.get('delay_type') and query.pop('delay_type')
try:
los = Models.Logistic.objects(is_closed=False, **query)
if status:
los = los.order_by('detail__%s' %
Models.LogisticDetail.attr_by_log_stat[status])
        except Exception:
            pass
if delay_export:
date_field = Models.LogisticDetail.attr_by_log_stat[status]
delay_days = datetime.timedelta(days=delay_status_by_date[status])
query = {
'detail__%s__lt' % date_field: datetime.datetime.utcnow() - delay_days,
'detail__status': status,
}
los = los.filter(**query).order_by('detail__%s' %
date_field)
if delay_type:
los = los.filter(detail__delay_details__reason__contains=delay_type)
if query.get('receiver'):
addrs = Models.Address.objects(receiver=query.get('receiver')).distinct('id')
orders = Models.Order.commodities(address__in=addrs)
los = list(chain.from_iterable(order.logistics for order in orders))
if query.get('order_id'):
orders = Models.Order.commodities(short_id=int(query.get('order_id')))
los = list(chain.from_iterable(order.logistics for order in orders))
def generate():
yield ','.join(st for st in FIELDS) + '\n'
for log in los:
yield ','.join([
str(log.id),
log.detail.partner_tracking_no,
log.detail.carrier_tracking_no,
log.detail.cn_tracking_no,
log.detail.cn_logistic_name,
format_date(log.detail.payment_received_date),
str(log.order.id),
str(log.order.short_id),
log.order.address.receiver,
log.order.address.mobile_number,
format_date(log.detail.processing_date),
format_date(log.detail.shipping_date),
format_date(log.detail.port_arrived_date),
format_date(log.detail.received_date),
format_date(log.detail.modified),
log.detail.partner.name if log.detail.partner else '',
'; '.join([r.content for r in log.detail.remarks]),
log.detail.extra or '',
str(log.estimated_weight),
log.detail.channel,
]) + '\n'
return Response(generate(),
mimetype="text/csv",
headers={
"Content-Disposition":
"attachment;filename=%s %s.csv" % (format_date(now,'%Y-%m-%d'),'dumps_file')
}
)
@expose('/partner', methods=["GET"])
def partner(self):
partners = Models.Partner.objects().distinct('name')
return jsonify(results=partners, message="OK")
@expose('/close/<lid>', methods=['GET'])
def close(self, lid):
lo = Models.Logistic.objects(id=lid).first()
lo.close("Closed By %s" % current_user.name)
return jsonify(message="OK")
@expose('/logs/<ltype>/<lid>', methods=['GET'])
def logs(self, ltype, lid):
if ltype == 'express':
logs = Models.Logistic.objects(id=lid).first().express_tracking
return self.render('admin/logistic/express.html', logs=logs)
elif ltype == 'logistic':
logs = Models.LogisticLog.objects(logistic_id=lid, log_type__ne='API')
user = lambda i: getattr(Models.User.objects(id=i).first(), 'name', '') if i and i != 'system' else i
return self.render('admin/logistic/logs.html', logs=logs, user=user)
elif ltype == 'print':
lo = Models.Logistic.objects(id=lid).first()
if lo.is_closed:
return Response('this logistics id has been closed.')
return self.render('admin/logistic/print_page.html', lo=lo)
@expose('/refresh/<company>/<number>', methods=['GET'])
def refresh(self, company, number):
Jobs.express.kuaidi_request(company, number)
return jsonify(message="OK")
@expose('/back_status', methods=['GET'])
def back_status(self):
lid = request.args.get('lid')
status = request.args.get('status')
l = Models.Logistic.objects(id=lid).first()
l.detail.status = status
setattr(l.detail, Models.LogisticDetail.attr_by_log_stat[status],
datetime.datetime.utcnow())
l.save()
order = l.order
order.update_logistic_status()
return jsonify(message="OK")
admin.add_view(N(name=_('Logistics Backend'), category='Logistics', menu_icon_type="fa", menu_icon_value="truck"))
|
{
"content_hash": "2c9edab57bbb800b2e6be6eae6d937da",
"timestamp": "",
"source": "github",
"line_count": 574,
"max_line_length": 130,
"avg_line_length": 38.893728222996515,
"alnum_prop": 0.5512653975363941,
"repo_name": "seasonstar/bibi",
"id": "184f1e18683f08619f4988c9da9da9f5c4d9f35d",
"size": "22417",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/controllers/admin/order/logistic.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "24139"
},
{
"name": "CSS",
"bytes": "278402"
},
{
"name": "HTML",
"bytes": "227750"
},
{
"name": "JavaScript",
"bytes": "2720066"
},
{
"name": "PHP",
"bytes": "20139"
},
{
"name": "Python",
"bytes": "375043"
}
],
"symlink_target": ""
}
|
"""Recurrent layers backed by cuDNN.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.framework import constant_op
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.layers.recurrent import RNN
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_cudnn_rnn_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.util.tf_export import tf_export
class _CuDNNRNN(RNN):
"""Private base class for CuDNNGRU and CuDNNLSTM layers.
Arguments:
return_sequences: Boolean. Whether to return the last output
in the output sequence, or the full sequence.
return_state: Boolean. Whether to return the last state
in addition to the output.
go_backwards: Boolean (default False).
If True, process the input sequence backwards and return the
reversed sequence.
stateful: Boolean (default False). If True, the last state
for each sample at index i in a batch will be used as initial
state for the sample of index i in the following batch.
time_major: Boolean (default False). If true, the inputs and outputs will be
in shape `(timesteps, batch, ...)`, whereas in the False case, it will
be `(batch, timesteps, ...)`.
"""
def __init__(self,
return_sequences=False,
return_state=False,
go_backwards=False,
stateful=False,
time_major=False,
**kwargs):
# We invoke the base layer's initializer directly here because we do not
# want to create RNN cell instance.
super(RNN, self).__init__(**kwargs) # pylint: disable=bad-super-call
self.return_sequences = return_sequences
self.return_state = return_state
self.go_backwards = go_backwards
self.stateful = stateful
self.time_major = time_major
self.supports_masking = False
self.input_spec = [InputSpec(ndim=3)]
if hasattr(self.cell.state_size, '__len__'):
state_size = self.cell.state_size
else:
state_size = [self.cell.state_size]
self.state_spec = [InputSpec(shape=(None, dim)) for dim in state_size]
self.constants_spec = None
self._states = None
self._num_constants = None
self._num_inputs = None
self._vector_shape = constant_op.constant([-1])
def _canonical_to_params(self, weights, biases):
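    # Flatten each per-gate weight and bias into a 1-D tensor and
    # concatenate them: the cudnn_rnn op consumes a single opaque
    # `params` buffer rather than individual variables.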
weights = [array_ops.reshape(x, self._vector_shape) for x in weights]
biases = [array_ops.reshape(x, self._vector_shape) for x in biases]
return array_ops.concat(weights + biases, axis=0)
def call(self, inputs, mask=None, training=None, initial_state=None):
if isinstance(mask, list):
mask = mask[0]
if mask is not None:
raise ValueError('Masking is not supported for CuDNN RNNs.')
# input shape: `(samples, time (padded with zeros), input_dim)`
# note that the .build() method of subclasses MUST define
# self.input_spec and self.state_spec with complete input shapes.
if isinstance(inputs, list):
initial_state = inputs[1:]
inputs = inputs[0]
elif initial_state is not None:
pass
elif self.stateful:
initial_state = self.states
else:
initial_state = self.get_initial_state(inputs)
if len(initial_state) != len(self.states):
raise ValueError('Layer has ' + str(len(self.states)) +
' states but was passed ' + str(len(initial_state)) +
' initial states.')
if self.go_backwards:
# Reverse time axis.
inputs = K.reverse(inputs, 1)
output, states = self._process_batch(inputs, initial_state)
if self.stateful:
updates = []
for i in range(len(states)):
updates.append(state_ops.assign(self.states[i], states[i]))
self.add_update(updates, inputs)
if self.return_state:
return [output] + states
else:
return output
def get_config(self):
config = {
'return_sequences': self.return_sequences,
'return_state': self.return_state,
'go_backwards': self.go_backwards,
'stateful': self.stateful,
'time_major': self.time_major,
}
base_config = super( # pylint: disable=bad-super-call
RNN, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config):
return cls(**config)
@property
def trainable_weights(self):
if self.trainable and self.built:
return [self.kernel, self.recurrent_kernel, self.bias]
return []
@property
def non_trainable_weights(self):
if not self.trainable and self.built:
return [self.kernel, self.recurrent_kernel, self.bias]
return []
@property
def losses(self):
return super(RNN, self).losses
def get_losses_for(self, inputs=None):
return super( # pylint: disable=bad-super-call
RNN, self).get_losses_for(inputs=inputs)
@tf_export('keras.layers.CuDNNGRU')
class CuDNNGRU(_CuDNNRNN):
"""Fast GRU implementation backed by cuDNN.
More information about cuDNN can be found on the [NVIDIA
developer website](https://developer.nvidia.com/cudnn).
Can only be run on GPU.
Arguments:
units: Positive integer, dimensionality of the output space.
kernel_initializer: Initializer for the `kernel` weights matrix, used for
the linear transformation of the inputs.
recurrent_initializer: Initializer for the `recurrent_kernel` weights
matrix, used for the linear transformation of the recurrent state.
bias_initializer: Initializer for the bias vector.
kernel_regularizer: Regularizer function applied to the `kernel` weights
matrix.
recurrent_regularizer: Regularizer function applied to the
`recurrent_kernel` weights matrix.
bias_regularizer: Regularizer function applied to the bias vector.
activity_regularizer: Regularizer function applied to the output of the
layer (its "activation").
kernel_constraint: Constraint function applied to the `kernel` weights
matrix.
recurrent_constraint: Constraint function applied to the
`recurrent_kernel` weights matrix.
bias_constraint: Constraint function applied to the bias vector.
return_sequences: Boolean. Whether to return the last output in the output
sequence, or the full sequence.
return_state: Boolean. Whether to return the last state in addition to the
output.
go_backwards: Boolean (default False). If True, process the input sequence
backwards and return the reversed sequence.
stateful: Boolean (default False). If True, the last state for each sample
at index i in a batch will be used as initial state for the sample of
index i in the following batch.
"""
def __init__(self,
units,
kernel_initializer='glorot_uniform',
recurrent_initializer='orthogonal',
bias_initializer='zeros',
kernel_regularizer=None,
recurrent_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
recurrent_constraint=None,
bias_constraint=None,
return_sequences=False,
return_state=False,
go_backwards=False,
stateful=False,
**kwargs):
self.units = units
cell_spec = collections.namedtuple('cell', 'state_size')
self._cell = cell_spec(state_size=self.units)
super(CuDNNGRU, self).__init__(
return_sequences=return_sequences,
return_state=return_state,
go_backwards=go_backwards,
stateful=stateful,
**kwargs)
self.kernel_initializer = initializers.get(kernel_initializer)
self.recurrent_initializer = initializers.get(recurrent_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.activity_regularizer = regularizers.get(activity_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.recurrent_constraint = constraints.get(recurrent_constraint)
self.bias_constraint = constraints.get(bias_constraint)
@property
def cell(self):
return self._cell
def build(self, input_shape):
super(CuDNNGRU, self).build(input_shape)
if isinstance(input_shape, list):
input_shape = input_shape[0]
input_dim = int(input_shape[-1])
self.kernel = self.add_weight(
shape=(input_dim, self.units * 3),
name='kernel',
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint)
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units * 3),
name='recurrent_kernel',
initializer=self.recurrent_initializer,
regularizer=self.recurrent_regularizer,
constraint=self.recurrent_constraint)
self.bias = self.add_weight(
shape=(self.units * 6,),
name='bias',
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint)
self.built = True
def _process_batch(self, inputs, initial_state):
if not self.time_major:
inputs = array_ops.transpose(inputs, perm=(1, 0, 2))
input_h = initial_state[0]
input_h = array_ops.expand_dims(input_h, axis=0)
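    # Keras lays GRU kernels out in gate order (z, r, h) while cuDNN
    # expects (r, z, h), hence the reshuffled slices below; the biases
    # are reordered the same way for both input and recurrent halves.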
params = self._canonical_to_params(
weights=[
self.kernel[:, self.units:self.units * 2],
self.kernel[:, :self.units],
self.kernel[:, self.units * 2:],
self.recurrent_kernel[:, self.units:self.units * 2],
self.recurrent_kernel[:, :self.units],
self.recurrent_kernel[:, self.units * 2:],
],
biases=[
self.bias[self.units:self.units * 2],
self.bias[:self.units],
self.bias[self.units * 2:self.units * 3],
self.bias[self.units * 4:self.units * 5],
self.bias[self.units * 3:self.units * 4],
self.bias[self.units * 5:],
],
)
outputs, h, _, _ = gen_cudnn_rnn_ops.cudnn_rnn(
inputs,
input_h=input_h,
input_c=0,
params=params,
is_training=True,
rnn_mode='gru')
if self.stateful or self.return_state:
h = h[0]
if self.return_sequences:
if self.time_major:
output = outputs
else:
output = array_ops.transpose(outputs, perm=(1, 0, 2))
else:
output = outputs[-1]
return output, [h]
def get_config(self):
config = {
'units': self.units,
'kernel_initializer': initializers.serialize(self.kernel_initializer),
'recurrent_initializer':
initializers.serialize(self.recurrent_initializer),
'bias_initializer': initializers.serialize(self.bias_initializer),
'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
'recurrent_regularizer':
regularizers.serialize(self.recurrent_regularizer),
'bias_regularizer': regularizers.serialize(self.bias_regularizer),
'activity_regularizer':
regularizers.serialize(self.activity_regularizer),
'kernel_constraint': constraints.serialize(self.kernel_constraint),
'recurrent_constraint':
constraints.serialize(self.recurrent_constraint),
'bias_constraint': constraints.serialize(self.bias_constraint)
}
base_config = super(CuDNNGRU, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@tf_export('keras.layers.CuDNNLSTM')
class CuDNNLSTM(_CuDNNRNN):
"""Fast LSTM implementation backed by cuDNN.
More information about cuDNN can be found on the [NVIDIA
developer website](https://developer.nvidia.com/cudnn).
Can only be run on GPU.
Arguments:
units: Positive integer, dimensionality of the output space.
kernel_initializer: Initializer for the `kernel` weights matrix, used for
the linear transformation of the inputs.
unit_forget_bias: Boolean. If True, add 1 to the bias of the forget gate
at initialization. Setting it to true will also force
`bias_initializer="zeros"`. This is recommended in [Jozefowicz et
al.](http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf)
recurrent_initializer: Initializer for the `recurrent_kernel` weights
matrix, used for the linear transformation of the recurrent state.
bias_initializer: Initializer for the bias vector.
kernel_regularizer: Regularizer function applied to the `kernel` weights
matrix.
recurrent_regularizer: Regularizer function applied to the
`recurrent_kernel` weights matrix.
bias_regularizer: Regularizer function applied to the bias vector.
activity_regularizer: Regularizer function applied to the output of the
layer (its "activation").
kernel_constraint: Constraint function applied to the `kernel` weights
matrix.
recurrent_constraint: Constraint function applied to the
`recurrent_kernel` weights matrix.
bias_constraint: Constraint function applied to the bias vector.
    return_sequences: Boolean. Whether to return the last output in the
output sequence, or the full sequence.
return_state: Boolean. Whether to return the last state in addition to the
output.
go_backwards: Boolean (default False). If True, process the input sequence
backwards and return the reversed sequence.
stateful: Boolean (default False). If True, the last state for each sample
at index i in a batch will be used as initial state for the sample of
index i in the following batch.
"""
def __init__(self,
units,
kernel_initializer='glorot_uniform',
recurrent_initializer='orthogonal',
bias_initializer='zeros',
unit_forget_bias=True,
kernel_regularizer=None,
recurrent_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
recurrent_constraint=None,
bias_constraint=None,
return_sequences=False,
return_state=False,
go_backwards=False,
stateful=False,
**kwargs):
self.units = units
cell_spec = collections.namedtuple('cell', 'state_size')
self._cell = cell_spec(state_size=(self.units, self.units))
super(CuDNNLSTM, self).__init__(
return_sequences=return_sequences,
return_state=return_state,
go_backwards=go_backwards,
stateful=stateful,
**kwargs)
self.kernel_initializer = initializers.get(kernel_initializer)
self.recurrent_initializer = initializers.get(recurrent_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.unit_forget_bias = unit_forget_bias
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.activity_regularizer = regularizers.get(activity_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.recurrent_constraint = constraints.get(recurrent_constraint)
self.bias_constraint = constraints.get(bias_constraint)
@property
def cell(self):
return self._cell
def build(self, input_shape):
super(CuDNNLSTM, self).build(input_shape)
if isinstance(input_shape, list):
input_shape = input_shape[0]
input_dim = int(input_shape[-1])
self.kernel = self.add_weight(
shape=(input_dim, self.units * 4),
name='kernel',
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint)
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units * 4),
name='recurrent_kernel',
initializer=self.recurrent_initializer,
regularizer=self.recurrent_regularizer,
constraint=self.recurrent_constraint)
if self.unit_forget_bias:
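      # cuDNN keeps separate input and recurrent biases: 8 blocks of
      # `units` each, in the same (i, f, c, o) gate order as the kernel,
      # so the ones below land on the recurrent forget-gate block.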
def bias_initializer(_, *args, **kwargs):
return array_ops.concat([
self.bias_initializer((self.units * 5,), *args, **kwargs),
initializers.Ones()((self.units,), *args, **kwargs),
self.bias_initializer((self.units * 2,), *args, **kwargs),
], axis=0)
else:
bias_initializer = self.bias_initializer
self.bias = self.add_weight(
shape=(self.units * 8,),
name='bias',
initializer=bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint)
self.built = True
def _process_batch(self, inputs, initial_state):
if not self.time_major:
inputs = array_ops.transpose(inputs, perm=(1, 0, 2))
input_h = initial_state[0]
input_c = initial_state[1]
input_h = array_ops.expand_dims(input_h, axis=0)
input_c = array_ops.expand_dims(input_c, axis=0)
params = self._canonical_to_params(
weights=[
self.kernel[:, :self.units],
self.kernel[:, self.units:self.units * 2],
self.kernel[:, self.units * 2:self.units * 3],
self.kernel[:, self.units * 3:],
self.recurrent_kernel[:, :self.units],
self.recurrent_kernel[:, self.units:self.units * 2],
self.recurrent_kernel[:, self.units * 2:self.units * 3],
self.recurrent_kernel[:, self.units * 3:],
],
biases=[
self.bias[:self.units],
self.bias[self.units:self.units * 2],
self.bias[self.units * 2:self.units * 3],
self.bias[self.units * 3:self.units * 4],
self.bias[self.units * 4:self.units * 5],
self.bias[self.units * 5:self.units * 6],
self.bias[self.units * 6:self.units * 7],
self.bias[self.units * 7:],
],
)
outputs, h, c, _ = gen_cudnn_rnn_ops.cudnn_rnn(
inputs,
input_h=input_h,
input_c=input_c,
params=params,
is_training=True)
if self.stateful or self.return_state:
h = h[0]
c = c[0]
if self.return_sequences:
if self.time_major:
output = outputs
else:
output = array_ops.transpose(outputs, perm=(1, 0, 2))
else:
output = outputs[-1]
return output, [h, c]
def get_config(self):
config = {
'units': self.units,
'kernel_initializer': initializers.serialize(self.kernel_initializer),
'recurrent_initializer':
initializers.serialize(self.recurrent_initializer),
'bias_initializer': initializers.serialize(self.bias_initializer),
'unit_forget_bias': self.unit_forget_bias,
'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
'recurrent_regularizer':
regularizers.serialize(self.recurrent_regularizer),
'bias_regularizer': regularizers.serialize(self.bias_regularizer),
'activity_regularizer':
regularizers.serialize(self.activity_regularizer),
'kernel_constraint': constraints.serialize(self.kernel_constraint),
'recurrent_constraint':
constraints.serialize(self.recurrent_constraint),
'bias_constraint': constraints.serialize(self.bias_constraint)
}
base_config = super(CuDNNLSTM, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
|
{
"content_hash": "debd31c89fb424db9c1a5767eeff679f",
"timestamp": "",
"source": "github",
"line_count": 525,
"max_line_length": 80,
"avg_line_length": 38.55238095238095,
"alnum_prop": 0.6507905138339921,
"repo_name": "hehongliang/tensorflow",
"id": "81f292817fd989ee0aa256ada64e09b32a79ac2b",
"size": "20929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/layers/cudnn_recurrent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7666"
},
{
"name": "C",
"bytes": "194748"
},
{
"name": "C++",
"bytes": "26947133"
},
{
"name": "CMake",
"bytes": "174938"
},
{
"name": "Go",
"bytes": "908627"
},
{
"name": "Java",
"bytes": "323804"
},
{
"name": "Jupyter Notebook",
"bytes": "1833659"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37293"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Protocol Buffer",
"bytes": "249901"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "22872386"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "336334"
}
],
"symlink_target": ""
}
|