import unittest
from Fold import Fold, clusters_to_labels
from load_data import load_files
__author__ = 'mbarnes1'
class MyTestCase(unittest.TestCase):
def setUp(self):
text, self.labels, ad_id, self.phones = load_files(max_lines=500)
self.folds = Fold(text, self.phones, self.labels, number_processes=4)
def test_folds(self):
k = 5
## MinHash ##
minhash_labels = clusters_to_labels(self.folds._minhash_clusters)
# MinHash Label Folds
label_folds = self.folds.get_minhash_labelkfolds(k)
for train_idx, test_idx in label_folds:
train_clusters = frozenset(minhash_labels[train_idx])
test_clusters = frozenset(minhash_labels[test_idx])
self.assertFalse(train_clusters.intersection(test_clusters))
# MinHash Dedup Folds
dedup_folds = self.folds.get_minhash_dedupkfolds(k)
for train_idx, test_idx in dedup_folds:
self.assertEqual(len(train_idx) + len(test_idx), len(self.folds._minhash_clusters))
train_clusters = frozenset(minhash_labels[train_idx])
test_clusters = frozenset(minhash_labels[test_idx])
self.assertFalse(train_clusters.intersection(test_clusters))
## Phones ##
phone_labels = clusters_to_labels(self.folds._phone_clusters)
        # Phone Label Folds
label_folds = self.folds.get_phone_labelkfolds(k)
for train_idx, test_idx in label_folds:
train_clusters = frozenset(phone_labels[train_idx])
test_clusters = frozenset(phone_labels[test_idx])
self.assertFalse(train_clusters.intersection(test_clusters))
        # Phone Dedup Folds
dedup_folds = self.folds.get_phone_dedupkfolds(k)
for train_idx, test_idx in dedup_folds:
self.assertEqual(len(train_idx) + len(test_idx), len(self.folds._phone_clusters))
train_clusters = frozenset(phone_labels[train_idx])
test_clusters = frozenset(phone_labels[test_idx])
self.assertFalse(train_clusters.intersection(test_clusters))
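# The invariant in all four loops above is the same: a cluster may contribute
# records to the train split or the test split of a fold, but never to both,
# so an empty intersection of cluster labels is asserted for every fold.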
|
{
"content_hash": "3ae911034bdd483710de0bb74f92057c",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 95,
"avg_line_length": 44.702127659574465,
"alnum_prop": 0.6539742979533556,
"repo_name": "benbo/QPR_CP1",
"id": "77e0719e2ac81f3c584562bf9cfcc95b542e3334",
"size": "2101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_fold.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "48046"
}
],
"symlink_target": ""
}
|
"""This module converts SuperMemo collections to Anki."""
class Converter:
"""
This class converts a dictionary of Element objects into a format
that can be imported into an Anki deck.
"""
def __init__(self, elements, path_to_media_directory):
"""
elements:
A dictionary of Element objects indexed by ID.
path_to_media_directory:
Full path to the directory that contains the
media files for the collection, e.g.:
c:/sm/systems/your_collection/elements/
The path can use either slashes or backslashes
but must include a trailing slash or backslash.
"""
self.elements = elements
self.full_path_to_media = path_to_media_directory
def get_tags(self, element):
"""
Return the tags of an element as a string.
SuperMemo organizes elements into categories but Anki uses tags to
organize facts. To convert, elements are tagged with the names of all
categories they belong to in the original collection.
"""
tags = []
while element.properties['Parent'] != '0':
parent_id = int(element.properties['Parent'])
element = self.elements[parent_id]
# Cleanup of category names. Anki uses spaces to separate tags.
tag = (element.element_info['Title']
.replace(' & ', '&')
.replace('] ', ']')
.replace(',', '')
.replace(' ', '_'))
tags.append(tag)
return ' '.join(tags[::-1])
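    # A worked sketch (hypothetical hierarchy): for an item whose parent
    # chain is "Languages" -> "Romance, Languages", the cleanup above yields
    # the tags "Languages" and "Romance_Languages", returned root-first as
    # the string "Languages Romance_Languages".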
def get_relative_path_to_media_file(self, path_to_file, path_to_media_directory):
"""
Return the relative path to a media file.
        SuperMemo stores full paths to media files, but in Anki media files
        are referenced relative to the location of the deck.
"""
path_to_file = path_to_file.replace('\\', '/')
path_to_media_directory = path_to_media_directory.replace('\\', '/')
return path_to_file.replace(path_to_media_directory, '')
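    # For instance (paths illustrative): with the media directory
    # 'c:/sm/systems/your_collection/elements/', the stored path
    # 'c:\sm\systems\your_collection\elements\audio\q1.mp3' is returned as
    # 'audio/q1.mp3'.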
def convert(self, element):
"""Return the element in a format compatible with Anki."""
question_text = element.get_question() or ''
answer_text = element.get_answer() or ''
try:
answer_sound = self.get_relative_path_to_media_file(
element.get_answer_sound(), self.full_path_to_media)
except AttributeError:
answer_sound = ''
try:
question_sound = self.get_relative_path_to_media_file(
element.get_question_sound(), self.full_path_to_media)
except AttributeError:
question_sound = ''
out = "{0}[sound:{1}]\t{2}[sound:{3}]\t{4}".format(
question_text, question_sound, answer_text, answer_sound,
self.get_tags(element))
return out
def convert_all(self):
"""Return all elements in a format compatible with Anki."""
exportable_elements = [self.convert(e) for e
in self.elements.values() if e.is_item()]
return "\n".join(exportable_elements)
class Element:
"""
This class represents a SuperMemo element such as a topic or an item.
Accessing individual pieces of information about an element:
ComponentNo -> self.properties['ComponentNo']
Status -> self.element_info['Status']
Type of component #2 -> self.components[1]['Type']
(Note that SuperMemo component numbers start at 1.)
"""
# Bits that determine if a component is a question or an answer.
question = 0b00100000
answer = 0b01000000
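    # For example, a Text component with DisplayAt=96 (0b01100000) is shown
    # with both the question and the answer, so get_question() picks it up;
    # one with DisplayAt=64 (0b01000000) is shown only with the answer.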
def __init__(self, element_body):
"""
Instantiate an Element from its textual representation.
The textual representation of an element starts with
"Begin Element #N" and ends with "End Element #N"
where N is the ID of the element.
"""
self.properties = {}
self.element_info = {}
self.components = []
self.id = 0
# Extract information from the element body line by line.
inside_element_info = False
inside_component = False
for line in element_body.strip().split("\n"):
line = line.strip()
if line == "":
pass
if line.startswith("Begin Element "):
self.id = int(line.split(' #')[-1])
elif line.startswith("End Element "):
pass
elif line.startswith("Begin ElementInfo"):
inside_element_info = True
elif line.startswith("End ElementInfo"):
inside_element_info = False
elif line.startswith("Begin Component"):
self.components.append({})
inside_component = True
elif line.startswith("End Component"):
inside_component = False
else:
key, value = line.split('=', 1)
if not inside_element_info and not inside_component:
self.properties[key] = value
elif inside_component:
# There are other properties besides PlayAt and DisplayAt
# that could be converted to integers, but those two are
# the only ones currently used by the converter.
if key in ['PlayAt', 'DisplayAt']:
value = int(value)
self.components[-1][key] = value
elif inside_element_info:
self.element_info[key] = value
def get_question(self):
"""Return the text of the question."""
for c in self.components:
if (c['Type'] == 'Text' and c['DisplayAt'] & self.question
and c['DisplayAt'] & self.answer):
return c['Text']
def get_answer(self):
"""Return the text of the answer."""
for c in self.components:
if (c['Type'] == 'Text' and not c['DisplayAt'] & self.question
and c['DisplayAt'] & self.answer):
return c['Text']
def get_question_sound(self):
"""Return the full path to the audio file associated with the question."""
for c in self.components:
if c['Type'] == 'Sound' and c['PlayAt'] & self.question:
return c['SoundFile']
def get_answer_sound(self):
"""Return the full path to the audio file associated with the answer."""
for c in self.components:
if c['Type'] == 'Sound' and c['PlayAt'] & self.answer:
return c['SoundFile']
def is_item(self):
"""Return True if the element represents an item rather than a topic."""
return self.element_info['Type'] == 'Item'
    def __str__(self):
        # Use a local name that does not shadow the built-in str().
        result = "Type: {} Id: {} Title: {}\n".format(
            self.element_info['Type'], self.id, self.element_info['Title'])
        for c in self.components:
            try:
                result += "Component -> Type: {} {} PlayAt {}\n".format(
                    c['Type'], c['SoundFile'], c['PlayAt'])
            except KeyError:
                result += "Component -> Type: {} {} DisplayAt {}\n".format(
                    c['Type'], c['Text'], c['DisplayAt'])
        return result
def read_sm_file(sm_file_contents):
"""Return a dictionary of Element objects indexed by ID."""
elements = [Element(element_body) for element_body
in sm_file_contents.strip().split("\n\n")]
    return {e.id: e for e in elements}
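# A minimal end-to-end sketch (file names are hypothetical). Given a
# collection export in which elements are separated by blank lines, e.g.:
#
#     Begin Element #1
#     Parent=0
#     Begin ElementInfo #1
#     Title=Example
#     Type=Item
#     End ElementInfo #1
#     Begin Component #1
#     Type=Text
#     Text=What is 2+2?
#     DisplayAt=96
#     End Component #1
#     End Element #1
#
# the conversion pipeline is:
#
#     with open('collection.txt') as f:
#         elements = read_sm_file(f.read())
#     converter = Converter(elements, 'c:/sm/systems/your_collection/elements/')
#     with open('anki_import.txt', 'w') as out:
#         out.write(converter.convert_all())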
|
{
"content_hash": "f1d378f4a1fafdaa97a7659418e35529",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 85,
"avg_line_length": 38.86734693877551,
"alnum_prop": 0.5563139931740614,
"repo_name": "michalmazur/sm2anki",
"id": "91daf77c7e8781a7b25701b8beaf79ac0c530d80",
"size": "7618",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sm2anki.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "12206"
}
],
"symlink_target": ""
}
|
import webapp2
import jinja2
import os
# Initialize Jinja
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),
autoescape = True)
# Constants
PAGE_RE = r'(/(?:[a-zA-Z0-9_-]+/?)*)'
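# PAGE_RE matches wiki-style paths such as '/', '/FrontPage', or
# '/projects/flappi-chicken/': one or more slash-separated segments of
# letters, digits, underscores, and hyphens, with the whole path captured as
# a group that webapp2 passes to the handler (the pageName argument below).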
# Handlers
class Handler(webapp2.RequestHandler):
def write(self, *a, **kw):
self.response.out.write(*a, **kw)
def render_str(self, template, **params):
t = jinja_env.get_template(template)
return t.render(params)
def render(self, template, **kw):
self.write(self.render_str(template, **kw))
class WikiPage(Handler):
def get(self, pageName):
username = "andy"
content = "this is just a placeholder for now"
self.render("home.html", username = username, content = content, pageName = 'flappi chicken')
app = webapp2.WSGIApplication([(PAGE_RE, WikiPage)], debug=True)
|
{
"content_hash": "89d4eb621e2b3368b49c380087757029",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 101,
"avg_line_length": 28.647058823529413,
"alnum_prop": 0.6427104722792608,
"repo_name": "andyttran/flappichicken",
"id": "a50f0620bb009213a5779327fe3628063d9d2788",
"size": "2231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "938"
},
{
"name": "HTML",
"bytes": "3081"
},
{
"name": "JavaScript",
"bytes": "55671"
},
{
"name": "Python",
"bytes": "2231"
}
],
"symlink_target": ""
}
|
from os.path import join
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class PillowRecipe(CompiledComponentsPythonRecipe):
"""
A recipe for Pillow (previously known as Pil).
This recipe allow us to build the Pillow recipe with support for different
types of images and fonts. But you should be aware, that in order to use
some of the features of Pillow, we must build some libraries. By default
we automatically trigger the build of below libraries::
- freetype: rendering fonts support.
- harfbuzz: a text shaping library.
- jpeg: reading and writing JPEG image files.
- png: support for PNG images.
But you also could enable the build of some extra image types by requesting
the build of some libraries via argument `requirements`::
- libwebp: library to encode and decode images in WebP format.
"""
version = '8.4.0'
url = 'https://github.com/python-pillow/Pillow/archive/{version}.tar.gz'
site_packages_name = 'Pillow'
depends = ['png', 'jpeg', 'freetype', 'setuptools']
opt_depends = ['libwebp']
patches = [join('patches', 'fix-setup.patch')]
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch=None, with_flags_in_cc=True):
env = super().get_recipe_env(arch, with_flags_in_cc)
png = self.get_recipe('png', self.ctx)
png_lib_dir = join(png.get_build_dir(arch.arch), '.libs')
        png_inc_dir = png.get_build_dir(arch.arch)
jpeg = self.get_recipe('jpeg', self.ctx)
jpeg_inc_dir = jpeg_lib_dir = jpeg.get_build_dir(arch.arch)
freetype = self.get_recipe('freetype', self.ctx)
free_lib_dir = join(freetype.get_build_dir(arch.arch), 'objs', '.libs')
free_inc_dir = join(freetype.get_build_dir(arch.arch), 'include')
# harfbuzz is a direct dependency of freetype and we need the proper
# flags to successfully build the Pillow recipe, so we add them here.
harfbuzz = self.get_recipe('harfbuzz', self.ctx)
harf_lib_dir = join(harfbuzz.get_build_dir(arch.arch), 'src', '.libs')
harf_inc_dir = harfbuzz.get_build_dir(arch.arch)
# libwebp is an optional dependency, so we add the
# flags if we have it in our `ctx.recipe_build_order`
build_with_webp_support = 'libwebp' in self.ctx.recipe_build_order
if build_with_webp_support:
webp = self.get_recipe('libwebp', self.ctx)
webp_install = join(
webp.get_build_dir(arch.arch), 'installation'
)
# Add libraries includes to CFLAGS
cflags = f' -I{png_inc_dir}'
cflags += f' -I{harf_inc_dir} -I{join(harf_inc_dir, "src")}'
cflags += f' -I{free_inc_dir}'
cflags += f' -I{jpeg_inc_dir}'
if build_with_webp_support:
cflags += f' -I{join(webp_install, "include")}'
cflags += f' -I{self.ctx.ndk.sysroot_include_dir}'
# Link the basic Pillow libraries...no need to add webp's libraries
# since it seems that the linkage is properly made without it :)
env['LIBS'] = ' -lpng -lfreetype -lharfbuzz -ljpeg -lturbojpeg -lm'
# Add libraries locations to LDFLAGS
env['LDFLAGS'] += f' -L{png_lib_dir}'
env['LDFLAGS'] += f' -L{free_lib_dir}'
env['LDFLAGS'] += f' -L{harf_lib_dir}'
env['LDFLAGS'] += f' -L{jpeg_lib_dir}'
if build_with_webp_support:
env['LDFLAGS'] += f' -L{join(webp_install, "lib")}'
env['LDFLAGS'] += f' -L{arch.ndk_lib_dir_versioned}'
if cflags not in env['CFLAGS']:
env['CFLAGS'] += cflags + " -lm"
return env
recipe = PillowRecipe()
|
{
"content_hash": "23e48250030f60fa76274a1eb9c6b8cf",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 79,
"avg_line_length": 41.34444444444444,
"alnum_prop": 0.6226820747110992,
"repo_name": "kivy/python-for-android",
"id": "f8f6929db5639a3fc887f457a875b201879d6738",
"size": "3721",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "pythonforandroid/recipes/Pillow/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "65026"
},
{
"name": "CMake",
"bytes": "250"
},
{
"name": "CSS",
"bytes": "3107"
},
{
"name": "Cython",
"bytes": "15033"
},
{
"name": "Dockerfile",
"bytes": "3302"
},
{
"name": "HTML",
"bytes": "5776"
},
{
"name": "Java",
"bytes": "137677"
},
{
"name": "Makefile",
"bytes": "14228"
},
{
"name": "Python",
"bytes": "822277"
},
{
"name": "Shell",
"bytes": "1823"
},
{
"name": "kvlang",
"bytes": "17453"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
import functools
import re
from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.dialogflowcx_v3.services.transition_route_groups import pagers
from google.cloud.dialogflowcx_v3.types import page
from google.cloud.dialogflowcx_v3.types import transition_route_group
from google.cloud.dialogflowcx_v3.types import (
transition_route_group as gcdc_transition_route_group,
)
from google.cloud.location import locations_pb2 # type: ignore
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2 # type: ignore
from .transports.base import TransitionRouteGroupsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import TransitionRouteGroupsGrpcAsyncIOTransport
from .client import TransitionRouteGroupsClient
class TransitionRouteGroupsAsyncClient:
"""Service for managing
[TransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroup].
"""
_client: TransitionRouteGroupsClient
DEFAULT_ENDPOINT = TransitionRouteGroupsClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = TransitionRouteGroupsClient.DEFAULT_MTLS_ENDPOINT
flow_path = staticmethod(TransitionRouteGroupsClient.flow_path)
parse_flow_path = staticmethod(TransitionRouteGroupsClient.parse_flow_path)
intent_path = staticmethod(TransitionRouteGroupsClient.intent_path)
parse_intent_path = staticmethod(TransitionRouteGroupsClient.parse_intent_path)
page_path = staticmethod(TransitionRouteGroupsClient.page_path)
parse_page_path = staticmethod(TransitionRouteGroupsClient.parse_page_path)
transition_route_group_path = staticmethod(
TransitionRouteGroupsClient.transition_route_group_path
)
parse_transition_route_group_path = staticmethod(
TransitionRouteGroupsClient.parse_transition_route_group_path
)
webhook_path = staticmethod(TransitionRouteGroupsClient.webhook_path)
parse_webhook_path = staticmethod(TransitionRouteGroupsClient.parse_webhook_path)
common_billing_account_path = staticmethod(
TransitionRouteGroupsClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
TransitionRouteGroupsClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(TransitionRouteGroupsClient.common_folder_path)
parse_common_folder_path = staticmethod(
TransitionRouteGroupsClient.parse_common_folder_path
)
common_organization_path = staticmethod(
TransitionRouteGroupsClient.common_organization_path
)
parse_common_organization_path = staticmethod(
TransitionRouteGroupsClient.parse_common_organization_path
)
common_project_path = staticmethod(TransitionRouteGroupsClient.common_project_path)
parse_common_project_path = staticmethod(
TransitionRouteGroupsClient.parse_common_project_path
)
common_location_path = staticmethod(
TransitionRouteGroupsClient.common_location_path
)
parse_common_location_path = staticmethod(
TransitionRouteGroupsClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
TransitionRouteGroupsAsyncClient: The constructed client.
"""
return TransitionRouteGroupsClient.from_service_account_info.__func__(TransitionRouteGroupsAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
TransitionRouteGroupsAsyncClient: The constructed client.
"""
return TransitionRouteGroupsClient.from_service_account_file.__func__(TransitionRouteGroupsAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
        otherwise use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return TransitionRouteGroupsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> TransitionRouteGroupsTransport:
"""Returns the transport used by the client instance.
Returns:
TransitionRouteGroupsTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(TransitionRouteGroupsClient).get_transport_class,
type(TransitionRouteGroupsClient),
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, TransitionRouteGroupsTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the transition route groups client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.TransitionRouteGroupsTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = TransitionRouteGroupsClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
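    # A minimal construction sketch (the regional endpoint shown is
    # illustrative):
    #     from google.api_core.client_options import ClientOptions
    #     client = TransitionRouteGroupsAsyncClient(
    #         client_options=ClientOptions(
    #             api_endpoint="us-central1-dialogflow.googleapis.com"))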
async def list_transition_route_groups(
self,
request: Union[
transition_route_group.ListTransitionRouteGroupsRequest, dict
] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListTransitionRouteGroupsAsyncPager:
r"""Returns the list of all transition route groups in
the specified flow.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflowcx_v3
async def sample_list_transition_route_groups():
# Create a client
client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient()
# Initialize request argument(s)
request = dialogflowcx_v3.ListTransitionRouteGroupsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_transition_route_groups(request=request)
# Handle the response
async for response in page_result:
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3.types.ListTransitionRouteGroupsRequest, dict]):
The request object. The request message for
[TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups].
parent (:class:`str`):
Required. The flow to list all transition route groups
for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3.services.transition_route_groups.pagers.ListTransitionRouteGroupsAsyncPager:
The response message for
[TransitionRouteGroups.ListTransitionRouteGroups][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.ListTransitionRouteGroups].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = transition_route_group.ListTransitionRouteGroupsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_transition_route_groups,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListTransitionRouteGroupsAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def get_transition_route_group(
self,
request: Union[
transition_route_group.GetTransitionRouteGroupRequest, dict
] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> transition_route_group.TransitionRouteGroup:
r"""Retrieves the specified
[TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup].
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflowcx_v3
async def sample_get_transition_route_group():
# Create a client
client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient()
# Initialize request argument(s)
request = dialogflowcx_v3.GetTransitionRouteGroupRequest(
name="name_value",
)
# Make the request
response = await client.get_transition_route_group(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3.types.GetTransitionRouteGroupRequest, dict]):
The request object. The request message for
[TransitionRouteGroups.GetTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.GetTransitionRouteGroup].
name (:class:`str`):
Required. The name of the
[TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup].
Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>/transitionRouteGroups/<Transition Route Group ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3.types.TransitionRouteGroup:
                A TransitionRouteGroup represents a group of
[TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute]
to be used by a
[Page][google.cloud.dialogflow.cx.v3.Page].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = transition_route_group.GetTransitionRouteGroupRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_transition_route_group,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def create_transition_route_group(
self,
request: Union[
gcdc_transition_route_group.CreateTransitionRouteGroupRequest, dict
] = None,
*,
parent: str = None,
transition_route_group: gcdc_transition_route_group.TransitionRouteGroup = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_transition_route_group.TransitionRouteGroup:
r"""Creates an
[TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]
in the specified flow.
Note: You should always train a flow prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/cx/docs/concept/training>`__.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflowcx_v3
async def sample_create_transition_route_group():
# Create a client
client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient()
# Initialize request argument(s)
transition_route_group = dialogflowcx_v3.TransitionRouteGroup()
transition_route_group.display_name = "display_name_value"
request = dialogflowcx_v3.CreateTransitionRouteGroupRequest(
parent="parent_value",
transition_route_group=transition_route_group,
)
# Make the request
response = await client.create_transition_route_group(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3.types.CreateTransitionRouteGroupRequest, dict]):
The request object. The request message for
[TransitionRouteGroups.CreateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.CreateTransitionRouteGroup].
parent (:class:`str`):
                Required. The flow to create a
[TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]
for. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
transition_route_group (:class:`google.cloud.dialogflowcx_v3.types.TransitionRouteGroup`):
Required. The transition route group
to create.
This corresponds to the ``transition_route_group`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3.types.TransitionRouteGroup:
                A TransitionRouteGroup represents a group of
[TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute]
to be used by a
[Page][google.cloud.dialogflow.cx.v3.Page].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, transition_route_group])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcdc_transition_route_group.CreateTransitionRouteGroupRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if transition_route_group is not None:
request.transition_route_group = transition_route_group
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_transition_route_group,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def update_transition_route_group(
self,
request: Union[
gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, dict
] = None,
*,
transition_route_group: gcdc_transition_route_group.TransitionRouteGroup = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcdc_transition_route_group.TransitionRouteGroup:
r"""Updates the specified
[TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup].
Note: You should always train a flow prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/cx/docs/concept/training>`__.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflowcx_v3
async def sample_update_transition_route_group():
# Create a client
client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient()
# Initialize request argument(s)
transition_route_group = dialogflowcx_v3.TransitionRouteGroup()
transition_route_group.display_name = "display_name_value"
request = dialogflowcx_v3.UpdateTransitionRouteGroupRequest(
transition_route_group=transition_route_group,
)
# Make the request
response = await client.update_transition_route_group(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflowcx_v3.types.UpdateTransitionRouteGroupRequest, dict]):
The request object. The request message for
[TransitionRouteGroups.UpdateTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.UpdateTransitionRouteGroup].
transition_route_group (:class:`google.cloud.dialogflowcx_v3.types.TransitionRouteGroup`):
Required. The transition route group
to update.
This corresponds to the ``transition_route_group`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
The mask to control which fields get
updated.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflowcx_v3.types.TransitionRouteGroup:
                A TransitionRouteGroup represents a group of
[TransitionRoutes][google.cloud.dialogflow.cx.v3.TransitionRoute]
to be used by a
[Page][google.cloud.dialogflow.cx.v3.Page].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([transition_route_group, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcdc_transition_route_group.UpdateTransitionRouteGroupRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if transition_route_group is not None:
request.transition_route_group = transition_route_group
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_transition_route_group,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("transition_route_group.name", request.transition_route_group.name),)
),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_transition_route_group(
self,
request: Union[
transition_route_group.DeleteTransitionRouteGroupRequest, dict
] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes the specified
[TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup].
Note: You should always train a flow prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/cx/docs/concept/training>`__.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflowcx_v3
async def sample_delete_transition_route_group():
# Create a client
client = dialogflowcx_v3.TransitionRouteGroupsAsyncClient()
# Initialize request argument(s)
request = dialogflowcx_v3.DeleteTransitionRouteGroupRequest(
name="name_value",
)
# Make the request
await client.delete_transition_route_group(request=request)
Args:
request (Union[google.cloud.dialogflowcx_v3.types.DeleteTransitionRouteGroupRequest, dict]):
The request object. The request message for
[TransitionRouteGroups.DeleteTransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroups.DeleteTransitionRouteGroup].
name (:class:`str`):
Required. The name of the
[TransitionRouteGroup][google.cloud.dialogflow.cx.v3.TransitionRouteGroup]
to delete. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/flows/<Flow ID>/transitionRouteGroups/<Transition Route Group ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = transition_route_group.DeleteTransitionRouteGroupRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_transition_route_group,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
async def list_operations(
self,
request: operations_pb2.ListOperationsRequest = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
Args:
request (:class:`~.operations_pb2.ListOperationsRequest`):
The request object. Request message for
`ListOperations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.ListOperationsRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.list_operations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def get_operation(
self,
request: operations_pb2.GetOperationRequest = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
Args:
request (:class:`~.operations_pb2.GetOperationRequest`):
The request object. Request message for
`GetOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.GetOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.get_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def cancel_operation(
self,
request: operations_pb2.CancelOperationRequest = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
The server makes a best effort to cancel the operation, but success
is not guaranteed. If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`.
Args:
request (:class:`~.operations_pb2.CancelOperationRequest`):
The request object. Request message for
`CancelOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
None
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.CancelOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.cancel_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
async def get_location(
self,
request: locations_pb2.GetLocationRequest = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.Location:
r"""Gets information about a location.
Args:
request (:class:`~.location_pb2.GetLocationRequest`):
The request object. Request message for
`GetLocation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.Location:
Location object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.GetLocationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.get_location,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def list_locations(
self,
request: locations_pb2.ListLocationsRequest = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.ListLocationsResponse:
r"""Lists information about the supported locations for this service.
Args:
request (:class:`~.location_pb2.ListLocationsRequest`):
The request object. Request message for
`ListLocations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.ListLocationsResponse:
Response message for ``ListLocations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.ListLocationsRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.list_locations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-dialogflowcx",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("TransitionRouteGroupsAsyncClient",)
|
{
"content_hash": "1dbbf76c12f6cc01a7d297f2c6728b0b",
"timestamp": "",
"source": "github",
"line_count": 1082,
"max_line_length": 154,
"avg_line_length": 41.975970425138634,
"alnum_prop": 0.6223523713065304,
"repo_name": "googleapis/python-dialogflow-cx",
"id": "8ca567634beba8ab7814ce82c17d2a9b7641bada",
"size": "46018",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/dialogflowcx_v3/services/transition_route_groups/async_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "10904903"
},
{
"name": "Shell",
"bytes": "30681"
}
],
"symlink_target": ""
}
|
import ctypes
import os
import shutil
from unittest import mock
import ddt
from os_win import constants
from os_win import exceptions
from os_win.tests.unit import test_base
from os_win.utils import pathutils
from os_win.utils.winapi import constants as w_const
from os_win.utils.winapi.libs import advapi32 as advapi32_def
from os_win.utils.winapi.libs import kernel32 as kernel32_def
from os_win.utils.winapi import wintypes
@ddt.ddt
class PathUtilsTestCase(test_base.OsWinBaseTestCase):
"""Unit tests for the Hyper-V PathUtils class."""
_autospec_classes = [
pathutils.ioutils.IOUtils,
pathutils.win32utils.Win32Utils,
pathutils._acl_utils.ACLUtils,
]
def setUp(self):
super(PathUtilsTestCase, self).setUp()
self._setup_lib_mocks()
self._pathutils = pathutils.PathUtils()
self._mock_run = self._pathutils._win32_utils.run_and_check_output
self._acl_utils = self._pathutils._acl_utils
self._io_utils = self._pathutils._io_utils
def _setup_lib_mocks(self):
self._ctypes = mock.Mock()
self._wintypes = mock.Mock()
self._wintypes.BOOL = lambda x: (x, 'BOOL')
self._ctypes.c_wchar_p = lambda x: (x, "c_wchar_p")
self._ctypes.pointer = lambda x: (x, 'pointer')
self._ctypes_patcher = mock.patch.object(
pathutils, 'ctypes', new=self._ctypes)
self._ctypes_patcher.start()
mock.patch.multiple(pathutils,
wintypes=self._wintypes,
kernel32=mock.DEFAULT,
create=True).start()
@mock.patch.object(pathutils.PathUtils, 'copy')
@mock.patch.object(os.path, 'isfile')
@mock.patch.object(os, 'listdir')
@mock.patch.object(pathutils.PathUtils, 'check_create_dir')
def test_copy_folder_files(self, mock_check_create_dir, mock_listdir,
mock_isfile, mock_copy):
src_dir = 'src'
dest_dir = 'dest'
fname = 'tmp_file.txt'
subdir = 'tmp_folder'
src_fname = os.path.join(src_dir, fname)
dest_fname = os.path.join(dest_dir, fname)
        # Making sure the subdirectory is not copied.
mock_listdir.return_value = [fname, subdir]
mock_isfile.side_effect = [True, False]
self._pathutils.copy_folder_files(src_dir, dest_dir)
mock_check_create_dir.assert_called_once_with(dest_dir)
mock_copy.assert_called_once_with(src_fname, dest_fname)
@mock.patch.object(pathutils.PathUtils, 'rename')
@mock.patch.object(os.path, 'isfile')
@mock.patch.object(os, 'listdir')
def test_move_folder_files(self, mock_listdir, mock_isfile, mock_rename):
src_dir = 'src'
dest_dir = 'dest'
fname = 'tmp_file.txt'
subdir = 'tmp_folder'
src_fname = os.path.join(src_dir, fname)
dest_fname = os.path.join(dest_dir, fname)
        # Making sure the subdirectory is not moved.
mock_listdir.return_value = [fname, subdir]
mock_isfile.side_effect = [True, False]
self._pathutils.move_folder_files(src_dir, dest_dir)
mock_rename.assert_called_once_with(src_fname, dest_fname)
@mock.patch('time.sleep')
@mock.patch.object(pathutils.shutil, 'rmtree')
def test_rmtree(self, mock_rmtree, mock_sleep):
exc = exceptions.WindowsError()
exc.winerror = w_const.ERROR_DIR_IS_NOT_EMPTY
mock_rmtree.side_effect = [exc] * 5 + [None]
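        # PathUtils.rmtree is expected to retry on ERROR_DIR_IS_NOT_EMPTY:
        # five failures here, then success on the sixth call.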
self._pathutils.rmtree(mock.sentinel.FAKE_PATH)
mock_rmtree.assert_has_calls([mock.call(mock.sentinel.FAKE_PATH)] * 6)
@mock.patch('time.sleep')
@mock.patch.object(pathutils.shutil, 'rmtree')
def _check_rmtree(self, mock_rmtree, mock_sleep, side_effect):
mock_rmtree.side_effect = side_effect
self.assertRaises(exceptions.WindowsError, self._pathutils.rmtree,
mock.sentinel.FAKE_PATH)
def test_rmtree_unexpected(self):
self._check_rmtree(side_effect=exceptions.WindowsError)
@mock.patch('time.time')
def test_rmtree_exceeded(self, mock_time):
mock_time.side_effect = range(1, 100, 10)
exc = exceptions.WindowsError()
exc.winerror = w_const.ERROR_DIR_IS_NOT_EMPTY
self._check_rmtree(side_effect=exc)
@mock.patch.object(pathutils.PathUtils, 'makedirs')
@mock.patch.object(pathutils.PathUtils, 'exists')
def test_check_create_dir(self, mock_exists, mock_makedirs):
fake_dir = 'dir'
mock_exists.return_value = False
self._pathutils.check_create_dir(fake_dir)
mock_exists.assert_called_once_with(fake_dir)
mock_makedirs.assert_called_once_with(fake_dir)
@mock.patch.object(pathutils.PathUtils, 'rmtree')
@mock.patch.object(pathutils.PathUtils, 'exists')
def test_check_remove_dir(self, mock_exists, mock_rmtree):
fake_dir = 'dir'
self._pathutils.check_remove_dir(fake_dir)
mock_exists.assert_called_once_with(fake_dir)
mock_rmtree.assert_called_once_with(fake_dir)
@mock.patch('os.path.isdir')
@mock.patch('os.path.islink')
def _test_check_symlink(self, mock_is_symlink, mock_is_dir,
is_symlink=True, is_dir=True):
fake_path = r'c:\\fake_path'
        if is_symlink:
            f_attr = 0x400  # FILE_ATTRIBUTE_REPARSE_POINT
        else:
            f_attr = 0x80  # FILE_ATTRIBUTE_NORMAL
mock_is_dir.return_value = is_dir
mock_is_symlink.return_value = is_symlink
self._mock_run.return_value = f_attr
ret_value = self._pathutils.is_symlink(fake_path)
mock_is_symlink.assert_called_once_with(fake_path)
self.assertEqual(is_symlink, ret_value)
def test_is_symlink(self):
self._test_check_symlink()
def test_is_not_symlink(self):
self._test_check_symlink(is_symlink=False)
def test_create_sym_link(self):
tg_is_dir = False
self._pathutils.create_sym_link(mock.sentinel.path,
mock.sentinel.target,
target_is_dir=tg_is_dir)
self._mock_run.assert_called_once_with(
pathutils.kernel32.CreateSymbolicLinkW,
mock.sentinel.path,
mock.sentinel.target,
tg_is_dir,
kernel32_lib_func=True)
@mock.patch('os.path.isdir')
def _test_copy(self, mock_isdir, dest_isdir=False):
mock_isdir.return_value = dest_isdir
fail_if_exists = False
fake_src = r'fake_src_fname'
fake_dest = r'fake_dest'
expected_dest = (os.path.join(fake_dest, fake_src)
if dest_isdir else fake_dest)
self._pathutils.copy(fake_src, fake_dest,
fail_if_exists=fail_if_exists)
self._mock_run.assert_called_once_with(
pathutils.kernel32.CopyFileW,
self._ctypes.c_wchar_p(fake_src),
self._ctypes.c_wchar_p(expected_dest),
self._wintypes.BOOL(fail_if_exists),
kernel32_lib_func=True)
def test_copy_dest_is_fpath(self):
self._test_copy()
def test_copy_dest_is_dir(self):
self._test_copy(dest_isdir=True)
@mock.patch('os.path.isdir')
def test_copy_exc(self, mock_isdir):
mock_isdir.return_value = False
self._mock_run.side_effect = exceptions.Win32Exception(
func_name='mock_copy',
error_code='fake_error_code',
error_message='fake_error_msg')
self.assertRaises(IOError,
self._pathutils.copy,
mock.sentinel.src,
mock.sentinel.dest)
@mock.patch('os.close')
@mock.patch('tempfile.mkstemp')
def test_create_temporary_file(self, mock_mkstemp, mock_close):
fd = mock.sentinel.file_descriptor
path = mock.sentinel.absolute_pathname
mock_mkstemp.return_value = (fd, path)
output = self._pathutils.create_temporary_file(
suffix=mock.sentinel.suffix)
self.assertEqual(path, output)
mock_close.assert_called_once_with(fd)
mock_mkstemp.assert_called_once_with(suffix=mock.sentinel.suffix)
@mock.patch('oslo_utils.fileutils.delete_if_exists')
def test_temporary_file(self, mock_delete):
self._pathutils.create_temporary_file = mock.MagicMock()
self._pathutils.create_temporary_file.return_value = (
mock.sentinel.temporary_file)
with self._pathutils.temporary_file() as tmp_file:
self.assertEqual(mock.sentinel.temporary_file, tmp_file)
self.assertFalse(mock_delete.called)
mock_delete.assert_called_once_with(mock.sentinel.temporary_file)
@mock.patch.object(shutil, 'copytree')
def test_copy_dir(self, mock_copytree):
self._pathutils.copy_dir(mock.sentinel.src, mock.sentinel.dest)
mock_copytree.assert_called_once_with(mock.sentinel.src,
mock.sentinel.dest)
def test_add_acl_rule(self):
# We raise an expected exception in order to
# easily verify the resource cleanup.
raised_exc = exceptions.OSWinException
self._ctypes_patcher.stop()
fake_trustee = 'FAKEDOMAIN\\FakeUser'
mock_sec_info = dict(pp_sec_desc=mock.Mock(),
pp_dacl=mock.Mock())
self._acl_utils.get_named_security_info.return_value = mock_sec_info
self._acl_utils.set_named_security_info.side_effect = raised_exc
pp_new_dacl = self._acl_utils.set_entries_in_acl.return_value
self.assertRaises(raised_exc,
self._pathutils.add_acl_rule,
path=mock.sentinel.path,
trustee_name=fake_trustee,
access_rights=constants.ACE_GENERIC_READ,
access_mode=constants.ACE_GRANT_ACCESS,
inheritance_flags=constants.ACE_OBJECT_INHERIT)
self._acl_utils.get_named_security_info.assert_called_once_with(
obj_name=mock.sentinel.path,
obj_type=w_const.SE_FILE_OBJECT,
security_info_flags=w_const.DACL_SECURITY_INFORMATION)
self._acl_utils.set_entries_in_acl.assert_called_once_with(
entry_count=1,
p_explicit_entry_list=mock.ANY,
p_old_acl=mock_sec_info['pp_dacl'].contents)
self._acl_utils.set_named_security_info.assert_called_once_with(
obj_name=mock.sentinel.path,
obj_type=w_const.SE_FILE_OBJECT,
security_info_flags=w_const.DACL_SECURITY_INFORMATION,
p_dacl=pp_new_dacl.contents)
p_access = self._acl_utils.set_entries_in_acl.call_args_list[0][1][
'p_explicit_entry_list']
access = ctypes.cast(
p_access,
ctypes.POINTER(advapi32_def.EXPLICIT_ACCESS)).contents
self.assertEqual(constants.ACE_GENERIC_READ,
access.grfAccessPermissions)
self.assertEqual(constants.ACE_GRANT_ACCESS,
access.grfAccessMode)
self.assertEqual(constants.ACE_OBJECT_INHERIT,
access.grfInheritance)
self.assertEqual(w_const.TRUSTEE_IS_NAME,
access.Trustee.TrusteeForm)
self.assertEqual(fake_trustee,
access.Trustee.pstrName)
self._pathutils._win32_utils.local_free.assert_has_calls(
[mock.call(pointer)
for pointer in [mock_sec_info['pp_sec_desc'].contents,
pp_new_dacl.contents]])
def test_copy_acls(self):
raised_exc = exceptions.OSWinException
mock_sec_info = dict(pp_sec_desc=mock.Mock(),
pp_dacl=mock.Mock())
self._acl_utils.get_named_security_info.return_value = mock_sec_info
self._acl_utils.set_named_security_info.side_effect = raised_exc
self.assertRaises(raised_exc,
self._pathutils.copy_acls,
mock.sentinel.src,
mock.sentinel.dest)
self._acl_utils.get_named_security_info.assert_called_once_with(
obj_name=mock.sentinel.src,
obj_type=w_const.SE_FILE_OBJECT,
security_info_flags=w_const.DACL_SECURITY_INFORMATION)
self._acl_utils.set_named_security_info.assert_called_once_with(
obj_name=mock.sentinel.dest,
obj_type=w_const.SE_FILE_OBJECT,
security_info_flags=w_const.DACL_SECURITY_INFORMATION,
p_dacl=mock_sec_info['pp_dacl'].contents)
self._pathutils._win32_utils.local_free.assert_called_once_with(
mock_sec_info['pp_sec_desc'].contents)
def _get_file_id_info(self, volume_id, file_id, as_dict=False):
identifier = (wintypes.BYTE * 16)()
assert file_id < 1 << 128
idx = 0
while file_id:
            identifier[idx] = file_id & 0xff
file_id >>= 8
idx += 1
file_id_info = kernel32_def.FILE_ID_INFO(
VolumeSerialNumber=volume_id,
FileId=kernel32_def.FILE_ID_128(Identifier=identifier))
if as_dict:
return dict(volume_serial_number=file_id_info.VolumeSerialNumber,
file_id=bytearray(file_id_info.FileId.Identifier))
return file_id_info
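    # Worked example of the little-endian packing above (illustrative):
    # file_id = 1 << 64 sets Identifier[8] = 1 and leaves the remaining
    # fifteen bytes zero, which is exactly the layout test_get_file_id
    # asserts against below.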
@ddt.data((1, 2, 1, 2), # same file
(1, 2, 1, 3), # same volume id, different file id
(1, 2, 2, 2)) # same file id, different volume id
@ddt.unpack
@mock.patch.object(pathutils.PathUtils, 'get_file_id')
def test_is_same_file(self, volume_id_a, file_id_a,
volume_id_b, file_id_b, mock_get_file_id):
file_info_a = self._get_file_id_info(volume_id_a, file_id_a,
as_dict=True)
file_info_b = self._get_file_id_info(volume_id_b, file_id_b,
as_dict=True)
mock_get_file_id.side_effect = [file_info_a, file_info_b]
same_file = self._pathutils.is_same_file(
mock.sentinel.path_a,
mock.sentinel.path_b)
self.assertEqual(volume_id_a == volume_id_b and file_id_a == file_id_b,
same_file)
mock_get_file_id.assert_has_calls(
[mock.call(mock.sentinel.path_a),
mock.call(mock.sentinel.path_b)])
def test_get_file_id(self):
self._ctypes_patcher.stop()
fake_file_id = 1 << 64
fake_volume_id = 1 << 31
def fake_get_file_id(func, handle, file_info_class, file_info,
buffer_size, kernel32_lib_func):
self.assertEqual(func,
pathutils.kernel32.GetFileInformationByHandleEx)
self.assertTrue(kernel32_lib_func)
self.assertEqual(self._io_utils.open.return_value, handle)
self.assertEqual(w_const.FileIdInfo, file_info_class)
self.assertLessEqual(ctypes.sizeof(kernel32_def.FILE_ID_INFO),
buffer_size)
file_id = self._get_file_id_info(fake_volume_id, fake_file_id)
ctypes.memmove(file_info, ctypes.byref(file_id),
ctypes.sizeof(kernel32_def.FILE_ID_INFO))
self._mock_run.side_effect = fake_get_file_id
file_id = self._pathutils.get_file_id(mock.sentinel.path)
exp_identifier = [0] * 16
exp_identifier[8] = 1
exp_file_id = dict(volume_serial_number=fake_volume_id,
file_id=bytearray(exp_identifier))
self.assertEqual(exp_file_id, file_id)
self._io_utils.open.assert_called_once_with(
mock.sentinel.path,
desired_access=0,
share_mode=(w_const.FILE_SHARE_READ |
w_const.FILE_SHARE_WRITE |
w_const.FILE_SHARE_DELETE),
creation_disposition=w_const.OPEN_EXISTING)
self._io_utils.close_handle.assert_called_once_with(
self._io_utils.open.return_value)
def test_get_file_id_exc(self):
self._mock_run.side_effect = exceptions.Win32Exception(
message="fake exc")
self.assertRaises(exceptions.Win32Exception,
self._pathutils.get_file_id,
mock.sentinel.path)
self._io_utils.close_handle.assert_called_once_with(
self._io_utils.open.return_value)
|
{
"content_hash": "f9c34f2bc9b176dd4c9d72ea3c8e3c54",
"timestamp": "",
"source": "github",
"line_count": 422,
"max_line_length": 79,
"avg_line_length": 39.36255924170616,
"alnum_prop": 0.5952079947023057,
"repo_name": "openstack/os-win",
"id": "08b71348093eef84dc2152677a9844a79ceeb00a",
"size": "17214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "os_win/tests/unit/utils/test_pathutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1114520"
}
],
"symlink_target": ""
}
|
import logging
import os
import re
from packstack.installer import basedefs
from packstack.installer.setup_controller import Controller
from packstack.installer.exceptions import PackStackError
controller = Controller()
PUPPET_DIR = os.path.join(basedefs.DIR_PROJECT_DIR, "puppet")
PUPPET_TEMPLATE_DIR = os.path.join(PUPPET_DIR, "templates")
class NovaConfig(object):
"""
Helper class to create puppet manifest entries for nova_config
"""
def __init__(self):
self.options = {}
    def addOption(self, name, value):
        self.options[name] = value
def getManifestEntry(self):
entry = ""
if not self.options:
return entry
entry += "nova_config{\n"
for k, v in self.options.items():
entry += ' "%s": value => "%s";\n' % (k, v)
entry += "}"
return entry
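# Illustrative sketch (hypothetical option name/value) of the entry rendered
# by NovaConfig.getManifestEntry():
#
#   nc = NovaConfig()
#   nc.addOption("DEFAULT/debug", "True")
#   print nc.getManifestEntry()
#   # nova_config{
#   #     "DEFAULT/debug": value => "True";
#   # }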
class ManifestFiles(object):
def __init__(self):
self.filelist = []
self.data = {}
    # consecutive manifest files that have the same marker can be
    # installed in parallel, if they are on different servers
def addFile(self, filename, marker, data=''):
self.data[filename] = self.data.get(filename, '') + '\n' + data
for f, p in self.filelist:
if f == filename:
return
self.filelist.append((filename, marker))
def getFiles(self):
        return list(self.filelist)
def writeManifests(self):
"""
        Write out the manifest data to disk. This should only be called once,
        right before the puppet manifests are copied to the various servers.
"""
os.mkdir(basedefs.PUPPET_MANIFEST_DIR, 0700)
for fname, data in self.data.items():
path = os.path.join(basedefs.PUPPET_MANIFEST_DIR, fname)
fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0600)
with os.fdopen(fd, 'w') as fp:
fp.write(data)
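# Illustrative flow (hypothetical file name and marker): data added under the
# same file name is concatenated, while the (filename, marker) pair is only
# recorded once, so getFiles() yields each manifest a single time:
#
#   mf = ManifestFiles()
#   mf.addFile('1_controller.pp', 'nova', 'include nova')
#   mf.addFile('1_controller.pp', 'nova', 'include nova::api')
#   mf.getFiles()        # [('1_controller.pp', 'nova')]
#   mf.writeManifests()  # writes the concatenated data into
#                        # basedefs.PUPPET_MANIFEST_DIR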
manifestfiles = ManifestFiles()
def getManifestTemplate(template_name):
with open(os.path.join(PUPPET_TEMPLATE_DIR, template_name)) as fp:
return fp.read() % controller.CONF
def appendManifestFile(manifest_name, data, marker=''):
manifestfiles.addFile(manifest_name, marker, data)
def gethostlist(CONF):
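    """Return the unique hosts referenced by *_HOST/*_HOSTS options in CONF.
    Illustrative example (hypothetical option names)::
        >>> sorted(gethostlist({'CONFIG_NOVA_API_HOST': '10.0.0.1/24',
        ...                     'CONFIG_COMPUTE_HOSTS': '10.0.0.2,10.0.0.1'}))
        ['10.0.0.1', '10.0.0.2']
    """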
hosts = []
for key, value in CONF.items():
if key.endswith("_HOST"):
value = value.split('/')[0]
if value and value not in hosts:
hosts.append(value)
if key.endswith("_HOSTS"):
for host in value.split(","):
host = host.strip()
host = host.split('/')[0]
if host and host not in hosts:
hosts.append(host)
return hosts
|
{
"content_hash": "47fa8545d9744de45cc75f0487ce34c2",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 76,
"avg_line_length": 29.835164835164836,
"alnum_prop": 0.596316758747698,
"repo_name": "radez/packstack",
"id": "d4fc0e0041c8cf4a544de22f3fc58706b63305ed",
"size": "2716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packstack/modules/ospluginutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Puppet",
"bytes": "40360"
},
{
"name": "Python",
"bytes": "342039"
},
{
"name": "Ruby",
"bytes": "15291"
},
{
"name": "Shell",
"bytes": "459"
}
],
"symlink_target": ""
}
|
from llvm.core import *
from llvm.tbaa import *
from llvm.tests.support import TestCase
import unittest
class TestTBAABuilder(TestCase):
def test_tbaa_builder(self):
mod = Module.new('test_tbaa_builder')
fty = Type.function(Type.void(), [Type.pointer(Type.float())])
foo = mod.add_function(fty, 'foo')
bb = foo.append_basic_block('entry')
bldr = Builder.new(bb)
tbaa = TBAABuilder.new(mod, "tbaa.root")
        float_node = tbaa.get_node('float', const=False)
        const_float = tbaa.get_node('const float', float_node, const=True)
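        # a second builder created on the same root should hand back the
        # identical, cached metadata node for an equivalent get_node() call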
tbaa = TBAABuilder.new(mod, "tbaa.root")
old_const_float = const_float
del const_float
        const_float = tbaa.get_node('const float', float_node, const=True)
self.assertIs(old_const_float, const_float)
ptr = bldr.load(foo.args[0])
ptr.set_metadata('tbaa', const_float)
bldr.ret_void()
print(mod)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "b21d05aca750cfee688c1abd5449aad5",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 70,
"avg_line_length": 28.37142857142857,
"alnum_prop": 0.6112789526686808,
"repo_name": "llvmpy/llvmpy",
"id": "4a24de877d28556a5136f1cf9259b141e0f2eddd",
"size": "993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/tbaa.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "10456"
},
{
"name": "C++",
"bytes": "58044"
},
{
"name": "CSS",
"bytes": "12590"
},
{
"name": "HTML",
"bytes": "851926"
},
{
"name": "JavaScript",
"bytes": "4102"
},
{
"name": "LLVM",
"bytes": "35445"
},
{
"name": "Makefile",
"bytes": "1862"
},
{
"name": "Python",
"bytes": "720443"
},
{
"name": "Shell",
"bytes": "335"
}
],
"symlink_target": ""
}
|
"""Tests unitofwork operations."""
from test.lib.testing import eq_, assert_raises, assert_raises_message
import datetime
import operator
from sqlalchemy.orm import mapper as orm_mapper
import sqlalchemy as sa
from sqlalchemy import Integer, String, ForeignKey, literal_column, event
from test.lib import engines, testing, pickleable
from test.lib.schema import Table
from test.lib.schema import Column
from sqlalchemy.orm import mapper, relationship, create_session, \
column_property, attributes, Session, reconstructor, object_session
from test.lib.testing import eq_, ne_
from test.lib.util import gc_collect
from test.lib import fixtures
from test.orm import _fixtures
from test.lib.assertsql import AllOf, CompiledSQL
import gc
class UnitOfWorkTest(object):
pass
class HistoryTest(_fixtures.FixtureTest):
run_inserts = None
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
class Address(cls.Comparable):
pass
def test_backref(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
am = mapper(Address, addresses)
m = mapper(User, users, properties=dict(
addresses = relationship(am, backref='user', lazy='joined')))
session = create_session(autocommit=False)
u = User(name='u1')
a = Address(email_address='u1@e')
a.user = u
session.add(u)
eq_(u.addresses, [a])
session.commit()
session.expunge_all()
u = session.query(m).one()
assert u.addresses[0].user == u
session.close()
class UnicodeTest(fixtures.MappedTest):
__requires__ = ('unicode_connections',)
@classmethod
def define_tables(cls, metadata):
if testing.against('mysql+oursql'):
from sqlalchemy.dialects.mysql import VARCHAR
uni_type = VARCHAR(50, collation='utf8_unicode_ci')
else:
uni_type = sa.Unicode(50)
Table('uni_t1', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('txt', uni_type, unique=True))
Table('uni_t2', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('txt', uni_type, ForeignKey('uni_t1')))
@classmethod
def setup_classes(cls):
class Test(cls.Basic):
pass
class Test2(cls.Basic):
pass
def test_basic(self):
Test, uni_t1 = self.classes.Test, self.tables.uni_t1
mapper(Test, uni_t1)
txt = u"\u0160\u0110\u0106\u010c\u017d"
t1 = Test(id=1, txt=txt)
self.assert_(t1.txt == txt)
session = create_session(autocommit=False)
session.add(t1)
session.commit()
self.assert_(t1.txt == txt)
def test_relationship(self):
Test, uni_t2, uni_t1, Test2 = (self.classes.Test,
self.tables.uni_t2,
self.tables.uni_t1,
self.classes.Test2)
mapper(Test, uni_t1, properties={
't2s': relationship(Test2)})
mapper(Test2, uni_t2)
txt = u"\u0160\u0110\u0106\u010c\u017d"
t1 = Test(txt=txt)
t1.t2s.append(Test2())
t1.t2s.append(Test2())
session = create_session(autocommit=False)
session.add(t1)
session.commit()
session.close()
session = create_session()
t1 = session.query(Test).filter_by(id=t1.id).one()
assert len(t1.t2s) == 2
class UnicodeSchemaTest(fixtures.MappedTest):
__requires__ = ('unicode_connections', 'unicode_ddl',)
run_dispose_bind = 'once'
@classmethod
def create_engine(cls):
return engines.utf8_engine()
@classmethod
def define_tables(cls, metadata):
t1 = Table('unitable1', metadata,
Column(u'méil', Integer, primary_key=True, key='a', test_needs_autoincrement=True),
Column(u'\u6e2c\u8a66', Integer, key='b'),
Column('type', String(20)),
test_needs_fk=True,
test_needs_autoincrement=True)
t2 = Table(u'Unitéble2', metadata,
Column(u'méil', Integer, primary_key=True, key="cc", test_needs_autoincrement=True),
Column(u'\u6e2c\u8a66', Integer,
ForeignKey(u'unitable1.a'), key="d"),
Column(u'\u6e2c\u8a66_2', Integer, key="e"),
test_needs_fk=True,
test_needs_autoincrement=True)
cls.tables['t1'] = t1
cls.tables['t2'] = t2
@classmethod
def setup_class(cls):
super(UnicodeSchemaTest, cls).setup_class()
@classmethod
def teardown_class(cls):
super(UnicodeSchemaTest, cls).teardown_class()
@testing.fails_on('mssql+pyodbc',
'pyodbc returns a non unicode encoding of the results description.')
def test_mapping(self):
t2, t1 = self.tables.t2, self.tables.t1
class A(fixtures.ComparableEntity):
pass
class B(fixtures.ComparableEntity):
pass
mapper(A, t1, properties={
't2s':relationship(B)})
mapper(B, t2)
a1 = A()
b1 = B()
a1.t2s.append(b1)
session = create_session()
session.add(a1)
session.flush()
session.expunge_all()
new_a1 = session.query(A).filter(t1.c.a == a1.a).one()
assert new_a1.a == a1.a
assert new_a1.t2s[0].d == b1.d
session.expunge_all()
new_a1 = (session.query(A).options(sa.orm.joinedload('t2s')).
filter(t1.c.a == a1.a)).one()
assert new_a1.a == a1.a
assert new_a1.t2s[0].d == b1.d
session.expunge_all()
new_a1 = session.query(A).filter(A.a == a1.a).one()
assert new_a1.a == a1.a
assert new_a1.t2s[0].d == b1.d
session.expunge_all()
@testing.fails_on('mssql+pyodbc',
'pyodbc returns a non unicode encoding of the results description.')
def test_inheritance_mapping(self):
t2, t1 = self.tables.t2, self.tables.t1
class A(fixtures.ComparableEntity):
pass
class B(A):
pass
mapper(A, t1,
polymorphic_on=t1.c.type,
polymorphic_identity='a')
mapper(B, t2,
inherits=A,
polymorphic_identity='b')
a1 = A(b=5)
b1 = B(e=7)
session = create_session()
session.add_all((a1, b1))
session.flush()
session.expunge_all()
eq_([A(b=5), B(e=7)], session.query(A).all())
class BinaryHistTest(fixtures.MappedTest, testing.AssertsExecutionResults):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
Column('id', sa.Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', sa.LargeBinary),
)
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
def test_binary_equality(self):
Foo, t1 = self.classes.Foo, self.tables.t1
# Py3K
#data = b"this is some data"
# Py2K
data = "this is some data"
# end Py2K
mapper(Foo, t1)
s = create_session()
f1 = Foo(data=data)
s.add(f1)
s.flush()
s.expire_all()
f1 = s.query(Foo).first()
assert f1.data == data
f1.data = data
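        # re-assigning an equal value: get_history() reports it in the
        # "unchanged" slot, so the flush below should emit no SQL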
eq_(
sa.orm.attributes.get_history(f1, "data"),
((), [data], ())
)
def go():
s.flush()
self.assert_sql_count(testing.db, go, 0)
class PKTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('multipk1', metadata,
Column('multi_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('multi_rev', Integer, primary_key=True),
Column('name', String(50), nullable=False),
Column('value', String(100)))
Table('multipk2', metadata,
Column('pk_col_1', String(30), primary_key=True),
Column('pk_col_2', String(30), primary_key=True),
Column('data', String(30)))
Table('multipk3', metadata,
Column('pri_code', String(30), key='primary', primary_key=True),
Column('sec_code', String(30), key='secondary', primary_key=True),
Column('date_assigned', sa.Date, key='assigned', primary_key=True),
Column('data', String(30)))
@classmethod
def setup_classes(cls):
class Entry(cls.Basic):
pass
# not supported on sqlite since sqlite's auto-pk generation only works with
# single column primary keys
@testing.fails_on('sqlite', 'FIXME: unknown')
def test_primary_key(self):
Entry, multipk1 = self.classes.Entry, self.tables.multipk1
mapper(Entry, multipk1)
e = Entry(name='entry1', value='this is entry 1', multi_rev=2)
session = create_session()
session.add(e)
session.flush()
session.expunge_all()
e2 = session.query(Entry).get((e.multi_id, 2))
self.assert_(e is not e2)
state = sa.orm.attributes.instance_state(e)
state2 = sa.orm.attributes.instance_state(e2)
eq_(state.key, state2.key)
# this one works with sqlite since we are manually setting up pk values
def test_manual_pk(self):
Entry, multipk2 = self.classes.Entry, self.tables.multipk2
mapper(Entry, multipk2)
e = Entry(pk_col_1='pk1', pk_col_2='pk1_related', data='im the data')
session = create_session()
session.add(e)
session.flush()
def test_key_pks(self):
Entry, multipk3 = self.classes.Entry, self.tables.multipk3
mapper(Entry, multipk3)
        e = Entry(primary='pk1', secondary='pk2',
assigned=datetime.date.today(), data='some more data')
session = create_session()
session.add(e)
session.flush()
class ForeignPKTest(fixtures.MappedTest):
"""Detection of the relationship direction on PK joins."""
@classmethod
def define_tables(cls, metadata):
Table("people", metadata,
Column('person', String(10), primary_key=True),
Column('firstname', String(10)),
Column('lastname', String(10)))
Table("peoplesites", metadata,
Column('person', String(10), ForeignKey("people.person"),
primary_key=True),
Column('site', String(10)))
@classmethod
def setup_classes(cls):
class Person(cls.Basic):
pass
class PersonSite(cls.Basic):
pass
def test_basic(self):
peoplesites, PersonSite, Person, people = (self.tables.peoplesites,
self.classes.PersonSite,
self.classes.Person,
self.tables.people)
m1 = mapper(PersonSite, peoplesites)
m2 = mapper(Person, people, properties={
'sites' : relationship(PersonSite)})
sa.orm.configure_mappers()
eq_(list(m2.get_property('sites').synchronize_pairs),
[(people.c.person, peoplesites.c.person)])
p = Person(person='im the key', firstname='asdf')
ps = PersonSite(site='asdf')
p.sites.append(ps)
session = create_session()
session.add(p)
session.flush()
p_count = people.count(people.c.person=='im the key').scalar()
eq_(p_count, 1)
eq_(peoplesites.count(peoplesites.c.person=='im the key').scalar(), 1)
class ClauseAttributesTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users_t', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(30)),
Column('counter', Integer, default=1))
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
User, users_t = cls.classes.User, cls.tables.users_t
mapper(User, users_t)
def test_update(self):
User = self.classes.User
u = User(name='test')
session = create_session()
session.add(u)
session.flush()
eq_(u.counter, 1)
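        # assign a SQL expression to the attribute; the flush emits
        # "counter = counter + 1" and expires the attribute, so the
        # check below triggers exactly one refresh SELECT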
u.counter = User.counter + 1
session.flush()
def go():
            assert (u.counter == 2) is True  # ensure it's not a ClauseElement
self.sql_count_(1, go)
def test_multi_update(self):
User = self.classes.User
u = User(name='test')
session = create_session()
session.add(u)
session.flush()
eq_(u.counter, 1)
u.name = 'test2'
u.counter = User.counter + 1
session.flush()
def go():
eq_(u.name, 'test2')
assert (u.counter == 2) is True
self.sql_count_(1, go)
session.expunge_all()
u = session.query(User).get(u.id)
eq_(u.name, 'test2')
eq_(u.counter, 2)
def test_insert(self):
User = self.classes.User
u = User(name='test', counter=sa.select([5]))
session = create_session()
session.add(u)
session.flush()
assert (u.counter == 5) is True
class PassiveDeletesTest(fixtures.MappedTest):
__requires__ = ('foreign_keys',)
@classmethod
def define_tables(cls, metadata):
Table('mytable', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(30)),
test_needs_fk=True)
Table('myothertable', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('parent_id', Integer),
Column('data', String(30)),
sa.ForeignKeyConstraint(['parent_id'],
['mytable.id'],
ondelete="CASCADE"),
test_needs_fk=True)
@classmethod
def setup_classes(cls):
class MyClass(cls.Basic):
pass
class MyOtherClass(cls.Basic):
pass
def test_basic(self):
myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
self.classes.MyClass,
self.classes.MyOtherClass,
self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
'children':relationship(MyOtherClass,
passive_deletes=True,
cascade="all")})
session = create_session()
mc = MyClass()
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
session.add(mc)
session.flush()
session.expunge_all()
assert myothertable.count().scalar() == 4
mc = session.query(MyClass).get(mc.id)
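        # passive_deletes=True: the ORM does not load the children here;
        # the database-level ON DELETE CASCADE removes the child rows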
session.delete(mc)
session.flush()
assert mytable.count().scalar() == 0
assert myothertable.count().scalar() == 0
@testing.emits_warning(r".*'passive_deletes' is normally configured on one-to-many")
def test_backwards_pd(self):
"""Test that passive_deletes=True disables a delete from an m2o.
        This is not the usual usage and it now raises a warning, but we test
        that it works nonetheless.
"""
myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
self.classes.MyClass,
self.classes.MyOtherClass,
self.tables.mytable)
mapper(MyOtherClass, myothertable, properties={
'myclass':relationship(MyClass, cascade="all, delete", passive_deletes=True)
})
mapper(MyClass, mytable)
session = create_session()
mc = MyClass()
mco = MyOtherClass()
mco.myclass = mc
session.add(mco)
session.flush()
assert mytable.count().scalar() == 1
assert myothertable.count().scalar() == 1
session.expire(mco, ['myclass'])
session.delete(mco)
session.flush()
# mytable wasn't deleted, is the point.
assert mytable.count().scalar() == 1
assert myothertable.count().scalar() == 0
def test_aaa_m2o_emits_warning(self):
myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
self.classes.MyClass,
self.classes.MyOtherClass,
self.tables.mytable)
mapper(MyOtherClass, myothertable, properties={
'myclass':relationship(MyClass, cascade="all, delete", passive_deletes=True)
})
mapper(MyClass, mytable)
assert_raises(sa.exc.SAWarning, sa.orm.configure_mappers)
class BatchDeleteIgnoresRowcountTest(fixtures.DeclarativeMappedTest):
__requires__ = ('foreign_keys',)
@classmethod
def setup_classes(cls):
class A(cls.DeclarativeBasic):
__tablename__ = 'A'
__table_args__ = dict(test_needs_fk=True)
id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('A.id', ondelete='CASCADE'))
def test_delete_both(self):
A = self.classes.A
session = Session(testing.db)
        a1, a2 = A(id=1), A(id=2, parent_id=1)
session.add_all([a1, a2])
session.flush()
session.delete(a1)
session.delete(a2)
# no issue with multi-row count here
session.flush()
class ExtraPassiveDeletesTest(fixtures.MappedTest):
__requires__ = ('foreign_keys',)
@classmethod
def define_tables(cls, metadata):
Table('mytable', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(30)),
test_needs_fk=True)
Table('myothertable', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('parent_id', Integer),
Column('data', String(30)),
# no CASCADE, the same as ON DELETE RESTRICT
sa.ForeignKeyConstraint(['parent_id'],
['mytable.id']),
test_needs_fk=True)
@classmethod
def setup_classes(cls):
class MyClass(cls.Basic):
pass
class MyOtherClass(cls.Basic):
pass
def test_assertions(self):
myothertable, MyOtherClass = self.tables.myothertable, self.classes.MyOtherClass
mapper(MyOtherClass, myothertable)
assert_raises_message(
sa.exc.ArgumentError,
"Can't set passive_deletes='all' in conjunction with 'delete' "
"or 'delete-orphan' cascade",
relationship, MyOtherClass,
passive_deletes='all',
cascade="all"
)
def test_extra_passive(self):
myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
self.classes.MyClass,
self.classes.MyOtherClass,
self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
'children': relationship(MyOtherClass,
passive_deletes='all',
cascade="save-update")})
session = create_session()
mc = MyClass()
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
mc.children.append(MyOtherClass())
session.add(mc)
session.flush()
session.expunge_all()
assert myothertable.count().scalar() == 4
mc = session.query(MyClass).get(mc.id)
session.delete(mc)
assert_raises(sa.exc.DBAPIError, session.flush)
def test_extra_passive_2(self):
myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
self.classes.MyClass,
self.classes.MyOtherClass,
self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
'children': relationship(MyOtherClass,
passive_deletes='all',
cascade="save-update")})
session = create_session()
mc = MyClass()
mc.children.append(MyOtherClass())
session.add(mc)
session.flush()
session.expunge_all()
assert myothertable.count().scalar() == 1
mc = session.query(MyClass).get(mc.id)
session.delete(mc)
mc.children[0].data = 'some new data'
assert_raises(sa.exc.DBAPIError, session.flush)
def test_dont_emit(self):
myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
self.classes.MyClass,
self.classes.MyOtherClass,
self.tables.mytable)
mapper(MyOtherClass, myothertable)
mapper(MyClass, mytable, properties={
'children': relationship(MyOtherClass,
passive_deletes='all',
cascade="save-update")})
session = Session()
mc = MyClass()
session.add(mc)
session.commit()
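        # touch the id attribute so it is already loaded before the delete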
mc.id
session.delete(mc)
# no load for "children" should occur
self.assert_sql_count(testing.db, session.flush, 1)
class ColumnCollisionTest(fixtures.MappedTest):
"""Ensure the mapper doesn't break bind param naming rules on flush."""
@classmethod
def define_tables(cls, metadata):
Table('book', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('book_id', String(50)),
Column('title', String(50))
)
def test_naming(self):
book = self.tables.book
class Book(fixtures.ComparableEntity):
pass
mapper(Book, book)
sess = create_session()
b1 = Book(book_id='abc', title='def')
sess.add(b1)
sess.flush()
b1.title = 'ghi'
sess.flush()
sess.close()
eq_(
sess.query(Book).first(),
Book(book_id='abc', title='ghi')
)
class DefaultTest(fixtures.MappedTest):
"""Exercise mappings on columns with DefaultGenerators.
    Tests that when saving objects whose table contains DefaultGenerators,
    whether python-side, pre-exec or database-side, the newly saved instances
    receive all the default values, either through a post-fetch or by getting
    the pre-exec'ed defaults back from the engine.
"""
@classmethod
def define_tables(cls, metadata):
use_string_defaults = testing.against('postgresql', 'oracle', 'sqlite', 'mssql')
if use_string_defaults:
hohotype = String(30)
hohoval = "im hoho"
althohoval = "im different hoho"
else:
hohotype = Integer
hohoval = 9
althohoval = 15
cls.other['hohoval'] = hohoval
cls.other['althohoval'] = althohoval
dt = Table('default_t', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('hoho', hohotype, server_default=str(hohoval)),
Column('counter', Integer, default=sa.func.char_length("1234567", type_=Integer)),
Column('foober', String(30), default="im foober", onupdate="im the update"),
mysql_engine='MyISAM')
st = Table('secondary_table', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(50)),
mysql_engine='MyISAM')
if testing.against('postgresql', 'oracle'):
dt.append_column(
Column('secondary_id', Integer, sa.Sequence('sec_id_seq'),
unique=True))
st.append_column(
Column('fk_val', Integer,
ForeignKey('default_t.secondary_id')))
elif testing.against('mssql'):
st.append_column(
Column('fk_val', Integer,
ForeignKey('default_t.id')))
else:
st.append_column(
Column('hoho', hohotype, ForeignKey('default_t.hoho')))
@classmethod
def setup_classes(cls):
class Hoho(cls.Comparable):
pass
class Secondary(cls.Comparable):
pass
@testing.fails_on('firebird', 'Data type unknown on the parameter')
def test_insert(self):
althohoval, hohoval, default_t, Hoho = (self.other.althohoval,
self.other.hohoval,
self.tables.default_t,
self.classes.Hoho)
mapper(Hoho, default_t)
h1 = Hoho(hoho=althohoval)
h2 = Hoho(counter=12)
h3 = Hoho(hoho=althohoval, counter=12)
h4 = Hoho()
h5 = Hoho(foober='im the new foober')
session = create_session(autocommit=False)
session.add_all((h1, h2, h3, h4, h5))
session.commit()
eq_(h1.hoho, althohoval)
eq_(h3.hoho, althohoval)
def go():
            # test deferred load of attributes, one select per instance
self.assert_(h2.hoho == h4.hoho == h5.hoho == hohoval)
self.sql_count_(3, go)
def go():
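            # 7 == char_length("1234567"), the column default defined above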
self.assert_(h1.counter == h4.counter == h5.counter == 7)
self.sql_count_(1, go)
def go():
self.assert_(h3.counter == h2.counter == 12)
self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
self.assert_(h5.foober == 'im the new foober')
self.sql_count_(0, go)
session.expunge_all()
(h1, h2, h3, h4, h5) = session.query(Hoho).order_by(Hoho.id).all()
eq_(h1.hoho, althohoval)
eq_(h3.hoho, althohoval)
self.assert_(h2.hoho == h4.hoho == h5.hoho == hohoval)
self.assert_(h3.counter == h2.counter == 12)
self.assert_(h1.counter == h4.counter == h5.counter == 7)
self.assert_(h2.foober == h3.foober == h4.foober == 'im foober')
eq_(h5.foober, 'im the new foober')
@testing.fails_on('firebird', 'Data type unknown on the parameter')
def test_eager_defaults(self):
hohoval, default_t, Hoho = (self.other.hohoval,
self.tables.default_t,
self.classes.Hoho)
mapper(Hoho, default_t, eager_defaults=True)
h1 = Hoho()
session = create_session()
session.add(h1)
session.flush()
self.sql_count_(0, lambda: eq_(h1.hoho, hohoval))
def test_insert_nopostfetch(self):
default_t, Hoho = self.tables.default_t, self.classes.Hoho
# populates from the FetchValues explicitly so there is no
# "post-update"
mapper(Hoho, default_t)
h1 = Hoho(hoho="15", counter=15)
session = create_session()
session.add(h1)
session.flush()
def go():
eq_(h1.hoho, "15")
eq_(h1.counter, 15)
eq_(h1.foober, "im foober")
self.sql_count_(0, go)
@testing.fails_on('firebird', 'Data type unknown on the parameter')
def test_update(self):
default_t, Hoho = self.tables.default_t, self.classes.Hoho
mapper(Hoho, default_t)
h1 = Hoho()
session = create_session()
session.add(h1)
session.flush()
eq_(h1.foober, 'im foober')
h1.counter = 19
session.flush()
eq_(h1.foober, 'im the update')
@testing.fails_on('firebird', 'Data type unknown on the parameter')
def test_used_in_relationship(self):
"""A server-side default can be used as the target of a foreign key"""
Hoho, hohoval, default_t, secondary_table, Secondary = (self.classes.Hoho,
self.other.hohoval,
self.tables.default_t,
self.tables.secondary_table,
self.classes.Secondary)
mapper(Hoho, default_t, properties={
'secondaries':relationship(Secondary, order_by=secondary_table.c.id)})
mapper(Secondary, secondary_table)
h1 = Hoho()
s1 = Secondary(data='s1')
h1.secondaries.append(s1)
session = create_session()
session.add(h1)
session.flush()
session.expunge_all()
eq_(session.query(Hoho).get(h1.id),
Hoho(hoho=hohoval,
secondaries=[
Secondary(data='s1')]))
h1 = session.query(Hoho).get(h1.id)
h1.secondaries.append(Secondary(data='s2'))
session.flush()
session.expunge_all()
eq_(session.query(Hoho).get(h1.id),
Hoho(hoho=hohoval,
secondaries=[
Secondary(data='s1'),
Secondary(data='s2')]))
class ColumnPropertyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('data', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('a', String(50)),
Column('b', String(50))
)
Table('subdata', metadata,
Column('id', Integer, ForeignKey('data.id'), primary_key=True),
Column('c', String(50)),
)
@classmethod
def setup_mappers(cls):
class Data(cls.Basic):
pass
def test_refreshes(self):
Data, data = self.classes.Data, self.tables.data
mapper(Data, data, properties={
'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b)
})
self._test(True)
def test_no_refresh(self):
Data, data = self.classes.Data, self.tables.data
mapper(Data, data, properties={
'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b,
expire_on_flush=False)
})
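        # with expire_on_flush=False the composed value is not expired on
        # flush, so it keeps returning the stale value after b changes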
self._test(False)
def test_refreshes_post_init(self):
Data, data = self.classes.Data, self.tables.data
m = mapper(Data, data)
m.add_property('aplusb', column_property(data.c.a + literal_column("' '") + data.c.b))
self._test(True)
def test_with_inheritance(self):
subdata, data, Data = (self.tables.subdata,
self.tables.data,
self.classes.Data)
class SubData(Data):
pass
mapper(Data, data, properties={
'aplusb':column_property(data.c.a + literal_column("' '") + data.c.b)
})
mapper(SubData, subdata, inherits=Data)
sess = create_session()
sd1 = SubData(a="hello", b="there", c="hi")
sess.add(sd1)
sess.flush()
eq_(sd1.aplusb, "hello there")
def _test(self, expect_expiry):
Data = self.classes.Data
sess = create_session()
d1 = Data(a="hello", b="there")
sess.add(d1)
sess.flush()
eq_(d1.aplusb, "hello there")
d1.b = "bye"
sess.flush()
if expect_expiry:
eq_(d1.aplusb, "hello bye")
else:
eq_(d1.aplusb, "hello there")
d1.b = 'foobar'
d1.aplusb = 'im setting this explicitly'
sess.flush()
eq_(d1.aplusb, "im setting this explicitly")
class OneToManyTest(_fixtures.FixtureTest):
run_inserts = None
def test_one_to_many_1(self):
"""Basic save of one to many."""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
m = mapper(User, users, properties=dict(
addresses = relationship(mapper(Address, addresses), lazy='select')
))
u = User(name= 'one2manytester')
a = Address(email_address='one2many@test.org')
u.addresses.append(a)
a2 = Address(email_address='lala@test.org')
u.addresses.append(a2)
session = create_session()
session.add(u)
session.flush()
user_rows = users.select(users.c.id.in_([u.id])).execute().fetchall()
eq_(user_rows[0].values(), [u.id, 'one2manytester'])
address_rows = addresses.select(
addresses.c.id.in_([a.id, a2.id]),
order_by=[addresses.c.email_address]).execute().fetchall()
eq_(address_rows[0].values(), [a2.id, u.id, 'lala@test.org'])
eq_(address_rows[1].values(), [a.id, u.id, 'one2many@test.org'])
userid = u.id
addressid = a2.id
a2.email_address = 'somethingnew@foo.com'
session.flush()
address_rows = addresses.select(
addresses.c.id == addressid).execute().fetchall()
eq_(address_rows[0].values(),
[addressid, userid, 'somethingnew@foo.com'])
self.assert_(u.id == userid and a2.id == addressid)
def test_one_to_many_2(self):
"""Modifying the child items of an object."""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
m = mapper(User, users, properties=dict(
addresses = relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u1.addresses = []
a1 = Address(email_address='emailaddress1')
u1.addresses.append(a1)
u2 = User(name='user2')
u2.addresses = []
a2 = Address(email_address='emailaddress2')
u2.addresses.append(a2)
a3 = Address(email_address='emailaddress3')
session = create_session()
session.add_all((u1, u2, a3))
session.flush()
# modify user2 directly, append an address to user1.
# upon commit, user2 should be updated, user1 should not
# both address1 and address3 should be updated
u2.name = 'user2modified'
u1.addresses.append(a3)
del u1.addresses[0]
self.assert_sql(testing.db, session.flush, [
("UPDATE users SET name=:name "
"WHERE users.id = :users_id",
{'users_id': u2.id, 'name': 'user2modified'}),
("UPDATE addresses SET user_id=:user_id "
"WHERE addresses.id = :addresses_id",
{'user_id': None, 'addresses_id': a1.id}),
("UPDATE addresses SET user_id=:user_id "
"WHERE addresses.id = :addresses_id",
{'user_id': u1.id, 'addresses_id': a3.id})])
def test_child_move(self):
"""Moving a child from one parent to another, with a delete.
Tests that deleting the first parent properly updates the child with
the new parent. This tests the 'trackparent' option in the attributes
module.
"""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
m = mapper(User, users, properties=dict(
addresses = relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u2 = User(name='user2')
a = Address(email_address='address1')
u1.addresses.append(a)
session = create_session()
session.add_all((u1, u2))
session.flush()
del u1.addresses[0]
u2.addresses.append(a)
session.delete(u1)
session.flush()
session.expunge_all()
u2 = session.query(User).get(u2.id)
eq_(len(u2.addresses), 1)
def test_child_move_2(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
m = mapper(User, users, properties=dict(
addresses = relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u2 = User(name='user2')
a = Address(email_address='address1')
u1.addresses.append(a)
session = create_session()
session.add_all((u1, u2))
session.flush()
del u1.addresses[0]
u2.addresses.append(a)
session.flush()
session.expunge_all()
u2 = session.query(User).get(u2.id)
eq_(len(u2.addresses), 1)
def test_o2m_delete_parent(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
m = mapper(User, users, properties=dict(
address = relationship(mapper(Address, addresses),
lazy='select',
uselist=False)))
u = User(name='one2onetester')
a = Address(email_address='myonlyaddress@foo.com')
u.address = a
session = create_session()
session.add(u)
session.flush()
session.delete(u)
session.flush()
assert a.id is not None
assert a.user_id is None
assert sa.orm.attributes.instance_state(a).key in session.identity_map
assert sa.orm.attributes.instance_state(u).key not in session.identity_map
def test_one_to_one(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
m = mapper(User, users, properties=dict(
address = relationship(mapper(Address, addresses),
lazy='select',
uselist=False)))
u = User(name='one2onetester')
u.address = Address(email_address='myonlyaddress@foo.com')
session = create_session()
session.add(u)
session.flush()
u.name = 'imnew'
session.flush()
u.address.email_address = 'imnew@foo.com'
session.flush()
def test_bidirectional(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
m1 = mapper(User, users)
m2 = mapper(Address, addresses, properties=dict(
user = relationship(m1, lazy='joined', backref='addresses')))
u = User(name='test')
a = Address(email_address='testaddress', user=u)
session = create_session()
session.add(u)
session.flush()
session.delete(u)
session.flush()
def test_double_relationship(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
m2 = mapper(Address, addresses)
m = mapper(User, users, properties={
'boston_addresses' : relationship(m2, primaryjoin=
sa.and_(users.c.id==addresses.c.user_id,
addresses.c.email_address.like('%boston%'))),
'newyork_addresses' : relationship(m2, primaryjoin=
sa.and_(users.c.id==addresses.c.user_id,
addresses.c.email_address.like('%newyork%')))})
u = User(name='u1')
a = Address(email_address='foo@boston.com')
b = Address(email_address='bar@newyork.com')
u.boston_addresses.append(a)
u.newyork_addresses.append(b)
session = create_session()
session.add(u)
session.flush()
class SaveTest(_fixtures.FixtureTest):
run_inserts = None
def test_basic(self):
User, users = self.classes.User, self.tables.users
m = mapper(User, users)
# save two users
u = User(name='savetester')
u2 = User(name='savetester2')
session = create_session()
session.add_all((u, u2))
session.flush()
        # assert the first one retrieves the same object from the identity map
nu = session.query(m).get(u.id)
assert u is nu
# clear out the identity map, so next get forces a SELECT
session.expunge_all()
# check it again, identity should be different but ids the same
nu = session.query(m).get(u.id)
assert u is not nu and u.id == nu.id and nu.name == 'savetester'
# change first users name and save
session = create_session()
session.add(u)
u.name = 'modifiedname'
assert u in session.dirty
session.flush()
# select both
userlist = session.query(User).filter(
users.c.id.in_([u.id, u2.id])).order_by(users.c.name).all()
eq_(u.id, userlist[0].id)
eq_(userlist[0].name, 'modifiedname')
eq_(u2.id, userlist[1].id)
eq_(userlist[1].name, 'savetester2')
def test_synonym(self):
users = self.tables.users
class SUser(fixtures.BasicEntity):
def _get_name(self):
return "User:" + self.name
def _set_name(self, name):
self.name = name + ":User"
syn_name = property(_get_name, _set_name)
mapper(SUser, users, properties={
'syn_name': sa.orm.synonym('name')
})
u = SUser(syn_name="some name")
eq_(u.syn_name, 'User:some name:User')
session = create_session()
session.add(u)
session.flush()
session.expunge_all()
u = session.query(SUser).first()
eq_(u.syn_name, 'User:some name:User')
def test_lazyattr_commit(self):
"""Lazily loaded relationships.
        When a lazy-loaded list is unloaded and a commit occurs, the
        'passive' call on that list does not blow away its value.
"""
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses': relationship(mapper(Address, addresses))})
u = User(name='u1')
u.addresses.append(Address(email_address='u1@e1'))
u.addresses.append(Address(email_address='u1@e2'))
u.addresses.append(Address(email_address='u1@e3'))
u.addresses.append(Address(email_address='u1@e4'))
session = create_session()
session.add(u)
session.flush()
session.expunge_all()
u = session.query(User).one()
u.name = 'newname'
session.flush()
eq_(len(u.addresses), 4)
def test_inherits(self):
"""a user object that also has the users mailing address."""
users, addresses, User = (self.tables.users,
self.tables.addresses,
self.classes.User)
m1 = mapper(User, users)
class AddressUser(User):
pass
# define a mapper for AddressUser that inherits the User.mapper, and
# joins on the id column
mapper(AddressUser, addresses, inherits=m1)
au = AddressUser(name='u', email_address='u@e')
session = create_session()
session.add(au)
session.flush()
session.expunge_all()
rt = session.query(AddressUser).one()
eq_(au.user_id, rt.user_id)
        eq_(au.id, rt.id)
def test_deferred(self):
"""Deferred column operations"""
orders, Order = self.tables.orders, self.classes.Order
mapper(Order, orders, properties={
'description': sa.orm.deferred(orders.c.description)})
        # don't set the deferred attribute, commit the session
o = Order(id=42)
session = create_session(autocommit=False)
session.add(o)
session.commit()
# assert that changes get picked up
o.description = 'foo'
session.commit()
eq_(list(session.execute(orders.select(), mapper=Order)),
[(42, None, None, 'foo', None)])
session.expunge_all()
# assert that a set operation doesn't trigger a load operation
o = session.query(Order).filter(Order.description == 'foo').one()
def go():
o.description = 'hoho'
self.sql_count_(0, go)
session.flush()
eq_(list(session.execute(orders.select(), mapper=Order)),
[(42, None, None, 'hoho', None)])
session.expunge_all()
# test assigning None to an unloaded deferred also works
o = session.query(Order).filter(Order.description == 'hoho').one()
o.description = None
session.flush()
eq_(list(session.execute(orders.select(), mapper=Order)),
[(42, None, None, None, None)])
session.close()
    # why no support on oracle? because oracle doesn't save
# "blank" strings; it saves a single space character.
@testing.fails_on('oracle', 'FIXME: unknown')
def test_dont_update_blanks(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
u = User(name='')
session = create_session()
session.add(u)
session.flush()
session.expunge_all()
u = session.query(User).get(u.id)
u.name = ''
self.sql_count_(0, session.flush)
def test_multi_table_selectable(self):
"""Mapped selectables that span tables.
Also tests redefinition of the keynames for the column properties.
"""
addresses, users, User = (self.tables.addresses,
self.tables.users,
self.classes.User)
usersaddresses = sa.join(users, addresses,
users.c.id == addresses.c.user_id)
m = mapper(User, usersaddresses,
properties=dict(
email = addresses.c.email_address,
foo_id = [users.c.id, addresses.c.user_id]))
u = User(name='multitester', email='multi@test.org')
session = create_session()
session.add(u)
session.flush()
session.expunge_all()
id = m.primary_key_from_instance(u)
u = session.query(User).get(id)
assert u.name == 'multitester'
user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
eq_(user_rows[0].values(), [u.foo_id, 'multitester'])
address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
eq_(address_rows[0].values(), [u.id, u.foo_id, 'multi@test.org'])
u.email = 'lala@hey.com'
u.name = 'imnew'
session.flush()
user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
eq_(user_rows[0].values(), [u.foo_id, 'imnew'])
address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
eq_(address_rows[0].values(), [u.id, u.foo_id, 'lala@hey.com'])
session.expunge_all()
u = session.query(User).get(id)
assert u.name == 'imnew'
def test_history_get(self):
"""The history lazy-fetches data when it wasn't otherwise loaded."""
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties={
'addresses':relationship(Address, cascade="all, delete-orphan")})
mapper(Address, addresses)
u = User(name='u1')
u.addresses.append(Address(email_address='u1@e1'))
u.addresses.append(Address(email_address='u1@e2'))
session = create_session()
session.add(u)
session.flush()
session.expunge_all()
u = session.query(User).get(u.id)
session.delete(u)
session.flush()
assert users.count().scalar() == 0
assert addresses.count().scalar() == 0
def test_batch_mode(self):
"""The 'batch=False' flag on mapper()"""
users, User = self.tables.users, self.classes.User
names = []
class Events(object):
def before_insert(self, mapper, connection, instance):
self.current_instance = instance
names.append(instance.name)
def after_insert(self, mapper, connection, instance):
assert instance is self.current_instance
mapper(User, users, batch=False)
evt = Events()
event.listen(User, "before_insert", evt.before_insert)
event.listen(User, "after_insert", evt.after_insert)
u1 = User(name='user1')
u2 = User(name='user2')
session = create_session()
session.add_all((u1, u2))
session.flush()
u3 = User(name='user3')
u4 = User(name='user4')
u5 = User(name='user5')
session.add_all([u4, u5, u3])
session.flush()
# test insert ordering is maintained
assert names == ['user1', 'user2', 'user4', 'user5', 'user3']
session.expunge_all()
sa.orm.clear_mappers()
m = mapper(User, users)
evt = Events()
event.listen(User, "before_insert", evt.before_insert)
event.listen(User, "after_insert", evt.after_insert)
u1 = User(name='user1')
u2 = User(name='user2')
session.add_all((u1, u2))
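        # with the default batch=True, both before_insert events fire
        # before any after_insert, so the identity check in after_insert
        # fails and the flush raises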
assert_raises(AssertionError, session.flush)
class ManyToOneTest(_fixtures.FixtureTest):
run_inserts = None
def test_m2o_one_to_one(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
# TODO: put assertion in here !!!
m = mapper(Address, addresses, properties=dict(
user = relationship(mapper(User, users), lazy='select', uselist=False)))
session = create_session()
data = [
{'name': 'thesub' , 'email_address': 'bar@foo.com'},
{'name': 'assdkfj' , 'email_address': 'thesdf@asdf.com'},
{'name': 'n4knd' , 'email_address': 'asf3@bar.org'},
{'name': 'v88f4' , 'email_address': 'adsd5@llala.net'},
{'name': 'asdf8d' , 'email_address': 'theater@foo.com'}
]
objects = []
for elem in data:
a = Address()
a.email_address = elem['email_address']
a.user = User()
a.user.name = elem['name']
objects.append(a)
session.add(a)
session.flush()
objects[2].email_address = 'imnew@foo.bar'
objects[3].user = User()
objects[3].user.name = 'imnewlyadded'
self.assert_sql_execution(testing.db,
session.flush,
CompiledSQL("INSERT INTO users (name) VALUES (:name)",
{'name': 'imnewlyadded'} ),
AllOf(
CompiledSQL("UPDATE addresses SET email_address=:email_address "
"WHERE addresses.id = :addresses_id",
lambda ctx: {'email_address': 'imnew@foo.bar',
'addresses_id': objects[2].id}),
CompiledSQL("UPDATE addresses SET user_id=:user_id "
"WHERE addresses.id = :addresses_id",
lambda ctx: {'user_id': objects[3].user.id,
'addresses_id': objects[3].id})
)
)
        result = sa.select([users, addresses],
                           sa.and_(users.c.id==addresses.c.user_id,
                                   addresses.c.id==a.id)).execute()
        eq_(result.first().values(),
            [a.user.id, 'asdf8d', a.id, a.user_id, 'theater@foo.com'])
def test_many_to_one_1(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
m = mapper(Address, addresses, properties=dict(
user = relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
u1 = User(name='user1')
a1.user = u1
session = create_session()
session.add(a1)
session.flush()
session.expunge_all()
a1 = session.query(Address).get(a1.id)
u1 = session.query(User).get(u1.id)
assert a1.user is u1
a1.user = None
session.flush()
session.expunge_all()
a1 = session.query(Address).get(a1.id)
u1 = session.query(User).get(u1.id)
assert a1.user is None
def test_many_to_one_2(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
m = mapper(Address, addresses, properties=dict(
user = relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
a2 = Address(email_address='emailaddress2')
u1 = User(name='user1')
a1.user = u1
session = create_session()
session.add_all((a1, a2))
session.flush()
session.expunge_all()
a1 = session.query(Address).get(a1.id)
a2 = session.query(Address).get(a2.id)
u1 = session.query(User).get(u1.id)
assert a1.user is u1
a1.user = None
a2.user = u1
session.flush()
session.expunge_all()
a1 = session.query(Address).get(a1.id)
a2 = session.query(Address).get(a2.id)
u1 = session.query(User).get(u1.id)
assert a1.user is None
assert a2.user is u1
def test_many_to_one_3(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
m = mapper(Address, addresses, properties=dict(
user = relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
u1 = User(name='user1')
u2 = User(name='user2')
a1.user = u1
session = create_session()
session.add_all((a1, u1, u2))
session.flush()
session.expunge_all()
a1 = session.query(Address).get(a1.id)
u1 = session.query(User).get(u1.id)
u2 = session.query(User).get(u2.id)
assert a1.user is u1
a1.user = u2
session.flush()
session.expunge_all()
a1 = session.query(Address).get(a1.id)
u1 = session.query(User).get(u1.id)
u2 = session.query(User).get(u2.id)
assert a1.user is u2
def test_bidirectional_no_load(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties={
'addresses':relationship(Address, backref='user', lazy='noload')})
mapper(Address, addresses)
# try it on unsaved objects
u1 = User(name='u1')
a1 = Address(email_address='e1')
a1.user = u1
session = create_session()
session.add(u1)
session.flush()
session.expunge_all()
a1 = session.query(Address).get(a1.id)
a1.user = None
session.flush()
session.expunge_all()
assert session.query(Address).get(a1.id).user is None
assert session.query(User).get(u1.id).addresses == []
class ManyToManyTest(_fixtures.FixtureTest):
run_inserts = None
def test_many_to_many(self):
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
m = mapper(Item, items, properties=dict(
keywords=relationship(Keyword,
item_keywords,
lazy='joined',
order_by=keywords.c.name)))
data = [Item,
{'description': 'mm_item1',
'keywords' : (Keyword, [{'name': 'big'},
{'name': 'green'},
{'name': 'purple'},
{'name': 'round'}])},
{'description': 'mm_item2',
'keywords' : (Keyword, [{'name':'blue'},
{'name':'imnew'},
{'name':'round'},
{'name':'small'}])},
{'description': 'mm_item3',
'keywords' : (Keyword, [])},
{'description': 'mm_item4',
'keywords' : (Keyword, [{'name':'big'},
{'name':'blue'},])},
{'description': 'mm_item5',
'keywords' : (Keyword, [{'name':'big'},
{'name':'exacting'},
{'name':'green'}])},
{'description': 'mm_item6',
'keywords' : (Keyword, [{'name':'red'},
{'name':'round'},
{'name':'small'}])}]
session = create_session()
objects = []
_keywords = dict([(k.name, k) for k in session.query(Keyword)])
for elem in data[1:]:
item = Item(description=elem['description'])
objects.append(item)
for spec in elem['keywords'][1]:
keyword_name = spec['name']
try:
kw = _keywords[keyword_name]
except KeyError:
_keywords[keyword_name] = kw = Keyword(name=keyword_name)
item.keywords.append(kw)
session.add_all(objects)
session.flush()
        result = (session.query(Item).
                  filter(Item.description.in_([e['description']
                                               for e in data[1:]])).
                  order_by(Item.description).all())
        self.assert_result(result, *data)
objects[4].description = 'item4updated'
k = Keyword()
k.name = 'yellow'
objects[5].keywords.append(k)
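        # the UPDATE and the keyword INSERT may flush in either order
        # (hence AllOf); the association-row INSERT has to come last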
self.assert_sql_execution(
testing.db,
session.flush,
AllOf(
CompiledSQL("UPDATE items SET description=:description "
"WHERE items.id = :items_id",
{'description': 'item4updated',
'items_id': objects[4].id},
),
CompiledSQL("INSERT INTO keywords (name) "
"VALUES (:name)",
{'name': 'yellow'},
)
),
CompiledSQL("INSERT INTO item_keywords (item_id, keyword_id) "
"VALUES (:item_id, :keyword_id)",
lambda ctx: [{'item_id': objects[5].id,
'keyword_id': k.id}])
)
objects[2].keywords.append(k)
dkid = objects[5].keywords[1].id
del objects[5].keywords[1]
self.assert_sql_execution(
testing.db,
session.flush,
CompiledSQL("DELETE FROM item_keywords "
"WHERE item_keywords.item_id = :item_id AND "
"item_keywords.keyword_id = :keyword_id",
[{'item_id': objects[5].id, 'keyword_id': dkid}]),
CompiledSQL("INSERT INTO item_keywords (item_id, keyword_id) "
"VALUES (:item_id, :keyword_id)",
lambda ctx: [{'item_id': objects[2].id, 'keyword_id': k.id}]
))
session.delete(objects[3])
session.flush()
def test_many_to_many_remove(self):
"""Setting a collection to empty deletes many-to-many rows.
Tests that setting a list-based attribute to '[]' properly affects the
history and allows the many-to-many rows to be deleted
"""
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords = relationship(Keyword, item_keywords, lazy='joined'),
))
i = Item(description='i1')
k1 = Keyword(name='k1')
k2 = Keyword(name='k2')
i.keywords.append(k1)
i.keywords.append(k2)
session = create_session()
session.add(i)
session.flush()
assert item_keywords.count().scalar() == 2
i.keywords = []
session.flush()
assert item_keywords.count().scalar() == 0
def test_scalar(self):
"""sa.dependency won't delete an m2m relationship referencing None."""
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keyword=relationship(Keyword, secondary=item_keywords, uselist=False)))
i = Item(description='x')
session = create_session()
session.add(i)
session.flush()
session.delete(i)
session.flush()
def test_many_to_many_update(self):
"""Assorted history operations on a many to many"""
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords=relationship(Keyword,
secondary=item_keywords,
lazy='joined',
order_by=keywords.c.name)))
k1 = Keyword(name='keyword 1')
k2 = Keyword(name='keyword 2')
k3 = Keyword(name='keyword 3')
item = Item(description='item 1')
item.keywords.extend([k1, k2, k3])
session = create_session()
session.add(item)
session.flush()
item.keywords = []
item.keywords.append(k1)
item.keywords.append(k2)
session.flush()
session.expunge_all()
item = session.query(Item).get(item.id)
assert item.keywords == [k1, k2]
def test_association(self):
"""Basic test of an association object"""
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
class IKAssociation(fixtures.ComparableEntity):
pass
mapper(Keyword, keywords)
        # note that we are breaking a rule here, making a second
        # mapper(Keyword, keywords); the reorganization of mapper
        # construction affected this, but it was fixed again
mapper(IKAssociation, item_keywords,
primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
properties=dict(
keyword=relationship(mapper(Keyword, keywords, non_primary=True),
lazy='joined',
uselist=False,
order_by=keywords.c.name # note here is a valid place where order_by can be used
))) # on a scalar relationship(); to determine eager ordering of
# the parent object within its collection.
mapper(Item, items, properties=dict(
keywords=relationship(IKAssociation, lazy='joined')))
session = create_session()
def fixture():
_kw = dict([(k.name, k) for k in session.query(Keyword)])
for n in ('big', 'green', 'purple', 'round', 'huge',
'violet', 'yellow', 'blue'):
if n not in _kw:
_kw[n] = Keyword(name=n)
def assocs(*names):
return [IKAssociation(keyword=kw)
for kw in [_kw[n] for n in names]]
return [
Item(description='a_item1',
keywords=assocs('big', 'green', 'purple', 'round')),
Item(description='a_item2',
keywords=assocs('huge', 'violet', 'yellow')),
Item(description='a_item3',
keywords=assocs('big', 'blue'))]
session.add_all(fixture())
session.flush()
eq_(fixture(), session.query(Item).order_by(Item.description).all())
class SaveTest2(_fixtures.FixtureTest):
run_inserts = None
def test_m2o_nonmatch(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users)
mapper(Address, addresses, properties=dict(
user = relationship(User, lazy='select', uselist=False)))
session = create_session()
def fixture():
return [
Address(email_address='a1', user=User(name='u1')),
Address(email_address='a2', user=User(name='u2'))]
session.add_all(fixture())
self.assert_sql_execution(
testing.db,
session.flush,
CompiledSQL("INSERT INTO users (name) VALUES (:name)",
{'name': 'u1'}),
CompiledSQL("INSERT INTO users (name) VALUES (:name)",
{'name': 'u2'}),
CompiledSQL("INSERT INTO addresses (user_id, email_address) "
"VALUES (:user_id, :email_address)",
{'user_id': 1, 'email_address': 'a1'}),
CompiledSQL("INSERT INTO addresses (user_id, email_address) "
"VALUES (:user_id, :email_address)",
{'user_id': 2, 'email_address': 'a2'}),
)
class SaveTest3(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('items', metadata,
Column('item_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('item_name', String(50)))
Table('keywords', metadata,
Column('keyword_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50)))
Table('assoc', metadata,
Column('item_id', Integer, ForeignKey("items")),
Column('keyword_id', Integer, ForeignKey("keywords")),
Column('foo', sa.Boolean, default=True))
@classmethod
def setup_classes(cls):
class Keyword(cls.Basic):
pass
class Item(cls.Basic):
pass
def test_manytomany_xtracol_delete(self):
"""A many-to-many on a table that has an extra column can properly delete rows from the table without referencing the extra column"""
keywords, items, assoc, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.assoc,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords = relationship(Keyword, secondary=assoc, lazy='joined'),))
i = Item()
k1 = Keyword()
k2 = Keyword()
i.keywords.append(k1)
i.keywords.append(k2)
session = create_session()
session.add(i)
session.flush()
assert assoc.count().scalar() == 2
i.keywords = []
print i.keywords
session.flush()
assert assoc.count().scalar() == 0
class BooleanColTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1_t', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', String(30)),
Column('value', sa.Boolean))
def test_boolean(self):
t1_t = self.tables.t1_t
# use the regular mapper
class T(fixtures.ComparableEntity):
pass
orm_mapper(T, t1_t, order_by=t1_t.c.id)
sess = create_session()
t1 = T(value=True, name="t1")
t2 = T(value=False, name="t2")
t3 = T(value=True, name="t3")
sess.add_all((t1, t2, t3))
sess.flush()
for clear in (False, True):
if clear:
sess.expunge_all()
eq_(sess.query(T).all(), [T(value=True, name="t1"), T(value=False, name="t2"), T(value=True, name="t3")])
if clear:
sess.expunge_all()
eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
if clear:
sess.expunge_all()
eq_(sess.query(T).filter(T.value==False).all(), [T(value=False, name="t2")])
t2 = sess.query(T).get(t2.id)
t2.value = True
sess.flush()
eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"), T(value=True, name="t2"), T(value=True, name="t3")])
t2.value = False
sess.flush()
eq_(sess.query(T).filter(T.value==True).all(), [T(value=True, name="t1"),T(value=True, name="t3")])
class RowSwitchTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
# parent
Table('t5', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30), nullable=False))
# onetomany
Table('t6', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30), nullable=False),
Column('t5id', Integer, ForeignKey('t5.id'),nullable=False))
# associated
Table('t7', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(30), nullable=False))
#manytomany
Table('t5t7', metadata,
Column('t5id', Integer, ForeignKey('t5.id'),nullable=False),
Column('t7id', Integer, ForeignKey('t7.id'),nullable=False))
@classmethod
def setup_classes(cls):
class T5(cls.Comparable):
pass
class T6(cls.Comparable):
pass
class T7(cls.Comparable):
pass
def test_onetomany(self):
t6, T6, t5, T5 = (self.tables.t6,
self.classes.T6,
self.tables.t5,
self.classes.T5)
mapper(T5, t5, properties={
't6s':relationship(T6, cascade="all, delete-orphan")
})
mapper(T6, t6)
sess = create_session()
o5 = T5(data='some t5', id=1)
o5.t6s.append(T6(data='some t6', id=1))
o5.t6s.append(T6(data='some other t6', id=2))
sess.add(o5)
sess.flush()
eq_(
list(sess.execute(t5.select(), mapper=T5)),
[(1, 'some t5')]
)
eq_(
list(sess.execute(t6.select().order_by(t6.c.id), mapper=T5)),
[(1, 'some t6', 1), (2, 'some other t6', 1)]
)
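        # "row switch": delete o5 and add o6 under the same primary key in a
        # single flush; the unit of work emits an UPDATE of the existing row
        # rather than a DELETE plus INSERT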
o6 = T5(data='some other t5', id=o5.id, t6s=[
T6(data='third t6', id=3),
T6(data='fourth t6', id=4),
])
sess.delete(o5)
sess.add(o6)
sess.flush()
eq_(
list(sess.execute(t5.select(), mapper=T5)),
[(1, 'some other t5')]
)
eq_(
list(sess.execute(t6.select().order_by(t6.c.id), mapper=T5)),
[(3, 'third t6', 1), (4, 'fourth t6', 1)]
)
def test_manytomany(self):
t7, t5, t5t7, T5, T7 = (self.tables.t7,
self.tables.t5,
self.tables.t5t7,
self.classes.T5,
self.classes.T7)
mapper(T5, t5, properties={
't7s':relationship(T7, secondary=t5t7, cascade="all")
})
mapper(T7, t7)
sess = create_session()
o5 = T5(data='some t5', id=1)
o5.t7s.append(T7(data='some t7', id=1))
o5.t7s.append(T7(data='some other t7', id=2))
sess.add(o5)
sess.flush()
assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some t5')]
assert testing.rowset(sess.execute(t5t7.select(), mapper=T5)) == set([(1,1), (1, 2)])
assert list(sess.execute(t7.select(), mapper=T5)) == [(1, 'some t7'), (2, 'some other t7')]
o6 = T5(data='some other t5', id=1, t7s=[
T7(data='third t7', id=3),
T7(data='fourth t7', id=4),
])
sess.delete(o5)
assert o5 in sess.deleted
assert o5.t7s[0] in sess.deleted
assert o5.t7s[1] in sess.deleted
sess.add(o6)
sess.flush()
assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some other t5')]
assert list(sess.execute(t7.select(), mapper=T5)) == [(3, 'third t7'), (4, 'fourth t7')]
def test_manytoone(self):
t6, T6, t5, T5 = (self.tables.t6,
self.classes.T6,
self.tables.t5,
self.classes.T5)
mapper(T6, t6, properties={
't5':relationship(T5)
})
mapper(T5, t5)
sess = create_session()
o5 = T6(data='some t6', id=1)
o5.t5 = T5(data='some t5', id=1)
sess.add(o5)
sess.flush()
assert list(sess.execute(t5.select(), mapper=T5)) == [(1, 'some t5')]
assert list(sess.execute(t6.select(), mapper=T5)) == [(1, 'some t6', 1)]
o6 = T6(data='some other t6', id=1, t5=T5(data='some other t5', id=2))
sess.delete(o5)
sess.delete(o5.t5)
sess.add(o6)
sess.flush()
assert list(sess.execute(t5.select(), mapper=T5)) == [(2, 'some other t5')]
assert list(sess.execute(t6.select(), mapper=T5)) == [(1, 'some other t6', 2)]
class InheritingRowSwitchTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('parent', metadata,
Column('id', Integer, primary_key=True),
Column('pdata', String(30))
)
Table('child', metadata,
Column('id', Integer, primary_key=True),
Column('pid', Integer, ForeignKey('parent.id')),
Column('cdata', String(30))
)
@classmethod
def setup_classes(cls):
class P(cls.Comparable):
pass
class C(P):
pass
def test_row_switch_no_child_table(self):
P, C, parent, child = (self.classes.P,
self.classes.C,
self.tables.parent,
self.tables.child)
mapper(P, parent)
mapper(C, child, inherits=P)
sess = create_session()
c1 = C(id=1, pdata='c1', cdata='c1')
sess.add(c1)
sess.flush()
# establish a row switch between c1 and c2.
# c2 has no value for the "child" table
c2 = C(id=1, pdata='c2')
sess.add(c2)
sess.delete(c1)
self.assert_sql_execution(testing.db, sess.flush,
CompiledSQL("UPDATE parent SET pdata=:pdata WHERE parent.id = :parent_id",
{'pdata':'c2', 'parent_id':1}
),
            # this fires as of [ticket:1362], since we synchronize
# PK/FKs on UPDATES. c2 is new so the history shows up as
# pure added, update occurs. If a future change limits the
# sync operation during _save_obj().update, this is safe to remove again.
CompiledSQL("UPDATE child SET pid=:pid WHERE child.id = :child_id",
{'pid':1, 'child_id':1}
)
)
class TransactionTest(fixtures.MappedTest):
__requires__ = ('deferrable_or_no_constraints',)
@classmethod
def define_tables(cls, metadata):
t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True))
t2 = Table('t2', metadata,
Column('id', Integer, primary_key=True),
Column('t1_id', Integer,
ForeignKey('t1.id', deferrable=True, initially='deferred')
))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
class T2(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
T2, T1, t2, t1 = (cls.classes.T2,
cls.classes.T1,
cls.tables.t2,
cls.tables.t1)
orm_mapper(T1, t1)
orm_mapper(T2, t2)
def test_close_transaction_on_commit_fail(self):
T2, t1 = self.classes.T2, self.tables.t1
session = create_session(autocommit=True)
# with a deferred constraint, this fails at COMMIT time instead
# of at INSERT time.
session.add(T2(t1_id=123))
try:
session.flush()
assert False
except:
# Flush needs to rollback also when commit fails
assert session.transaction is None
# todo: on 8.3 at least, the failed commit seems to close the cursor?
# needs investigation. leaving in the DDL above now to help verify
# that the new deferrable support on FK isn't involved in this issue.
if testing.against('postgresql'):
t1.bind.engine.dispose()
class PartialNullPKTest(fixtures.MappedTest):
# sqlite totally fine with NULLs in pk columns.
# no other DB is like this.
__only_on__ = ('sqlite',)
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
Column('col1', String(10), primary_key=True, nullable=True),
Column('col2', String(10), primary_key=True, nullable=True),
Column('col3', String(50))
)
@classmethod
def setup_classes(cls):
class T1(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
orm_mapper(cls.classes.T1, cls.tables.t1)
def test_key_switch(self):
T1 = self.classes.T1
s = Session()
s.add(T1(col1="1", col2=None))
t1 = s.query(T1).first()
t1.col2 = 5
assert_raises_message(
sa.exc.FlushError,
"Can't update table using NULL for primary key value",
s.commit
)
def test_plain_update(self):
T1 = self.classes.T1
s = Session()
s.add(T1(col1="1", col2=None))
t1 = s.query(T1).first()
t1.col3 = 'hi'
assert_raises_message(
sa.exc.FlushError,
"Can't update table using NULL for primary key value",
s.commit
)
def test_delete(self):
T1 = self.classes.T1
s = Session()
s.add(T1(col1="1", col2=None))
t1 = s.query(T1).first()
s.delete(t1)
assert_raises_message(
sa.exc.FlushError,
"Can't delete from table using NULL for primary key value",
s.commit
)
def test_total_null(self):
T1 = self.classes.T1
s = Session()
s.add(T1(col1=None, col2=None))
assert_raises_message(
sa.exc.FlushError,
r"Instance \<T1 at .+?\> has a NULL "
"identity key. If this is an auto-generated value, "
"check that the database table allows generation ",
s.commit
)
def test_dont_complain_if_no_update(self):
T1 = self.classes.T1
s = Session()
t = T1(col1="1", col2=None)
s.add(t)
s.commit()
t.col1 = "1"
s.commit()
|
{
"content_hash": "345564853469e6cd848b883ea24f27df",
"timestamp": "",
"source": "github",
"line_count": 2519,
"max_line_length": 141,
"avg_line_length": 32.987693529178244,
"alnum_prop": 0.5299533070183884,
"repo_name": "ioram7/keystone-federado-pgid2013",
"id": "725e0c543ed439062ce40c0a2b9977b359c74ddd",
"size": "83115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/sqlalchemy/test/orm/test_unitofwork.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1841"
},
{
"name": "C",
"bytes": "10584735"
},
{
"name": "C++",
"bytes": "19231"
},
{
"name": "CSS",
"bytes": "172341"
},
{
"name": "JavaScript",
"bytes": "530938"
},
{
"name": "Python",
"bytes": "26306359"
},
{
"name": "Shell",
"bytes": "38138"
},
{
"name": "XSLT",
"bytes": "306125"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function, unicode_literals
from generate_travis_yml import generate_travis_yml
if __name__ == '__main__':
generate_travis_yml()
|
{
"content_hash": "9b7e87d61375f58062622aece41258fa",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 82,
"avg_line_length": 27,
"alnum_prop": 0.7248677248677249,
"repo_name": "twitter/pants",
"id": "c128eccd8bfccba16d7bebe299019485f27cb38e",
"size": "336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build-support/travis/generate_travis_yml_main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5639"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "85294"
},
{
"name": "Java",
"bytes": "498956"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "6700799"
},
{
"name": "Rust",
"bytes": "765598"
},
{
"name": "Scala",
"bytes": "89346"
},
{
"name": "Shell",
"bytes": "94395"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from .models import ImagerProfile
# Register your models here.
admin.site.register(ImagerProfile)
|
{
"content_hash": "a7e13724263ce9a3aa979c46ff4ba124",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 34,
"avg_line_length": 32.75,
"alnum_prop": 0.8320610687022901,
"repo_name": "wohlfea/django_imager",
"id": "a61af3908f8da3b706f4818056d1bcfd2bd59803",
"size": "131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "imagersite/imager_profile/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "13103"
},
{
"name": "Python",
"bytes": "50608"
}
],
"symlink_target": ""
}
|
"""
OCR - Controllers
"""
import StringIO
#Importing reportlab stuff
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.pagesizes import A4
# Fonts
Courier = 'Courier'
Helvetica = 'Helvetica'
Helvetica_Bold = 'Helvetica-Bold'
Helvetica_Bold_Oblique = 'Helvetica-BoldOblique'
Helvetica_Oblique = 'Helvetica-Oblique'
class Form:
def __init__(self, pdfname = "ocrform.pdf", margintop = 20, marginsides = 20, **kw):
"""Form initialization"""
self.pdfpath = kw.get('pdfpath', pdfname)
self.verbose = kw.get('verbose', 0)
# set the default font sizes
self.font = kw.get('typeface', Helvetica)
self.fontsize = kw.get('fontsize', 13)
# setting it to A4 for now
self.canvas = Canvas(self.pdfpath, pagesize = A4)
self.width, self.height = A4
self.x = marginsides
self.lastx = marginsides
self.marginsides = marginsides
self.margintop = margintop
self.y = self.height - margintop
def print_text(self, lines, fontsize = 8, gray = 0, seek = 0, continuetext = 0,style = "default"):
"""Give the lines to be printed as a list, set the font and grey level"""
c = self.canvas
self.fontsize = fontsize
if style == "center":
self.x = self.width/2
if seek > (self.width - (self.marginsides + self.fontsize) ) :
seek = 0
if seek != 0 :
self.x = self.x + seek
if continuetext == 1:
self.x = self.lastx + seek
if seek == 0:
self.y = self.y + fontsize
for line in lines:
t = c.beginText(self.x, self.y)
t.setFont(Helvetica, fontsize)
t.setFillGray(gray)
t.textOut(line)
c.drawText(t)
self.y = self.y - fontsize
self.lastx = t.getX()
self.x = self.marginsides
def draw_check_boxes(self, boxes = 1, seek = 0, continuetext = 0, fontsize = 0, gray = 0, style = ""):
"""Function to draw check boxes default no of boxes = 1"""
c = self.canvas
c.setLineWidth(0.20)
c.setStrokeGray(gray)
if style == "center":
self.x = self.width/2
if style == "right":
self.x = self.width - self.marginsides - self.fontsize
if seek > (self.width - (self.marginsides + self.fontsize) ) :
seek = 0
if continuetext == 1:
self.y = self.y + self.fontsize
self.x = self.lastx
else:
self.y = self.y - self.fontsize
if seek != 0 :
self.x = self.x + seek
if fontsize == 0 :
fontsize = self.fontsize
else:
self.fontsize = fontsize
for i in range(boxes):
c.rect(self.x, self.y, self.fontsize, self.fontsize, stroke=1)
self.x = self.x + self.fontsize
if self.x > (self.width - (self.marginsides + self.fontsize) ) :
break
self.x = self.marginsides
self.y = self.y - self.fontsize
def draw_line(self, gray = 0):
"""Function to draw a straight line"""
c = self.canvas
c.setStrokeGray(gray)
c.setLineWidth(0.40)
self.y = self.y - (self.fontsize )
c.line(self.x, self.y, self.width-self.x, self.y)
self.y = self.y - (self.fontsize )
def set_title(self,title = ""):
c = self.canvas.setTitle(title)
def save(self):
self.canvas.save()
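# A minimal sketch of driving Form directly (hypothetical values; the real
# entry point is create() below, which builds the form from a table):
#
#     form = Form(pdfname='example.pdf')
#     form.set_title('Example')
#     form.print_text(['Example'], fontsize = 20, style = "center")
#     form.print_text(['Name:'])
#     form.draw_check_boxes(boxes = 20, gray = 0.9)
#     form.save()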
def create():
"""
Function to create OCRforms from the tables
"""
if len(request.args) == 0:
session.error = T("Need to specify a table!")
redirect(URL(r=request))
output = StringIO.StringIO()
form = Form(pdfname=output)
_table = request.args(0)
title = _table
table = db[_table]
import string
title = string.capitalize(title.rpartition('_')[2])
form.set_title(title)
form.print_text([title], fontsize = 20, style = "center", seek = -100)
form.print_text(["",Tstr("1. Fill the necessary fields in BLOCK letters."),
Tstr("2. Always use one box per letter and leave one box space to seperate words.")], fontsize = 13, gray = 0)
form.draw_line()
form.print_text([""])
for field in table.fields:
if field in ['id', 'created_on', 'modified_on', 'uuid', 'deleted','admin'] :
            # skip internal bookkeeping fields that shouldn't appear on the form
pass
else:
form.print_text([str(table[field].label)],fontsize = 13)
if table[field].type == "integer":
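                # probe represent() with successive integers to enumerate the
                # field's options; stop at the first value it cannot render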
form.print_text([""]) # leave a space
for i in range(100):
try:
choice = str(table[field].represent(i+1))
form.print_text([str(i+1)+". "],seek = 20, fontsize = 12)
form.print_text([choice],continuetext = 1, fontsize = 12)
#form.draw_check_boxes(continuetext=1,style = "center",fontsize = 10, gray = 0.9) # reduce font size by 2
except:
break
form.print_text([""]) # leave a space
form.print_text([Tstr("Put a choice in the box")], fontsize = 13, gray = 0)
form.draw_check_boxes(boxes = 2, continuetext=1, gray = 0.9, fontsize = 20, seek = 10)
form.print_text([""]) # leave a space
else:
form.draw_check_boxes(boxes = table[field].length,fontsize = 20, gray = 0.9)
form.save()
output.seek(0)
import gluon.contenttype
response.headers['Content-Type'] = gluon.contenttype.contenttype('.pdf')
filename = "%s_%s.pdf" % (request.env.server_name, str(table))
response.headers['Content-disposition'] = "attachment; filename=\"%s\"" % filename
return output.read()
|
{
"content_hash": "afcc9834a0b192bfe68d35aa79b13cfe",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 120,
"avg_line_length": 33.675324675324674,
"alnum_prop": 0.6247589664481296,
"repo_name": "luisibanez/SahanaEden",
"id": "8d8fed746e4c7de9fb8ccee1bd852c6c8f32af0d",
"size": "5211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "controllers/ocr.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7006"
},
{
"name": "JavaScript",
"bytes": "24979950"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "2980976"
}
],
"symlink_target": ""
}
|
import math
import pygame
class Simulation:
def __init__(self, width, height, ms_to_minutes):
"""
width - The width of the simulation in arbitrary internal units.
height - The height of the simulation in arbitrary internal units.
ms_to_minutes - Multiplied by the number of milliseconds in
a step to give the number of real time minutes that a step
corresponds to.
"""
self.base_ms_to_minutes = ms_to_minutes
self.zoom = 1
self.minutes_since_start = 0
self.width = width
self.height = height
self.max_distance = math.sqrt(math.pow(width, 2) + math.pow(height, 2))
self.agents = []
self.locations = []
self.goods = []
def step(self, dt_ms):
"""
Called to step the simulation on by the given number of
milliseconds.
This maps to a particular number of simulation minutes as defined
in the constructor.
"""
step_time_minutes = dt_ms * self.base_ms_to_minutes
for i in range(0, self.zoom):
self.minutes_since_start += step_time_minutes
for agent in self.agents:
agent.step(step_time_minutes, self)
for location in self.locations:
location.step(step_time_minutes)
def handle_event(self, event):
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_1:
self.zoom = 1
elif event.key == pygame.K_2:
self.zoom = 2
elif event.key == pygame.K_3:
self.zoom = 5
elif event.key == pygame.K_4:
self.zoom = 10
elif event.key == pygame.K_5:
self.zoom = 100
def time_str(self):
"""
Provides a string representation of the time since the simulation
started.
"""
hours = self.minutes_since_start // 60
minutes = self.minutes_since_start % 60.0
return "{0:d}:{1:d} (x{2:d})".format(int(hours), int(minutes), int(self.zoom))
def total_money(self):
"""
Provides access to information on the total money across all agents
in the simulation.
"""
money = 0
for agent in self.agents:
money += agent.money
return money
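# A minimal sketch of stepping the simulation from a pygame loop (assumes a
# display and clock are set up elsewhere, and that agents/locations have
# been appended to sim.agents and sim.locations beforehand):
#
#     sim = Simulation(width=800, height=600, ms_to_minutes=0.01)
#     clock = pygame.time.Clock()
#     while True:
#         for event in pygame.event.get():
#             sim.handle_event(event)
#         sim.step(clock.tick(60))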
|
{
"content_hash": "7789f1c4fc709c2526f12a622d5b0173",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 89,
"avg_line_length": 31.86842105263158,
"alnum_prop": 0.541701073492981,
"repo_name": "DaveTCode/tradingsim",
"id": "91e5e07c0a141294a5b129c2157a29258ef5d601",
"size": "2422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tradingsim/simulation/simulation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40043"
}
],
"symlink_target": ""
}
|
"""
Public interface for flanker address (email or url) parsing and validation
capabilities.
Public Functions in flanker.addresslib.address module:
* parse(address, addr_spec_only=False)
Parse a single address or URL. Can parse just the address spec or the
full mailbox.
* parse_list(address_list, strict=False, as_tuple=False)
Parse a list of addresses, operates in strict or relaxed modes. Strict
      mode will fail at the first instance of invalid grammar, relaxed mode
      tries to recover and continue.
* validate_address(addr_spec)
Validates (parse, plus dns, mx check, and custom grammar) a single
address spec. In the case of a valid address returns an EmailAddress
object, otherwise returns None.
* validate_list(addr_list, as_tuple=False)
Validates an address list, and returns a tuple of parsed and unparsed
portions.
When valid addresses are returned, they are returned as an instance of either
EmailAddress or UrlAddress in flanker.addresslib.address.
See the parser.py module for implementation details of the parser.
"""
import time
import flanker.addresslib.parser
from flanker.addresslib.quote import smart_unquote, smart_quote
import flanker.addresslib.validate
from flanker.addresslib.parser import MAX_ADDRESS_LENGTH
from flanker.utils import is_pure_ascii
from flanker.utils import metrics_wrapper
from flanker.mime.message.headers.encoding import encode_string
from flanker.mime.message.headers.encodedword import mime_to_unicode
from urlparse import urlparse
@metrics_wrapper()
def parse(address, addr_spec_only=False, metrics=False):
"""
Given a string, returns a scalar object representing a single full
mailbox (display name and addr-spec), addr-spec, or a url.
Returns an Address object and optionally metrics on processing
time if requested.
Examples:
        >>> address.parse('John Smith <john@smith.com>')
John Smith <john@smith.com>
>>> print address.parse('John <john@smith.com>', addr_spec_only=True)
None
>>> print address.parse('john@smith.com', addr_spec_only=True)
'john@smith.com'
>>> address.parse('http://host.com/post?q')
http://host.com/post?q
>>> print address.parse('foo')
None
"""
mtimes = {'parsing': 0}
parser = flanker.addresslib.parser._AddressParser(False)
try:
# addr-spec only
if addr_spec_only:
bstart = time.time()
retval = parser.address_spec(address)
mtimes['parsing'] = time.time() - bstart
return retval, mtimes
# full address
bstart = time.time()
retval = parser.address(address)
mtimes['parsing'] = time.time() - bstart
return retval, mtimes
    # suppress any exceptions and return None
except flanker.addresslib.parser.ParserException:
return None, mtimes
@metrics_wrapper()
def parse_list(address_list, strict=False, as_tuple=False, metrics=False):
"""
    Given a string or list of email addresses and/or urls separated by a
delimiter (comma (,) or semi-colon (;)), returns an AddressList object
(an iterable list representing parsed email addresses and urls).
The Parser operates in strict or relaxed modes. In strict mode the parser
will quit at the first occurrence of error, in relaxed mode the parser
    will attempt to seek to the next known valid location and continue parsing.
The parser can return a list of parsed addresses or a tuple containing
the parsed and unparsed portions. The parser also returns the parsing
time metrics if requested.
Examples:
>>> address.parse_list('A <a@b>')
[A <a@b>]
>>> address.parse_list('A <a@b>, C <d@e>')
[A <a@b>, C <d@e>]
>>> address.parse_list('A <a@b>, C, D <d@e>')
[A <a@b>, D <d@e>]
        >>> address.parse_list('A <a@b>, C, D <d@e>', strict=True)
[A <a@b>]
>>> address.parse_list('A <a@b>, D <d@e>, http://localhost')
[A <a@b>, D <d@e>, http://localhost]
"""
mtimes = {'parsing': 0}
parser = flanker.addresslib.parser._AddressParser(strict)
# if we have a list, transform it into a string first
if isinstance(address_list, list):
address_list = ', '.join(_normalize_address_list(address_list))
# parse
try:
bstart = time.time()
if strict:
p = parser.address_list(address_list)
u = []
else:
p, u = parser.address_list(address_list)
mtimes['parsing'] = time.time() - bstart
except flanker.addresslib.parser.ParserException:
p, u = (AddressList(), [])
# return as tuple or just parsed addresses
if as_tuple:
return p, u, mtimes
return p, mtimes
@metrics_wrapper()
def validate_address(addr_spec, metrics=False):
"""
Given an addr-spec, runs the pre-parser, the parser, DNS MX checks,
MX existence checks, and if available, ESP specific grammar for the
local part.
In the case of a valid address returns an EmailAddress object, otherwise
returns None. If requested, will also return the parsing time metrics.
Examples:
>>> address.validate_address('john@non-existent-domain.com')
None
>>> address.validate_address('user@gmail.com')
None
>>> address.validate_address('user.1234@gmail.com')
user.1234@gmail.com
"""
mtimes = {'parsing': 0, 'mx_lookup': 0,
'dns_lookup': 0, 'mx_conn':0 , 'custom_grammar':0}
# sanity check
if addr_spec is None:
return None, mtimes
if not is_pure_ascii(addr_spec):
return None, mtimes
# preparse address into its parts and perform any ESP specific pre-parsing
addr_parts = flanker.addresslib.validate.preparse_address(addr_spec)
if addr_parts is None:
return None, mtimes
# run parser against address
bstart = time.time()
paddr = parse('@'.join(addr_parts), addr_spec_only=True)
mtimes['parsing'] = time.time() - bstart
if paddr is None:
return None, mtimes
# lookup if this domain has a mail exchanger
exchanger, mx_metrics = \
flanker.addresslib.validate.mail_exchanger_lookup(addr_parts[-1], metrics=True)
mtimes['mx_lookup'] = mx_metrics['mx_lookup']
mtimes['dns_lookup'] = mx_metrics['dns_lookup']
mtimes['mx_conn'] = mx_metrics['mx_conn']
if exchanger is None:
return None, mtimes
# lookup custom local-part grammar if it exists
bstart = time.time()
plugin = flanker.addresslib.validate.plugin_for_esp(exchanger)
mtimes['custom_grammar'] = time.time() - bstart
if plugin and plugin.validate(addr_parts[0]) is False:
return None, mtimes
return paddr, mtimes
@metrics_wrapper()
def validate_list(addr_list, as_tuple=False, metrics=False):
"""
Validates an address list, and returns a tuple of parsed and unparsed
portions.
Returns results as a list or tuple consisting of the parsed addresses
    and unparseable portions. If requested, will also return parsing time
metrics.
Examples:
        >>> address.validate_list('a@mailgun.com, c@mailgun.com')
        [a@mailgun.com, c@mailgun.com]
        >>> address.validate_list('a@mailgun.com, b@example.com')
        [a@mailgun.com]
        >>> address.validate_list('a@mailgun.com, c@mailgun.com, e@example.com', as_tuple=True)
        ([a@mailgun.com, c@mailgun.com], ['e@example.com'])
"""
mtimes = {'parsing': 0, 'mx_lookup': 0,
'dns_lookup': 0, 'mx_conn':0 , 'custom_grammar':0}
if addr_list is None:
return None, mtimes
# parse addresses
bstart = time.time()
parsed_addresses, unparseable = parse_list(addr_list, as_tuple=True)
mtimes['parsing'] = time.time() - bstart
plist = flanker.addresslib.address.AddressList()
ulist = []
# make sure parsed list pass dns and esp grammar
for paddr in parsed_addresses:
# lookup if this domain has a mail exchanger
exchanger, mx_metrics = \
flanker.addresslib.validate.mail_exchanger_lookup(paddr.hostname, metrics=True)
mtimes['mx_lookup'] += mx_metrics['mx_lookup']
mtimes['dns_lookup'] += mx_metrics['dns_lookup']
mtimes['mx_conn'] += mx_metrics['mx_conn']
if exchanger is None:
ulist.append(paddr.full_spec())
continue
# lookup custom local-part grammar if it exists
plugin = flanker.addresslib.validate.plugin_for_esp(exchanger)
bstart = time.time()
if plugin and plugin.validate(paddr.mailbox) is False:
ulist.append(paddr.full_spec())
continue
        mtimes['custom_grammar'] += time.time() - bstart
plist.append(paddr)
# loop over unparsable list and check if any can be fixed with
# preparsing cleanup and if so, run full validator
for unpar in unparseable:
paddr, metrics = validate_address(unpar, metrics=True)
if paddr:
plist.append(paddr)
else:
ulist.append(unpar)
# update all the metrics
for k, v in metrics.iteritems():
            mtimes[k] += v
if as_tuple:
return plist, ulist, mtimes
return plist, mtimes
def is_email(string):
if parse(string, True):
return True
return False
class Address(object):
"""
Base class that represents an address (email or URL). Use it to create
concrete instances of different addresses:
"""
@property
def supports_routing(self):
"""
Indicates that by default this address cannot be routed.
"""
return False
class Type(object):
"""
Enumerates the types of addresses we support:
>>> parse('foo@example.com').addr_type
'email'
>>> parse('http://example.com').addr_type
'url'
"""
Email = 'email'
Url = 'url'
class EmailAddress(Address):
"""
Represents a fully parsed email address with built-in support for MIME
encoding. Note, do not use EmailAddress class directly, use the parse()
or parse_list() functions to return a scalar or iterable list respectively.
Examples:
>>> addr = EmailAddress("Bob Silva", "bob@host.com")
>>> addr.address
'bob@host.com'
>>> addr.hostname
'host.com'
>>> addr.mailbox
'bob'
Display name is always returned in Unicode, i.e. ready to be displayed on
web forms:
>>> addr.display_name
u'Bob Silva'
And full email spec is 100% ASCII, encoded for MIME:
>>> addr.full_spec()
'Bob Silva <bob@host.com>'
"""
__slots__ = ['display_name', 'mailbox', 'hostname', 'address']
def __init__(self, display_name, spec=None, parsed_name=None):
if spec is None:
spec = display_name
display_name = None
assert(spec)
if parsed_name:
self.display_name = smart_unquote(mime_to_unicode(parsed_name))
elif display_name:
self.display_name = display_name
else:
self.display_name = u''
parts = spec.rsplit('@', 1)
self.mailbox = parts[0]
self.hostname = parts[1].lower()
self.address = self.mailbox + "@" + self.hostname
self.addr_type = self.Type.Email
def __repr__(self):
"""
>>> repr(EmailAddress("John Smith", "john@smith.com"))
'John Smith <john@smith.com>'
"""
return self.full_spec()
def __str__(self):
"""
>>> str(EmailAddress("boo@host.com"))
'boo@host.com'
"""
return self.address
@property
def supports_routing(self):
"""
Email addresses can be routed.
"""
return True
def full_spec(self):
"""
Returns a full spec of an email address. Always in ASCII, RFC-2822
compliant, safe to be included into MIME:
>>> EmailAddress("Ev K", "ev@example.com").full_spec()
        'Ev K <ev@example.com>'
>>> EmailAddress("Жека", "ev@example.com").full_spec()
'=?utf-8?b?0JbQtdC60LA=?= <ev@example.com>'
"""
if self.display_name:
encoded_display_name = smart_quote(encode_string(
None, self.display_name, maxlinelen=MAX_ADDRESS_LENGTH))
return '{0} <{1}>'.format(encoded_display_name, self.address)
return u'{0}'.format(self.address)
def to_unicode(self):
"""
Converts to unicode.
"""
if self.display_name:
return u'{0} <{1}>'.format(self.display_name, self.address)
return u'{0}'.format(self.address)
def __cmp__(self, other):
return True
def __eq__(self, other):
"""
Allows comparison of two addresses.
"""
if other:
if isinstance(other, basestring):
other = parse(other)
if not other:
return False
return self.address.lower() == other.address.lower()
return False
def __hash__(self):
"""
Hashing allows using Address objects as keys in collections and compare
them in sets
        >>> a = parse("a@host")
        >>> b = parse("A <A@host>")
>>> hash(a) == hash(b)
True
>>> s = set()
>>> s.add(a)
>>> s.add(b)
>>> len(s)
1
"""
return hash(self.address.lower())
class UrlAddress(Address):
"""
Represents a parsed URL:
>>> url = UrlAddress("http://user@host.com:8080?q=a")
>>> url.hostname
'host.com'
>>> url.port
8080
>>> url.scheme
'http'
>>> str(url)
'http://user@host.com:8080?q=a'
Note: do not create UrlAddress class directly by passing raw "internet
data", use the parse() and parse_list() functions instead.
"""
__slots__ = ['address', 'parse_result']
def __init__(self, spec):
self.address = spec
self.parse_result = urlparse(spec)
self.addr_type = self.Type.Url
@property
def hostname(self):
hostname = self.parse_result.hostname
if hostname:
return hostname.lower()
@property
def port(self):
return self.parse_result.port
@property
def scheme(self):
return self.parse_result.scheme
@property
def path(self):
return self.parse_result.path
def __str__(self):
return self.address
def full_spec(self):
return self.address
def to_unicode(self):
return self.address
def __repr__(self):
return self.address
def __eq__(self, other):
"Allows comparison of two URLs"
if other:
if not isinstance(other, basestring):
other = other.address
return self.address == other
def __hash__(self):
return hash(self.address)
class AddressList(object):
"""
    Keeps the list of addresses. Each address is an EmailAddress or
    UrlAddress (Address-derived) object.
To create a list, use the parse_list method, do not create an
AddressList directly.
    To see if an address is in the list:

    >>> al = parse_list("Bob <bob@host.com>")
    >>> "missing@host.com" in al
    False
    >>> "bob@host.COM" in al
    True
"""
def __init__(self, container=None):
if container is None:
container = []
self.container = container
def append(self, n):
self.container.append(n)
def remove(self, n):
self.container.remove(n)
def __iter__(self):
return iter(self.container)
def __getitem__(self, key):
return self.container[key]
def __len__(self):
return len(self.container)
def __eq__(self, other):
"""
When comparing ourselves to other lists we must ignore order.
"""
if isinstance(other, list):
other = parse_list(other)
return set(self.container) == set(other.container)
def __repr__(self):
return ''.join(['[', self.full_spec(), ']'])
def __add__(self, other):
"""
Adding two AddressLists together yields another AddressList.
"""
if isinstance(other, list):
result = self.container + parse_list(other).container
else:
result = self.container + other.container
return AddressList(result)
def full_spec(self, delimiter=", "):
"""
        Returns a full string which looks pretty much like the original
>>> adl = AddressList("Foo <foo@host.com>, Bar <bar@host.com>")
>>> adl.full_spec(delimiter='; ')
        'Foo <foo@host.com>; Bar <bar@host.com>'
"""
return delimiter.join(addr.full_spec() for addr in self.container)
def to_unicode(self, delimiter=u", "):
return delimiter.join(addr.to_unicode() for addr in self.container)
def to_ascii_list(self):
return [addr.full_spec() for addr in self.container]
@property
def addresses(self):
"""
Returns a list of just addresses, i.e. no names:
>>> adl = AddressList("Foo <foo@host.com>, Bar <bar@host.com>")
>>> adl.addresses
['foo@host.com', 'bar@host.com']
"""
return [addr.address for addr in self.container]
def __str__(self):
return self.full_spec()
@property
def hostnames(self):
"""
Returns a set of hostnames used in addresses in this list.
"""
return set([addr.hostname for addr in self.container])
@property
def addr_types(self):
"""
Returns a set of address types used in addresses in this list.
"""
return set([addr.addr_type for addr in self.container])
def _normalize_address_list(address_list):
parts = []
for addr in address_list:
if isinstance(addr, Address):
parts.append(addr.to_unicode())
if isinstance(addr, basestring):
parts.append(addr)
return parts
|
{
"content_hash": "f6cec3112a8fcafc159549ff1c74a333",
"timestamp": "",
"source": "github",
"line_count": 620,
"max_line_length": 91,
"avg_line_length": 29.532258064516128,
"alnum_prop": 0.5994538503549973,
"repo_name": "aroberts/flanker",
"id": "6fb098d7736b5995427155e8ee00707015434fb0",
"size": "18330",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "flanker/addresslib/address.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "366011"
}
],
"symlink_target": ""
}
|
import sys
import time
import cli
from scapy.all import *
class Proto:
pass
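# Proto instances act as bare attribute namespaces: `const` for constants
# and `stores` for shared runtime state (parsed args, dump file name).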
const = Proto()
stores = Proto()
def GetToday():
return str((time.strftime("%d-%m-%Y-%H%M%S")))
def check_dependencies():
#CHECK FOR DEPENDENCIES
if len(cli.check_sysfile('scapy'))==0:
        print "scapy executable not found. Make sure you have installed scapy (pip install scapy) or this won't work."
return False
if len(cli.check_sysfile('airmon-ng'))==0:
        print "airmon-ng executable not found. Make sure you have installed aircrack-ng (which provides airmon-ng) or this won't work."
return False
else:
return True
def ValidInterface():
avail = False
wlan = stores.args.interface
if stores.args.verbose: print 'Looking for: ' + wlan
if not "mon" in wlan:
        print 'You must select a monitor interface (i.e. mon0, mon1, etc.).'
return
if stores.args.verbose: print 'Verifying wireless interface is available...'
s=cli.execute_shell('ifconfig | grep ' + wlan)
lines = s.splitlines()
if stores.args.verbose: print lines
for line in lines:
if wlan in line:
avail = True
if avail:
if stores.args.verbose: print 'Interface found.'
return True
else:
if stores.args.verbose: print 'Looking a little deeper for that interface you asked for.'
s2=cli.execute_shell('ifconfig -a | grep ' + wlan)
lines = s.splitlines()
if stores.args.verbose: print lines
for line in lines:
if wlan in line:
                if stores.args.verbose: print "Interface was found...but it's not up. You need to fix that."
if stores.args.verbose: print 'Interface NOT found anywhere.'
return False
#Add lines to the dump file
def AddLineToDump(LineToAdd):
    if len(stores.args.dumpfile) > 1: #check that a dump file path was specified
lout = []
lout.append(LineToAdd+'\n')
f = open(stores.dumpfilename,'a')
f.writelines(lout)
f.close()
def sniffReq(p):
if p.haslayer(Dot11Deauth) and not stores.args.deauthignore:
# Look for a deauth packet and print the AP BSSID, Client BSSID and the reason for the deauth.
line = p.sprintf("Deauth Found from AP [%Dot11.addr2%] Client [%Dot11.addr1%], Reason [%Dot11Deauth.reason%]")
print line
AddLineToDump(line)
if p.haslayer(Dot11AssoReq) and not stores.args.assocignore:
# Look for an association request packet and print the Station BSSID, Client BSSID, AP info.
line = p.sprintf("Association request from Station [%Dot11.addr1%], Client [%Dot11.addr2%], AP [%Dot11Elt.info%]")
print line
AddLineToDump(line)
if p.haslayer(Dot11Auth) and not stores.args.authignore:
# Look for an authentication packet and print the Client and AP BSSID
line = p.sprintf("Authentication Request from [%Dot11.addr1%] to AP [%Dot11.addr2%]")
print p.sprintf("------------------------------------------------------------------------------------------")
print line
AddLineToDump(line)
def start_sniff():
sniff(iface=stores.args.interface,prn=sniffReq)
def main(args):
stores.args = args #stores the args in our structure so any function can retrieve them
scpath = os.path.realpath(__file__)
realdir = os.path.dirname(scpath)
cli.arguments = args #initialize args for cli
#announce verbose
if args.verbose: print 'Verbose argument given...prepare to get info...'
#check deps
if not check_dependencies():
print 'Dependency check failed. Please make sure you have all dependencies installed.'
return
#check to make sure that the selected interface is valid
if not ValidInterface():
print 'The interface you selected is not valid or does not exist.'
return
if not stores.args.dumpfile == "": #If a name is specified for the dump file then create it
if stores.args.verbose: print "Dump file argument given. Creating dump file."
stores.dumpfilename = stores.args.dumpfile + "/authwatch-dump"
if not os.path.exists(stores.args.dumpfile):
if stores.args.verbose: print "Path does not exist trying to create it."
cli.execute_shell("mkdir -p " + stores.args.dumpfile)
lout = []
f = open(stores.dumpfilename,'w') #create an empty file
f.writelines(lout)
f.close()
if stores.args.verbose: print "Dump file with the name " + stores.dumpfilename + " successfully created."
print "authwatch running..."
start_sniff()
|
{
"content_hash": "3ed8713d9a901da44d03969e9ba58864",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 116,
"avg_line_length": 34.84166666666667,
"alnum_prop": 0.7084429562305669,
"repo_name": "greenygh0st/authwatch-gg",
"id": "2221529e48aace465212fa16e85d62b6d70af7dd",
"size": "4204",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "watcher.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10078"
},
{
"name": "Shell",
"bytes": "199"
}
],
"symlink_target": ""
}
|
"""This tests for things in source files. Initially, absence of tabs :-)
"""
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, SecureTestCase, GlobDirectoryWalker, printLocation
setOutDir(__name__)
from reportlab.lib.testutils import RL_HOME,testsFolder
__version__=''' $Id$ '''
import os, sys, glob, re
import reportlab
import unittest
from reportlab.lib.utils import open_and_read
class SourceTester(SecureTestCase):
def setUp(self):
SecureTestCase.setUp(self)
try:
fn = __file__
except:
fn = sys.argv[0]
self.output = open(outputfile(os.path.splitext(os.path.basename(fn))[0]+'.txt'),'w')
def checkFileForTabs(self, filename):
txt = open_and_read(filename, 'r')
chunks = txt.split('\t')
tabCount = len(chunks) - 1
if tabCount:
#raise Exception, "File %s contains %d tab characters!" % (filename, tabCount)
self.output.write("file %s contains %d tab characters!\n" % (filename, tabCount))
def checkFileForTrailingSpaces(self, filename):
txt = open_and_read(filename, 'r')
initSize = len(txt)
badLines = 0
badChars = 0
for line in txt.split('\n'):
stripped = line.rstrip()
spaces = len(line) - len(stripped) # OK, so they might be trailing tabs, who cares?
if spaces:
badLines = badLines + 1
badChars = badChars + spaces
if badChars != 0:
self.output.write("file %s contains %d trailing spaces, or %0.2f%% wastage\n" % (filename, badChars, 100.0*badChars/initSize))
def testFiles(self):
w = GlobDirectoryWalker(RL_HOME, '*.py')
for filename in w:
self.checkFileForTabs(filename)
self.checkFileForTrailingSpaces(filename)
def zapTrailingWhitespace(dirname):
"""Eliminates trailing spaces IN PLACE. Use with extreme care
and only after a backup or with version-controlled code."""
assert os.path.isdir(dirname), "Directory not found!"
print("This will eliminate all trailing spaces in py files under %s." % dirname)
ok = input("Shall I proceed? type YES > ")
if ok != 'YES':
print('aborted by user')
return
w = GlobDirectoryWalker(dirname, '*.py')
for filename in w:
        # strip trailing whitespace per line and rewrite the file only if it changed
txt = open(filename, 'r').read()
badChars = 0
cleaned = []
for line in txt.split('\n'):
stripped = line.rstrip()
cleaned.append(stripped)
spaces = len(line) - len(stripped) # OK, so they might be trailing tabs, who cares?
if spaces:
badChars = badChars + spaces
if badChars != 0:
open(filename, 'w').write('\n'.join(cleaned))
print("file %s contained %d trailing spaces, FIXED" % (filename, badChars))
print('done')
def makeSuite():
return makeSuiteForClasses(SourceTester)
#noruntests
if __name__ == "__main__":
if len(sys.argv) == 3 and sys.argv[1] == 'zap' and os.path.isdir(sys.argv[2]):
zapTrailingWhitespace(sys.argv[2])
else:
unittest.TextTestRunner().run(makeSuite())
printLocation()
|
{
"content_hash": "bc16762b229ff2d9442515d2abf8f682",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 138,
"avg_line_length": 36.7752808988764,
"alnum_prop": 0.6119767797128017,
"repo_name": "malexandre/python-xhtml2pdf-demo",
"id": "496d2092499a228bf970667e4bfc44229a1ba576",
"size": "3375",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "reportlab/tests/test_source_chars.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "720407"
},
{
"name": "C++",
"bytes": "2019"
},
{
"name": "CSS",
"bytes": "16419"
},
{
"name": "Java",
"bytes": "6333"
},
{
"name": "Python",
"bytes": "4321122"
},
{
"name": "Shell",
"bytes": "4864"
}
],
"symlink_target": ""
}
|
import json
import threading
import time
from dcp import DcpClient, ResponseHandler
class MyHandler(ResponseHandler):
def __init__(self):
ResponseHandler.__init__(self)
self.lock = threading.Lock()
self.count = 0
def mutation(self, response):
self.lock.acquire()
#print "Mutation: ", response
        self.count += 1
self.lock.release()
def deletion(self, response):
self.lock.acquire()
#print "Deletion: ", response
self.count += 1
self.lock.release()
def marker(self, response):
self.lock.acquire()
#print "Marker: ", response
self.lock.release()
def stream_end(self, response):
self.lock.acquire()
#print "Stream End: ", response
self.lock.release()
def get_num_items(self):
return self.count
def main():
handler = MyHandler()
client = DcpClient()
client.connect('172.23.105.195', 8091, 'bucket-1', 'Administrator', 'password',
handler)
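    # Open a DCP stream on each of the first 8 vbuckets; the positional
    # arguments after the vbucket id are stream options (presumably flags
    # and sequence-number bounds) per this client's add_stream signature.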
for i in range(8):
result = client.add_stream(i, 0, 0, 10, 0, 0, 0)
if result['status'] != 0:
            print 'Stream request to vb %d failed due to error %d' %\
(i, result['status'])
while handler.has_active_streams():
time.sleep(.25)
print handler.get_num_items()
client.close()
#print json.dumps(client.nodes, sort_keys=True, indent=2)
#print json.dumps(client.buckets, sort_keys=True, indent=2)
if __name__ == "__main__":
main()
|
{
"content_hash": "47b1a794c1ab6dd25f393a632e02a5dc",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 83,
"avg_line_length": 25.360655737704917,
"alnum_prop": 0.5817711700064642,
"repo_name": "couchbaselabs/python-dcp-client",
"id": "edda838e50c3d60de428921c695327696154a365",
"size": "1548",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "example.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "22685"
}
],
"symlink_target": ""
}
|
from django.db import connections
from django.db.models.aggregates import Count
from django.utils.unittest import TestCase
from django_orm.postgresql.hstore.functions import HstoreKeys, HstoreSlice, HstorePeek
from django_orm.postgresql.hstore.expressions import HstoreExpression
from .models import DataBag, Ref, RefsBag
class TestDictionaryField(TestCase):
def setUp(self):
DataBag.objects.all().delete()
def _create_bags(self):
alpha = DataBag.objects.create(name='alpha', data={'v': '1', 'v2': '3'})
beta = DataBag.objects.create(name='beta', data={'v': '2', 'v2': '4'})
return alpha, beta
def _create_bitfield_bags(self):
# create dictionaries with bits as dictionary keys (i.e. bag5 = { 'b0':'1', 'b2':'1'})
for i in xrange(10):
DataBag.objects.create(name='bag%d' % (i,),
data=dict(('b%d' % (bit,), '1') for bit in xrange(4) if (1 << bit) & i))
def test_empty_instantiation(self):
bag = DataBag.objects.create(name='bag')
self.assertTrue(isinstance(bag.data, dict))
self.assertEqual(bag.data, {})
def test_named_querying(self):
alpha, beta = self._create_bags()
instance = DataBag.objects.get(name='alpha')
self.assertEqual(instance, alpha)
instance = DataBag.objects.filter(name='beta')[0]
self.assertEqual(instance, beta)
def test_annotations(self):
self._create_bitfield_bags()
queryset = DataBag.objects\
.annotate(num_id=Count('id'))\
.filter(num_id=1)
self.assertEqual(queryset[0].num_id, 1)
def test_unicode_processing(self):
greets = {
u'de': u'Gr\xfc\xdfe, Welt',
u'en': u'hello, world',
u'es': u'hola, ma\xf1ana',
u'he': u'\u05e9\u05dc\u05d5\u05dd, \u05e2\u05d5\u05dc\u05dd',
u'jp': u'\u3053\u3093\u306b\u3061\u306f\u3001\u4e16\u754c',
u'zh': u'\u4f60\u597d\uff0c\u4e16\u754c',
}
DataBag.objects.create(name='multilang', data=greets)
instance = DataBag.objects.get(name='multilang')
self.assertEqual(greets, instance.data)
def test_query_escaping(self):
me = self
def readwrite(s):
            # try to create and query with potentially illegal characters in the field and dictionary key/value
o = DataBag.objects.create(name=s, data={ s: s })
me.assertEqual(o, DataBag.objects.get(name=s, data={ s: s }))
readwrite('\' select')
readwrite('% select')
readwrite('\\\' select')
readwrite('-- select')
readwrite('\n select')
readwrite('\r select')
readwrite('* select')
def test_replace_full_dictionary(self):
DataBag.objects.create(name='foo', data={ 'change': 'old value', 'remove': 'baz'})
replacement = { 'change': 'new value', 'added': 'new'}
DataBag.objects.filter(name='foo').update(data=replacement)
instance = DataBag.objects.get(name='foo')
self.assertEqual(replacement, instance.data)
def test_equivalence_querying(self):
alpha, beta = self._create_bags()
for bag in (alpha, beta):
data = {'v': bag.data['v'], 'v2': bag.data['v2']}
instance = DataBag.objects.get(data=data)
self.assertEqual(instance, bag)
r = DataBag.objects.filter(data=data)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], bag)
def test_hkeys(self):
alpha, beta = self._create_bags()
instance = DataBag.objects.filter(id=alpha.id)
self.assertEqual(instance.hkeys('data'), ['v', 'v2'])
instance = DataBag.objects.filter(id=beta.id)
self.assertEqual(instance.hkeys('data'), ['v', 'v2'])
def test_hkeys_annotation(self):
alpha, beta = self._create_bags()
queryset = DataBag.objects.annotate_functions(keys=HstoreKeys("data"))
self.assertEqual(queryset[0].keys, ['v', 'v2'])
self.assertEqual(queryset[1].keys, ['v', 'v2'])
def test_hpeek(self):
alpha, beta = self._create_bags()
queryset = DataBag.objects.filter(id=alpha.id)
self.assertEqual(queryset.hpeek(attr='data', key='v'), '1')
self.assertEqual(queryset.hpeek(attr='data', key='invalid'), None)
def test_hpeek_annotation(self):
alpha, beta = self._create_bags()
queryset = DataBag.objects.annotate_functions(peeked=HstorePeek("data", "v"))
self.assertEqual(queryset[0].peeked, "1")
self.assertEqual(queryset[1].peeked, "2")
def test_hremove(self):
alpha, beta = self._create_bags()
instance = DataBag.objects.get(name='alpha')
self.assertEqual(instance.data, alpha.data)
DataBag.objects.filter(name='alpha').hremove('data', 'v2')
instance = DataBag.objects.get(name='alpha')
self.assertEqual(instance.data, {'v': '1'})
instance = DataBag.objects.get(name='beta')
self.assertEqual(instance.data, beta.data)
DataBag.objects.filter(name='beta').hremove('data', ['v', 'v2'])
instance = DataBag.objects.get(name='beta')
self.assertEqual(instance.data, {})
def test_hslice(self):
alpha, beta = self._create_bags()
queryset = DataBag.objects.filter(id=alpha.id)
self.assertEqual(queryset.hslice(attr='data', keys=['v']), {'v': '1'})
self.assertEqual(queryset.hslice(attr='data', keys=['invalid']), {})
def test_hslice_annotation(self):
alpha, beta = self._create_bags()
queryset = DataBag.objects.annotate_functions(sliced=HstoreSlice("data", ['v']))
self.assertEqual(queryset.count(), 2)
self.assertEqual(queryset[0].sliced, {'v': '1'})
def test_hupdate(self):
alpha, beta = self._create_bags()
self.assertEqual(DataBag.objects.get(name='alpha').data, alpha.data)
DataBag.objects.filter(name='alpha').hupdate('data', {'v2': '10', 'v3': '20'})
self.assertEqual(DataBag.objects.get(name='alpha').data, {'v': '1', 'v2': '10', 'v3': '20'})
def test_key_value_subset_querying(self):
alpha, beta = self._create_bags()
for bag in (alpha, beta):
qs = DataBag.objects.where(
HstoreExpression("data").contains({'v': bag.data['v']})
)
self.assertEqual(len(qs), 1)
self.assertEqual(qs[0], bag)
qs = DataBag.objects.where(
HstoreExpression("data").contains({'v': bag.data['v'], 'v2': bag.data['v2']})
)
self.assertEqual(len(qs), 1)
self.assertEqual(qs[0], bag)
def test_multiple_key_subset_querying(self):
alpha, beta = self._create_bags()
for keys in (['v'], ['v', 'v2']):
qs = DataBag.objects.where(
HstoreExpression("data").contains(keys)
)
self.assertEqual(qs.count(), 2)
for keys in (['v', 'nv'], ['n1', 'n2']):
qs = DataBag.objects.where(
HstoreExpression("data").contains(keys)
)
self.assertEqual(qs.count(), 0)
def test_single_key_querying(self):
alpha, beta = self._create_bags()
for key in ('v', 'v2'):
qs = DataBag.objects.where(HstoreExpression("data").contains(key))
self.assertEqual(qs.count(), 2)
for key in ('n1', 'n2'):
qs = DataBag.objects.where(HstoreExpression("data").contains(key))
self.assertEqual(qs.count(), 0)
def test_nested_filtering(self):
self._create_bitfield_bags()
        # Test that successive filters accumulate, for hstore lookups and regular fields alike
qs = DataBag.objects.all()
self.assertEqual(10, qs.count())
qs = qs.where(HstoreExpression("data").contains({'b0':'1'}))
self.assertEqual(5, qs.count())
qs = qs.where(HstoreExpression("data").contains({'b1':'1'}))
self.assertEqual(2, qs.count())
qs = qs.filter(name='bag3')
self.assertEqual(1, qs.count())
def test_aggregates(self):
self._create_bitfield_bags()
res = DataBag.objects.where(HstoreExpression("data").contains({'b0':'1'}))\
.aggregate(Count('id'))
self.assertEqual(res['id__count'], 5)
def test_empty_querying(self):
bag = DataBag.objects.create(name='bag')
self.assertTrue(DataBag.objects.get(data={}))
self.assertTrue(DataBag.objects.filter(data={}))
self.assertTrue(DataBag.objects.where(HstoreExpression("data").contains({})))
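    def test_chained_where_and_hpeek_sketch(self):
        # Illustrative sketch, not part of the original suite: it only chains
        # APIs already exercised above (where/contains and queryset hpeek).
        alpha, beta = self._create_bags()
        queryset = DataBag.objects.where(HstoreExpression("data").contains('v'))
        self.assertEqual(queryset.count(), 2)
        self.assertEqual(queryset.filter(id=alpha.id).hpeek(attr='data', key='v'), '1')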
class TestReferencesField(TestCase):
def setUp(self):
Ref.objects.all().delete()
RefsBag.objects.all().delete()
def _create_bags(self):
refs = [Ref.objects.create(name=str(i)) for i in range(4)]
alpha = RefsBag.objects.create(name='alpha', refs={'0': refs[0], '1': refs[1]})
beta = RefsBag.objects.create(name='beta', refs={'0': refs[2], '1': refs[3]})
return alpha, beta, refs
def test_empty_instantiation(self):
bag = RefsBag.objects.create(name='bag')
self.assertTrue(isinstance(bag.refs, dict))
self.assertEqual(bag.refs, {})
def test_equivalence_querying(self):
alpha, beta, refs = self._create_bags()
for bag in (alpha, beta):
refs = {'0': bag.refs['0'], '1': bag.refs['1']}
self.assertEqual(RefsBag.objects.get(refs=refs), bag)
r = RefsBag.objects.filter(refs=refs)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], bag)
def test_hkeys(self):
alpha, beta, refs = self._create_bags()
self.assertEqual(RefsBag.objects.filter(id=alpha.id).hkeys(attr='refs'), ['0', '1'])
def test_hpeek(self):
alpha, beta, refs = self._create_bags()
self.assertEqual(RefsBag.objects.filter(id=alpha.id).hpeek(attr='refs', key='0'), refs[0])
self.assertEqual(RefsBag.objects.filter(id=alpha.id).hpeek(attr='refs', key='invalid'), None)
def test_hslice(self):
alpha, beta, refs = self._create_bags()
self.assertEqual(RefsBag.objects.filter(id=alpha.id).hslice(attr='refs', keys=['0']), {'0': refs[0]})
self.assertEqual(RefsBag.objects.filter(id=alpha.id).hslice(attr='refs', keys=['invalid']), {})
def test_empty_querying(self):
bag = RefsBag.objects.create(name='bag')
self.assertTrue(RefsBag.objects.get(refs={}))
self.assertTrue(RefsBag.objects.filter(refs={}))
# TODO: fix this test
#def test_key_value_subset_querying(self):
# alpha, beta, refs = self._create_bags()
# for bag in (alpha, beta):
# qs = RefsBag.objects.where(
# HstoreExpression("refs").contains({'0': bag.refs['0']})
# )
# self.assertEqual(len(qs), 1)
# self.assertEqual(qs[0], bag)
# qs = RefsBag.objects.where(
# HstoreExpression("refs").contains({'0': bag.refs['0'], '1': bag.refs['1']})
# )
# self.assertEqual(len(qs), 1)
# self.assertEqual(qs[0], bag)
def test_multiple_key_subset_querying(self):
alpha, beta, refs = self._create_bags()
for keys in (['0'], ['0', '1']):
qs = RefsBag.objects.where(HstoreExpression("refs").contains(keys))
self.assertEqual(qs.count(), 2)
for keys in (['0', 'nv'], ['n1', 'n2']):
qs = RefsBag.objects.where(HstoreExpression("refs").contains(keys))
self.assertEqual(qs.count(), 0)
def test_single_key_querying(self):
alpha, beta, refs = self._create_bags()
for key in ('0', '1'):
qs = RefsBag.objects.where(HstoreExpression("refs").contains(key))
self.assertEqual(qs.count(), 2)
for key in ('n1', 'n2'):
qs = RefsBag.objects.where(HstoreExpression("refs").contains(key))
self.assertEqual(qs.count(), 0)
|
{
"content_hash": "81090664ee295da9b9c106f6eb2c21fc",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 109,
"avg_line_length": 38.10126582278481,
"alnum_prop": 0.5866279069767442,
"repo_name": "cr8ivecodesmith/django-orm-extensions-save22",
"id": "6a3c31d4ad3b6bd3ba4e52add19142ef2722a13b",
"size": "12065",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/modeltests/pg_hstore/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "151530"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import datetime
import os
from decimal import Decimal
from unittest import skipUnless
from django import forms
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldError, ImproperlyConfigured,
)
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.validators import ValidationError
from django.db import connection
from django.db.models.query import EmptyQuerySet
from django.forms.models import (
ModelFormMetaclass, construct_instance, fields_for_model, model_to_dict,
modelform_factory,
)
from django.template import Context, Template
from django.test import TestCase, skipUnlessDBFeature
from django.utils import six
from django.utils._os import upath
from .models import (
Article, ArticleStatus, Author, Author1, BetterWriter, BigInt, Book,
Category, Character, Colour, ColourfulItem, CommaSeparatedInteger,
CustomErrorMessage, CustomFF, CustomFieldForExclusionModel, DateTimePost,
DerivedBook, DerivedPost, Document, ExplicitPK, FilePathModel,
FlexibleDatePost, Homepage, ImprovedArticle, ImprovedArticleWithParentLink,
Inventory, Person, Photo, Post, Price, Product, Publication, Student,
StumpJoke, TextFile, Triple, Writer, WriterProfile, test_images,
)
if test_images:
from .models import ImageFile, OptionalImageFile
class ImageFileForm(forms.ModelForm):
class Meta:
model = ImageFile
fields = '__all__'
class OptionalImageFileForm(forms.ModelForm):
class Meta:
model = OptionalImageFile
fields = '__all__'
class ProductForm(forms.ModelForm):
class Meta:
model = Product
fields = '__all__'
class PriceForm(forms.ModelForm):
class Meta:
model = Price
fields = '__all__'
class BookForm(forms.ModelForm):
class Meta:
model = Book
fields = '__all__'
class DerivedBookForm(forms.ModelForm):
class Meta:
model = DerivedBook
fields = '__all__'
class ExplicitPKForm(forms.ModelForm):
class Meta:
model = ExplicitPK
fields = ('key', 'desc',)
class PostForm(forms.ModelForm):
class Meta:
model = Post
fields = '__all__'
class DerivedPostForm(forms.ModelForm):
class Meta:
model = DerivedPost
fields = '__all__'
class CustomWriterForm(forms.ModelForm):
name = forms.CharField(required=False)
class Meta:
model = Writer
fields = '__all__'
class BaseCategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = '__all__'
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = '__all__'
class RoykoForm(forms.ModelForm):
class Meta:
model = Writer
fields = '__all__'
class ArticleStatusForm(forms.ModelForm):
class Meta:
model = ArticleStatus
fields = '__all__'
class InventoryForm(forms.ModelForm):
class Meta:
model = Inventory
fields = '__all__'
class SelectInventoryForm(forms.Form):
items = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
class CustomFieldForExclusionForm(forms.ModelForm):
class Meta:
model = CustomFieldForExclusionModel
fields = ['name', 'markup']
class TextFileForm(forms.ModelForm):
class Meta:
model = TextFile
fields = '__all__'
class BigIntForm(forms.ModelForm):
class Meta:
model = BigInt
fields = '__all__'
class ModelFormWithMedia(forms.ModelForm):
class Media:
js = ('/some/form/javascript',)
css = {
'all': ('/some/form/css',)
}
class Meta:
model = TextFile
fields = '__all__'
class CustomErrorMessageForm(forms.ModelForm):
name1 = forms.CharField(error_messages={'invalid': 'Form custom error message.'})
class Meta:
fields = '__all__'
model = CustomErrorMessage
class ModelFormBaseTest(TestCase):
def test_base_form(self):
self.assertEqual(list(BaseCategoryForm.base_fields),
['name', 'slug', 'url'])
def test_no_model_class(self):
class NoModelModelForm(forms.ModelForm):
pass
self.assertRaises(ValueError, NoModelModelForm)
def test_empty_fields_to_fields_for_model(self):
"""
An argument of fields=() to fields_for_model should return an empty dictionary
"""
field_dict = fields_for_model(Person, fields=())
self.assertEqual(len(field_dict), 0)
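        # Illustrative contrast (assuming Person declares only a 'name'
        # field): with no 'fields' restriction the full mapping comes back.
        self.assertEqual(list(fields_for_model(Person)), ['name'])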
def test_empty_fields_on_modelform(self):
"""
No fields on a ModelForm should actually result in no fields.
"""
class EmptyPersonForm(forms.ModelForm):
class Meta:
model = Person
fields = ()
form = EmptyPersonForm()
self.assertEqual(len(form.fields), 0)
def test_empty_fields_to_construct_instance(self):
"""
No fields should be set on a model instance if construct_instance receives fields=().
"""
form = modelform_factory(Person, fields="__all__")({'name': 'John Doe'})
self.assertTrue(form.is_valid())
instance = construct_instance(form, Person(), fields=())
self.assertEqual(instance.name, '')
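        # Illustrative follow-up sketch: naming the field in 'fields' is what
        # lets construct_instance copy the cleaned value onto the instance.
        instance = construct_instance(form, Person(), fields=('name',))
        self.assertEqual(instance.name, 'John Doe')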
def test_blank_with_null_foreign_key_field(self):
"""
        #13776 -- A ModelForm for a model whose FK has null=False should
        still validate when the form field is overridden with required=False.
"""
class FormForTestingIsValid(forms.ModelForm):
class Meta:
model = Student
fields = '__all__'
def __init__(self, *args, **kwargs):
super(FormForTestingIsValid, self).__init__(*args, **kwargs)
self.fields['character'].required = False
char = Character.objects.create(username='user',
last_action=datetime.datetime.today())
data = {'study': 'Engineering'}
data2 = {'study': 'Engineering', 'character': char.pk}
# form is valid because required=False for field 'character'
f1 = FormForTestingIsValid(data)
self.assertTrue(f1.is_valid())
f2 = FormForTestingIsValid(data2)
self.assertTrue(f2.is_valid())
obj = f2.save()
self.assertEqual(obj.character, char)
def test_missing_fields_attribute(self):
message = (
"Creating a ModelForm without either the 'fields' attribute "
"or the 'exclude' attribute is prohibited; form "
"MissingFieldsForm needs updating."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
class MissingFieldsForm(forms.ModelForm):
class Meta:
model = Category
def test_extra_fields(self):
class ExtraFields(BaseCategoryForm):
some_extra_field = forms.BooleanField()
self.assertEqual(list(ExtraFields.base_fields),
['name', 'slug', 'url', 'some_extra_field'])
def test_extra_field_model_form(self):
try:
class ExtraPersonForm(forms.ModelForm):
""" ModelForm with an extra field """
age = forms.IntegerField()
class Meta:
model = Person
fields = ('name', 'no-field')
except FieldError as e:
# Make sure the exception contains some reference to the
# field responsible for the problem.
self.assertIn('no-field', e.args[0])
else:
self.fail('Invalid "no-field" field not caught')
def test_extra_declared_field_model_form(self):
try:
class ExtraPersonForm(forms.ModelForm):
""" ModelForm with an extra field """
age = forms.IntegerField()
class Meta:
model = Person
fields = ('name', 'age')
except FieldError:
self.fail('Declarative field raised FieldError incorrectly')
def test_extra_field_modelform_factory(self):
self.assertRaises(FieldError, modelform_factory,
Person, fields=['no-field', 'name'])
def test_replace_field(self):
class ReplaceField(forms.ModelForm):
url = forms.BooleanField()
class Meta:
model = Category
fields = '__all__'
self.assertIsInstance(ReplaceField.base_fields['url'],
forms.fields.BooleanField)
def test_replace_field_variant_2(self):
# Should have the same result as before,
# but 'fields' attribute specified differently
class ReplaceField(forms.ModelForm):
url = forms.BooleanField()
class Meta:
model = Category
fields = ['url']
self.assertIsInstance(ReplaceField.base_fields['url'],
forms.fields.BooleanField)
def test_replace_field_variant_3(self):
# Should have the same result as before,
# but 'fields' attribute specified differently
class ReplaceField(forms.ModelForm):
url = forms.BooleanField()
class Meta:
model = Category
fields = [] # url will still appear, since it is explicit above
self.assertIsInstance(ReplaceField.base_fields['url'],
forms.fields.BooleanField)
def test_override_field(self):
class WriterForm(forms.ModelForm):
book = forms.CharField(required=False)
class Meta:
model = Writer
fields = '__all__'
wf = WriterForm({'name': 'Richard Lockridge'})
self.assertTrue(wf.is_valid())
def test_limit_nonexistent_field(self):
expected_msg = 'Unknown field(s) (nonexistent) specified for Category'
with self.assertRaisesMessage(FieldError, expected_msg):
class InvalidCategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = ['nonexistent']
def test_limit_fields_with_string(self):
expected_msg = "CategoryForm.Meta.fields cannot be a string. Did you mean to type: ('url',)?"
with self.assertRaisesMessage(TypeError, expected_msg):
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = ('url') # note the missing comma
def test_exclude_fields(self):
class ExcludeFields(forms.ModelForm):
class Meta:
model = Category
exclude = ['url']
self.assertEqual(list(ExcludeFields.base_fields),
['name', 'slug'])
def test_exclude_nonexistent_field(self):
class ExcludeFields(forms.ModelForm):
class Meta:
model = Category
exclude = ['nonexistent']
self.assertEqual(list(ExcludeFields.base_fields),
['name', 'slug', 'url'])
def test_exclude_fields_with_string(self):
expected_msg = "CategoryForm.Meta.exclude cannot be a string. Did you mean to type: ('url',)?"
with self.assertRaisesMessage(TypeError, expected_msg):
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
exclude = ('url') # note the missing comma
def test_exclude_and_validation(self):
# This Price instance generated by this form is not valid because the quantity
# field is required, but the form is valid because the field is excluded from
# the form. This is for backwards compatibility.
class PriceFormWithoutQuantity(forms.ModelForm):
class Meta:
model = Price
exclude = ('quantity',)
form = PriceFormWithoutQuantity({'price': '6.00'})
self.assertTrue(form.is_valid())
price = form.save(commit=False)
with self.assertRaises(ValidationError):
price.full_clean()
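        # Illustrative sketch: excluding the same missing field from model
        # validation as well lets full_clean() pass for this instance.
        price.full_clean(exclude=['quantity'])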
# The form should not validate fields that it doesn't contain even if they are
# specified using 'fields', not 'exclude'.
class PriceFormWithoutQuantity(forms.ModelForm):
class Meta:
model = Price
fields = ('price',)
form = PriceFormWithoutQuantity({'price': '6.00'})
self.assertTrue(form.is_valid())
# The form should still have an instance of a model that is not complete and
# not saved into a DB yet.
self.assertEqual(form.instance.price, Decimal('6.00'))
self.assertIsNone(form.instance.quantity)
self.assertIsNone(form.instance.pk)
def test_confused_form(self):
class ConfusedForm(forms.ModelForm):
""" Using 'fields' *and* 'exclude'. Not sure why you'd want to do
this, but uh, "be liberal in what you accept" and all.
"""
class Meta:
model = Category
fields = ['name', 'url']
exclude = ['url']
self.assertEqual(list(ConfusedForm.base_fields),
['name'])
def test_mixmodel_form(self):
class MixModelForm(BaseCategoryForm):
""" Don't allow more than one 'model' definition in the
inheritance hierarchy. Technically, it would generate a valid
form, but the fact that the resulting save method won't deal with
multiple objects is likely to trip up people not familiar with the
mechanics.
"""
class Meta:
model = Article
fields = '__all__'
# MixModelForm is now an Article-related thing, because MixModelForm.Meta
# overrides BaseCategoryForm.Meta.
self.assertEqual(
list(MixModelForm.base_fields),
['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
)
def test_article_form(self):
self.assertEqual(
list(ArticleForm.base_fields),
['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
)
def test_bad_form(self):
# First class with a Meta class wins...
class BadForm(ArticleForm, BaseCategoryForm):
pass
self.assertEqual(
list(BadForm.base_fields),
['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
)
def test_invalid_meta_model(self):
class InvalidModelForm(forms.ModelForm):
class Meta:
pass # no model
# Can't create new form
with self.assertRaises(ValueError):
InvalidModelForm()
# Even if you provide a model instance
with self.assertRaises(ValueError):
InvalidModelForm(instance=Category)
def test_subcategory_form(self):
class SubCategoryForm(BaseCategoryForm):
""" Subclassing without specifying a Meta on the class will use
the parent's Meta (or the first parent in the MRO if there are
multiple parent classes).
"""
pass
self.assertEqual(list(SubCategoryForm.base_fields),
['name', 'slug', 'url'])
def test_subclassmeta_form(self):
class SomeCategoryForm(forms.ModelForm):
checkbox = forms.BooleanField()
class Meta:
model = Category
fields = '__all__'
class SubclassMeta(SomeCategoryForm):
""" We can also subclass the Meta inner class to change the fields
list.
"""
class Meta(SomeCategoryForm.Meta):
exclude = ['url']
self.assertHTMLEqual(
str(SubclassMeta()),
"""<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_checkbox">Checkbox:</label></th><td><input type="checkbox" name="checkbox" id="id_checkbox" /></td></tr>"""
)
def test_orderfields_form(self):
class OrderFields(forms.ModelForm):
class Meta:
model = Category
fields = ['url', 'name']
self.assertEqual(list(OrderFields.base_fields),
['url', 'name'])
self.assertHTMLEqual(
str(OrderFields()),
"""<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>"""
)
def test_orderfields2_form(self):
class OrderFields2(forms.ModelForm):
class Meta:
model = Category
fields = ['slug', 'url', 'name']
exclude = ['url']
self.assertEqual(list(OrderFields2.base_fields),
['slug', 'name'])
class FieldOverridesByFormMetaForm(forms.ModelForm):
class Meta:
model = Category
fields = ['name', 'url', 'slug']
widgets = {
'name': forms.Textarea,
'url': forms.TextInput(attrs={'class': 'url'})
}
labels = {
'name': 'Title',
}
help_texts = {
'slug': 'Watch out! Letters, numbers, underscores and hyphens only.',
}
error_messages = {
'slug': {
'invalid': (
"Didn't you read the help text? "
"We said letters, numbers, underscores and hyphens only!"
)
}
}
class TestFieldOverridesByFormMeta(TestCase):
def test_widget_overrides(self):
form = FieldOverridesByFormMetaForm()
self.assertHTMLEqual(
str(form['name']),
'<textarea id="id_name" rows="10" cols="40" name="name" maxlength="20"></textarea>',
)
self.assertHTMLEqual(
str(form['url']),
'<input id="id_url" type="text" class="url" name="url" maxlength="40" />',
)
self.assertHTMLEqual(
str(form['slug']),
'<input id="id_slug" type="text" name="slug" maxlength="20" />',
)
def test_label_overrides(self):
form = FieldOverridesByFormMetaForm()
self.assertHTMLEqual(
str(form['name'].label_tag()),
'<label for="id_name">Title:</label>',
)
self.assertHTMLEqual(
str(form['url'].label_tag()),
'<label for="id_url">The URL:</label>',
)
self.assertHTMLEqual(
str(form['slug'].label_tag()),
'<label for="id_slug">Slug:</label>',
)
def test_help_text_overrides(self):
form = FieldOverridesByFormMetaForm()
self.assertEqual(
form['slug'].help_text,
'Watch out! Letters, numbers, underscores and hyphens only.',
)
def test_error_messages_overrides(self):
form = FieldOverridesByFormMetaForm(data={
'name': 'Category',
'url': '/category/',
'slug': '!%#*@',
})
form.full_clean()
error = [
"Didn't you read the help text? "
"We said letters, numbers, underscores and hyphens only!",
]
self.assertEqual(form.errors, {'slug': error})
class IncompleteCategoryFormWithFields(forms.ModelForm):
"""
A form that replaces the model's url field with a custom one. This should
prevent the model field's validation from being called.
"""
url = forms.CharField(required=False)
class Meta:
fields = ('name', 'slug')
model = Category
class IncompleteCategoryFormWithExclude(forms.ModelForm):
"""
A form that replaces the model's url field with a custom one. This should
prevent the model field's validation from being called.
"""
url = forms.CharField(required=False)
class Meta:
exclude = ['url']
model = Category
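# Both classes above rely on the same mechanism: once the form declares its
# own 'url' field (or excludes the model's), ModelForm validation never runs
# the url model field's validators, which is what ValidationTest verifies.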
class ValidationTest(TestCase):
def test_validates_with_replaced_field_not_specified(self):
form = IncompleteCategoryFormWithFields(data={'name': 'some name', 'slug': 'some-slug'})
        self.assertTrue(form.is_valid())
def test_validates_with_replaced_field_excluded(self):
form = IncompleteCategoryFormWithExclude(data={'name': 'some name', 'slug': 'some-slug'})
        self.assertTrue(form.is_valid())
def test_notrequired_overrides_notblank(self):
form = CustomWriterForm({})
        self.assertTrue(form.is_valid())
class UniqueTest(TestCase):
"""
unique/unique_together validation.
"""
def setUp(self):
self.writer = Writer.objects.create(name='Mike Royko')
def test_simple_unique(self):
form = ProductForm({'slug': 'teddy-bear-blue'})
self.assertTrue(form.is_valid())
obj = form.save()
form = ProductForm({'slug': 'teddy-bear-blue'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Product with this Slug already exists.'])
form = ProductForm({'slug': 'teddy-bear-blue'}, instance=obj)
self.assertTrue(form.is_valid())
def test_unique_together(self):
"""ModelForm test of unique_together constraint"""
form = PriceForm({'price': '6.00', 'quantity': '1'})
self.assertTrue(form.is_valid())
form.save()
form = PriceForm({'price': '6.00', 'quantity': '1'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Price with this Price and Quantity already exists.'])
def test_multiple_field_unique_together(self):
"""
When the same field is involved in multiple unique_together
constraints, we need to make sure we don't remove the data for it
before doing all the validation checking (not just failing after
the first one).
"""
class TripleForm(forms.ModelForm):
class Meta:
model = Triple
fields = '__all__'
Triple.objects.create(left=1, middle=2, right=3)
form = TripleForm({'left': '1', 'middle': '2', 'right': '3'})
self.assertFalse(form.is_valid())
form = TripleForm({'left': '1', 'middle': '3', 'right': '1'})
self.assertTrue(form.is_valid())
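        # Illustrative extra case (assuming the model's unique_together is
        # (('left', 'middle'), ('middle', 'right'))): clashing on only the
        # second pair must be reported as well.
        form = TripleForm({'left': '9', 'middle': '2', 'right': '3'})
        self.assertFalse(form.is_valid())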
@skipUnlessDBFeature('supports_nullable_unique_constraints')
def test_unique_null(self):
title = 'I May Be Wrong But I Doubt It'
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertTrue(form.is_valid())
form.save()
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
form = BookForm({'title': title})
self.assertTrue(form.is_valid())
form.save()
form = BookForm({'title': title})
self.assertTrue(form.is_valid())
def test_inherited_unique(self):
title = 'Boss'
Book.objects.create(title=title, author=self.writer, special_id=1)
form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'special_id': '1', 'isbn': '12345'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['special_id'], ['Book with this Special id already exists.'])
def test_inherited_unique_together(self):
title = 'Boss'
form = BookForm({'title': title, 'author': self.writer.pk})
self.assertTrue(form.is_valid())
form.save()
form = DerivedBookForm({'title': title, 'author': self.writer.pk, 'isbn': '12345'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
def test_abstract_inherited_unique(self):
title = 'Boss'
isbn = '12345'
DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'isbn': isbn})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['isbn'], ['Derived book with this Isbn already exists.'])
def test_abstract_inherited_unique_together(self):
title = 'Boss'
isbn = '12345'
DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({
'title': 'Other',
'author': self.writer.pk,
'isbn': '9876',
'suffix1': '0',
'suffix2': '0'
})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'],
['Derived book with this Suffix1 and Suffix2 already exists.'])
def test_explicitpk_unspecified(self):
"""Test for primary_key being in the form and failing validation."""
form = ExplicitPKForm({'key': '', 'desc': ''})
self.assertFalse(form.is_valid())
def test_explicitpk_unique(self):
"""Ensure keys and blank character strings are tested for uniqueness."""
form = ExplicitPKForm({'key': 'key1', 'desc': ''})
self.assertTrue(form.is_valid())
form.save()
form = ExplicitPKForm({'key': 'key1', 'desc': ''})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 3)
self.assertEqual(form.errors['__all__'], ['Explicit pk with this Key and Desc already exists.'])
self.assertEqual(form.errors['desc'], ['Explicit pk with this Desc already exists.'])
self.assertEqual(form.errors['key'], ['Explicit pk with this Key already exists.'])
def test_unique_for_date(self):
p = Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
form = PostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
self.assertTrue(form.is_valid())
form = PostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
form = PostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
form = PostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
self.assertTrue(form.is_valid())
form = PostForm({'title': "Django 1.0 is released"})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['posted'], ['This field is required.'])
def test_unique_for_date_in_exclude(self):
"""
If the date for unique_for_* constraints is excluded from the
        ModelForm (in this case 'posted' has editable=False), then the
constraint should be ignored.
"""
class DateTimePostForm(forms.ModelForm):
class Meta:
model = DateTimePost
fields = '__all__'
DateTimePost.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally",
posted=datetime.datetime(2008, 9, 3, 10, 10, 1))
# 'title' has unique_for_date='posted'
form = DateTimePostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
# 'slug' has unique_for_year='posted'
form = DateTimePostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertTrue(form.is_valid())
# 'subtitle' has unique_for_month='posted'
form = DateTimePostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertTrue(form.is_valid())
def test_inherited_unique_for_date(self):
p = Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
form = DerivedPostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
self.assertTrue(form.is_valid())
form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
self.assertTrue(form.is_valid())
form = DerivedPostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
form = DerivedPostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
form = DerivedPostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
self.assertTrue(form.is_valid())
def test_unique_for_date_with_nullable_date(self):
class FlexDatePostForm(forms.ModelForm):
class Meta:
model = FlexibleDatePost
fields = '__all__'
p = FlexibleDatePost.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = FlexDatePostForm({'title': "Django 1.0 is released"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'slug': "Django 1.0"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'subtitle': "Finally"})
self.assertTrue(form.is_valid())
form = FlexDatePostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
"slug": "Django 1.0"}, instance=p)
self.assertTrue(form.is_valid())
def test_override_unique_message(self):
class CustomProductForm(ProductForm):
class Meta(ProductForm.Meta):
error_messages = {
'slug': {
'unique': "%(model_name)s's %(field_label)s not unique.",
}
}
Product.objects.create(slug='teddy-bear-blue')
form = CustomProductForm({'slug': 'teddy-bear-blue'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['slug'], ["Product's Slug not unique."])
def test_override_unique_together_message(self):
class CustomPriceForm(PriceForm):
class Meta(PriceForm.Meta):
error_messages = {
NON_FIELD_ERRORS: {
'unique_together': "%(model_name)s's %(field_labels)s not unique.",
}
}
Price.objects.create(price=6.00, quantity=1)
form = CustomPriceForm({'price': '6.00', 'quantity': '1'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors[NON_FIELD_ERRORS], ["Price's Price and Quantity not unique."])
def test_override_unique_for_date_message(self):
class CustomPostForm(PostForm):
class Meta(PostForm.Meta):
error_messages = {
'title': {
'unique_for_date': "%(model_name)s's %(field_label)s not unique for %(date_field_label)s date.",
}
}
Post.objects.create(title="Django 1.0 is released",
slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
form = CustomPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['title'], ["Post's Title not unique for Posted date."])
class ModelToDictTests(TestCase):
"""
Tests for forms.models.model_to_dict
"""
def test_model_to_dict_many_to_many(self):
categories = [
Category(name='TestName1', slug='TestName1', url='url1'),
Category(name='TestName2', slug='TestName2', url='url2'),
Category(name='TestName3', slug='TestName3', url='url3')
]
for c in categories:
c.save()
writer = Writer(name='Test writer')
writer.save()
art = Article(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=writer,
article='Hello.'
)
art.save()
for c in categories:
art.categories.add(c)
art.save()
with self.assertNumQueries(1):
d = model_to_dict(art)
# Ensure all many-to-many categories appear in model_to_dict
for c in categories:
self.assertIn(c.pk, d['categories'])
# Ensure many-to-many relation appears as a list
self.assertIsInstance(d['categories'], list)
def test_reuse_prefetched(self):
# model_to_dict should not hit the database if it can reuse
# the data populated by prefetch_related.
categories = [
Category(name='TestName1', slug='TestName1', url='url1'),
Category(name='TestName2', slug='TestName2', url='url2'),
Category(name='TestName3', slug='TestName3', url='url3')
]
for c in categories:
c.save()
writer = Writer(name='Test writer')
writer.save()
art = Article(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=writer,
article='Hello.'
)
art.save()
for c in categories:
art.categories.add(c)
art = Article.objects.prefetch_related('categories').get(pk=art.pk)
with self.assertNumQueries(0):
d = model_to_dict(art)
# Ensure all many-to-many categories appear in model_to_dict
for c in categories:
self.assertIn(c.pk, d['categories'])
# Ensure many-to-many relation appears as a list
self.assertIsInstance(d['categories'], list)
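        # Illustrative sketch: model_to_dict also accepts a 'fields' whitelist,
        # which sidesteps the m2m handling exercised above entirely.
        self.assertEqual(model_to_dict(art, fields=['headline']),
                         {'headline': 'Test article'})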
class ModelFormBasicTests(TestCase):
def create_basic_data(self):
self.c1 = Category.objects.create(
name="Entertainment", slug="entertainment", url="entertainment")
self.c2 = Category.objects.create(
name="It's a test", slug="its-test", url="test")
self.c3 = Category.objects.create(
name="Third test", slug="third-test", url="third")
self.w_royko = Writer.objects.create(name='Mike Royko')
self.w_woodward = Writer.objects.create(name='Bob Woodward')
def test_base_form(self):
self.assertEqual(Category.objects.count(), 0)
f = BaseCategoryForm()
self.assertHTMLEqual(
str(f),
"""<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>"""
)
self.assertHTMLEqual(
str(f.as_ul()),
"""<li><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="20" /></li>
<li><label for="id_slug">Slug:</label> <input id="id_slug" type="text" name="slug" maxlength="20" /></li>
<li><label for="id_url">The URL:</label> <input id="id_url" type="text" name="url" maxlength="40" /></li>"""
)
self.assertHTMLEqual(
str(f["name"]),
"""<input id="id_name" type="text" name="name" maxlength="20" />""")
def test_auto_id(self):
f = BaseCategoryForm(auto_id=False)
self.assertHTMLEqual(
str(f.as_ul()),
"""<li>Name: <input type="text" name="name" maxlength="20" /></li>
<li>Slug: <input type="text" name="slug" maxlength="20" /></li>
<li>The URL: <input type="text" name="url" maxlength="40" /></li>"""
)
def test_initial_values(self):
self.create_basic_data()
# Initial values can be provided for model forms
f = ArticleForm(
auto_id=False,
initial={
'headline': 'Your headline here',
'categories': [str(self.c1.id), str(self.c2.id)]
})
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Your headline here" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s" selected="selected">It's a test</option>
<option value="%s">Third test</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
# When the ModelForm is passed an instance, that instance's current values are
# inserted as 'initial' data in each Field.
f = RoykoForm(auto_id=False, instance=self.w_royko)
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br /><span class="helptext">Use both first and last names.</span></td></tr>''')
art = Article.objects.create(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=self.w_royko,
article='Hello.'
)
art_id_1 = art.id
f = ArticleForm(auto_id=False, instance=art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Test article" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="test-article" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
f = ArticleForm({
'headline': 'Test headline',
'slug': 'test-headline',
'pub_date': '1984-02-06',
'writer': six.text_type(self.w_royko.pk),
'article': 'Hello.'
}, instance=art)
self.assertEqual(f.errors, {})
self.assertTrue(f.is_valid())
test_art = f.save()
self.assertEqual(test_art.id, art_id_1)
test_art = Article.objects.get(id=art_id_1)
self.assertEqual(test_art.headline, 'Test headline')
def test_m2m_initial_callable(self):
"""
Regression for #10349: A callable can be provided as the initial value for an m2m field
"""
self.maxDiff = 1200
self.create_basic_data()
# Set up a callable initial value
def formfield_for_dbfield(db_field, **kwargs):
if db_field.name == 'categories':
kwargs['initial'] = lambda: Category.objects.all().order_by('name')[:2]
return db_field.formfield(**kwargs)
# Create a ModelForm, instantiate it, and check that the output is as expected
ModelForm = modelform_factory(Article, fields=['headline', 'categories'],
formfield_callback=formfield_for_dbfield)
form = ModelForm()
self.assertHTMLEqual(form.as_ul(), """<li><label for="id_headline">Headline:</label> <input id="id_headline" type="text" name="headline" maxlength="50" /></li>
<li><label for="id_categories">Categories:</label> <select multiple="multiple" name="categories" id="id_categories">
<option value="%d" selected="selected">Entertainment</option>
<option value="%d" selected="selected">It&39;s a test</option>
<option value="%d">Third test</option>
</select></li>"""
% (self.c1.pk, self.c2.pk, self.c3.pk))
def test_basic_creation(self):
self.assertEqual(Category.objects.count(), 0)
f = BaseCategoryForm({'name': 'Entertainment',
'slug': 'entertainment',
'url': 'entertainment'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['name'], 'Entertainment')
self.assertEqual(f.cleaned_data['slug'], 'entertainment')
self.assertEqual(f.cleaned_data['url'], 'entertainment')
c1 = f.save()
# Testing whether the same object is returned from the
# ORM... not the fastest way...
self.assertEqual(Category.objects.count(), 1)
self.assertEqual(c1, Category.objects.all()[0])
self.assertEqual(c1.name, "Entertainment")
def test_save_commit_false(self):
# If you call save() with commit=False, then it will return an object that
# hasn't yet been saved to the database. In this case, it's up to you to call
# save() on the resulting model instance.
f = BaseCategoryForm({'name': 'Third test', 'slug': 'third-test', 'url': 'third'})
self.assertTrue(f.is_valid())
c1 = f.save(commit=False)
self.assertEqual(c1.name, "Third test")
self.assertEqual(Category.objects.count(), 0)
c1.save()
self.assertEqual(Category.objects.count(), 1)
def test_save_with_data_errors(self):
# If you call save() with invalid data, you'll get a ValueError.
f = BaseCategoryForm({'name': '', 'slug': 'not a slug!', 'url': 'foo'})
self.assertEqual(f.errors['name'], ['This field is required.'])
self.assertEqual(f.errors['slug'], ["Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."])
self.assertEqual(f.cleaned_data, {'url': 'foo'})
with self.assertRaises(ValueError):
f.save()
f = BaseCategoryForm({'name': '', 'slug': '', 'url': 'foo'})
with self.assertRaises(ValueError):
f.save()
def test_multi_fields(self):
self.create_basic_data()
self.maxDiff = None
# ManyToManyFields are represented by a MultipleChoiceField, ForeignKeys and any
# fields with the 'choices' attribute are represented by a ChoiceField.
f = ArticleForm(auto_id=False)
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Slug:</th><td><input type="text" name="slug" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>
<tr><th>Writer:</th><td><select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></td></tr>
<tr><th>Article:</th><td><textarea rows="10" cols="40" name="article"></textarea></td></tr>
<tr><th>Categories:</th><td><select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select></td></tr>
<tr><th>Status:</th><td><select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></td></tr>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
# Add some categories and test the many-to-many form output.
new_art = Article.objects.create(
article="Hello.", headline="New headline", slug="new-headline",
pub_date=datetime.date(1988, 1, 4), writer=self.w_royko)
new_art.categories.add(Category.objects.get(name='Entertainment'))
self.assertQuerysetEqual(new_art.categories.all(), ["Entertainment"])
f = ArticleForm(auto_id=False, instance=new_art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
def test_subset_fields(self):
# You can restrict a form to a subset of the complete list of fields
# by providing a 'fields' argument. If you try to save a
# model created with such a form, you need to ensure that the fields
# that are _not_ on the form have default values, or are allowed to have
# a value of None. If a field isn't specified on a form, the object created
# from the form can't provide a value for that field!
class PartialArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = ('headline', 'pub_date')
f = PartialArticleForm(auto_id=False)
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>''')
# You can create a form over a subset of the available fields
        # by specifying a 'fields' attribute in the form's inner Meta class.
class PartialArticleFormWithSlug(forms.ModelForm):
class Meta:
model = Article
fields = ('headline', 'slug', 'pub_date')
w_royko = Writer.objects.create(name='Mike Royko')
art = Article.objects.create(
article="Hello.", headline="New headline", slug="new-headline",
pub_date=datetime.date(1988, 1, 4), writer=w_royko)
f = PartialArticleFormWithSlug({
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04'
}, auto_id=False, instance=art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>''')
self.assertTrue(f.is_valid())
new_art = f.save()
self.assertEqual(new_art.id, art.id)
new_art = Article.objects.get(id=art.id)
self.assertEqual(new_art.headline, 'New headline')
def test_m2m_editing(self):
self.create_basic_data()
form_data = {
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04',
'writer': six.text_type(self.w_royko.pk),
'article': 'Hello.',
'categories': [six.text_type(self.c1.id), six.text_type(self.c2.id)]
}
# Create a new article, with categories, via the form.
f = ArticleForm(form_data)
new_art = f.save()
new_art = Article.objects.get(id=new_art.id)
art_id_1 = new_art.id
self.assertQuerysetEqual(new_art.categories.order_by('name'),
["Entertainment", "It's a test"])
# Now, submit form data with no categories. This deletes the existing categories.
form_data['categories'] = []
f = ArticleForm(form_data, instance=new_art)
new_art = f.save()
self.assertEqual(new_art.id, art_id_1)
new_art = Article.objects.get(id=art_id_1)
self.assertQuerysetEqual(new_art.categories.all(), [])
# Create a new article, with no categories, via the form.
f = ArticleForm(form_data)
new_art = f.save()
art_id_2 = new_art.id
self.assertNotIn(art_id_2, (None, art_id_1))
new_art = Article.objects.get(id=art_id_2)
self.assertQuerysetEqual(new_art.categories.all(), [])
# Create a new article, with categories, via the form, but use commit=False.
# The m2m data won't be saved until save_m2m() is invoked on the form.
form_data['categories'] = [six.text_type(self.c1.id), six.text_type(self.c2.id)]
f = ArticleForm(form_data)
new_art = f.save(commit=False)
# Manually save the instance
new_art.save()
art_id_3 = new_art.id
self.assertNotIn(art_id_3, (None, art_id_1, art_id_2))
# The instance doesn't have m2m data yet
new_art = Article.objects.get(id=art_id_3)
self.assertQuerysetEqual(new_art.categories.all(), [])
# Save the m2m data on the form
f.save_m2m()
self.assertQuerysetEqual(new_art.categories.order_by('name'),
["Entertainment", "It's a test"])
def test_custom_form_fields(self):
# Here, we define a custom ModelForm. Because it happens to have the same fields as
# the Category model, we can just call the form's save() to apply its changes to an
# existing Category instance.
class ShortCategory(forms.ModelForm):
name = forms.CharField(max_length=5)
slug = forms.CharField(max_length=5)
url = forms.CharField(max_length=3)
class Meta:
model = Category
fields = '__all__'
cat = Category.objects.create(name='Third test')
form = ShortCategory({'name': 'Third', 'slug': 'third', 'url': '3rd'}, instance=cat)
self.assertEqual(form.save().name, 'Third')
self.assertEqual(Category.objects.get(id=cat.id).name, 'Third')
def test_runtime_choicefield_populated(self):
self.maxDiff = None
# Here, we demonstrate that choices for a ForeignKey ChoiceField are determined
# at runtime, based on the data in the database when the form is displayed, not
# the data in the database when the form is instantiated.
self.create_basic_data()
f = ArticleForm(auto_id=False)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select> </li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk))
c4 = Category.objects.create(name='Fourth', url='4th')
w_bernstein = Writer.objects.create(name='Carl Bernstein')
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
<option value="%s">Fourth</option>
</select></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (self.w_woodward.pk, w_bernstein.pk, self.w_royko.pk, self.c1.pk, self.c2.pk, self.c3.pk, c4.pk))
class ModelChoiceFieldTests(TestCase):
def setUp(self):
self.c1 = Category.objects.create(
name="Entertainment", slug="entertainment", url="entertainment")
self.c2 = Category.objects.create(
name="It's a test", slug="its-test", url="test")
self.c3 = Category.objects.create(
name="Third", slug="third-test", url="third")
# ModelChoiceField ############################################################
def test_modelchoicefield(self):
f = forms.ModelChoiceField(Category.objects.all())
self.assertEqual(list(f.choices), [
('', '---------'),
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test"),
(self.c3.pk, 'Third')])
with self.assertRaises(ValidationError):
f.clean('')
with self.assertRaises(ValidationError):
f.clean(None)
with self.assertRaises(ValidationError):
f.clean(0)
# Invalid types that require TypeError to be caught (#22808).
with self.assertRaises(ValidationError):
f.clean([['fail']])
with self.assertRaises(ValidationError):
f.clean([{'foo': 'bar'}])
self.assertEqual(f.clean(self.c2.id).name, "It's a test")
self.assertEqual(f.clean(self.c3.id).name, 'Third')
# Add a Category object *after* the ModelChoiceField has already been
        # instantiated. This proves clean() hits the database at validation
        # time rather than caching the queryset at instantiation.
c4 = Category.objects.create(name='Fourth', url='4th')
self.assertEqual(f.clean(c4.id).name, 'Fourth')
# Delete a Category object *after* the ModelChoiceField has already been
        # instantiated. This proves clean() hits the database at validation
        # time rather than caching the queryset at instantiation.
Category.objects.get(url='4th').delete()
with self.assertRaises(ValidationError):
f.clean(c4.id)
def test_modelchoicefield_choices(self):
f = forms.ModelChoiceField(Category.objects.filter(pk=self.c1.id), required=False)
self.assertIsNone(f.clean(''))
self.assertEqual(f.clean(str(self.c1.id)).name, "Entertainment")
with self.assertRaises(ValidationError):
f.clean('100')
# len can be called on choices
self.assertEqual(len(f.choices), 2)
# queryset can be changed after the field is created.
f.queryset = Category.objects.exclude(name='Third')
self.assertEqual(list(f.choices), [
('', '---------'),
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test")])
self.assertEqual(f.clean(self.c2.id).name, "It's a test")
with self.assertRaises(ValidationError):
f.clean(self.c3.id)
# check that we can safely iterate choices repeatedly
gen_one = list(f.choices)
gen_two = f.choices
self.assertEqual(gen_one[2], (self.c2.pk, "It's a test"))
self.assertEqual(list(gen_two), [
('', '---------'),
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test")])
# check that we can override the label_from_instance method to print custom labels (#4620)
f.queryset = Category.objects.all()
f.label_from_instance = lambda obj: "category " + str(obj)
self.assertEqual(list(f.choices), [
('', '---------'),
(self.c1.pk, 'category Entertainment'),
(self.c2.pk, "category It's a test"),
(self.c3.pk, 'category Third')])
def test_modelchoicefield_11183(self):
"""
Regression test for ticket #11183.
"""
class ModelChoiceForm(forms.Form):
category = forms.ModelChoiceField(Category.objects.all())
form1 = ModelChoiceForm()
field1 = form1.fields['category']
# To allow the widget to change the queryset of field1.widget.choices correctly,
# without affecting other forms, the following must hold:
self.assertIsNot(field1, ModelChoiceForm.base_fields['category'])
self.assertIs(field1.widget.choices.field, field1)
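        # Illustrative follow-up: because field1 is a deepcopy, narrowing its
        # queryset must not leak into forms instantiated afterwards.
        field1.queryset = Category.objects.filter(name='Entertainment')
        form2 = ModelChoiceForm()
        self.assertEqual(form2.fields['category'].queryset.count(), 3)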
def test_modelchoicefield_22745(self):
"""
#22745 -- Make sure that ModelChoiceField with RadioSelect widget
doesn't produce unnecessary db queries when accessing its BoundField's
attrs.
"""
class ModelChoiceForm(forms.Form):
category = forms.ModelChoiceField(Category.objects.all(), widget=forms.RadioSelect)
form = ModelChoiceForm()
field = form['category'] # BoundField
template = Template('{{ field.name }}{{ field }}{{ field.help_text }}')
with self.assertNumQueries(1):
template.render(Context({'field': field}))
class ModelMultipleChoiceFieldTests(TestCase):
def setUp(self):
self.c1 = Category.objects.create(
name="Entertainment", slug="entertainment", url="entertainment")
self.c2 = Category.objects.create(
name="It's a test", slug="its-test", url="test")
self.c3 = Category.objects.create(
name="Third", slug="third-test", url="third")
def test_model_multiple_choice_field(self):
f = forms.ModelMultipleChoiceField(Category.objects.all())
self.assertEqual(list(f.choices), [
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test"),
(self.c3.pk, 'Third')])
with self.assertRaises(ValidationError):
f.clean(None)
with self.assertRaises(ValidationError):
f.clean([])
self.assertQuerysetEqual(f.clean([self.c1.id]), ["Entertainment"])
self.assertQuerysetEqual(f.clean([self.c2.id]), ["It's a test"])
self.assertQuerysetEqual(f.clean([str(self.c1.id)]), ["Entertainment"])
self.assertQuerysetEqual(f.clean([str(self.c1.id), str(self.c2.id)]),
["Entertainment", "It's a test"], ordered=False)
self.assertQuerysetEqual(f.clean([self.c1.id, str(self.c2.id)]),
["Entertainment", "It's a test"], ordered=False)
self.assertQuerysetEqual(f.clean((self.c1.id, str(self.c2.id))),
["Entertainment", "It's a test"], ordered=False)
with self.assertRaises(ValidationError):
f.clean(['100'])
with self.assertRaises(ValidationError):
f.clean('hello')
with self.assertRaises(ValidationError):
f.clean(['fail'])
# Invalid types that require TypeError to be caught (#22808).
with self.assertRaises(ValidationError):
f.clean([['fail']])
with self.assertRaises(ValidationError):
f.clean([{'foo': 'bar'}])
# Add a Category object *after* the ModelMultipleChoiceField has already been
        # instantiated. This proves clean() hits the database at validation
        # time rather than caching the queryset at instantiation.
        # Note: we use an id of 1006 here because tests that run before this
        # one may create categories with primary keys up to 6; pick a number
        # that will not conflict.
c6 = Category.objects.create(id=1006, name='Sixth', url='6th')
self.assertQuerysetEqual(f.clean([c6.id]), ["Sixth"])
# Delete a Category object *after* the ModelMultipleChoiceField has already been
# instantiated. This proves clean() checks the database during clean() rather
# than caching it at time of instantiation.
Category.objects.get(url='6th').delete()
with self.assertRaises(ValidationError):
f.clean([c6.id])
def test_model_multiple_choice_required_false(self):
f = forms.ModelMultipleChoiceField(Category.objects.all(), required=False)
self.assertIsInstance(f.clean([]), EmptyQuerySet)
self.assertIsInstance(f.clean(()), EmptyQuerySet)
with self.assertRaises(ValidationError):
f.clean(['0'])
with self.assertRaises(ValidationError):
f.clean([str(self.c3.id), '0'])
with self.assertRaises(ValidationError):
f.clean([str(self.c1.id), '0'])
# queryset can be changed after the field is created.
f.queryset = Category.objects.exclude(name='Third')
self.assertEqual(list(f.choices), [
(self.c1.pk, 'Entertainment'),
(self.c2.pk, "It's a test")])
self.assertQuerysetEqual(f.clean([self.c2.id]), ["It's a test"])
with self.assertRaises(ValidationError):
f.clean([self.c3.id])
with self.assertRaises(ValidationError):
f.clean([str(self.c2.id), str(self.c3.id)])
f.queryset = Category.objects.all()
f.label_from_instance = lambda obj: "multicategory " + str(obj)
self.assertEqual(list(f.choices), [
(self.c1.pk, 'multicategory Entertainment'),
(self.c2.pk, "multicategory It's a test"),
(self.c3.pk, 'multicategory Third')])
def test_model_multiple_choice_number_of_queries(self):
"""
Test that ModelMultipleChoiceField does O(1) queries instead of
O(n) (#10156).
"""
persons = [Writer.objects.create(name="Person %s" % i) for i in range(30)]
f = forms.ModelMultipleChoiceField(queryset=Writer.objects.all())
self.assertNumQueries(1, f.clean, [p.pk for p in persons[1:11:2]])
def test_model_multiple_choice_run_validators(self):
"""
        Test that ModelMultipleChoiceField runs the given validators (#14144).
"""
for i in range(30):
Writer.objects.create(name="Person %s" % i)
self._validator_run = False
def my_validator(value):
self._validator_run = True
f = forms.ModelMultipleChoiceField(queryset=Writer.objects.all(),
validators=[my_validator])
f.clean([p.pk for p in Writer.objects.all()[8:9]])
self.assertTrue(self._validator_run)
def test_model_multiple_choice_show_hidden_initial(self):
"""
Test support of show_hidden_initial by ModelMultipleChoiceField.
"""
class WriterForm(forms.Form):
persons = forms.ModelMultipleChoiceField(show_hidden_initial=True,
queryset=Writer.objects.all())
person1 = Writer.objects.create(name="Person 1")
person2 = Writer.objects.create(name="Person 2")
form = WriterForm(initial={'persons': [person1, person2]},
data={'initial-persons': [str(person1.pk), str(person2.pk)],
'persons': [str(person1.pk), str(person2.pk)]})
self.assertTrue(form.is_valid())
self.assertFalse(form.has_changed())
form = WriterForm(initial={'persons': [person1, person2]},
data={'initial-persons': [str(person1.pk), str(person2.pk)],
'persons': [str(person2.pk)]})
self.assertTrue(form.is_valid())
self.assertTrue(form.has_changed())
def test_model_multiple_choice_field_22745(self):
"""
#22745 -- Make sure that ModelMultipleChoiceField with
CheckboxSelectMultiple widget doesn't produce unnecessary db queries
when accessing its BoundField's attrs.
"""
class ModelMultipleChoiceForm(forms.Form):
categories = forms.ModelMultipleChoiceField(Category.objects.all(), widget=forms.CheckboxSelectMultiple)
form = ModelMultipleChoiceForm()
field = form['categories'] # BoundField
template = Template('{{ field.name }}{{ field }}{{ field.help_text }}')
with self.assertNumQueries(1):
template.render(Context({'field': field}))
def test_show_hidden_initial_changed_queries_efficiently(self):
class WriterForm(forms.Form):
persons = forms.ModelMultipleChoiceField(
show_hidden_initial=True, queryset=Writer.objects.all())
writers = (Writer.objects.create(name=str(x)) for x in range(0, 50))
writer_pks = tuple(x.pk for x in writers)
form = WriterForm(data={'initial-persons': writer_pks})
with self.assertNumQueries(1):
self.assertTrue(form.has_changed())
def test_clean_does_deduplicate_values(self):
class WriterForm(forms.Form):
persons = forms.ModelMultipleChoiceField(queryset=Writer.objects.all())
person1 = Writer.objects.create(name="Person 1")
form = WriterForm(data={})
queryset = form.fields['persons'].clean([str(person1.pk)] * 50)
sql, params = queryset.query.sql_with_params()
self.assertEqual(len(params), 1)
class ModelOneToOneFieldTests(TestCase):
def test_modelform_onetoonefield(self):
class ImprovedArticleForm(forms.ModelForm):
class Meta:
model = ImprovedArticle
fields = '__all__'
class ImprovedArticleWithParentLinkForm(forms.ModelForm):
class Meta:
model = ImprovedArticleWithParentLink
fields = '__all__'
self.assertEqual(list(ImprovedArticleForm.base_fields), ['article'])
self.assertEqual(list(ImprovedArticleWithParentLinkForm.base_fields), [])
def test_modelform_subclassed_model(self):
class BetterWriterForm(forms.ModelForm):
class Meta:
# BetterWriter model is a subclass of Writer with an additional `score` field
model = BetterWriter
fields = '__all__'
bw = BetterWriter.objects.create(name='Joe Better', score=10)
self.assertEqual(sorted(model_to_dict(bw)),
['id', 'name', 'score', 'writer_ptr'])
form = BetterWriterForm({'name': 'Some Name', 'score': 12})
self.assertTrue(form.is_valid())
bw2 = form.save()
self.assertEqual(bw2.score, 12)
def test_onetoonefield(self):
class WriterProfileForm(forms.ModelForm):
class Meta:
# WriterProfile has a OneToOneField to Writer
model = WriterProfile
fields = '__all__'
self.w_royko = Writer.objects.create(name='Mike Royko')
self.w_woodward = Writer.objects.create(name='Bob Woodward')
form = WriterProfileForm()
self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="number" name="age" id="id_age" min="0" /></p>''' % (self.w_woodward.pk, self.w_royko.pk))
data = {
'writer': six.text_type(self.w_woodward.pk),
'age': '65',
}
form = WriterProfileForm(data)
instance = form.save()
self.assertEqual(six.text_type(instance), 'Bob Woodward is 65')
form = WriterProfileForm(instance=instance)
self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="">---------</option>
<option value="%s" selected="selected">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="number" name="age" value="65" id="id_age" min="0" /></p>''' % (self.w_woodward.pk, self.w_royko.pk))
def test_assignment_of_none(self):
class AuthorForm(forms.ModelForm):
class Meta:
model = Author
fields = ['publication', 'full_name']
publication = Publication.objects.create(title="Pravda",
date_published=datetime.date(1991, 8, 22))
author = Author.objects.create(publication=publication, full_name='John Doe')
form = AuthorForm({'publication': '', 'full_name': 'John Doe'}, instance=author)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['publication'], None)
author = form.save()
# author object returned from form still retains original publication object
# that's why we need to retrieve it from database again
new_author = Author.objects.get(pk=author.pk)
self.assertEqual(new_author.publication, None)
def test_assignment_of_none_null_false(self):
class AuthorForm(forms.ModelForm):
class Meta:
model = Author1
fields = ['publication', 'full_name']
publication = Publication.objects.create(title="Pravda",
date_published=datetime.date(1991, 8, 22))
author = Author1.objects.create(publication=publication, full_name='John Doe')
form = AuthorForm({'publication': '', 'full_name': 'John Doe'}, instance=author)
self.assertFalse(form.is_valid())
class FileAndImageFieldTests(TestCase):
def test_clean_false(self):
"""
If the ``clean`` method on a non-required FileField receives False as
the data (meaning clear the field value), it returns False, regardless
of the value of ``initial``.
"""
f = forms.FileField(required=False)
self.assertEqual(f.clean(False), False)
self.assertEqual(f.clean(False, 'initial'), False)
def test_clean_false_required(self):
"""
If the ``clean`` method on a required FileField receives False as the
data, it has the same effect as None: initial is returned if non-empty,
otherwise the validation catches the lack of a required value.
"""
f = forms.FileField(required=True)
self.assertEqual(f.clean(False, 'initial'), 'initial')
self.assertRaises(ValidationError, f.clean, False)
def test_full_clear(self):
"""
Integration happy-path test that a model FileField can actually be set
and cleared via a ModelForm.
"""
class DocumentForm(forms.ModelForm):
class Meta:
model = Document
fields = '__all__'
form = DocumentForm()
self.assertIn('name="myfile"', six.text_type(form))
self.assertNotIn('myfile-clear', six.text_type(form))
form = DocumentForm(files={'myfile': SimpleUploadedFile('something.txt', b'content')})
self.assertTrue(form.is_valid())
doc = form.save(commit=False)
self.assertEqual(doc.myfile.name, 'something.txt')
form = DocumentForm(instance=doc)
self.assertIn('myfile-clear', six.text_type(form))
form = DocumentForm(instance=doc, data={'myfile-clear': 'true'})
doc = form.save(commit=False)
self.assertEqual(bool(doc.myfile), False)
def test_clear_and_file_contradiction(self):
"""
If the user submits a new file upload AND checks the clear checkbox,
they get a validation error, and the bound redisplay of the form still
includes the current file and the clear checkbox.
"""
class DocumentForm(forms.ModelForm):
class Meta:
model = Document
fields = '__all__'
form = DocumentForm(files={'myfile': SimpleUploadedFile('something.txt', b'content')})
self.assertTrue(form.is_valid())
doc = form.save(commit=False)
form = DocumentForm(instance=doc,
files={'myfile': SimpleUploadedFile('something.txt', b'content')},
data={'myfile-clear': 'true'})
self.assertTrue(not form.is_valid())
self.assertEqual(form.errors['myfile'],
['Please either submit a file or check the clear checkbox, not both.'])
rendered = six.text_type(form)
self.assertIn('something.txt', rendered)
self.assertIn('myfile-clear', rendered)
def test_file_field_data(self):
# Test conditions when files is either not given or empty.
f = TextFileForm(data={'description': 'Assistance'})
self.assertFalse(f.is_valid())
f = TextFileForm(data={'description': 'Assistance'}, files={})
self.assertFalse(f.is_valid())
# Upload a file and ensure it all works as expected.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test1.txt')
instance.file.delete()
# If the previous file has been deleted, the file name can be reused
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test1.txt')
# Check if the max_length attribute has been inherited from the model.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test-maxlength.txt', b'hello world')})
self.assertFalse(f.is_valid())
# Edit an instance that already has the file defined in the model. This will not
# save the file again, but leave it exactly as it is.
f = TextFileForm(
data={'description': 'Assistance'},
instance=instance)
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['file'].name, 'tests/test1.txt')
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test1.txt')
# Delete the current file since this is not done by Django.
instance.file.delete()
# Override the file by uploading a new one.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test2.txt', b'hello world')}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test2.txt')
# Delete the current file since this is not done by Django.
instance.file.delete()
instance.delete()
def test_filefield_required_false(self):
# Test the non-required FileField
f = TextFileForm(data={'description': 'Assistance'})
f.fields['file'].required = False
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.file.name, '')
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test3.txt', b'hello world')}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test3.txt')
# Instance can be edited w/out re-uploading the file and existing file should be preserved.
f = TextFileForm(
data={'description': 'New Description'},
instance=instance)
f.fields['file'].required = False
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.description, 'New Description')
self.assertEqual(instance.file.name, 'tests/test3.txt')
# Delete the current file since this is not done by Django.
instance.file.delete()
instance.delete()
def test_custom_file_field_save(self):
"""
Regression for #11149: save_form_data should be called only once
"""
class CFFForm(forms.ModelForm):
class Meta:
model = CustomFF
fields = '__all__'
# It's enough that the form saves without error -- the custom save routine will
# generate an AssertionError if it is called more than once during save.
form = CFFForm(data={'f': None})
form.save()
def test_file_field_multiple_save(self):
"""
Simulate a file upload and check how many times Model.save() gets
called. Test for bug #639.
"""
class PhotoForm(forms.ModelForm):
class Meta:
model = Photo
fields = '__all__'
# Grab an image for testing.
filename = os.path.join(os.path.dirname(upath(__file__)), "test.png")
with open(filename, "rb") as fp:
img = fp.read()
# Fake a POST QueryDict and FILES MultiValueDict.
data = {'title': 'Testing'}
files = {"image": SimpleUploadedFile('test.png', img, 'image/png')}
form = PhotoForm(data=data, files=files)
p = form.save()
try:
# Check the savecount stored on the object (see the model).
self.assertEqual(p._savecount, 1)
finally:
# Delete the "uploaded" file to avoid clogging /tmp.
p = Photo.objects.get()
p.image.delete(save=False)
def test_file_path_field_blank(self):
"""
Regression test for #8842: FilePathField(blank=True)
"""
class FPForm(forms.ModelForm):
class Meta:
model = FilePathModel
fields = '__all__'
form = FPForm()
names = [p[1] for p in form['path'].field.choices]
names.sort()
self.assertEqual(names, ['---------', '__init__.py', 'models.py', 'tests.py'])
@skipUnless(test_images, "Pillow not installed")
def test_image_field(self):
# ImageField and FileField are nearly identical, but they differ slightly when
# it comes to validation. This specifically tests that #6302 is fixed for
# both file fields and image fields.
with open(os.path.join(os.path.dirname(upath(__file__)), "test.png"), 'rb') as fp:
image_data = fp.read()
with open(os.path.join(os.path.dirname(upath(__file__)), "test2.png"), 'rb') as fp:
image_data2 = fp.read()
f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertTrue(f.is_valid())
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Edit an instance that already has the (required) image defined in the model. This will not
# save the image again, but leave it exactly as it is.
f = ImageFileForm(data={'description': 'Look, it changed'}, instance=instance)
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['image'].name, 'tests/test.png')
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test.png')
self.assertEqual(instance.height, 16)
self.assertEqual(instance.width, 16)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
# Override the file by uploading a new one.
f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
self.assertEqual(instance.height, 32)
self.assertEqual(instance.width, 48)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
instance.delete()
f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)})
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
self.assertEqual(instance.height, 32)
self.assertEqual(instance.width, 48)
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
instance.image.delete(save=False)
instance.delete()
# Test the non-required ImageField
# Note: In Oracle, we expect a null ImageField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_imagefield_repr = ''
else:
expected_null_imagefield_repr = None
f = OptionalImageFileForm(data={'description': 'Test'})
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, expected_null_imagefield_repr)
self.assertEqual(instance.width, None)
self.assertEqual(instance.height, None)
f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test3.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Editing the instance without re-uploading the image should not affect the image or its width/height properties
f = OptionalImageFileForm(
data={'description': 'New Description'},
instance=instance)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.description, 'New Description')
self.assertEqual(instance.image.name, 'tests/test3.png')
self.assertEqual(instance.width, 16)
self.assertEqual(instance.height, 16)
# Delete the current file since this is not done by Django.
instance.image.delete()
instance.delete()
f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test4.png', image_data2)}
)
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test4.png')
self.assertEqual(instance.width, 48)
self.assertEqual(instance.height, 32)
instance.delete()
# Test callable upload_to behavior that's dependent on the value of another field in the model
f = ImageFileForm(
data={'description': 'And a final one', 'path': 'foo'},
files={'image': SimpleUploadedFile('test4.png', image_data)})
self.assertTrue(f.is_valid())
instance = f.save()
self.assertEqual(instance.image.name, 'foo/test4.png')
instance.delete()
class ModelOtherFieldTests(TestCase):
def test_big_integer_field(self):
bif = BigIntForm({'biggie': '-9223372036854775808'})
self.assertTrue(bif.is_valid())
bif = BigIntForm({'biggie': '-9223372036854775809'})
self.assertFalse(bif.is_valid())
self.assertEqual(bif.errors, {'biggie': ['Ensure this value is greater than or equal to -9223372036854775808.']})
bif = BigIntForm({'biggie': '9223372036854775807'})
self.assertTrue(bif.is_valid())
bif = BigIntForm({'biggie': '9223372036854775808'})
self.assertFalse(bif.is_valid())
self.assertEqual(bif.errors, {'biggie': ['Ensure this value is less than or equal to 9223372036854775807.']})
def test_comma_separated_integer_field(self):
class CommaSeparatedIntegerForm(forms.ModelForm):
class Meta:
model = CommaSeparatedInteger
fields = '__all__'
f = CommaSeparatedIntegerForm({'field': '1,2,3'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': '1,2,3'})
f = CommaSeparatedIntegerForm({'field': '1a,2'})
self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
f = CommaSeparatedIntegerForm({'field': ',,,,'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': ',,,,'})
f = CommaSeparatedIntegerForm({'field': '1.2'})
self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
f = CommaSeparatedIntegerForm({'field': '1,a,2'})
self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
f = CommaSeparatedIntegerForm({'field': '1,,2'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': '1,,2'})
f = CommaSeparatedIntegerForm({'field': '1'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'field': '1'})
def test_url_on_modelform(self):
"Check basic URL field validation on model forms"
class HomepageForm(forms.ModelForm):
class Meta:
model = Homepage
fields = '__all__'
self.assertFalse(HomepageForm({'url': 'foo'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example.'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://com.'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://localhost'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com/foo/bar'}).is_valid())
def test_http_prefixing(self):
"""
If the http:// prefix is omitted on form input, the field adds it again. (Refs #13613)
"""
class HomepageForm(forms.ModelForm):
class Meta:
model = Homepage
fields = '__all__'
form = HomepageForm({'url': 'example.com'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['url'], 'http://example.com')
form = HomepageForm({'url': 'example.com/test'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['url'], 'http://example.com/test')
class OtherModelFormTests(TestCase):
def test_media_on_modelform(self):
        # Similar to a regular Form class, you can define custom media to be used on
# the ModelForm.
f = ModelFormWithMedia()
self.assertHTMLEqual(six.text_type(f.media), '''<link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/form/javascript"></script>''')
def test_choices_type(self):
# Choices on CharField and IntegerField
f = ArticleForm()
with self.assertRaises(ValidationError):
f.fields['status'].clean('42')
f = ArticleStatusForm()
with self.assertRaises(ValidationError):
f.fields['status'].clean('z')
def test_prefetch_related_queryset(self):
"""
ModelChoiceField should respect a prefetch_related() on its queryset.
"""
blue = Colour.objects.create(name='blue')
red = Colour.objects.create(name='red')
multicolor_item = ColourfulItem.objects.create()
multicolor_item.colours.add(blue, red)
red_item = ColourfulItem.objects.create()
red_item.colours.add(red)
class ColorModelChoiceField(forms.ModelChoiceField):
def label_from_instance(self, obj):
return ', '.join(c.name for c in obj.colours.all())
field = ColorModelChoiceField(ColourfulItem.objects.prefetch_related('colours'))
with self.assertNumQueries(4): # would be 5 if prefetch is ignored
self.assertEqual(tuple(field.choices), (
('', '---------'),
(multicolor_item.pk, 'blue, red'),
(red_item.pk, 'red'),
))
def test_foreignkeys_which_use_to_field(self):
apple = Inventory.objects.create(barcode=86, name='Apple')
Inventory.objects.create(barcode=22, name='Pear')
core = Inventory.objects.create(barcode=87, name='Core', parent=apple)
field = forms.ModelChoiceField(Inventory.objects.all(), to_field_name='barcode')
self.assertEqual(tuple(field.choices), (
('', '---------'),
(86, 'Apple'),
(87, 'Core'),
(22, 'Pear')))
form = InventoryForm(instance=core)
self.assertHTMLEqual(six.text_type(form['parent']), '''<select name="parent" id="id_parent">
<option value="">---------</option>
<option value="86" selected="selected">Apple</option>
<option value="87">Core</option>
<option value="22">Pear</option>
</select>''')
data = model_to_dict(core)
data['parent'] = '22'
form = InventoryForm(data=data, instance=core)
core = form.save()
self.assertEqual(core.parent.name, 'Pear')
class CategoryForm(forms.ModelForm):
description = forms.CharField()
class Meta:
model = Category
fields = ['description', 'url']
self.assertEqual(list(CategoryForm.base_fields),
['description', 'url'])
self.assertHTMLEqual(six.text_type(CategoryForm()), '''<tr><th><label for="id_description">Description:</label></th><td><input type="text" name="description" id="id_description" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>''')
# to_field_name should also work on ModelMultipleChoiceField ##################
field = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
self.assertEqual(tuple(field.choices), ((86, 'Apple'), (87, 'Core'), (22, 'Pear')))
self.assertQuerysetEqual(field.clean([86]), ['Apple'])
form = SelectInventoryForm({'items': [87, 22]})
self.assertTrue(form.is_valid())
self.assertEqual(len(form.cleaned_data), 1)
self.assertQuerysetEqual(form.cleaned_data['items'], ['Core', 'Pear'])
def test_model_field_that_returns_none_to_exclude_itself_with_explicit_fields(self):
self.assertEqual(list(CustomFieldForExclusionForm.base_fields),
['name'])
self.assertHTMLEqual(six.text_type(CustomFieldForExclusionForm()),
'''<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="10" /></td></tr>''')
def test_iterable_model_m2m(self):
class ColourfulItemForm(forms.ModelForm):
class Meta:
model = ColourfulItem
fields = '__all__'
colour = Colour.objects.create(name='Blue')
form = ColourfulItemForm()
self.maxDiff = 1024
self.assertHTMLEqual(
form.as_p(),
"""<p><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="50" /></p>
<p><label for="id_colours">Colours:</label> <select multiple="multiple" name="colours" id="id_colours">
<option value="%(blue_pk)s">Blue</option>
</select></p>"""
% {'blue_pk': colour.pk})
class ModelFormCustomErrorTests(TestCase):
def test_custom_error_messages(self):
data = {'name1': '@#$!!**@#$', 'name2': '@#$!!**@#$'}
errors = CustomErrorMessageForm(data).errors
self.assertHTMLEqual(
str(errors['name1']),
'<ul class="errorlist"><li>Form custom error message.</li></ul>'
)
self.assertHTMLEqual(
str(errors['name2']),
'<ul class="errorlist"><li>Model custom error message.</li></ul>'
)
def test_model_clean_error_messages(self):
data = {'name1': 'FORBIDDEN_VALUE', 'name2': 'ABC'}
form = CustomErrorMessageForm(data)
self.assertFalse(form.is_valid())
self.assertHTMLEqual(
str(form.errors['name1']),
'<ul class="errorlist"><li>Model.clean() error messages.</li></ul>'
)
data = {'name1': 'FORBIDDEN_VALUE2', 'name2': 'ABC'}
form = CustomErrorMessageForm(data)
self.assertFalse(form.is_valid())
self.assertHTMLEqual(
str(form.errors['name1']),
'<ul class="errorlist"><li>Model.clean() error messages (simpler syntax).</li></ul>'
)
data = {'name1': 'GLOBAL_ERROR', 'name2': 'ABC'}
form = CustomErrorMessageForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['__all__'], ['Global error message.'])
class CustomCleanTests(TestCase):
def test_override_clean(self):
"""
Regression for #12596: Calling super from ModelForm.clean() should be
optional.
"""
class TripleFormWithCleanOverride(forms.ModelForm):
class Meta:
model = Triple
fields = '__all__'
def clean(self):
if not self.cleaned_data['left'] == self.cleaned_data['right']:
raise forms.ValidationError('Left and right should be equal')
return self.cleaned_data
form = TripleFormWithCleanOverride({'left': 1, 'middle': 2, 'right': 1})
self.assertTrue(form.is_valid())
# form.instance.left will be None if the instance was not constructed
# by form.full_clean().
self.assertEqual(form.instance.left, 1)
def test_model_form_clean_applies_to_model(self):
"""
Regression test for #12960. Make sure the cleaned_data returned from
ModelForm.clean() is applied to the model instance.
"""
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = '__all__'
def clean(self):
self.cleaned_data['name'] = self.cleaned_data['name'].upper()
return self.cleaned_data
data = {'name': 'Test', 'slug': 'test', 'url': '/test'}
form = CategoryForm(data)
category = form.save()
self.assertEqual(category.name, 'TEST')
class ModelFormInheritanceTests(TestCase):
def test_form_subclass_inheritance(self):
class Form(forms.Form):
age = forms.IntegerField()
class ModelForm(forms.ModelForm, Form):
class Meta:
model = Writer
fields = '__all__'
self.assertEqual(list(ModelForm().fields.keys()), ['name', 'age'])
def test_field_removal(self):
class ModelForm(forms.ModelForm):
class Meta:
model = Writer
fields = '__all__'
class Mixin(object):
age = None
class Form(forms.Form):
age = forms.IntegerField()
class Form2(forms.Form):
foo = forms.IntegerField()
self.assertEqual(list(ModelForm().fields.keys()), ['name'])
self.assertEqual(list(type(str('NewForm'), (Mixin, Form), {})().fields.keys()), [])
self.assertEqual(list(type(str('NewForm'), (Form2, Mixin, Form), {})().fields.keys()), ['foo'])
self.assertEqual(list(type(str('NewForm'), (Mixin, ModelForm, Form), {})().fields.keys()), ['name'])
self.assertEqual(list(type(str('NewForm'), (ModelForm, Mixin, Form), {})().fields.keys()), ['name'])
self.assertEqual(list(type(str('NewForm'), (ModelForm, Form, Mixin), {})().fields.keys()), ['name', 'age'])
self.assertEqual(list(type(str('NewForm'), (ModelForm, Form), {'age': None})().fields.keys()), ['name'])
def test_field_removal_name_clashes(self):
"""Regression test for https://code.djangoproject.com/ticket/22510."""
class MyForm(forms.ModelForm):
media = forms.CharField()
class Meta:
model = Writer
fields = '__all__'
class SubForm(MyForm):
media = None
self.assertIn('media', MyForm().fields)
self.assertNotIn('media', SubForm().fields)
self.assertTrue(hasattr(MyForm, 'media'))
self.assertTrue(hasattr(SubForm, 'media'))
class StumpJokeForm(forms.ModelForm):
class Meta:
model = StumpJoke
fields = '__all__'
class CustomFieldWithQuerysetButNoLimitChoicesTo(forms.Field):
queryset = 42
class StumpJokeWithCustomFieldForm(forms.ModelForm):
custom = CustomFieldWithQuerysetButNoLimitChoicesTo()
class Meta:
model = StumpJoke
fields = () # We don't need any fields from the model
class LimitChoicesToTest(TestCase):
"""
Tests the functionality of ``limit_choices_to``.
"""
def setUp(self):
self.threepwood = Character.objects.create(
username='threepwood',
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
self.marley = Character.objects.create(
username='marley',
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
def test_limit_choices_to_callable_for_fk_rel(self):
"""
A ForeignKey relation can use ``limit_choices_to`` as a callable, re #2554.
"""
stumpjokeform = StumpJokeForm()
self.assertIn(self.threepwood, stumpjokeform.fields['most_recently_fooled'].queryset)
self.assertNotIn(self.marley, stumpjokeform.fields['most_recently_fooled'].queryset)
def test_limit_choices_to_callable_for_m2m_rel(self):
"""
A ManyToMany relation can use ``limit_choices_to`` as a callable, re #2554.
"""
stumpjokeform = StumpJokeForm()
self.assertIn(self.threepwood, stumpjokeform.fields['has_fooled_today'].queryset)
self.assertNotIn(self.marley, stumpjokeform.fields['has_fooled_today'].queryset)
def test_custom_field_with_queryset_but_no_limit_choices_to(self):
"""
Regression test for #23795: Make sure a custom field with a `queryset`
attribute but no `limit_choices_to` still works.
"""
f = StumpJokeWithCustomFieldForm()
self.assertEqual(f.fields['custom'].queryset, 42)
class FormFieldCallbackTests(TestCase):
def test_baseform_with_widgets_in_meta(self):
"""Regression for #13095: Using base forms with widgets defined in Meta should not raise errors."""
widget = forms.Textarea()
class BaseForm(forms.ModelForm):
class Meta:
model = Person
widgets = {'name': widget}
fields = "__all__"
Form = modelform_factory(Person, form=BaseForm)
self.assertIs(Form.base_fields['name'].widget, widget)
def test_factory_with_widget_argument(self):
""" Regression for #15315: modelform_factory should accept widgets
argument
"""
widget = forms.Textarea()
# Without a widget should not set the widget to textarea
Form = modelform_factory(Person, fields="__all__")
self.assertNotEqual(Form.base_fields['name'].widget.__class__, forms.Textarea)
        # With a widget should set the widget to textarea
Form = modelform_factory(Person, fields="__all__", widgets={'name': widget})
self.assertEqual(Form.base_fields['name'].widget.__class__, forms.Textarea)
def test_modelform_factory_without_fields(self):
""" Regression for #19733 """
message = (
"Calling modelform_factory without defining 'fields' or 'exclude' "
"explicitly is prohibited."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
modelform_factory(Person)
def test_modelform_factory_with_all_fields(self):
""" Regression for #19733 """
form = modelform_factory(Person, fields="__all__")
self.assertEqual(list(form.base_fields), ["name"])
def test_custom_callback(self):
"""Test that a custom formfield_callback is used if provided"""
callback_args = []
def callback(db_field, **kwargs):
callback_args.append((db_field, kwargs))
return db_field.formfield(**kwargs)
widget = forms.Textarea()
class BaseForm(forms.ModelForm):
class Meta:
model = Person
widgets = {'name': widget}
fields = "__all__"
modelform_factory(Person, form=BaseForm, formfield_callback=callback)
id_field, name_field = Person._meta.fields
self.assertEqual(callback_args,
[(id_field, {}), (name_field, {'widget': widget})])
def test_bad_callback(self):
# A bad callback provided by user still gives an error
self.assertRaises(TypeError, modelform_factory, Person, fields="__all__",
formfield_callback='not a function or callable')
class LocalizedModelFormTest(TestCase):
def test_model_form_applies_localize_to_some_fields(self):
class PartiallyLocalizedTripleForm(forms.ModelForm):
class Meta:
model = Triple
localized_fields = ('left', 'right',)
fields = '__all__'
f = PartiallyLocalizedTripleForm({'left': 10, 'middle': 10, 'right': 10})
self.assertTrue(f.is_valid())
self.assertTrue(f.fields['left'].localize)
self.assertFalse(f.fields['middle'].localize)
self.assertTrue(f.fields['right'].localize)
def test_model_form_applies_localize_to_all_fields(self):
class FullyLocalizedTripleForm(forms.ModelForm):
class Meta:
model = Triple
localized_fields = '__all__'
fields = '__all__'
f = FullyLocalizedTripleForm({'left': 10, 'middle': 10, 'right': 10})
self.assertTrue(f.is_valid())
self.assertTrue(f.fields['left'].localize)
self.assertTrue(f.fields['middle'].localize)
self.assertTrue(f.fields['right'].localize)
def test_model_form_refuses_arbitrary_string(self):
with self.assertRaises(TypeError):
class BrokenLocalizedTripleForm(forms.ModelForm):
class Meta:
model = Triple
localized_fields = "foo"
class CustomMetaclass(ModelFormMetaclass):
def __new__(cls, name, bases, attrs):
new = super(CustomMetaclass, cls).__new__(cls, name, bases, attrs)
new.base_fields = {}
return new
class CustomMetaclassForm(six.with_metaclass(CustomMetaclass, forms.ModelForm)):
pass
class CustomMetaclassTestCase(TestCase):
def test_modelform_factory_metaclass(self):
new_cls = modelform_factory(Person, fields="__all__", form=CustomMetaclassForm)
self.assertEqual(new_cls.base_fields, {})
|
{
"content_hash": "b7add052742b669c347911d78322dfc5",
"timestamp": "",
"source": "github",
"line_count": 2609,
"max_line_length": 219,
"avg_line_length": 41.27251820620928,
"alnum_prop": 0.599925705794948,
"repo_name": "52ai/django-ccsds",
"id": "2672fa51164f02e4d5a17713e14d15c07401d6b6",
"size": "107680",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/model_forms/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "43623"
},
{
"name": "HTML",
"bytes": "173769"
},
{
"name": "JavaScript",
"bytes": "106416"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "10925166"
},
{
"name": "Shell",
"bytes": "934"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
from django import template
from dashboard.models import PagePos, SiteSetting
register = template.Library()
@register.inclusion_tag('header_links.html')
def show_header_links():
header_pos = PagePos.objects.get_headers()
return locals()
@register.inclusion_tag('footer.html')
def show_footer():
footer_elements = dict.fromkeys(['address_lines', 'phone', 'copyright',
'facebook_link', 'twitter_link'])
footer_elements['address_lines'] = [setting.value for setting in sorted(
        SiteSetting.objects.filter(key__startswith='address_line_'),
        # NOTE: the numeric suffix is compared as a string, so lines sort
        # lexicographically ('address_line_10' before 'address_line_2').
        key=lambda address: address.key.rsplit('_', 1)[-1],
    )]
    # fall back to a site-wide setting for any element not populated above
    for k, v in footer_elements.items():
        footer_elements[k] = v or SiteSetting.get(k, '')
return footer_elements
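# A minimal template usage sketch (illustrative only; 'tags' is the library
# name implied by this file's path, home/templatetags/tags.py):
#
#   {% load tags %}
#   {% show_header_links %}
#   {% show_footer %}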
|
{
"content_hash": "e52200459de20405329f371b931b8a04",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 76,
"avg_line_length": 30.76923076923077,
"alnum_prop": 0.65875,
"repo_name": "Ma233/beijingteach",
"id": "85e3fc1da9f0197c157805b96b47634ce070c7d1",
"size": "800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "home/templatetags/tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "43425"
},
{
"name": "HTML",
"bytes": "57326"
},
{
"name": "JavaScript",
"bytes": "48523"
},
{
"name": "Python",
"bytes": "33497"
}
],
"symlink_target": ""
}
|
"""Inception Resnet v2 Faster R-CNN implementation in Keras.
See "Inception-v4, Inception-ResNet and the Impact of Residual Connections on
Learning" by Szegedy et al. (https://arxiv.org/abs/1602.07261)
as well as
"Speed/accuracy trade-offs for modern convolutional object detectors" by
Huang et al. (https://arxiv.org/abs/1611.10012)
"""
# Skip pylint for this file because it times out
# pylint: skip-file
import tensorflow.compat.v1 as tf
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.models.keras_models import inception_resnet_v2
from object_detection.utils import model_util
from object_detection.utils import variables_helper
class FasterRCNNInceptionResnetV2KerasFeatureExtractor(
faster_rcnn_meta_arch.FasterRCNNKerasFeatureExtractor):
"""Faster R-CNN with Inception Resnet v2 feature extractor implementation."""
def __init__(self,
is_training,
first_stage_features_stride,
batch_norm_trainable=False,
weight_decay=0.0):
"""Constructor.
Args:
is_training: See base class.
first_stage_features_stride: See base class.
batch_norm_trainable: See base class.
weight_decay: See base class.
Raises:
ValueError: If `first_stage_features_stride` is not 8 or 16.
"""
if first_stage_features_stride != 8 and first_stage_features_stride != 16:
raise ValueError('`first_stage_features_stride` must be 8 or 16.')
super(FasterRCNNInceptionResnetV2KerasFeatureExtractor, self).__init__(
is_training, first_stage_features_stride, batch_norm_trainable,
weight_decay)
self._variable_dict = {}
self.classification_backbone = None
def preprocess(self, resized_inputs):
"""Faster R-CNN with Inception Resnet v2 preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: A [batch, height_in, width_in, channels] float32 tensor
representing a batch of images with values between 0 and 255.0.
Returns:
preprocessed_inputs: A [batch, height_out, width_out, channels] float32
tensor representing a batch of images.
"""
return (2.0 / 255.0) * resized_inputs - 1.0
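  # Worked example (illustrative comment, not from the original file): with
  # pixel values in [0, 255], the affine map above sends 0 to -1.0 and 255 to
  # 1.0 (for instance, (2.0 / 255.0) * 255.0 - 1.0 == 1.0), so the
  # preprocessed outputs lie in [-1, 1].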
def get_proposal_feature_extractor_model(self, name=None):
"""Returns a model that extracts first stage RPN features.
Extracts features using the first half of the Inception Resnet v2 network.
We construct the network in `align_feature_maps=True` mode, which means
that all VALID paddings in the network are changed to SAME padding so that
the feature maps are aligned.
Args:
name: A scope name to construct all variables within.
Returns:
A Keras model that takes preprocessed_inputs:
A [batch, height, width, channels] float32 tensor
representing a batch of images.
And returns rpn_feature_map:
A tensor with shape [batch, height, width, depth]
"""
if not self.classification_backbone:
self.classification_backbone = inception_resnet_v2.inception_resnet_v2(
self._train_batch_norm,
output_stride=self._first_stage_features_stride,
align_feature_maps=True,
weight_decay=self._weight_decay,
weights=None,
include_top=False)
with tf.name_scope(name):
with tf.name_scope('InceptionResnetV2'):
proposal_features = self.classification_backbone.get_layer(
name='block17_20_ac').output
keras_model = tf.keras.Model(
inputs=self.classification_backbone.inputs,
outputs=proposal_features)
for variable in keras_model.variables:
self._variable_dict[variable.name[:-2]] = variable
return keras_model
def get_box_classifier_feature_extractor_model(self, name=None):
"""Returns a model that extracts second stage box classifier features.
This function reconstructs the "second half" of the Inception ResNet v2
network after the part defined in `get_proposal_feature_extractor_model`.
Args:
name: A scope name to construct all variables within.
Returns:
A Keras model that takes proposal_feature_maps:
A 4-D float tensor with shape
[batch_size * self.max_num_proposals, crop_height, crop_width, depth]
representing the feature map cropped to each proposal.
And returns proposal_classifier_features:
A 4-D float tensor with shape
[batch_size * self.max_num_proposals, height, width, depth]
representing box classifier features for each proposal.
"""
if not self.classification_backbone:
self.classification_backbone = inception_resnet_v2.inception_resnet_v2(
self._train_batch_norm,
output_stride=self._first_stage_features_stride,
align_feature_maps=True,
weight_decay=self._weight_decay,
weights=None,
include_top=False)
with tf.name_scope(name):
with tf.name_scope('InceptionResnetV2'):
proposal_feature_maps = self.classification_backbone.get_layer(
name='block17_20_ac').output
proposal_classifier_features = self.classification_backbone.get_layer(
name='conv_7b_ac').output
keras_model = model_util.extract_submodel(
model=self.classification_backbone,
inputs=proposal_feature_maps,
outputs=proposal_classifier_features)
for variable in keras_model.variables:
self._variable_dict[variable.name[:-2]] = variable
return keras_model
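# A minimal usage sketch (illustrative only; the constructor arguments and
# input shape below are assumptions, not values taken from this file):
#
#   extractor = FasterRCNNInceptionResnetV2KerasFeatureExtractor(
#       is_training=False, first_stage_features_stride=16)
#   rpn_model = extractor.get_proposal_feature_extractor_model(name='FirstStage')
#   images = tf.zeros([1, 224, 224, 3])
#   rpn_features = rpn_model(extractor.preprocess(images))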
|
{
"content_hash": "8ffc6364bc9f75a04bc86f3d9aa5a8a6",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 79,
"avg_line_length": 39.06944444444444,
"alnum_prop": 0.679701386420192,
"repo_name": "tombstone/models",
"id": "f185aa01dd377c66b94ca37cc244350b2071f21c",
"size": "6316",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1365199"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33208"
},
{
"name": "Jupyter Notebook",
"bytes": "1858048"
},
{
"name": "Makefile",
"bytes": "4763"
},
{
"name": "Python",
"bytes": "7241242"
},
{
"name": "Shell",
"bytes": "102270"
},
{
"name": "TypeScript",
"bytes": "6515"
}
],
"symlink_target": ""
}
|
"""Handle the SCOP HIErarchy files, which describe the SCOP hierarchy in
terms of SCOP unique identifiers (sunid).
The file format is described in the scop
"release notes.":http://scop.berkeley.edu/release-notes-1.55.html
The latest HIE file can be found
"elsewhere at SCOP.":http://scop.mrc-lmb.cam.ac.uk/scop/parse/
"Release 1.55":http://scop.berkeley.edu/parse/dir.hie.scop.txt_1.55 (July 2001)
"""
class Record(object):
"""Holds information for one node in the SCOP hierarchy.
Attributes:
    - sunid - SCOP unique identifier of this node
    - parent - The parent's sunid
    - children - Sequence of the children's sunids
"""
def __init__(self, line=None):
self.sunid = ''
self.parent = ''
self.children = []
if line:
self._process(line)
def _process(self, line):
"""Parses HIE records.
        Records consist of 3 tab-delimited fields: the node's sunid, the
        parent's sunid, and a list of the children's sunids.
"""
# For example ::
#
# 0 - 46456,48724,51349,53931,56572,56835,56992,57942
# 21953 49268 -
# 49267 49266 49268,49269
line = line.rstrip() # no trailing whitespace
        columns = line.split('\t')  # separate the tab-delimited cols
if len(columns) != 3:
raise ValueError("I don't understand the format of %s" % line)
sunid, parent, children = columns
if sunid == '-':
self.sunid = ''
else:
self.sunid = int(sunid)
if parent == '-':
self.parent = ''
else:
self.parent = int(parent)
if children == '-':
self.children = ()
else:
children = children.split(',')
self.children = [int(x) for x in children]
def __str__(self):
s = []
s.append(str(self.sunid))
if self.parent:
s.append(str(self.parent))
else:
if self.sunid != 0:
s.append('0')
else:
s.append('-')
if self.children:
s.append(",".join(str(x) for x in self.children))
else:
s.append('-')
return "\t".join(s) + "\n"
def parse(handle):
"""Iterates over a HIE file as Hie records for each line.
Arguments:
- handle - file-like object.
"""
for line in handle:
if line.startswith('#'):
continue
yield Record(line)
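# A minimal usage sketch (illustrative only; the file name is an assumption
# based on the release notes cited in the module docstring):
#
#   with open('dir.hie.scop.txt') as handle:
#       for record in parse(handle):
#           print(record.sunid, record.parent, record.children)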
|
{
"content_hash": "1d260b22b4a0768283ea91fd1fa21ba1",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 27.053763440860216,
"alnum_prop": 0.5397456279809221,
"repo_name": "zjuchenyuan/BioWeb",
"id": "3e2bc040ee0abd1b83645f8e091035a220f15f67",
"size": "2745",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Lib/Bio/SCOP/Hie.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "22925"
},
{
"name": "Batchfile",
"bytes": "143"
},
{
"name": "C",
"bytes": "414849"
},
{
"name": "CSS",
"bytes": "84526"
},
{
"name": "HTML",
"bytes": "6119"
},
{
"name": "Perl",
"bytes": "11818"
},
{
"name": "Python",
"bytes": "6614790"
}
],
"symlink_target": ""
}
|
from __future__ import division, print_function, absolute_import
import numpy as np
from functools import reduce
from itertools import product
from t_test import *
def size(shape, axis=None):
"""
Return the number of elements along a given axis.
Parameters
----------
shape : tuple
The shape of the input array
axis : int, optional
        Axis along which the elements are counted. By default, the
        total number of elements is returned.
Returns
-------
element_count : int
Number of elements along the specified axis.
"""
if axis is None:
size = 1
for e in shape:
size *= e
return size
return shape[axis]
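# For example, size((4, 4)) == 16, and size((4, 4), axis=0) == 4.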
def get_increment(shape):
"""
Return the increments corresponding to each axis or dimension in the shape.
Parameters
----------
shape : tuple
The shape of the input array.
Returns
-------
increment_per_axis : list
        For each axis, the number of positions to move in the linear
        (row-major) order when the index along that axis is incremented
        by one.
"""
inc = [reduce(lambda x, y: x * y, shape[1 + i:], 1)
for i in range(len(shape[1:]))]
return inc + [1]
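# For example, get_increment((2, 3, 4)) == [12, 4, 1]: advancing the index by
# one along axis 0 moves 3 * 4 = 12 positions in row-major linear order, one
# step along axis 1 moves 4 positions, and axis 2 is contiguous.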
def get_index(shape, position):
"""
    Return the index in the multidimensional array that corresponds to the
    element at the given position in the linear ordering.
Parameters
----------
shape : tuple
The shape of the input array
position : int
The position in the linear ordering
Returns
-------
index : tuple (same number of elements as shape)
The index in the multidimensional array specified by the
position in linear order
"""
assert position < size(shape)
inc = get_increment(shape)
index = list()
for i in inc:
x = position // i
position -= x * i
index.append(x)
return tuple(index)
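# For example, get_index((2, 3, 4), 7) == (0, 1, 3), since 7 == 0*12 + 1*4 + 3;
# this agrees with numpy.unravel_index(7, (2, 3, 4)) in C (row-major) order.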
def find_activated_voxel(shape, p):
"""
Return the positions of the activated voxels based on the p-values we get from the t-test
Parameters
----------
    shape : tuple
        The spatial shape of the data volume (used to unravel each flat
        voxel position into a multidimensional index)
    p: numpy.ndarray
        The p values for our predictors and voxels
    Returns
    -------
    lst : list of numpy.ndarray
        One array of activated voxel indices per predictor; the entries
        correspond to the cond001...cond004 .txt files in model_one
"""
    # number of predictors (betas); row 0 of p is skipped below
    n = p.shape[0] - 1
    lst = []
    for t in range(1, n + 1):
        # voxels whose p-value for predictor t falls below the 0.05 threshold
        significant = [i for i, j in enumerate(p[t, ...]) if j < 0.05]
        position = np.asarray([get_index(shape, i) for i in significant])
        lst.append(position)
    return lst
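# A minimal usage sketch (illustrative only; the shape and the number of
# predictors below are assumptions):
#
#   vol_shape = (64, 64, 30)                    # spatial shape of the volume
#   p = np.random.rand(5, size(vol_shape))      # one row per beta, row 0 skipped
#   active = find_activated_voxel(vol_shape, p)
#   # active[t] holds the (i, j, k) indices of voxels with p < 0.05 for
#   # predictor t + 1.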
|
{
"content_hash": "5cbdcac8cd41cb3e68a1ac4c2ccc62e4",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 93,
"avg_line_length": 27.95145631067961,
"alnum_prop": 0.6015977770059048,
"repo_name": "ye-zhi/project-epsilon",
"id": "f0b043aabd118dccc3ea71c08736ea52193d7046",
"size": "2879",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/utils/functions/find_activated_voxel_functions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "3483"
},
{
"name": "Python",
"bytes": "140942"
},
{
"name": "TeX",
"bytes": "65261"
}
],
"symlink_target": ""
}
|
import mxnet as mx
import numpy as np
import unittest
from mxnet.test_utils import rand_ndarray, assert_almost_equal
from common import setup_module, with_seed, assertRaises, teardown
from mxnet.base import py_str, MXNetError
shape = (4, 4)
keys = [5, 7, 11]
str_keys = ['b', 'c', 'd']
def init_kv(stype='default'):
"""init kv """
kv = mx.kv.create()
# single
kv.init(3, mx.nd.zeros(shape=shape, stype=stype))
# list
kv.init(keys, [mx.nd.zeros(shape=shape, stype=stype)] * len(keys))
return kv
def init_kv_with_str(stype='default'):
"""init kv """
kv = mx.kv.create()
# single
kv.init('a', mx.nd.zeros(shape, stype=stype))
# list
kv.init(str_keys, [mx.nd.zeros(shape=shape, stype=stype)] * len(keys))
return kv
def check_diff_to_scalar(A, x):
""" assert A == x"""
assert(np.sum(np.abs((A - x).asnumpy())) == 0)
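# A tiny end-to-end sketch of the push/pull pattern exercised throughout
# this file (illustrative only):
#
#   kv = mx.kv.create()
#   kv.init('w', mx.nd.zeros(shape))
#   kv.push('w', mx.nd.ones(shape))   # pushed values are aggregated in the store
#   out = mx.nd.empty(shape)
#   kv.pull('w', out=out)
#   check_diff_to_scalar(out, 1)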
@with_seed()
def test_single_kv_pair():
"""single key-value pair push & pull"""
def check_single_kv_pair(kv, key, stype):
kv.push(key, mx.nd.ones(shape).tostype(stype))
val = mx.nd.empty(shape)
kv.pull(key, out=val)
check_diff_to_scalar(val, 1)
stypes = ['default', 'row_sparse']
for stype in stypes:
check_single_kv_pair(init_kv(), 3, stype)
check_single_kv_pair(init_kv_with_str(), 'a', stype)
@with_seed()
def test_row_sparse_pull():
kv = init_kv_with_str('row_sparse')
kv.init('e', mx.nd.ones(shape).tostype('row_sparse'))
def check_row_sparse_pull(kv, count):
num_rows = shape[0]
vals = []
row_ids = []
all_row_ids = np.arange(num_rows)
for i in range(count):
vals.append(mx.nd.zeros(shape).tostype('row_sparse'))
row_id = np.random.randint(num_rows, size=num_rows)
row_ids.append(mx.nd.array(row_id).reshape((2, num_rows//2)))
row_ids_to_pull = row_ids[0] if len(row_ids) == 1 else row_ids
vals_to_pull = vals[0] if len(vals) == 1 else vals
kv.row_sparse_pull('e', out=vals_to_pull, row_ids=row_ids_to_pull)
for val, row_id in zip(vals, row_ids):
retained = val.asnumpy()
excluded_row_ids = np.setdiff1d(all_row_ids, row_id.asnumpy())
for row in range(num_rows):
expected_val = np.zeros_like(retained[row])
expected_val += 0 if row in excluded_row_ids else 1
assert_almost_equal(retained[row], expected_val)
check_row_sparse_pull(kv, 1)
check_row_sparse_pull(kv, 4)
@with_seed()
def test_init():
"""test init"""
def check_init(kv, key):
kv.init(key, mx.nd.ones(shape)*4)
a = mx.nd.zeros(shape)
kv.pull(key, out=a)
check_diff_to_scalar(a, 4)
check_init(mx.kv.create(), 3)
check_init(mx.kv.create(), 'a')
@with_seed()
def test_pull():
"""test pull"""
def check_pull(kv):
a = mx.nd.ones(shape)
b = mx.nd.zeros(shape)
kv.init('1', mx.nd.zeros(shape))
kv.push('1', [a,a,a,a])
kv.pull('1', b)
check_diff_to_scalar(b, 4)
kv.init('2', mx.nd.zeros(shape))
kv.pull('2', b)
check_diff_to_scalar(b, 0)
check_pull(mx.kv.create('device'))
check_pull(mx.kv.create())
@with_seed()
def test_list_kv_pair():
"""list key-value pair push & pull"""
def check_list_kv_pair(kv, key, stype):
kv.push(key, [mx.nd.ones(shape).tostype(stype)*4] * len(key))
val = [mx.nd.empty(shape)] * len(key)
kv.pull(key, out=val)
for v in val:
check_diff_to_scalar(v, 4)
stypes = ['default', 'row_sparse']
for stype in stypes:
check_list_kv_pair(init_kv(), keys, stype)
check_list_kv_pair(init_kv_with_str(), str_keys, stype)
@with_seed()
def test_aggregator():
"""aggregate value on muliple devices"""
def check_aggregator(kv, key, key_list, stype):
# devices
num_devs = 4
devs = [mx.Context('cpu', i) for i in range(num_devs)]
# single
vals = [mx.nd.ones(shape, d).tostype(stype) for d in devs]
outs = [mx.nd.empty(shape, d) for d in devs]
kv.push(key, vals)
kv.pull(key, out=outs)
for out in outs:
check_diff_to_scalar(out, num_devs)
# list
vals = [[mx.nd.ones(shape, d).tostype(stype)*2.0 for d in devs]] * len(key_list)
outs = [[mx.nd.empty(shape, d) for d in devs]] * len(key_list)
kv.push(key_list, vals)
kv.pull(key_list, out=outs)
for out in outs:
for o in out:
check_diff_to_scalar(o, num_devs * 2.0)
stypes = ['default', 'row_sparse']
for stype in stypes:
check_aggregator(init_kv(), 3, keys, stype)
check_aggregator(init_kv_with_str(), 'a', str_keys, stype)
@with_seed()
def test_sparse_aggregator():
"""aggregate sparse ndarray on muliple devices"""
def check_sparse_aggregator(sparse_pull):
stype = 'row_sparse'
kv = init_kv_with_str(stype)
# devices
num_devs = 4
devs = [mx.Context('cpu', i) for i in range(num_devs)]
# single
vals = [rand_ndarray(shape, stype).copyto(devs[i]) for i in range(num_devs)]
expected_sum = np.zeros(shape)
for v in vals:
expected_sum += v.asnumpy()
# prepare row_ids
kv.push('a', vals)
if sparse_pull:
all_rows = mx.nd.array(np.arange(shape[0]))
kv.row_sparse_pull('a', out=vals, row_ids=[all_rows] * len(vals))
else:
kv.pull('a', out=vals, ignore_sparse=False)
result_sum = np.zeros(shape)
for v in vals:
result_sum += v.asnumpy()
assert_almost_equal(result_sum, expected_sum * num_devs)
# list
vals = [[rand_ndarray(shape, stype).copyto(devs[i]) for i in range(num_devs)]] * len(keys)
expected_sum = np.zeros(shape)
for v in vals[0]:
expected_sum += v.asnumpy()
kv.push(str_keys, vals)
if sparse_pull:
kv.row_sparse_pull(str_keys, out=vals, row_ids=[[all_rows] * num_devs] * len(vals))
else:
kv.pull(str_keys, out=vals, ignore_sparse=False)
for vv in vals:
result_sum = np.zeros(shape)
for v in vv:
result_sum += v.asnumpy()
assert_almost_equal(result_sum, expected_sum * num_devs)
check_sparse_aggregator(False)
check_sparse_aggregator(True)
def updater(key, recv, local):
"""use updater: += with int keys"""
assert(isinstance(key, int))
local += recv
def str_updater(key, recv, local):
"""use updater: += with str keys"""
if isinstance(key, bytes):
key = py_str(key)
assert(isinstance(key, str))
local += recv
@with_seed()
def test_updater(dev='cpu'):
"""updater"""
def check_updater(kv, key, key_list, stype):
# devices
num_devs = 4
devs = [mx.Context(dev, i) for i in range(num_devs)]
# single
vals = [mx.nd.ones(shape, d).tostype(stype) for d in devs]
outs = [mx.nd.empty(shape, d) for d in devs]
kv.push(key, vals)
kv.pull(key, out=outs)
for out in outs:
check_diff_to_scalar(out, num_devs)
# list
vals = [[mx.nd.ones(shape, d).tostype(stype) for d in devs]] * len(key_list)
outs = [[mx.nd.empty(shape, d) for d in devs]] * len(key_list)
num_push = 4
for i in range(num_push):
kv.push(key_list, vals)
kv.pull(key_list, out=outs)
for out in outs:
for o in out:
check_diff_to_scalar(o, num_devs * num_push)
stypes = ['default', 'row_sparse']
for stype in stypes:
kv = init_kv()
kv._set_updater(updater)
check_updater(kv, 3, keys, stype)
str_kv = init_kv_with_str()
str_kv._set_updater(str_updater)
check_updater(str_kv, 'a', str_keys, stype)
@with_seed()
def test_get_type():
kvtype = 'local_allreduce_cpu'
kv = mx.kv.create(kvtype)
assert kv.type == kvtype
@with_seed()
def test_invalid_pull():
def check_ignored_pull_single(kv, key):
dns_val = (mx.nd.ones(shape) * 2)
rsp_val = dns_val.tostype('row_sparse')
kv.pull(key, out=rsp_val)
check_diff_to_scalar(rsp_val, 2)
def check_ignored_pull_list(kv, key):
dns_val = [mx.nd.ones(shape) * 2] * len(key)
rsp_val = [val.tostype('row_sparse') for val in dns_val]
kv.pull(key, out=rsp_val)
for v in rsp_val:
check_diff_to_scalar(v, 2)
def check_invalid_rsp_pull_single(kv, key):
dns_val = mx.nd.ones(shape) * 2
assertRaises(MXNetError, kv.row_sparse_pull,
key, out=dns_val, row_ids=mx.nd.array([1]))
def check_invalid_rsp_pull_list(kv, key):
dns_val = [mx.nd.ones(shape) * 2] * len(key)
assertRaises(MXNetError, kv.row_sparse_pull, key, out=dns_val,
row_ids=[mx.nd.array([1])] * len(key))
def check_invalid_key_types_single(kv, key):
dns_val = mx.nd.ones(shape) * 2
rsp_val = dns_val.tostype('row_sparse')
assertRaises(MXNetError, kv.init, key, dns_val)
assertRaises(MXNetError, kv.push, key, dns_val)
assertRaises(MXNetError, kv.pull, key, dns_val)
assertRaises(MXNetError, kv.row_sparse_pull, key, rsp_val,
row_ids=mx.nd.array([1]))
def check_invalid_key_types_list(kv, key):
dns_val = [mx.nd.ones(shape) * 2] * len(key)
rsp_val = [val.tostype('row_sparse') for val in dns_val]
assertRaises(MXNetError, kv.init, key, dns_val)
assertRaises(MXNetError, kv.push, key, dns_val)
assertRaises(MXNetError, kv.pull, key, dns_val)
assertRaises(MXNetError, kv.row_sparse_pull, key, rsp_val,
row_ids=[mx.nd.array([1])] * len(key))
int_kv = init_kv()
str_kv = init_kv_with_str()
kvs = [int_kv, str_kv]
single_keys = [3, 'a']
list_keys = [keys, str_keys]
for i in range(2):
# pull with rsp outputs should be ignored with no values updated
check_ignored_pull_single(kvs[i], single_keys[i])
check_ignored_pull_list(kvs[i], list_keys[i])
# row_sparse_pull should be aborted when vals.stype != row_sparse
check_invalid_rsp_pull_single(kvs[i], single_keys[i])
check_invalid_rsp_pull_list(kvs[i], list_keys[i])
# kvstore should be restricted to only accept either int or str keys
check_invalid_key_types_single(kvs[i], single_keys[1 - i])
check_invalid_key_types_list(kvs[i], list_keys[1 - i])
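# Illustrative sketch (assumes a fresh 'local' kvstore and the module-level
# `shape`; key 9 is hypothetical). Not collected by nose; it condenses the
# aggregation semantics the tests above exercise: pushing a list of values
# from several devices sums them on the key.
def _aggregation_sketch():
    kv = mx.kv.create('local')
    kv.init(9, mx.nd.zeros(shape))
    vals = [mx.nd.ones(shape, mx.Context('cpu', i)) for i in range(4)]
    kv.push(9, vals)                  # per-device values are summed
    out = mx.nd.empty(shape)
    kv.pull(9, out=out)
    check_diff_to_scalar(out, 4)      # every entry of `out` is now 4.0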
if __name__ == '__main__':
import nose
nose.runmodule()
|
{
"content_hash": "6166bca2a176b0ae066d6d4615035113",
"timestamp": "",
"source": "github",
"line_count": 331,
"max_line_length": 98,
"avg_line_length": 32.55287009063444,
"alnum_prop": 0.5693735498839907,
"repo_name": "mbaijal/incubator-mxnet",
"id": "28d4ec262c06110f620e69d2b85c7249c5bad88d",
"size": "11581",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tests/python/unittest/test_kvstore.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "173224"
},
{
"name": "C++",
"bytes": "6116511"
},
{
"name": "CMake",
"bytes": "86446"
},
{
"name": "Clojure",
"bytes": "389028"
},
{
"name": "Cuda",
"bytes": "813783"
},
{
"name": "Dockerfile",
"bytes": "43395"
},
{
"name": "Groovy",
"bytes": "22850"
},
{
"name": "Java",
"bytes": "128595"
},
{
"name": "Julia",
"bytes": "408765"
},
{
"name": "Jupyter Notebook",
"bytes": "1657933"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "70735"
},
{
"name": "Perl",
"bytes": "1535873"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "PowerShell",
"bytes": "6150"
},
{
"name": "Python",
"bytes": "6206547"
},
{
"name": "R",
"bytes": "351354"
},
{
"name": "Scala",
"bytes": "1102749"
},
{
"name": "Shell",
"bytes": "305673"
},
{
"name": "Smalltalk",
"bytes": "43774"
}
],
"symlink_target": ""
}
|
from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import Qt
from time import strftime
class DateTime(QtGui.QDialog):
def __init__(self,parent = None):
QtGui.QDialog.__init__(self, parent)
self.parent = parent
self.formats = ["%A, %d. %B %Y %H:%M",
"%A, %d. %B %Y",
"%d. %B %Y %H:%M",
"%d.%m.%Y %H:%M",
"%d. %B %Y",
"%d %m %Y",
"%d.%m.%Y",
"%x",
"%X",
"%H:%M"]
self.initUI()
def initUI(self):
self.box = QtGui.QComboBox(self)
for i in self.formats:
self.box.addItem(strftime(i))
insert = QtGui.QPushButton("Insert",self)
insert.clicked.connect(self.insert)
cancel = QtGui.QPushButton("Cancel",self)
cancel.clicked.connect(self.close)
layout = QtGui.QGridLayout()
layout.addWidget(self.box,0,0,1,2)
layout.addWidget(insert,1,0)
layout.addWidget(cancel,1,1)
self.setGeometry(300,300,400,80)
self.setWindowTitle("Date and Time")
self.setLayout(layout)
def insert(self):
# Grab cursor
cursor = self.parent.text.textCursor()
datetime = strftime(self.formats[self.box.currentIndex()])
# Insert the comboBox's current text
cursor.insertText(datetime)
# Close the window
self.close()
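# --- Usage sketch (illustrative; not part of the tutorial) -------------------
# DateTime expects a parent exposing a QTextEdit as `parent.text`; DemoParent
# below is a hypothetical stand-in for the tutorial's main window.
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    class DemoParent(QtGui.QMainWindow):
        def __init__(self):
            QtGui.QMainWindow.__init__(self)
            self.text = QtGui.QTextEdit(self)
            self.setCentralWidget(self.text)
    window = DemoParent()
    window.show()
    DateTime(window).show()
    sys.exit(app.exec_())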
|
{
"content_hash": "8cab1c7c2b904308cd75eb2ff956a3ef",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 66,
"avg_line_length": 26.203389830508474,
"alnum_prop": 0.4896507115135834,
"repo_name": "goldsborough/Writer-Tutorial",
"id": "24c8b5ac55dad7244e91416a2093563a9342ff7e",
"size": "1546",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "Part-4/ext/datetime.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "178018"
}
],
"symlink_target": ""
}
|
import os, sys; sys.path.insert(0, os.path.join("..", ".."))
import datetime
import codecs
import random
import unittest
from pattern import db
# To test MySQL, you need MySQLdb and a username + password with rights to create a database.
HOST, PORT, USERNAME, PASSWORD = \
"localhost", 3306, "root", ""
DB_MYSQL = DB_MYSQL_EXCEPTION = None
DB_SQLITE = DB_SQLITE_EXCEPTION = None
def create_db_mysql():
global DB_MYSQL
global DB_MYSQL_EXCEPTION
try:
DB_MYSQL = db.Database(
type = db.MYSQL,
name = "pattern_unittest_db",
host = HOST,
port = PORT,
username = USERNAME,
password = PASSWORD)
except ImportError, e:
DB_MYSQL_EXCEPTION = None # "No module named MySQLdb"
except Exception, e:
DB_MYSQL_EXCEPTION = e
def create_db_sqlite():
global DB_SQLITE
global DB_SQLITE_EXCEPTION
try:
DB_SQLITE = db.Database(
type = db.SQLITE,
name = "pattern_unittest_db",
host = HOST,
port = PORT,
username = USERNAME,
password = PASSWORD)
except Exception, e:
DB_SQLITE_EXCEPTION = e
#---------------------------------------------------------------------------------------------------
class TestUnicode(unittest.TestCase):
def setUp(self):
# Test data with different (or wrong) encodings.
self.strings = (
u"ünîcøde",
u"ünîcøde".encode("utf-16"),
u"ünîcøde".encode("latin-1"),
u"ünîcøde".encode("windows-1252"),
"ünîcøde",
u"אוניקאָד"
)
def test_decode_utf8(self):
# Assert unicode.
for s in self.strings:
self.assertTrue(isinstance(db.decode_utf8(s), unicode))
print "pattern.db.decode_utf8()"
def test_encode_utf8(self):
# Assert Python bytestring.
for s in self.strings:
self.assertTrue(isinstance(db.encode_utf8(s), str))
print "pattern.db.encode_utf8()"
def test_string(self):
# Assert string() with default for "" and None.
for v, s in ((True, u"True"), (1, u"1"), (1.0, u"1.0"), ("", u"????"), (None, u"????")):
self.assertEqual(db.string(v, default="????"), s)
print "pattern.db.string()"
#---------------------------------------------------------------------------------------------------
class TestEntities(unittest.TestCase):
def setUp(self):
pass
def test_encode_entities(self):
        # Assert HTML entity encoder (e.g., "&" => "&amp;").
        for a, b in (
            ("&#201;", "&#201;"),
            ("&", "&amp;"),
            ("<", "&lt;"),
            (">", "&gt;"),
            ('"', "&quot;"),
            ("'", "&#39;")):
self.assertEqual(db.encode_entities(a), b)
print "pattern.db.encode_entities()"
def test_decode_entities(self):
        # Assert HTML entity decoder (e.g., "&amp;" => "&").
        for a, b in (
            ("&#38;", "&"),
            ("&amp;", "&"),
            ("&#x26;", "&"),
            ("&nbsp;", u"\xa0"),
("&foo;", "&foo;")):
self.assertEqual(db.decode_entities(a), b)
print "pattern.db.decode_entities()"
#---------------------------------------------------------------------------------------------------
class TestDate(unittest.TestCase):
def setUp(self):
pass
def test_date(self):
# Assert string input and default date formats.
for s in (
"2010-09-21 09:27:01",
"2010-09-21T09:27:01Z",
"2010-09-21T09:27:01+0000",
"2010-09-21 09:27",
"2010-09-21",
"21/09/2010",
"21 September 2010",
"September 21 2010",
"September 21, 2010",
1285054021):
v = db.date(s)
self.assertEqual(v.format, "%Y-%m-%d %H:%M:%S")
self.assertEqual(v.year, 2010)
self.assertEqual(v.month, 9)
self.assertEqual(v.day, 21)
# Assert NOW.
for v in (db.date(), db.date(db.NOW)):
self.assertEqual(v.year, datetime.datetime.now().year)
self.assertEqual(v.month, datetime.datetime.now().month)
self.assertEqual(v.day, datetime.datetime.now().day)
self.assertEqual(db.date().year, db.YEAR)
# Assert integer input.
v1 = db.date(2010, 9, 21, format=db.DEFAULT_DATE_FORMAT)
v2 = db.date(2010, 9, 21, 9, 27, 1, 0, db.DEFAULT_DATE_FORMAT)
v3 = db.date(2010, 9, 21, hour=9, minute=27, second=01, format=db.DEFAULT_DATE_FORMAT)
self.assertEqual(str(v1), "2010-09-21 00:00:00")
self.assertEqual(str(v2), "2010-09-21 09:27:01")
self.assertEqual(str(v3), "2010-09-21 09:27:01")
# Assert DateError for other input.
self.assertRaises(db.DateError, db.date, None)
print "pattern.db.date()"
def test_format(self):
# Assert custom input formats.
v = db.date("2010-09", "%Y-%m")
self.assertEqual(str(v), "2010-09-01 00:00:00")
self.assertEqual(v.year, 2010)
# Assert custom output formats.
v = db.date("2010-09", "%Y-%m", format="%Y-%m")
self.assertEqual(v.format, "%Y-%m")
self.assertEqual(str(v), "2010-09")
self.assertEqual(v.year, 2010)
# Assert strftime() for date < 1900.
v = db.date(1707, 4, 15)
self.assertEqual(str(v), "1707-04-15 00:00:00")
self.assertRaises(ValueError, lambda: v.timestamp)
print "pattern.db.Date.__str__()"
def test_timestamp(self):
# Assert Date.timestamp.
v = db.date(2010, 9, 21, format=db.DEFAULT_DATE_FORMAT)
self.assertEqual(v.timestamp, 1285020000)
print "pattern.db.Date.timestamp"
def test_time(self):
# Assert Date + time().
v = db.date("2010-09-21 9:27:00")
v = v - db.time(days=1, hours=1, minutes=1, seconds=1)
self.assertEqual(str(v), "2010-09-20 08:25:59")
print "pattern.db.time()"
#---------------------------------------------------------------------------------------------------
class TestUtilityFunctions(unittest.TestCase):
def setUp(self):
pass
def test_order(self):
# Assert a list of indices in the order as when the given list is sorted.
v = [3,1,2]
self.assertEqual(db.order(v), [1,2,0])
self.assertEqual(db.order(v, reverse=True), [0,2,1])
self.assertEqual(db.order(v, cmp=lambda a,b: a-b), [1,2,0])
self.assertEqual(db.order(v, key=lambda i:i), [1,2,0])
print "pattern.db.order()"
def test_avg(self):
# Assert (1+2+3+4) / 4 = 2.5.
self.assertEqual(db.avg([1,2,3,4]), 2.5)
print "pattern.db.avg()"
def test_variance(self):
# Assert 2.5.
self.assertEqual(db.variance([1,2,3,4,5]), 2.5)
print "pattern.db.variance()"
def test_stdev(self):
# Assert 2.429.
self.assertAlmostEqual(db.stdev([1,5,6,7,6,8]), 2.429, places=3)
print "pattern.db.stdev()"
def test_sqlite_functions(self):
# Assert year(), month(), day(), ..., first(), last() and group_concat() for SQLite.
v = "1707-04-15 01:02:03"
self.assertEqual(db.sqlite_year(v), 1707)
self.assertEqual(db.sqlite_month(v), 4)
self.assertEqual(db.sqlite_day(v), 15)
self.assertEqual(db.sqlite_hour(v), 1)
self.assertEqual(db.sqlite_minute(v), 2)
self.assertEqual(db.sqlite_second(v), 3)
# Aggregate functions.
for f, a, b in (
(db.sqlite_first, [1,2,3], 1),
(db.sqlite_last, [1,2,3], 3),
(db.sqlite_group_concat, [1,2,3], "1,2,3")):
f = f()
for x in a:
f.step(x)
self.assertEqual(f.finalize(), b)
print "pattern.db.sqlite_year()"
print "pattern.db.sqlite_month()"
print "pattern.db.sqlite_day()"
print "pattern.db.sqlite_hour()"
print "pattern.db.sqlite_minute()"
print "pattern.db.sqlite_second()"
print "pattern.db.sqlite_first()"
print "pattern.db.sqlite_last()"
print "pattern.db.sqlite_group_concat()"
#---------------------------------------------------------------------------------------------------
class TestDatabase(unittest.TestCase):
def setUp(self):
# Define self.db and self.type in a subclass.
pass
def tearDown(self):
for table in self.db:
self.db.drop(table)
def test_escape(self):
# Assert str, unicode, int, long, float, bool and None field values.
for v, s in (
( "a", "'a'"),
( u"a", "'a'"),
( 1, "1"),
( 1L, "1"),
( 1.0, "1.0"),
( True, "1"),
(False, "0"),
( None, "null")):
self.assertEqual(db._escape(v), s)
# Assert date.
v = db.date("1707-04-15")
self.assertEqual(db._escape(v), "'1707-04-15 00:00:00'")
# Assert current date.
v = "current_timestamp"
self.assertEqual(db._escape(v), "current_timestamp")
# Assert subquery.
v = self.db.create("dummy", fields=[db.pk()])
v = v.query()
self.assertEqual(db._escape(v), "(select dummy.* from `dummy`)")
# Assert MySQL and SQLite quotes.
if self.db.type == db.MYSQL:
self.assertEqual(self.db.escape("'"), "'\\''")
if self.db.type == db.SQLITE:
self.assertEqual(self.db.escape("'"), "''''")
print "pattern.db._escape()"
def test_database(self):
# Assert Database properties.
self.assertTrue(self.db.type == self.type)
self.assertTrue(self.db.name == "pattern_unittest_db")
self.assertTrue(self.db.host == HOST)
self.assertTrue(self.db.port == PORT)
self.assertTrue(self.db.username == USERNAME)
self.assertTrue(self.db.password == PASSWORD)
self.assertTrue(self.db.tables == {})
self.assertTrue(self.db.relations == [])
self.assertTrue(self.db.connected == True)
self.db.disconnect()
self.assertTrue(self.db.connected == False)
self.assertTrue(self.db.connection == None)
self.db.connect()
print "pattern.db.Database(type=%s)" % self.type.upper()
def test_create_table(self):
# Assert Database.create() new table.
v = self.db.create("products", fields=[
db.primary_key("pid"),
db.field("name", db.STRING, index=True, optional=False),
db.field("price", db.FLOAT)
])
# Assert that the last query executed is stored.
if self.db.type == db.SQLITE:
self.assertEqual(self.db.query, "pragma table_info(`products`);")
if self.db.type == db.MYSQL:
self.assertEqual(self.db.query, "show columns from `products`;")
# Assert new Table exists in Database.tables.
self.assertTrue(isinstance(v, db.Table))
self.assertTrue(len(self.db) == 1)
self.assertTrue(v.pk == "pid")
self.assertTrue(v.fields == ["pid", "name", "price"])
self.assertTrue(self.db[v.name] == v)
self.assertTrue(self.db.tables[v.name] == v)
self.assertTrue(getattr(self.db, v.name) == v)
# Assert Database._field_SQL subroutine for Database.create().
for field, sql1, sql2 in (
(db.primary_key("pid"),
("`pid` integer not null primary key auto_increment", None),
("`pid` integer not null primary key autoincrement", None)),
(db.field("name", db.STRING, index=True, optional=False),
("`name` varchar(100) not null", "create index `products_name` on `products` (`name`);"),
("`name` varchar(100) not null", "create index `products_name` on `products` (`name`);")),
(db.field("price", db.INTEGER),
("`price` integer null", None),
("`price` integer null", None))):
if self.db.type == db.MYSQL:
self.assertEqual(self.db._field_SQL(self.db["products"].name, field), sql1)
if self.db.type == db.SQLITE:
self.assertEqual(self.db._field_SQL(self.db["products"].name, field), sql2)
# Assert TableError if table already exists.
self.assertRaises(db.TableError, self.db.create, "products")
# Assert remove table.
self.db.drop("products")
self.assertTrue(len(self.db) == 0)
print "pattern.db.Database.create()"
class TestCreateMySQLDatabase(unittest.TestCase):
def runTest(self):
if DB_MYSQL_EXCEPTION:
raise DB_MYSQL_EXCEPTION
class TestCreateSQLiteDatabase(unittest.TestCase):
def runTest(self):
if DB_SQLITE_EXCEPTION:
raise DB_SQLITE_EXCEPTION
class TestDeleteMySQLDatabase(unittest.TestCase):
def runTest(self):
DB_MYSQL._delete()
class TestDeleteSQLiteDatabase(unittest.TestCase):
def runTest(self):
DB_SQLITE._delete()
class TestMySQLDatabase(TestDatabase):
def setUp(self):
self.db, self.type = DB_MYSQL, db.MYSQL
TestDatabase.setUp(self)
class TestSQLiteDatabase(TestDatabase):
def setUp(self):
self.db, self.type = DB_SQLITE, db.SQLITE
TestDatabase.setUp(self)
#---------------------------------------------------------------------------------------------------
class TestSchema(unittest.TestCase):
def setUp(self):
pass
def test_string(self):
# Assert callable String.
v1 = db._String()
v2 = db._String()(0)
v3 = db._String()(200)
v4 = db._String()(300)
self.assertEqual(v1, "string")
self.assertEqual(v2, "varchar(1)")
self.assertEqual(v3, "varchar(200)")
self.assertEqual(v4, "varchar(255)")
def test_field(self):
# Assert field() return value with different optional parameters.
# NAME TYPE DEFAULT INDEX OPTIONAL
for kwargs, f in (
(dict(name="id", type=db.INT), ("id", "integer", None, False, True)),
(dict(name="id", type=db.INT, index=db.PRIMARY), ("id", "integer", None, "primary", True)),
(dict(name="id", type=db.INT, index=db.UNIQUE), ("id", "integer", None, "unique", True)),
(dict(name="id", type=db.INT, index="0"), ("id", "integer", None, False, True)),
(dict(name="id", type=db.INT, index="1"), ("id", "integer", None, True, True)),
(dict(name="id", type=db.INT, index=True), ("id", "integer", None, True, True)),
(dict(name="id", type=db.INT, default=0), ("id", "integer", 0, False, True)),
(dict(name="name", type=db.STRING), ("name", "varchar(100)", None, False, True)),
(dict(name="name", type=db.STRING, optional=False), ("name", "varchar(100)", None, False, False)),
(dict(name="name", type=db.STRING, optional="0"), ("name", "varchar(100)", None, False, False)),
(dict(name="name", type=db.STRING(50)), ("name", "varchar(50)", None, False, True)),
(dict(name="price", type=db.FLOAT, default=0), ("price", "real", 0, False, True)),
(dict(name="show", type=db.BOOL), ("show", "tinyint(1)", None, False, True)),
(dict(name="show", type=db.BOOL, default=True), ("show", "tinyint(1)", True, False, True)),
(dict(name="show", type=db.BOOL, default=False), ("show", "tinyint(1)", False, False, True)),
(dict(name="date", type=db.DATE), ("date", "timestamp", "now", False, True)),
(dict(name="date", type=db.DATE, default=db.NOW), ("date", "timestamp", "now", False, True)),
(dict(name="date", type=db.DATE, default="1999-12-31 23:59:59"),
("date", "timestamp", "1999-12-31 23:59:59", False, True))):
self.assertEqual(db.field(**kwargs), f)
# Assert primary_key() return value.
self.assertTrue(db.primary_key() == db.pk() == ("id", "integer", None, "primary", False))
print "pattern.db.field()"
def test_schema(self):
now1 = "current_timestamp"
now2 = "'CURRENT_TIMESTAMP'"
# Assert Schema (= table schema in a uniform way across database engines).
# NAME TYPE DEFAULT INDEX OPTIONAL
for args, v in (
(("id", "integer", None, "pri", False), ("id", db.INT, None, db.PRIMARY, False, None)),
(("id", "integer", None, "uni", False), ("id", db.INT, None, db.UNIQUE, False, None)),
(("id", "int", None, "yes", True), ("id", db.INT, None, True, True, None)),
(("id", "real", None, "mul", True), ("id", db.FLOAT, None, True, True, None)),
(("id", "real", None, "1", True), ("id", db.FLOAT, None, True, True, None)),
(("id", "double", None, "0", True), ("id", db.FLOAT, None, False, True, None)),
(("id", "double", 0, False, False), ("id", db.FLOAT, 0, False, False, None)),
(("text", "varchar(10)", "?", False, True), ("text", db.STRING, "?", False, True, 10)),
(("text", "char(20)", "", False, True), ("text", db.STRING, None, False, True, 20)),
(("text", "text", None, False, True), ("text", db.TEXT, None, False, True, None)),
(("text", "blob", None, False, True), ("text", db.BLOB, None, False, True, None)),
(("show", "tinyint(1)", None, False, True), ("show", db.BOOL, None, False, True, None)),
(("date", "timestamp", None, False, True), ("date", db.DATE, None, False, True, None)),
(("date", "timestamp", now1, False, True), ("date", db.DATE, db.NOW, False, True, None)),
(("date", "time", now2, False, "YES"), ("date", db.DATE, db.NOW, False, True, None))):
s = db.Schema(*args)
self.assertEqual(s.name, v[0])
self.assertEqual(s.type, v[1])
self.assertEqual(s.default, v[2])
self.assertEqual(s.index, v[3])
self.assertEqual(s.optional, v[4])
self.assertEqual(s.length, v[5])
print "pattern.db.Schema()"
#---------------------------------------------------------------------------------------------------
class TestTable(unittest.TestCase):
def setUp(self):
# Define self.db in a subclass.
# Create test tables.
self.db.create("persons", fields=[
db.primary_key("id"),
db.field("name", db.STRING)
])
self.db.create("products", fields=[
db.primary_key("id"),
db.field("name", db.STRING),
db.field("price", db.FLOAT, default=0.0)
])
self.db.create("orders", fields=[
db.primary_key("id"),
db.field("person", db.INTEGER, index=True),
db.field("product", db.INTEGER, index=True),
])
def tearDown(self):
# Drop test tables.
for table in self.db:
self.db.drop(table)
def test_table(self):
# Assert Table properties.
v = self.db.persons
self.assertTrue(v.db == self.db)
self.assertTrue(v.pk == "id")
self.assertTrue(v.fields == ["id", "name"])
self.assertTrue(v.name == "persons")
self.assertTrue(v.abs("name") == "persons.name")
self.assertTrue(v.rows() == [])
self.assertTrue(v.schema["id"].type == db.INTEGER)
self.assertTrue(v.schema["id"].index == db.PRIMARY)
print "pattern.db.Table"
def test_rename(self):
# Assert ALTER TABLE when name changes.
v = self.db.persons
v.name = "clients"
self.assertEqual(self.db.query, "alter table `persons` rename to `clients`;")
self.assertEqual(self.db.tables.get("clients"), v)
print "pattern.db.Table.name"
def test_fields(self):
# Assert ALTER TABLE when column is inserted.
v = self.db.products
v.fields.append(db.field("description", db.TEXT))
self.assertEqual(v.fields, ["id", "name", "price", "description"])
print "pattern.db.Table.fields"
def test_insert_update_delete(self):
# Assert Table.insert().
v1 = self.db.persons.insert(name=u"Kurt Gödel")
v2 = self.db.products.insert(name="pizza", price=10.0)
v3 = self.db.products.insert({"name":"garlic bread", "price":3.0})
v4 = self.db.orders.insert(person=v1, product=v3)
self.assertEqual(v1, 1)
self.assertEqual(v2, 1)
self.assertEqual(v3, 2)
self.assertEqual(v4, 1)
self.assertEqual(self.db.persons.rows(), [(1, u"Kurt Gödel")])
self.assertEqual(self.db.products.rows(), [(1, u"pizza", 10.0), (2, u"garlic bread", 3.0)])
self.assertEqual(self.db.orders.rows(), [(1, 1, 2)])
self.assertEqual(self.db.orders.count(), 1)
self.assertEqual(self.db.products.xml.replace(' extra="auto_increment"', ""),
'<?xml version="1.0" encoding="utf-8"?>\n'
'<table name="products" fields="id, name, price" count="2">\n'
'\t<schema>\n'
'\t\t<field name="id" type="integer" index="primary" optional="no" />\n'
'\t\t<field name="name" type="string" length="100" />\n'
'\t\t<field name="price" type="float" default="0.0" />\n'
'\t</schema>\n'
'\t<rows>\n'
'\t\t<row id="1" name="pizza" price="10.0" />\n'
'\t\t<row id="2" name="garlic bread" price="3.0" />\n'
'\t</rows>\n'
'</table>'
)
# Assert transactions with commit=False.
if self.db.type == db.SQLITE:
self.db.orders.insert(person=v1, product=v2, commit=False)
self.db.rollback()
self.assertEqual(len(self.db.orders), 1)
self.db.orders.insert(person=v1, product=v2, commit=False)
# Assert Table.update().
self.db.products.update(2, price=4.0)
self.db.products.update(2, {"price":4.5})
self.db.products.update(db.all(db.filter("name", "pi*")), name="deeppan pizza")
self.assertEqual(self.db.products.rows(), [(1, u"deeppan pizza", 10.0), (2, u"garlic bread", 4.5)])
# Assert Table.delete().
self.db.products.delete(db.all(db.filter("name", "deeppan*")))
self.db.products.delete(db.ALL)
self.db.orders.delete(1)
self.assertEqual(len(self.db.products), 0)
self.assertEqual(len(self.db.orders), 1)
print "pattern.db.Table.insert()"
print "pattern.db.Table.update()"
print "pattern.db.Table.delete()"
def test_filter(self):
# Assert Table.filter().
self.db.persons.insert(name=u"Kurt Gödel")
self.db.persons.insert(name=u"M. C. Escher")
self.db.persons.insert(name=u"Johann Sebastian Bach")
f = self.db.persons.filter
self.assertEqual(f(("name",), id=1), [(u"Kurt Gödel",)])
self.assertEqual(f(db.ALL, id=(1,2)), [(1, u"Kurt Gödel"), (2, u"M. C. Escher")])
self.assertEqual(f({"id":(1,2)}), [(1, u"Kurt Gödel"), (2, u"M. C. Escher")])
self.assertEqual(f("id", name="Johan*"), [(3,)])
self.assertEqual(f("id", name=("J*","K*")), [(1,), (3,)])
print "pattern.db.Table.filter()"
def test_search(self):
# Assert Table.search => Query object.
v = self.db.persons.search()
self.assertTrue(isinstance(v, db.Query))
self.assertTrue(v.table == self.db.persons)
def test_datasheet(self):
# Assert Table.datasheet() => Datasheet object.
v = self.db.persons.datasheet()
self.assertTrue(isinstance(v, db.Datasheet))
self.assertTrue(v.fields[0] == ("id", db.INTEGER))
print "pattern.db.Table.datasheet()"
class TestMySQLTable(TestTable):
def setUp(self):
self.db = DB_MYSQL
TestTable.setUp(self)
class TestSQLiteTable(TestTable):
def setUp(self):
self.db = DB_SQLITE
TestTable.setUp(self)
#---------------------------------------------------------------------------------------------------
class TestQuery(unittest.TestCase):
def setUp(self):
# Define self.db in a subclass.
# Create test tables.
self.db.create("persons", fields=[
db.primary_key("id"),
db.field("name", db.STRING),
db.field("age", db.INTEGER),
db.field("gender", db.INTEGER)
])
self.db.create("gender", fields=[
db.primary_key("id"),
db.field("name", db.STRING)
])
# Create test data.
self.db.persons.insert(name="john", age="30", gender=2)
self.db.persons.insert(name="jack", age="20", gender=2)
self.db.persons.insert(name="jane", age="30", gender=1)
self.db.gender.insert(name="female")
self.db.gender.insert(name="male")
def tearDown(self):
# Drop test tables.
for table in self.db:
self.db.drop(table)
def _query(self, *args, **kwargs):
""" Returns a pattern.db.Query object on a mock Table and Database.
"""
class Database:
escape, relations = lambda self, v: db._escape(v), []
class Table:
name, fields, db = "persons", ["id", "name", "age", "sex"], Database()
return db.Query(Table(), *args, **kwargs)
def test_abs(self):
# Assert absolute fieldname for trivial cases.
self.assertEqual(db.abs("persons", "name"), "persons.name")
self.assertEqual(db.abs("persons", ("id", "name")), ["persons.id", "persons.name"])
# Assert absolute fieldname with SQL functions (e.g., avg(product.price)).
for f in db.sql_functions.split("|"):
self.assertEqual(db.abs("persons", "%s(name)" % f), "%s(persons.name)" % f)
print "pattern.db.abs()"
def test_cmp(self):
# Assert WHERE-clause from cmp() function.
q = self.db.persons.search(fields=["name"])
self.assertTrue(isinstance(q, db.Query))
for args, sql in (
(("name", u"Kurt%", db.LIKE), u"name like 'Kurt%'"),
(("name", u"Kurt*", "="), u"name like 'Kurt%'"),
(("name", u"*Gödel", "=="), u"name like '%Gödel'"),
(("name", u"Kurt*", "!="), u"name not like 'Kurt%'"),
(("name", u"Kurt*", "<>"), u"name not like 'Kurt%'"),
(("name", u"Gödel", "i="), u"name like 'Gödel'"), # case-insensitive search
(("id", (1, 2), db.IN), u"id in (1,2)"),
(("id", (1, 2), "="), u"id in (1,2)"),
(("id", (1, 2), "=="), u"id in (1,2)"),
(("id", (1, 2), "!="), u"id not in (1,2)"),
(("id", (1, 2), "<>"), u"id not in (1,2)"),
(("id", (1, 3), db.BETWEEN), u"id between 1 and 3"),
(("id", (1, 3), ":"), u"id between 1 and 3"),
(("name", ("G","K*"), "="), u"(name='G' or name like 'K%')"),
(("name", None, "="), u"name is null"),
(("name", None, "=="), u"name is null"),
(("name", None, "!="), u"name is not null"),
(("name", None, "<>"), u"name is not null"),
(("name", q, "="), u"name in (select persons.name from `persons`)"),
(("name", q, "=="), u"name in (select persons.name from `persons`)"),
(("name", q, "!="), u"name not in (select persons.name from `persons`)"),
(("name", q, "<>"), u"name not in (select persons.name from `persons`)"),
(("name", u"Gödel", "="), u"name='Gödel'"),
(("id", 1, ">"), u"id>1")):
self.assertEqual(db.cmp(*args), sql)
print "pattern.db.cmp()"
def test_group(self):
# Assert WHERE with AND/OR combinations from Group object().
yesterday = db.date()
yesterday -= db.time(days=1)
g1 = db.Group(("name", "garlic bread"))
g2 = db.Group(("name", "pizza"), ("price", 10, "<"), operator=db.AND)
g3 = db.Group(g1, g2, operator=db.OR)
g4 = db.Group(g3, ("date", yesterday, ">"), operator=db.AND)
self.assertEqual(g1.SQL(), "name='garlic bread'")
self.assertEqual(g2.SQL(), "name='pizza' and price<10")
self.assertEqual(g3.SQL(), "(name='garlic bread') or (name='pizza' and price<10)")
self.assertEqual(g4.SQL(), "((name='garlic bread') or (name='pizza' and price<10)) and date>'%s'" % yesterday)
# Assert subquery in group.
q = self._query(fields=["name"])
g = db.any(("name", u"Gödel"), ("name", q))
self.assertEqual(g.SQL(), u"name='Gödel' or name in (select persons.name from `persons`)")
print "pattern.db.Group"
def test_query(self):
# Assert table query results from Table.search().
for kwargs, sql, rows in (
(dict(fields=db.ALL),
"select persons.* from `persons`;",
[(1, u"john", 30, 2),
(2, u"jack", 20, 2),
(3, u"jane", 30, 1)]),
(dict(fields=db.ALL, range=(0, 2)),
"select persons.* from `persons` limit 0, 2;",
[(1, u"john", 30, 2),
(2, u"jack", 20, 2)]),
(dict(fields=db.ALL, filters=[("age", 30, "<")]),
"select persons.* from `persons` where persons.age<30;",
[(2, u"jack", 20, 2)]),
(dict(fields=db.ALL, filters=db.any(("age", 30, "<"), ("name", "john"))),
"select persons.* from `persons` where persons.age<30 or persons.name='john';",
[(1, u"john", 30, 2),
(2, u"jack", 20, 2)]),
(dict(fields=["name", "gender.name"], relations=[db.relation("gender", "id", "gender")]),
"select persons.name, gender.name from `persons` left join `gender` on persons.gender=gender.id;",
[(u"john", u"male"),
(u"jack", u"male"),
(u"jane", u"female")]),
(dict(fields=["name","age"], sort="name"),
"select persons.name, persons.age from `persons` order by persons.name asc;",
[(u"jack", 20),
(u"jane", 30),
(u"john", 30)]),
(dict(fields=["name","age"], sort=1, order=db.DESCENDING),
"select persons.name, persons.age from `persons` order by persons.name desc;",
[(u"john", 30),
(u"jane", 30),
(u"jack", 20)]),
(dict(fields=["age","name"], sort=["age","name"], order=[db.ASCENDING, db.DESCENDING]),
"select persons.age, persons.name from `persons` order by persons.age asc, persons.name desc;",
[(20, u"jack"),
(30, u"john"),
(30, u"jane")]),
(dict(fields=["age","name"], group="age", function=db.CONCATENATE),
"select persons.age, group_concat(persons.name) from `persons` group by persons.age;",
[(20, u"jack"),
(30, u"john,jane")]),
(dict(fields=["id", "name","age"], group="age", function=[db.COUNT, db.CONCATENATE]),
"select count(persons.id), group_concat(persons.name), persons.age from `persons` group by persons.age;",
[(1, u"jack", 20),
(2, u"john,jane", 30)])):
v = self.db.persons.search(**kwargs)
v.xml
self.assertEqual(v.SQL(), sql)
self.assertEqual(v.rows(), rows)
# Assert Database.link() permanent relations.
v = self.db.persons.search(fields=["name", "gender.name"])
v.aliases["gender.name"] = "gender"
self.db.link("persons", "gender", "gender", "id", join=db.LEFT)
self.assertEqual(v.SQL(),
"select persons.name, gender.name as gender from `persons` left join `gender` on persons.gender=gender.id;")
self.assertEqual(v.rows(),
[(u'john', u'male'),
(u'jack', u'male'),
(u'jane', u'female')])
print "pattern.db.Table.search()"
print "pattern.db.Table.Query"
def test_xml(self):
# Assert Query.xml dump.
v = self.db.persons.search(fields=["name", "gender.name"])
v.aliases["gender.name"] = "gender"
self.db.link("persons", "gender", "gender", "id", join=db.LEFT)
self.assertEqual(v.xml,
'<?xml version="1.0" encoding="utf-8"?>\n'
'<query table="persons" fields="name, gender" count="3">\n'
'\t<schema>\n'
'\t\t<field name="name" type="string" length="100" />\n'
'\t\t<field name="gender" type="string" length="100" />\n'
'\t</schema>\n'
'\t<rows>\n'
'\t\t<row name="john" gender="male" />\n'
'\t\t<row name="jack" gender="male" />\n'
'\t\t<row name="jane" gender="female" />\n'
'\t</rows>\n'
'</query>'
)
# Assert Database.create() from XML.
self.assertRaises(db.TableError, self.db.create, v.xml) # table 'persons' already exists
self.db.create(v.xml, name="persons2")
self.assertTrue("persons2" in self.db)
self.assertTrue(self.db.persons2.fields == ["name", "gender"])
self.assertTrue(len(self.db.persons2) == 3)
print "pattern.db.Query.xml"
class TestMySQLQuery(TestQuery):
def setUp(self):
self.db = DB_MYSQL
TestQuery.setUp(self)
class TestSQLiteQuery(TestQuery):
def setUp(self):
self.db = DB_SQLITE
TestQuery.setUp(self)
#---------------------------------------------------------------------------------------------------
class TestView(unittest.TestCase):
def setUp(self):
# Define self.db in a subclass.
pass
def tearDown(self):
# Drop test tables.
for table in self.db:
self.db.drop(table)
def test_view(self):
class Products(db.View):
def __init__(self, database):
db.View.__init__(self, database, "products", schema=[
db.pk(),
db.field("name", db.STRING),
db.field("price", db.FLOAT)
])
self.setup()
self.table.insert(name="pizza", price=15.0)
def render(self, query, **kwargs):
q = self.table.search(fields=["name", "price"], filters=[("name", "*%s*" % query)])
s = []
for row in q.rows():
s.append("<tr>%s</tr>" % "".join(
["<td class=\"%s\">%s</td>" % f for f in zip(q.fields, row)]))
return "<table>" + "".join(s) + "</table>"
# Assert View with automatic Table creation.
v = Products(self.db)
self.assertEqual(v.render("iz"),
"<table>"
"<tr>"
"<td class=\"name\">pizza</td>"
"<td class=\"price\">15.0</td>"
"</tr>"
"</table>"
)
print "pattern.db.View"
class TestMySQLView(TestView):
def setUp(self):
self.db = DB_MYSQL
TestView.setUp(self)
class TestSQLiteView(TestView):
def setUp(self):
self.db = DB_SQLITE
TestView.setUp(self)
#---------------------------------------------------------------------------------------------------
class TestCSV(unittest.TestCase):
def setUp(self):
# Create test table.
self.csv = db.CSV(
rows=[
[u"Schrödinger", "cat", True, 3, db.date(2009, 11, 3)],
[u"Hofstadter", "labrador", True, 5, db.date(2007, 8, 4)]
],
fields=[
["name", db.STRING],
["type", db.STRING],
["tail", db.BOOLEAN],
[ "age", db.INTEGER],
["date", db.DATE],
])
def test_csv_header(self):
# Assert field headers parser.
v1 = db.csv_header_encode("age", db.INTEGER)
v2 = db.csv_header_decode("age (INTEGER)")
self.assertEqual(v1, "age (INTEGER)")
self.assertEqual(v2, ("age", db.INTEGER))
print "pattern.db.csv_header_encode()"
print "pattern.db.csv_header_decode()"
def test_csv(self):
# Assert saving and loading data (field types are preserved).
v = self.csv
v.save("test.csv", headers=True)
v = db.CSV.load("test.csv", headers=True)
self.assertTrue(isinstance(v, list))
self.assertTrue(v.headers[0] == (u"name", db.STRING))
self.assertTrue(v[0] == [u"Schrödinger", "cat", True, 3, db.date(2009, 11, 3)])
os.unlink("test.csv")
print "pattern.db.CSV"
print "pattern.db.CSV.save()"
print "pattern.db.CSV.load()"
def test_file(self):
# Assert CSV file contents.
v = self.csv
v.save("test.csv", headers=True)
v = open("test.csv", "rb").read()
v = db.decode_utf8(v.lstrip(codecs.BOM_UTF8))
v = v.replace("\r\n", "\n")
self.assertEqual(v,
u'"name (STRING)","type (STRING)","tail (BOOLEAN)","age (INTEGER)","date (DATE)"\n'
u'"Schrödinger","cat","True","3","2009-11-03 00:00:00"\n'
u'"Hofstadter","labrador","True","5","2007-08-04 00:00:00"'
)
os.unlink("test.csv")
#---------------------------------------------------------------------------------------------------
class TestDatasheet(unittest.TestCase):
def setUp(self):
pass
def test_rows(self):
# Assert Datasheet.rows DatasheetRows object.
v = db.Datasheet(rows=[[1,2],[3,4]])
v.rows += [5,6]
v.rows[0] = [0,0]
v.rows.swap(0,1)
v.rows.insert(1, [1,1])
v.rows.pop(1)
self.assertTrue(isinstance(v.rows, db.DatasheetRows))
self.assertEqual(v.rows, [[3,4],[0,0],[5,6]])
self.assertEqual(v.rows[0], [3,4])
self.assertEqual(v.rows[-1], [5,6])
self.assertEqual(v.rows.count([3,4]), 1)
self.assertEqual(v.rows.index([3,4]), 0)
self.assertEqual(sorted(v.rows, reverse=True), [[5,6],[3,4],[0,0]])
self.assertRaises(AttributeError, v._set_rows, [])
# Assert default for new rows with missing columns.
v.rows.extend([[7],[9]], default=0)
self.assertEqual(v.rows, [[3,4],[0,0],[5,6],[7,0],[9,0]])
print "pattern.db.Datasheet.rows"
def test_columns(self):
# Assert Datasheet.columns DatasheetColumns object.
v = db.Datasheet(rows=[[1,3],[2,4]])
v.columns += [5,6]
v.columns[0] = [0,0]
v.columns.swap(0,1)
v.columns.insert(1, [1,1])
v.columns.pop(1)
self.assertTrue(isinstance(v.columns, db.DatasheetColumns))
self.assertEqual(v.columns, [[3,4],[0,0],[5,6]])
self.assertEqual(v.columns[0], [3,4])
self.assertEqual(v.columns[-1], [5,6])
self.assertEqual(v.columns.count([3,4]), 1)
self.assertEqual(v.columns.index([3,4]), 0)
self.assertEqual(sorted(v.columns, reverse=True), [[5,6],[3,4],[0,0]])
self.assertRaises(AttributeError, v._set_columns, [])
# Assert default for new columns with missing rows.
v.columns.extend([[7],[9]], default=0)
self.assertEqual(v.columns, [[3,4],[0,0],[5,6],[7,0],[9,0]])
print "pattern.db.Datasheet.columns"
def test_column(self):
# Assert DatasheetColumn object.
        # A column keeps a reference to its parent Datasheet until it is deleted from the datasheet.
v = db.Datasheet(rows=[[1,3],[2,4]])
column = v.columns[0]
column.insert(1, 0, default=None)
self.assertEqual(v, [[1,3], [0,None], [2,4]])
del v.columns[0]
        self.assertEqual(column._datasheet, None)  # reference severed on delete
print "pattern.db.DatasheetColumn"
def test_fields(self):
# Assert Datasheet with incomplete headers.
v = db.Datasheet(rows=[[u"Schrödinger", "cat"]], fields=[("name", db.STRING)])
self.assertEqual(v.fields, [("name", db.STRING)])
# Assert (None, None) for missing headers.
v.columns.swap(0,1)
self.assertEqual(v.fields, [(None, None), ("name", db.STRING)])
v.columns[0] = ["dog"]
self.assertEqual(v.fields, [(None, None), ("name", db.STRING)])
# Assert removing a column removes the header.
v.columns.pop(0)
self.assertEqual(v.fields, [("name",db.STRING)])
# Assert new columns with header description.
v.columns.append(["cat"])
v.columns.append([3], field=("age", db.INTEGER))
self.assertEqual(v.fields, [("name", db.STRING), (None, None), ("age", db.INTEGER)])
# Assert column by name.
self.assertEqual(v.name, [u"Schrödinger"])
print "pattern.db.Datasheet.fields"
def test_group(self):
# Assert Datasheet.group().
v1 = db.Datasheet(rows=[[1,2,"a"],[1,3,"b"],[1,4,"c"],[0,0,"d"]])
v2 = v1.group(0)
v3 = v1.group(0, function=db.LAST)
v4 = v1.group(0, function=(db.FIRST, db.COUNT, db.CONCATENATE))
v5 = v1.group(0, function=db.CONCATENATE, key=lambda j: j>0)
self.assertEqual(v2, [[1,2,"a"], [0,0,"d"]])
self.assertEqual(v3, [[1,4,"c"], [0,0,"d"]])
self.assertEqual(v4, [[1,3,u"a,b,c"], [0,1,u"d"]])
self.assertEqual(v5, [[True,u"2,3,4",u"a,b,c"], [False,u"0",u"d"]])
print "pattern.db.Datasheet.group()"
def test_slice(self):
# Assert Datasheet slices.
v = db.Datasheet([[1,2,3], [4,5,6], [7,8,9]])
v = v.copy()
self.assertEqual(v.slice(0,1,3,2), [[2,3], [5,6], [8,9]])
self.assertEqual(v[2], [7,8,9])
self.assertEqual(v[2,2], 9)
self.assertEqual(v[2,1:], [8,9])
self.assertEqual(v[0:2], [[1,2,3], [4,5,6]])
self.assertEqual(v[0:2,1], [2,5])
self.assertEqual(v[0:2,0:2], [[1,2], [4,5]])
# Assert new Datasheet for i:j slices.
self.assertTrue(isinstance(v[0:2], db.Datasheet))
self.assertTrue(isinstance(v[0:2,0:2], db.Datasheet))
print "pattern.db.Datasheet.slice()"
def test_copy(self):
# Assert Datasheet.copy().
v = db.Datasheet([[1,2,3], [4,5,6], [7,8,9]])
        self.assertEqual(v.copy(), [[1,2,3], [4,5,6], [7,8,9]])
        self.assertEqual(v.copy(rows=[0]), [[1,2,3]])
        self.assertEqual(v.copy(rows=[0], columns=[0]), [[1]])
        self.assertEqual(v.copy(columns=[0]), [[1], [4], [7]])
print "pattern.db.Datasheet.copy()"
def test_map(self):
# Assert Datasheet.map() (in-place).
v = db.Datasheet(rows=[[1,2],[3,4]])
v.map(lambda x: x+1)
self.assertEqual(v, [[2,3],[4,5]])
print "pattern.db.Datasheet.map()"
def test_json(self):
# Assert JSON output.
v = db.Datasheet(rows=[[u"Schrödinger", 3], [u"Hofstadter", 5]])
self.assertEqual(v.json, u'[["Schrödinger", 3], ["Hofstadter", 5]]')
# Assert JSON output with headers.
v = db.Datasheet(rows=[[u"Schrödinger", 3], [u"Hofstadter", 5]],
fields=[("name", db.STRING), ("age", db.INT)])
random.seed(0)
self.assertEqual(v.json, u'[{"age": 3, "name": "Schrödinger"}, {"age": 5, "name": "Hofstadter"}]')
print "pattern.db.Datasheet.json"
def test_flip(self):
# Assert flip matrix.
v = db.flip(db.Datasheet([[1,2], [3,4]]))
self.assertEqual(v, [[1,3], [2,4]])
print "pattern.db.flip()"
def test_truncate(self):
# Assert string truncate().
v1 = "a" * 50
v2 = "a" * 150
v3 = "aaa " * 50
self.assertEqual(db.truncate(v1), (v1, ""))
self.assertEqual(db.truncate(v2), ("a"*99+"-", "a"*51))
self.assertEqual(db.truncate(v3), (("aaa "*25).strip(), "aaa "*25))
print "pattern.db.truncate()"
def test_pprint(self):
pass
#---------------------------------------------------------------------------------------------------
def suite(**kwargs):
global HOST, PORT, USERNAME, PASSWORD
HOST = kwargs.get("host", "localhost")
PORT = kwargs.get("port", 3306)
USERNAME = kwargs.get("username", "root")
PASSWORD = kwargs.get("password", "")
create_db_mysql()
create_db_sqlite()
suite = unittest.TestSuite()
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestUnicode))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestEntities))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestDate))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestUtilityFunctions))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestSchema))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestCreateMySQLDatabase))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestCreateSQLiteDatabase))
if DB_MYSQL:
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestMySQLDatabase))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestMySQLTable))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestMySQLQuery))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestMySQLView))
if DB_SQLITE:
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestSQLiteDatabase))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestSQLiteTable))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestSQLiteQuery))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestSQLiteView))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestDeleteSQLiteDatabase))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestCSV))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestDatasheet))
return suite
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=1).run(suite())
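# Illustrative invocation sketch (not in the original file): the suite can be
# parameterized with MySQL credentials through suite()'s keyword arguments:
#
#     unittest.TextTestRunner(verbosity=1).run(
#         suite(host="localhost", port=3306, username="root", password=""))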
|
{
"content_hash": "db54961a497726f6b7602d0dadbbd3b7",
"timestamp": "",
"source": "github",
"line_count": 1080,
"max_line_length": 125,
"avg_line_length": 43.983333333333334,
"alnum_prop": 0.5135783756473412,
"repo_name": "piskvorky/pattern",
"id": "3c84c66d431b755e638b43215ea8657c6907dd7e",
"size": "47572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_db.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "87196"
},
{
"name": "JavaScript",
"bytes": "163954"
},
{
"name": "Python",
"bytes": "1697597"
}
],
"symlink_target": ""
}
|
"""Setup script for the pyparsing module distribution."""
from distutils.core import setup
import sys
import os
_PY3 = sys.version_info[0] > 2
if _PY3:
from pyparsing_py3 import __version__ as pyparsing_version
else:
from pyparsing_py2 import __version__ as pyparsing_version
modules = ["pyparsing",]
# make sure that a pyparsing.py file exists - if not, copy the appropriate version
def fileexists(fname):
try:
return bool(os.stat(fname))
except:
return False
def copyfile(fromname, toname):
outf = open(toname,'w')
outf.write(open(fromname).read())
outf.close()
if "MAKING_PYPARSING_RELEASE" not in os.environ and not fileexists("pyparsing.py"):
if _PY3:
from_file = "pyparsing_py3.py"
else:
from_file = "pyparsing_py2.py"
copyfile(from_file, "pyparsing.py")
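# Illustrative note (not from the original script): on a Python 3 interpreter
# with no prebuilt pyparsing.py, the block above behaves like
#
#     copyfile("pyparsing_py3.py", "pyparsing.py")
#
# so that the "pyparsing" entry in `modules` has a file to package.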
setup(# Distribution meta-data
name = "pyparsing",
version = pyparsing_version,
description = "Python parsing module",
author = "Paul McGuire",
author_email = "ptmcg@users.sourceforge.net",
url = "http://pyparsing.wikispaces.com/",
download_url = "http://sourceforge.net/project/showfiles.php?group_id=97203",
license = "MIT License",
py_modules = modules,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
|
{
"content_hash": "533f9b5e642fec053cecf381ae74a44c",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 83,
"avg_line_length": 30.363636363636363,
"alnum_prop": 0.6281437125748504,
"repo_name": "5monkeys/pyparsing",
"id": "642c5f001e6e5ba672929bcfeb4450f93985a836",
"size": "1693",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "563828"
}
],
"symlink_target": ""
}
|
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this docs directory.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import latexrender
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'latexrender'
copyright = u'2013, Luke Pomfrey'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = latexrender.__version__
# The full version, including alpha/beta/rc tags.
release = latexrender.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'latexrenderdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'latexrender.tex', u'latexrender Documentation',
u'Luke Pomfrey', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'latexrender', u'latexrender Documentation',
[u'Luke Pomfrey'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'latexrender', u'latexrender Documentation',
u'Luke Pomfrey', 'latexrender', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
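# Illustrative note (not part of conf.py): with this configuration the docs
# are built from this directory with, e.g.:
#
#     sphinx-build -b html . _build/html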
|
{
"content_hash": "8b6cb3a791a73d57f14a49b9dddcb33e",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 80,
"avg_line_length": 32.32113821138211,
"alnum_prop": 0.7067035593007169,
"repo_name": "lpomfrey/latexrender",
"id": "f22a86d437825968d9dbd6fa7cdecc7809027c38",
"size": "8394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "18708"
},
{
"name": "Shell",
"bytes": "6466"
},
{
"name": "TeX",
"bytes": "272"
}
],
"symlink_target": ""
}
|
__author__ = 'Michael Montero <mcmontero@gmail.com>'
# ----- Imports ---------------------------------------------------------------
from tinyAPI.base.config import ConfigManager
import logging
import random
import tinyAPI
__all__ = [
'StatsLogger'
]
# ----- Public Classes --------------------------------------------------------
class StatsLogger(object):
'''Manages writing statistics to the application log file.'''
def hit_ratio(self, name, requests, hits, pid=None):
if tinyAPI.env_unit_test() is False and \
tinyAPI.env_cli() is False and \
random.randint(1, 100000) == 1:
log_file = ConfigManager.value('app log file')
if log_file is not None:
try:
hit_ratio = str((hits / requests) * 100) + '%'
except ZeroDivisionError:
hit_ratio = 'NA'
lines = [
'\n----- ' + name + ' (start) -----'
]
if pid is not None:
lines.append('PID #{}'.format(pid))
lines.extend([
'Requests: ' + '{0:,}'.format(requests),
'Hits: ' + '{0:,}'.format(hits),
'Hit Ratio: ' + hit_ratio,
'----- ' + name + ' (stop) ------'
])
logging.basicConfig(filename = log_file)
logging.critical('\n'.join(lines))
logging.shutdown()
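# Minimal usage sketch (illustrative; assumes ConfigManager resolves
# 'app log file' and that the 1-in-100,000 sampling check fires):
#
#     StatsLogger().hit_ratio('page cache', requests=1000, hits=875)
#
# appends a block like this to the application log:
#
#     ----- page cache (start) -----
#     Requests: 1,000
#     Hits: 875
#     Hit Ratio: 87.5%
#     ----- page cache (stop) ------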
|
{
"content_hash": "054329344cb1fad5d6e463011665fa9b",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 79,
"avg_line_length": 31.872340425531913,
"alnum_prop": 0.42923898531375165,
"repo_name": "mcmontero/tinyAPI",
"id": "fb3269d2512b17d26b67ae9fdaef9a7777ab6ef6",
"size": "1579",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "base/stats_logger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "389720"
},
{
"name": "Shell",
"bytes": "7469"
}
],
"symlink_target": ""
}
|
import json
import os
import sys
from aldryn_client import forms
SYSTEM_FIELD_WARNING = 'WARNING: this field is auto-written. Please do not change it here.'
class Form(forms.BaseForm):
languages = forms.CharField(
'Languages',
required=True,
initial='["en", "de"]',
help_text=SYSTEM_FIELD_WARNING,
)
def to_settings(self, data, settings):
import dj_database_url
import warnings
import yurl
from functools import partial
from aldryn_addons.utils import boolean_ish, djsenv
env = partial(djsenv, settings=settings)
# BASE_DIR should already be set by aldryn-addons
settings['BASE_DIR'] = env('BASE_DIR', required=True)
settings['DATA_ROOT'] = env('DATA_ROOT', os.path.join(settings['BASE_DIR'], 'data'))
settings['SECRET_KEY'] = env('SECRET_KEY', 'this-is-not-very-random')
settings['DEBUG'] = boolean_ish(env('DEBUG', False))
settings['DATABASE_URL'] = env('DATABASE_URL')
settings['CACHE_URL'] = env('CACHE_URL')
if env('DJANGO_MODE') == 'build':
# In build mode we don't have any connected services like db or
# cache available. So we need to configure those things in a way
# they can run without real backends.
settings['DATABASE_URL'] = 'sqlite://:memory:'
settings['CACHE_URL'] = 'locmem://'
if not settings['DATABASE_URL']:
settings['DATABASE_URL'] = 'sqlite:///{}'.format(
os.path.join(settings['DATA_ROOT'], 'db.sqlite3')
)
warnings.warn(
'no database configured. Falling back to DATABASE_URL={0}'.format(
settings['DATABASE_URL']
),
RuntimeWarning,
)
if not settings['CACHE_URL']:
settings['CACHE_URL'] = 'locmem://'
warnings.warn(
'no cache configured. Falling back to CACHE_URL={0}'.format(
settings['CACHE_URL']
),
RuntimeWarning,
)
settings['DATABASES']['default'] = dj_database_url.parse(settings['DATABASE_URL'])
settings['ROOT_URLCONF'] = env('ROOT_URLCONF', 'urls')
settings['ADDON_URLS'].append('aldryn_django.urls')
settings['ADDON_URLS_I18N'].append('aldryn_django.i18n_urls')
settings['WSGI_APPLICATION'] = 'wsgi.application'
settings['STATIC_URL'] = env('STATIC_URL', '/static/')
settings['STATIC_URL_IS_ON_OTHER_DOMAIN'] = bool(yurl.URL(settings['STATIC_URL']).host)
settings['STATIC_ROOT'] = env(
'STATIC_ROOT',
os.path.join(settings['BASE_DIR'], 'static_collected'),
)
settings['STATICFILES_DIRS'] = env(
'STATICFILES_DIRS',
[os.path.join(settings['BASE_DIR'], 'static'),]
)
settings['MEDIA_URL'] = env('MEDIA_URL', '/media/')
settings['MEDIA_URL_IS_ON_OTHER_DOMAIN'] = bool(yurl.URL(settings['MEDIA_URL']).host)
settings['MEDIA_ROOT'] = env('MEDIA_ROOT', os.path.join(settings['DATA_ROOT'], 'media'))
settings['INSTALLED_APPS'].extend([
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.staticfiles',
'aldryn_django',
])
settings['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': env('TEMPLATE_DIRS', [os.path.join(settings['BASE_DIR'], 'templates')], ),
'OPTIONS': {
'debug': boolean_ish(env('TEMPLATE_DEBUG', settings['DEBUG'])),
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.i18n',
'django.core.context_processors.debug',
'django.core.context_processors.request',
'django.core.context_processors.media',
'django.core.context_processors.csrf',
'django.core.context_processors.tz',
'django.core.context_processors.static',
'aldryn_django.context_processors.debug',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
],
},
},
]
settings['MIDDLEWARE_CLASSES'] = [
'django.contrib.sessions.middleware.SessionMiddleware',
# 'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# 'django.middleware.security.SecurityMiddleware',
]
settings['SITE_ID'] = env('SITE_ID', 1)
self.domain_settings(data, settings, env=env)
self.server_settings(settings, env=env)
self.logging_settings(settings, env=env)
# Order matters, sentry settings rely on logging being configured.
self.sentry_settings(settings, env=env)
self.cache_settings(settings, env=env)
self.storage_settings(settings, env=env)
self.i18n_settings(data, settings, env=env)
self.migration_settings(settings, env=env)
return settings
def domain_settings(self, data, settings, env):
settings['ALLOWED_HOSTS'] = env('ALLOWED_HOSTS', ['localhost', '*'])
# will take a full config dict from ALDRYN_SITES_DOMAINS if available,
# otherwise fall back to constructing the dict from DOMAIN,
# DOMAIN_ALIASES and DOMAIN_REDIRECTS
domains = env('ALDRYN_SITES_DOMAINS', {})
domain = env('DOMAIN')
if domain:
settings['DOMAIN'] = domain
domain_aliases = env('DOMAIN_ALIASES', '')
domain_redirects = env('DOMAIN_REDIRECTS', '')
if not domains and domain:
domains = {
1: {
'domain': domain,
'aliases': [d.strip() for d in domain_aliases.split(',') if d.strip()],
'redirects': [d.strip() for d in domain_redirects.split(',') if d.strip()]
}
}
settings['ALDRYN_SITES_DOMAINS'] = domains
if domains and settings['SITE_ID'] in domains:
settings['ALLOWED_HOSTS'].extend([
domain for domain in domains[settings['SITE_ID']]['aliases']
] + [
domain for domain in domains[settings['SITE_ID']]['redirects']
])
# TODO: aldryn-sites claims it doesn't support django>1.7
# settings['INSTALLED_APPS'].append('aldryn_sites')
# settings['MIDDLEWARE_CLASSES'].insert(
# settings['MIDDLEWARE_CLASSES'].index('django.middleware.common.CommonMiddleware'),
# 'aldryn_sites.middleware.SiteMiddleware',
# )
def server_settings(self, settings, env):
settings['PORT'] = env('PORT', 80)
settings['BACKEND_PORT'] = env('BACKEND_PORT', 8000)
settings['ENABLE_NGINX'] = env('ENABLE_NGINX', False)
settings['ENABLE_PAGESPEED'] = env('ENABLE_PAGESPEED', False)
settings['ENABLE_BROWSERCACHE'] = env('ENABLE_BROWSERCACHE', False)
settings['BROWSERCACHE_MAX_AGE'] = env('BROWSERCACHE_MAX_AGE', 300)
settings['NGINX_CONF_PATH'] = env('NGINX_CONF_PATH')
settings['NGINX_PROCFILE_PATH'] = env('NGINX_PROCFILE_PATH')
settings['DJANGO_WEB_WORKERS'] = env('DJANGO_WEB_WORKERS', 3)
settings['DJANGO_WEB_MAX_REQUESTS'] = env('DJANGO_WEB_MAX_REQUESTS', 500)
settings['DJANGO_WEB_TIMEOUT'] = env('DJANGO_WEB_TIMEOUT', 120)
def logging_settings(self, settings, env):
settings['LOGGING'] = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'stream': sys.stdout,
},
'null': {
'class': 'django.utils.log.NullHandler',
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'INFO',
},
'django': {
'handlers': ['console'],
'level': 'INFO',
},
'django.request': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
'aldryn': {
'handlers': ['console'],
'level': 'INFO',
},
'py.warnings': {
'handlers': ['console'],
},
}
}
def sentry_settings(self, settings, env):
sentry_dsn = env('SENTRY_DSN')
if sentry_dsn:
settings['INSTALLED_APPS'].append('raven.contrib.django')
settings['RAVEN_CONFIG'] = {'dsn': sentry_dsn}
settings['LOGGING']['handlers']['sentry'] = {
'level': 'ERROR',
'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
}
def cache_settings(self, settings, env):
import django_cache_url
cache_url = env('CACHE_URL')
if cache_url:
settings['CACHES']['default'] = django_cache_url.parse(cache_url)
def storage_settings(self, settings, env):
from aldryn_django.storage import parse_storage_url
if env('DEFAULT_STORAGE_DSN'):
settings['DEFAULT_STORAGE_DSN'] = env('DEFAULT_STORAGE_DSN')
if 'DEFAULT_STORAGE_DSN' in settings:
settings.update(parse_storage_url(settings['DEFAULT_STORAGE_DSN']))
def i18n_settings(self, data, settings, env):
settings['ALL_LANGUAGES'] = list(settings['LANGUAGES'])
settings['ALL_LANGUAGES_DICT'] = dict(settings['ALL_LANGUAGES'])
languages = json.loads(data['languages'])
settings['LANGUAGE_CODE'] = languages[0]
settings['USE_L10N'] = False
settings['USE_I18N'] = False
settings['LANGUAGES'] = [
(code, settings['ALL_LANGUAGES_DICT'][code])
for code in languages
]
settings['LOCALE_PATHS'] = [
os.path.join(settings['BASE_DIR'], 'locale'),
]
def time_settings(self, settings, env):
if env('TIME_ZONE'):
settings['TIME_ZONE'] = env('TIME_ZONE')
def migration_settings(self, settings, env):
settings.setdefault('MIGRATION_COMMANDS', [])
mcmds = settings['MIGRATION_COMMANDS']
mcmds.append('CACHE_URL="locmem://" python manage.py createcachetable django_dbcache; exit 0')
mcmds.append('python manage.py syncdb --noinput')
mcmds.append('python manage.py migrate --list --noinput && python manage.py migrate --noinput && python manage.py migrate --list --noinput')
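# A minimal sketch of the DOMAIN fallback in domain_settings() (hypothetical
# environment values; ALDRYN_SITES_DOMAINS left unset):
#
#   DOMAIN='example.com' DOMAIN_ALIASES='www.example.com, api.example.com'
#
# produces
#
#   settings['ALDRYN_SITES_DOMAINS'] = {
#       1: {
#           'domain': 'example.com',
#           'aliases': ['www.example.com', 'api.example.com'],
#           'redirects': [],
#       },
#   }
#
# and, because SITE_ID defaults to 1, the aliases and redirects are also
# appended to ALLOWED_HOSTS.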
|
{
"content_hash": "90febeb1228876ba8e012ffb45bbb604",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 148,
"avg_line_length": 42.13840830449827,
"alnum_prop": 0.5436853342092297,
"repo_name": "JimyRyan/jimyryan-djangocms",
"id": "2a83b3c3a6d47addf0c2178a1049fc51be4a6519",
"size": "12202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/addons/aldryn-django/aldryn_config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "252376"
},
{
"name": "HTML",
"bytes": "195079"
},
{
"name": "JavaScript",
"bytes": "69122"
},
{
"name": "Nginx",
"bytes": "1214"
},
{
"name": "Python",
"bytes": "48234"
},
{
"name": "Shell",
"bytes": "4639"
}
],
"symlink_target": ""
}
|
import yaml
from pyramid_mako import add_mako_renderer
from pyramid.interfaces import IStaticURLInfo
import sqlalchemy
import sqlahelper
import pyramid_tm
from papyrus.renderers import GeoJSON, XSD
import simplejson as json
from c2cgeoportal.lib import dbreflection, get_setting, caching, \
MultiDomainPregenerator, MultiDomainStaticURLInfo
# used by (sql|form)alchemy
srid = None
schema = None
parentschema = None
formalchemy_language = None
formalchemy_default_zoom = 10
formalchemy_default_x = 740000
formalchemy_default_y = 5860000
formalchemy_available_functionalities = []
formalchemy_available_metadata = []
class DecimalJSON:
def __init__(self, jsonp_param_name='callback'):
self.jsonp_param_name = jsonp_param_name
def __call__(self, info):
def _render(value, system):
ret = json.dumps(value, use_decimal=True)
request = system.get('request')
if request is not None:
callback = request.params.get(self.jsonp_param_name)
if callback is None:
request.response.content_type = 'application/json'
else:
request.response.content_type = 'text/javascript'
ret = '%(callback)s(%(json)s);' % {
'callback': callback,
'json': ret
}
return ret
return _render
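# A minimal sketch of using this renderer from a Pyramid view (hypothetical
# route and view; the renderer is registered under the name 'decimaljson' in
# includeme() below):
#
#   from decimal import Decimal
#   from pyramid.view import view_config
#
#   @view_config(route_name='prices', renderer='decimaljson')
#   def prices(request):
#       return {'total': Decimal('19.90')}  # serialized with use_decimal=True
#
# Appending ?callback=cb to the request switches the response to JSONP:
# cb({"total": 19.90});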
INTERFACE_TYPE_CGXP = 'cgxp'
INTERFACE_TYPE_SENCHA_TOUCH = 'senchatouch'
INTERFACE_TYPE_NGEO = 'ngeo'
INTERFACE_TYPE_NGEO_CATALOGUE = 'ngeo'
def add_interface(
config, interface_name=None, interface_type=INTERFACE_TYPE_CGXP, **kwargs
): # pragma: nocover
if interface_type == INTERFACE_TYPE_CGXP:
if interface_name is None:
add_interface_cgxp(
config,
interface_name='main',
route_names=('home', 'viewer'),
routes=('/', '/viewer.js'),
renderers=('index.html', 'viewer.js'),
)
else:
add_interface_cgxp(
config,
interface_name=interface_name,
route_names=(interface_name, interface_name + '.js'),
routes=('/%s' % interface_name, '/%s.js' % interface_name),
renderers=('/%s.html' % interface_name, '/%s.js' % interface_name),
)
elif interface_type == INTERFACE_TYPE_SENCHA_TOUCH:
add_interface_senchatouch(config, interface_name, **kwargs)
elif interface_type == INTERFACE_TYPE_NGEO:
if interface_name is None:
add_interface_ngeo(
config,
interface_name='main',
route_name='home',
route='/',
renderer='index.html',
)
else:
add_interface_ngeo(
config,
interface_name=interface_name,
route_name=interface_name,
route='/%s' % interface_name,
renderer='/%s.html' % interface_name,
)
def add_interface_cgxp(config, interface_name, route_names, routes, renderers): # pragma: nocover
    # Cannot be imported at module level, to avoid loading the model too early
from c2cgeoportal.views.entry import Entry
def add_interface(f):
def new_f(root, request):
request.interface_name = interface_name
return f(root, request)
return new_f
config.add_route(route_names[0], routes[0])
config.add_view(
Entry,
decorator=add_interface,
attr='get_cgxp_index_vars',
route_name=route_names[0],
renderer=renderers[0]
)
    # permalink theme: recover the theme to generate a custom viewer.js URL
config.add_route(
'%stheme' % route_names[0],
'%s%stheme/*themes' % (routes[0], '' if routes[0][-1] == '/' else '/')
)
config.add_view(
Entry,
decorator=add_interface,
attr='get_cgxp_permalinktheme_vars',
route_name='%stheme' % route_names[0],
renderer=renderers[0]
)
config.add_route(route_names[1], routes[1])
config.add_view(
Entry,
decorator=add_interface,
attr='get_cgxp_viewer_vars',
route_name=route_names[1],
renderer=renderers[1]
)
def add_interface_senchatouch(config, interface_name, package=None): # pragma: nocover
    # Cannot be imported at module level, to avoid loading the model too early
from c2cgeoportal.views.entry import Entry
if package is None:
package = config.get_settings()['package']
def add_interface(f):
def new_f(root, request):
request.interface_name = interface_name
return f(root, request)
return new_f
interface_name = 'mobile' if interface_name is None else interface_name
config.add_route('mobile_index_dev', '/mobile_dev/')
config.add_view(
Entry,
decorator=add_interface,
attr='mobile',
renderer='%(package)s:static/mobile/index.html' % {
'package': package
},
route_name='mobile_index_dev'
)
config.add_route('mobile_config_dev', '/mobile_dev/config.js')
config.add_view(
Entry,
decorator=add_interface,
attr='mobileconfig',
renderer='%(package)s:static/mobile/config.js' % {
'package': package
},
route_name='mobile_config_dev'
)
config.add_static_view('%s_dev' % interface_name, '%(package)s:static/mobile' % {
'package': package
})
config.add_route('mobile_index_prod', '/mobile/')
config.add_view(
Entry,
decorator=add_interface,
attr='mobile',
renderer='%(package)s:static/mobile/build/production/App/index.html' % {
'package': package
},
route_name='mobile_index_prod'
)
config.add_route('mobile_config_prod', '/mobile/config.js')
config.add_view(
Entry,
decorator=add_interface,
attr='mobileconfig',
renderer='%(package)s:static/mobile/build/production/App/config.js' % {
'package': package
},
route_name='mobile_config_prod'
)
config.add_static_view(interface_name, '%(package)s:static/mobile/build/production/App' % {
'package': package
})
def add_interface_ngeo(config, interface_name, route_name, route, renderer): # pragma: nocover
    # Cannot be imported at module level, to avoid loading the model too early
from c2cgeoportal.views.entry import Entry
def add_interface(f):
def new_f(root, request):
request.interface_name = interface_name
return f(root, request)
return new_f
config.add_route(route_name, route)
config.add_view(
Entry,
decorator=add_interface,
attr='get_ngeo_index_vars',
route_name=route_name,
renderer=renderer
)
    # permalink theme: recover the theme to generate a custom viewer.js URL
config.add_route(
'%stheme' % route_name,
'%s%stheme/*themes' % (route, '' if route[-1] == '/' else '/')
)
config.add_view(
Entry,
decorator=add_interface,
attr='get_ngeo_permalinktheme_vars',
route_name='%stheme' % route_name,
renderer=renderer
)
def locale_negotiator(request):
lang = request.params.get('lang')
if lang is None:
# if best_match returns None then Pyramid will use what's defined in
# the default_locale_name configuration variable
return request.accept_language.best_match(
request.registry.settings.get("available_locale_names"))
return lang
def get_user_from_request(request):
""" Return the User object for the request.
Return ``None`` if user is anonymous or if it does not
exist in the database.
"""
from c2cgeoportal.models import DBSession, User
from sqlalchemy.orm import joinedload
if not hasattr(request, '_user'):
request._user = None
username = request.authenticated_userid
if username is not None:
# We know we will need the role object of the
# user so we use joined loading
request._user = DBSession.query(User) \
.options(joinedload(User.role)) \
.filter_by(username=username) \
.first()
return request._user
def set_user_validator(config, user_validator):
""" Call this function to register a user validator function.
The validator function is passed three arguments: ``request``,
``username``, and ``password``. The function should return the
user name if the credentials are valid, and ``None`` otherwise.
The validator should not do the actual authentication operation
by calling ``remember``, this is handled by the ``login`` view.
"""
def register():
config.registry.validate_user = user_validator
config.action('user_validator', register)
def default_user_validator(request, username, password):
""" Validate the username/password. This is c2cgeoportal's
default user validator. """
from c2cgeoportal.models import DBSession, User
user = DBSession.query(User).filter_by(username=username).first()
return username if user and user.validate_password(password) else None
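# A minimal sketch of a custom validator registered through the
# set_user_validator directive added in includeme() (my_ldap is a hypothetical
# helper):
#
#   def ldap_user_validator(request, username, password):
#       # return the user name on success, None otherwise; never call
#       # remember() here, the login view takes care of that
#       return username if my_ldap.check(username, password) else None
#
#   config.set_user_validator(ldap_user_validator)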
def ogcproxy_route_predicate(info, request):
""" Serve as a custom route predicate function for ogcproxy.
We do not want the OGC proxy to be used to reach the app's
mapserv script. We just return False if the url includes
"mapserv". It is rather drastic, but works for us. """
url = request.params.get('url')
if url is None:
return False
if url.find('mapserv') > 0:
return False
return True
def mapserverproxy_route_predicate(info, request):
""" Serve as a custom route predicate function for mapserverproxy.
If the hide_capabilities setting is set and is true then we want to
return 404s on GetCapabilities requests."""
hide_capabilities = request.registry.settings.get('hide_capabilities')
if not hide_capabilities:
return True
params = dict(
(k.lower(), unicode(v).lower()) for k, v in request.params.iteritems()
)
return 'request' not in params or params['request'] != u'getcapabilities'
def includeme(config):
""" This function returns a Pyramid WSGI application.
"""
# update the settings object from the YAML application config file
settings = config.get_settings()
settings.update(yaml.load(file(settings.get('app.cfg'))))
global srid
global schema
global parentschema
global formalchemy_language
global formalchemy_default_zoom
global formalchemy_default_x
global formalchemy_default_y
global formalchemy_available_functionalities
global formalchemy_available_metadata
config.set_request_property(get_user_from_request, name='user')
# configure 'locale' dir as the translation dir for c2cgeoportal app
config.add_translation_dirs('c2cgeoportal:locale/')
# initialize database
engine = sqlalchemy.engine_from_config(
config.get_settings(),
'sqlalchemy.')
sqlahelper.add_engine(engine)
config.include(pyramid_tm.includeme)
# initialize the dbreflection module
dbreflection.init(engine)
# dogpile.cache configuration
caching.init_region(settings['cache'])
caching.invalidate_region()
# bind the mako renderer to other file extensions
add_mako_renderer(config, '.html')
add_mako_renderer(config, '.js')
config.include('pyramid_chameleon')
# add the "geojson" renderer
config.add_renderer('geojson', GeoJSON())
# add decimal json renderer
config.add_renderer('decimaljson', DecimalJSON())
# add the "xsd" renderer
config.add_renderer('xsd', XSD(
sequence_callback=dbreflection._xsd_sequence_callback))
# add the set_user_validator directive, and set a default user
# validator
config.add_directive('set_user_validator', set_user_validator)
config.set_user_validator(default_user_validator)
# add an OGCProxy view
config.add_route(
'ogcproxy', '/ogcproxy',
custom_predicates=(ogcproxy_route_predicate,))
config.add_view('papyrus_ogcproxy.views:ogcproxy', route_name='ogcproxy')
# add routes to the mapserver proxy
config.add_route(
'mapserverproxy', '/mapserv_proxy',
custom_predicates=(mapserverproxy_route_predicate,),
pregenerator=MultiDomainPregenerator())
# add routes to csv view
config.add_route('csvecho', '/csv')
# add routes to the echo service
config.add_route('echo', '/echo')
# add routes to the entry view class
config.add_route('loginform', '/login.html')
config.add_route('login', '/login')
config.add_route('logout', '/logout')
config.add_route('loginchange', '/loginchange')
config.add_route('testi18n', '/testi18n.html')
config.add_route('apijs', '/api.js')
config.add_route('xapijs', '/xapi.js')
config.add_route('apihelp', '/apihelp.html')
config.add_route('xapihelp', '/xapihelp.html')
config.add_route('themes', '/themes')
config.add_route('invalidate', '/invalidate')
# checker routes, Checkers are web services to test and assess that
# the application is correctly functioning.
# These web services are used by tools like (nagios).
config.add_route('checker_main', '/checker_main')
config.add_route('checker_viewer', '/checker_viewer')
config.add_route('checker_edit', '/checker_edit')
config.add_route('checker_edit_js', '/checker_edit_js')
config.add_route('checker_api', '/checker_api')
config.add_route('checker_xapi', '/checker_xapi')
config.add_route('checker_printcapabilities', '/checker_printcapabilities')
config.add_route('checker_pdf', '/checker_pdf')
config.add_route('checker_fts', '/checker_fts')
config.add_route('checker_wmscapabilities', '/checker_wmscapabilities')
config.add_route('checker_wfscapabilities', '/checker_wfscapabilities')
# collector
config.add_route('check_collector', '/check_collector')
# print proxy routes
config.add_route('printproxy', '/printproxy')
config.add_route('printproxy_info', '/printproxy/info.json')
config.add_route('printproxy_create', '/printproxy/create.json')
config.add_route('printproxy_get', '/printproxy/{file}.printout')
# full text search routes
config.add_route('fulltextsearch', '/fulltextsearch')
# Access to raster data
config.add_route('raster', '/raster')
config.add_route('profile.csv', '/profile.csv')
config.add_route('profile.json', '/profile.json')
# shortener
config.add_route('shortener_create', '/short/create')
config.add_route('shortener_get', '/short/{ref}')
# add routes for the "layers" web service
config.add_route(
'layers_count', '/layers/{layer_id:\\d+}/count',
request_method='GET')
config.add_route(
'layers_metadata', '/layers/{layer_id:\\d+}/md.xsd',
request_method='GET')
config.add_route(
'layers_read_many',
'/layers/{layer_id:\\d+,?(\\d+,)*\\d*$}',
request_method='GET') # supports URLs like /layers/1,2,3
config.add_route(
'layers_read_one', '/layers/{layer_id:\\d+}/{feature_id}',
request_method='GET')
config.add_route(
'layers_create', '/layers/{layer_id:\\d+}',
request_method='POST')
config.add_route(
'layers_update', '/layers/{layer_id:\\d+}/{feature_id}',
request_method='PUT')
config.add_route(
'layers_delete', '/layers/{layer_id:\\d+}/{feature_id}',
request_method='DELETE')
config.add_route(
'layers_enumerate_attribute_values',
'/layers/{layer_name}/values/{field_name}',
request_method='GET')
# there's no view corresponding to that route, it is to be used from
# mako templates to get the root of the "layers" web service
config.add_route('layers_root', '/layers/')
# pyramid_formalchemy's configuration
config.include('pyramid_formalchemy')
config.include('fa.jquery')
# define the srid, schema and parentschema
# as global variables to be usable in the model
srid = config.get_settings()['srid']
schema = config.get_settings()['schema']
parentschema = config.get_settings()['parentschema']
settings = config.get_settings()
formalchemy_default_zoom = get_setting(
settings,
('admin_interface', 'map_zoom'), formalchemy_default_zoom)
formalchemy_default_x = get_setting(
settings,
('admin_interface', 'map_x'), formalchemy_default_x)
formalchemy_default_y = get_setting(
settings,
('admin_interface', 'map_y'), formalchemy_default_y)
formalchemy_available_functionalities = get_setting(
settings,
('admin_interface', 'available_functionalities'),
formalchemy_available_functionalities)
formalchemy_available_metadata = get_setting(
settings,
('admin_interface', 'available_metadata'),
formalchemy_available_metadata)
# scan view decorator for adding routes
config.scan(ignore='c2cgeoportal.tests')
config.registry.registerUtility(
MultiDomainStaticURLInfo(), IStaticURLInfo)
# add the static view (for static resources)
config.add_static_view(
'static', 'c2cgeoportal:static',
cache_max_age=int(settings["default_max_age"])
)
|
{
"content_hash": "77489c88b51f511440a02191c133740a",
"timestamp": "",
"source": "github",
"line_count": 507,
"max_line_length": 98,
"avg_line_length": 34.67061143984221,
"alnum_prop": 0.634656957560587,
"repo_name": "tsauerwein/c2cgeoportal",
"id": "07eca2768c26398f56e42073e94003207e2ac36e",
"size": "19167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "c2cgeoportal/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "3701979"
},
{
"name": "JavaScript",
"bytes": "12750450"
},
{
"name": "Makefile",
"bytes": "11077"
},
{
"name": "Python",
"bytes": "615585"
},
{
"name": "Ruby",
"bytes": "2164"
},
{
"name": "Shell",
"bytes": "4476"
}
],
"symlink_target": ""
}
|
import pytest
import sqlalchemy as sa
from sqlalchemy_utils import assert_max_length, assert_non_nullable
from tests.factories import UserFactory
@pytest.mark.usefixtures('database')
class TestUser(object):
@pytest.mark.parametrize(
'column',
(
'id',
'email',
'first_name',
'last_name'
)
)
def test_non_nullable_columns(self, column):
assert_non_nullable(UserFactory(), column)
@pytest.mark.parametrize(
('column', 'max_length'),
(
('email', 255),
('first_name', 255),
('last_name', 255),
('guild', 100),
('class_year', 1),
('phone_number', 20)
)
)
def test_max_lengths(self, column, max_length):
assert_max_length(UserFactory(), column, max_length)
def test_email_unique(self):
UserFactory(email='email@email.com')
with pytest.raises(sa.exc.IntegrityError):
UserFactory(email='email@email.com')
def test_name_property(self):
user = UserFactory(first_name='John', last_name='Doe')
assert user.name == 'John Doe'
def test_repr(self):
user = UserFactory.build()
assert repr(user) == "<User name='%s'>" % user.name
|
{
"content_hash": "fcd54c1a44e9b02b3cada9d1da8d7502",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 67,
"avg_line_length": 28.217391304347824,
"alnum_prop": 0.5631741140215717,
"repo_name": "wappulehti-apy/diilikone-api",
"id": "95eaa8e21ccc054a72d461a51467198f1ed0b629",
"size": "1298",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/models/test_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "573"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "51557"
}
],
"symlink_target": ""
}
|
class Pipeline(object):
def __init__(self, filters, **kwargs):
"""Initialize a new pipeline
Any arguments that should be passed to specific filters
should be passed as a kwarg.
:param list filters: A list of filters. All filters should inherit
Example:
import pypeline
renderer = pypeline.Renderer([
MarkdownFilter,
...,
MentionFilter
])
renderer.render(content)
"""
self.filters = filters
self.context = kwargs
def render(self, content):
"""Passes the `content` through the pipeline and renders it.
:param str content: The content to pass down the pipeline
:returns: The rendered HTML
"""
rendered = content
        for content_filter in self.filters:
            rendered = content_filter(rendered)
return rendered
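# A minimal usage sketch (hypothetical filters; any callable that takes the
# content and returns it transformed will do):
#
#   def upper_filter(content):
#       return content.upper()
#
#   def exclaim_filter(content):
#       return content + '!'
#
#   pipeline = Pipeline([upper_filter, exclaim_filter])
#   pipeline.render('hello')  # -> 'HELLO!'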
|
{
"content_hash": "8fec2bdb46fed42b30b94a195dbb84cd",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 74,
"avg_line_length": 25.75,
"alnum_prop": 0.5631067961165048,
"repo_name": "rsenk330/html-pypeline",
"id": "36dbb1fec6ee5a94fb4b632796ad3f36c6fdd354",
"size": "927",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pypeline/pipeline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22943"
},
{
"name": "Shell",
"bytes": "2687"
}
],
"symlink_target": ""
}
|
"""
Various common information measures
"""
from .exact_common_information import exact_common_information
from .functional_common_information import functional_common_information
from .gk_common_information import gk_common_information
from .mss_common_information import mss_common_information
from .wyner_common_information import wyner_common_information
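# A minimal usage sketch (assumes dit's Distribution API; values are
# illustrative):
#
#   from dit import Distribution
#   d = Distribution(['00', '11'], [0.5, 0.5])
#   gk_common_information(d)  # -> 1.0 bit for two perfectly correlated bits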
|
{
"content_hash": "5ade8400490768a1725a34d8342335bd",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 72,
"avg_line_length": 39.111111111111114,
"alnum_prop": 0.8380681818181818,
"repo_name": "Autoplectic/dit",
"id": "aab4bd67485658304bfe7142406677c082d911de",
"size": "352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dit/multivariate/common_informations/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5938"
},
{
"name": "HTML",
"bytes": "265"
},
{
"name": "PHP",
"bytes": "614"
},
{
"name": "Python",
"bytes": "1272731"
},
{
"name": "Shell",
"bytes": "180"
},
{
"name": "TeX",
"bytes": "6951"
}
],
"symlink_target": ""
}
|
__author__ = 'Dustin Whittle <dustin@yahoo-inc.com>'
__version__ = '0.1'
import os, sys, unittest
# update sys path to include bundled modules with priority
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../../../src'))
import yahoo.yql
class YQLTest(unittest.TestCase):
def setUp(self):
self.yql = yahoo.yql.YQLQuery()
def test_query_valid(self):
"""
        Tests calling the YQL public API with a valid query.
"""
response = self.yql.execute('select * from search.web where query="dustin whittle"')
self.assertTrue('query' in response and 'results' in response['query'])
def test_query_invalid(self):
"""
        Tests error handling when calling the YQL public API with an invalid query.
"""
response = self.yql.execute('select * from delicious.feeds.unknown_test')
        self.assertEqual('No definition found for Table delicious.feeds.unknown_test', response['error']['description'])
def tearDown(self):
self.yql = None
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "c02cd1a639201850c293b378f161357e",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 117,
"avg_line_length": 29.6,
"alnum_prop": 0.6718146718146718,
"repo_name": "yahoo/yos-social-python",
"id": "eee6795359d647346114d3e2042e62af2baa90a2",
"size": "1055",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/unit/yahoo/test_yql.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2690"
},
{
"name": "JavaScript",
"bytes": "1867"
},
{
"name": "Python",
"bytes": "1349625"
}
],
"symlink_target": ""
}
|
"""Tests for the HTTP API for the cloud component."""
import asyncio
from unittest.mock import patch, MagicMock
import pytest
from homeassistant.bootstrap import async_setup_component
from homeassistant.components.cloud import DOMAIN, auth_api
from tests.common import mock_coro
@pytest.fixture
def cloud_client(hass, test_client):
"""Fixture that can fetch from the cloud client."""
with patch('homeassistant.components.cloud.Cloud.initialize'):
hass.loop.run_until_complete(async_setup_component(hass, 'cloud', {
'cloud': {
'mode': 'development',
'cognito_client_id': 'cognito_client_id',
'user_pool_id': 'user_pool_id',
'region': 'region',
'relayer': 'relayer',
}
}))
return hass.loop.run_until_complete(test_client(hass.http.app))
@pytest.fixture
def mock_cognito():
"""Mock warrant."""
with patch('homeassistant.components.cloud.auth_api._cognito') as mock_cog:
yield mock_cog()
@asyncio.coroutine
def test_account_view_no_account(cloud_client):
"""Test fetching account if no account available."""
req = yield from cloud_client.get('/api/cloud/account')
assert req.status == 400
@asyncio.coroutine
def test_account_view(hass, cloud_client):
"""Test fetching account if no account available."""
hass.data[DOMAIN].email = 'hello@home-assistant.io'
req = yield from cloud_client.get('/api/cloud/account')
assert req.status == 200
result = yield from req.json()
assert result == {'email': 'hello@home-assistant.io'}
@asyncio.coroutine
def test_login_view(hass, cloud_client):
"""Test logging in."""
hass.data[DOMAIN].email = 'hello@home-assistant.io'
with patch('homeassistant.components.cloud.iot.CloudIoT.connect'), \
patch('homeassistant.components.cloud.'
'auth_api.login') as mock_login:
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 200
result = yield from req.json()
assert result == {'email': 'hello@home-assistant.io'}
assert len(mock_login.mock_calls) == 1
cloud, result_user, result_pass = mock_login.mock_calls[0][1]
assert result_user == 'my_username'
assert result_pass == 'my_password'
@asyncio.coroutine
def test_login_view_invalid_json(cloud_client):
"""Try logging in with invalid JSON."""
with patch('homeassistant.components.cloud.auth_api.login') as mock_login:
req = yield from cloud_client.post('/api/cloud/login', data='Not JSON')
assert req.status == 400
assert len(mock_login.mock_calls) == 0
@asyncio.coroutine
def test_login_view_invalid_schema(cloud_client):
"""Try logging in with invalid schema."""
with patch('homeassistant.components.cloud.auth_api.login') as mock_login:
req = yield from cloud_client.post('/api/cloud/login', json={
'invalid': 'schema'
})
assert req.status == 400
assert len(mock_login.mock_calls) == 0
@asyncio.coroutine
def test_login_view_request_timeout(cloud_client):
"""Test request timeout while trying to log in."""
with patch('homeassistant.components.cloud.auth_api.login',
side_effect=asyncio.TimeoutError):
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 502
@asyncio.coroutine
def test_login_view_invalid_credentials(cloud_client):
"""Test logging in with invalid credentials."""
with patch('homeassistant.components.cloud.auth_api.login',
side_effect=auth_api.Unauthenticated):
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 401
@asyncio.coroutine
def test_login_view_unknown_error(cloud_client):
"""Test unknown error while logging in."""
with patch('homeassistant.components.cloud.auth_api.login',
side_effect=auth_api.UnknownError):
req = yield from cloud_client.post('/api/cloud/login', json={
'email': 'my_username',
'password': 'my_password'
})
assert req.status == 502
@asyncio.coroutine
def test_logout_view(hass, cloud_client):
"""Test logging out."""
cloud = hass.data['cloud'] = MagicMock()
cloud.logout.return_value = mock_coro()
req = yield from cloud_client.post('/api/cloud/logout')
assert req.status == 200
data = yield from req.json()
assert data == {'message': 'ok'}
assert len(cloud.logout.mock_calls) == 1
@asyncio.coroutine
def test_logout_view_request_timeout(hass, cloud_client):
"""Test timeout while logging out."""
cloud = hass.data['cloud'] = MagicMock()
cloud.logout.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/logout')
assert req.status == 502
@asyncio.coroutine
def test_logout_view_unknown_error(hass, cloud_client):
"""Test unknown error while logging out."""
cloud = hass.data['cloud'] = MagicMock()
cloud.logout.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/logout')
assert req.status == 502
@asyncio.coroutine
def test_register_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'password': 'falcon42'
})
assert req.status == 200
assert len(mock_cognito.register.mock_calls) == 1
result_email, result_pass = mock_cognito.register.mock_calls[0][1]
assert result_email == auth_api._generate_username('hello@bla.com')
assert result_pass == 'falcon42'
@asyncio.coroutine
def test_register_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'not_password': 'falcon'
})
assert req.status == 400
assert len(mock_cognito.logout.mock_calls) == 0
@asyncio.coroutine
def test_register_view_request_timeout(mock_cognito, cloud_client):
"""Test timeout while logging out."""
mock_cognito.register.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'password': 'falcon42'
})
assert req.status == 502
@asyncio.coroutine
def test_register_view_unknown_error(mock_cognito, cloud_client):
"""Test unknown error while logging out."""
mock_cognito.register.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/register', json={
'email': 'hello@bla.com',
'password': 'falcon42'
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_register_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'confirmation_code': '123456'
})
assert req.status == 200
assert len(mock_cognito.confirm_sign_up.mock_calls) == 1
result_code, result_email = mock_cognito.confirm_sign_up.mock_calls[0][1]
assert result_email == auth_api._generate_username('hello@bla.com')
assert result_code == '123456'
@asyncio.coroutine
def test_confirm_register_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'not_confirmation_code': '123456'
})
assert req.status == 400
assert len(mock_cognito.confirm_sign_up.mock_calls) == 0
@asyncio.coroutine
def test_confirm_register_view_request_timeout(mock_cognito, cloud_client):
"""Test timeout while logging out."""
mock_cognito.confirm_sign_up.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'confirmation_code': '123456'
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_register_view_unknown_error(mock_cognito, cloud_client):
"""Test unknown error while logging out."""
mock_cognito.confirm_sign_up.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/confirm_register', json={
'email': 'hello@bla.com',
'confirmation_code': '123456'
})
assert req.status == 502
@asyncio.coroutine
def test_forgot_password_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'email': 'hello@bla.com',
})
assert req.status == 200
assert len(mock_cognito.initiate_forgot_password.mock_calls) == 1
@asyncio.coroutine
def test_forgot_password_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'not_email': 'hello@bla.com',
})
assert req.status == 400
assert len(mock_cognito.initiate_forgot_password.mock_calls) == 0
@asyncio.coroutine
def test_forgot_password_view_request_timeout(mock_cognito, cloud_client):
"""Test timeout while logging out."""
mock_cognito.initiate_forgot_password.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'email': 'hello@bla.com',
})
assert req.status == 502
@asyncio.coroutine
def test_forgot_password_view_unknown_error(mock_cognito, cloud_client):
"""Test unknown error while logging out."""
mock_cognito.initiate_forgot_password.side_effect = auth_api.UnknownError
req = yield from cloud_client.post('/api/cloud/forgot_password', json={
'email': 'hello@bla.com',
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_forgot_password_view(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 200
assert len(mock_cognito.confirm_forgot_password.mock_calls) == 1
result_code, result_new_password = \
mock_cognito.confirm_forgot_password.mock_calls[0][1]
assert result_code == '123456'
assert result_new_password == 'hello2'
@asyncio.coroutine
def test_confirm_forgot_password_view_bad_data(mock_cognito, cloud_client):
"""Test logging out."""
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'not_confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 400
assert len(mock_cognito.confirm_forgot_password.mock_calls) == 0
@asyncio.coroutine
def test_confirm_forgot_password_view_request_timeout(mock_cognito,
cloud_client):
"""Test timeout while logging out."""
mock_cognito.confirm_forgot_password.side_effect = asyncio.TimeoutError
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 502
@asyncio.coroutine
def test_confirm_forgot_password_view_unknown_error(mock_cognito,
cloud_client):
"""Test unknown error while logging out."""
mock_cognito.confirm_forgot_password.side_effect = auth_api.UnknownError
req = yield from cloud_client.post(
'/api/cloud/confirm_forgot_password', json={
'email': 'hello@bla.com',
'confirmation_code': '123456',
'new_password': 'hello2',
})
assert req.status == 502
|
{
"content_hash": "62f8b5653b6e216e364a5babeb351e91",
"timestamp": "",
"source": "github",
"line_count": 353,
"max_line_length": 79,
"avg_line_length": 34.51558073654391,
"alnum_prop": 0.6492941562705187,
"repo_name": "stefan-jonasson/home-assistant",
"id": "1090acb01e992c04f9ff507a29453c1e4e212a29",
"size": "12184",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/cloud/test_http_api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4056"
},
{
"name": "Python",
"bytes": "8360711"
},
{
"name": "Ruby",
"bytes": "517"
},
{
"name": "Shell",
"bytes": "12658"
}
],
"symlink_target": ""
}
|
from PyQt5.QtWidgets import QWidget, QDialogButtonBox
from PyQt5.QtCore import QEvent, Qt
from .password_input_uic import Ui_PasswordInputWidget
class PasswordInputView(QWidget, Ui_PasswordInputWidget):
"""
    The view of the password input component
"""
def __init__(self, parent):
# construct from qtDesigner
super().__init__(parent)
self.setupUi(self)
self.button_box = QDialogButtonBox(self)
self.button_box.setOrientation(Qt.Horizontal)
self.button_box.setStandardButtons(QDialogButtonBox.Cancel | QDialogButtonBox.Ok)
self.button_box.button(QDialogButtonBox.Ok).setEnabled(False)
self.layout().addWidget(self.button_box)
self.button_box.hide()
def error(self, text):
self.label_info.setText(text)
self.button_box.button(QDialogButtonBox.Ok).setEnabled(False)
def clear(self):
self.edit_password.clear()
self.edit_secret_key.clear()
def valid(self):
self.label_info.setText(self.tr("Password is valid"))
self.button_box.button(QDialogButtonBox.Ok).setEnabled(True)
def changeEvent(self, event):
"""
        Intercept the LanguageChange event to translate the UI
        :param QEvent event: Event
:return:
"""
if event.type() == QEvent.LanguageChange:
self.retranslateUi(self)
return super(PasswordInputView, self).changeEvent(event)
|
{
"content_hash": "6a3f2fdcc137e9bd9804976b372b4648",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 89,
"avg_line_length": 34.951219512195124,
"alnum_prop": 0.6699232379623168,
"repo_name": "ucoin-io/cutecoin",
"id": "51a32a20c6a6fc787719860b997b7f662506c394",
"size": "1433",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/sakia/gui/sub/password_input/view.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2475"
},
{
"name": "JavaScript",
"bytes": "1594"
},
{
"name": "PowerShell",
"bytes": "3111"
},
{
"name": "Python",
"bytes": "718811"
},
{
"name": "Shell",
"bytes": "3983"
}
],
"symlink_target": ""
}
|
"""A word-counting workflow."""
from __future__ import absolute_import
import logging
import re
import apache_beam as beam
from apache_beam.io import ReadFromText
from apache_beam.io import WriteToText
from apache_beam.metrics import Metrics
from apache_beam.metrics.metric import MetricsFilter
from apache_beam.utils.pipeline_options import PipelineOptions
from apache_beam.utils.pipeline_options import SetupOptions
class WordExtractingDoFn(beam.DoFn):
"""Parse each line of input text into words."""
def __init__(self):
super(WordExtractingDoFn, self).__init__()
self.words_counter = Metrics.counter(self.__class__, 'words')
self.word_lengths_counter = Metrics.counter(self.__class__, 'word_lengths')
self.word_lengths_dist = Metrics.distribution(
self.__class__, 'word_len_dist')
self.empty_line_counter = Metrics.counter(self.__class__, 'empty_lines')
def process(self, element):
"""Returns an iterator over the words of this element.
The element is a line of text. If the line is blank, note that, too.
Args:
element: the element being processed
Returns:
The processed element.
"""
text_line = element.strip()
if not text_line:
self.empty_line_counter.inc(1)
words = re.findall(r'[A-Za-z\']+', text_line)
for w in words:
self.words_counter.inc()
self.word_lengths_counter.inc(len(w))
self.word_lengths_dist.update(len(w))
return words
def run(argv=None):
"""Main entry point; defines and runs the wordcount pipeline."""
class WordcountOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_value_provider_argument(
'--input',
dest='input',
default='gs://dataflow-samples/shakespeare/kinglear.txt',
help='Input file to process.')
parser.add_value_provider_argument(
'--output',
dest='output',
required=True,
help='Output file to write results to.')
pipeline_options = PipelineOptions(argv)
wordcount_options = pipeline_options.view_as(WordcountOptions)
# We use the save_main_session option because one or more DoFn's in this
# workflow rely on global context (e.g., a module imported at module level).
pipeline_options.view_as(SetupOptions).save_main_session = True
p = beam.Pipeline(options=pipeline_options)
# Read the text file[pattern] into a PCollection.
lines = p | 'read' >> ReadFromText(wordcount_options.input)
# Count the occurrences of each word.
counts = (lines
| 'split' >> (beam.ParDo(WordExtractingDoFn())
.with_output_types(unicode))
| 'pair_with_one' >> beam.Map(lambda x: (x, 1))
| 'group' >> beam.GroupByKey()
| 'count' >> beam.Map(lambda (word, ones): (word, sum(ones))))
# Format the counts into a PCollection of strings.
output = counts | 'format' >> beam.Map(lambda (word, c): '%s: %s' % (word, c))
# Write the output using a "Write" transform that has side effects.
# pylint: disable=expression-not-assigned
output | 'write' >> WriteToText(wordcount_options.output)
# Actually run the pipeline (all operations above are deferred).
result = p.run()
result.wait_until_finish()
# Do not query metrics when creating a template which doesn't run
if (not hasattr(result, 'has_job') # direct runner
or result.has_job): # not just a template creation
empty_lines_filter = MetricsFilter().with_name('empty_lines')
query_result = result.metrics().query(empty_lines_filter)
if query_result['counters']:
empty_lines_counter = query_result['counters'][0]
logging.info('number of empty lines: %d', empty_lines_counter.committed)
# TODO(pabloem)(BEAM-1366): Add querying of MEAN metrics.
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
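# A minimal invocation sketch (hypothetical output path; --output is required,
# --input defaults to the King Lear sample):
#
#   python wordcount.py \
#       --input gs://dataflow-samples/shakespeare/kinglear.txt \
#       --output /tmp/counts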
|
{
"content_hash": "f4ad6688e96c24f12ff5533f6b80bc3a",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 80,
"avg_line_length": 36.25925925925926,
"alnum_prop": 0.6693054136874361,
"repo_name": "vikkyrk/incubator-beam",
"id": "e93fd2b2a33fb27f2f3233e7d0910ad1eaa9e43f",
"size": "4701",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdks/python/apache_beam/examples/wordcount.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "42732"
},
{
"name": "Java",
"bytes": "11417976"
},
{
"name": "Protocol Buffer",
"bytes": "50080"
},
{
"name": "Python",
"bytes": "2779037"
},
{
"name": "Shell",
"bytes": "45279"
}
],
"symlink_target": ""
}
|
"""The SpectralMixture kernel."""
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import parameter_properties
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.math import generic as tfp_math
from tensorflow_probability.python.math.psd_kernels import positive_semidefinite_kernel as psd_kernel
from tensorflow_probability.python.math.psd_kernels.internal import util
__all__ = ['SpectralMixture']
class SpectralMixture(psd_kernel.AutoCompositeTensorPsdKernel):
"""The SpectralMixture kernel.
This kernel is derived from parameterizing the spectral density of a
stationary kernel by a mixture of `m` diagonal multivariate normal
distributions [1].
This in turn parameterizes the following kernel:
```none
k(x, y) = sum_j w[j] (prod_i
exp(-2 * (pi * (x[i] - y[i]) * s[j][i])**2) *
cos(2 * pi * (x[i] - y[i]) * m[j][i]))
```
where:
* `j` is the number of mixtures (as mentioned above).
* `w[j]` are the mixture weights.
* `m[j]` and `s[j]` parameterize a `MultivariateNormalDiag(m[j], s[j])`.
In other words, they are the mean and diagonal scale for each mixture
component.
NOTE: This kernel can result in negative off-diagonal entries.
#### References
[1]: A. Wilson, R. P. Adams.
Gaussian Process Kernels for Pattern Discovery and Extrapolation.
https://arxiv.org/abs/1302.4245
"""
def __init__(self,
logits,
locs,
scales,
feature_ndims=1,
validate_args=False,
name='SpectralMixture'):
"""Construct a SpectralMixture kernel instance.
Args:
logits: Floating-point `Tensor` of shape `[..., M]`, whose softmax
represents the mixture weights for the spectral density. Must
be broadcastable with `locs` and `scales`.
locs: Floating-point `Tensor` of shape `[..., M, F1, F2, ... FN]`, which
represents the location parameter of each of the `M` mixture components.
`N` is `feature_ndims`. Must be broadcastable with `logits` and
`scales`.
scales: Positive Floating-point `Tensor` of shape
`[..., M, F1, F2, ..., FN]`, which represents the scale parameter of
each of the `M` mixture components. `N` is `feature_ndims`. Must be
broadcastable with `locs` and `logits`. These parameters act like
inverse length scale parameters.
feature_ndims: Python `int` number of rightmost dims to include in the
squared difference norm in the exponential.
validate_args: If `True`, parameters are checked for validity despite
        possibly degrading runtime performance.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = dict(locals())
with tf.name_scope(name):
dtype = util.maybe_get_common_dtype([logits, locs, scales])
self._logits = tensor_util.convert_nonref_to_tensor(
logits, name='logits', dtype=dtype)
self._locs = tensor_util.convert_nonref_to_tensor(
locs, name='locs', dtype=dtype)
self._scales = tensor_util.convert_nonref_to_tensor(
scales, name='scales', dtype=dtype)
super(SpectralMixture, self).__init__(
feature_ndims,
dtype=dtype,
name=name,
validate_args=validate_args,
parameters=parameters)
@property
def logits(self):
"""Logits parameter."""
return self._logits
@property
def locs(self):
"""Location parameter."""
return self._locs
@property
def scales(self):
"""Scale parameter."""
return self._scales
@classmethod
def _parameter_properties(cls, dtype):
from tensorflow_probability.python.bijectors import softplus # pylint:disable=g-import-not-at-top
return dict(
logits=parameter_properties.ParameterProperties(event_ndims=1),
locs=parameter_properties.ParameterProperties(
event_ndims=lambda self: self.feature_ndims + 1),
scales=parameter_properties.ParameterProperties(
event_ndims=lambda self: self.feature_ndims + 1,
default_constraining_bijector_fn=(
lambda: softplus.Softplus(low=dtype_util.eps(dtype)))))
def _apply_with_distance(
self, x1, x2, pairwise_square_distance, example_ndims=0):
exponent = -2. * pairwise_square_distance
locs = util.pad_shape_with_ones(
self.locs, ndims=example_ndims, start=-(self.feature_ndims + 1))
cos_coeffs = tf.math.cos(2 * np.pi * (x1 - x2) * locs)
feature_ndims = ps.cast(self.feature_ndims, ps.rank(cos_coeffs).dtype)
reduction_axes = ps.range(
ps.rank(cos_coeffs) - feature_ndims, ps.rank(cos_coeffs))
coeff_sign = tf.math.reduce_prod(
tf.math.sign(cos_coeffs), axis=reduction_axes)
log_cos_coeffs = tf.math.reduce_sum(
tf.math.log(tf.math.abs(cos_coeffs)), axis=reduction_axes)
logits = util.pad_shape_with_ones(
self.logits, ndims=example_ndims, start=-1)
log_result, sign = tfp_math.reduce_weighted_logsumexp(
exponent + log_cos_coeffs + logits,
coeff_sign, return_sign=True, axis=-(example_ndims + 1))
return sign * tf.math.exp(log_result)
def _apply(self, x1, x2, example_ndims=0):
# Add an extra dimension to x1 and x2 so it broadcasts with scales.
# [B1, ...., E1, ...., E2, M, F1, ..., F2]
x1 = util.pad_shape_with_ones(
x1, ndims=1, start=-(self.feature_ndims + example_ndims + 1))
x2 = util.pad_shape_with_ones(
x2, ndims=1, start=-(self.feature_ndims + example_ndims + 1))
scales = util.pad_shape_with_ones(
self.scales, ndims=example_ndims, start=-(self.feature_ndims + 1))
pairwise_square_distance = util.sum_rightmost_ndims_preserving_shape(
tf.math.square(np.pi * (x1 - x2) * scales), ndims=self.feature_ndims)
return self._apply_with_distance(
x1, x2, pairwise_square_distance, example_ndims=example_ndims)
def _matrix(self, x1, x2):
# Add an extra dimension to x1 and x2 so it broadcasts with scales.
x1 = util.pad_shape_with_ones(x1, ndims=1, start=-(self.feature_ndims + 2))
x2 = util.pad_shape_with_ones(x2, ndims=1, start=-(self.feature_ndims + 2))
scales = util.pad_shape_with_ones(
self.scales, ndims=1, start=-(self.feature_ndims + 1))
pairwise_square_distance = util.pairwise_square_distance_matrix(
np.pi * x1 * scales, np.pi * x2 * scales, self.feature_ndims)
x1 = util.pad_shape_with_ones(x1, ndims=1, start=-(self.feature_ndims + 1))
x2 = util.pad_shape_with_ones(x2, ndims=1, start=-(self.feature_ndims + 2))
    # Expand `x1` and `x2` so that they broadcast against each other.
return self._apply_with_distance(
x1, x2, pairwise_square_distance, example_ndims=2)
def _parameter_control_dependencies(self, is_init):
if not self.validate_args:
return []
assertions = []
if is_init != tensor_util.is_ref(self._scales):
assertions.append(assert_util.assert_positive(
self._scales,
message='`scales` must be positive.'))
return assertions
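# A minimal usage sketch (two mixture components over a 1-d feature space;
# values are illustrative only):
#
#   import tensorflow.compat.v2 as tf
#   kernel = SpectralMixture(
#       logits=[0.5, -0.5],       # softmax of these gives the mixture weights
#       locs=[[0.1], [2.0]],      # [M, F1] spectral means
#       scales=[[0.5], [1.5]])    # [M, F1] inverse length scales
#   x = tf.constant([[0.], [0.5], [1.]])
#   kernel.matrix(x, x)           # -> [3, 3] kernel matrix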
|
{
"content_hash": "b575b27593c51d5d2bba65d9b5fca761",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 102,
"avg_line_length": 41.30167597765363,
"alnum_prop": 0.6591370215068307,
"repo_name": "tensorflow/probability",
"id": "bfd6846a3b07af631b438094942a66a63db0af79",
"size": "8071",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_probability/python/math/psd_kernels/spectral_mixture.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "55552121"
},
{
"name": "Python",
"bytes": "17339674"
},
{
"name": "Shell",
"bytes": "24852"
},
{
"name": "Starlark",
"bytes": "663851"
}
],
"symlink_target": ""
}
|
import json
import time
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.state import State
from datetime import datetime as dt
from airflow.contrib.kubernetes.pod import Pod
from airflow.contrib.kubernetes.kubernetes_request_factory import \
pod_request_factory as pod_factory
from kubernetes import watch, client
from kubernetes.client.rest import ApiException
from kubernetes.stream import stream as kubernetes_stream
from airflow import AirflowException
from requests.exceptions import HTTPError
from .kube_client import get_kube_client
class PodStatus(object):
PENDING = 'pending'
RUNNING = 'running'
FAILED = 'failed'
SUCCEEDED = 'succeeded'
class PodLauncher(LoggingMixin):
def __init__(self, kube_client=None, in_cluster=True, cluster_context=None,
extract_xcom=False):
super(PodLauncher, self).__init__()
self._client = kube_client or get_kube_client(in_cluster=in_cluster,
cluster_context=cluster_context)
self._watch = watch.Watch()
self.extract_xcom = extract_xcom
self.kube_req_factory = pod_factory.ExtractXcomPodRequestFactory(
) if extract_xcom else pod_factory.SimplePodRequestFactory()
def run_pod_async(self, pod):
req = self.kube_req_factory.create(pod)
self.log.debug('Pod Creation Request: \n%s', json.dumps(req, indent=2))
try:
resp = self._client.create_namespaced_pod(body=req, namespace=pod.namespace)
self.log.debug('Pod Creation Response: %s', resp)
except ApiException:
self.log.exception('Exception when attempting to create Namespaced Pod.')
raise
return resp
def delete_pod(self, pod):
try:
self._client.delete_namespaced_pod(
pod.name, pod.namespace, body=client.V1DeleteOptions())
except ApiException as e:
# If the pod is already deleted
if e.status != 404:
raise
def run_pod(self, pod, startup_timeout=120, get_logs=True):
# type: (Pod, int, bool) -> (State, str)
"""
Launches the pod synchronously and waits for completion.
Args:
            pod (Pod): the pod to launch
            startup_timeout (int): timeout for startup of the pod (if the pod
                is pending for too long, the task is considered a failure)
            get_logs (bool): whether to stream the pod's logs
"""
resp = self.run_pod_async(pod)
curr_time = dt.now()
if resp.status.start_time is None:
while self.pod_not_started(pod):
delta = dt.now() - curr_time
if delta.seconds >= startup_timeout:
raise AirflowException("Pod took too long to start")
time.sleep(1)
self.log.debug('Pod not yet started')
return self._monitor_pod(pod, get_logs)
def _monitor_pod(self, pod, get_logs):
# type: (Pod, bool) -> (State, str)
if get_logs:
logs = self._client.read_namespaced_pod_log(
name=pod.name,
namespace=pod.namespace,
container='base',
follow=True,
tail_lines=10,
_preload_content=False)
for line in logs:
self.log.info(line)
result = None
if self.extract_xcom:
while self.base_container_is_running(pod):
self.log.info('Container %s has state %s', pod.name, State.RUNNING)
time.sleep(2)
result = self._extract_xcom(pod)
self.log.info(result)
result = json.loads(result)
while self.pod_is_running(pod):
self.log.info('Pod %s has state %s', pod.name, State.RUNNING)
time.sleep(2)
return self._task_status(self.read_pod(pod)), result
def _task_status(self, event):
self.log.info(
'Event: %s had an event of type %s',
event.metadata.name, event.status.phase)
status = self.process_status(event.metadata.name, event.status.phase)
return status
def pod_not_started(self, pod):
state = self._task_status(self.read_pod(pod))
return state == State.QUEUED
def pod_is_running(self, pod):
state = self._task_status(self.read_pod(pod))
return state != State.SUCCESS and state != State.FAILED
def base_container_is_running(self, pod):
event = self.read_pod(pod)
status = next(iter(filter(lambda s: s.name == 'base',
event.status.container_statuses)), None)
return status.state.running is not None
def read_pod(self, pod):
try:
return self._client.read_namespaced_pod(pod.name, pod.namespace)
except HTTPError as e:
raise AirflowException(
'There was an error reading the kubernetes API: {}'.format(e)
)
def _extract_xcom(self, pod):
resp = kubernetes_stream(self._client.connect_get_namespaced_pod_exec,
pod.name, pod.namespace,
container=self.kube_req_factory.SIDECAR_CONTAINER_NAME,
command=['/bin/sh'], stdin=True, stdout=True,
stderr=True, tty=False,
_preload_content=False)
try:
result = self._exec_pod_command(
resp, 'cat {}/return.json'.format(self.kube_req_factory.XCOM_MOUNT_PATH))
self._exec_pod_command(resp, 'kill -s SIGINT 1')
finally:
resp.close()
if result is None:
raise AirflowException('Failed to extract xcom from pod: {}'.format(pod.name))
return result
def _exec_pod_command(self, resp, command):
if resp.is_open():
self.log.info('Running command... %s\n' % command)
resp.write_stdin(command + '\n')
while resp.is_open():
resp.update(timeout=1)
if resp.peek_stdout():
return resp.read_stdout()
if resp.peek_stderr():
self.log.info(resp.read_stderr())
break
def process_status(self, job_id, status):
status = status.lower()
if status == PodStatus.PENDING:
return State.QUEUED
elif status == PodStatus.FAILED:
self.log.info('Event with job id %s Failed', job_id)
return State.FAILED
elif status == PodStatus.SUCCEEDED:
self.log.info('Event with job id %s Succeeded', job_id)
return State.SUCCESS
elif status == PodStatus.RUNNING:
return State.RUNNING
else:
self.log.info('Event: Invalid state %s on job %s', status, job_id)
return State.FAILED
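# A minimal usage sketch (illustrative only, not part of the original module):
# it shows how the pieces above fit together when launching a pod end to end.
# The Pod constructor arguments below are assumptions; check
# airflow.contrib.kubernetes.pod.Pod for the exact signature.
if __name__ == '__main__':
    launcher = PodLauncher(in_cluster=False, extract_xcom=False)
    example_pod = Pod(
        image='python:3.6-slim',
        envs={},
        cmds=['python', '-c', 'print("hello from the pod")'],
        namespace='default',
        name='example-pod')
    try:
        # Blocks until the pod finishes; returns a (State, xcom_result) tuple.
        final_state, _ = launcher.run_pod(example_pod, startup_timeout=120, get_logs=True)
        print('Pod finished with state: {}'.format(final_state))
    finally:
        launcher.delete_pod(example_pod)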
|
{
"content_hash": "cf94cda8a5364885a64c3844f33833af",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 90,
"avg_line_length": 39.63793103448276,
"alnum_prop": 0.5761925474844135,
"repo_name": "fenglu-g/incubator-airflow",
"id": "2704fd9d327151a4bf16dc6f5146c8b3bb96cdfe",
"size": "7683",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "airflow/contrib/kubernetes/pod_launcher.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "3634"
},
{
"name": "HTML",
"bytes": "129454"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5852162"
},
{
"name": "Shell",
"bytes": "41793"
}
],
"symlink_target": ""
}
|
import math
class Controller():
"""
The controller class defines a behavior for the supervisor class.
    Any implementation must inherit from this class and implement the
    :meth:`~Controller.execute` method to return a unicycle model output.
:param params: A structure containing the internal controller parameters, such as PID constants.
:type params: :class:`~helpers.Struct`
"""
    def __init__(self, params):
        """Initialize the controller with parameters.
        :param params: A structure containing the internal controller parameters, such as PID constants.
:type params: :class:`~helpers.Struct`
"""
self.set_parameters(params)
self.restart()
def execute(self, state, dt):
"""Given a state and elapsed time, calculate and return robot motion parameters
:param state: Output from the supervisor :meth:`~Supervisor.process` method
:type state: :class:`~helpers.Struct`
:param float dt: Time elapsed since last call to `execute()`
To be implemented in subclasses.
"""
raise NotImplementedError("Controller.execute")
    def set_parameters(self, params):
"""Set the internal parameters of the controller.
:param params: A structure containing the internal controller parameters, such as PID constants.
:type params: :class:`~helpers.Struct`
        To be implemented in subclasses.
"""
raise NotImplementedError("Controller.set_parameters")
def restart(self):
"""Reset the controller to the initial state."""
pass
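# An illustrative subclass (a sketch, not part of the original module): a
# minimal proportional go-to-goal controller implementing the interface above.
# The fields assumed on `state` and `params` (pose, goal, velocity, kp) are
# hypothetical; the real supervisor may expose different structures.
class GoToGoal(Controller):
    def set_parameters(self, params):
        """Store the proportional gain."""
        self.kp = params.kp
    def execute(self, state, dt):
        """Return (v, omega) steering the robot toward state.goal."""
        x, y, theta = state.pose
        # Heading toward the goal, with the error wrapped to [-pi, pi]
        heading = math.atan2(state.goal.y - y, state.goal.x - x)
        error = math.atan2(math.sin(heading - theta), math.cos(heading - theta))
        return state.velocity, self.kp * error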
|
{
"content_hash": "fe8e175752ea949f962f72617c891514",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 105,
"avg_line_length": 38.48837209302326,
"alnum_prop": 0.6519637462235649,
"repo_name": "ZhuangER/robot_path_planning",
"id": "45fe7b756bf1d91cfa75ff93bc29621a92a58d30",
"size": "1779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "1666"
},
{
"name": "Python",
"bytes": "2095512"
}
],
"symlink_target": ""
}
|
BASE_DIR = '/home/travis/build/whitews/PubMedVis'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'pubmedvis.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
BASE_DIR + '/static/',
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'do-not-use-as-your-secret-key'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'PubMedVis.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'PubMedVis.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
BASE_DIR + '/templates',
BASE_DIR + '/publication_linker/templates',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'publication_linker',
'django.contrib.admin',
'django_extensions',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
{
"content_hash": "b165c53091e17b6d135c654949981b31",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 108,
"avg_line_length": 34.189542483660134,
"alnum_prop": 0.6792200344102466,
"repo_name": "whitews/PubMedVis",
"id": "f920eb163bde3304aed2592a4db6bd076c7fee78",
"size": "5273",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PubMedVis/settings_tests.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "326464"
},
{
"name": "Python",
"bytes": "21150"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
import datetime
import inspect
import json
import os
import subprocess
import traceback
import sys
import shutil
from functools import wraps
from multiprocessing import Pool, cpu_count
from submitty_utils import submitty_schema_validator
# global variable available to be used by the test suite modules
# this file is at SUBMITTY_INSTALL_DIR/test_suite/integrationTests
SUBMITTY_INSTALL_DIR = os.path.realpath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')
)
INTEGRATION_TEST_ROOT_FOLDER = os.path.join(
SUBMITTY_INSTALL_DIR,
"./test_suite/integrationTests/"
)
COMPILE_CONFIGURE_BIN_PATH = os.path.join(
INTEGRATION_TEST_ROOT_FOLDER,
'compile_configure_bin.sh'
)
CONFIGURE_BIN_PATH = os.path.join(
INTEGRATION_TEST_ROOT_FOLDER,
"configure.bin"
)
BUILD_MAIN_CONFIGURE_PATH = os.path.join(
INTEGRATION_TEST_ROOT_FOLDER,
'build_main_configure.sh'
)
# Verify that this has been installed by just checking that this file is located in
# a directory next to the config directory which has submitty.json in it
if not os.path.exists(os.path.join(SUBMITTY_INSTALL_DIR, 'config', 'submitty.json')):
raise SystemExit('You must install the test suite before being able to run it.')
SUBMITTY_TUTORIAL_DIR = os.path.join(
SUBMITTY_INSTALL_DIR,
"./GIT_CHECKOUT/Tutorial"
)
GRADING_SOURCE_DIR = os.path.join(
SUBMITTY_INSTALL_DIR,
"./src/grading"
)
LOG_FILE = None
LOG_DIR = os.path.join(
SUBMITTY_INSTALL_DIR,
"./test_suite/log"
)
def print(*args, **kwargs):
global LOG_FILE
if "sep" not in kwargs:
kwargs["sep"] = " "
if "end" not in kwargs:
kwargs["end"] = '\n'
message = kwargs["sep"].join(map(str, args)) + kwargs["end"]
if LOG_FILE is None:
        # include milliseconds in the string so that we have a unique log file
        # per test run
LOG_FILE = datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')[:-3]
with open(os.path.join(LOG_DIR, LOG_FILE), 'a') as write_file:
write_file.write(message)
sys.stdout.write(message)
# Concurrency note:
# To be able to work across process boundary, there must be no lambda expression
# within the TestcaseFile class, and all wrappers must be annotated with
# functools.wraps. This is required for Python to transfer the object to the
# worker process; for details, see Python pickling.
# Note: there can still be lambdas and non-top-level functions in client test
# case code; it is just the TestcaseFile that must be pickle-able.
class TestcaseFile:
def __init__(self):
self.wrapper = type(None) # Equivalent to lambda: None, but pickle-able
self.prebuild = type(None)
self.testcases = []
self.testcases_names = []
to_run = defaultdict(TestcaseFile)
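# A short illustrative check of the pickling constraint described above (a
# sketch, not part of the test suite): module-level functions pickle by
# qualified name, while lambdas and plain closures fail, which is why
# TestcaseFile avoids storing them directly.
def _pickling_demo():
    import pickle
    pickle.dumps(run_all)  # works: a module-level function, found by name
    try:
        pickle.dumps(lambda: None)
    except (pickle.PicklingError, AttributeError) as exc:
        print(f"lambda is not pickle-able: {exc}")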
# Helpers for color
class ASCIIEscapeManager:
def __init__(self, codes):
self.codes = list(map(str, codes))
def __enter__(self):
sys.stdout.write(f"\u001B[{';'.join(self.codes)}m")
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout.write("\033[0m")
def __add__(self, other):
return ASCIIEscapeManager(self.codes + other.codes)
bold = ASCIIEscapeManager([1])
underscore = ASCIIEscapeManager([4])
blink = ASCIIEscapeManager([5])
black = ASCIIEscapeManager([30])
red = ASCIIEscapeManager([31])
green = ASCIIEscapeManager([32])
yellow = ASCIIEscapeManager([33])
blue = ASCIIEscapeManager([34])
magenta = ASCIIEscapeManager([35])
cyan = ASCIIEscapeManager([36])
white = ASCIIEscapeManager([37])
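# Usage sketch for the color helpers above (illustrative only): managers
# compose with `+`, emitting the combined SGR codes on entry and a reset on
# exit of the `with` block.
def _color_demo():
    with bold + green:
        print("passed")  # rendered bold green
    with red:
        print("failed")  # rendered red, then reset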
###################################################################################
###################################################################################
# Run the given list of test case names in parallel using multiprocessing
def run_tests(names):
arguments = [] # Arguments for run_single_test(list<str>, TestcaseFile)
for name in sorted(names):
name = name.split(".")
key = name[0]
case = to_run[key]
arguments.append((name, case))
__setup()
# Concurrency note:
    # It's best to use something close to cpu_count() here, as single-thread
    # performance is important for some compute-intensive modules; they might
    # time out as a result of poor single-thread performance.
# This can happen with python_simple_homework.infinite_loop_time_cutoff
with Pool(cpu_count()) as p:
test_result = p.starmap(__run_single_test_module, arguments)
if False in test_result:
with bold + red:
print(f"{test_result.count(True)}/{len(test_result)} modules passed")
sys.exit(1)
else:
with bold + green:
print(f"All {len(names)} modules passed")
def __setup():
subprocess.check_output([
"/bin/bash",
COMPILE_CONFIGURE_BIN_PATH,
SUBMITTY_INSTALL_DIR,
CONFIGURE_BIN_PATH
],
stderr=subprocess.STDOUT
)
# Concurrency note:
# Currently all tests are run in parallel at the module level.
# Different modules run in parallel on different worker processes, but all the
# test cases within the same module run sequentially on the same process.
# Executes a single test module. This method is executed across a process
# boundary and is not meant to be called externally.
def __run_single_test_module(name, case):
key = name[0]
with bold:
print(f"--- BEGIN TEST MODULE {key.upper()} AT PROCESS {os.getpid()} ---")
module_success = __compile_test_module(case)
if module_success:
testcases = __collect_test_cases(case, name)
succeed_count = len(testcases)
for index, f in zip(range(1, succeed_count + 1), testcases):
test_case_success = __execute_test_case(index, f)
if not test_case_success:
succeed_count -= 1
if succeed_count == len(testcases):
with bold + green:
print("All testcases passed")
else:
with bold + red:
print(f"{succeed_count}/{len(testcases)} testcases passed")
module_success = False
with bold:
print(f"--- END TEST MODULE {key.upper()} ---")
print()
return module_success
def __compile_test_module(case):
try:
print("* Starting compilation...")
case.prebuild()
case.wrapper.build()
print("* Finished compilation")
return True
except Exception as e:
print(f"Build failed with exception: {e}")
return False
def __collect_test_cases(case, name):
testcases = []
if len(name) > 1:
for i in range(len(case.testcases)):
if str(case.testcases_names[i]).lower() == name[1].lower():
testcases.append(case.testcases[i])
else:
testcases = case.testcases
return testcases
# Concurrency note:
# For future integration test speed-ups, the concurrency level can be
# increased if __execute_test_case is executed in parallel.
# However, test code would then lose its ability to share state within a
# test module across test cases, as they would be executed in
# different processes.
def __execute_test_case(index, test_case):
try:
test_case()
return True
except Exception as e:
with bold + red:
lineno = None
tb = traceback.extract_tb(sys.exc_info()[2])
for i in range(len(tb) - 1, -1, -1):
if os.path.basename(tb[i][0]) == '__init__.py':
lineno = tb[i][1]
print(f"Testcase {index} failed on line {lineno} with exception: ", e)
sys.exc_info()
return False
# Run every test currently loaded
def run_all():
run_tests(to_run.keys())
# Copy the files & directories from source to target
# it will create directories as needed
# it's ok if the target directory or subdirectories already exist
# it will overwrite files with the same name if they exist
def copy_contents_into(source, target):
if not os.path.isdir(target):
raise RuntimeError(f"ERROR: the target directory does not exist '{target}'")
if os.path.isdir(source):
for item in os.listdir(source):
if os.path.isdir(os.path.join(source, item)):
if os.path.isdir(os.path.join(target, item)):
# recurse
copy_contents_into(os.path.join(source, item), os.path.join(target, item))
elif os.path.isfile(os.path.join(target, item)):
raise RuntimeError(f"ERROR: the target subpath is a file not a directory '{os.path.join(target, item)}'")
else:
# copy entire subtree
shutil.copytree(os.path.join(source, item), os.path.join(target, item))
else:
if os.path.exists(os.path.join(target, item)):
os.remove(os.path.join(target, item))
try:
shutil.copy(os.path.join(source, item), target)
                except OSError as exc:
                    raise RuntimeError(f"ERROR COPYING FILE: {os.path.join(source, item)} -> {os.path.join(target, item)}") from exc
def move_only_files(source, target):
source_files = os.listdir(source)
for file_name in source_files:
full_file_name = os.path.join(source, file_name)
if os.path.isfile(full_file_name):
shutil.move(full_file_name, target)
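# A small usage sketch for the two helpers above (illustrative only; the
# paths are hypothetical): merge one tree into another, overwriting files
# with the same name, then relocate only the top-level files.
def _copy_demo():
    for d in ("/tmp/demo_source", "/tmp/demo_target", "/tmp/demo_elsewhere"):
        os.makedirs(d, exist_ok=True)
    copy_contents_into("/tmp/demo_source", "/tmp/demo_target")
    move_only_files("/tmp/demo_target", "/tmp/demo_elsewhere")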
###################################################################################
###################################################################################
# Helper class used to remove the burden of paths from the testcase author.
# The path (in /var/local) of the testcase is provided to the constructor,
# and is subsequently used in all methods for compilation, linkage, etc.
# The resulting object is passed to each function defined within the testcase
# package. Typically, one would use the @testcase decorator defined below,
# as it uses inspection to fully handle all paths with no input from the
# testcase author.
class TestcaseWrapper:
def __init__(self, path):
self.testcase_path = path
# Compile each .cpp file into an object file in the current directory. Those
# object files are then moved into the appropriate directory in the /var/local
# tree. Unfortunately, this presents some issues, for example non-grading
# object files in that directory being moved and subsequently linked with the
# rest of the program. gcc/clang do not provide an option to specify an output
# directory when compiling source files in bulk in this manner. The solution
# is likely to run the compiler with a different working directory alongside
# using relative paths.
def build(self):
try:
# the log directory will contain various log files
os.mkdir(os.path.join(self.testcase_path, "log"))
# the build directory will contain the intermediate cmake files
os.mkdir(os.path.join(self.testcase_path, "build"))
# the bin directory will contain the autograding executables
os.mkdir(os.path.join(self.testcase_path, "bin"))
# The data directory in which configure will be run. This is needed to
# make complete_config.json for schema testing
os.mkdir(os.path.join(self.testcase_path, "data"))
        except OSError:
            # the directories may already exist from a previous run
            pass
# copy the cmake file to the build directory
subprocess.call(["cp",
os.path.join(GRADING_SOURCE_DIR, "Sample_CMakeLists.txt"),
os.path.join(self.testcase_path, "build", "CMakeLists.txt")])
        shutil.copy(BUILD_MAIN_CONFIGURE_PATH, os.path.join(self.testcase_path, "build"))
# First, we need to compile and run configure.out
with open(os.path.join(self.testcase_path, "log", "main_configure_build.txt"), "w") as configure_output:
return_code = subprocess.call(["/bin/bash", "build_main_configure.sh", self.testcase_path, SUBMITTY_INSTALL_DIR, CONFIGURE_BIN_PATH],
cwd=os.path.join(self.testcase_path, "build"), stdout=configure_output, stderr=configure_output)
if return_code != 0:
raise RuntimeError(f"Failed to generate main configure: {return_code}")
with open(os.path.join(self.testcase_path, "log", "cmake_output.txt"), "w") as cmake_output:
return_code = subprocess.call(["cmake", "-DASSIGNMENT_INSTALLATION=OFF", "."],
cwd=os.path.join(self.testcase_path, "build"), stdout=cmake_output, stderr=cmake_output)
if return_code != 0:
raise RuntimeError(f"Build (cmake) exited with exit code {return_code}")
with open(os.path.join(self.testcase_path, "log", "make_output.txt"), "w") as make_output:
return_code = subprocess.call(["make"],
cwd=os.path.join(self.testcase_path, "build"), stdout=make_output, stderr=make_output)
if return_code != 0:
self.debug_print("log/make_output.txt")
raise RuntimeError(f"Build (make) exited with exit code {return_code}")
# Run compile.out using some sane arguments.
def run_compile(self):
config_path = os.path.join(self.testcase_path, 'assignment_config', 'complete_config.json')
with open(config_path, 'r') as infile:
config = json.load(infile)
my_testcases = config['testcases']
data_folder = os.path.join(self.testcase_path, 'data')
# We create a temporary data folder, so that we don't tarnish the original.
tmp_data_folder = os.path.join(self.testcase_path, 'tmp_data')
if os.path.isdir(tmp_data_folder):
shutil.rmtree(tmp_data_folder)
os.makedirs(tmp_data_folder)
# Will hold the compiled files and their STDOUT/STDERRs
tmp_comp_folder = os.path.join(self.testcase_path, 'tmp_comp')
# Make the work folder used by run_run.
if os.path.isdir(tmp_comp_folder):
shutil.rmtree(tmp_comp_folder)
os.makedirs(tmp_comp_folder)
# Copy the data folder to the temp_data folder, then pull that into the tmp_comp_folder
copy_contents_into(data_folder, tmp_data_folder)
copy_contents_into(tmp_data_folder, tmp_comp_folder)
executable_path_list = list()
with open(os.path.join(self.testcase_path, "log", "compile_output.txt"), "w") as log:
# We start counting from one.
for testcase_num in range(1, len(my_testcases) + 1):
my_testcase = my_testcases[testcase_num - 1]
testcase_folder = os.path.join(tmp_comp_folder, my_testcase['testcase_id'])
if 'type' in my_testcase:
if my_testcase['type'] != 'FileCheck' and my_testcase['type'] != 'Compilation':
continue
if my_testcase['type'] == 'Compilation':
if 'executable_name' in my_testcase:
provided_executable_list = my_testcase['executable_name']
if not isinstance(provided_executable_list, (list,)):
provided_executable_list = list([provided_executable_list])
for exe in provided_executable_list:
if exe.strip() == '':
continue
executable_path = os.path.join(testcase_folder, exe)
executable_path_list.append((executable_path, exe))
else:
continue
# Make the tmp folder for this testcase.
if os.path.isdir(testcase_folder):
shutil.rmtree(testcase_folder)
os.makedirs(testcase_folder)
copy_contents_into(tmp_data_folder, testcase_folder)
return_code = subprocess.call(
[
os.path.join(self.testcase_path, "bin", "compile.out"),
"testassignment",
"testuser",
"1",
"0",
my_testcase['testcase_id']
],
cwd=testcase_folder, stdout=log, stderr=log)
if return_code != 0:
raise RuntimeError(f"Compile exited with exit code {return_code}")
compiled_files_directory = os.path.join(self.testcase_path, 'compiled_files')
# Don't trust that the developer properly cleaned up after themselves.
if os.path.isdir(compiled_files_directory):
shutil.rmtree(compiled_files_directory)
os.makedirs(compiled_files_directory)
# Move the compiled files into the compiled_files_directory
for path, name in executable_path_list:
if not os.path.isfile(path):
continue
target_path = os.path.join(compiled_files_directory, name)
if not os.path.exists(target_path):
os.makedirs(os.path.dirname(target_path), exist_ok=True)
shutil.copy(path, target_path)
# Create the work folder, which will be used by run_run.
work_folder = os.path.join(self.testcase_path, 'work')
if os.path.isdir(work_folder):
shutil.rmtree(work_folder)
os.makedirs(work_folder)
        # Move the test##/ files generated by compilation into the work directory
copy_contents_into(tmp_comp_folder, work_folder)
shutil.rmtree(tmp_comp_folder)
# Run run.out using some sane arguments.
def run_run(self):
config_path = os.path.join(self.testcase_path, 'assignment_config', 'complete_config.json')
with open(config_path, 'r') as infile:
config = json.load(infile)
my_testcases = config['testcases']
data_folder = os.path.join(self.testcase_path, 'data')
tmp_data_folder = os.path.join(self.testcase_path, 'tmp_data')
work_folder = os.path.join(self.testcase_path, 'work')
compiled_files_directory = os.path.join(self.testcase_path, 'compiled_files')
if os.path.isdir(tmp_data_folder):
shutil.rmtree(tmp_data_folder)
os.makedirs(tmp_data_folder)
copy_contents_into(data_folder, tmp_data_folder)
with open(os.path.join(self.testcase_path, "log", "run_output.txt"), "w") as log:
# We start counting from one.
for testcase_num in range(1, len(my_testcases) + 1):
my_testcase = my_testcases[testcase_num - 1]
if 'type' in my_testcases[testcase_num - 1]:
if my_testcase['type'] == 'FileCheck' or my_testcase['type'] == 'Compilation':
continue
# Make the tmp folder for this testcase.
testcase_folder = os.path.join(work_folder, my_testcase['testcase_id'])
# Don't trust that the developer properly cleaned up after themselves.
if os.path.isdir(testcase_folder):
shutil.rmtree(testcase_folder)
os.makedirs(testcase_folder)
copy_contents_into(tmp_data_folder, testcase_folder)
copy_contents_into(compiled_files_directory, testcase_folder)
return_code = subprocess.call(
[
os.path.join(self.testcase_path, "bin", "run.out"),
"testassignment",
"testuser",
"1",
"0",
my_testcase['testcase_id']
],
cwd=testcase_folder, stdout=log, stderr=log)
if return_code != 0:
raise RuntimeError(f"run.out exited with exit code {return_code}")
# Copy the results to the data folder.
copy_contents_into(work_folder, data_folder)
copy_contents_into(compiled_files_directory, data_folder)
shutil.rmtree(work_folder)
shutil.rmtree(tmp_data_folder)
# if os.path.isdir(compiled_files_directory):
# shutil.rmtree(compiled_files_directory)
# Run the validator using some sane arguments. Likely wants to be made much more
# customizable (different submission numbers, multiple users, etc.)
# TODO: Read "main" for other executables, determine what files they expect and
# the locations in which they expect them given different inputs.
def run_validator(self, user="testuser", subnum="1", subtime="0"):
# VALIDATOR USAGE: validator <hw_id> <rcsid> <submission#> <time-of-submission>
with open(os.path.join(self.testcase_path, "log", "validate_output.txt"), "w") as log:
return_code = subprocess.call([os.path.join(self.testcase_path, "bin", "validate.out"),
"testassignment", user, subnum, subtime], # "testuser", "1", "0"],
cwd=os.path.join(self.testcase_path, "data"), stdout=log, stderr=log)
if return_code != 0:
raise RuntimeError(f"Validator exited with exit code {return_code}")
###################################################################################
# Run the UNIX diff command given a filename. The files are compared between the
# data folder and the validation folder within the test package. For example,
# running test.diff("foo.txt") within the test package "test_foo", the files
# /var/local/autograde_tests/tests/test_foo/data/foo.txt and
# /var/local/autograde_tests/tests/test_foo/validation/foo.txt will be compared.
def diff(self, f1, f2="", arg=""):
# if only 1 filename provided...
if not f2:
f2 = f1
f1 = os.path.join("data", f1)
        if 'data' not in os.path.split(os.path.split(f2)[0]):
f2 = os.path.join("validation", f2)
filename1 = os.path.join(self.testcase_path, f1)
filename2 = os.path.join(self.testcase_path, f2)
if not os.path.isfile(filename1):
raise RuntimeError(f"File {filename1} does not exist")
if not os.path.isfile(filename2):
raise RuntimeError(f"File {filename2} does not exist")
if arg == "":
process = subprocess.Popen(["diff", filename1, filename2], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
elif arg == "-b":
# Ignore changes in white space
process = subprocess.Popen(["diff", arg, filename1, filename2], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
raise RuntimeError(f"ARGUMENT {arg} TO DIFF NOT TESTED")
out, _ = process.communicate()
out = out.decode('utf-8')
if process.returncode == 1:
raise RuntimeError(f"Difference between {filename1} and {filename2} "
f"exited with exit code {process.returncode}\n\n"
f"Diff:\n{out}")
# Helpful for debugging make errors on CI
def debug_print(self, f):
filename = os.path.join(self.testcase_path, f)
print("\nDEBUG_PRINT: ", filename)
if os.path.exists(filename):
with open(filename, 'r') as fin:
print(fin.read())
else:
print(" < file does not exist >")
# Loads 2 files, truncates them after specified number of lines,
# and then checks to see if they match
def diff_truncate(self, num_lines_to_compare, f1, f2=""):
# if only 1 filename provided...
if not f2:
f2 = f1
f1 = os.path.join("data", f1)
        if 'data' not in os.path.split(f2):
f2 = os.path.join("validation", f2)
filename1 = os.path.join(self.testcase_path, f1)
filename2 = os.path.join(self.testcase_path, f2)
if not os.path.isfile(filename1):
raise RuntimeError(f"File {filename1} does not exist")
if not os.path.isfile(filename2):
raise RuntimeError(f"File {filename2} does not exist")
with open(filename1) as file1:
contents1 = file1.readlines()
with open(filename2) as file2:
contents2 = file2.readlines()
# delete/truncate the file
del contents1[num_lines_to_compare:]
del contents2[num_lines_to_compare:]
if contents1 != contents2:
raise RuntimeError(f"Files {filename1} and {filename2} are different within the first {num_lines_to_compare} lines.")
###################################################################################
def empty_file(self, f):
# if no directory provided...
# if not os.path.dirname(f):
f = os.path.join("data", f)
filename = os.path.join(self.testcase_path, f)
if not os.path.isfile(filename):
raise RuntimeError(f"File {filename} should exist")
if os.stat(filename).st_size != 0:
raise RuntimeError(f"File {filename} should be empty")
###################################################################################
# Helper function for json_diff. Sorts each nested list. Allows comparison.
# Credit: Zero Piraeus.
# http://stackoverflow.com/questions/25851183/how-to-compare-two-json-objects-with-the-same-elements-in-a-different-order-equa
def json_ordered(self, obj):
if isinstance(obj, dict):
return sorted((k, self.json_ordered(v)) for k, v in obj.items())
if isinstance(obj, list):
return sorted(self.json_ordered(x) for x in obj)
else:
return obj
###################################################################################
    # Helper function for json_diff. Recursively removes keys from input_dict.
# Credit: Olivier Melançon.
# https://stackoverflow.com/a/49103013
def dict_sweep(self, input_dict, keys):
if isinstance(input_dict, dict):
return {k: self.dict_sweep(v, keys) for k, v in input_dict.items() if k not in keys}
elif isinstance(input_dict, list):
return [self.dict_sweep(element, keys) for element in input_dict]
else:
return input_dict
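    def _json_helpers_demo(self):
        # Illustrative only (a sketch, not part of the test suite): sweep the
        # ignored keys, then order-normalize, so logically equal documents
        # compare equal regardless of list order or key order.
        a = {"x": [2, 1], "metrics": {"t": 0.1}}
        b = {"metrics": {"t": 9.9}, "x": [1, 2]}
        assert (self.json_ordered(self.dict_sweep(a, ["metrics"]))
                == self.json_ordered(self.dict_sweep(b, ["metrics"])))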
# Compares two json files allowing differences in file whitespace
# (indentation, newlines, etc) and also alternate ordering of data
# inside dictionary/key-value pairs
    # By default, ignore metric values if they exist because they differ on every run
def json_diff(self, f1, f2="", ignore_keys=['metrics']):
# if only 1 filename provided...
if not f2:
f2 = f1
f1 = os.path.join('validation', f1)
else:
f1 = os.path.join("data", f1)
        if 'data' not in os.path.split(f2):
f2 = os.path.join("validation", f2)
filename1 = os.path.join(self.testcase_path, f1)
filename2 = os.path.join(self.testcase_path, f2)
if not os.path.isfile(filename1):
raise RuntimeError(f"File {filename1} does not exist")
if not os.path.isfile(filename2):
raise RuntimeError(f"File {filename2} does not exist")
with open(filename1) as file1:
contents1 = json.load(file1)
with open(filename2) as file2:
contents2 = json.load(file2)
ordered1 = self.json_ordered(self.dict_sweep(contents1, ignore_keys))
ordered2 = self.json_ordered(self.dict_sweep(contents2, ignore_keys))
if ordered1 != ordered2:
# NOTE: The ordered json has extra syntax....
# so instead, print the original contents to a file and diff that
# (yes clumsy)
with open('json_ordered_1.json', 'w') as outfile:
json.dump(contents1, outfile, sort_keys=True, indent=4, separators=(',', ': '))
with open('json_ordered_2.json', 'w') as outfile:
json.dump(contents2, outfile, sort_keys=True, indent=4, separators=(',', ': '))
print("\ndiff json_ordered_1.json json_ordered_2.json\n")
            # run diff synchronously so its output is flushed before the raise below
            subprocess.call(["diff", 'json_ordered_1.json', 'json_ordered_2.json'])
raise RuntimeError(f"JSON files are different: {filename1} {filename2}")
def empty_json_diff(self, f):
f = os.path.join("data", f)
filename1 = os.path.join(self.testcase_path, f)
filename2 = os.path.join(SUBMITTY_INSTALL_DIR, "test_suite/integrationTests/data/empty_json_diff_file.json")
return self.json_diff(filename1, filename2)
###################################################################################
# remove the running time, and many of the system stack trace lines
def simplify_junit_output(self, filename):
if not os.path.isfile(filename):
raise RuntimeError(f"File {filename} does not exist")
simplified = []
with open(filename, 'r') as file:
for line in file:
if 'Time' in line:
continue
if 'org.junit' in line:
continue
if 'sun.reflect' in line:
continue
if 'java.lang' in line:
continue
if 'java.net' in line:
continue
if 'sun.misc' in line:
continue
if '... ' in line and ' more' in line:
continue
# sys.stdout.write(f"LINE: {line}")
simplified.append(line)
return simplified
# Compares two junit output files, ignoring the run time
def junit_diff(self, f1, f2=""):
# if only 1 filename provided...
if not f2:
f2 = f1
f1 = os.path.join("data", f1)
        if 'data' not in os.path.split(f2):
f2 = os.path.join("validation", f2)
filename1 = os.path.join(self.testcase_path, f1)
filename2 = os.path.join(self.testcase_path, f2)
if self.simplify_junit_output(filename1) != self.simplify_junit_output(filename2):
raise RuntimeError(f"JUNIT OUTPUT files {filename1} and {filename2} are different")
# Validate a configuration against the submitty complete_config_schema.json
def validate_complete_config(self, config_path):
schema_path = os.path.join(SUBMITTY_INSTALL_DIR, 'bin', 'json_schemas', 'complete_config_schema.json')
try:
submitty_schema_validator.validate_complete_config_schema_using_filenames(config_path, schema_path)
except submitty_schema_validator.SubmittySchemaException as s:
s.print_human_readable_error()
raise
###################################################################################
# Decorators
###################################################################################
def prebuild(func):
mod = inspect.getmodule(inspect.stack()[1][0])
path = os.path.dirname(mod.__file__)
modname = mod.__name__
tw = TestcaseWrapper(path)
    @wraps(func)  # makes the closure pickle-able by name (see concurrency note above)
def wrapper():
print(f"* Starting prebuild for {modname}... ", end="")
func(tw)
print("Done")
global to_run
to_run[modname].wrapper = tw
to_run[modname].prebuild = wrapper
return wrapper
# Decorator function using some inspection trickery to determine paths
def testcase(func):
# inspect.stack() gets the current program stack. Index 1 is one
# level up from the current stack frame, which in this case will
# be the frame of the function calling this decorator. The first
# element of that tuple is a frame object, which can be passed to
# inspect.getmodule to fetch the module associated with that frame.
# From there, we can get the path of that module, and infer the rest
# of the required information.
mod = inspect.getmodule(inspect.stack()[1][0])
path = os.path.dirname(mod.__file__)
modname = mod.__name__
tw = TestcaseWrapper(path)
    @wraps(func)  # makes the closure pickle-able by name (see concurrency note above)
def wrapper():
print(f"* Starting testcase {modname}.{func.__name__}... ", end="")
try:
func(tw)
with bold + green:
print("PASSED")
        except Exception:
with bold + red:
print("FAILED")
            # a bare raise re-raises the current exception as-is
raise
global to_run
to_run[modname].wrapper = tw
to_run[modname].testcases.append(wrapper)
to_run[modname].testcases_names.append(func.__name__)
return wrapper
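# An illustrative client test module (a sketch, not part of this library):
# it shows how the decorators above are meant to be used. A real module lives
# in its own package directory so the inspection in @testcase can resolve its
# path; the helper calls below are examples only. After import, run_all() or
# run_tests([...]) executes everything that was registered.
#
#     from lib import prebuild, testcase
#
#     @prebuild
#     def initialize(test):            # receives the TestcaseWrapper
#         test.build()
#
#     @testcase
#     def correct_output(test):
#         test.run_compile()
#         test.run_run()
#         test.run_validator()
#         test.json_diff("results.json")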
|
{
"content_hash": "2dedf2bcc460391c4e3c2bd570a8fa6d",
"timestamp": "",
"source": "github",
"line_count": 779,
"max_line_length": 145,
"avg_line_length": 42.28754813863928,
"alnum_prop": 0.5905227369315768,
"repo_name": "Submitty/Submitty",
"id": "b79418f468fa73b2073d4f36cd4c3c1563b1c3f2",
"size": "32943",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/integrationTests/lib.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8450"
},
{
"name": "C++",
"bytes": "496998"
},
{
"name": "CMake",
"bytes": "1561"
},
{
"name": "CSS",
"bytes": "210295"
},
{
"name": "HTML",
"bytes": "799796"
},
{
"name": "Java",
"bytes": "3828"
},
{
"name": "JavaScript",
"bytes": "981630"
},
{
"name": "PHP",
"bytes": "3103857"
},
{
"name": "PLpgSQL",
"bytes": "122825"
},
{
"name": "Python",
"bytes": "1589891"
},
{
"name": "Shell",
"bytes": "205161"
},
{
"name": "TeX",
"bytes": "21960"
},
{
"name": "Twig",
"bytes": "1239136"
},
{
"name": "TypeScript",
"bytes": "17328"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.exception import SqlmapUnsupportedFeatureException
from plugins.generic.filesystem import Filesystem as GenericFilesystem
class Filesystem(GenericFilesystem):
def __init__(self):
GenericFilesystem.__init__(self)
def readFile(self, rFile):
errMsg = "on Firebird it is not possible to read files"
raise SqlmapUnsupportedFeatureException(errMsg)
def writeFile(self, wFile, dFile, fileType=None, forceCheck=False):
errMsg = "on Firebird it is not possible to write files"
raise SqlmapUnsupportedFeatureException(errMsg)
|
{
"content_hash": "8539ef63687d252c423c579b1e8bfc60",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 71,
"avg_line_length": 37.21052631578947,
"alnum_prop": 0.7397454031117398,
"repo_name": "JeyZeta/Dangerous",
"id": "da8fc26ed969fb4da6d9c270daa4bc5506343e3b",
"size": "730",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "Dangerous/Golismero/tools/sqlmap/plugins/dbms/firebird/filesystem.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "13260"
},
{
"name": "C",
"bytes": "12851"
},
{
"name": "C++",
"bytes": "3174"
},
{
"name": "CSS",
"bytes": "267451"
},
{
"name": "HTML",
"bytes": "2686153"
},
{
"name": "JavaScript",
"bytes": "1356956"
},
{
"name": "Lua",
"bytes": "14436"
},
{
"name": "Makefile",
"bytes": "11190"
},
{
"name": "Objective-C",
"bytes": "998"
},
{
"name": "PHP",
"bytes": "619"
},
{
"name": "PLpgSQL",
"bytes": "536"
},
{
"name": "Perl",
"bytes": "263365"
},
{
"name": "Python",
"bytes": "16669102"
},
{
"name": "Roff",
"bytes": "9828"
},
{
"name": "Ruby",
"bytes": "503"
},
{
"name": "Shell",
"bytes": "6691"
}
],
"symlink_target": ""
}
|
"""Tests for grr_response_client.client_build."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import io
import multiprocessing
import os
from absl.testing import absltest
import mock
from grr_response_client_builder import client_build
from grr_response_core.lib.util import temp
class MultiRepackTest(absltest.TestCase):
def setUp(self):
super(MultiRepackTest, self).setUp()
self.pool_obj = mock.MagicMock()
pool_patcher = mock.patch.object(
multiprocessing, "Pool", return_value=self.pool_obj)
self.mock_pool = pool_patcher.start()
self.addCleanup(pool_patcher.stop)
config_dir = temp.TempDirPath()
self.label1_config = os.path.join(config_dir, "label1.yaml")
self.label2_config = os.path.join(config_dir, "label2.yaml")
with io.open(self.label1_config, mode="w") as filedesc:
filedesc.write("Client.labels: [label1]")
with io.open(self.label2_config, mode="w") as filedesc:
filedesc.write("Client.labels: [label2]")
self.template_dir = temp.TempDirPath()
self.deb_template = os.path.join(self.template_dir,
"grr_3.1.0.2_amd64.deb.zip")
self.exe_template = os.path.join(self.template_dir,
"GRR_3.1.0.2_i386.exe.zip")
self.xar_template = os.path.join(self.template_dir,
"grr_3.1.0.2_amd64.xar.zip")
with io.open(self.deb_template, mode="w") as filedesc:
filedesc.write("linux")
with io.open(self.exe_template, mode="w") as filedesc:
filedesc.write("windows")
with io.open(self.xar_template, mode="w") as filedesc:
filedesc.write("darwin")
self.output_dir = temp.TempDirPath()
def testMultipleRepackingNoSigning(self):
client_build.MultiTemplateRepacker().RepackTemplates(
[self.label1_config, self.label2_config],
[self.deb_template, self.exe_template, self.xar_template],
self.output_dir)
# (3 templates + 1 debug) x 2 labels = 8 repacks
self.assertEqual(self.pool_obj.apply_async.call_count, 8)
if __name__ == "__main__":
absltest.main()
|
{
"content_hash": "47e0fd141ae93e4c7c17d2594e68fd4a",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 66,
"avg_line_length": 35.78688524590164,
"alnum_prop": 0.6610169491525424,
"repo_name": "dunkhong/grr",
"id": "564bfbc816ee6523f6e993193cb6d25e3ab08d1d",
"size": "2205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grr/client_builder/grr_response_client_builder/client_build_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "Batchfile",
"bytes": "882"
},
{
"name": "C",
"bytes": "11321"
},
{
"name": "C++",
"bytes": "54535"
},
{
"name": "CSS",
"bytes": "36745"
},
{
"name": "Dockerfile",
"bytes": "1822"
},
{
"name": "HCL",
"bytes": "8451"
},
{
"name": "HTML",
"bytes": "193751"
},
{
"name": "JavaScript",
"bytes": "12795"
},
{
"name": "Jupyter Notebook",
"bytes": "199190"
},
{
"name": "Makefile",
"bytes": "3139"
},
{
"name": "PowerShell",
"bytes": "1984"
},
{
"name": "Python",
"bytes": "7430923"
},
{
"name": "Roff",
"bytes": "444"
},
{
"name": "Shell",
"bytes": "49155"
},
{
"name": "Standard ML",
"bytes": "8172"
},
{
"name": "TSQL",
"bytes": "10560"
},
{
"name": "TypeScript",
"bytes": "56756"
}
],
"symlink_target": ""
}
|
import gettext
#: Version information (major, minor, revision[, 'dev']).
version_info = (2, 0, 2)
#: Version string 'major.minor.revision'.
version = __version__ = ".".join(map(str, version_info))
gettext.install('stalkerutils')
|
{
"content_hash": "7027ae88aa8edf9e6b014e2fb31f3fad",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 57,
"avg_line_length": 28.875,
"alnum_prop": 0.683982683982684,
"repo_name": "pandemicsyn/stalker",
"id": "6b258b5c37c2dbffe84aef18b33e9d896fdc85d2",
"size": "231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stalkerutils/stalkerutils/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "35664"
},
{
"name": "HTML",
"bytes": "48554"
},
{
"name": "JavaScript",
"bytes": "15047"
},
{
"name": "Makefile",
"bytes": "2004"
},
{
"name": "Python",
"bytes": "134178"
},
{
"name": "Shell",
"bytes": "9495"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import json
import random
import string
import textwrap
from io import StringIO
from unittest import mock
import paramiko
import pytest
from airflow import settings
from airflow.exceptions import AirflowException
from airflow.models import Connection
from airflow.providers.ssh.hooks.ssh import SSHHook
from airflow.utils import db
from airflow.utils.session import create_session
HELLO_SERVER_CMD = """
import socket, sys
listener = socket.socket()
listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
listener.bind(('localhost', 2134))
listener.listen(1)
sys.stdout.write('ready')
sys.stdout.flush()
conn = listener.accept()[0]
conn.sendall(b'hello')
"""
def generate_key_string(pkey: paramiko.PKey, passphrase: str | None = None):
key_fh = StringIO()
pkey.write_private_key(key_fh, password=passphrase)
key_fh.seek(0)
key_str = key_fh.read()
return key_str
def generate_host_key(pkey: paramiko.PKey):
key_fh = StringIO()
pkey.write_private_key(key_fh)
key_fh.seek(0)
key_obj = paramiko.RSAKey(file_obj=key_fh)
return key_obj.get_base64()
TEST_PKEY = paramiko.RSAKey.generate(4096)
TEST_PRIVATE_KEY = generate_key_string(pkey=TEST_PKEY)
TEST_HOST_KEY = generate_host_key(pkey=TEST_PKEY)
TEST_PKEY_ECDSA = paramiko.ECDSAKey.generate()
TEST_PRIVATE_KEY_ECDSA = generate_key_string(pkey=TEST_PKEY_ECDSA)
TEST_TIMEOUT = 20
TEST_CONN_TIMEOUT = 30
PASSPHRASE = "".join(random.choice(string.ascii_letters) for _ in range(10))
TEST_ENCRYPTED_PRIVATE_KEY = generate_key_string(pkey=TEST_PKEY, passphrase=PASSPHRASE)
TEST_DISABLED_ALGORITHMS = {"pubkeys": ["rsa-sha2-256", "rsa-sha2-512"]}
TEST_CIPHERS = ["aes128-ctr", "aes192-ctr", "aes256-ctr"]
class TestSSHHook:
CONN_SSH_WITH_NO_EXTRA = "ssh_with_no_extra"
CONN_SSH_WITH_PRIVATE_KEY_EXTRA = "ssh_with_private_key_extra"
CONN_SSH_WITH_PRIVATE_KEY_ECDSA_EXTRA = "ssh_with_private_key_ecdsa_extra"
CONN_SSH_WITH_PRIVATE_KEY_PASSPHRASE_EXTRA = "ssh_with_private_key_passphrase_extra"
CONN_SSH_WITH_TIMEOUT_EXTRA = "ssh_with_timeout_extra"
CONN_SSH_WITH_CONN_TIMEOUT_EXTRA = "ssh_with_conn_timeout_extra"
CONN_SSH_WITH_TIMEOUT_AND_CONN_TIMEOUT_EXTRA = "ssh_with_timeout_and_conn_timeout_extra"
CONN_SSH_WITH_EXTRA = "ssh_with_extra"
CONN_SSH_WITH_EXTRA_FALSE_LOOK_FOR_KEYS = "ssh_with_extra_false_look_for_keys"
CONN_SSH_WITH_HOST_KEY_EXTRA = "ssh_with_host_key_extra"
CONN_SSH_WITH_HOST_KEY_EXTRA_WITH_TYPE = "ssh_with_host_key_extra_with_type"
CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE = "ssh_with_host_key_and_no_host_key_check_false"
CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE = "ssh_with_host_key_and_no_host_key_check_true"
CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE = "ssh_with_no_host_key_and_no_host_key_check_false"
CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE = "ssh_with_no_host_key_and_no_host_key_check_true"
CONN_SSH_WITH_HOST_KEY_AND_ALLOW_HOST_KEY_CHANGES_TRUE = (
"ssh_with_host_key_and_allow_host_key_changes_true"
)
CONN_SSH_WITH_EXTRA_DISABLED_ALGORITHMS = "ssh_with_extra_disabled_algorithms"
CONN_SSH_WITH_EXTRA_CIPHERS = "ssh_with_extra_ciphers"
CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_TRUE = (
"ssh_with_no_host_key_check_true_and_allow_host_key_changes_true"
)
CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_FALSE = (
"ssh_with_no_host_key_check_true_and_allow_host_key_changes_false"
)
@classmethod
def teardown_class(cls) -> None:
with create_session() as session:
conns_to_reset = [
cls.CONN_SSH_WITH_NO_EXTRA,
cls.CONN_SSH_WITH_PRIVATE_KEY_EXTRA,
cls.CONN_SSH_WITH_PRIVATE_KEY_PASSPHRASE_EXTRA,
cls.CONN_SSH_WITH_PRIVATE_KEY_ECDSA_EXTRA,
cls.CONN_SSH_WITH_TIMEOUT_EXTRA,
cls.CONN_SSH_WITH_CONN_TIMEOUT_EXTRA,
cls.CONN_SSH_WITH_TIMEOUT_AND_CONN_TIMEOUT_EXTRA,
cls.CONN_SSH_WITH_EXTRA,
cls.CONN_SSH_WITH_HOST_KEY_EXTRA,
cls.CONN_SSH_WITH_HOST_KEY_EXTRA_WITH_TYPE,
cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE,
cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE,
cls.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE,
cls.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE,
cls.CONN_SSH_WITH_EXTRA_DISABLED_ALGORITHMS,
cls.CONN_SSH_WITH_EXTRA_CIPHERS,
cls.CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_TRUE,
cls.CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_FALSE,
]
connections = session.query(Connection).filter(Connection.conn_id.in_(conns_to_reset))
connections.delete(synchronize_session=False)
session.commit()
@classmethod
def setup_class(cls) -> None:
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_NO_EXTRA,
host="localhost",
conn_type="ssh",
extra=None,
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_EXTRA,
host="localhost",
conn_type="ssh",
extra='{"compress" : true, "no_host_key_check" : "true", "allow_host_key_change": false}',
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_EXTRA_FALSE_LOOK_FOR_KEYS,
host="localhost",
conn_type="ssh",
extra='{"compress" : true, "no_host_key_check" : "true", '
'"allow_host_key_change": false, "look_for_keys": false}',
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_PRIVATE_KEY_EXTRA,
host="localhost",
conn_type="ssh",
extra=json.dumps({"private_key": TEST_PRIVATE_KEY}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_PRIVATE_KEY_PASSPHRASE_EXTRA,
host="localhost",
conn_type="ssh",
extra=json.dumps(
{"private_key": TEST_ENCRYPTED_PRIVATE_KEY, "private_key_passphrase": PASSPHRASE}
),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_PRIVATE_KEY_ECDSA_EXTRA,
host="localhost",
conn_type="ssh",
extra=json.dumps({"private_key": TEST_PRIVATE_KEY_ECDSA}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_TIMEOUT_EXTRA,
host="localhost",
conn_type="ssh",
extra=json.dumps({"timeout": TEST_TIMEOUT}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_CONN_TIMEOUT_EXTRA,
host="localhost",
conn_type="ssh",
extra=json.dumps({"conn_timeout": TEST_CONN_TIMEOUT}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_TIMEOUT_AND_CONN_TIMEOUT_EXTRA,
host="localhost",
conn_type="ssh",
extra=json.dumps({"conn_timeout": TEST_CONN_TIMEOUT, "timeout": TEST_TIMEOUT}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_HOST_KEY_EXTRA,
host="localhost",
conn_type="ssh",
extra=json.dumps({"private_key": TEST_PRIVATE_KEY, "host_key": TEST_HOST_KEY}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_HOST_KEY_EXTRA_WITH_TYPE,
host="localhost",
conn_type="ssh",
extra=json.dumps({"private_key": TEST_PRIVATE_KEY, "host_key": "ssh-rsa " + TEST_HOST_KEY}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE,
host="remote_host",
conn_type="ssh",
extra=json.dumps(
{"private_key": TEST_PRIVATE_KEY, "host_key": TEST_HOST_KEY, "no_host_key_check": False}
),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE,
host="remote_host",
conn_type="ssh",
extra=json.dumps(
{"private_key": TEST_PRIVATE_KEY, "host_key": TEST_HOST_KEY, "no_host_key_check": True}
),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE,
host="remote_host",
conn_type="ssh",
extra=json.dumps({"private_key": TEST_PRIVATE_KEY, "no_host_key_check": False}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE,
host="remote_host",
conn_type="ssh",
extra=json.dumps({"private_key": TEST_PRIVATE_KEY, "no_host_key_check": True}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_HOST_KEY_AND_ALLOW_HOST_KEY_CHANGES_TRUE,
host="remote_host",
conn_type="ssh",
extra=json.dumps(
{
"private_key": TEST_PRIVATE_KEY,
"host_key": TEST_HOST_KEY,
"allow_host_key_change": True,
}
),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_EXTRA_DISABLED_ALGORITHMS,
host="localhost",
conn_type="ssh",
extra=json.dumps({"disabled_algorithms": TEST_DISABLED_ALGORITHMS}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_EXTRA_CIPHERS,
host="localhost",
conn_type="ssh",
extra=json.dumps({"ciphers": TEST_CIPHERS}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_TRUE,
host="remote_host",
conn_type="ssh",
extra=json.dumps({"no_host_key_check": True, "allow_host_key_change": True}),
)
)
db.merge_conn(
Connection(
conn_id=cls.CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_FALSE,
host="remote_host",
conn_type="ssh",
extra=json.dumps({"no_host_key_check": True, "allow_host_key_change": False}),
)
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_password(self, ssh_mock):
hook = SSHHook(
remote_host="remote_host",
port="port",
username="username",
password="password",
timeout=10,
key_file="fake.file",
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
password="password",
key_filename="fake.file",
timeout=10,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_without_password(self, ssh_mock):
hook = SSHHook(
remote_host="remote_host", port="port", username="username", timeout=10, key_file="fake.file"
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
key_filename="fake.file",
timeout=10,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.SSHTunnelForwarder")
def test_tunnel_with_password(self, ssh_mock):
hook = SSHHook(
remote_host="remote_host",
port="port",
username="username",
password="password",
timeout=10,
key_file="fake.file",
)
with hook.get_tunnel(1234):
ssh_mock.assert_called_once_with(
"remote_host",
ssh_port="port",
ssh_username="username",
ssh_password="password",
ssh_pkey="fake.file",
ssh_proxy=None,
local_bind_address=("localhost",),
remote_bind_address=("localhost", 1234),
logger=hook.log,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.SSHTunnelForwarder")
def test_tunnel_without_password(self, ssh_mock):
hook = SSHHook(
remote_host="remote_host", port="port", username="username", timeout=10, key_file="fake.file"
)
with hook.get_tunnel(1234):
ssh_mock.assert_called_once_with(
"remote_host",
ssh_port="port",
ssh_username="username",
ssh_pkey="fake.file",
ssh_proxy=None,
local_bind_address=("localhost",),
remote_bind_address=("localhost", 1234),
host_pkey_directories=None,
logger=hook.log,
)
def test_conn_with_extra_parameters(self):
ssh_hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_EXTRA)
assert ssh_hook.compress is True
assert ssh_hook.no_host_key_check is True
assert ssh_hook.allow_host_key_change is False
assert ssh_hook.look_for_keys is True
def test_conn_with_extra_parameters_false_look_for_keys(self):
ssh_hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_EXTRA_FALSE_LOOK_FOR_KEYS)
assert ssh_hook.look_for_keys is False
@mock.patch("airflow.providers.ssh.hooks.ssh.SSHTunnelForwarder")
def test_tunnel_with_private_key(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_PRIVATE_KEY_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
)
with hook.get_tunnel(1234):
ssh_mock.assert_called_once_with(
"remote_host",
ssh_port="port",
ssh_username="username",
ssh_pkey=TEST_PKEY,
ssh_proxy=None,
local_bind_address=("localhost",),
remote_bind_address=("localhost", 1234),
host_pkey_directories=None,
logger=hook.log,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.SSHTunnelForwarder")
def test_tunnel_with_private_key_passphrase(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_PRIVATE_KEY_PASSPHRASE_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
)
with hook.get_tunnel(1234):
ssh_mock.assert_called_once_with(
"remote_host",
ssh_port="port",
ssh_username="username",
ssh_pkey=TEST_PKEY,
ssh_proxy=None,
local_bind_address=("localhost",),
remote_bind_address=("localhost", 1234),
host_pkey_directories=None,
logger=hook.log,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.SSHTunnelForwarder")
def test_tunnel_with_private_key_ecdsa(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_PRIVATE_KEY_ECDSA_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
)
with hook.get_tunnel(1234):
ssh_mock.assert_called_once_with(
"remote_host",
ssh_port="port",
ssh_username="username",
ssh_pkey=TEST_PKEY_ECDSA,
ssh_proxy=None,
local_bind_address=("localhost",),
remote_bind_address=("localhost", 1234),
host_pkey_directories=None,
logger=hook.log,
)
def test_ssh_connection(self):
hook = SSHHook(ssh_conn_id="ssh_default")
with hook.get_conn() as client:
(_, stdout, _) = client.exec_command("ls")
assert stdout.read() is not None
def test_ssh_connection_no_connection_id(self):
hook = SSHHook(remote_host="localhost")
assert hook.ssh_conn_id is None
with hook.get_conn() as client:
(_, stdout, _) = client.exec_command("ls")
assert stdout.read() is not None
def test_ssh_connection_old_cm(self):
with SSHHook(ssh_conn_id="ssh_default") as hook:
client = hook.get_conn()
(_, stdout, _) = client.exec_command("ls")
assert stdout.read() is not None
def test_tunnel(self):
hook = SSHHook(ssh_conn_id="ssh_default")
import socket
import subprocess
subprocess_kwargs = dict(
args=["python", "-c", HELLO_SERVER_CMD],
stdout=subprocess.PIPE,
)
with subprocess.Popen(**subprocess_kwargs) as server_handle, hook.get_tunnel(
local_port=2135, remote_port=2134
):
server_output = server_handle.stdout.read(5)
assert b"ready" == server_output
socket = socket.socket()
socket.connect(("localhost", 2135))
response = socket.recv(5)
assert response == b"hello"
socket.close()
server_handle.communicate()
assert server_handle.returncode == 0
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_private_key_extra(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_PRIVATE_KEY_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
pkey=TEST_PKEY,
timeout=10,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_private_key_passphrase_extra(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_PRIVATE_KEY_PASSPHRASE_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
pkey=TEST_PKEY,
timeout=10,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_host_key_extra(self, ssh_client):
hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_HOST_KEY_EXTRA)
assert hook.host_key is not None
with hook.get_conn():
assert ssh_client.return_value.connect.called is True
assert ssh_client.return_value.get_host_keys.return_value.add.called
assert ssh_client.return_value.get_host_keys.return_value.add.call_args == mock.call(
hook.remote_host, "ssh-rsa", hook.host_key
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_host_key_extra_with_type(self, ssh_client):
hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_HOST_KEY_EXTRA_WITH_TYPE)
assert hook.host_key is not None
with hook.get_conn():
assert ssh_client.return_value.connect.called is True
assert ssh_client.return_value.get_host_keys.return_value.add.called
assert ssh_client.return_value.get_host_keys.return_value.add.call_args == mock.call(
hook.remote_host, "ssh-rsa", hook.host_key
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_no_host_key_where_no_host_key_check_is_false(self, ssh_client):
hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE)
assert hook.host_key is None
with hook.get_conn():
assert ssh_client.return_value.connect.called is True
assert ssh_client.return_value.get_host_keys.return_value.add.called is False
def test_ssh_connection_with_host_key_where_no_host_key_check_is_true(self):
with pytest.raises(ValueError):
SSHHook(ssh_conn_id=self.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_no_host_key_where_no_host_key_check_is_true(self, ssh_client):
hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE)
assert hook.host_key is None
with hook.get_conn():
assert ssh_client.return_value.connect.called is True
assert ssh_client.return_value.set_missing_host_key_policy.called is True
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_host_key_where_allow_host_key_change_is_true(self, ssh_client):
hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_HOST_KEY_AND_ALLOW_HOST_KEY_CHANGES_TRUE)
assert hook.host_key is not None
with hook.get_conn():
assert ssh_client.return_value.connect.called is True
assert ssh_client.return_value.load_system_host_keys.called is False
assert ssh_client.return_value.set_missing_host_key_policy.called is True
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_conn_timeout(self, ssh_mock):
hook = SSHHook(
remote_host="remote_host",
port="port",
username="username",
password="password",
conn_timeout=20,
key_file="fake.file",
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
password="password",
key_filename="fake.file",
timeout=20,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_conn_timeout_and_timeout(self, ssh_mock):
hook = SSHHook(
remote_host="remote_host",
port="port",
username="username",
password="password",
timeout=10,
conn_timeout=20,
key_file="fake.file",
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
password="password",
key_filename="fake.file",
timeout=20,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_timeout_extra(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_TIMEOUT_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
timeout=20,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_conn_timeout_extra(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_CONN_TIMEOUT_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
conn_timeout=15,
)
# conn_timeout parameter wins over extra options
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
timeout=15,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_timeout_extra_and_conn_timeout_extra(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_TIMEOUT_AND_CONN_TIMEOUT_EXTRA,
remote_host="remote_host",
port="port",
username="username",
timeout=10,
conn_timeout=15,
)
# conn_timeout parameter wins over extra options
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
timeout=15,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@pytest.mark.parametrize(
"timeout, conn_timeout, timeoutextra, conn_timeoutextra, expected_value",
[
(TEST_TIMEOUT, TEST_CONN_TIMEOUT, True, True, TEST_CONN_TIMEOUT),
(TEST_TIMEOUT, TEST_CONN_TIMEOUT, True, False, TEST_CONN_TIMEOUT),
(TEST_TIMEOUT, TEST_CONN_TIMEOUT, False, True, TEST_CONN_TIMEOUT),
(TEST_TIMEOUT, TEST_CONN_TIMEOUT, False, False, TEST_CONN_TIMEOUT),
(TEST_TIMEOUT, None, True, True, TEST_CONN_TIMEOUT),
(TEST_TIMEOUT, None, True, False, TEST_TIMEOUT),
(TEST_TIMEOUT, None, False, True, TEST_CONN_TIMEOUT),
(TEST_TIMEOUT, None, False, False, TEST_TIMEOUT),
(None, TEST_CONN_TIMEOUT, True, True, TEST_CONN_TIMEOUT),
(None, TEST_CONN_TIMEOUT, True, False, TEST_CONN_TIMEOUT),
(None, TEST_CONN_TIMEOUT, False, True, TEST_CONN_TIMEOUT),
(None, TEST_CONN_TIMEOUT, False, False, TEST_CONN_TIMEOUT),
(None, None, True, True, TEST_CONN_TIMEOUT),
(None, None, True, False, TEST_TIMEOUT),
(None, None, False, True, TEST_CONN_TIMEOUT),
(None, None, False, False, 10),
],
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_all_timeout_param_and_extra_combinations(
self, ssh_mock, timeout, conn_timeout, timeoutextra, conn_timeoutextra, expected_value
):
if timeoutextra and conn_timeoutextra:
ssh_conn_id = self.CONN_SSH_WITH_TIMEOUT_AND_CONN_TIMEOUT_EXTRA
elif timeoutextra and not conn_timeoutextra:
ssh_conn_id = self.CONN_SSH_WITH_TIMEOUT_EXTRA
elif not timeoutextra and conn_timeoutextra:
ssh_conn_id = self.CONN_SSH_WITH_CONN_TIMEOUT_EXTRA
else:
ssh_conn_id = self.CONN_SSH_WITH_NO_EXTRA
hook = SSHHook(
ssh_conn_id=ssh_conn_id,
remote_host="remote_host",
port="port",
username="username",
timeout=timeout,
conn_timeout=conn_timeout,
)
# conn_timeout parameter wins over extra options
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
timeout=expected_value,
compress=True,
port="port",
sock=None,
look_for_keys=True,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_with_extra_disabled_algorithms(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_EXTRA_DISABLED_ALGORITHMS,
remote_host="remote_host",
port="port",
username="username",
)
with hook.get_conn():
ssh_mock.return_value.connect.assert_called_once_with(
banner_timeout=30.0,
hostname="remote_host",
username="username",
compress=True,
timeout=10,
port="port",
sock=None,
look_for_keys=True,
disabled_algorithms=TEST_DISABLED_ALGORITHMS,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_with_extra_ciphers(self, ssh_mock):
hook = SSHHook(
ssh_conn_id=self.CONN_SSH_WITH_EXTRA_CIPHERS,
remote_host="remote_host",
port="port",
username="username",
)
with hook.get_conn():
transport = ssh_mock.return_value.get_transport.return_value
assert transport.get_security_options.return_value.ciphers == TEST_CIPHERS
def test_openssh_private_key(self):
        # Paramiko behaves differently with OpenSSH-generated keys than with
        # paramiko-generated keys, so we need a test one.
        # This key has been generated specifically to put here; it is not otherwise in use.
TEST_OPENSSH_PRIVATE_KEY = "-----BEGIN OPENSSH " + textwrap.dedent(
"""\
PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAlwAAAAdzc2gtcn
NhAAAAAwEAAQAAAIEAuPKIGPWtIpMDrXwMAvNKQlhQ1gXV/tKyufElw/n6hrr6lvtfGhwX
DihHMsAF+8+KKWQjWgh0fttbIF3+3C56Ns8hgvgMQJT2nyWd7egwqn+LQa08uCEBEka3MO
arKzj39P66EZ/KQDD29VErlVOd97dPhaR8pOZvzcHxtLbU6rMAAAIA3uBiZd7gYmUAAAAH
c3NoLXJzYQAAAIEAuPKIGPWtIpMDrXwMAvNKQlhQ1gXV/tKyufElw/n6hrr6lvtfGhwXDi
hHMsAF+8+KKWQjWgh0fttbIF3+3C56Ns8hgvgMQJT2nyWd7egwqn+LQa08uCEBEka3MOar
Kzj39P66EZ/KQDD29VErlVOd97dPhaR8pOZvzcHxtLbU6rMAAAADAQABAAAAgA2QC5b4/T
dZ3J0uSZs1yC5RV6w6RVUokl68Zm6WuF6E+7dyu6iogrBRF9eK6WVr9M/QPh9uG0zqPSaE
fhobdm7KeycXmtDtrJnXE2ZSk4oU29++TvYZBrAqAli9aHlSArwiLnOIMzY/kIHoSJLJmd
jwXykdQ7QAd93KPEnkaMzBAAAAQGTyp6/wWqtqpMmYJ5prCGNtpVOGthW5upeiuQUytE/K
5pyPoq6dUCUxQpkprtkuNAv/ff9nW6yy1v2DWohKfaEAAABBAO3y+erRXmiMreMOAd1S84
RK2E/LUHOvClQqf6GnVavmIgkxIYEgjcFiWv4xIkTc1/FN6aX5aT4MB3srvuM7sxEAAABB
AMb6QAkvxo4hT/xKY0E0nG7zCUMXeBV35MEXQK0/InFC7aZ0tjzFsQJzLe/7q7ljIf+9/O
rCqNhxgOrv7XrRuYMAAAAKYXNoQHNpbm9wZQE=
-----END OPENSSH PRIVATE KEY-----
"""
)
session = settings.Session()
try:
conn = Connection(
conn_id="openssh_pkey",
host="localhost",
conn_type="ssh",
extra={"private_key": TEST_OPENSSH_PRIVATE_KEY},
)
session.add(conn)
session.flush()
hook = SSHHook(ssh_conn_id=conn.conn_id)
assert isinstance(hook.pkey, paramiko.RSAKey)
finally:
session.delete(conn)
session.commit()
def test_oneline_key(self):
with pytest.raises(Exception):
TEST_ONELINE_KEY = "-----BEGIN OPENSSHPRIVATE KEY-----asdfg-----END OPENSSHPRIVATE KEY-----"
session = settings.Session()
try:
conn = Connection(
conn_id="openssh_pkey",
host="localhost",
conn_type="ssh",
extra={"private_key": TEST_ONELINE_KEY},
)
session.add(conn)
session.flush()
SSHHook(ssh_conn_id=conn.conn_id)
finally:
session.delete(conn)
session.commit()
@pytest.mark.flaky(max_runs=5, min_passes=1)
def test_exec_ssh_client_command(self):
hook = SSHHook(
ssh_conn_id="ssh_default",
conn_timeout=30,
banner_timeout=100,
)
with hook.get_conn() as client:
ret = hook.exec_ssh_client_command(
client,
"echo airflow",
False,
None,
30,
)
assert ret == (0, b"airflow\n", b"")
@pytest.mark.flaky(max_runs=5, min_passes=1)
def test_command_timeout(self):
hook = SSHHook(
ssh_conn_id="ssh_default",
conn_timeout=30,
banner_timeout=100,
)
with hook.get_conn() as client:
with pytest.raises(AirflowException):
hook.exec_ssh_client_command(
client,
"sleep 10",
False,
None,
1,
)
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_no_host_key_check_true_and_allow_host_key_changes_true(self, ssh_mock):
hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_TRUE)
with hook.get_conn():
assert ssh_mock.return_value.set_missing_host_key_policy.called is True
assert isinstance(
ssh_mock.return_value.set_missing_host_key_policy.call_args[0][0], paramiko.AutoAddPolicy
)
assert ssh_mock.return_value.load_host_keys.called is False
@mock.patch("airflow.providers.ssh.hooks.ssh.paramiko.SSHClient")
def test_ssh_connection_with_no_host_key_check_true_and_allow_host_key_changes_false(self, ssh_mock):
hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_NO_HOST_KEY_CHECK_TRUE_AND_ALLOW_HOST_KEY_CHANGES_FALSE)
with mock.patch("os.path.isfile", return_value=True):
with hook.get_conn():
assert ssh_mock.return_value.set_missing_host_key_policy.called is True
assert isinstance(
ssh_mock.return_value.set_missing_host_key_policy.call_args[0][0], paramiko.AutoAddPolicy
)
assert ssh_mock.return_value.load_host_keys.called is True
ssh_mock.reset_mock()
with mock.patch("os.path.isfile", return_value=False):
with hook.get_conn():
assert ssh_mock.return_value.set_missing_host_key_policy.called is True
assert isinstance(
ssh_mock.return_value.set_missing_host_key_policy.call_args[0][0], paramiko.AutoAddPolicy
)
assert ssh_mock.return_value.load_host_keys.called is False
|
{
"content_hash": "980a5f050a3dbb88ec53e37938166176",
"timestamp": "",
"source": "github",
"line_count": 940,
"max_line_length": 110,
"avg_line_length": 39.12340425531915,
"alnum_prop": 0.5602023058516423,
"repo_name": "apache/airflow",
"id": "6448d88efe49736020ca8108205b2a771d62190d",
"size": "37563",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/providers/ssh/hooks/test_ssh.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25980"
},
{
"name": "Dockerfile",
"bytes": "71458"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "172957"
},
{
"name": "JavaScript",
"bytes": "143915"
},
{
"name": "Jinja",
"bytes": "38911"
},
{
"name": "Jupyter Notebook",
"bytes": "5482"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "23697738"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "211306"
},
{
"name": "TypeScript",
"bytes": "521019"
}
],
"symlink_target": ""
}
|
from lxml import etree
from tempest.common import http
from tempest.common import rest_client
from tempest.common import xml_utils as common
from tempest import config
CONF = config.CONF
XMLNS = "http://docs.openstack.org/identity/api/v3"
class PolicyClientXML(rest_client.RestClient):
TYPE = "xml"
def __init__(self, auth_provider):
super(PolicyClientXML, self).__init__(auth_provider)
self.service = CONF.identity.catalog_type
self.endpoint_url = 'adminURL'
self.api_version = "v3"
def _parse_array(self, node):
array = []
for child in node.getchildren():
tag_list = child.tag.split('}', 1)
if tag_list[1] == "policy":
array.append(common.xml_to_json(child))
return array
def _parse_body(self, body):
json = common.xml_to_json(body)
return json
def request(self, method, url, extra_headers=False, headers=None,
body=None, wait=None):
"""Overriding the existing HTTP request in super class RestClient."""
if extra_headers:
try:
headers.update(self.get_headers())
except (ValueError, TypeError):
headers = self.get_headers()
dscv = CONF.identity.disable_ssl_certificate_validation
self.http_obj = http.ClosingHttp(
disable_ssl_certificate_validation=dscv)
return super(PolicyClientXML, self).request(method, url,
extra_headers,
headers=headers,
body=body)
def create_policy(self, blob, type):
"""Creates a Policy."""
create_policy = common.Element("policy", xmlns=XMLNS,
blob=blob, type=type)
resp, body = self.post('policies', str(common.Document(create_policy)))
self.expected_success(201, resp.status)
body = self._parse_body(etree.fromstring(body))
return resp, body
def list_policies(self):
"""Lists the policies."""
resp, body = self.get('policies')
self.expected_success(200, resp.status)
body = self._parse_array(etree.fromstring(body))
return resp, body
def get_policy(self, policy_id):
"""Lists out the given policy."""
url = 'policies/%s' % policy_id
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = self._parse_body(etree.fromstring(body))
return resp, body
def update_policy(self, policy_id, **kwargs):
"""Updates a policy."""
type = kwargs.get('type')
update_policy = common.Element("policy", xmlns=XMLNS, type=type)
url = 'policies/%s' % policy_id
resp, body = self.patch(url, str(common.Document(update_policy)))
self.expected_success(200, resp.status)
body = self._parse_body(etree.fromstring(body))
return resp, body
def delete_policy(self, policy_id):
"""Deletes the policy."""
url = "policies/%s" % policy_id
resp, body = self.delete(url)
self.expected_success(204, resp.status)
return resp, body
|
{
"content_hash": "a83644d3cee96da2e73a3e0116250f1b",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 79,
"avg_line_length": 36.70786516853933,
"alnum_prop": 0.5791245791245792,
"repo_name": "nikolay-fedotov/tempest",
"id": "41bbfe55638f41268071692f363e1f94b71409ae",
"size": "3903",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/services/identity/v3/xml/policy_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import sys
import fileinput
from foreman.client import Foreman
with open("password_foreman.txt") as f:
password_foreman = f.readline().rstrip()
fore = Foreman(url="https://foreman-crn.acis.ufl.edu", api_version=2,
auth=('mjcollin', password_foreman))
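# Each input line is assumed (inferred from the field order below) to be a
# CSV record of the form:
#   fqdn,hostname,ip,mac
# e.g. node1.example.org,node1,10.0.0.5,aa:bb:cc:dd:ee:ff (hypothetical values)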
for line in fileinput.input():
fields = line.split(",")
fqdn = fields[0]
hostname = fields[1]
ip = fields[2]
mac = fields[3]
fore.hosts.create(
host={
"name": fqdn,
"ip": ip,
"mac": mac,
"build": True,
"hostgroup_id": "32" # tried using *_name but didn't work, had to go
# get id from database:
# select id,name,title from hostgroups;
}
)
|
{
"content_hash": "8840e917e7c35c5f2f7102a632b37c75",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 83,
"avg_line_length": 27.20689655172414,
"alnum_prop": 0.523447401774398,
"repo_name": "acislab/vm_scripts",
"id": "d073a72025b8a151ff58fb0b3f098bb157ffa861",
"size": "987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "csv_to_foreman_recs.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "1987"
},
{
"name": "Python",
"bytes": "2654"
}
],
"symlink_target": ""
}
|
import re
import os
import stat
import json
import Queue
import hashlib
import ansible
import ansible.runner
import ansible.playbook
from ansible import callbacks
from ansible import utils
from flask import flash, redirect, url_for
def command_runner(user, command, inventory, view):
if re.findall('rm', command) or re.findall('mv', command):
        flash(u'Command contains a delete (rm) or move (mv) operation')
return redirect(url_for(view))
res = ansible.runner.Runner(
        module_name='shell',  # invoke the shell module; this example executes a shell command
        module_args=command,  # the shell command to run
remote_user=user,
host_list=inventory,
pattern='all',
private_key_file='/root/.ssh/id_rsa'
).run()
return res
def playbook_runner(playbook, inventory):
stats = callbacks.AggregateStats()
playbook_cb = callbacks.PlaybookCallbacks(verbose=utils.VERBOSITY)
runner_cb = callbacks.PlaybookRunnerCallbacks(stats, verbose=utils.VERBOSITY)
res = ansible.playbook.PlayBook(
playbook=playbook,
stats=stats,
callbacks=playbook_cb,
runner_callbacks=runner_cb,
host_list=inventory,
).run()
return res
def task_runner(taskList, inventory):
task_res = []
for task in taskList:
res = playbook_runner(task['path'], inventory)
task_res.append({
'name' : task['name'],
'res' : res,
})
if not task_res:
return False
return task_res
def GenerateInventory(current_app, devices=None):
if not devices:
return None
app = current_app._get_current_object()
FLASK_TMP_HOME = app.config['FLASK_TMP_HOME']
print FLASK_TMP_HOME
if not os.path.exists(FLASK_TMP_HOME):
os.mkdir(FLASK_TMP_HOME)
Inventory_devices = {
"devices": {
'hosts': [],
},
}
print Inventory_devices
for device in devices:
if device.ip is None:
            flash(u'Device {0} has no IP address set.'.format(device.hostname))
return None
Inventory_devices['devices']['hosts'].append(device.ip)
if len(Inventory_devices['devices']['hosts']) < 1:
return None
Inventory_devices = json.dumps(Inventory_devices)
print Inventory_devices
md5 = hashlib.md5(Inventory_devices)
print md5.hexdigest()
json_devices = '''#!/usr/bin/env python\n# encoding: utf-8\nimport json\ndevices = json.dumps({0})\nprint devices\n'''.format(
Inventory_devices)
Inventory_devices_file = FLASK_TMP_HOME + '/tasks/{0}'.format(str(md5.hexdigest()))
with open(Inventory_devices_file, 'w') as f:
f.write(json_devices)
os.chmod(Inventory_devices_file, stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH)
return Inventory_devices_file
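# For illustration only (hypothetical IPs, not part of the original module),
# the generated inventory file is an executable script along these lines,
# which Ansible runs as a dynamic inventory source:
#
#   #!/usr/bin/env python
#   # encoding: utf-8
#   import json
#   devices = json.dumps({"devices": {"hosts": ["10.0.0.5", "10.0.0.6"]}})
#   print devices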
|
{
"content_hash": "0a16c5f332a8c2aeace2d9bbf685f87a",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 130,
"avg_line_length": 25.60747663551402,
"alnum_prop": 0.6401459854014598,
"repo_name": "kefatong/ops",
"id": "e599b6039bdabb155123a95c80ad59a2d1541354",
"size": "2832",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/main/ansible_tasks.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "342130"
},
{
"name": "HTML",
"bytes": "2417702"
},
{
"name": "JavaScript",
"bytes": "5814095"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "119862"
}
],
"symlink_target": ""
}
|
from cms.models.pluginmodel import CMSPlugin
from cms.models.pagemodel import Page
from django.utils.translation import ugettext_lazy as _
from django.db import models
class NavbarPluginModel(CMSPlugin):
DISPLAY_CHOICES =(("", _("default")),
("navbar-fixed-top", _("fixed to top")),
("navbar-fixed-bottom", _("fixed to bottom")),
("navbar-static-top", _("static top")),
)
USER_ICON_CHOICES = (("",_("grey")),
("icon-white",_("white")),
)
navbar_type = models.CharField(_("navbar type"), max_length=64, blank=True, default="navbar-fixed-top", choices=DISPLAY_CHOICES)
inverted = models.BooleanField(default=False)
brand = models.CharField(max_length=80, default='', blank=True)
link_to_children = models.BooleanField(default=True,
help_text=_("Show links to all navigable children of the home page. NOTE: You must set the home page's id to 'home' under the advanced settings. Will not work if there is a 'softroot' in your CMS."))
icon_type = models.CharField(_("user actions icon type"), max_length=24, blank=True, choices=USER_ICON_CHOICES)
def __unicode__(self):
if self.brand:
return self.brand
else:
return ""
|
{
"content_hash": "a2a5fa04609d7c0f91918ce394db7cbc",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 211,
"avg_line_length": 49.48148148148148,
"alnum_prop": 0.6070359281437125,
"repo_name": "RacingTadpole/cmsplugin-rt",
"id": "6de00f6ee8998216f2033161063135b19348b3ff",
"size": "1353",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cmsplugin_rt/navbar/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1537"
},
{
"name": "Python",
"bytes": "175431"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('goals', '0097_auto_20151125_1802'),
]
operations = [
migrations.AddField(
model_name='trigger',
name='start_when_selected',
field=models.BooleanField(default=False, help_text='Should this trigger start on the day the user selects the action? '),
),
]
|
{
"content_hash": "4eb38cd7270d80edba1fdbc27c21e559",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 133,
"avg_line_length": 26.166666666666668,
"alnum_prop": 0.6284501061571125,
"repo_name": "izzyalonso/tndata_backend",
"id": "cbded46db66a37989804cf5bb586dc6e536856ea",
"size": "495",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tndata_backend/goals/migrations/0098_trigger_start_when_selected.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "29078"
},
{
"name": "HTML",
"bytes": "680433"
},
{
"name": "JavaScript",
"bytes": "186991"
},
{
"name": "Makefile",
"bytes": "393"
},
{
"name": "Python",
"bytes": "2023392"
},
{
"name": "Shell",
"bytes": "2282"
}
],
"symlink_target": ""
}
|
"""Extract, format and print information about Python stack traces."""
import linecache
import string
import sys
import types
def _print(file, str='', terminator='\n'):
file.write(str+terminator)
def print_list(extracted_list, file=None):
"""Print the list of tuples as returned by extract_tb() or
extract_stack() as a formatted stack trace to the given file."""
if not file:
file = sys.stderr
for filename, lineno, name, line in extracted_list:
_print(file,
' File "%s", line %d, in %s' % (filename,lineno,name))
if line:
_print(file, ' %s' % string.strip(line))
def format_list(extracted_list):
"""Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with
the same index in the argument list. Each string ends in a
newline; the strings may contain internal newlines as well, for
those items whose source text line is not None."""
list = []
for filename, lineno, name, line in extracted_list:
item = ' File "%s", line %d, in %s\n' % (filename,lineno,name)
if line:
item = item + ' %s\n' % string.strip(line)
list.append(item)
return list
def print_tb(tb, limit=None, file=None):
"""Print up to 'limit' stack trace entries from the traceback 'tb'.
If 'limit' is omitted or None, all entries are printed. If 'file' is
omitted or None, the output goes to sys.stderr; otherwise 'file'
should be an open file or file-like object with a write() method."""
if not file:
file = sys.stderr
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
n = 0
while tb is not None and (limit is None or n < limit):
f = tb.tb_frame
lineno = tb_lineno(tb)
co = f.f_code
filename = co.co_filename
name = co.co_name
_print(file,
' File "%s", line %d, in %s' % (filename,lineno,name))
line = linecache.getline(filename, lineno)
if line: _print(file, ' ' + string.strip(line))
tb = tb.tb_next
n = n+1
def format_tb(tb, limit = None):
"""A shorthand for 'format_list(extract_stack(f, limit))."""
return format_list(extract_tb(tb, limit))
def extract_tb(tb, limit = None):
"""Return a list of up to 'limit' pre-processed stack trace entries
extracted from the traceback object 'traceback'. This is useful for
alternate formatting of stack traces. If 'limit' is omitted or None,
all entries are extracted. A pre-processed stack trace entry is a
quadruple (filename, line number, function name, text) representing
the information that is usually printed for a stack trace. The text
is a string with leading and trailing whitespace stripped; if the
source is not available it is None."""
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while tb is not None and (limit is None or n < limit):
f = tb.tb_frame
lineno = tb_lineno(tb)
co = f.f_code
filename = co.co_filename
name = co.co_name
line = linecache.getline(filename, lineno)
if line: line = string.strip(line)
else: line = None
list.append((filename, lineno, name, line))
tb = tb.tb_next
n = n+1
return list
def print_exception(etype, value, tb, limit=None, file=None):
"""Print exception information and up to 'limit' stack trace entries
from the traceback 'tb' to 'file'. This differs from print_tb() in
the following ways: (1) if traceback is not None, it prints a header
"Traceback (most recent call last):"; (2) it prints the exception type and
value after the stack trace; (3) if type is SyntaxError and value has
the appropriate format, it prints the line where the syntax error
occurred with a caret on the next line indicating the approximate
position of the error."""
if not file:
file = sys.stderr
if tb:
_print(file, 'Traceback (most recent call last):')
print_tb(tb, limit, file)
lines = format_exception_only(etype, value)
for line in lines[:-1]:
_print(file, line, ' ')
_print(file, lines[-1], '')
def format_exception(etype, value, tb, limit = None):
"""Format a stack trace and the exception information. The arguments
have the same meaning as the corresponding arguments to
print_exception(). The return value is a list of strings, each
ending in a newline and some containing internal newlines. When
these lines are concatenated and printed, exactly the same text is
printed as does print_exception()."""
if tb:
list = ['Traceback (most recent call last):\n']
list = list + format_tb(tb, limit)
else:
list = []
list = list + format_exception_only(etype, value)
return list
def format_exception_only(etype, value):
"""Format the exception part of a traceback. The arguments are the
exception type and value such as given by sys.last_type and
sys.last_value. The return value is a list of strings, each ending
in a newline. Normally, the list contains a single string;
however, for SyntaxError exceptions, it contains several lines that
(when printed) display detailed information about where the syntax
error occurred. The message indicating which exception occurred is
    always the last string in the list."""
list = []
if type(etype) == types.ClassType:
stype = etype.__name__
else:
stype = etype
if value is None:
list.append(str(stype) + '\n')
else:
if etype is SyntaxError:
try:
msg, (filename, lineno, offset, line) = value
except:
pass
else:
if not filename: filename = "<string>"
list.append(' File "%s", line %d\n' %
(filename, lineno))
i = 0
while i < len(line) and \
line[i] in string.whitespace:
i = i+1
list.append(' %s\n' % string.strip(line))
s = ' '
for c in line[i:offset-1]:
if c in string.whitespace:
s = s + c
else:
s = s + ' '
list.append('%s^\n' % s)
value = msg
list.append('%s: %s\n' % (str(stype), _some_str(value)))
return list
def _some_str(value):
try:
return str(value)
except:
return '<unprintable %s object>' % type(value).__name__
def print_exc(limit=None, file=None):
"""This is a shorthand for 'print_exception(sys.exc_type,
sys.exc_value, sys.exc_traceback, limit, file)'.
(In fact, it uses sys.exc_info() to retrieve the same information
in a thread-safe way.)"""
if not file:
file = sys.stderr
try:
etype, value, tb = sys.exc_info()
print_exception(etype, value, tb, limit, file)
finally:
etype = value = tb = None
def print_last(limit=None, file=None):
"""This is a shorthand for 'print_exception(sys.last_type,
sys.last_value, sys.last_traceback, limit, file)'."""
if not file:
file = sys.stderr
print_exception(sys.last_type, sys.last_value, sys.last_traceback,
limit, file)
def print_stack(f=None, limit=None, file=None):
"""This function prints a stack trace from its invocation point.
The optional 'f' argument can be used to specify an alternate stack
frame at which to start. The optional 'limit' and 'file' arguments
have the same meaning as for print_exception()."""
if f is None:
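        # Raise and immediately catch a throwaway exception to obtain the
        # caller's frame from sys.exc_info(); format_stack() and
        # extract_stack() below reuse the same trick.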
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
print_list(extract_stack(f, limit), file)
def format_stack(f=None, limit=None):
"""A shorthand for 'format_list(extract_stack(f, limit))'."""
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
return format_list(extract_stack(f, limit))
def extract_stack(f=None, limit = None):
"""Extract the raw traceback from the current stack frame. The
return value has the same format as for extract_tb(). The optional
'f' and 'limit' arguments have the same meaning as for print_stack().
Each item in the list is a quadruple (filename, line number,
function name, text), and the entries are in order from oldest
to newest stack frame."""
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while f is not None and (limit is None or n < limit):
lineno = f.f_lineno # XXX Too bad if -O is used
co = f.f_code
filename = co.co_filename
name = co.co_name
line = linecache.getline(filename, lineno)
if line: line = string.strip(line)
else: line = None
list.append((filename, lineno, name, line))
f = f.f_back
n = n+1
list.reverse()
return list
def tb_lineno(tb):
"""Calculate the correct line number of the traceback given in tb
(even with -O on)."""
# Coded by Marc-Andre Lemburg from the example of PyCode_Addr2Line()
# in compile.c.
# Revised version by Jim Hugunin to work with JPython too.
c = tb.tb_frame.f_code
if not hasattr(c, 'co_lnotab'):
return tb.tb_lineno
tab = c.co_lnotab
line = c.co_firstlineno
stopat = tb.tb_lasti
addr = 0
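    # co_lnotab is a packed byte string of (bytecode offset increment,
    # line number increment) pairs; accumulate offsets until we pass the
    # last-executed instruction index (tb_lasti).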
for i in range(0, len(tab), 2):
addr = addr + ord(tab[i])
if addr > stopat:
break
line = line + ord(tab[i+1])
return line
|
{
"content_hash": "c54deb47f520e3221815379f98309577",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 75,
"avg_line_length": 32.85766423357664,
"alnum_prop": 0.6871042985671443,
"repo_name": "MalloyPower/parsing-python",
"id": "b733598f258476b34721d8b99a133e7d28a65eb0",
"size": "9003",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.0/Lib/dos-8x3/tracebac.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
}
|
"""b3j0f.sync version module."""
__all__ = ['__version__']
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into the utils module
# thanks to https://github.com/pycontribs/jira/blob/master/jira/version.py
#: project version
__version__ = '0.1.0'
|
{
"content_hash": "0f34a006966f56d8dbe15673159293c5",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 74,
"avg_line_length": 30.5,
"alnum_prop": 0.680327868852459,
"repo_name": "b3j0f/sync",
"id": "8e3fa88dff41be304e3fa316e16da0ee09a3c828",
"size": "1686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "b3j0f/sync/version.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "74316"
}
],
"symlink_target": ""
}
|
''' view_log_meta worker '''
import pprint
class ViewLogMeta(object):
''' ViewLogMeta: Generates a view for meta data on the sample '''
dependencies = ['log_meta']
def execute(self, input_data):
''' Execute the ViewLogMeta worker '''
        # Deprecation candidate unless something more interesting happens with this class
return input_data['log_meta']
# Unit test: create the class, build the proper input, and run the execute() method as a test
def test():
''' view_log_meta.py: Unit test'''
# This worker test requires a local server running
import zerorpc
workbench = zerorpc.Client(timeout=300, heartbeat=60)
workbench.connect("tcp://127.0.0.1:4242")
# Generate input for the worker
import os
data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../data/log/system.log')
md5 = workbench.store_sample(open(data_path, 'rb').read(), 'system.log', 'log')
input_data = workbench.work_request('log_meta', md5)
# Execute the worker (unit test)
worker = ViewLogMeta()
output = worker.execute(input_data)
print '\n<<< Unit Test >>>'
pprint.pprint(output)
# Execute the worker (server test)
output = workbench.work_request('view_log_meta', md5)
print '\n<<< Server Test >>>'
pprint.pprint(output)
if __name__ == "__main__":
test()
|
{
"content_hash": "cdc4926355e6f3afdcace43d48b8d548",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 99,
"avg_line_length": 33.625,
"alnum_prop": 0.6505576208178439,
"repo_name": "SuperCowPowers/workbench",
"id": "56d908184d04d84bfc4f12f1a8a9a483ccc844b4",
"size": "1346",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "workbench/workers/view_log_meta.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Bro",
"bytes": "10331"
},
{
"name": "HTML",
"bytes": "4031"
},
{
"name": "JavaScript",
"bytes": "2809"
},
{
"name": "Jupyter Notebook",
"bytes": "887764"
},
{
"name": "Makefile",
"bytes": "899"
},
{
"name": "Python",
"bytes": "333606"
}
],
"symlink_target": ""
}
|
import pytest
from plenum.test.checkpoints.helper import check_for_nodes, check_stable_checkpoint
from stp_core.common.log import getlogger
from plenum.test.conftest import getValueFromModule
from plenum.test.helper import waitForViewChange, \
sdk_send_random_and_check
from plenum.test.node_catchup.helper import ensure_all_nodes_have_same_data
from plenum.test.test_node import ensureElectionsDone
from plenum.test.view_change.helper import start_stopped_node
from plenum.test.primary_selection.test_recover_more_than_f_failure import \
stop_primary
from stp_core.loop.eventually import eventually
logger = getlogger()
@pytest.fixture(scope="module")
def tconf(tconf):
old_val = tconf.ToleratePrimaryDisconnection
tconf.ToleratePrimaryDisconnection = 1000
yield tconf
tconf.ToleratePrimaryDisconnection = old_val
def test_recover_stop_primaries_no_view_change(looper, checkpoint_size, txnPoolNodeSet,
allPluginsPath, tdir, tconf, sdk_pool_handle,
sdk_wallet_steward):
"""
    Test that the pool recovers after stopping and restarting the current master primary (no view change expected):
- send txns
- stop current master primary
- restart current master primary
- send txns
"""
active_nodes = list(txnPoolNodeSet)
assert 4 == len(active_nodes)
initial_view_no = active_nodes[0].viewNo
checkpoint_freq = tconf.CHK_FREQ
logger.info("send at least one checkpoint")
check_for_nodes(active_nodes, check_stable_checkpoint, 0)
sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
sdk_wallet_steward, 2 * checkpoint_size)
# TODO: When stable checkpoint is not deleted it makes sense to check just our last checkpoint
# and remove eventually
looper.run(eventually(check_for_nodes, active_nodes, check_stable_checkpoint, 2 * checkpoint_freq))
ensure_all_nodes_have_same_data(looper, nodes=active_nodes)
logger.info("Stop first node (current Primary)")
stopped_node, active_nodes = stop_primary(looper, active_nodes)
logger.info("Restart the primary node")
restarted_node = start_stopped_node(stopped_node, looper, tconf, tdir, allPluginsPath)
# TODO: Actually I'm not sure that this is a correct behavior. Can we restore stable
# checkpoint just from audit ledger or node status db?
check_for_nodes([restarted_node], check_stable_checkpoint, 0)
check_for_nodes(active_nodes, check_stable_checkpoint, 2 * checkpoint_freq)
active_nodes = active_nodes + [restarted_node]
logger.info("Check that primary selected")
ensureElectionsDone(looper=looper, nodes=active_nodes,
instances_list=range(2), customTimeout=30)
waitForViewChange(looper, active_nodes, expectedViewNo=0)
ensure_all_nodes_have_same_data(looper, nodes=active_nodes,
exclude_from_check=['check_last_ordered_3pc_backup'])
logger.info("Check if the pool is able to process requests")
sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
sdk_wallet_steward, 10 * checkpoint_size)
ensure_all_nodes_have_same_data(looper, nodes=active_nodes,
exclude_from_check=['check_last_ordered_3pc_backup'])
looper.run(eventually(check_for_nodes, active_nodes, check_stable_checkpoint, 12 * checkpoint_freq))
|
{
"content_hash": "242460714b26d58e5f4ef25d25bf6e09",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 104,
"avg_line_length": 45.48684210526316,
"alnum_prop": 0.6994503905120046,
"repo_name": "evernym/zeno",
"id": "72d5954dde105bf53975a7d8374189ed07963efa",
"size": "3457",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plenum/test/primary_selection/test_recover_primary_no_view_change.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "531061"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('builds', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Application',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=50, verbose_name='Name')),
],
options={
'ordering': ('name',),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Flavor',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=50, verbose_name='Name')),
('download_root', models.FilePathField(recursive=True, allow_files=False, max_length=250, allow_folders=True, path=b'D:\\Web\\django-marcsupdater\\marcsupdater\\_static', verbose_name='Download root')),
('download_path', models.URLField(max_length=250, verbose_name='Download path')),
('application', models.ForeignKey(related_name=b'flavors', to='downloads.Application')),
('builder', models.ForeignKey(related_name=b'flavors', to='builds.Builder')),
],
options={
'ordering': ('name',),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Version',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=50, verbose_name='Name')),
('date', models.DateTimeField(verbose_name='Date')),
('stable', models.BooleanField(default=False, verbose_name='Stable')),
('changes', models.TextField(null=True, verbose_name='Changes', blank=True)),
('build', models.ForeignKey(related_name=b'versions', to='builds.Build')),
('flavor', models.ForeignKey(related_name=b'versions', to='downloads.Flavor')),
],
options={
'ordering': ('-date',),
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='version',
unique_together=set([('flavor', 'build')]),
),
migrations.AlterUniqueTogether(
name='flavor',
unique_together=set([('application', 'builder')]),
),
]
|
{
"content_hash": "f45732f06cc659a8240fb3d06a75a65c",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 218,
"avg_line_length": 43.23809523809524,
"alnum_prop": 0.5458883994126285,
"repo_name": "mback2k/django-app-downloads",
"id": "af794064f6d836b3492fc83b604e2e25fb8345c1",
"size": "2748",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2380"
},
{
"name": "Python",
"bytes": "10319"
}
],
"symlink_target": ""
}
|
import sys
import inspect
import logging
from textwrap import dedent
import discord
from discord.ext.commands.bot import _get_variable
from .exceptions import HelpfulError
from .bot import MusicBot
from .constructs import BetterLogRecord
class Yikes:
""" TODO """
def find_module(self, fullname, path=None):
""" TODO """
if fullname == 'requests':
return self
return None
def _get_import_chain(self, *, until=None):
stack = inspect.stack()[2:]
try:
for frameinfo in stack:
try:
if not frameinfo.code_context:
continue
data = dedent(''.join(frameinfo.code_context))
if data.strip() == until:
raise StopIteration
yield frameinfo.filename, frameinfo.lineno, data.strip()
del data
finally:
del frameinfo
finally:
del stack
def _format_import_chain(self, chain, *, message=None):
lines = []
for line in chain:
lines.append("In %s, line %s:\n %s" % line)
if message:
lines.append(message)
return '\n'.join(lines)
def load_module(self, name):
""" TODO """
if _get_variable('allow_requests'):
sys.meta_path.pop(0)
return __import__('requests')
import_chain = tuple(self._get_import_chain(
until='from .bot import MusicBot'))
import_tb = self._format_import_chain(import_chain)
raise HelpfulError(
"You are attempting to import requests, or import a module that uses requests. "
"Requests (or any module that uses requests) should not be used in this code. "
"See %s for why requests is not suitable for this code."
% "[https://discordpy.readthedocs.io/en/latest/faq.html#what-does-blocking-mean]",
"Don't use requests, use aiohttp instead. The api is very similar to requests "
"when using session objects. [http://aiohttp.readthedocs.io/en/stable/] If "
"a module you're trying to use depends on requests, see if you can find a similar "
"module compatible with asyncio. If you can't find one, learn how to avoid blocking "
"in coroutines. If you're new to programming, consider learning more about how "
"asynchronous code and coroutines work. Blocking calls (notably HTTP requests) can take"
"a long time, during which the bot is unable to do anything but wait for it. "
"If you're sure you know what you're doing, simply add `allow_requests = True` above your"
"import statement, that being `import requests` or whatever requests dependent module.",
footnote="Import traceback (most recent call last):\n" + import_tb
)
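# Install the hook at the front of sys.meta_path so it runs before the
# regular path-based import finders.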
sys.meta_path.insert(0, Yikes())
__all__ = ['MusicBot']
logging.setLogRecordFactory(BetterLogRecord)
_FUNC_PROTOTYPE = """def {logger_func_name}(self, message, *args, **kwargs):\n
if self.isEnabledFor({levelname}):\n
self._log({levelname}, message, args, **kwargs)"""
def _add_logger_level(levelname, level, *, func_name=None):
"""
:type levelname: str
The reference name of the level, e.g. DEBUG, WARNING, etc
:type level: int
Numeric logging level
:type func_name: str
The name of the logger function to LOG to a level, \
e.g. "info" for LOG.info(...)
"""
func_name = func_name or levelname.lower()
setattr(logging, levelname, level)
logging.addLevelName(level, levelname)
exec(_FUNC_PROTOTYPE.format(
logger_func_name=func_name, levelname=levelname),
logging.__dict__, locals())
setattr(logging.Logger, func_name, eval(func_name))
_add_logger_level('EVERYTHING', 1)
_add_logger_level('NOISY', 4, func_name='noise')
_add_logger_level('FFMPEG', 5)
_add_logger_level('VOICEDEBUG', 6)
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.EVERYTHING)
FH = logging.FileHandler(
filename='logs/musicbot.log', encoding='utf-8', mode='a')
FH.setFormatter(logging.Formatter(
"""[{relativeCreated:.16f}] {asctime} - {levelname} - {name} |
In {filename}::{threadName}({thread}),
line {lineno} in {funcName}: {message}""",
style='{'
))
LOG.addHandler(FH)
del _FUNC_PROTOTYPE
del _add_logger_level
del FH
|
{
"content_hash": "7650110d32074cf0ea0f9cdaf36e9601",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 102,
"avg_line_length": 32.56115107913669,
"alnum_prop": 0.6042863455589925,
"repo_name": "DiscordMusicBot/MusicBot",
"id": "29007e5187aa03ed80117c8dcc6adc7b94911990",
"size": "4526",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "musicbot/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "711"
},
{
"name": "Python",
"bytes": "260280"
}
],
"symlink_target": ""
}
|
'''
This module contains code to build PDF "Form XObjects".
A Form XObject allows a fragment from one PDF file to be cleanly
included in another PDF file.
Reference for syntax: "Parameters for opening PDF files" from SDK 8.1
http://www.adobe.com/devnet/acrobat/pdfs/pdf_open_parameters.pdf
supported 'page=xxx', 'viewrect=<left>,<top>,<width>,<height>'
Also supported by this, but not by Adobe:
'rotate=xxx' where xxx in [0, 90, 180, 270]
Units are in points
Reference for content: Adobe PDF reference, sixth edition, version 1.7
http://www.adobe.com/devnet/acrobat/pdfs/pdf_reference_1-7.pdf
Form xobjects discussed chapter 4.9, page 355
'''
from pdfrw.objects import PdfDict, PdfArray, PdfName
from pdfrw.pdfreader import PdfReader
from pdfrw.errors import log
class ViewInfo(object):
''' Instantiate ViewInfo with a uri, and it will parse out
the filename, page, and viewrect into object attributes.
'''
doc = None
docname = None
page = None
viewrect = None
rotate = None
def __init__(self, pageinfo='', **kw):
        pageinfo = pageinfo.split('#', 1)
if len(pageinfo) == 2:
pageinfo[1:] = pageinfo[1].replace('&', '#').split('#')
for key in 'page viewrect'.split():
if pageinfo[0].startswith(key+'='):
break
else:
self.docname = pageinfo.pop(0)
for item in pageinfo:
key, value = item.split('=')
key = key.strip()
value = value.replace(',', ' ').split()
if key in ('page', 'rotate'):
assert len(value) == 1
setattr(self, key, int(value[0]))
elif key == 'viewrect':
assert len(value) == 4
setattr(self, key, [float(x) for x in value])
else:
log.error('Unknown option: %s', key)
for key, value in kw.iteritems():
assert hasattr(self, key), key
setattr(self, key, value)
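# A minimal parsing sketch (the file name is hypothetical; this example is
# not part of the original module):
#
#   info = ViewInfo('source.pdf#page=2&viewrect=36,36,180,144')
#   # info.docname == 'source.pdf', info.page == 2,
#   # info.viewrect == [36.0, 36.0, 180.0, 144.0]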
def get_rotation(rotate):
''' Return clockwise rotation code:
0 = unrotated
1 = 90 degrees
2 = 180 degrees
3 = 270 degrees
'''
try:
rotate = int(rotate)
except (ValueError, TypeError):
return 0
if rotate % 90 != 0:
return 0
return rotate / 90
def rotate_point(point, rotation):
''' Rotate an (x,y) coordinate clockwise by a
rotation code specifying a multiple of 90 degrees.
'''
if rotation & 1:
point = point[1], -point[0]
if rotation & 2:
point = -point[0], -point[1]
return point
def rotate_rect(rect, rotation):
''' Rotate both points within the rectangle, then normalize
the rectangle by returning the new lower left, then new
upper right.
'''
rect = rotate_point(rect[:2], rotation) + rotate_point(rect[2:], rotation)
return (min(rect[0], rect[2]), min(rect[1], rect[3]),
max(rect[0], rect[2]), max(rect[1], rect[3]))
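# Worked example (illustrative only): rotate_rect((0, 0, 10, 20), 1) maps the
# corners to (0, 0) and (20, -10), then normalizes to (0, -10, 20, 0), i.e.
# the 10x20 rectangle rotated clockwise into a 20x10 footprint.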
def getrects(inheritable, pageinfo, rotation):
''' Given the inheritable attributes of a page and
the desired pageinfo rectangle, return the page's
media box and the calculated boundary (clip) box.
'''
mbox = tuple([float(x) for x in inheritable.MediaBox])
vrect = pageinfo.viewrect
if vrect is None:
cbox = tuple([float(x) for x in (inheritable.CropBox or mbox)])
else:
# Rotate the media box to match what the user sees,
# figure out the clipping box, then rotate back
mleft, mbot, mright, mtop = rotate_rect(mbox, rotation)
x, y, w, h = vrect
cleft = mleft + x
ctop = mtop - y
cright = cleft + w
cbot = ctop - h
cbox = max(mleft, cleft), max(mbot, cbot), min(mright, cright), min(mtop, ctop)
cbox = rotate_rect(cbox, -rotation)
return mbox, cbox
def _cache_xobj(contents, resources, mbox, bbox, rotation):
''' Return a cached Form XObject, or create a new one and cache it.
Adds private members x, y, w, h
'''
cachedict = contents.xobj_cachedict
if cachedict is None:
cachedict = contents.private.xobj_cachedict = {}
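    # Key on media box, clip box and rotation so that different views of the
    # same content stream each get their own cached Form XObject.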
cachekey = mbox, bbox, rotation
result = cachedict.get(cachekey)
if result is None:
func = (_get_fullpage, _get_subpage)[mbox != bbox]
result = PdfDict(
func(contents, resources, mbox, bbox, rotation),
Type = PdfName.XObject,
Subtype = PdfName.Form,
FormType = 1,
BBox = PdfArray(bbox),
)
rect = bbox
if rotation:
matrix = rotate_point((1, 0), rotation) + rotate_point((0, 1), rotation)
result.Matrix = PdfArray(matrix + (0, 0))
rect = rotate_rect(rect, rotation)
result.private.x = rect[0]
result.private.y = rect[1]
result.private.w = rect[2] - rect[0]
result.private.h = rect[3] - rect[1]
cachedict[cachekey] = result
return result
def _get_fullpage(contents, resources, mbox, bbox, rotation):
''' fullpage is easy. Just copy the contents,
set up the resources, and let _cache_xobj handle the
rest.
'''
return PdfDict(contents, Resources=resources)
def _get_subpage(contents, resources, mbox, bbox, rotation):
''' subpages *could* be as easy as full pages, but we
choose to complicate life by creating a Form XObject
for the page, and then one that references it for
the subpage, on the off-chance that we want multiple
items from the page.
'''
return PdfDict(
stream = '/FullPage Do\n',
Resources = PdfDict(
XObject = PdfDict(
FullPage = _cache_xobj(contents, resources, mbox, mbox, 0)
)
)
)
def pagexobj(page, viewinfo=ViewInfo(), allow_compressed=True):
''' pagexobj creates and returns a Form XObject for
a given view within a page (Defaults to entire page.)
'''
inheritable = page.inheritable
resources = inheritable.Resources
rotation = get_rotation(inheritable.Rotate)
mbox, bbox = getrects(inheritable, viewinfo, rotation)
rotation += get_rotation(viewinfo.rotate)
contents = page.Contents
# Make sure the only attribute is length
# All the filters must have been executed
assert int(contents.Length) == len(contents.stream)
if not allow_compressed:
assert len([x for x in contents.iteritems()]) == 1
return _cache_xobj(contents, resources, mbox, bbox, rotation)
def docxobj(pageinfo, doc=None, allow_compressed=True):
''' docxobj creates and returns an actual Form XObject.
Can work standalone, or in conjunction with
the CacheXObj class (below).
'''
if not isinstance(pageinfo, ViewInfo):
pageinfo = ViewInfo(pageinfo)
# If we're explicitly passed a document,
# make sure we don't have one implicitly as well.
# If no implicit or explicit doc, then read one in
# from the filename.
if doc is not None:
assert pageinfo.doc is None
pageinfo.doc = doc
elif pageinfo.doc is not None:
doc = pageinfo.doc
else:
doc = pageinfo.doc = PdfReader(pageinfo.docname, decompress = not allow_compressed)
assert isinstance(doc, PdfReader)
sourcepage = doc.pages[(pageinfo.page or 1) - 1]
return pagexobj(sourcepage, pageinfo, allow_compressed)
class CacheXObj(object):
''' Use to keep from reparsing files over and over,
and to keep from making the output too much
bigger than it ought to be by replicating
unnecessary object copies.
'''
def __init__(self, decompress=False):
''' Set decompress true if you need
the Form XObjects to be decompressed.
Will decompress what it can and scream
about the rest.
'''
self.cached_pdfs = {}
self.decompress = decompress
def load(self, sourcename):
''' Load a Form XObject from a uri
'''
info = ViewInfo(sourcename)
fname = info.docname
pcache = self.cached_pdfs
doc = pcache.get(fname)
if doc is None:
doc = pcache[fname] = PdfReader(fname, decompress=self.decompress)
return docxobj(info, doc, allow_compressed=not self.decompress)
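# Minimal usage sketch (the file name is hypothetical; this example is not
# part of the original module):
#
#   cache = CacheXObj()
#   xobj = cache.load('source.pdf#page=1&viewrect=36,36,180,180')
#   # xobj is a Form XObject PdfDict; its private x, y, w and h members
#   # (set in _cache_xobj) give the placement rectangle.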
|
{
"content_hash": "b16e66d73d7d73e0f188a93aaf62e8ea",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 91,
"avg_line_length": 34.371428571428574,
"alnum_prop": 0.6071725448284052,
"repo_name": "tajtiattila/pdfrw",
"id": "ba34f61557328fd7d1c7d226f2e25b1193061ecb",
"size": "8564",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pdfrw/buildxobj.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "105035"
}
],
"symlink_target": ""
}
|
orIfInt = int(input('Please enter an int: '))
if orIfInt == 1 or orIfInt == 2 or orIfInt == 3:
print('Value of orIfInt is', orIfInt, '\n')
else:
print(orIfInt, 'is not in the scope.\n')
print('End of program')
|
{
"content_hash": "777cac2957096b89129998d2aefaccd9",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 48,
"avg_line_length": 27.5,
"alnum_prop": 0.6363636363636364,
"repo_name": "panherz/MyPyCode",
"id": "7785c03fe124476ebab55810162b1c39ac455d81",
"size": "237",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myExercises.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13112"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
setup(
name='belfort',
version='0.0.0',
packages=[''],
url='https://github.com/frxncisjoseph/belfort',
license='Apache License 2.0',
author='Francis Joseph',
author_email='francisgjoseph@outlook.com',
description='Belfort is a binary options trading robot.'
)
|
{
"content_hash": "757ecb2e41cf6e4933fb363171a46444",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 60,
"avg_line_length": 27.166666666666668,
"alnum_prop": 0.6779141104294478,
"repo_name": "frxncisjoseph/belfort",
"id": "36f2abfcfc0091925688e5f177d2379494128410",
"size": "326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1522"
}
],
"symlink_target": ""
}
|
import os
import numpy as np
import tables as tb
from toolz import first
from .dispatch import dispatch
import datashape
import shutil
from blaze.utils import tmpfile
from .resource import resource
__all__ = ['PyTables']
def dtype_to_pytables(dtype):
""" Convert NumPy dtype to PyTable descriptor
Examples
--------
>>> from tables import Int32Col, StringCol, Time64Col
>>> dt = np.dtype([('name', 'S7'), ('amount', 'i4'), ('time', 'M8[us]')])
>>> dtype_to_pytables(dt) # doctest: +SKIP
{'amount': Int32Col(shape=(), dflt=0, pos=1),
'name': StringCol(itemsize=7, shape=(), dflt='', pos=0),
'time': Time64Col(shape=(), dflt=0.0, pos=2)}
"""
d = {}
for pos, name in enumerate(dtype.names):
dt, _ = dtype.fields[name]
if issubclass(dt.type, np.datetime64):
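            # The trailing comma below wraps the Description in a 1-tuple so
            # that first() treats both branches alike (descr_from_dtype also
            # returns a tuple).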
tdtype = tb.Description({name: tb.Time64Col(pos=pos)}),
else:
tdtype = tb.descr_from_dtype(np.dtype([(name, dt)]))
el = first(tdtype)
getattr(el, name)._v_pos = pos
d.update(el._v_colobjects)
return d
def PyTables(path, datapath, dshape=None, **kwargs):
"""Create or open a ``tables.Table`` object.
Parameters
----------
path : str
Path to a PyTables HDF5 file.
datapath : str
The name of the node in the ``tables.File``.
dshape : str or datashape.DataShape
DataShape to use to create the ``Table``.
Returns
-------
t : tables.Table
Examples
--------
>>> from blaze.utils import tmpfile
>>> # create from scratch
>>> with tmpfile('.h5') as f:
    ...     t = PyTables(f, '/bar',
... dshape='var * {volume: float64, planet: string[10, "A"]}')
    ...     data = [(100.3, 'mars'), (100.42, 'jupiter')]
... t.append(data)
... t[:] # doctest: +SKIP
...
    array([(100.3, b'mars'), (100.42, b'jupiter')],
dtype=[('volume', '<f8'), ('planet', 'S10')])
"""
def possibly_create_table(filename, dtype):
f = tb.open_file(filename, mode='a')
try:
if datapath not in f:
if dtype is None:
raise ValueError('dshape cannot be None and datapath not'
' in file')
else:
f.create_table('/', datapath.lstrip('/'), description=dtype)
finally:
f.close()
if dshape:
if isinstance(dshape, str):
dshape = datashape.dshape(dshape)
if dshape[0] == datashape.var:
dshape = dshape.subshape[0]
dtype = dtype_to_pytables(datashape.to_numpy_dtype(dshape))
else:
dtype = None
if os.path.exists(path):
possibly_create_table(path, dtype)
else:
with tmpfile('.h5') as filename:
possibly_create_table(filename, dtype)
shutil.copyfile(filename, path)
return tb.open_file(path, mode='a').get_node(datapath)
@dispatch(tb.Table)
def chunks(b, chunksize=2**15):
start = 0
n = len(b)
while start < n:
yield b[start:start + chunksize]
start += chunksize
@dispatch(tb.Table, int)
def get_chunk(b, i, chunksize=2**15):
start = chunksize * i
stop = chunksize * (i + 1)
return b[start:stop]
@resource.register('.+\.h5')
def resource_pytables(path, datapath, **kwargs):
return PyTables(path, datapath, **kwargs)
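# --- Usage sketch (illustrative; not part of the original module) ---
# The relative imports above mean this module cannot run as a script, so the
# example is left as a comment. It assumes the dshape syntax shown in the
# PyTables docstring.
#
#     with tmpfile('.h5') as fn:
#         t = PyTables(fn, '/demo',
#                      dshape='var * {volume: float64, planet: string[10, "A"]}')
#         t.append([(100.3, 'mars'), (100.42, 'jupiter')])
#         for chunk in chunks(t, chunksize=1):
#             print(chunk)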
|
{
"content_hash": "61d8af60f9563aec65ee79d179201ef5",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 83,
"avg_line_length": 28.33884297520661,
"alnum_prop": 0.5567220764071158,
"repo_name": "vitan/blaze",
"id": "5fe60cee55d743526645471e63580639a2ba8578",
"size": "3429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blaze/pytables.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""Constants for the Goodwe component."""
from datetime import timedelta
from homeassistant.const import Platform
DOMAIN = "goodwe"
PLATFORMS = [Platform.NUMBER, Platform.SELECT, Platform.SENSOR]
DEFAULT_NAME = "GoodWe"
SCAN_INTERVAL = timedelta(seconds=10)
CONF_MODEL_FAMILY = "model_family"
KEY_INVERTER = "inverter"
KEY_COORDINATOR = "coordinator"
KEY_DEVICE_INFO = "device_info"
|
{
"content_hash": "751aebb29fc00ba65973284a8320a5fe",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 63,
"avg_line_length": 22.88235294117647,
"alnum_prop": 0.7609254498714653,
"repo_name": "GenericStudent/home-assistant",
"id": "0e40601ccdb41b3c0ba407b1e29b47b3ae3bc6b8",
"size": "389",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/goodwe/const.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ce_interface_ospf
version_added: "2.4"
short_description: Manages configuration of an OSPF interface instance on HUAWEI CloudEngine switches.
description:
    - Manages configuration of an OSPF interface instance on HUAWEI CloudEngine switches.
author: QijunPan (@QijunPan)
options:
interface:
description:
        - Full name of interface, e.g. 40GE1/0/10.
required: true
process_id:
description:
- Specifies a process ID.
The value is an integer ranging from 1 to 4294967295.
required: true
area:
description:
- Ospf area associated with this ospf process.
Valid values are a string, formatted as an IP address
(i.e. "0.0.0.0") or as an integer between 1 and 4294967295.
required: true
cost:
description:
- The cost associated with this interface.
Valid values are an integer in the range from 1 to 65535.
hello_interval:
description:
- Time between sending successive hello packets.
Valid values are an integer in the range from 1 to 65535.
dead_interval:
description:
- Time interval an ospf neighbor waits for a hello
packet before tearing down adjacencies. Valid values are an
integer in the range from 1 to 235926000.
silent_interface:
description:
- Setting to true will prevent this interface from receiving
HELLO packets. Valid values are 'true' and 'false'.
type: bool
default: 'no'
auth_mode:
description:
- Specifies the authentication type.
choices: ['none', 'null', 'hmac-sha256', 'md5', 'hmac-md5', 'simple']
auth_text_simple:
description:
- Specifies a password for simple authentication.
The value is a string of 1 to 8 characters.
auth_key_id:
description:
        - Authentication key id when C(auth_mode) is 'hmac-sha256', 'md5' or 'hmac-md5'.
Valid value is an integer is in the range from 1 to 255.
auth_text_md5:
description:
- Specifies a password for MD5, HMAC-MD5, or HMAC-SHA256 authentication.
The value is a string of 1 to 255 case-sensitive characters, spaces not supported.
state:
description:
- Determines whether the config should be present or not
on the device.
default: present
choices: ['present','absent']
"""
EXAMPLES = '''
- name: ce_interface_ospf module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Enables OSPF and sets the cost on an interface
ce_interface_ospf:
interface: 10GE1/0/30
process_id: 1
area: 100
cost: 100
provider: '{{ cli }}'
- name: Sets the dead interval of the OSPF neighbor
ce_interface_ospf:
interface: 10GE1/0/30
process_id: 1
area: 100
dead_interval: 100
provider: '{{ cli }}'
- name: Sets the interval for sending Hello packets on an interface
ce_interface_ospf:
interface: 10GE1/0/30
process_id: 1
area: 100
hello_interval: 2
provider: '{{ cli }}'
- name: Disables an interface from receiving and sending OSPF packets
ce_interface_ospf:
interface: 10GE1/0/30
process_id: 1
area: 100
silent_interface: true
provider: '{{ cli }}'
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"process_id": "1", "area": "0.0.0.100", "interface": "10GE1/0/30", "cost": "100"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"process_id": "1", "area": "0.0.0.100"}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"process_id": "1", "area": "0.0.0.100", "interface": "10GE1/0/30",
"cost": "100", "dead_interval": "40", "hello_interval": "10",
"silent_interface": "false", "auth_mode": "none"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["interface 10GE1/0/30",
"ospf enable 1 area 0.0.0.100",
"ospf cost 100"]
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_OSPF = """
<filter type="subtree">
<ospfv2 xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ospfv2comm>
<ospfSites>
<ospfSite>
<processId>%s</processId>
<routerId></routerId>
<vrfName></vrfName>
<areas>
<area>
<areaId>%s</areaId>
<interfaces>
<interface>
<ifName>%s</ifName>
<networkType></networkType>
<helloInterval></helloInterval>
<deadInterval></deadInterval>
<silentEnable></silentEnable>
<configCost></configCost>
<authenticationMode></authenticationMode>
<authTextSimple></authTextSimple>
<keyId></keyId>
<authTextMd5></authTextMd5>
</interface>
</interfaces>
</area>
</areas>
</ospfSite>
</ospfSites>
</ospfv2comm>
</ospfv2>
</filter>
"""
CE_NC_XML_BUILD_PROCESS = """
<config>
<ospfv2 xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ospfv2comm>
<ospfSites>
<ospfSite>
<processId>%s</processId>
<areas>
<area>
<areaId>%s</areaId>
%s
</area>
</areas>
</ospfSite>
</ospfSites>
</ospfv2comm>
</ospfv2>
</config>
"""
CE_NC_XML_BUILD_MERGE_INTF = """
<interfaces>
<interface operation="merge">
%s
</interface>
</interfaces>
"""
CE_NC_XML_BUILD_DELETE_INTF = """
<interfaces>
<interface operation="delete">
%s
</interface>
</interfaces>
"""
CE_NC_XML_SET_IF_NAME = """
<ifName>%s</ifName>
"""
CE_NC_XML_SET_HELLO = """
<helloInterval>%s</helloInterval>
"""
CE_NC_XML_SET_DEAD = """
<deadInterval>%s</deadInterval>
"""
CE_NC_XML_SET_SILENT = """
<silentEnable>%s</silentEnable>
"""
CE_NC_XML_SET_COST = """
<configCost>%s</configCost>
"""
CE_NC_XML_SET_AUTH_MODE = """
<authenticationMode>%s</authenticationMode>
"""
CE_NC_XML_SET_AUTH_TEXT_SIMPLE = """
<authTextSimple>%s</authTextSimple>
"""
CE_NC_XML_SET_AUTH_MD5 = """
<keyId>%s</keyId>
<authTextMd5>%s</authTextMd5>
"""
def get_interface_type(interface):
"""Gets the type of interface, such as 10GE, ETH-TRUNK, VLANIF..."""
if interface is None:
return None
iftype = None
if interface.upper().startswith('GE'):
iftype = 'ge'
elif interface.upper().startswith('10GE'):
iftype = '10ge'
elif interface.upper().startswith('25GE'):
iftype = '25ge'
elif interface.upper().startswith('4X10GE'):
iftype = '4x10ge'
elif interface.upper().startswith('40GE'):
iftype = '40ge'
elif interface.upper().startswith('100GE'):
iftype = '100ge'
elif interface.upper().startswith('VLANIF'):
iftype = 'vlanif'
elif interface.upper().startswith('LOOPBACK'):
iftype = 'loopback'
elif interface.upper().startswith('METH'):
iftype = 'meth'
elif interface.upper().startswith('ETH-TRUNK'):
iftype = 'eth-trunk'
elif interface.upper().startswith('VBDIF'):
iftype = 'vbdif'
elif interface.upper().startswith('NVE'):
iftype = 'nve'
elif interface.upper().startswith('TUNNEL'):
iftype = 'tunnel'
elif interface.upper().startswith('ETHERNET'):
iftype = 'ethernet'
elif interface.upper().startswith('FCOE-PORT'):
iftype = 'fcoe-port'
elif interface.upper().startswith('FABRIC-PORT'):
iftype = 'fabric-port'
elif interface.upper().startswith('STACK-PORT'):
iftype = 'stack-port'
elif interface.upper().startswith('NULL'):
iftype = 'null'
else:
return None
return iftype.lower()
def is_valid_v4addr(addr):
"""check is ipv4 addr is valid"""
if not addr:
return False
if addr.find('.') != -1:
addr_list = addr.split('.')
if len(addr_list) != 4:
return False
for each_num in addr_list:
if not each_num.isdigit():
return False
if int(each_num) > 255:
return False
return True
return False
class InterfaceOSPF(object):
"""
Manages configuration of an OSPF interface instance.
"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.init_module()
# module input info
self.interface = self.module.params['interface']
self.process_id = self.module.params['process_id']
self.area = self.module.params['area']
self.cost = self.module.params['cost']
self.hello_interval = self.module.params['hello_interval']
self.dead_interval = self.module.params['dead_interval']
self.silent_interface = self.module.params['silent_interface']
self.auth_mode = self.module.params['auth_mode']
self.auth_text_simple = self.module.params['auth_text_simple']
self.auth_key_id = self.module.params['auth_key_id']
self.auth_text_md5 = self.module.params['auth_text_md5']
self.state = self.module.params['state']
# ospf info
self.ospf_info = dict()
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def init_module(self):
"""init module"""
self.module = AnsibleModule(
argument_spec=self.spec, supports_check_mode=True)
def netconf_set_config(self, xml_str, xml_name):
"""netconf set config"""
rcv_xml = set_nc_config(self.module, xml_str)
if "<ok/>" not in rcv_xml:
self.module.fail_json(msg='Error: %s failed.' % xml_name)
def get_area_ip(self):
"""convert integer to ip address"""
if not self.area.isdigit():
return self.area
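        # e.g. area "100" -> "0.0.0.100", area "16777216" -> "1.0.0.0"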
addr_int = ['0'] * 4
addr_int[0] = str(((int(self.area) & 0xFF000000) >> 24) & 0xFF)
addr_int[1] = str(((int(self.area) & 0x00FF0000) >> 16) & 0xFF)
addr_int[2] = str(((int(self.area) & 0x0000FF00) >> 8) & 0XFF)
addr_int[3] = str(int(self.area) & 0xFF)
return '.'.join(addr_int)
def get_ospf_dict(self):
""" get one ospf attributes dict."""
ospf_info = dict()
conf_str = CE_NC_GET_OSPF % (
self.process_id, self.get_area_ip(), self.interface)
rcv_xml = get_nc_config(self.module, conf_str)
if "<data/>" in rcv_xml:
return ospf_info
xml_str = rcv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
# get process base info
root = ElementTree.fromstring(xml_str)
ospfsite = root.find("ospfv2/ospfv2comm/ospfSites/ospfSite")
        if ospfsite is None:
self.module.fail_json(msg="Error: ospf process does not exist.")
for site in ospfsite:
if site.tag in ["processId", "routerId", "vrfName"]:
ospf_info[site.tag] = site.text
# get areas info
ospf_info["areaId"] = ""
areas = root.find(
"ospfv2/ospfv2comm/ospfSites/ospfSite/areas/area")
        if areas is not None:
for area in areas:
if area.tag == "areaId":
ospf_info["areaId"] = area.text
break
# get interface info
ospf_info["interface"] = dict()
intf = root.find(
"ospfv2/ospfv2comm/ospfSites/ospfSite/areas/area/interfaces/interface")
        if intf is not None:
for attr in intf:
if attr.tag in ["ifName", "networkType",
"helloInterval", "deadInterval",
"silentEnable", "configCost",
"authenticationMode", "authTextSimple",
"keyId", "authTextMd5"]:
ospf_info["interface"][attr.tag] = attr.text
return ospf_info
def set_ospf_interface(self):
"""set interface ospf enable, and set its ospf attributes"""
xml_intf = CE_NC_XML_SET_IF_NAME % self.interface
# ospf view
self.updates_cmd.append("ospf %s" % self.process_id)
self.updates_cmd.append("area %s" % self.get_area_ip())
        if self.silent_interface:
            xml_intf += CE_NC_XML_SET_SILENT % str(self.silent_interface).lower()
            self.updates_cmd.append("silent-interface %s" % self.interface)
        else:
            self.updates_cmd.append("undo silent-interface %s" % self.interface)
# interface view
self.updates_cmd.append("interface %s" % self.interface)
self.updates_cmd.append("ospf enable %s area %s" % (
self.process_id, self.get_area_ip()))
if self.cost:
xml_intf += CE_NC_XML_SET_COST % self.cost
self.updates_cmd.append("ospf cost %s" % self.cost)
if self.hello_interval:
xml_intf += CE_NC_XML_SET_HELLO % self.hello_interval
self.updates_cmd.append("ospf timer hello %s" %
self.hello_interval)
if self.dead_interval:
xml_intf += CE_NC_XML_SET_DEAD % self.dead_interval
self.updates_cmd.append("ospf timer dead %s" % self.dead_interval)
if self.auth_mode:
xml_intf += CE_NC_XML_SET_AUTH_MODE % self.auth_mode
if self.auth_mode == "none":
self.updates_cmd.append("undo ospf authentication-mode")
else:
self.updates_cmd.append("ospf authentication-mode %s" % self.auth_mode)
if self.auth_mode == "simple" and self.auth_text_simple:
xml_intf += CE_NC_XML_SET_AUTH_TEXT_SIMPLE % self.auth_text_simple
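                # Replace the generic "ospf authentication-mode" command just
                # appended with the variant carrying the plaintext password.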
self.updates_cmd.pop()
self.updates_cmd.append("ospf authentication-mode %s %s"
% (self.auth_mode, self.auth_text_simple))
elif self.auth_mode in ["hmac-sha256", "md5", "hmac-md5"] and self.auth_key_id:
xml_intf += CE_NC_XML_SET_AUTH_MD5 % (
self.auth_key_id, self.auth_text_md5)
self.updates_cmd.pop()
self.updates_cmd.append("ospf authentication-mode %s %s %s"
% (self.auth_mode, self.auth_key_id, self.auth_text_md5))
else:
pass
xml_str = CE_NC_XML_BUILD_PROCESS % (self.process_id,
self.get_area_ip(),
(CE_NC_XML_BUILD_MERGE_INTF % xml_intf))
self.netconf_set_config(xml_str, "SET_INTERFACE_OSPF")
self.changed = True
def merge_ospf_interface(self):
"""merge interface ospf attributes"""
intf_dict = self.ospf_info["interface"]
# ospf view
xml_ospf = ""
if intf_dict.get("silentEnable") != str(self.silent_interface).lower():
xml_ospf += CE_NC_XML_SET_SILENT % str(self.silent_interface).lower()
self.updates_cmd.append("ospf %s" % self.process_id)
self.updates_cmd.append("area %s" % self.get_area_ip())
if self.silent_interface:
self.updates_cmd.append("silent-interface %s" % self.interface)
else:
self.updates_cmd.append("undo silent-interface %s" % self.interface)
# interface view
xml_intf = ""
self.updates_cmd.append("interface %s" % self.interface)
if self.cost and intf_dict.get("configCost") != self.cost:
xml_intf += CE_NC_XML_SET_COST % self.cost
self.updates_cmd.append("ospf cost %s" % self.cost)
if self.hello_interval and intf_dict.get("helloInterval") != self.hello_interval:
xml_intf += CE_NC_XML_SET_HELLO % self.hello_interval
self.updates_cmd.append("ospf timer hello %s" %
self.hello_interval)
if self.dead_interval and intf_dict.get("deadInterval") != self.dead_interval:
xml_intf += CE_NC_XML_SET_DEAD % self.dead_interval
self.updates_cmd.append("ospf timer dead %s" % self.dead_interval)
if self.auth_mode:
            # NOTE: for security, authentication config will always be updated
xml_intf += CE_NC_XML_SET_AUTH_MODE % self.auth_mode
if self.auth_mode == "none":
self.updates_cmd.append("undo ospf authentication-mode")
else:
self.updates_cmd.append("ospf authentication-mode %s" % self.auth_mode)
if self.auth_mode == "simple" and self.auth_text_simple:
xml_intf += CE_NC_XML_SET_AUTH_TEXT_SIMPLE % self.auth_text_simple
self.updates_cmd.pop()
self.updates_cmd.append("ospf authentication-mode %s %s"
% (self.auth_mode, self.auth_text_simple))
elif self.auth_mode in ["hmac-sha256", "md5", "hmac-md5"] and self.auth_key_id:
xml_intf += CE_NC_XML_SET_AUTH_MD5 % (
self.auth_key_id, self.auth_text_md5)
self.updates_cmd.pop()
self.updates_cmd.append("ospf authentication-mode %s %s %s"
% (self.auth_mode, self.auth_key_id, self.auth_text_md5))
else:
pass
if not xml_intf:
self.updates_cmd.pop() # remove command: interface
if not xml_ospf and not xml_intf:
return
xml_sum = CE_NC_XML_SET_IF_NAME % self.interface
xml_sum += xml_ospf + xml_intf
xml_str = CE_NC_XML_BUILD_PROCESS % (self.process_id,
self.get_area_ip(),
(CE_NC_XML_BUILD_MERGE_INTF % xml_sum))
self.netconf_set_config(xml_str, "MERGE_INTERFACE_OSPF")
self.changed = True
def unset_ospf_interface(self):
"""set interface ospf disable, and all its ospf attributes will be removed"""
intf_dict = self.ospf_info["interface"]
xml_sum = ""
xml_intf = CE_NC_XML_SET_IF_NAME % self.interface
if intf_dict.get("silentEnable") == "true":
xml_sum += CE_NC_XML_BUILD_MERGE_INTF % (
xml_intf + (CE_NC_XML_SET_SILENT % "false"))
self.updates_cmd.append("ospf %s" % self.process_id)
self.updates_cmd.append("area %s" % self.get_area_ip())
self.updates_cmd.append(
"undo silent-interface %s" % self.interface)
xml_sum += CE_NC_XML_BUILD_DELETE_INTF % xml_intf
xml_str = CE_NC_XML_BUILD_PROCESS % (self.process_id,
self.get_area_ip(),
xml_sum)
self.netconf_set_config(xml_str, "DELETE_INTERFACE_OSPF")
self.updates_cmd.append("undo ospf cost")
self.updates_cmd.append("undo ospf timer hello")
self.updates_cmd.append("undo ospf timer dead")
self.updates_cmd.append("undo ospf authentication-mode")
self.updates_cmd.append("undo ospf enable %s area %s" % (
self.process_id, self.get_area_ip()))
self.changed = True
def check_params(self):
"""Check all input params"""
self.interface = self.interface.replace(" ", "").upper()
# interface check
if not get_interface_type(self.interface):
self.module.fail_json(msg="Error: interface is invalid.")
# process_id check
if not self.process_id.isdigit():
self.module.fail_json(msg="Error: process_id is not digit.")
if int(self.process_id) < 1 or int(self.process_id) > 4294967295:
self.module.fail_json(msg="Error: process_id must be an integer between 1 and 4294967295.")
# area check
if self.area.isdigit():
if int(self.area) < 0 or int(self.area) > 4294967295:
self.module.fail_json(msg="Error: area id (Integer) must be between 0 and 4294967295.")
else:
if not is_valid_v4addr(self.area):
self.module.fail_json(msg="Error: area id is invalid.")
# area authentication check
if self.state == "present":
if self.auth_mode:
if self.auth_mode == "simple":
if self.auth_text_simple and len(self.auth_text_simple) > 8:
self.module.fail_json(
msg="Error: auth_text_simple is not in the range from 1 to 8.")
if self.auth_mode in ["hmac-sha256", "hmac-sha256", "md5"]:
if self.auth_key_id and not self.auth_text_md5:
self.module.fail_json(
msg='Error: auth_key_id and auth_text_md5 should be set at the same time.')
if not self.auth_key_id and self.auth_text_md5:
self.module.fail_json(
msg='Error: auth_key_id and auth_text_md5 should be set at the same time.')
if self.auth_key_id:
if not self.auth_key_id.isdigit():
self.module.fail_json(
msg="Error: auth_key_id is not digit.")
if int(self.auth_key_id) < 1 or int(self.auth_key_id) > 255:
self.module.fail_json(
msg="Error: auth_key_id is not in the range from 1 to 255.")
if self.auth_text_md5 and len(self.auth_text_md5) > 255:
self.module.fail_json(
msg="Error: auth_text_md5 is not in the range from 1 to 255.")
# cost check
if self.cost:
if not self.cost.isdigit():
self.module.fail_json(msg="Error: cost is not digit.")
if int(self.cost) < 1 or int(self.cost) > 65535:
self.module.fail_json(
msg="Error: cost is not in the range from 1 to 65535")
# hello_interval check
if self.hello_interval:
if not self.hello_interval.isdigit():
self.module.fail_json(
msg="Error: hello_interval is not digit.")
if int(self.hello_interval) < 1 or int(self.hello_interval) > 65535:
self.module.fail_json(
msg="Error: hello_interval is not in the range from 1 to 65535")
# dead_interval check
if self.dead_interval:
if not self.dead_interval.isdigit():
self.module.fail_json(msg="Error: dead_interval is not digit.")
if int(self.dead_interval) < 1 or int(self.dead_interval) > 235926000:
self.module.fail_json(
msg="Error: dead_interval is not in the range from 1 to 235926000")
def get_proposed(self):
"""get proposed info"""
self.proposed["interface"] = self.interface
self.proposed["process_id"] = self.process_id
self.proposed["area"] = self.get_area_ip()
self.proposed["cost"] = self.cost
self.proposed["hello_interval"] = self.hello_interval
self.proposed["dead_interval"] = self.dead_interval
self.proposed["silent_interface"] = self.silent_interface
if self.auth_mode:
self.proposed["auth_mode"] = self.auth_mode
if self.auth_mode == "simple":
self.proposed["auth_text_simple"] = self.auth_text_simple
if self.auth_mode in ["hmac-sha256", "hmac-sha256", "md5"]:
self.proposed["auth_key_id"] = self.auth_key_id
self.proposed["auth_text_md5"] = self.auth_text_md5
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
if not self.ospf_info:
return
if self.ospf_info["interface"]:
self.existing["interface"] = self.interface
self.existing["cost"] = self.ospf_info["interface"].get("configCost")
self.existing["hello_interval"] = self.ospf_info["interface"].get("helloInterval")
self.existing["dead_interval"] = self.ospf_info["interface"].get("deadInterval")
self.existing["silent_interface"] = self.ospf_info["interface"].get("silentEnable")
self.existing["auth_mode"] = self.ospf_info["interface"].get("authenticationMode")
self.existing["auth_text_simple"] = self.ospf_info["interface"].get("authTextSimple")
self.existing["auth_key_id"] = self.ospf_info["interface"].get("keyId")
self.existing["auth_text_md5"] = self.ospf_info["interface"].get("authTextMd5")
self.existing["process_id"] = self.ospf_info["processId"]
self.existing["area"] = self.ospf_info["areaId"]
def get_end_state(self):
"""get end state info"""
ospf_info = self.get_ospf_dict()
if not ospf_info:
return
if ospf_info["interface"]:
self.end_state["interface"] = self.interface
self.end_state["cost"] = ospf_info["interface"].get("configCost")
self.end_state["hello_interval"] = ospf_info["interface"].get("helloInterval")
self.end_state["dead_interval"] = ospf_info["interface"].get("deadInterval")
self.end_state["silent_interface"] = ospf_info["interface"].get("silentEnable")
self.end_state["auth_mode"] = ospf_info["interface"].get("authenticationMode")
self.end_state["auth_text_simple"] = ospf_info["interface"].get("authTextSimple")
self.end_state["auth_key_id"] = ospf_info["interface"].get("keyId")
self.end_state["auth_text_md5"] = ospf_info["interface"].get("authTextMd5")
self.end_state["process_id"] = ospf_info["processId"]
self.end_state["area"] = ospf_info["areaId"]
def work(self):
"""worker"""
self.check_params()
self.ospf_info = self.get_ospf_dict()
self.get_existing()
self.get_proposed()
# deal present or absent
if self.state == "present":
if not self.ospf_info or not self.ospf_info["interface"]:
# create ospf area and set interface config
self.set_ospf_interface()
else:
# merge interface ospf area config
self.merge_ospf_interface()
else:
if self.ospf_info and self.ospf_info["interface"]:
# delete interface ospf area config
self.unset_ospf_interface()
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""Module main"""
argument_spec = dict(
interface=dict(required=True, type='str'),
process_id=dict(required=True, type='str'),
area=dict(required=True, type='str'),
cost=dict(required=False, type='str'),
hello_interval=dict(required=False, type='str'),
dead_interval=dict(required=False, type='str'),
silent_interface=dict(required=False, default=False, type='bool'),
auth_mode=dict(required=False,
choices=['none', 'null', 'hmac-sha256', 'md5', 'hmac-md5', 'simple'], type='str'),
auth_text_simple=dict(required=False, type='str', no_log=True),
auth_key_id=dict(required=False, type='str'),
auth_text_md5=dict(required=False, type='str', no_log=True),
state=dict(required=False, default='present',
choices=['present', 'absent'])
)
argument_spec.update(ce_argument_spec)
module = InterfaceOSPF(argument_spec)
module.work()
if __name__ == '__main__':
main()
|
{
"content_hash": "2bb6b44db8a3ddc53ea0b3cbc8f7ee6a",
"timestamp": "",
"source": "github",
"line_count": 777,
"max_line_length": 105,
"avg_line_length": 38.77606177606177,
"alnum_prop": 0.5529224335357961,
"repo_name": "thaim/ansible",
"id": "02901b7b07993087cc4d67ae41d229f8c0412911",
"size": "30804",
"binary": false,
"copies": "6",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/network/cloudengine/ce_interface_ospf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
}
|
"""
WSGI config for cotndemo project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cotndemo.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
{
"content_hash": "127902ca04a99e3d744bdd0a7e573d9d",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 27.928571428571427,
"alnum_prop": 0.7749360613810742,
"repo_name": "achoy/cotndemo",
"id": "76e6b038903a34e99f756626114cf09da2a5982c",
"size": "391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cotndemo/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4660"
}
],
"symlink_target": ""
}
|
import random
def meistermind():
solution = ""
turns = 8
print "Meistermind"
print "You have 8 tries to guess the right sequence"
print "of four symbols (being the letters A through G)"
colors = {
1: "A",
2: "B",
3: "C",
4: "D",
5: "E",
6: "F",
7: "G"
}
c = 1
while c < 5:
s = random.randint(1,7)
solution += colors[s]
c += 1
while turns > 0: #game loop
turns -= 1
correct = 0
almost = 0
guess = ""
error = True
while error:
            guess = str(raw_input("your guess: ")).upper()
if len(guess) == 4:
error = False
else:
print "Error: the sequence must contain exactly 4 symbols."
if guess == solution:
print "You have won!"
break
else:
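            # Naive Mastermind scoring: a letter occurring more than once in
            # the guess or solution can be over-counted as "almost".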
for index, c in enumerate(guess):
if c == solution[index]:
correct += 1
elif c in solution:
almost += 1
print correct, "correct, ", almost, "almost"
print "You have",
if turns == 0:
print "lost!"
print "Solution:", solution
else:
print turns, "guesses left."
print "the End"
|
{
"content_hash": "089e4c074183c9cf5c446317579586f1",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 75,
"avg_line_length": 23.322033898305083,
"alnum_prop": 0.4316860465116279,
"repo_name": "posiputt/Sandbox",
"id": "4f20c4dbd0d353da91bfd13a9ca1df9d1baeefdc",
"size": "1376",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meistermind.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1830"
}
],
"symlink_target": ""
}
|
import os, sys, optparse
from datetime import datetime as dt
try:
from common_methods import *
except ImportError:
sys.exit("Could not find common_methods.py... download the full toolkit from https://github.com/MonroCoury/Forensic_Tools")
def read_accounts(db):
'''Read account details from skype database. Takes one argument: database file full path'''
command = "SELECT fullname, skypename, city, country, datetime(profile_timestamp, 'unixepoch') FROM Accounts;"
res = pull_from_db(db, command)
data = init_data("skype_scanner Account", len(res)) + init_table_header("./templates/init_account_html.html")
for row in res:
if not row[2]:
loc = str(row[3]) + ", unspecified city/town"
else:
loc = str(row[3]) + ', ' + str(row[2])
line = "<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" % (row[0], row[1],
loc, row[4])
data += line
data += close_table_html()
tgt = "skype_scanner_accounts.html"
saveResult(tgt, data)
def read_contacts(db):
'''Read contacts details from skype database. Takes one argument: database file full path'''
command = "SELECT displayname, skypename, city, country, phone_mobile, birthday FROM Contacts;"
res = pull_from_db(db, command)
data = init_data("skype_scanner Contacts", len(res)) + init_table_header("./templates/init_contacts_html.html")
for row in res:
if not row[2]:
loc = str(row[3]) + ", unspecified city/town"
else:
loc = str(row[3]) + ', ' + str(row[2])
line = "<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" % (row[0], row[1],
loc, row[4], row[5])
data += line
data += close_table_html()
tgt = "skype_scanner_contacts.html"
saveResult(tgt, data)
def read_call_log(db, partner=None, tm_min=0, tm_max=10000000000000):
'''Read call log details from skype database. Takes 4 arguments:
db: database file full path
partner: call partner, default value None
tm_min: minimum call timestamp, default value 0
tm_max: maximum call timestamp, default value 10000000000000'''
command = "SELECT datetime(begin_timestamp, 'unixepoch'), identity, duration, is_incoming FROM calls, conversations WHERE " \
+ "(calls.conv_dbid = conversations.id) AND (begin_timestamp > %s AND begin_timestamp < %s);" % (tm_min, tm_max)
if partner:
command = command[:-1] + " AND (chatname LIKE %s);" % ("'%" + partner + "%'")
res = pull_from_db(db, command)
data = init_data("skype_scanner Call Log", len(res)) + init_table_header("./templates/init_clog_html.html")
for row in res:
dir_dict = {"0" : "outgoing", "1" : "incoming"}
line = "<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" % (row[0], row[1], row[2], dir_dict[str(row[3])])
data += line
data += close_table_html()
tgt = "skype_scanner_calls.html"
saveResult(tgt, data)
def read_msgs(db, partner=None, tm_min=0, tm_max=10000000000000):
'''Read Messages from skype database. Takes 4 arguments:
db: database file full path
partner: chat partner, default value None
tm_min: minimum Message timestamp, default value 0
tm_max: maximum Message timestamp, default value 10000000000000'''
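    # Illustrative call (hypothetical path and partner name):
    #   read_msgs('/home/user/main.db', partner='echo123',
    #             tm_min=time_to_epoch('2020_01_01_00_00_00'))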
command = "SELECT timestamp, dialog_partner, author, body_xml, chatmsg_status, sending_status, chatname " \
+ "FROM Messages WHERE (timestamp > %s AND timestamp < %s);" % (tm_min, tm_max)
if partner:
command = command[:-1] + " AND (chatname LIKE %s);" % ("'%" + partner + "%'")
res = pull_from_db(db, command)
user = pull_from_db(db, "SELECT skypename from Accounts;")
data = init_data("skype_scanner Messages", len(res)) + init_table_header("./templates/init_msgs_html.html")
for row in res:
try:
if 'partlist' not in str(row[3]):
if row[1]:
From = str(row[2])
To = str(row[1])
else:
From = str(row[2])
To = str(user[0][0])
status_dict = {"1" : "pending", "2" : "delivered"}
if str(row[5]) in ("1", "2"):
status = status_dict[str(row[4])]
else:
status = "incoming"
line = "\n\t\t\t\t\t<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" % (dt.fromtimestamp(float(row[0])),
row[6], From, To, row[3],
status)
data += line
        except Exception:
            # Skip malformed rows rather than aborting the whole report.
            pass
data += close_table_html()
tgt = "skype_scanner_msgs.html"
saveResult(tgt, data)
if __name__ == "__main__":
print('\n\n ##############A Python script to read skype profile data ################')
print(' # Coded by monrocoury #')
print(' # can read accounts data, messages, call log #')
print(' # and contacts to name a few #')
print(' #########################################################################\n\n')
parser = optparse.OptionParser("Usage: python %prog -t <target> -b <database location>" \
+ " --partner <(optional) partner name> --min_time <(optional)> --max_time <(optional)>" \
+ " or python skype_scanner.py -h for help")
target_help = "can take one of 4 values: accounts, msgs, clog, or contacts"
parser.add_option("-t", dest="target", type="string", help=target_help)
db_help = "The location of the database to parse, or the full path of the database file. " \
+ "ie: either c:\folder or c:\folder\db_file.db. If you enter a folder, the script will " \
+ "look for a main.db file which is the default name and extension of skype profile database." \
+ " On windows, the default location of the main.db file is: " \
+ r"C:\Users\(WindowsUserName)\AppData\Roaming\Skype\(SkypeUserName)\ on linux: " \
+ r"~/.Skype/(SkypeUserName)/ and on mac: ~/Library/Application Support/Skype/(SkypeUserName)/" \
+ r" if the location contains spaces, enclose it in quotes"
parser.add_option("-b", dest="db", type="string", help=db_help)
parser.add_option("--partner", dest="partner", type="string", help="enter only if target is 'msgs' or 'clog' to read messages/calls from the given skype username")
min_help = "enter only if target is 'msgs' or 'clog' to read messages/calls after a given date and time, must be a string separated by _ YYYY_MM_DD_HH_MM_SS"
parser.add_option("--min_time", dest="min", type="string", help=min_help)
max_help = "enter only if target is 'msgs' or 'clog' to read messages/calls before a given date and time, must be a string separated by _ YYYY_MM_DD_HH_MM_SS"
parser.add_option("--max_time", dest="max", type="string", help=max_help)
(options, args) = parser.parse_args()
if None in (options.target, options.db):
sys.exit("please enter a target:\n\n%s" % parser.usage)
if options.target not in ("accounts", "msgs", "clog", "contacts"):
sys.exit("Unrecognized target function!")
db = options.db
if not os.path.isfile(db):
db = os.path.join(db, "main.db")
if options.target.lower() == "accounts":
read_accounts(db)
elif options.target.lower() == "msgs":
if options.min:
min_time = time_to_epoch(options.min)
else:
min_time = 0
if options.max:
            max_time = time_to_epoch(options.max)
else:
max_time = 10000000000000
read_msgs(db, options.partner, min_time, max_time)
elif options.target.lower() == "clog":
if options.min:
min_time = time_to_epoch(options.min)
else:
min_time = 0
if options.max:
            max_time = time_to_epoch(options.max)
else:
max_time = 10000000000000
read_call_log(db, options.partner, min_time, max_time)
elif options.target.lower() == "contacts":
read_contacts(db)
|
{
"content_hash": "ef966a8e198d2e422a40ae087b0402f2",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 167,
"avg_line_length": 51.57831325301205,
"alnum_prop": 0.551156271899089,
"repo_name": "MonroCoury/Forensic-Tools",
"id": "bbd68c9e72a1abe2ce798241c8bcae02e548b0e4",
"size": "8584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "skype_scanner.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "6356"
},
{
"name": "Python",
"bytes": "61841"
}
],
"symlink_target": ""
}
|
#!/usr/bin/env python
# May God Bless Us All
import struct, sys, subprocess, string, re, os, json
# Constants
COMMENT_STR = "{3}\n\tProblem Name = {0}\n\tProblem Link = {1}\n\tUser = {2}\n{4}\n"
PY_COMMENT_START = "'''"
PY_COMMENT_END = "'''"
CPP_JAVA_START = "/*"
CPP_JAVA_END = "*/"
IDE = {'c' : 'C_IDE', 'cpp' : 'CPP_IDE', 'java' : 'JAVA_IDE', 'py': 'PYTHON_IDE'}
# Helper function that sends a message to the chrome-plugin.
def send_message(message):
message = '{"msg": "%s"}' % message
sys.stdout.write(struct.pack('I', len(message)))
sys.stdout.write(message)
sys.stdout.flush()
# Function that reads messages from the chrome-plugin
def read_func():
while 1:
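        # Chrome's native messaging protocol frames each message with a
        # 32-bit length prefix (native byte order) followed by UTF-8 JSON.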
text_length_bytes = sys.stdin.read(4)
if len(text_length_bytes) == 0:
sys.exit(0)
text_length = struct.unpack('i', text_length_bytes)[0]
text = sys.stdin.read(text_length).decode('utf-8')
prob = json.loads(text)
prob['problem_name'] = re.sub('[ ]+', ' ', prob['problem_name'])
filename = re.sub(' ', '_', prob['problem_name'])
filename = "DEFAULT_SOLUTION_PATH" + os.sep + filename + "." + prob['lang']
if not os.path.isfile(filename) :
if prob['lang'] == 'py':
file_content = COMMENT_STR.format(
prob['problem_name'],
prob['problem_url'],
prob['user_name'],
PY_COMMENT_START,
PY_COMMENT_END
)
else:
file_content = COMMENT_STR.format(
prob['problem_name'],
prob['problem_url'],
prob['user_name'],
CPP_JAVA_START,
CPP_JAVA_END
)
try:
file_name = '{0}_template.{0}'.format(prob['lang'])
file_dir = os.path.dirname(__file__)
with open(os.path.join(file_dir, file_name), "r") as template_file:
file_content = file_content + template_file.read()
fp = open(filename, "w")
fp.write(file_content)
fp.close()
send_message("File %s successfully created" % filename)
except Exception, e:
send_message("Unable to create file %s" % e)
try:
exit_code = subprocess.Popen([IDE[prob['lang']], filename])
send_message("Subprocess started for %s with file %s" % (IDE[prob['lang']], filename))
except Exception, e:
send_message("Unable to start Subprocess!")
if __name__ == '__main__':
read_func()
sys.exit(0)
|
{
"content_hash": "323035415dc31c7b54f13cc1f454a319",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 92,
"avg_line_length": 31.824324324324323,
"alnum_prop": 0.5834394904458599,
"repo_name": "jarifibrahim/code-now",
"id": "acabb5fa063c3721ac924fc27a177e8953a0c24d",
"size": "2355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "host-program/prog.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "122"
},
{
"name": "C++",
"bytes": "76"
},
{
"name": "JavaScript",
"bytes": "10885"
},
{
"name": "Python",
"bytes": "2355"
},
{
"name": "Shell",
"bytes": "7670"
}
],
"symlink_target": ""
}
|
import time
import json
import sys
import urllib
import urllib2
import tornado.autoreload
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
from tornado.options import define, options
# Whether the mock should report success or failure
# 1 - success
# -1 - failure
is_success = 1
# Callback URL for repayment result notifications
callBackUrl = 'http://10.3.200.6:9002'
# Server port
define('port', default=9555, help='run on the given port', type=int)
reload(sys)
sys.setdefaultencoding('utf-8')
class RepayHandler(tornado.web.RequestHandler):
orderId = ''
@tornado.gen.coroutine
    def get(self, *args, **kwargs):
        self.post(*args, **kwargs)
@tornado.gen.coroutine
def post(self, *args, **kwargs):
#body = self.request.body.replace("'", '"')
#body_args = json.loads(body[7: ])
#self.orderId = body_args['order_id']
params = self.get_argument('params')
print('params:[' + params + ']')
args = json.loads(params)
self.orderId = args['order_id']
print('order_id:[' + self.orderId + ']')
respone = {'code': 2000}
self.write(json.dumps(respone))
self.call_back()
@tornado.gen.coroutine
def call_back(self):
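        # Simulate asynchronous processing: wait briefly, then POST the mock
        # repayment result to the configured callback URL.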
yield gen.sleep(1)
request = {}
service_body = {}
if is_success == 1:
service_body['RCD'] = '0000'
            service_body['RDESC'] = '支付成功'  # "payment succeeded"
service_body['ORDERID'] = self.orderId
else:
service_body['RCD'] = '0108'
            service_body['RDESC'] = '支付失败'  # "payment failed"
service_body['ORDERID'] = self.orderId
request['serviceHeader'] = {'serviceId': 'TFCRepayresultNotice'}
request['serviceBody'] = service_body
http_request = HTTPRequest(url=callBackUrl, method='POST', body=json.dumps(request, ensure_ascii=False))
http_client = AsyncHTTPClient()
http_client.fetch(http_request)
class CollectHandler(tornado.web.RequestHandler):
    def get(self, *args, **kwargs):
        self.post(*args, **kwargs)
def post(self, *args, **kwargs):
#body = self.request.body.replace("'", '"')
#body_args = json.loads(body[7: ])
#self.orderId = body_args['repayment_id']
params = self.get_argument('params')
print('params:[' + params + ']')
args = json.loads(params)
respone = {}
result = {}
result['account_name'] = args['account_name']
result['card_num'] = args['card_num']
result['identity_id'] = args['identity_id']
result['repayment_date'] = args['repayment_date']
result['repayment_id'] = args['repayment_id']
result['transaction_amount'] = args['transaction_amount']
#result['response_code'] = '96'
#result['response_status'] = '0'
#result['status'] = 1
#result['purpose'] = '联机正常扣款'
if is_success == 1:
            result['purpose'] = '联机正常扣款'  # "online debit succeeded"
result['response_code'] = '00'
result['response_status'] = '00'
result['status'] = 1
else:
            result['purpose'] = '联机扣款失败'  # "online debit failed"
result['response_code'] = '96'
result['response_status'] = '05'
result['status'] = 2
respone['code'] = 2000
respone['result'] = result
self.write(json.dumps(respone, ensure_ascii=False))
if __name__ == '__main__':
handlers = [
(r'/repay', RepayHandler),
(r'/collect', CollectHandler),
]
webApp = tornado.web.Application(handlers)
httpServer = tornado.httpserver.HTTPServer(webApp)
httpServer.listen(options.port)
loop = tornado.ioloop.IOLoop.instance()
tornado.autoreload.start(loop)
loop.start()
|
{
"content_hash": "80057cebf25c78c4cbaaeca40038cdca",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 112,
"avg_line_length": 27.181159420289855,
"alnum_prop": 0.5851772860570514,
"repo_name": "wchming1987/Tumbler",
"id": "60de087326af1857e34cfcb293c68e6aee80cd46",
"size": "3901",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/mockServer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "28583"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from google.appengine.api import memcache, users
from google.appengine.ext import db, webapp
from google.appengine.ext.webapp import util, template
from google.appengine.ext.webapp.util import login_required
from oauth2client.appengine import CredentialsProperty, StorageByKeyName
from oauth2client.client import OAuth2WebServerFlow
import httplib2
import os
import pickle
import urllib
FLOW = OAuth2WebServerFlow(
# Visit https://code.google.com/apis/console to
# generate your client_id, client_secret and to
# register your redirect_uri.
client_id='YOUR_CLIENT_ID_HERE',
client_secret='YOUR_CLIENT_SECRET_HERE',
scope='https://www.googleapis.com/auth/fusiontables',
user_agent='tech-testzone-server/1.0')
queryURL = "https://www.google.com/fusiontables/api/query?sql=%s"
insertSQL = "INSERT INTO 2040387 (timestamp, responseX, responseY) VALUES ('%s', %s, %s)"
memcache = memcache.Client()
class Credentials(db.Model):
credentials = CredentialsProperty()
class MainHandler(webapp.RequestHandler):
@login_required
def get(self):
user = users.get_current_user()
credentials = StorageByKeyName(
Credentials, user.user_id(), 'credentials').get()
if credentials is None or credentials.invalid == True:
callback = self.request.relative_url('/oauth2callback')
authorize_url = FLOW.step1_get_authorize_url(callback)
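            # Persist the flow in memcache so OAuthHandler can complete
            # step 2 of the exchange after the redirect back.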
memcache.set("oauth-%s" % user.user_id(), pickle.dumps(FLOW))
self.redirect(authorize_url)
else:
path = os.path.join(os.path.dirname(__file__), 'form.html')
self.response.out.write(template.render(path, {}))
class OAuthHandler(webapp.RequestHandler):
@login_required
def get(self):
user = users.get_current_user()
        pickled_flow = memcache.get("oauth-%s" % user.user_id())
        flow = pickled_flow and pickle.loads(pickled_flow)
if flow:
credentials = flow.step2_exchange(self.request.params)
StorageByKeyName(
Credentials, user.user_id(), 'credentials').put(credentials)
self.redirect("/")
else:
pass
class ViewHandler(webapp.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'view.html')
self.response.out.write(template.render(path, {}))
class SubmitHandler(webapp.RequestHandler):
def get(self):
self.redirect("/")
def post(self):
user = users.get_current_user()
credentials = StorageByKeyName(
Credentials, user.user_id(), 'credentials').get()
if credentials is None or credentials.invalid == True:
self.redirect("/")
else:
http = httplib2.Http()
http = credentials.authorize(http)
x = self.request.get("x")
y = self.request.get("y")
url = insertSQL % (datetime.utcnow().isoformat(' ')[:19], x, y)
(response, content) = http.request(queryURL % urllib.quote(url), "POST")
            self.response.set_status(int(response['status']))
self.response.out.write(content)
def main():
application = webapp.WSGIApplication(
[
('/submit', SubmitHandler),
('/view', ViewHandler),
('/oauth2callback', OAuthHandler),
('/', MainHandler)
],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
|
{
"content_hash": "cba89f6ef3df18dc2be3ed19b6d4b8ba",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 89,
"avg_line_length": 30.15686274509804,
"alnum_prop": 0.7139141742522757,
"repo_name": "aadityabhatia/testzone2011-ScatterGraph",
"id": "630f6d7036344f34ba38fba6f7e75ddd6fc0719a",
"size": "3645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "154034"
}
],
"symlink_target": ""
}
|
from django.views.generic.base import TemplateView
class HomeView(TemplateView):
"""Create HomeView class."""
template_name = 'tracker/home.html'
class AboutView(TemplateView):
"""Create AboutView class."""
template_name = 'tracker/about.html'
|
{
"content_hash": "d52a1dc626163300569faafd7f73ac2e",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 50,
"avg_line_length": 24,
"alnum_prop": 0.7159090909090909,
"repo_name": "pattern-of-life/tracker",
"id": "2c189c63928a76f0fb4ecdd7c16ee94a11b483db",
"size": "264",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "tracker/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3721"
},
{
"name": "HTML",
"bytes": "18521"
},
{
"name": "Python",
"bytes": "61095"
}
],
"symlink_target": ""
}
|
import calendar
import datetime
import time
from google.appengine.api import quota
from google.appengine.api import taskqueue
from google.appengine.ext import db
import config
import delete
import model
import utils
CPU_MEGACYCLES_PER_REQUEST = 1000
EXPIRED_TTL = datetime.timedelta(delete.EXPIRED_TTL_DAYS, 0, 0)
FETCH_LIMIT = 100
class ScanForExpired(utils.BaseHandler):
"""Common logic for scanning the Person table looking for things to delete.
    The common logic handles iterating through the query, updating the expiry
    date, and wiping/deleting as needed. It sets the is_expired flag on all
    records whose expiry_date has passed. Records that expired more than
    EXPIRED_TTL in the past will also have their data fields, notes, and
    photos permanently deleted.
Subclasses set the query and task_name."""
repo_required = False
def task_name(self):
"""Subclasses should implement this."""
pass
def query(self):
"""Subclasses should implement this."""
pass
def schedule_next_task(self, query):
"""Schedule the next task for to carry on with this query.
we pass the query as a parameter to make testing easier.
"""
self.add_task_for_repo(self.repo, self.task_name(), self.ACTION,
cursor=query.cursor(), queue_name='expiry')
def get(self):
if self.repo:
query = self.query()
if self.params.cursor:
query.with_cursor(self.params.cursor)
for person in query:
if quota.get_request_cpu_usage() > CPU_MEGACYCLES_PER_REQUEST:
# Stop before running into the hard limit on CPU time per
# request, to avoid aborting in the middle of an operation.
# Add task back in, restart at current spot:
self.schedule_next_task(query)
break
was_expired = person.is_expired
person.put_expiry_flags()
if (utils.get_utcnow() - person.get_effective_expiry_date() >
EXPIRED_TTL):
person.wipe_contents()
else:
# treat this as a regular deletion.
if person.is_expired and not was_expired:
delete.delete_person(self, person)
else:
for repo in model.Repo.list():
self.add_task_for_repo(repo, self.task_name(), self.ACTION)
class DeleteExpired(ScanForExpired):
"""Scan for person records with expiry date thats past."""
ACTION = 'tasks/delete_expired'
def task_name(self):
return 'delete-expired'
def query(self):
return model.Person.past_due_records(self.repo)
class DeleteOld(ScanForExpired):
"""Scan for person records with old source dates for expiration."""
ACTION = 'tasks/delete_old'
def task_name(self):
return 'delete-old'
def query(self):
return model.Person.potentially_expired_records(self.repo)
class CleanUpInTestMode(utils.BaseHandler):
"""If the repository is in "test mode", this task deletes all entries older
than DELETION_AGE_SECONDS (defined below), regardless of their actual
expiration specification.
We delete entries quickly so that most of the test data does not persist in
real mode, and to reduce the effect of spam.
"""
repo_required = False
ACTION = 'tasks/clean_up_in_test_mode'
# Entries older than this age in seconds are deleted in test mode.
#
# If you are maintaining a single repository and switching it between test
# mode (for drills) and real mode (for real crises), you should be sure to
# switch to real mode within DELETION_AGE_SECONDS after a real crisis
# occurs, because:
# - When the crisis happens, the users may be confused and enter real
# information on the repository, even though it's still in test mode.
# (All pages show "test mode" message, but some users may be still
# confused.)
# - If we fail to make the switch in DELETION_AGE_SECONDS, such real
# entries are deleted.
# - If we make the switch in DELETION_AGE_SECONDS, such entries are not
# deleted, and handled as a part of real mode data.
DELETION_AGE_SECONDS = 6 * 3600
def task_name(self):
return 'clean-up-in-test-mode'
def schedule_next_task(self, query, utcnow):
"""Schedule the next task for to carry on with this query.
We pass the query as a parameter to make testing easier.
"""
self.add_task_for_repo(
self.repo,
self.task_name(),
self.ACTION,
utcnow=str(calendar.timegm(utcnow.utctimetuple())),
cursor=query.cursor(),
queue_name='clean_up_in_test_mode')
def in_test_mode(self, repo):
"""Returns True if the repository is in test mode."""
return config.get('test_mode', repo=repo)
def get(self):
if self.repo:
# To reuse the cursor from the previous task, we need to apply
# exactly the same filter. So we use utcnow previously used
# instead of the current time.
utcnow = self.params.utcnow or utils.get_utcnow()
max_entry_date = (
utcnow -
datetime.timedelta(
seconds=CleanUpInTestMode.DELETION_AGE_SECONDS))
query = model.Person.all_in_repo(self.repo)
query.filter('entry_date <=', max_entry_date)
if self.params.cursor:
query.with_cursor(self.params.cursor)
# Uses query.get() instead of "for person in query".
# If we use for-loop, query.cursor() points to an unexpected
# position.
person = query.get()
# When the repository is no longer in test mode, aborts the
# deletion.
while person and self.in_test_mode(self.repo):
person.delete_related_entities(delete_self=True)
if quota.get_request_cpu_usage() > CPU_MEGACYCLES_PER_REQUEST:
# Stop before running into the hard limit on CPU time per
# request, to avoid aborting in the middle of an operation.
# Add task back in, restart at current spot:
self.schedule_next_task(query, utcnow)
break
person = query.get()
else:
for repo in model.Repo.list():
if self.in_test_mode(repo):
self.add_task_for_repo(repo, self.task_name(), self.ACTION)
def run_count(make_query, update_counter, counter):
"""Scans the entities matching a query for a limited amount of CPU time."""
while quota.get_request_cpu_usage() < CPU_MEGACYCLES_PER_REQUEST:
# Get the next batch of entities.
query = make_query()
if counter.last_key:
query = query.filter('__key__ >', db.Key(counter.last_key))
entities = query.order('__key__').fetch(FETCH_LIMIT)
if not entities:
counter.last_key = ''
break
# Pass the entities to the counting function.
for entity in entities:
update_counter(counter, entity)
# Remember where we left off.
counter.last_key = str(entities[-1].key())
class CountBase(utils.BaseHandler):
"""A base handler for counting tasks. Making a request to this handler
without a specified repo will start tasks for all repositories in parallel.
Each subclass of this class handles one scan through the datastore."""
repo_required = False # can run without a repo
SCAN_NAME = '' # Each subclass should choose a unique scan_name.
ACTION = '' # Each subclass should set the action path that it handles.
def get(self):
if self.repo: # Do some counting.
counter = model.Counter.get_unfinished_or_create(
self.repo, self.SCAN_NAME)
run_count(self.make_query, self.update_counter, counter)
counter.put()
if counter.last_key: # Continue counting in another task.
self.add_task_for_repo(self.repo, self.SCAN_NAME, self.ACTION)
else: # Launch counting tasks for all repositories.
for repo in model.Repo.list():
self.add_task_for_repo(repo, self.SCAN_NAME, self.ACTION)
def make_query(self):
"""Subclasses should implement this. This will be called to get the
datastore query; it should always return the same query."""
def update_counter(self, counter, entity):
"""Subclasses should implement this. This will be called once for
each entity that matches the query; it should call increment() on
the counter object for whatever accumulators it wants to increment."""
class CountPerson(CountBase):
SCAN_NAME = 'person'
ACTION = 'tasks/count/person'
def make_query(self):
return model.Person.all().filter('repo =', self.repo)
def update_counter(self, counter, person):
found = ''
if person.latest_found is not None:
found = person.latest_found and 'TRUE' or 'FALSE'
counter.increment('all')
counter.increment('original_domain=' + (person.original_domain or ''))
counter.increment('sex=' + (person.sex or ''))
counter.increment('home_country=' + (person.home_country or ''))
counter.increment('photo=' + (person.photo_url and 'present' or ''))
counter.increment('num_notes=%d' % len(person.get_notes()))
counter.increment('status=' + (person.latest_status or ''))
counter.increment('found=' + found)
counter.increment(
'linked_persons=%d' % len(person.get_linked_persons()))
class CountNote(CountBase):
SCAN_NAME = 'note'
ACTION = 'tasks/count/note'
def make_query(self):
return model.Note.all().filter('repo =', self.repo)
def update_counter(self, counter, note):
author_made_contact = ''
if note.author_made_contact is not None:
author_made_contact = note.author_made_contact and 'TRUE' or 'FALSE'
counter.increment('all')
counter.increment('status=' + (note.status or ''))
counter.increment('original_domain=' + (note.original_domain or ''))
counter.increment('author_made_contact=' + author_made_contact)
if note.linked_person_record_id:
counter.increment('linked_person')
if note.last_known_location:
counter.increment('last_known_location')
class AddReviewedProperty(CountBase):
"""Sets 'reviewed' to False on all notes that have no 'reviewed' property.
This task is for migrating datastores that were created before the
'reviewed' property existed; 'reviewed' has to be set to False so that
the Notes will be indexed."""
SCAN_NAME = 'unreview-note'
ACTION = 'tasks/count/unreview_note'
def make_query(self):
return model.Note.all().filter('repo =', self.repo)
def update_counter(self, counter, note):
if not note.reviewed:
note.reviewed = False
note.put()
class UpdateStatus(CountBase):
"""This task looks for Person records with the status 'believed_dead',
checks for the last non-hidden Note, and updates the status if necessary.
This is designed specifically to address bogus 'believed_dead' notes that
are flagged as spam. (This is a cleanup task, not a counting task.)"""
SCAN_NAME = 'update-status'
ACTION = 'tasks/count/update_status'
def make_query(self):
return model.Person.all().filter('repo =', self.repo
).filter('latest_status =', 'believed_dead')
def update_counter(self, counter, person):
status = None
status_source_date = None
for note in person.get_notes():
if note.status and not note.hidden:
status = note.status
status_source_date = note.source_date
if status != person.latest_status:
person.latest_status = status
person.latest_status_source_date = status_source_date
person.put()
class Reindex(CountBase):
"""A handler for re-indexing Persons."""
SCAN_NAME = 'reindex'
ACTION = 'tasks/count/reindex'
def make_query(self):
return model.Person.all().filter('repo =', self.repo)
def update_counter(self, counter, person):
person.update_index(['old', 'new'])
person.put()
|
{
"content_hash": "4f6db888eb588fc839fbd9a8ed924ccb",
"timestamp": "",
"source": "github",
"line_count": 323,
"max_line_length": 80,
"avg_line_length": 39.253869969040245,
"alnum_prop": 0.6200015774114678,
"repo_name": "santoshsahoo/personfinder",
"id": "a3fb8779424f58f76266661c74df483a2c9403a3",
"size": "13277",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/tasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "877"
},
{
"name": "JavaScript",
"bytes": "15294"
},
{
"name": "Python",
"bytes": "1045151"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import os, sys, warnings
def fn_name(): return sys._getframe(1).f_code.co_name
if six.PY3:
warnings.warn(
"The gtk* backends have not been tested with Python 3.x",
ImportWarning)
try:
import gobject
import gtk; gdk = gtk.gdk
import pango
except ImportError:
raise ImportError("Gtk* backend requires pygtk to be installed.")
pygtk_version_required = (2,4,0)
if gtk.pygtk_version < pygtk_version_required:
raise ImportError ("PyGTK %d.%d.%d is installed\n"
"PyGTK %d.%d.%d or later is required"
% (gtk.pygtk_version + pygtk_version_required))
del pygtk_version_required
_new_tooltip_api = (gtk.pygtk_version[1] >= 12)
import matplotlib
from matplotlib._pylab_helpers import Gcf
from matplotlib.backend_bases import RendererBase, GraphicsContextBase, \
FigureManagerBase, FigureCanvasBase, NavigationToolbar2, cursors, TimerBase
from matplotlib.backend_bases import ShowBase
from matplotlib.backends.backend_gdk import RendererGDK, FigureCanvasGDK
from matplotlib.cbook import is_string_like, is_writable_file_like
from matplotlib.figure import Figure
from matplotlib.widgets import SubplotTool
from matplotlib.cbook import warn_deprecated
from matplotlib import (
cbook, colors as mcolors, lines, markers, rcParams, verbose)
backend_version = "%d.%d.%d" % gtk.pygtk_version
_debug = False
#_debug = True
# the true dots per inch on the screen; should be display dependent
# see http://groups.google.com/groups?q=screen+dpi+x11&hl=en&lr=&ie=UTF-8&oe=UTF-8&safe=off&selm=7077.26e81ad5%40swift.cs.tcd.ie&rnum=5 for some info about screen dpi
PIXELS_PER_INCH = 96
# Hide the benign warning that it can't stat a file that doesn't exist.
warnings.filterwarnings('ignore', '.*Unable to retrieve the file info for.*', gtk.Warning)
cursord = {
cursors.MOVE : gdk.Cursor(gdk.FLEUR),
cursors.HAND : gdk.Cursor(gdk.HAND2),
cursors.POINTER : gdk.Cursor(gdk.LEFT_PTR),
cursors.SELECT_REGION : gdk.Cursor(gdk.TCROSS),
}
# ref gtk+/gtk/gtkwidget.h
def GTK_WIDGET_DRAWABLE(w):
flags = w.flags();
return flags & gtk.VISIBLE != 0 and flags & gtk.MAPPED != 0
def draw_if_interactive():
"""
    Called after every pylab drawing command.
"""
if matplotlib.is_interactive():
figManager = Gcf.get_active()
if figManager is not None:
figManager.canvas.draw_idle()
class Show(ShowBase):
def mainloop(self):
if gtk.main_level() == 0:
gtk.main()
show = Show()
def new_figure_manager(num, *args, **kwargs):
"""
Create a new figure manager instance
"""
FigureClass = kwargs.pop('FigureClass', Figure)
thisFig = FigureClass(*args, **kwargs)
return new_figure_manager_given_figure(num, thisFig)
def new_figure_manager_given_figure(num, figure):
"""
Create a new figure manager instance for the given figure.
"""
canvas = FigureCanvasGTK(figure)
manager = FigureManagerGTK(canvas, num)
return manager
class TimerGTK(TimerBase):
'''
Subclass of :class:`backend_bases.TimerBase` that uses GTK for timer events.
Attributes:
* interval: The time between timer events in milliseconds. Default
is 1000 ms.
* single_shot: Boolean flag indicating whether this timer should
operate as single shot (run once and then stop). Defaults to False.
* callbacks: Stores list of (func, args) tuples that will be called
upon timer events. This list can be manipulated directly, or the
functions add_callback and remove_callback can be used.
'''
def _timer_start(self):
# Need to stop it, otherwise we potentially leak a timer id that will
# never be stopped.
self._timer_stop()
self._timer = gobject.timeout_add(self._interval, self._on_timer)
def _timer_stop(self):
if self._timer is not None:
gobject.source_remove(self._timer)
self._timer = None
def _timer_set_interval(self):
# Only stop and restart it if the timer has already been started
if self._timer is not None:
self._timer_stop()
self._timer_start()
def _on_timer(self):
TimerBase._on_timer(self)
# Gtk timeout_add() requires that the callback returns True if it
# is to be called again.
if len(self.callbacks) > 0 and not self._single:
return True
else:
self._timer = None
return False
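# Hedged usage sketch: TimerGTK instances are normally created through
# FigureCanvasGTK.new_timer() rather than constructed directly. Assuming a
# live `canvas` and a `line` artist (both hypothetical here), a periodic
# callback could look like:
#
#     def _blink(line):
#         line.set_visible(not line.get_visible())
#         line.figure.canvas.draw_idle()
#
#     timer = canvas.new_timer(interval=500)  # milliseconds
#     timer.add_callback(_blink, line)
#     timer.start()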
class FigureCanvasGTK (gtk.DrawingArea, FigureCanvasBase):
keyvald = {65507 : 'control',
65505 : 'shift',
65513 : 'alt',
65508 : 'control',
65506 : 'shift',
65514 : 'alt',
65361 : 'left',
65362 : 'up',
65363 : 'right',
65364 : 'down',
65307 : 'escape',
65470 : 'f1',
65471 : 'f2',
65472 : 'f3',
65473 : 'f4',
65474 : 'f5',
65475 : 'f6',
65476 : 'f7',
65477 : 'f8',
65478 : 'f9',
65479 : 'f10',
65480 : 'f11',
65481 : 'f12',
65300 : 'scroll_lock',
65299 : 'break',
65288 : 'backspace',
65293 : 'enter',
65379 : 'insert',
65535 : 'delete',
65360 : 'home',
65367 : 'end',
65365 : 'pageup',
65366 : 'pagedown',
65438 : '0',
65436 : '1',
65433 : '2',
65435 : '3',
65430 : '4',
65437 : '5',
65432 : '6',
65429 : '7',
65431 : '8',
65434 : '9',
65451 : '+',
65453 : '-',
65450 : '*',
65455 : '/',
65439 : 'dec',
65421 : 'enter',
65511 : 'super',
65512 : 'super',
65406 : 'alt',
65289 : 'tab',
}
    # Setting this as a static (class-level) constant prevents the
    # resulting expression from leaking.
event_mask = (gdk.BUTTON_PRESS_MASK |
gdk.BUTTON_RELEASE_MASK |
gdk.EXPOSURE_MASK |
gdk.KEY_PRESS_MASK |
gdk.KEY_RELEASE_MASK |
gdk.ENTER_NOTIFY_MASK |
gdk.LEAVE_NOTIFY_MASK |
gdk.POINTER_MOTION_MASK |
gdk.POINTER_MOTION_HINT_MASK)
def __init__(self, figure):
if self.__class__ == matplotlib.backends.backend_gtk.FigureCanvasGTK:
warn_deprecated('2.0', message="The GTK backend is "
"deprecated. It is untested, known to be "
"broken and will be removed in Matplotlib 2.2. "
"Use the GTKAgg backend instead. "
"See Matplotlib usage FAQ for"
" more info on backends.",
alternative="GTKAgg")
if _debug: print('FigureCanvasGTK.%s' % fn_name())
FigureCanvasBase.__init__(self, figure)
gtk.DrawingArea.__init__(self)
self._idle_draw_id = 0
self._need_redraw = True
self._pixmap_width = -1
self._pixmap_height = -1
self._lastCursor = None
self.connect('scroll_event', self.scroll_event)
self.connect('button_press_event', self.button_press_event)
self.connect('button_release_event', self.button_release_event)
self.connect('configure_event', self.configure_event)
self.connect('expose_event', self.expose_event)
self.connect('key_press_event', self.key_press_event)
self.connect('key_release_event', self.key_release_event)
self.connect('motion_notify_event', self.motion_notify_event)
self.connect('leave_notify_event', self.leave_notify_event)
self.connect('enter_notify_event', self.enter_notify_event)
self.set_events(self.__class__.event_mask)
self.set_double_buffered(False)
self.set_flags(gtk.CAN_FOCUS)
self._renderer_init()
self.last_downclick = {}
def destroy(self):
#gtk.DrawingArea.destroy(self)
self.close_event()
if self._idle_draw_id != 0:
gobject.source_remove(self._idle_draw_id)
def scroll_event(self, widget, event):
if _debug: print('FigureCanvasGTK.%s' % fn_name())
x = event.x
# flipy so y=0 is bottom of canvas
y = self.allocation.height - event.y
if event.direction==gdk.SCROLL_UP:
step = 1
else:
step = -1
FigureCanvasBase.scroll_event(self, x, y, step, guiEvent=event)
return False # finish event propagation?
def button_press_event(self, widget, event):
if _debug: print('FigureCanvasGTK.%s' % fn_name())
x = event.x
# flipy so y=0 is bottom of canvas
y = self.allocation.height - event.y
dblclick = (event.type == gdk._2BUTTON_PRESS)
if not dblclick:
# GTK is the only backend that generates a DOWN-UP-DOWN-DBLCLICK-UP event
# sequence for a double click. All other backends have a DOWN-UP-DBLCLICK-UP
# sequence. In order to provide consistency to matplotlib users, we will
# eat the extra DOWN event in the case that we detect it is part of a double
# click.
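            # Illustrative timeline (not from the source): with a 250 ms
            # double-click time, GTK delivers
            #     DOWN@0ms  UP@80ms  DOWN@160ms  DBLCLICK@160ms  UP@240ms
            # and the second DOWN (160 - 0 < 250) is the event eaten below.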
# first, get the double click time in milliseconds.
current_time = event.get_time()
last_time = self.last_downclick.get(event.button,0)
dblclick_time = gtk.settings_get_for_screen(gdk.screen_get_default()).get_property('gtk-double-click-time')
delta_time = current_time-last_time
if delta_time < dblclick_time:
del self.last_downclick[event.button] # we do not want to eat more than one event.
return False # eat.
self.last_downclick[event.button] = current_time
FigureCanvasBase.button_press_event(self, x, y, event.button, dblclick=dblclick, guiEvent=event)
return False # finish event propagation?
def button_release_event(self, widget, event):
if _debug: print('FigureCanvasGTK.%s' % fn_name())
x = event.x
# flipy so y=0 is bottom of canvas
y = self.allocation.height - event.y
FigureCanvasBase.button_release_event(self, x, y, event.button, guiEvent=event)
return False # finish event propagation?
def key_press_event(self, widget, event):
if _debug: print('FigureCanvasGTK.%s' % fn_name())
key = self._get_key(event)
if _debug: print("hit", key)
FigureCanvasBase.key_press_event(self, key, guiEvent=event)
return True # stop event propagation
def key_release_event(self, widget, event):
if _debug: print('FigureCanvasGTK.%s' % fn_name())
key = self._get_key(event)
if _debug: print("release", key)
FigureCanvasBase.key_release_event(self, key, guiEvent=event)
return True # stop event propagation
def motion_notify_event(self, widget, event):
if _debug: print('FigureCanvasGTK.%s' % fn_name())
if event.is_hint:
x, y, state = event.window.get_pointer()
else:
x, y, state = event.x, event.y, event.state
# flipy so y=0 is bottom of canvas
y = self.allocation.height - y
FigureCanvasBase.motion_notify_event(self, x, y, guiEvent=event)
return False # finish event propagation?
def leave_notify_event(self, widget, event):
FigureCanvasBase.leave_notify_event(self, event)
def enter_notify_event(self, widget, event):
x, y, state = event.window.get_pointer()
FigureCanvasBase.enter_notify_event(self, event, xy=(x, y))
def _get_key(self, event):
if event.keyval in self.keyvald:
key = self.keyvald[event.keyval]
elif event.keyval < 256:
key = chr(event.keyval)
else:
key = None
for key_mask, prefix in (
[gdk.MOD4_MASK, 'super'],
[gdk.MOD1_MASK, 'alt'],
[gdk.CONTROL_MASK, 'ctrl'], ):
if event.state & key_mask:
key = '{0}+{1}'.format(prefix, key)
return key
def configure_event(self, widget, event):
if _debug: print('FigureCanvasGTK.%s' % fn_name())
if widget.window is None:
return
w, h = event.width, event.height
if w < 3 or h < 3:
return # empty fig
# resize the figure (in inches)
dpi = self.figure.dpi
self.figure.set_size_inches(w/dpi, h/dpi, forward=False)
self._need_redraw = True
return False # finish event propagation?
def draw(self):
# Note: FigureCanvasBase.draw() is inconveniently named as it clashes
# with the deprecated gtk.Widget.draw()
self._need_redraw = True
if GTK_WIDGET_DRAWABLE(self):
self.queue_draw()
            # do a synchronous draw (it's less efficient than an async
            # draw, but is required if/when animation is used)
self.window.process_updates (False)
def draw_idle(self):
if self._idle_draw_id != 0:
return
def idle_draw(*args):
try:
self.draw()
finally:
self._idle_draw_id = 0
return False
self._idle_draw_id = gobject.idle_add(idle_draw)
def _renderer_init(self):
"""Override by GTK backends to select a different renderer
Renderer should provide the methods:
set_pixmap ()
set_width_height ()
that are used by
_render_figure() / _pixmap_prepare()
"""
self._renderer = RendererGDK (self, self.figure.dpi)
def _pixmap_prepare(self, width, height):
"""
        Make sure self._pixmap is at least width x height;
        create a new pixmap if necessary.
"""
if _debug: print('FigureCanvasGTK.%s' % fn_name())
create_pixmap = False
if width > self._pixmap_width:
# increase the pixmap in 10%+ (rather than 1 pixel) steps
self._pixmap_width = max (int (self._pixmap_width * 1.1),
width)
create_pixmap = True
if height > self._pixmap_height:
self._pixmap_height = max (int (self._pixmap_height * 1.1),
height)
create_pixmap = True
if create_pixmap:
self._pixmap = gdk.Pixmap (self.window, self._pixmap_width,
self._pixmap_height)
self._renderer.set_pixmap (self._pixmap)
def _render_figure(self, pixmap, width, height):
"""used by GTK and GTKcairo. GTKAgg overrides
"""
self._renderer.set_width_height (width, height)
self.figure.draw (self._renderer)
def expose_event(self, widget, event):
"""Expose_event for all GTK backends. Should not be overridden.
"""
if _debug: print('FigureCanvasGTK.%s' % fn_name())
if GTK_WIDGET_DRAWABLE(self):
if self._need_redraw:
x, y, w, h = self.allocation
self._pixmap_prepare (w, h)
self._render_figure(self._pixmap, w, h)
self._need_redraw = False
x, y, w, h = event.area
self.window.draw_drawable (self.style.fg_gc[self.state],
self._pixmap, x, y, x, y, w, h)
return False # finish event propagation?
filetypes = FigureCanvasBase.filetypes.copy()
filetypes['jpg'] = 'JPEG'
filetypes['jpeg'] = 'JPEG'
filetypes['png'] = 'Portable Network Graphics'
def print_jpeg(self, filename, *args, **kwargs):
return self._print_image(filename, 'jpeg')
print_jpg = print_jpeg
def print_png(self, filename, *args, **kwargs):
return self._print_image(filename, 'png')
def _print_image(self, filename, format, *args, **kwargs):
if self.flags() & gtk.REALIZED == 0:
# for self.window(for pixmap) and has a side effect of altering
# figure width,height (via configure-event?)
gtk.DrawingArea.realize(self)
width, height = self.get_width_height()
pixmap = gdk.Pixmap (self.window, width, height)
self._renderer.set_pixmap (pixmap)
self._render_figure(pixmap, width, height)
# jpg colors don't match the display very well, png colors match
# better
pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, 0, 8, width, height)
pixbuf.get_from_drawable(pixmap, pixmap.get_colormap(),
0, 0, 0, 0, width, height)
# set the default quality, if we are writing a JPEG.
# http://www.pygtk.org/docs/pygtk/class-gdkpixbuf.html#method-gdkpixbuf--save
options = cbook.restrict_dict(kwargs, ['quality'])
if format in ['jpg','jpeg']:
if 'quality' not in options:
options['quality'] = rcParams['savefig.jpeg_quality']
options['quality'] = str(options['quality'])
if is_string_like(filename):
try:
pixbuf.save(filename, format, options=options)
except gobject.GError as exc:
error_msg_gtk('Save figure failure:\n%s' % (exc,), parent=self)
elif is_writable_file_like(filename):
if hasattr(pixbuf, 'save_to_callback'):
def save_callback(buf, data=None):
data.write(buf)
try:
pixbuf.save_to_callback(save_callback, format, user_data=filename, options=options)
except gobject.GError as exc:
error_msg_gtk('Save figure failure:\n%s' % (exc,), parent=self)
else:
raise ValueError("Saving to a Python file-like object is only supported by PyGTK >= 2.8")
else:
raise ValueError("filename must be a path or a file-like object")
def new_timer(self, *args, **kwargs):
"""
Creates a new backend-specific subclass of :class:`backend_bases.Timer`.
This is useful for getting periodic events through the backend's native
event loop. Implemented only for backends with GUIs.
optional arguments:
*interval*
Timer interval in milliseconds
*callbacks*
Sequence of (func, args, kwargs) where func(*args, **kwargs) will
be executed by the timer every *interval*.
"""
return TimerGTK(*args, **kwargs)
def flush_events(self):
gtk.gdk.threads_enter()
while gtk.events_pending():
gtk.main_iteration(True)
gtk.gdk.flush()
gtk.gdk.threads_leave()
def start_event_loop(self,timeout):
FigureCanvasBase.start_event_loop_default(self,timeout)
start_event_loop.__doc__=FigureCanvasBase.start_event_loop_default.__doc__
def stop_event_loop(self):
FigureCanvasBase.stop_event_loop_default(self)
stop_event_loop.__doc__=FigureCanvasBase.stop_event_loop_default.__doc__
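# A standalone sketch of the amortized growth policy used by
# _pixmap_prepare() above: grow the backing pixmap in >= 10% steps so that
# repeated one-pixel resizes do not allocate a new pixmap every time. The
# helper name is hypothetical; it assumes the same
# max(int(current * 1.1), requested) rule as the method.
def _sketch_pixmap_size(current, requested):
    if requested <= current:
        return current  # still fits; keep the existing pixmap
    return max(int(current * 1.1), requested)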
class FigureManagerGTK(FigureManagerBase):
"""
Public attributes
canvas : The FigureCanvas instance
num : The Figure number
toolbar : The gtk.Toolbar (gtk only)
vbox : The gtk.VBox containing the canvas and toolbar (gtk only)
window : The gtk.Window (gtk only)
"""
def __init__(self, canvas, num):
if _debug: print('FigureManagerGTK.%s' % fn_name())
FigureManagerBase.__init__(self, canvas, num)
self.window = gtk.Window()
self.set_window_title("Figure %d" % num)
if (window_icon):
try:
self.window.set_icon_from_file(window_icon)
            except:
                # some versions of gtk throw a glib.GError but not
                # all, so I am not sure how to catch it.  I am unhappy
                # doing a blanket catch here, but am not sure what a
                # better way is - JDH
verbose.report('Could not load matplotlib icon: %s' % sys.exc_info()[1])
self.vbox = gtk.VBox()
self.window.add(self.vbox)
self.vbox.show()
self.canvas.show()
self.vbox.pack_start(self.canvas, True, True)
self.toolbar = self._get_toolbar(canvas)
# calculate size for window
w = int (self.canvas.figure.bbox.width)
h = int (self.canvas.figure.bbox.height)
if self.toolbar is not None:
self.toolbar.show()
self.vbox.pack_end(self.toolbar, False, False)
tb_w, tb_h = self.toolbar.size_request()
h += tb_h
self.window.set_default_size (w, h)
def destroy(*args):
Gcf.destroy(num)
self.window.connect("destroy", destroy)
self.window.connect("delete_event", destroy)
if matplotlib.is_interactive():
self.window.show()
self.canvas.draw_idle()
def notify_axes_change(fig):
'this will be called whenever the current axes is changed'
if self.toolbar is not None: self.toolbar.update()
self.canvas.figure.add_axobserver(notify_axes_change)
self.canvas.grab_focus()
def destroy(self, *args):
if _debug: print('FigureManagerGTK.%s' % fn_name())
if hasattr(self, 'toolbar') and self.toolbar is not None:
self.toolbar.destroy()
if hasattr(self, 'vbox'):
self.vbox.destroy()
if hasattr(self, 'window'):
self.window.destroy()
if hasattr(self, 'canvas'):
self.canvas.destroy()
self.__dict__.clear() #Is this needed? Other backends don't have it.
if Gcf.get_num_fig_managers()==0 and \
not matplotlib.is_interactive() and \
gtk.main_level() >= 1:
gtk.main_quit()
def show(self):
# show the figure window
self.window.show()
def full_screen_toggle(self):
self._full_screen_flag = not self._full_screen_flag
if self._full_screen_flag:
self.window.fullscreen()
else:
self.window.unfullscreen()
_full_screen_flag = False
def _get_toolbar(self, canvas):
# must be inited after the window, drawingArea and figure
# attrs are set
if rcParams['toolbar'] == 'toolbar2':
toolbar = NavigationToolbar2GTK (canvas, self.window)
else:
toolbar = None
return toolbar
def get_window_title(self):
return self.window.get_title()
def set_window_title(self, title):
self.window.set_title(title)
def resize(self, width, height):
'set the canvas size in pixels'
#_, _, cw, ch = self.canvas.allocation
#_, _, ww, wh = self.window.allocation
#self.window.resize (width-cw+ww, height-ch+wh)
self.window.resize(width, height)
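# Hedged usage sketch: a FigureManagerGTK is normally obtained through
# pyplot or new_figure_manager() rather than constructed by hand:
#
#     manager = new_figure_manager(1, figsize=(4, 3))
#     manager.set_window_title('demo')
#     manager.show()   # maps the gtk.Window
#     gtk.main()       # hand control to the GTK main loop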
class NavigationToolbar2GTK(NavigationToolbar2, gtk.Toolbar):
def __init__(self, canvas, window):
self.win = window
gtk.Toolbar.__init__(self)
NavigationToolbar2.__init__(self, canvas)
def set_message(self, s):
self.message.set_label(s)
def set_cursor(self, cursor):
self.canvas.window.set_cursor(cursord[cursor])
def release(self, event):
try: del self._pixmapBack
except AttributeError: pass
def dynamic_update(self):
# legacy method; new method is canvas.draw_idle
self.canvas.draw_idle()
def draw_rubberband(self, event, x0, y0, x1, y1):
'adapted from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/189744'
drawable = self.canvas.window
if drawable is None:
return
gc = drawable.new_gc()
height = self.canvas.figure.bbox.height
y1 = height - y1
y0 = height - y0
w = abs(x1 - x0)
h = abs(y1 - y0)
        rect = [int(val) for val in (min(x0, x1), min(y0, y1), w, h)]
try:
lastrect, pixmapBack = self._pixmapBack
except AttributeError:
#snap image back
if event.inaxes is None:
return
ax = event.inaxes
l,b,w,h = [int(val) for val in ax.bbox.bounds]
b = int(height)-(b+h)
axrect = l,b,w,h
self._pixmapBack = axrect, gtk.gdk.Pixmap(drawable, w, h)
self._pixmapBack[1].draw_drawable(gc, drawable, l, b, 0, 0, w, h)
else:
drawable.draw_drawable(gc, pixmapBack, 0, 0, *lastrect)
drawable.draw_rectangle(gc, False, *rect)
def _init_toolbar(self):
self.set_style(gtk.TOOLBAR_ICONS)
self._init_toolbar2_4()
def _init_toolbar2_4(self):
basedir = os.path.join(rcParams['datapath'],'images')
if not _new_tooltip_api:
self.tooltips = gtk.Tooltips()
for text, tooltip_text, image_file, callback in self.toolitems:
if text is None:
self.insert( gtk.SeparatorToolItem(), -1 )
continue
fname = os.path.join(basedir, image_file + '.png')
image = gtk.Image()
image.set_from_file(fname)
tbutton = gtk.ToolButton(image, text)
self.insert(tbutton, -1)
tbutton.connect('clicked', getattr(self, callback))
if _new_tooltip_api:
tbutton.set_tooltip_text(tooltip_text)
else:
tbutton.set_tooltip(self.tooltips, tooltip_text, 'Private')
toolitem = gtk.SeparatorToolItem()
self.insert(toolitem, -1)
        # set_draw() does not make the separator invisible;
        # bug #143692 was fixed Jun 06 2004 and will be in GTK+ 2.6
toolitem.set_draw(False)
toolitem.set_expand(True)
toolitem = gtk.ToolItem()
self.insert(toolitem, -1)
self.message = gtk.Label()
toolitem.add(self.message)
self.show_all()
def get_filechooser(self):
fc = FileChooserDialog(
title='Save the figure',
parent=self.win,
path=os.path.expanduser(rcParams.get('savefig.directory', '')),
filetypes=self.canvas.get_supported_filetypes(),
default_filetype=self.canvas.get_default_filetype())
fc.set_current_name(self.canvas.get_default_filename())
return fc
def save_figure(self, *args):
chooser = self.get_filechooser()
fname, format = chooser.get_filename_from_user()
chooser.destroy()
if fname:
startpath = os.path.expanduser(rcParams.get('savefig.directory', ''))
if startpath == '':
# explicitly missing key or empty str signals to use cwd
rcParams['savefig.directory'] = startpath
else:
# save dir for next time
rcParams['savefig.directory'] = os.path.dirname(six.text_type(fname))
try:
self.canvas.print_figure(fname, format=format)
except Exception as e:
error_msg_gtk(str(e), parent=self)
def configure_subplots(self, button):
toolfig = Figure(figsize=(6,3))
canvas = self._get_canvas(toolfig)
toolfig.subplots_adjust(top=0.9)
tool = SubplotTool(self.canvas.figure, toolfig)
w = int (toolfig.bbox.width)
h = int (toolfig.bbox.height)
window = gtk.Window()
if (window_icon):
try: window.set_icon_from_file(window_icon)
except:
# we presumably already logged a message on the
# failure of the main plot, don't keep reporting
pass
window.set_title("Subplot Configuration Tool")
window.set_default_size(w, h)
vbox = gtk.VBox()
window.add(vbox)
vbox.show()
canvas.show()
vbox.pack_start(canvas, True, True)
window.show()
def _get_canvas(self, fig):
return FigureCanvasGTK(fig)
class FileChooserDialog(gtk.FileChooserDialog):
"""GTK+ 2.4 file selector which presents the user with a menu
of supported image formats
"""
def __init__ (self,
title = 'Save file',
parent = None,
action = gtk.FILE_CHOOSER_ACTION_SAVE,
buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_SAVE, gtk.RESPONSE_OK),
path = None,
filetypes = [],
default_filetype = None
):
super(FileChooserDialog, self).__init__ (title, parent, action,
buttons)
super(FileChooserDialog, self).set_do_overwrite_confirmation(True)
self.set_default_response (gtk.RESPONSE_OK)
if not path: path = os.getcwd() + os.sep
# create an extra widget to list supported image formats
self.set_current_folder (path)
self.set_current_name ('image.' + default_filetype)
hbox = gtk.HBox (spacing=10)
hbox.pack_start (gtk.Label ("File Format:"), expand=False)
liststore = gtk.ListStore(gobject.TYPE_STRING)
cbox = gtk.ComboBox(liststore)
cell = gtk.CellRendererText()
cbox.pack_start(cell, True)
cbox.add_attribute(cell, 'text', 0)
hbox.pack_start (cbox)
self.filetypes = filetypes
self.sorted_filetypes = list(six.iteritems(filetypes))
self.sorted_filetypes.sort()
default = 0
for i, (ext, name) in enumerate(self.sorted_filetypes):
cbox.append_text ("%s (*.%s)" % (name, ext))
if ext == default_filetype:
default = i
cbox.set_active(default)
self.ext = default_filetype
def cb_cbox_changed (cbox, data=None):
"""File extension changed"""
head, filename = os.path.split(self.get_filename())
root, ext = os.path.splitext(filename)
ext = ext[1:]
new_ext = self.sorted_filetypes[cbox.get_active()][0]
self.ext = new_ext
if ext in self.filetypes:
filename = root + '.' + new_ext
elif ext == '':
filename = filename.rstrip('.') + '.' + new_ext
self.set_current_name (filename)
cbox.connect ("changed", cb_cbox_changed)
hbox.show_all()
self.set_extra_widget(hbox)
def get_filename_from_user (self):
while True:
filename = None
if self.run() != int(gtk.RESPONSE_OK):
break
filename = self.get_filename()
break
return filename, self.ext
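# Hedged usage sketch for FileChooserDialog (assumes a parent gtk.Window;
# `filetypes` maps extension -> description, as in
# FigureCanvasBase.filetypes):
#
#     fc = FileChooserDialog(title='Save the figure',
#                            parent=manager.window,
#                            filetypes={'png': 'PNG', 'jpg': 'JPEG'},
#                            default_filetype='png')
#     fname, ext = fc.get_filename_from_user()
#     fc.destroy()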
class DialogLineprops(object):
"""
A GUI dialog for controlling lineprops
"""
signals = (
'on_combobox_lineprops_changed',
'on_combobox_linestyle_changed',
'on_combobox_marker_changed',
'on_colorbutton_linestyle_color_set',
'on_colorbutton_markerface_color_set',
'on_dialog_lineprops_okbutton_clicked',
'on_dialog_lineprops_cancelbutton_clicked',
)
linestyles = [ls for ls in lines.Line2D.lineStyles if ls.strip()]
linestyled = dict([ (s,i) for i,s in enumerate(linestyles)])
markers = [m for m in markers.MarkerStyle.markers if cbook.is_string_like(m)]
markerd = dict([(s,i) for i,s in enumerate(markers)])
def __init__(self, lines):
import gtk.glade
datadir = matplotlib.get_data_path()
gladefile = os.path.join(datadir, 'lineprops.glade')
if not os.path.exists(gladefile):
raise IOError('Could not find gladefile lineprops.glade in %s'%datadir)
self._inited = False
self._updateson = True # suppress updates when setting widgets manually
self.wtree = gtk.glade.XML(gladefile, 'dialog_lineprops')
self.wtree.signal_autoconnect(dict([(s, getattr(self, s)) for s in self.signals]))
self.dlg = self.wtree.get_widget('dialog_lineprops')
self.lines = lines
cbox = self.wtree.get_widget('combobox_lineprops')
cbox.set_active(0)
self.cbox_lineprops = cbox
cbox = self.wtree.get_widget('combobox_linestyles')
for ls in self.linestyles:
cbox.append_text(ls)
cbox.set_active(0)
self.cbox_linestyles = cbox
cbox = self.wtree.get_widget('combobox_markers')
for m in self.markers:
cbox.append_text(m)
cbox.set_active(0)
self.cbox_markers = cbox
self._lastcnt = 0
self._inited = True
def show(self):
'populate the combo box'
self._updateson = False
# flush the old
cbox = self.cbox_lineprops
for i in range(self._lastcnt-1,-1,-1):
cbox.remove_text(i)
# add the new
for line in self.lines:
cbox.append_text(line.get_label())
cbox.set_active(0)
self._updateson = True
self._lastcnt = len(self.lines)
self.dlg.show()
def get_active_line(self):
'get the active line'
ind = self.cbox_lineprops.get_active()
line = self.lines[ind]
return line
def get_active_linestyle(self):
        'get the active linestyle'
ind = self.cbox_linestyles.get_active()
ls = self.linestyles[ind]
return ls
def get_active_marker(self):
        'get the active marker'
ind = self.cbox_markers.get_active()
m = self.markers[ind]
return m
def _update(self):
'update the active line props from the widgets'
if not self._inited or not self._updateson: return
line = self.get_active_line()
ls = self.get_active_linestyle()
marker = self.get_active_marker()
line.set_linestyle(ls)
line.set_marker(marker)
button = self.wtree.get_widget('colorbutton_linestyle')
color = button.get_color()
r, g, b = [val/65535. for val in (color.red, color.green, color.blue)]
line.set_color((r,g,b))
button = self.wtree.get_widget('colorbutton_markerface')
color = button.get_color()
r, g, b = [val/65535. for val in (color.red, color.green, color.blue)]
line.set_markerfacecolor((r,g,b))
line.figure.canvas.draw()
def on_combobox_lineprops_changed(self, item):
'update the widgets from the active line'
if not self._inited: return
self._updateson = False
line = self.get_active_line()
ls = line.get_linestyle()
if ls is None: ls = 'None'
self.cbox_linestyles.set_active(self.linestyled[ls])
marker = line.get_marker()
if marker is None: marker = 'None'
self.cbox_markers.set_active(self.markerd[marker])
rgba = mcolors.to_rgba(line.get_color())
color = gtk.gdk.Color(*[int(val*65535) for val in rgba[:3]])
button = self.wtree.get_widget('colorbutton_linestyle')
button.set_color(color)
rgba = mcolors.to_rgba(line.get_markerfacecolor())
color = gtk.gdk.Color(*[int(val*65535) for val in rgba[:3]])
button = self.wtree.get_widget('colorbutton_markerface')
button.set_color(color)
self._updateson = True
def on_combobox_linestyle_changed(self, item):
self._update()
def on_combobox_marker_changed(self, item):
self._update()
def on_colorbutton_linestyle_color_set(self, button):
self._update()
def on_colorbutton_markerface_color_set(self, button):
'called colorbutton marker clicked'
self._update()
def on_dialog_lineprops_okbutton_clicked(self, button):
self._update()
self.dlg.hide()
def on_dialog_lineprops_cancelbutton_clicked(self, button):
self.dlg.hide()
# set icon used when windows are minimized
# Unfortunately, the SVG renderer (rsvg) leaks memory under earlier
# versions of pygtk, so we have to use a PNG file instead.
try:
if gtk.pygtk_version < (2, 8, 0) or sys.platform == 'win32':
icon_filename = 'matplotlib.png'
else:
icon_filename = 'matplotlib.svg'
window_icon = os.path.join(rcParams['datapath'], 'images', icon_filename)
except:
window_icon = None
verbose.report('Could not load matplotlib icon: %s' % sys.exc_info()[1])
def error_msg_gtk(msg, parent=None):
if parent is not None: # find the toplevel gtk.Window
parent = parent.get_toplevel()
if parent.flags() & gtk.TOPLEVEL == 0:
parent = None
if not is_string_like(msg):
msg = ','.join(map(str,msg))
dialog = gtk.MessageDialog(
parent = parent,
type = gtk.MESSAGE_ERROR,
buttons = gtk.BUTTONS_OK,
message_format = msg)
dialog.run()
dialog.destroy()
FigureCanvas = FigureCanvasGTK
FigureManager = FigureManagerGTK
|
{
"content_hash": "6867cda4fbb2503432a7ef8cc43b6b18",
"timestamp": "",
"source": "github",
"line_count": 1075,
"max_line_length": 166,
"avg_line_length": 35.11906976744186,
"alnum_prop": 0.5730935289910736,
"repo_name": "andyraib/data-storage",
"id": "83b5c4a5cbaceaa8924531330096f36923eb30c9",
"size": "37753",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "python_scripts/env/lib/python3.6/site-packages/matplotlib/backends/backend_gtk.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12403"
}
],
"symlink_target": ""
}
|
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.5.1-pre.0
Contact: blah@cliffano.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import openapi_client
from openapi_client.models.github_organization import GithubOrganization # noqa: E501
from openapi_client.rest import ApiException
class TestGithubOrganization(unittest.TestCase):
"""GithubOrganization unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test GithubOrganization
        include_optional is a boolean; when False only required
        params are included, when True both required and
        optional params are included """
# model = openapi_client.models.github_organization.GithubOrganization() # noqa: E501
if include_optional :
return GithubOrganization(
_class = '',
links = openapi_client.models.github_organizationlinks.GithubOrganizationlinks(
repositories = openapi_client.models.link.Link(
_class = '',
href = '', ),
self = openapi_client.models.link.Link(
_class = '',
href = '', ),
_class = '', ),
jenkins_organization_pipeline = True,
name = ''
)
else :
return GithubOrganization(
)
def testGithubOrganization(self):
"""Test GithubOrganization"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "483014afc97de5fae1c55ec1c12e98b1",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 95,
"avg_line_length": 32.233333333333334,
"alnum_prop": 0.594105480868666,
"repo_name": "cliffano/swaggy-jenkins",
"id": "dd793cd757165991a5d74657ef31ce0b49a883f3",
"size": "1951",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "clients/python-legacy/generated/test/test_github_organization.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "569823"
},
{
"name": "Apex",
"bytes": "741346"
},
{
"name": "Batchfile",
"bytes": "14792"
},
{
"name": "C",
"bytes": "971274"
},
{
"name": "C#",
"bytes": "5131336"
},
{
"name": "C++",
"bytes": "7799032"
},
{
"name": "CMake",
"bytes": "20609"
},
{
"name": "CSS",
"bytes": "4873"
},
{
"name": "Clojure",
"bytes": "129018"
},
{
"name": "Crystal",
"bytes": "864941"
},
{
"name": "Dart",
"bytes": "876777"
},
{
"name": "Dockerfile",
"bytes": "7385"
},
{
"name": "Eiffel",
"bytes": "424642"
},
{
"name": "Elixir",
"bytes": "139252"
},
{
"name": "Elm",
"bytes": "187067"
},
{
"name": "Emacs Lisp",
"bytes": "191"
},
{
"name": "Erlang",
"bytes": "373074"
},
{
"name": "F#",
"bytes": "556012"
},
{
"name": "Gherkin",
"bytes": "951"
},
{
"name": "Go",
"bytes": "345227"
},
{
"name": "Groovy",
"bytes": "89524"
},
{
"name": "HTML",
"bytes": "2367424"
},
{
"name": "Haskell",
"bytes": "680841"
},
{
"name": "Java",
"bytes": "12164874"
},
{
"name": "JavaScript",
"bytes": "1959006"
},
{
"name": "Kotlin",
"bytes": "1280953"
},
{
"name": "Lua",
"bytes": "322316"
},
{
"name": "Makefile",
"bytes": "11882"
},
{
"name": "Nim",
"bytes": "65818"
},
{
"name": "OCaml",
"bytes": "94665"
},
{
"name": "Objective-C",
"bytes": "464903"
},
{
"name": "PHP",
"bytes": "4383673"
},
{
"name": "Perl",
"bytes": "743304"
},
{
"name": "PowerShell",
"bytes": "678274"
},
{
"name": "Python",
"bytes": "5529523"
},
{
"name": "QMake",
"bytes": "6915"
},
{
"name": "R",
"bytes": "840841"
},
{
"name": "Raku",
"bytes": "10945"
},
{
"name": "Ruby",
"bytes": "328360"
},
{
"name": "Rust",
"bytes": "1735375"
},
{
"name": "Scala",
"bytes": "1387368"
},
{
"name": "Shell",
"bytes": "407167"
},
{
"name": "Swift",
"bytes": "342562"
},
{
"name": "TypeScript",
"bytes": "3060093"
}
],
"symlink_target": ""
}
|
from datetime import timedelta
import numpy as np
import warnings
import copy
from textwrap import dedent
import pandas as pd
from pandas.core.base import AbstractMethodError, GroupByMixin
from pandas.core.groupby import (BinGrouper, Grouper, _GroupBy, GroupBy,
SeriesGroupBy, groupby, PanelGroupBy)
from pandas.tseries.frequencies import to_offset, is_subperiod, is_superperiod
from pandas.core.indexes.datetimes import DatetimeIndex, date_range
from pandas.core.indexes.timedeltas import TimedeltaIndex
from pandas.tseries.offsets import DateOffset, Tick, Day, delta_to_nanoseconds
from pandas.core.indexes.period import PeriodIndex
import pandas.core.common as com
import pandas.core.algorithms as algos
from pandas.core.dtypes.generic import ABCDataFrame, ABCSeries
import pandas.compat as compat
from pandas.compat.numpy import function as nv
from pandas._libs import lib, tslib
from pandas._libs.lib import Timestamp
from pandas._libs.period import IncompatibleFrequency
from pandas.util._decorators import Appender
from pandas.core.generic import _shared_docs
_shared_docs_kwargs = dict()
class Resampler(_GroupBy):
"""
Class for resampling datetimelike data, a groupby-like operation.
See aggregate, transform, and apply functions on this object.
It's easiest to use obj.resample(...) to use Resampler.
Parameters
----------
obj : pandas object
groupby : a TimeGrouper object
axis : int, default 0
kind : str or None
        'period', 'timestamp' to override default index treatment
Notes
-----
After resampling, see aggregate, apply, and transform functions.
Returns
-------
a Resampler of the appropriate type
"""
    # attributes delegated to the underlying groupby (TimeGrouper) object
_attributes = ['freq', 'axis', 'closed', 'label', 'convention',
'loffset', 'base', 'kind']
# API compat of allowed attributes
_deprecated_valids = _attributes + ['__doc__', '_cache', '_attributes',
'binner', 'grouper', 'groupby',
'sort', 'kind', 'squeeze', 'keys',
'group_keys', 'as_index', 'exclusions',
'_groupby']
# don't raise deprecation warning on attributes starting with these
# patterns - prevents warnings caused by IPython introspection
_deprecated_valid_patterns = ['_ipython', '_repr']
# API compat of disallowed attributes
_deprecated_invalids = ['iloc', 'loc', 'ix', 'iat', 'at']
def __init__(self, obj, groupby=None, axis=0, kind=None, **kwargs):
self.groupby = groupby
self.keys = None
self.sort = True
self.axis = axis
self.kind = kind
self.squeeze = False
self.group_keys = True
self.as_index = True
self.exclusions = set()
self.binner = None
self.grouper = None
if self.groupby is not None:
self.groupby._set_grouper(self._convert_obj(obj), sort=True)
def __unicode__(self):
""" provide a nice str repr of our rolling object """
attrs = ["{k}={v}".format(k=k, v=getattr(self.groupby, k))
for k in self._attributes if
getattr(self.groupby, k, None) is not None]
return "{klass} [{attrs}]".format(klass=self.__class__.__name__,
attrs=', '.join(attrs))
@property
def obj(self):
return self.groupby.obj
@property
def ax(self):
return self.groupby.ax
@property
def _typ(self):
""" masquerade for compat as a Series or a DataFrame """
if isinstance(self._selected_obj, pd.Series):
return 'series'
return 'dataframe'
@property
def _from_selection(self):
""" is the resampling from a DataFrame column or MultiIndex level """
# upsampling and PeriodIndex resampling do not work
# with selection, this state used to catch and raise an error
return (self.groupby is not None and
(self.groupby.key is not None or
self.groupby.level is not None))
def _deprecated(self, op):
warnings.warn(("\n.resample() is now a deferred operation\n"
"You called {op}(...) on this deferred object "
"which materialized it into a {klass}\nby implicitly "
"taking the mean. Use .resample(...).mean() "
"instead").format(op=op, klass=self._typ),
FutureWarning, stacklevel=3)
return self.mean()
def _make_deprecated_binop(op):
# op is a string
def _evaluate_numeric_binop(self, other):
result = self._deprecated(op)
return getattr(result, op)(other)
return _evaluate_numeric_binop
def _make_deprecated_unary(op, name):
# op is a callable
def _evaluate_numeric_unary(self):
result = self._deprecated(name)
return op(result)
return _evaluate_numeric_unary
def __array__(self):
return self._deprecated('__array__').__array__()
__gt__ = _make_deprecated_binop('__gt__')
__ge__ = _make_deprecated_binop('__ge__')
__lt__ = _make_deprecated_binop('__lt__')
__le__ = _make_deprecated_binop('__le__')
__eq__ = _make_deprecated_binop('__eq__')
__ne__ = _make_deprecated_binop('__ne__')
__add__ = __radd__ = _make_deprecated_binop('__add__')
__sub__ = __rsub__ = _make_deprecated_binop('__sub__')
__mul__ = __rmul__ = _make_deprecated_binop('__mul__')
__floordiv__ = __rfloordiv__ = _make_deprecated_binop('__floordiv__')
__truediv__ = __rtruediv__ = _make_deprecated_binop('__truediv__')
if not compat.PY3:
__div__ = __rdiv__ = _make_deprecated_binop('__div__')
__neg__ = _make_deprecated_unary(lambda x: -x, '__neg__')
__pos__ = _make_deprecated_unary(lambda x: x, '__pos__')
__abs__ = _make_deprecated_unary(lambda x: np.abs(x), '__abs__')
__inv__ = _make_deprecated_unary(lambda x: -x, '__inv__')
def __getattr__(self, attr):
if attr in self._internal_names_set:
return object.__getattribute__(self, attr)
if attr in self._attributes:
return getattr(self.groupby, attr)
if attr in self.obj:
return self[attr]
if attr in self._deprecated_invalids:
raise ValueError(".resample() is now a deferred operation\n"
"\tuse .resample(...).mean() instead of "
".resample(...)")
matches_pattern = any(attr.startswith(x) for x
in self._deprecated_valid_patterns)
if not matches_pattern and attr not in self._deprecated_valids:
# avoid the warning, if it's just going to be an exception
# anyway.
if not hasattr(self.obj, attr):
raise AttributeError("'{}' has no attribute '{}'".format(
type(self.obj).__name__, attr
))
self = self._deprecated(attr)
return object.__getattribute__(self, attr)
def __setattr__(self, attr, value):
if attr not in self._deprecated_valids:
raise ValueError("cannot set values on {0}".format(
self.__class__.__name__))
object.__setattr__(self, attr, value)
def __getitem__(self, key):
try:
return super(Resampler, self).__getitem__(key)
except (KeyError, com.AbstractMethodError):
# compat for deprecated
if isinstance(self.obj, com.ABCSeries):
return self._deprecated('__getitem__')[key]
raise
def __setitem__(self, attr, value):
raise ValueError("cannot set items on {0}".format(
self.__class__.__name__))
def _convert_obj(self, obj):
"""
        provide any conversions for the object in order to correctly handle resampling
Parameters
----------
obj : the object to be resampled
Returns
-------
obj : converted object
"""
obj = obj._consolidate()
return obj
def _get_binner_for_time(self):
raise AbstractMethodError(self)
def _set_binner(self):
"""
setup our binners
cache these as we are an immutable object
"""
if self.binner is None:
self.binner, self.grouper = self._get_binner()
def _get_binner(self):
"""
create the BinGrouper, assume that self.set_grouper(obj)
has already been called
"""
binner, bins, binlabels = self._get_binner_for_time()
bin_grouper = BinGrouper(bins, binlabels, indexer=self.groupby.indexer)
return binner, bin_grouper
def _assure_grouper(self):
""" make sure that we are creating our binner & grouper """
self._set_binner()
def plot(self, *args, **kwargs):
# for compat with prior versions, we want to
# have the warnings shown here and just have this work
return self._deprecated('plot').plot(*args, **kwargs)
_agg_doc = dedent("""
Examples
--------
>>> s = Series([1,2,3,4,5],
index=pd.date_range('20130101',
periods=5,freq='s'))
2013-01-01 00:00:00 1
2013-01-01 00:00:01 2
2013-01-01 00:00:02 3
2013-01-01 00:00:03 4
2013-01-01 00:00:04 5
Freq: S, dtype: int64
>>> r = s.resample('2s')
DatetimeIndexResampler [freq=<2 * Seconds>, axis=0, closed=left,
label=left, convention=start, base=0]
>>> r.agg(np.sum)
2013-01-01 00:00:00 3
2013-01-01 00:00:02 7
2013-01-01 00:00:04 5
Freq: 2S, dtype: int64
>>> r.agg(['sum','mean','max'])
sum mean max
2013-01-01 00:00:00 3 1.5 2
2013-01-01 00:00:02 7 3.5 4
2013-01-01 00:00:04 5 5.0 5
>>> r.agg({'result' : lambda x: x.mean() / x.std(),
'total' : np.sum})
total result
2013-01-01 00:00:00 3 2.121320
2013-01-01 00:00:02 7 4.949747
2013-01-01 00:00:04 5 NaN
See also
--------
pandas.DataFrame.groupby.aggregate
pandas.DataFrame.resample.transform
pandas.DataFrame.aggregate
""")
@Appender(_agg_doc)
@Appender(_shared_docs['aggregate'] % dict(
klass='DataFrame',
versionadded=''))
def aggregate(self, arg, *args, **kwargs):
self._set_binner()
result, how = self._aggregate(arg, *args, **kwargs)
if result is None:
result = self._groupby_and_aggregate(arg,
*args,
**kwargs)
result = self._apply_loffset(result)
return result
agg = aggregate
apply = aggregate
def transform(self, arg, *args, **kwargs):
"""
Call function producing a like-indexed Series on each group and return
a Series with the transformed values
Parameters
----------
func : function
To apply to each group. Should return a Series with the same index
Examples
--------
>>> resampled.transform(lambda x: (x - x.mean()) / x.std())
Returns
-------
transformed : Series
"""
return self._selected_obj.groupby(self.groupby).transform(
arg, *args, **kwargs)
def _downsample(self, f):
raise AbstractMethodError(self)
def _upsample(self, f, limit=None, fill_value=None):
raise AbstractMethodError(self)
def _gotitem(self, key, ndim, subset=None):
"""
sub-classes to define
return a sliced object
Parameters
----------
key : string / list of selections
ndim : 1,2
requested ndim of result
subset : object, default None
subset to act on
"""
self._set_binner()
grouper = self.grouper
if subset is None:
subset = self.obj
grouped = groupby(subset, by=None, grouper=grouper, axis=self.axis)
# try the key selection
try:
return grouped[key]
except KeyError:
return grouped
def _groupby_and_aggregate(self, how, grouper=None, *args, **kwargs):
""" re-evaluate the obj with a groupby aggregation """
if grouper is None:
self._set_binner()
grouper = self.grouper
obj = self._selected_obj
try:
grouped = groupby(obj, by=None, grouper=grouper, axis=self.axis)
except TypeError:
# panel grouper
grouped = PanelGroupBy(obj, grouper=grouper, axis=self.axis)
try:
if isinstance(obj, ABCDataFrame) and compat.callable(how):
# Check if the function is reducing or not.
result = grouped._aggregate_item_by_item(how, *args, **kwargs)
else:
result = grouped.aggregate(how, *args, **kwargs)
except Exception:
# we have a non-reducing function
# try to evaluate
result = grouped.apply(how, *args, **kwargs)
result = self._apply_loffset(result)
return self._wrap_result(result)
def _apply_loffset(self, result):
"""
if loffset is set, offset the result index
This is NOT an idempotent routine, it will be applied
exactly once to the result.
Parameters
----------
result : Series or DataFrame
the result of resample
"""
needs_offset = (
isinstance(self.loffset, (DateOffset, timedelta)) and
isinstance(result.index, DatetimeIndex) and
len(result.index) > 0
)
if needs_offset:
result.index = result.index + self.loffset
self.loffset = None
return result
def _get_resampler_for_grouping(self, groupby, **kwargs):
""" return the correct class for resampling with groupby """
return self._resampler_for_grouping(self, groupby=groupby, **kwargs)
def _wrap_result(self, result):
""" potentially wrap any results """
if isinstance(result, com.ABCSeries) and self._selection is not None:
result.name = self._selection
if isinstance(result, ABCSeries) and result.empty:
obj = self.obj
result.index = obj.index._shallow_copy(freq=to_offset(self.freq))
result.name = getattr(obj, 'name', None)
return result
def pad(self, limit=None):
"""
Forward fill the values
Parameters
----------
limit : integer, optional
limit of how many values to fill
Returns
-------
an upsampled Series
See Also
--------
Series.fillna
DataFrame.fillna
"""
return self._upsample('pad', limit=limit)
ffill = pad
def nearest(self, limit=None):
"""
Fill values with nearest neighbor starting from center
Parameters
----------
limit : integer, optional
limit of how many values to fill
.. versionadded:: 0.21.0
Returns
-------
an upsampled Series
See Also
--------
Series.fillna
DataFrame.fillna
"""
return self._upsample('nearest', limit=limit)
def backfill(self, limit=None):
"""
Backward fill the values
Parameters
----------
limit : integer, optional
limit of how many values to fill
Returns
-------
an upsampled Series
See Also
--------
Series.fillna
DataFrame.fillna
"""
return self._upsample('backfill', limit=limit)
bfill = backfill
def fillna(self, method, limit=None):
"""
Fill missing values
Parameters
----------
method : str, method of resampling ('ffill', 'bfill')
limit : integer, optional
limit of how many values to fill
See Also
--------
Series.fillna
DataFrame.fillna
"""
return self._upsample(method, limit=limit)
@Appender(_shared_docs['interpolate'] % _shared_docs_kwargs)
def interpolate(self, method='linear', axis=0, limit=None, inplace=False,
limit_direction='forward', downcast=None, **kwargs):
"""
Interpolate values according to different methods.
.. versionadded:: 0.18.1
"""
result = self._upsample(None)
return result.interpolate(method=method, axis=axis, limit=limit,
inplace=inplace,
limit_direction=limit_direction,
downcast=downcast, **kwargs)
def asfreq(self, fill_value=None):
"""
return the values at the new freq,
essentially a reindex
Parameters
----------
fill_value: scalar, optional
Value to use for missing values, applied during upsampling (note
this does not fill NaNs that already were present).
.. versionadded:: 0.20.0
See Also
--------
Series.asfreq
DataFrame.asfreq
"""
return self._upsample('asfreq', fill_value=fill_value)
def std(self, ddof=1, *args, **kwargs):
"""
Compute standard deviation of groups, excluding missing values
Parameters
----------
ddof : integer, default 1
degrees of freedom
"""
nv.validate_resampler_func('std', args, kwargs)
return self._downsample('std', ddof=ddof)
def var(self, ddof=1, *args, **kwargs):
"""
Compute variance of groups, excluding missing values
Parameters
----------
ddof : integer, default 1
degrees of freedom
"""
nv.validate_resampler_func('var', args, kwargs)
return self._downsample('var', ddof=ddof)
@Appender(GroupBy.size.__doc__)
def size(self):
# It's a special case as higher level does return
# a copy of 0-len objects. GH14962
result = self._downsample('size')
if not len(self.ax) and isinstance(self._selected_obj, ABCDataFrame):
result = pd.Series([], index=result.index, dtype='int64')
return result
Resampler._deprecated_valids += dir(Resampler)
# downsample methods
for method in ['min', 'max', 'first', 'last', 'sum', 'mean', 'sem',
'median', 'prod', 'ohlc']:
def f(self, _method=method, *args, **kwargs):
nv.validate_resampler_func(_method, args, kwargs)
return self._downsample(_method)
f.__doc__ = getattr(GroupBy, method).__doc__
setattr(Resampler, method, f)
# groupby & aggregate methods
for method in ['count']:
def f(self, _method=method):
return self._downsample(_method)
f.__doc__ = getattr(GroupBy, method).__doc__
setattr(Resampler, method, f)
# series only methods
for method in ['nunique']:
def f(self, _method=method):
return self._downsample(_method)
f.__doc__ = getattr(SeriesGroupBy, method).__doc__
setattr(Resampler, method, f)
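# The `_method=method` default argument in the loops above is deliberate:
# it freezes the loop variable at function-definition time. A minimal,
# self-contained illustration of the late-binding pitfall it avoids (the
# helper name is hypothetical):
def _sketch_default_arg_binding():
    late, early = [], []
    for name in ['a', 'b']:
        late.append(lambda: name)               # closes over the variable
        early.append(lambda _name=name: _name)  # captures the current value
    # All `late` closures see the final value of `name`; the default
    # argument in `early` snapshots each iteration.
    return [f() for f in late], [f() for f in early]
# _sketch_default_arg_binding() -> (['b', 'b'], ['a', 'b'])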
def _maybe_process_deprecations(r, how=None, fill_method=None, limit=None):
""" potentially we might have a deprecation warning, show it
but call the appropriate methods anyhow """
if how is not None:
# .resample(..., how='sum')
if isinstance(how, compat.string_types):
method = "{0}()".format(how)
# .resample(..., how=lambda x: ....)
else:
method = ".apply(<func>)"
# if we have both a how and fill_method, then show
# the following warning
if fill_method is None:
warnings.warn("how in .resample() is deprecated\n"
"the new syntax is "
".resample(...).{method}".format(
method=method),
FutureWarning, stacklevel=3)
r = r.aggregate(how)
if fill_method is not None:
# show the prior function call
method = '.' + method if how is not None else ''
args = "limit={0}".format(limit) if limit is not None else ""
warnings.warn("fill_method is deprecated to .resample()\n"
"the new syntax is .resample(...){method}"
".{fill_method}({args})".format(
method=method,
fill_method=fill_method,
args=args),
FutureWarning, stacklevel=3)
if how is not None:
r = getattr(r, fill_method)(limit=limit)
else:
r = r.aggregate(fill_method, limit=limit)
return r
class _GroupByMixin(GroupByMixin):
""" provide the groupby facilities """
def __init__(self, obj, *args, **kwargs):
parent = kwargs.pop('parent', None)
groupby = kwargs.pop('groupby', None)
if parent is None:
parent = obj
# initialize our GroupByMixin object with
# the resampler attributes
for attr in self._attributes:
setattr(self, attr, kwargs.get(attr, getattr(parent, attr)))
super(_GroupByMixin, self).__init__(None)
self._groupby = groupby
self._groupby.mutated = True
self._groupby.grouper.mutated = True
self.groupby = copy.copy(parent.groupby)
def _apply(self, f, **kwargs):
"""
dispatch to _upsample; we are stripping all of the _upsample kwargs and
performing the original function call on the grouped object
"""
def func(x):
x = self._shallow_copy(x, groupby=self.groupby)
if isinstance(f, compat.string_types):
return getattr(x, f)(**kwargs)
return x.apply(f, **kwargs)
result = self._groupby.apply(func)
return self._wrap_result(result)
_upsample = _apply
_downsample = _apply
_groupby_and_aggregate = _apply
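# Hedged usage sketch: the _GroupByMixin path is exercised by a groupby
# followed by a resample, e.g. (assuming a DataFrame `df` with a
# DatetimeIndex and a 'key' column):
#
#     df.groupby('key').resample('D').mean()
#
# Each method name ('mean' here) is routed through _apply() above, which
# re-wraps every group in a Resampler before dispatching the call.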
class DatetimeIndexResampler(Resampler):
@property
def _resampler_for_grouping(self):
return DatetimeIndexResamplerGroupby
def _get_binner_for_time(self):
# this is how we are actually creating the bins
if self.kind == 'period':
return self.groupby._get_time_period_bins(self.ax)
return self.groupby._get_time_bins(self.ax)
def _downsample(self, how, **kwargs):
"""
Downsample the cython defined function
Parameters
----------
how : string / cython mapped function
**kwargs : kw args passed to how function
"""
self._set_binner()
how = self._is_cython_func(how) or how
ax = self.ax
obj = self._selected_obj
if not len(ax):
# reset to the new freq
obj = obj.copy()
obj.index.freq = self.freq
return obj
# do we have a regular frequency
if ax.freq is not None or ax.inferred_freq is not None:
if len(self.grouper.binlabels) > len(ax) and how is None:
# let's do an asfreq
return self.asfreq()
# we are downsampling
# we want to call the actual grouper method here
result = obj.groupby(
self.grouper, axis=self.axis).aggregate(how, **kwargs)
result = self._apply_loffset(result)
return self._wrap_result(result)
def _adjust_binner_for_upsample(self, binner):
""" adjust our binner when upsampling """
if self.closed == 'right':
binner = binner[1:]
else:
binner = binner[:-1]
return binner
def _upsample(self, method, limit=None, fill_value=None):
"""
method : string {'backfill', 'bfill', 'pad',
'ffill', 'asfreq'} method for upsampling
limit : int, default None
Maximum size gap to fill when reindexing
fill_value : scalar, default None
Value to use for missing values
See also
--------
.fillna
"""
self._set_binner()
if self.axis:
raise AssertionError('axis must be 0')
if self._from_selection:
raise ValueError("Upsampling from level= or on= selection"
" is not supported, use .set_index(...)"
" to explicitly set index to"
" datetime-like")
ax = self.ax
obj = self._selected_obj
binner = self.binner
res_index = self._adjust_binner_for_upsample(binner)
# if we have the same frequency as our axis, then we are equal sampling
if limit is None and to_offset(ax.inferred_freq) == self.freq:
result = obj.copy()
result.index = res_index
else:
result = obj.reindex(res_index, method=method,
limit=limit, fill_value=fill_value)
return self._wrap_result(result)
def _wrap_result(self, result):
result = super(DatetimeIndexResampler, self)._wrap_result(result)
# we may have a different kind that we were asked originally
# convert if needed
if self.kind == 'period' and not isinstance(result.index, PeriodIndex):
result.index = result.index.to_period(self.freq)
return result
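# Hedged usage sketch for the two paths above (assuming `import numpy as np`
# and `import pandas as pd`):
#
#     s = pd.Series(np.arange(5),
#                   index=pd.date_range('2013-01-01', periods=5, freq='s'))
#     s.resample('2s').sum()        # _downsample: groupby-aggregate path
#     s.resample('500ms').ffill()   # _upsample: reindex with method='pad'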
class DatetimeIndexResamplerGroupby(_GroupByMixin, DatetimeIndexResampler):
"""
Provides a resample of a groupby implementation
.. versionadded:: 0.18.1
"""
@property
def _constructor(self):
return DatetimeIndexResampler
class PeriodIndexResampler(DatetimeIndexResampler):
@property
def _resampler_for_grouping(self):
return PeriodIndexResamplerGroupby
def _get_binner_for_time(self):
if self.kind == 'timestamp':
return super(PeriodIndexResampler, self)._get_binner_for_time()
return self.groupby._get_period_bins(self.ax)
def _convert_obj(self, obj):
obj = super(PeriodIndexResampler, self)._convert_obj(obj)
if self._from_selection:
# see GH 14008, GH 12871
msg = ("Resampling from level= or on= selection"
" with a PeriodIndex is not currently supported,"
" use .set_index(...) to explicitly set index")
raise NotImplementedError(msg)
if self.loffset is not None:
# Cannot apply loffset/timedelta to PeriodIndex -> convert to
# timestamps
self.kind = 'timestamp'
# convert to timestamp
if self.kind == 'timestamp':
obj = obj.to_timestamp(how=self.convention)
return obj
def _downsample(self, how, **kwargs):
"""
Downsample using the given (possibly cython-mapped) aggregation function.
Parameters
----------
how : string / cython mapped function
**kwargs : kw args passed to how function
"""
# we may need to actually resample as if we are timestamps
if self.kind == 'timestamp':
return super(PeriodIndexResampler, self)._downsample(how, **kwargs)
how = self._is_cython_func(how) or how
ax = self.ax
if is_subperiod(ax.freq, self.freq):
# Downsampling
return self._groupby_and_aggregate(how, grouper=self.grouper)
elif is_superperiod(ax.freq, self.freq):
if how == 'ohlc':
# GH #13083
# upsampling to subperiods is handled as an asfreq, which works
# for pure aggregating/reducing methods
# OHLC reduces along the time dimension, but creates multiple
# values for each period -> handle by _groupby_and_aggregate()
return self._groupby_and_aggregate(how, grouper=self.grouper)
return self.asfreq()
elif ax.freq == self.freq:
return self.asfreq()
raise IncompatibleFrequency(
'Frequency {} cannot be resampled to {}, as they are not '
'sub or super periods'.format(ax.freq, self.freq))
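# Illustrative note (added for exposition): for a PeriodIndex at monthly
# frequency, resampling to 'Q' takes the is_subperiod branch above and
# aggregates, while resampling to 'D' takes the is_superperiod branch and
# is handled as an asfreq (except for 'ohlc', per GH #13083).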
def _upsample(self, method, limit=None, fill_value=None):
"""
method : string {'backfill', 'bfill', 'pad', 'ffill'}
method for upsampling
limit : int, default None
Maximum size gap to fill when reindexing
fill_value : scalar, default None
Value to use for missing values
See also
--------
.fillna
"""
# we may need to actually resample as if we are timestamps
if self.kind == 'timestamp':
return super(PeriodIndexResampler, self)._upsample(
method, limit=limit, fill_value=fill_value)
self._set_binner()
ax = self.ax
obj = self.obj
new_index = self.binner
# Start vs. end of period
memb = ax.asfreq(self.freq, how=self.convention)
# Get the fill indexer
indexer = memb.get_indexer(new_index, method=method, limit=limit)
return self._wrap_result(_take_new_index(
obj, indexer, new_index, axis=self.axis))
class PeriodIndexResamplerGroupby(_GroupByMixin, PeriodIndexResampler):
"""
Provides a resample of a groupby implementation
.. versionadded:: 0.18.1
"""
@property
def _constructor(self):
return PeriodIndexResampler
class TimedeltaIndexResampler(DatetimeIndexResampler):
@property
def _resampler_for_grouping(self):
return TimedeltaIndexResamplerGroupby
def _get_binner_for_time(self):
return self.groupby._get_time_delta_bins(self.ax)
def _adjust_binner_for_upsample(self, binner):
""" adjust our binner when upsampling """
ax = self.ax
if is_subperiod(ax.freq, self.freq):
# We are actually downsampling
# but are in the asfreq path
# GH 12926
if self.closed == 'right':
binner = binner[1:]
else:
binner = binner[:-1]
return binner
class TimedeltaIndexResamplerGroupby(_GroupByMixin, TimedeltaIndexResampler):
"""
Provides a resample of a groupby implementation
.. versionadded:: 0.18.1
"""
@property
def _constructor(self):
return TimedeltaIndexResampler
def resample(obj, kind=None, **kwds):
""" create a TimeGrouper and return our resampler """
tg = TimeGrouper(**kwds)
return tg._get_resampler(obj, kind=kind)
resample.__doc__ = Resampler.__doc__
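# Illustrative sketch (added for exposition; users normally call the
# .resample() method rather than this module-level helper). Assumes a
# Series with a DatetimeIndex:
#
# >>> import pandas as pd
# >>> s = pd.Series(range(4),
# ...               index=pd.date_range('2017-01-01', periods=4, freq='H'))
# >>> r = resample(s, freq='2H')   # builds a TimeGrouper(freq='2H')
# >>> type(r).__name__             # dispatched on the index type
# 'DatetimeIndexResampler'
# >>> r.sum()                      # downsamples via _downsample('sum')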
def get_resampler_for_grouping(groupby, rule, how=None, fill_method=None,
limit=None, kind=None, **kwargs):
""" return our appropriate resampler when grouping as well """
# .resample uses 'on' similar to how .groupby uses 'key'
kwargs['key'] = kwargs.pop('on', None)
tg = TimeGrouper(freq=rule, **kwargs)
resampler = tg._get_resampler(groupby.obj, kind=kind)
r = resampler._get_resampler_for_grouping(groupby=groupby)
return _maybe_process_deprecations(r,
how=how,
fill_method=fill_method,
limit=limit)
class TimeGrouper(Grouper):
"""
Custom groupby class for time-interval grouping
Parameters
----------
freq : pandas date offset or offset alias for identifying bin edges
closed : closed end of interval; 'left' or 'right'
label : interval boundary to use for labeling; 'left' or 'right'
convention : {'start', 'end', 'e', 's'}
If axis is PeriodIndex
"""
_attributes = Grouper._attributes + ('closed', 'label', 'how',
'loffset', 'kind', 'convention',
'base')
def __init__(self, freq='Min', closed=None, label=None, how='mean',
axis=0, fill_method=None, limit=None, loffset=None,
kind=None, convention=None, base=0, **kwargs):
freq = to_offset(freq)
end_types = set(['M', 'A', 'Q', 'BM', 'BA', 'BQ', 'W'])
rule = freq.rule_code
if (rule in end_types or
('-' in rule and rule[:rule.find('-')] in end_types)):
if closed is None:
closed = 'right'
if label is None:
label = 'right'
else:
if closed is None:
closed = 'left'
if label is None:
label = 'left'
self.closed = closed
self.label = label
self.kind = kind
self.convention = convention or 'E'
self.convention = self.convention.lower()
if isinstance(loffset, compat.string_types):
loffset = to_offset(loffset)
self.loffset = loffset
self.how = how
self.fill_method = fill_method
self.limit = limit
self.base = base
# always sort time groupers
kwargs['sort'] = True
super(TimeGrouper, self).__init__(freq=freq, axis=axis, **kwargs)
def _get_resampler(self, obj, kind=None):
"""
return my resampler or raise if we have an invalid axis
Parameters
----------
obj : input object
kind : string, optional
'period','timestamp','timedelta' are valid
Returns
-------
a Resampler
Raises
------
TypeError if incompatible axis
"""
self._set_grouper(obj)
ax = self.ax
if isinstance(ax, DatetimeIndex):
return DatetimeIndexResampler(obj,
groupby=self,
kind=kind,
axis=self.axis)
elif isinstance(ax, PeriodIndex) or kind == 'period':
return PeriodIndexResampler(obj,
groupby=self,
kind=kind,
axis=self.axis)
elif isinstance(ax, TimedeltaIndex):
return TimedeltaIndexResampler(obj,
groupby=self,
axis=self.axis)
raise TypeError("Only valid with DatetimeIndex, "
"TimedeltaIndex or PeriodIndex, "
"but got an instance of %r" % type(ax).__name__)
def _get_grouper(self, obj, validate=True):
# create the resampler and return our binner
r = self._get_resampler(obj)
r._set_binner()
return r.binner, r.grouper, r.obj
def _get_time_bins(self, ax):
if not isinstance(ax, DatetimeIndex):
raise TypeError('axis must be a DatetimeIndex, but got '
'an instance of %r' % type(ax).__name__)
if len(ax) == 0:
binner = labels = DatetimeIndex(
data=[], freq=self.freq, name=ax.name)
return binner, [], labels
first, last = ax.min(), ax.max()
first, last = _get_range_edges(first, last, self.freq,
closed=self.closed,
base=self.base)
tz = ax.tz
# GH #12037
# use first/last directly instead of call replace() on them
# because replace() will swallow the nanosecond part
# thus the last bin may be slightly before the end if the end contains a
# nanosecond part, leading to a `Values falls after last bin` error
binner = labels = DatetimeIndex(freq=self.freq,
start=first,
end=last,
tz=tz,
name=ax.name)
# GH 15549
# In the edge case of tz-aware resampling, the binner's last index can be
# less than the last timestamp in the data object; this happens because of
# a DST transition
if len(binner) > 1 and binner[-1] < last:
extra_date_range = pd.date_range(binner[-1], last + self.freq,
freq=self.freq, tz=tz,
name=ax.name)
binner = labels = binner.append(extra_date_range[1:])
# a little hack
trimmed = False
if (len(binner) > 2 and binner[-2] == last and
self.closed == 'right'):
binner = binner[:-1]
trimmed = True
ax_values = ax.asi8
binner, bin_edges = self._adjust_bin_edges(binner, ax_values)
# general version, knowing nothing about relative frequencies
bins = lib.generate_bins_dt64(
ax_values, bin_edges, self.closed, hasnans=ax.hasnans)
if self.closed == 'right':
labels = binner
if self.label == 'right':
labels = labels[1:]
elif not trimmed:
labels = labels[:-1]
else:
if self.label == 'right':
labels = labels[1:]
elif not trimmed:
labels = labels[:-1]
if ax.hasnans:
binner = binner.insert(0, tslib.NaT)
labels = labels.insert(0, tslib.NaT)
# if we end up with more labels than bins
# adjust the labels
# GH4076
if len(bins) < len(labels):
labels = labels[:len(bins)]
return binner, bins, labels
def _adjust_bin_edges(self, binner, ax_values):
# Some hacks for > daily data, see #1471, #1458, #1483
bin_edges = binner.asi8
if self.freq != 'D' and is_superperiod(self.freq, 'D'):
day_nanos = delta_to_nanoseconds(timedelta(1))
if self.closed == 'right':
bin_edges = bin_edges + day_nanos - 1
# intraday values on last day
if bin_edges[-2] > ax_values.max():
bin_edges = bin_edges[:-1]
binner = binner[:-1]
return binner, bin_edges
def _get_time_delta_bins(self, ax):
if not isinstance(ax, TimedeltaIndex):
raise TypeError('axis must be a TimedeltaIndex, but got '
'an instance of %r' % type(ax).__name__)
if not len(ax):
binner = labels = TimedeltaIndex(
data=[], freq=self.freq, name=ax.name)
return binner, [], labels
start = ax[0]
end = ax[-1]
labels = binner = TimedeltaIndex(start=start,
end=end,
freq=self.freq,
name=ax.name)
end_stamps = labels + 1
bins = ax.searchsorted(end_stamps, side='left')
# Addresses GH #10530
if self.base > 0:
labels += type(self.freq)(self.base)
return binner, bins, labels
def _get_time_period_bins(self, ax):
if not isinstance(ax, DatetimeIndex):
raise TypeError('axis must be a DatetimeIndex, but got '
'an instance of %r' % type(ax).__name__)
if not len(ax):
binner = labels = PeriodIndex(
data=[], freq=self.freq, name=ax.name)
return binner, [], labels
labels = binner = PeriodIndex(start=ax[0],
end=ax[-1],
freq=self.freq,
name=ax.name)
end_stamps = (labels + 1).asfreq(self.freq, 's').to_timestamp()
if ax.tzinfo:
end_stamps = end_stamps.tz_localize(ax.tzinfo)
bins = ax.searchsorted(end_stamps, side='left')
return binner, bins, labels
def _get_period_bins(self, ax):
if not isinstance(ax, PeriodIndex):
raise TypeError('axis must be a PeriodIndex, but got '
'an instance of %r' % type(ax).__name__)
memb = ax.asfreq(self.freq, how=self.convention)
# NaT handling as in pandas._lib.lib.generate_bins_dt64()
nat_count = 0
if memb.hasnans:
nat_count = np.sum(memb._isnan)
memb = memb[~memb._isnan]
# if index contains no valid (non-NaT) values, return empty index
if not len(memb):
binner = labels = PeriodIndex(
data=[], freq=self.freq, name=ax.name)
return binner, [], labels
start = ax.min().asfreq(self.freq, how=self.convention)
end = ax.max().asfreq(self.freq, how='end')
labels = binner = PeriodIndex(start=start, end=end,
freq=self.freq, name=ax.name)
i8 = memb.asi8
freq_mult = self.freq.n
# when upsampling to subperiods, we need to generate enough bins
expected_bins_count = len(binner) * freq_mult
i8_extend = expected_bins_count - (i8[-1] - i8[0])
rng = np.arange(i8[0], i8[-1] + i8_extend, freq_mult)
rng += freq_mult
bins = memb.searchsorted(rng, side='left')
if nat_count > 0:
# NaT handling as in pandas._lib.lib.generate_bins_dt64()
# shift bins by the number of NaT
bins += nat_count
bins = np.insert(bins, 0, nat_count)
binner = binner.insert(0, tslib.NaT)
labels = labels.insert(0, tslib.NaT)
return binner, bins, labels
def _take_new_index(obj, indexer, new_index, axis=0):
from pandas.core.api import Series, DataFrame
if isinstance(obj, Series):
new_values = algos.take_1d(obj.values, indexer)
return Series(new_values, index=new_index, name=obj.name)
elif isinstance(obj, DataFrame):
if axis == 1:
raise NotImplementedError("axis 1 is not supported")
return DataFrame(obj._data.reindex_indexer(
new_axis=new_index, indexer=indexer, axis=1))
else:
raise ValueError("'obj' should be either a Series or a DataFrame")
def _get_range_edges(first, last, offset, closed='left', base=0):
if isinstance(offset, compat.string_types):
offset = to_offset(offset)
if isinstance(offset, Tick):
is_day = isinstance(offset, Day)
day_nanos = delta_to_nanoseconds(timedelta(1))
# #1165
if (is_day and day_nanos % offset.nanos == 0) or not is_day:
return _adjust_dates_anchored(first, last, offset,
closed=closed, base=base)
if not isinstance(offset, Tick): # and first.time() != last.time():
# hack!
first = first.normalize()
last = last.normalize()
if closed == 'left':
first = Timestamp(offset.rollback(first))
else:
first = Timestamp(first - offset)
last = Timestamp(last + offset)
return first, last
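# Illustrative example (added): for a non-Tick offset the edges are first
# normalized and then snapped to the offset; e.g. with offset=MonthEnd(),
# closed='left', first=2017-01-15 and last=2017-02-03:
#
#   first -> MonthEnd().rollback(2017-01-15) -> 2016-12-31
#   last  -> 2017-02-03 + MonthEnd()         -> 2017-02-28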
def _adjust_dates_anchored(first, last, offset, closed='right', base=0):
# First and last offsets should be calculated from the start day to fix an
# error caused by resampling across multiple days when a one-day period is
# not a multiple of the frequency.
#
# See https://github.com/pandas-dev/pandas/issues/8683
# 14682 - Since we need to drop the TZ information to perform
# the adjustment in the presence of a DST change,
# save TZ Info and the DST state of the first and last parameters
# so that we can accurately rebuild them at the end.
first_tzinfo = first.tzinfo
last_tzinfo = last.tzinfo
first_dst = bool(first.dst())
last_dst = bool(last.dst())
first = first.tz_localize(None)
last = last.tz_localize(None)
start_day_nanos = first.normalize().value
base_nanos = (base % offset.n) * offset.nanos // offset.n
start_day_nanos += base_nanos
foffset = (first.value - start_day_nanos) % offset.nanos
loffset = (last.value - start_day_nanos) % offset.nanos
if closed == 'right':
if foffset > 0:
# roll back
fresult = first.value - foffset
else:
fresult = first.value - offset.nanos
if loffset > 0:
# roll forward
lresult = last.value + (offset.nanos - loffset)
else:
# already the end of the road
lresult = last.value
else: # closed == 'left'
if foffset > 0:
fresult = first.value - foffset
else:
# start of the road
fresult = first.value
if loffset > 0:
# roll forward
lresult = last.value + (offset.nanos - loffset)
else:
lresult = last.value + offset.nanos
return (Timestamp(fresult).tz_localize(first_tzinfo, ambiguous=first_dst),
Timestamp(lresult).tz_localize(last_tzinfo, ambiguous=last_dst))
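# Illustrative worked example (added): with a 4-hour Tick offset, base=0 and
# closed='left', first=09:30 and last=10:00 on the same day give
#
#   foffset = 9.5h % 4h = 1.5h  ->  fresult = 09:30 - 1.5h = 08:00
#   loffset = 10h  % 4h = 2h    ->  lresult = 10:00 + (4h - 2h) = 12:00
#
# so the bins stay anchored to the start of the day instead of drifting
# with first's time of day.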
def asfreq(obj, freq, method=None, how=None, normalize=False, fill_value=None):
"""
Utility frequency conversion method for Series/DataFrame
"""
if isinstance(obj.index, PeriodIndex):
if method is not None:
raise NotImplementedError("'method' argument is not supported")
if how is None:
how = 'E'
new_obj = obj.copy()
new_obj.index = obj.index.asfreq(freq, how=how)
elif len(obj.index) == 0:
new_obj = obj.copy()
new_obj.index = obj.index._shallow_copy(freq=to_offset(freq))
else:
dti = date_range(obj.index[0], obj.index[-1], freq=freq)
dti.name = obj.index.name
new_obj = obj.reindex(dti, method=method, fill_value=fill_value)
if normalize:
new_obj.index = new_obj.index.normalize()
return new_obj
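# Illustrative doctest-style sketch (added for exposition):
#
# >>> import pandas as pd
# >>> s = pd.Series([1, 2],
# ...               index=pd.date_range('2017-01-01', periods=2, freq='2D'))
# >>> asfreq(s, 'D')                # reindex onto a daily range; the
# ...                               # missing day becomes NaN
# >>> asfreq(s, 'D', fill_value=0)  # ... or is filled with 0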
|
{
"content_hash": "bd863f7646d5f75f85b74d8f87c4cb2f",
"timestamp": "",
"source": "github",
"line_count": 1424,
"max_line_length": 79,
"avg_line_length": 32.51755617977528,
"alnum_prop": 0.5518410538818702,
"repo_name": "winklerand/pandas",
"id": "bd441a8248841f2ed9c6bfb8615d22fd602bfd32",
"size": "46305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/core/resample.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4071"
},
{
"name": "C",
"bytes": "493226"
},
{
"name": "C++",
"bytes": "17353"
},
{
"name": "HTML",
"bytes": "551706"
},
{
"name": "Makefile",
"bytes": "907"
},
{
"name": "PowerShell",
"bytes": "2972"
},
{
"name": "Python",
"bytes": "12249109"
},
{
"name": "R",
"bytes": "1177"
},
{
"name": "Shell",
"bytes": "23114"
},
{
"name": "Smarty",
"bytes": "2045"
}
],
"symlink_target": ""
}
|
import os

import kbr.file_utils as file_utils
def to_list(value) -> list:
if isinstance(value, list):
return value
return [value]
def readin_if_file(name:str) -> str:
if os.path.isfile(name):
name = file_utils.read(name)
return name
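# Illustrative usage (added; the path below is hypothetical and assumes
# kbr.file_utils.read returns the file's contents as a string):
#
# >>> to_list('a')
# ['a']
# >>> to_list(['a', 'b'])
# ['a', 'b']
# >>> readin_if_file('/tmp/query.sql')  # the file's contents if the path
#                                       # exists, otherwise the string itself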
|
{
"content_hash": "546d6d07a309e52286921be0434d4479",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 36,
"avg_line_length": 18.785714285714285,
"alnum_prop": 0.623574144486692,
"repo_name": "brugger/kbr-tools",
"id": "50b7ceb3a2a4ca997ea83dde410252673f4caba8",
"size": "263",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kbr/misc_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "120520"
},
{
"name": "TypeScript",
"bytes": "105"
}
],
"symlink_target": ""
}
|
"""
This script asks CDash for a summary of all of the failing tests
in the Nightly Expected section. It presents the tests ranked by the
number of machines they fail on. From this view you can more easily see
what is in the greatest need of fixing.
"""
import sys
import time
import datetime
import urllib
# Process args
csvOutput = True
wikiOutput = False
dashDate = str(datetime.date.today())
argc = len(sys.argv)
while argc > 1:
argc = argc - 1
if sys.argv[argc] == "--csv":
csvOutput = True
wikiOutput = False
elif sys.argv[argc] == "--wiki":
wikiOutput = True
csvOutput = False
if wikiOutput:
print "==Dashboard for " + dashDate + "=="
url = 'http://open.cdash.org/api/?method=build&task=sitetestfailures&project=VTK&group=Nightly%20Expected'
page = urllib.urlopen(url)
data = page.readlines()
if len(data[0]) == 2: #"[]"
print "Cdash returned nothing useful, please try again later."
raise SystemExit
submissions = eval(data[0])
tfails = dict()
if csvOutput:
print "-"*20, "ANALYZING", "-"*20
elif wikiOutput:
print "===Builds for " + dashDate + "==="
print r'{| class="wikitable sortable" border="1" cellpadding="5" cellspacing="0"'
print r'|-'
print r'| Build Name'
print r'| Failing'
for skey in submissions.keys():
submission = submissions[skey]
bname = submission['buildname']
bfails = submission['tests']
if len(bfails) > 100:
continue
if csvOutput:
print bname
print len(bfails)
elif wikiOutput:
print r'|-'
print r'| ',
print r'[http://open.cdash.org/index.php?project=VTK' + '&date=' + dashDate + r'&filtercount=1' + r'&field1=buildname/string&compare1=61&value1=' + bname + " " + bname + "]"
print r'|'
print len(bfails)
for tnum in range(0, len(bfails)):
test = bfails[tnum]
tname = test['name']
if not tname in tfails:
tfails[tname] = list()
tfails[tname].append(bname)
if wikiOutput:
print r'|}'
if csvOutput:
print "-"*20, "REPORT", "-"*20
print len(tfails)," FAILURES"
elif wikiOutput:
print "===Tests for " + dashDate + "==="
print r'{| class="wikitable sortable" border="1" cellpadding="5" cellspacing="0"'
print r'|-'
print r'| Test'
print r'| Failing'
print r'| Platforms'
failcounts = map(lambda x: (x,len(tfails[x])), tfails.keys())
sortedfails = sorted(failcounts, key=lambda fail: fail[1])
for test in sortedfails:
tname = test[0]
if csvOutput:
print tname, ",", len(tfails[tname]), ",", tfails[tname]
elif wikiOutput:
print r'|-'
print r'| '
print r'[http://open.cdash.org/testSummary.php?' + r'project=11' + r'&date=' + dashDate + r'&name=' + tname + ' ' + tname + ']'
print r'|',
print len(tfails[tname])
print r'|',
print tfails[tname]
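# Illustrative sketch (added): tfails maps each failing test name to the
# list of builds it failed on, e.g.
#   {'TestFoo': ['ubuntu-gcc', 'win7-msvc'], 'TestBar': ['win7-msvc']}
# and sortedfails orders the (name, fail_count) pairs by ascending count,
# so the most widely failing tests are printed last.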
|
{
"content_hash": "d198156dd49cd508a9c8a2d8cd9948fa",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 177,
"avg_line_length": 28.309278350515463,
"alnum_prop": 0.6504005826656956,
"repo_name": "biddisco/VTK",
"id": "e5b07bff7f39347cb14fd18504652736059f56f9",
"size": "2764",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Testing/Core/vtk_fail_summary.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "37444"
},
{
"name": "C",
"bytes": "45542302"
},
{
"name": "C++",
"bytes": "60467840"
},
{
"name": "CSS",
"bytes": "157961"
},
{
"name": "Cuda",
"bytes": "28721"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "IDL",
"bytes": "4406"
},
{
"name": "Java",
"bytes": "184678"
},
{
"name": "JavaScript",
"bytes": "978324"
},
{
"name": "Objective-C",
"bytes": "121232"
},
{
"name": "Objective-C++",
"bytes": "101052"
},
{
"name": "Pascal",
"bytes": "3255"
},
{
"name": "Perl",
"bytes": "177007"
},
{
"name": "Python",
"bytes": "13262355"
},
{
"name": "Shell",
"bytes": "41929"
},
{
"name": "Tcl",
"bytes": "1894036"
}
],
"symlink_target": ""
}
|
"""
Common Policy Engine Implementation
Policies can be expressed in one of two forms: A list of lists, or a
string written in the new policy language.
In the list-of-lists representation, each check inside the innermost
list is combined as with an "and" conjunction--for that check to pass,
all the specified checks must pass. These innermost lists are then
combined as with an "or" conjunction. As an example, take the following
rule, expressed in the list-of-lists representation::
[["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
This is the original way of expressing policies, but there now exists a
new way: the policy language.
In the policy language, each check is specified the same way as in the
list-of-lists representation: a simple "a:b" pair that is matched to
the correct class to perform that check::
+===========================================================================+
| TYPE | SYNTAX |
+===========================================================================+
|User's Role | role:admin |
+---------------------------------------------------------------------------+
|Rules already defined on policy | rule:admin_required |
+---------------------------------------------------------------------------+
|Against URL's¹ | http://my-url.org/check |
+---------------------------------------------------------------------------+
|User attributes² | project_id:%(target.project.id)s |
+---------------------------------------------------------------------------+
|Strings | <variable>:'xpto2035abc' |
| | 'myproject':<variable> |
+---------------------------------------------------------------------------+
| | project_id:xpto2035abc |
|Literals | domain_id:20 |
| | True:%(user.enabled)s |
+===========================================================================+
¹URL checking must return 'True' to be valid
²User attributes (obtained through the token): user_id, domain_id or project_id
Conjunction operators are available, allowing for more expressiveness
in crafting policies. So, in the policy language, the previous check in
list-of-lists becomes::
role:admin or (project_id:%(project_id)s and role:projectadmin)
The policy language also has the "not" operator, allowing a richer
policy rule::
project_id:%(project_id)s and not role:dunce
Attributes sent along with API calls can be used by the policy engine
(on the right side of the expression), by using the following syntax::
<some_value>:%(user.id)s
Contextual attributes of objects identified by their IDs are loaded
from the database. They are also available to the policy engine and
can be checked through the `target` keyword::
<some_value>:%(target.role.name)s
Finally, two special policy checks should be mentioned; the policy
check "@" will always accept an access, and the policy check "!" will
always reject an access. (Note that if a rule is either the empty
list ("[]") or the empty string, this is equivalent to the "@" policy
check.) Of these, the "!" policy check is probably the most useful,
as it allows particular rules to be explicitly disabled.
"""
import abc
import ast
import copy
import logging
import os
import re
from oslo_config import cfg
from oslo_serialization import jsonutils
import six
import six.moves.urllib.parse as urlparse
import six.moves.urllib.request as urlrequest
from nova.openstack.common import fileutils
from nova.openstack.common._i18n import _, _LE, _LI
policy_opts = [
cfg.StrOpt('policy_file',
default='policy.json',
help=_('The JSON file that defines policies.')),
cfg.StrOpt('policy_default_rule',
default='default',
help=_('Default rule. Enforced when a requested rule is not '
'found.')),
cfg.MultiStrOpt('policy_dirs',
default=['policy.d'],
help=_('Directories where policy configuration files are '
'stored. They can be relative to any directory '
'in the search path defined by the config_dir '
'option, or absolute paths. The file defined by '
'policy_file must exist for these directories to '
'be searched.')),
]
CONF = cfg.CONF
CONF.register_opts(policy_opts)
LOG = logging.getLogger(__name__)
_checks = {}
def list_opts():
"""Entry point for oslo.config-generator."""
return [(None, copy.deepcopy(policy_opts))]
class PolicyNotAuthorized(Exception):
def __init__(self, rule):
msg = _("Policy doesn't allow %s to be performed.") % rule
super(PolicyNotAuthorized, self).__init__(msg)
class Rules(dict):
"""A store for rules. Handles the default_rule setting directly."""
@classmethod
def load_json(cls, data, default_rule=None):
"""Allow loading of JSON rule data."""
# Suck in the JSON data and parse the rules
rules = dict((k, parse_rule(v)) for k, v in
jsonutils.loads(data).items())
return cls(rules, default_rule)
def __init__(self, rules=None, default_rule=None):
"""Initialize the Rules store."""
super(Rules, self).__init__(rules or {})
self.default_rule = default_rule
def __missing__(self, key):
"""Implements the default rule handling."""
if isinstance(self.default_rule, dict):
raise KeyError(key)
# If the default rule isn't actually defined, do something
# reasonably intelligent
if not self.default_rule:
raise KeyError(key)
if isinstance(self.default_rule, BaseCheck):
return self.default_rule
# We need to check this or we can get infinite recursion
if self.default_rule not in self:
raise KeyError(key)
elif isinstance(self.default_rule, six.string_types):
return self[self.default_rule]
def __str__(self):
"""Dumps a string representation of the rules."""
# Start by building the canonical strings for the rules
out_rules = {}
for key, value in self.items():
# Use empty string for singleton TrueCheck instances
if isinstance(value, TrueCheck):
out_rules[key] = ''
else:
out_rules[key] = str(value)
# Dump a pretty-printed JSON representation
return jsonutils.dumps(out_rules, indent=4)
class Enforcer(object):
"""Responsible for loading and enforcing rules.
:param policy_file: Custom policy file to use, if none is
specified, `CONF.policy_file` will be
used.
:param rules: Default dictionary / Rules to use. It will be
considered just in the first instantiation. If
`load_rules(True)`, `clear()` or `set_rules(True)`
is called this will be overwritten.
:param default_rule: Default rule to use, CONF.default_rule will
be used if none is specified.
:param use_conf: Whether to load rules from cache or config file.
:param overwrite: Whether to overwrite existing rules when reload rules
from config file.
"""
def __init__(self, policy_file=None, rules=None,
default_rule=None, use_conf=True, overwrite=True):
self.default_rule = default_rule or CONF.policy_default_rule
self.rules = Rules(rules, self.default_rule)
self.policy_path = None
self.policy_file = policy_file or CONF.policy_file
self.use_conf = use_conf
self.overwrite = overwrite
def set_rules(self, rules, overwrite=True, use_conf=False):
"""Create a new Rules object based on the provided dict of rules.
:param rules: New rules to use. It should be an instance of dict.
:param overwrite: Whether to overwrite current rules or update them
with the new rules.
:param use_conf: Whether to reload rules from cache or config file.
"""
if not isinstance(rules, dict):
raise TypeError(_("Rules must be an instance of dict or Rules, "
"got %s instead") % type(rules))
self.use_conf = use_conf
if overwrite:
self.rules = Rules(rules, self.default_rule)
else:
self.rules.update(rules)
def clear(self):
"""Clears Enforcer rules, policy's cache and policy's path."""
self.set_rules({})
fileutils.delete_cached_file(self.policy_path)
self.default_rule = None
self.policy_path = None
def load_rules(self, force_reload=False):
"""Loads policy_path's rules.
Policy file is cached and will be reloaded if modified.
:param force_reload: Whether to reload rules from config file.
"""
if force_reload:
self.use_conf = force_reload
if self.use_conf:
if not self.policy_path:
self.policy_path = self._get_policy_path(self.policy_file)
self._load_policy_file(self.policy_path, force_reload,
overwrite=self.overwrite)
for path in CONF.policy_dirs:
try:
path = self._get_policy_path(path)
except cfg.ConfigFilesNotFoundError:
LOG.info(_LI("Can not find policy directory: %s"), path)
continue
self._walk_through_policy_directory(path,
self._load_policy_file,
force_reload, False)
@staticmethod
def _walk_through_policy_directory(path, func, *args):
# We do not iterate over sub-directories.
policy_files = next(os.walk(path))[2]
policy_files.sort()
for policy_file in [p for p in policy_files if not p.startswith('.')]:
func(os.path.join(path, policy_file), *args)
def _load_policy_file(self, path, force_reload, overwrite=True):
reloaded, data = fileutils.read_cached_file(
path, force_reload=force_reload)
if reloaded or not self.rules or not overwrite:
rules = Rules.load_json(data, self.default_rule)
self.set_rules(rules, overwrite=overwrite, use_conf=True)
LOG.debug("Rules successfully reloaded")
def _get_policy_path(self, path):
"""Locate the policy json data file/path.
:param path: Its value can be a full path or a relative path. When a
full path is specified, this function just returns it. When a
relative path is specified, this function searches the
configuration directories to find one that exists.
:returns: The policy path
:raises: ConfigFilesNotFoundError if the file/path couldn't
be located.
"""
policy_path = CONF.find_file(path)
if policy_path:
return policy_path
raise cfg.ConfigFilesNotFoundError((path,))
def enforce(self, rule, target, creds, do_raise=False,
exc=None, *args, **kwargs):
"""Checks authorization of a rule against the target and credentials.
:param rule: A string or BaseCheck instance specifying the rule
to evaluate.
:param target: As much information about the object being operated
on as possible, as a dictionary.
:param creds: As much information about the user performing the
action as possible, as a dictionary.
:param do_raise: Whether to raise an exception or not if check
fails.
:param exc: Class of the exception to raise if the check fails.
Any remaining arguments passed to enforce() (both
positional and keyword arguments) will be passed to
the exception class. If not specified, PolicyNotAuthorized
will be used.
:return: Returns False if the policy does not allow the action and
exc is not provided; otherwise, returns a value that
evaluates to True. Note: for rules using the "case"
expression, this True value will be the specified string
from the expression.
"""
self.load_rules()
# Allow the rule to be a Check tree
if isinstance(rule, BaseCheck):
result = rule(target, creds, self)
elif not self.rules:
# No rules to reference means we're going to fail closed
result = False
else:
try:
# Evaluate the rule
result = self.rules[rule](target, creds, self)
except KeyError:
LOG.debug("Rule [%s] doesn't exist" % rule)
# If the rule doesn't exist, fail closed
result = False
# If it is False, raise the exception if requested
if do_raise and not result:
if exc:
raise exc(*args, **kwargs)
raise PolicyNotAuthorized(rule)
return result
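# Illustrative usage sketch (added; the rule name and values are
# hypothetical, and the result depends on the loaded policy.json):
#
# >>> enforcer = Enforcer()
# >>> target = {'project_id': 'p-123'}
# >>> creds = {'roles': ['admin'], 'project_id': 'p-123'}
# >>> enforcer.enforce('compute:get', target, creds)
# True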
@six.add_metaclass(abc.ABCMeta)
class BaseCheck(object):
"""Abstract base class for Check classes."""
@abc.abstractmethod
def __str__(self):
"""String representation of the Check tree rooted at this node."""
pass
@abc.abstractmethod
def __call__(self, target, cred, enforcer):
"""Triggers if instance of the class is called.
Performs the check. Returns False to reject the access or a
true value (not necessarily True) to accept the access.
"""
pass
class FalseCheck(BaseCheck):
"""A policy check that always returns False (disallow)."""
def __str__(self):
"""Return a string representation of this check."""
return "!"
def __call__(self, target, cred, enforcer):
"""Check the policy."""
return False
class TrueCheck(BaseCheck):
"""A policy check that always returns True (allow)."""
def __str__(self):
"""Return a string representation of this check."""
return "@"
def __call__(self, target, cred, enforcer):
"""Check the policy."""
return True
class Check(BaseCheck):
"""A base class to allow for user-defined policy checks."""
def __init__(self, kind, match):
"""Initiates Check instance.
:param kind: The kind of the check, i.e., the field before the
':'.
:param match: The match of the check, i.e., the field after
the ':'.
"""
self.kind = kind
self.match = match
def __str__(self):
"""Return a string representation of this check."""
return "%s:%s" % (self.kind, self.match)
class NotCheck(BaseCheck):
"""Implements the "not" logical operator.
A policy check that inverts the result of another policy check.
"""
def __init__(self, rule):
"""Initialize the 'not' check.
:param rule: The rule to negate. Must be a Check.
"""
self.rule = rule
def __str__(self):
"""Return a string representation of this check."""
return "not %s" % self.rule
def __call__(self, target, cred, enforcer):
"""Check the policy.
Returns the logical inverse of the wrapped check.
"""
return not self.rule(target, cred, enforcer)
class AndCheck(BaseCheck):
"""Implements the "and" logical operator.
A policy check that requires that a list of other checks all return True.
"""
def __init__(self, rules):
"""Initialize the 'and' check.
:param rules: A list of rules that will be tested.
"""
self.rules = rules
def __str__(self):
"""Return a string representation of this check."""
return "(%s)" % ' and '.join(str(r) for r in self.rules)
def __call__(self, target, cred, enforcer):
"""Check the policy.
Requires that all rules accept in order to return True.
"""
for rule in self.rules:
if not rule(target, cred, enforcer):
return False
return True
def add_check(self, rule):
"""Adds rule to be tested.
Allows addition of another rule to the list of rules that will
be tested. Returns the AndCheck object for convenience.
"""
self.rules.append(rule)
return self
class OrCheck(BaseCheck):
"""Implements the "or" operator.
A policy check that requires that at least one of a list of other
checks returns True.
"""
def __init__(self, rules):
"""Initialize the 'or' check.
:param rules: A list of rules that will be tested.
"""
self.rules = rules
def __str__(self):
"""Return a string representation of this check."""
return "(%s)" % ' or '.join(str(r) for r in self.rules)
def __call__(self, target, cred, enforcer):
"""Check the policy.
Requires that at least one rule accept in order to return True.
"""
for rule in self.rules:
if rule(target, cred, enforcer):
return True
return False
def add_check(self, rule):
"""Adds rule to be tested.
Allows addition of another rule to the list of rules that will
be tested. Returns the OrCheck object for convenience.
"""
self.rules.append(rule)
return self
def _parse_check(rule):
"""Parse a single base check rule into an appropriate Check object."""
# Handle the special checks
if rule == '!':
return FalseCheck()
elif rule == '@':
return TrueCheck()
try:
kind, match = rule.split(':', 1)
except Exception:
LOG.exception(_LE("Failed to understand rule %s") % rule)
# If the rule is invalid, we'll fail closed
return FalseCheck()
# Find what implements the check
if kind in _checks:
return _checks[kind](kind, match)
elif None in _checks:
return _checks[None](kind, match)
else:
LOG.error(_LE("No handler for matches of kind %s") % kind)
return FalseCheck()
def _parse_list_rule(rule):
"""Translates the old list-of-lists syntax into a tree of Check objects.
Provided for backwards compatibility.
"""
# Empty rule defaults to True
if not rule:
return TrueCheck()
# Outer list is joined by "or"; inner list by "and"
or_list = []
for inner_rule in rule:
# Elide empty inner lists
if not inner_rule:
continue
# Handle bare strings
if isinstance(inner_rule, six.string_types):
inner_rule = [inner_rule]
# Parse the inner rules into Check objects
and_list = [_parse_check(r) for r in inner_rule]
# Append the appropriate check to the or_list
if len(and_list) == 1:
or_list.append(and_list[0])
else:
or_list.append(AndCheck(and_list))
# If we have only one check, omit the "or"
if not or_list:
return FalseCheck()
elif len(or_list) == 1:
return or_list[0]
return OrCheck(or_list)
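# Illustrative example (added): the list-of-lists rule from the module
# docstring reduces to an OrCheck over an AndCheck:
#
# >>> check = _parse_list_rule(
# ...     [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]])
# >>> str(check)
# '(role:admin or (project_id:%(project_id)s and role:projectadmin))'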
# Used for tokenizing the policy language
_tokenize_re = re.compile(r'\s+')
def _parse_tokenize(rule):
"""Tokenizer for the policy language.
Most of the single-character tokens are specified in the
_tokenize_re; however, parentheses need to be handled specially,
because they can appear inside a check string. Thankfully, those
parentheses that appear inside a check string can never occur at
the very beginning or end ("%(variable)s" is the correct syntax).
"""
for tok in _tokenize_re.split(rule):
# Skip empty tokens
if not tok or tok.isspace():
continue
# Handle leading parens on the token
clean = tok.lstrip('(')
for i in range(len(tok) - len(clean)):
yield '(', '('
# If it was only parentheses, continue
if not clean:
continue
else:
tok = clean
# Handle trailing parens on the token
clean = tok.rstrip(')')
trail = len(tok) - len(clean)
# Yield the cleaned token
lowered = clean.lower()
if lowered in ('and', 'or', 'not'):
# Special tokens
yield lowered, clean
elif clean:
# Not a special token, but not composed solely of ')'
if len(tok) >= 2 and ((tok[0], tok[-1]) in
[('"', '"'), ("'", "'")]):
# It's a quoted string
yield 'string', tok[1:-1]
else:
yield 'check', _parse_check(clean)
# Yield the trailing parens
for i in range(trail):
yield ')', ')'
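# Illustrative trace (added): parentheses are peeled off the surrounding
# tokens while the embedded checks stay intact:
#
# >>> [(tok, str(val)) for tok, val in
# ...  _parse_tokenize("role:admin or (not role:dunce)")]
# [('check', 'role:admin'), ('or', 'or'), ('(', '('),
#  ('not', 'not'), ('check', 'role:dunce'), (')', ')')]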
class ParseStateMeta(type):
"""Metaclass for the ParseState class.
Facilitates identifying reduction methods.
"""
def __new__(mcs, name, bases, cls_dict):
"""Create the class.
Injects the 'reducers' list, a list of tuples matching token sequences
to the names of the corresponding reduction methods.
"""
reducers = []
for key, value in cls_dict.items():
if not hasattr(value, 'reducers'):
continue
for reduction in value.reducers:
reducers.append((reduction, key))
cls_dict['reducers'] = reducers
return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
def reducer(*tokens):
"""Decorator for reduction methods.
Arguments are a sequence of tokens, in order, which should trigger running
this reduction method.
"""
def decorator(func):
# Make sure we have a list of reducer sequences
if not hasattr(func, 'reducers'):
func.reducers = []
# Add the tokens to the list of reducer sequences
func.reducers.append(list(tokens))
return func
return decorator
@six.add_metaclass(ParseStateMeta)
class ParseState(object):
"""Implement the core of parsing the policy language.
Uses a greedy reduction algorithm to reduce a sequence of tokens into
a single terminal, the value of which will be the root of the Check tree.
Note: error reporting is rather lacking. The best we can get with
this parser formulation is an overall "parse failed" error.
Fortunately, the policy language is simple enough that this
shouldn't be that big a problem.
"""
def __init__(self):
"""Initialize the ParseState."""
self.tokens = []
self.values = []
def reduce(self):
"""Perform a greedy reduction of the token stream.
If a reducer method matches, it will be executed, then the
reduce() method will be called recursively to search for any more
possible reductions.
"""
for reduction, methname in self.reducers:
if (len(self.tokens) >= len(reduction) and
self.tokens[-len(reduction):] == reduction):
# Get the reduction method
meth = getattr(self, methname)
# Reduce the token stream
results = meth(*self.values[-len(reduction):])
# Update the tokens and values
self.tokens[-len(reduction):] = [r[0] for r in results]
self.values[-len(reduction):] = [r[1] for r in results]
# Check for any more reductions
return self.reduce()
def shift(self, tok, value):
"""Adds one more token to the state. Calls reduce()."""
self.tokens.append(tok)
self.values.append(value)
# Do a greedy reduce...
self.reduce()
@property
def result(self):
"""Obtain the final result of the parse.
Raises ValueError if the parse failed to reduce to a single result.
"""
if len(self.values) != 1:
raise ValueError("Could not parse rule")
return self.values[0]
@reducer('(', 'check', ')')
@reducer('(', 'and_expr', ')')
@reducer('(', 'or_expr', ')')
def _wrap_check(self, _p1, check, _p2):
"""Turn parenthesized expressions into a 'check' token."""
return [('check', check)]
@reducer('check', 'and', 'check')
def _make_and_expr(self, check1, _and, check2):
"""Create an 'and_expr'.
Join two checks by the 'and' operator.
"""
return [('and_expr', AndCheck([check1, check2]))]
@reducer('and_expr', 'and', 'check')
def _extend_and_expr(self, and_expr, _and, check):
"""Extend an 'and_expr' by adding one more check."""
return [('and_expr', and_expr.add_check(check))]
@reducer('check', 'or', 'check')
def _make_or_expr(self, check1, _or, check2):
"""Create an 'or_expr'.
Join two checks by the 'or' operator.
"""
return [('or_expr', OrCheck([check1, check2]))]
@reducer('or_expr', 'or', 'check')
def _extend_or_expr(self, or_expr, _or, check):
"""Extend an 'or_expr' by adding one more check."""
return [('or_expr', or_expr.add_check(check))]
@reducer('not', 'check')
def _make_not_expr(self, _not, check):
"""Invert the result of another check."""
return [('check', NotCheck(check))]
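# Illustrative trace (added): parsing "not role:dunce" shift by shift.
# Shifting ('not', 'not') matches no reducer; shifting the following
# ('check', ...) token fires the ('not', 'check') reducer, leaving a
# single ('check', NotCheck(...)) on the stack:
#
# >>> state = ParseState()
# >>> state.shift('not', 'not')
# >>> state.shift('check', _parse_check('role:dunce'))
# >>> str(state.result)
# 'not role:dunce'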
def _parse_text_rule(rule):
"""Parses policy to the tree.
Translates a policy written in the policy language into a tree of
Check objects.
"""
# Empty rule means always accept
if not rule:
return TrueCheck()
# Parse the token stream
state = ParseState()
for tok, value in _parse_tokenize(rule):
state.shift(tok, value)
try:
return state.result
except ValueError:
# Couldn't parse the rule
LOG.exception(_LE("Failed to understand rule %s") % rule)
# Fail closed
return FalseCheck()
def parse_rule(rule):
"""Parses a policy rule into a tree of Check objects."""
# If the rule is a string, it's in the policy language
if isinstance(rule, six.string_types):
return _parse_text_rule(rule)
return _parse_list_rule(rule)
def register(name, func=None):
"""Register a function or Check class as a policy check.
:param name: Gives the name of the check type, e.g., 'rule',
'role', etc. If name is None, a default check type
will be registered.
:param func: If given, provides the function or class to register.
If not given, returns a function taking one argument
to specify the function or class to register,
allowing use as a decorator.
"""
# Perform the actual decoration by registering the function or
# class. Returns the function or class for compliance with the
# decorator interface.
def decorator(func):
_checks[name] = func
return func
# If the function or class is given, do the registration
if func:
return decorator(func)
return decorator
@register("rule")
class RuleCheck(Check):
def __call__(self, target, creds, enforcer):
"""Recursively checks credentials based on the defined rules."""
try:
return enforcer.rules[self.match](target, creds, enforcer)
except KeyError:
# We don't have any matching rule; fail closed
return False
@register("role")
class RoleCheck(Check):
def __call__(self, target, creds, enforcer):
"""Check that there is a matching role in the cred dict."""
return self.match.lower() in [x.lower() for x in creds['roles']]
@register('http')
class HttpCheck(Check):
def __call__(self, target, creds, enforcer):
"""Check http: rules by calling to a remote server.
This example implementation simply verifies that the response
is exactly 'True'.
"""
url = ('http:' + self.match) % target
# Convert instances of object() in target temporarily to
# empty dict to avoid circular reference detection
# errors in jsonutils.dumps().
temp_target = copy.deepcopy(target)
for key in target.keys():
element = target.get(key)
if type(element) is object:
temp_target[key] = {}
data = {'target': jsonutils.dumps(temp_target),
'credentials': jsonutils.dumps(creds)}
post_data = urlparse.urlencode(data)
f = urlrequest.urlopen(url, post_data)
return f.read() == "True"
@register(None)
class GenericCheck(Check):
def __call__(self, target, creds, enforcer):
"""Check an individual match.
Matches look like:
tenant:%(tenant_id)s
role:compute:admin
True:%(user.enabled)s
'Member':%(role.name)s
"""
try:
match = self.match % target
except KeyError:
# if a key is not present in the target
# during a GenericCheck, return False
return False
try:
# Try to interpret self.kind as a literal
leftval = ast.literal_eval(self.kind)
except ValueError:
try:
kind_parts = self.kind.split('.')
leftval = creds
for kind_part in kind_parts:
leftval = leftval[kind_part]
except KeyError:
return False
return match == six.text_type(leftval)
|
{
"content_hash": "5ad2b09d9bf804d566ede13d36fd6418",
"timestamp": "",
"source": "github",
"line_count": 945,
"max_line_length": 79,
"avg_line_length": 32.11957671957672,
"alnum_prop": 0.5733864856851053,
"repo_name": "sajeeshcs/nested_quota_final",
"id": "d4be54643aea89a9fdc18ef96459c5e434180d1e",
"size": "31024",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "nova/openstack/common/policy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "5941"
},
{
"name": "Python",
"bytes": "15636233"
},
{
"name": "Shell",
"bytes": "17729"
},
{
"name": "XML",
"bytes": "45372"
}
],
"symlink_target": ""
}
|
from System.IO import *
from System.Drawing import *
from System.Runtime.Remoting import *
from System.Threading import *
from System.Windows.Forms import *
from System.Xml.Serialization import *
from System import *
from Analysis.EDM import *
from DAQ.Environment import *
from EDMConfig import *
r = Random()
def saveBlockConfig(path, config):
fs = FileStream(path, FileMode.Create)
s = XmlSerializer(BlockConfig)
s.Serialize(fs,config)
fs.Close()
def loadBlockConfig(path):
fs = FileStream(path, FileMode.Open)
s = XmlSerializer(BlockConfig)
bc = s.Deserialize(fs)
fs.Close()
return bc
def writeLatestBlockNotificationFile(cluster, blockIndex):
fs = FileStream(Environs.FileSystem.Paths["settingsPath"] + "\\BlockHead\\latestBlock.txt", FileMode.Create)
sw = StreamWriter(fs)
sw.WriteLine(cluster + "\t" + str(blockIndex))
sw.Close()
fs.Close()
def checkYAGAndFix():
interlockFailed = hc.YAGInterlockFailed;
if (interlockFailed):
bh.StopPattern();
bh.StartPattern();
def printWaveformCode(bc, name):
print(name + ": " + str(bc.GetModulationByName(name).Waveform.Code) + " -- " + str(bc.GetModulationByName(name).Waveform.Inverted))
def prompt(text):
sys.stdout.write(text)
return sys.stdin.readline().strip()
def measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle):
fileSystem = Environs.FileSystem
print("Measuring parameters ...")
bh.StopPattern()
hc.UpdateRFPowerMonitor()
hc.UpdateRFFrequencyMonitor()
bh.StartPattern()
hc.UpdateBCurrentMonitor()
hc.UpdateVMonitor()
hc.UpdateI2AOMFreqMonitor()
print("V plus: " + str(hc.CPlusMonitorVoltage * hc.CPlusMonitorScale))
print("V minus: " + str(hc.CMinusMonitorVoltage * hc.CMinusMonitorScale))
print("Bias: " + str(hc.BiasCurrent))
print("B step: " + str(abs(hc.FlipStepCurrent)))
print("DB step: " + str(abs(hc.CalStepCurrent)))
# load a default BlockConfig and customise it appropriately
settingsPath = fileSystem.Paths["settingsPath"] + "\\BlockHead\\"
bc = loadBlockConfig(settingsPath + "default.xml")
bc.Settings["cluster"] = cluster
bc.Settings["eState"] = eState
bc.Settings["bState"] = bState
bc.Settings["rfState"] = rfState
bc.Settings["phaseScramblerV"] = scramblerV
bc.Settings["probePolarizerAngle"] = probePolAngle
bc.Settings["pumpPolarizerAngle"] = pumpPolAngle
bc.Settings["ePlus"] = hc.CPlusMonitorVoltage * hc.CPlusMonitorScale
bc.Settings["eMinus"] = hc.CMinusMonitorVoltage * hc.CMinusMonitorScale
bc.GetModulationByName("B").Centre = (hc.BiasCurrent)/1000
bc.GetModulationByName("B").Step = abs(hc.FlipStepCurrent)/1000
bc.GetModulationByName("DB").Step = abs(hc.CalStepCurrent)/1000
# these next 3, seemingly redundant, lines are to preserve backward compatibility
bc.GetModulationByName("B").PhysicalCentre = (hc.BiasCurrent)/1000
bc.GetModulationByName("B").PhysicalStep = abs(hc.FlipStepCurrent)/1000
bc.GetModulationByName("DB").PhysicalStep = abs(hc.CalStepCurrent)/1000
bc.GetModulationByName("RF1A").Centre = hc.RF1AttCentre
bc.GetModulationByName("RF1A").Step = hc.RF1AttStep
bc.GetModulationByName("RF1A").PhysicalCentre = hc.RF1PowerCentre
bc.GetModulationByName("RF1A").PhysicalStep = hc.RF1PowerStep
bc.GetModulationByName("RF2A").Centre = hc.RF2AttCentre
bc.GetModulationByName("RF2A").Step = hc.RF2AttStep
bc.GetModulationByName("RF2A").PhysicalCentre = hc.RF2PowerCentre
bc.GetModulationByName("RF2A").PhysicalStep = hc.RF2PowerStep
bc.GetModulationByName("RF1F").Centre = hc.RF1FMCentre
bc.GetModulationByName("RF1F").Step = hc.RF1FMStep
bc.GetModulationByName("RF1F").PhysicalCentre = hc.RF1FrequencyCentre
bc.GetModulationByName("RF1F").PhysicalStep = hc.RF1FrequencyStep
bc.GetModulationByName("RF2F").Centre = hc.RF2FMCentre
bc.GetModulationByName("RF2F").Step = hc.RF2FMStep
bc.GetModulationByName("RF2F").PhysicalCentre = hc.RF2FrequencyCentre
bc.GetModulationByName("RF2F").PhysicalStep = hc.RF2FrequencyStep
bc.GetModulationByName("LF1").Centre = hc.FLPZTVoltage
bc.GetModulationByName("LF1").Step = hc.FLPZTStep
bc.GetModulationByName("LF1").PhysicalCentre = hc.I2LockAOMFrequencyCentre
bc.GetModulationByName("LF1").PhysicalStep = hc.I2LockAOMFrequencyStep
# generate the waveform codes
print("Generating waveform codes ...")
eWave = bc.GetModulationByName("E").Waveform
eWave.Name = "E"
lf1Wave = bc.GetModulationByName("LF1").Waveform
lf1Wave.Name = "LF1"
ws = WaveformSetGenerator.GenerateWaveforms( (eWave, lf1Wave), ("B","DB","PI","RF1A","RF2A","RF1F","RF2F") )
bc.GetModulationByName("B").Waveform = ws["B"]
bc.GetModulationByName("DB").Waveform = ws["DB"]
bc.GetModulationByName("PI").Waveform = ws["PI"]
bc.GetModulationByName("RF1A").Waveform = ws["RF1A"]
bc.GetModulationByName("RF2A").Waveform = ws["RF2A"]
bc.GetModulationByName("RF1F").Waveform = ws["RF1F"]
bc.GetModulationByName("RF2F").Waveform = ws["RF2F"]
# change the inversions of the static codes E and LF1
bc.GetModulationByName("E").Waveform.Inverted = WaveformSetGenerator.RandomBool()
bc.GetModulationByName("LF1").Waveform.Inverted = WaveformSetGenerator.RandomBool()
# print the waveform codes
# printWaveformCode(bc, "E")
# printWaveformCode(bc, "B")
# printWaveformCode(bc, "DB")
# printWaveformCode(bc, "PI")
# printWaveformCode(bc, "RF1A")
# printWaveformCode(bc, "RF2A")
# printWaveformCode(bc, "RF1F")
# printWaveformCode(bc, "RF2F")
# printWaveformCode(bc, "LF1")
# store e-switch info in block config
print("Storing E switch parameters ...")
bc.Settings["eRampDownTime"] = hc.ERampDownTime
bc.Settings["eRampDownDelay"] = hc.ERampDownDelay
bc.Settings["eBleedTime"] = hc.EBleedTime
bc.Settings["eSwitchTime"] = hc.ESwitchTime
bc.Settings["eRampUpTime"] = hc.ERampUpTime
bc.Settings["eRampUpDelay"] = hc.ERampUpDelay
# this is for legacy analysis compatibility
bc.Settings["eDischargeTime"] = hc.ERampDownTime + hc.ERampDownDelay
bc.Settings["eChargeTime"] = hc.ERampUpTime + hc.ERampUpDelay
# store the E switch asymmetry in the block
bc.Settings["E0PlusBoost"] = hc.E0PlusBoost
return bc
# lock gains
# microamps of current per volt of control input
kSteppingBiasCurrentPerVolt = 1000.0
# max change in the b-bias voltage per block
kBMaxChange = 0.05
# volts of rf*a input required per cal's worth of offset
kRFAVoltsPerCal = 3.2
kRFAMaxChange = 0.1
# volts of rf*f input required per cal's worth of offset
kRFFVoltsPerCal = 8
kRFFMaxChange = 0.1
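# Illustrative sketch (added): with these gains, a measured rf1a/db ratio of
# 0.3 cal suggests an attenuator shift of -(1/3) * 0.3 * 3.2 = -0.32 V,
# which the kRFAMaxChange window in updateLocks() clamps to -0.1 V per block.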
def updateLocks(bState):
pmtChannelValues = bh.DBlock.ChannelValues[0]
# note the weird python syntax for a one element list
sigIndex = pmtChannelValues.GetChannelIndex(("SIG",))
sigValue = pmtChannelValues.GetValue(sigIndex)
bIndex = pmtChannelValues.GetChannelIndex(("B",))
bValue = pmtChannelValues.GetValue(bIndex)
#bError = pmtChannelValues.GetError(bIndex)
dbIndex = pmtChannelValues.GetChannelIndex(("DB",))
dbValue = pmtChannelValues.GetValue(dbIndex)
#dbError = pmtChannelValues.GetError(dbIndex)
rf1aIndex = pmtChannelValues.GetChannelIndex(("RF1A","DB"))
rf1aValue = pmtChannelValues.GetValue(rf1aIndex)
#rf1aError = pmtChannelValues.GetError(rf1aIndex)
rf2aIndex = pmtChannelValues.GetChannelIndex(("RF2A","DB"))
rf2aValue = pmtChannelValues.GetValue(rf2aIndex)
#rf2aError = pmtChannelValues.GetError(rf2aIndex)
rf1fIndex = pmtChannelValues.GetChannelIndex(("RF1F","DB"))
rf1fValue = pmtChannelValues.GetValue(rf1fIndex)
#rf1fError = pmtChannelValues.GetError(rf1fIndex)
rf2fIndex = pmtChannelValues.GetChannelIndex(("RF2F","DB"))
rf2fValue = pmtChannelValues.GetValue(rf2fIndex)
#rf2fError = pmtChannelValues.GetError(rf2fIndex)
lf1Index = pmtChannelValues.GetChannelIndex(("LF1",))
lf1Value = pmtChannelValues.GetValue(lf1Index)
#lf1Error = pmtChannelValues.GetError(lf1Index)
lf1dbIndex = pmtChannelValues.GetChannelIndex(("LF1","DB"))
lf1dbValue = pmtChannelValues.GetValue(lf1dbIndex)
print "SIG: " + str(sigValue)
print "B: " + str(bValue) + " DB: " + str(dbValue)
print "RF1A: " + str(rf1aValue) + " RF2A: " + str(rf2aValue)
print "RF1F: " + str(rf1fValue) + " RF2F: " + str(rf2fValue)
print "LF1: " + str(lf1Value) + " LF1.DB: " + str(lf1dbValue)
# B bias lock
# the sign of the feedback depends on the b-state
if bState:
feedbackSign = 1
else:
feedbackSign = -1
deltaBias = - (1.0/8.0) * feedbackSign * (hc.CalStepCurrent * (bValue / dbValue)) / kSteppingBiasCurrentPerVolt
deltaBias = windowValue(deltaBias, -kBMaxChange, kBMaxChange)
print "Attempting to change stepping B bias by " + str(deltaBias) + " V."
newBiasVoltage = windowValue( hc.SteppingBiasVoltage - deltaBias, 0, 5)
hc.SetSteppingBBiasVoltage( newBiasVoltage )
# RFA locks
deltaRF1A = - (1.0/3.0) * (rf1aValue / dbValue) * kRFAVoltsPerCal
deltaRF1A = windowValue(deltaRF1A, -kRFAMaxChange, kRFAMaxChange)
print "Attempting to change RF1A by " + str(deltaRF1A) + " V."
newRF1A = windowValue( hc.RF1AttCentre - deltaRF1A, hc.RF1AttStep, 5 - hc.RF1AttStep)
hc.SetRF1AttCentre( newRF1A )
#
deltaRF2A = - (1.0/3.0) * (rf2aValue / dbValue) * kRFAVoltsPerCal
deltaRF2A = windowValue(deltaRF2A, -kRFAMaxChange, kRFAMaxChange)
print "Attempting to change RF2A by " + str(deltaRF2A) + " V."
newRF2A = windowValue( hc.RF2AttCentre - deltaRF2A, hc.RF2AttStep, 5 - hc.RF2AttStep )
hc.SetRF2AttCentre( newRF2A )
# RFF locks
#deltaRF1F = - (1.0/4.0) * (rf1fValue / dbValue) * kRFFVoltsPerCal
#deltaRF1F = windowValue(deltaRF1F, -kRFFMaxChange, kRFFMaxChange)
#print "Attempting to change RF1F by " + str(deltaRF1F) + " V."
#newRF1F = windowValue( hc.RF1FMCentre - deltaRF1F, hc.RF1FMStep, 5 - hc.RF1FMStep)
newRF1F = 0.1 + 1.2 * r.NextDouble()
print "Changing RF1F to " + str(newRF1F) + " V."
hc.SetRF1FMCentre( newRF1F )
#
deltaRF2F = - (1.0/4.0) * (rf2fValue / dbValue) * kRFFVoltsPerCal
deltaRF2F = windowValue(deltaRF2F, -kRFFMaxChange, kRFFMaxChange)
print "Attempting to change RF2F by " + str(deltaRF2F) + " V."
newRF2F = windowValue( hc.RF2FMCentre - deltaRF2F, hc.RF2FMStep, 5 - hc.RF2FMStep )
hc.SetRF2FMCentre( newRF2F )
# Laser frequency lock (-ve multiplier in f0 mode and +ve in f1)
deltaLF1 = -1.25 * (lf1Value / dbValue)
deltaLF1 = windowValue(deltaLF1, -0.1, 0.1)
print "Attempting to change LF1 by " + str(deltaLF1) + " V."
newLF1 = windowValue( hc.FLPZTVoltage - deltaLF1, hc.FLPZTStep, 5 - hc.FLPZTStep )
hc.SetFLPZTVoltage( newLF1 )
def windowValue(value, minValue, maxValue):
if ( (value < maxValue) & (value > minValue) ):
return value
else:
if (value < minValue):
return minValue
else:
return maxValue
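# Illustrative usage (added): windowValue clamps a proposed correction into
# the allowed range, e.g. for the stepping B bias lock above:
#
# >>> windowValue(0.12, -kBMaxChange, kBMaxChange)
# 0.05
# >>> windowValue(-0.02, -kBMaxChange, kBMaxChange)
# -0.02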
kTargetRotationPeriod = 10
kReZeroLeakageMonitorsPeriod = 10
def EDMGo():
# Setup
f = None
fileSystem = Environs.FileSystem
dataPath = fileSystem.GetDataDirectory(fileSystem.Paths["edmDataPath"])
settingsPath = fileSystem.Paths["settingsPath"] + "\\BlockHead\\"
print("Data directory is : " + dataPath)
print("")
suggestedClusterName = fileSystem.GenerateNextDataFileName()
sm.SelectProfile("Scan B")
# User inputs data
cluster = prompt("Cluster name [" + suggestedClusterName +"]: ")
if cluster == "":
cluster = suggestedClusterName
print("Using cluster " + suggestedClusterName)
eState = hc.EManualState
print("E-state: " + str(eState))
bState = hc.BManualState
print("B-state: " + str(bState))
rfState = hc.RFManualState
print("rf-state: " + str(rfState))
# this is to make sure the B current monitor is in a sensible state
hc.UpdateBCurrentMonitor()
# randomise Ramsey phase
scramblerV = 0.724774 * r.NextDouble()
hc.SetScramblerVoltage(scramblerV)
# randomise polarizations
probePolAngle = 360.0 * r.NextDouble()
hc.SetProbePolarizerAngle(probePolAngle)
pumpPolAngle = 360.0 * r.NextDouble()
hc.SetPumpPolarizerAngle(pumpPolAngle)
bc = measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle)
# loop and take data
blockIndex = 0
maxBlockIndex = 10000
while blockIndex < maxBlockIndex:
print("Acquiring block " + str(blockIndex) + " ...")
# save the block config and load into blockhead
print("Saving temp config.")
bc.Settings["clusterIndex"] = blockIndex
tempConfigFile ='%(p)stemp%(c)s_%(i)s.xml' % {'p': settingsPath, 'c': cluster, 'i': blockIndex}
saveBlockConfig(tempConfigFile, bc)
System.Threading.Thread.Sleep(500)
print("Loading temp config.")
bh.LoadConfig(tempConfigFile)
# take the block and save it
print("Running ...")
bh.AcquireAndWait()
print("Done.")
blockPath = '%(p)s%(c)s_%(i)s.zip' % {'p': dataPath, 'c': cluster, 'i': blockIndex}
bh.SaveBlock(blockPath)
print("Saved block "+ str(blockIndex) + ".")
# give mma a chance to analyse the block
print("Notifying Mathematica and waiting ...")
writeLatestBlockNotificationFile(cluster, blockIndex)
System.Threading.Thread.Sleep(5000)
print("Done.")
# increment and loop
File.Delete(tempConfigFile)
checkYAGAndFix()
blockIndex = blockIndex + 1
updateLocks(bState)
# randomise Ramsey phase
scramblerV = 0.724774 * r.NextDouble()
hc.SetScramblerVoltage(scramblerV)
# randomise polarizations
probePolAngle = 360.0 * r.NextDouble()
hc.SetProbePolarizerAngle(probePolAngle)
pumpPolAngle = 360.0 * r.NextDouble()
hc.SetPumpPolarizerAngle(pumpPolAngle)
bc = measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle)
hc.StepTarget(1)
# do things that need periodically doing
# if ((blockIndex % kTargetRotationPeriod) == 0):
# print("Rotating target.")
# hc.StepTarget(10)
pmtChannelValues = bh.DBlock.ChannelValues[0]
dbIndex = pmtChannelValues.GetChannelIndex(("DB",))
dbValue = pmtChannelValues.GetValue(dbIndex)
if (dbValue < 8.4):
print("Dodgy spot target rotation.")
hc.StepTarget(5)
if ((blockIndex % kReZeroLeakageMonitorsPeriod) == 0):
print("Recalibrating leakage monitors.")
hc.EnableEField( False )
System.Threading.Thread.Sleep(10000)
hc.EnableBleed( True )
System.Threading.Thread.Sleep(1000)
hc.EnableBleed( False )
System.Threading.Thread.Sleep(5000)
hc.CalibrateIMonitors()
hc.EnableEField( True )
bh.StopPattern()
def run_script():
EDMGo()
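# In normal use this file is loaded by the EDMSuite scripting environment,
# which supplies hc, bh, sm and r and then invokes run_script(); that wiring
# is an assumption here, not shown in this file.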
|
{
"content_hash": "328cf33a5072d54cd722c4a253e06de9",
"timestamp": "",
"source": "github",
"line_count": 349,
"max_line_length": 132,
"avg_line_length": 40.37535816618911,
"alnum_prop": 0.7439500390320063,
"repo_name": "ColdMatter/EDMSuite",
"id": "83bc681c88046bebee5fd2396b6ad5aa521e5f58",
"size": "14122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EDMScripts/OldScripts/EDMLoop_random.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2489"
},
{
"name": "C#",
"bytes": "6547131"
},
{
"name": "F#",
"bytes": "1565"
},
{
"name": "Forth",
"bytes": "767"
},
{
"name": "HTML",
"bytes": "241926"
},
{
"name": "Mathematica",
"bytes": "452861"
},
{
"name": "Python",
"bytes": "798129"
},
{
"name": "Shell",
"bytes": "33"
},
{
"name": "TSQL",
"bytes": "1768"
},
{
"name": "TeX",
"bytes": "8393"
}
],
"symlink_target": ""
}
|
from consul import Consul, ConsulException
from common.utils.asleep import asleep
from requests import ConnectionError
from twisted.internet.defer import inlineCallbacks, returnValue
import etcd3
import structlog
class ConsulStore(object):
""" Config kv store for consul with a cache for quicker subsequent reads
    TODO: This will block the reactor. We should either change the
    whole call stack to yield, or put the put/delete transactions into a
    queue and write them later with twisted. That will need a transaction
    log to ensure we don't lose anything.
    Making the whole call stack yield is troublesome because other tasks
    can come in on the side and start modifying things, which could be bad.
"""
CONNECT_RETRY_INTERVAL_SEC = 1
RETRY_BACKOFF = [0.05, 0.1, 0.2, 0.5, 1, 2, 5]
def __init__(self, host, port, path_prefix):
self.log = structlog.get_logger()
self._consul = Consul(host=host, port=port)
self.host = host
self.port = port
self._path_prefix = path_prefix
self._cache = {}
self.retries = 0
def make_path(self, key):
return '{}/{}'.format(self._path_prefix, key)
def __getitem__(self, key):
if key in self._cache:
return self._cache[key]
value = self._kv_get(self.make_path(key))
if value is not None:
# consul turns empty strings to None, so we do the reverse here
            val = value['Value'] or ''
            self._cache[key] = val
            return val
else:
raise KeyError(key)
def __contains__(self, key):
if key in self._cache:
return True
value = self._kv_get(self.make_path(key))
if value is not None:
            # normalise like __getitem__: consul stores empty strings as None
            self._cache[key] = value['Value'] or ''
return True
else:
return False
def __setitem__(self, key, value):
try:
assert isinstance(value, basestring)
self._cache[key] = value
self._kv_put(self.make_path(key), value)
except Exception, e:
self.log.exception('cannot-set-item', e=e)
def __delitem__(self, key):
self._cache.pop(key, None)
self._kv_delete(self.make_path(key))
@inlineCallbacks
def _backoff(self, msg):
wait_time = self.RETRY_BACKOFF[min(self.retries,
len(self.RETRY_BACKOFF) - 1)]
self.retries += 1
self.log.error(msg, retry_in=wait_time)
yield asleep(wait_time)
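    # With RETRY_BACKOFF as above, consecutive failures wait
    # 0.05, 0.1, 0.2, 0.5, 1, 2, 5 seconds, then stay capped at 5s
    # until _clear_backoff() resets the counter.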
def _redo_consul_connection(self):
self._consul = Consul(host=self.host, port=self.port)
self._cache.clear()
def _clear_backoff(self):
if self.retries:
self.log.info('reconnected-to-consul', after_retries=self.retries)
self.retries = 0
def _get_consul(self):
return self._consul
# Proxy methods for consul with retry support
def _kv_get(self, *args, **kw):
return self._retry('GET', *args, **kw)
def _kv_put(self, *args, **kw):
return self._retry('PUT', *args, **kw)
def _kv_delete(self, *args, **kw):
return self._retry('DELETE', *args, **kw)
def _retry(self, operation, *args, **kw):
while 1:
try:
consul = self._get_consul()
self.log.debug('consul', consul=consul, operation=operation,
args=args)
if operation == 'GET':
index, result = consul.kv.get(*args, **kw)
elif operation == 'PUT':
result = consul.kv.put(*args, **kw)
elif operation == 'DELETE':
result = consul.kv.delete(*args, **kw)
else:
# Default case - consider operation as a function call
result = operation(*args, **kw)
self._clear_backoff()
break
except ConsulException, e:
self.log.exception('consul-not-up', e=e)
self._backoff('consul-not-up')
except ConnectionError, e:
self.log.exception('cannot-connect-to-consul', e=e)
self._backoff('cannot-connect-to-consul')
except Exception, e:
self.log.exception(e)
self._backoff('unknown-error')
self._redo_consul_connection()
return result
class EtcdStore(object):
""" Config kv store for etcd with a cache for quicker subsequent reads
    TODO: This will block the reactor. We should either change the
    whole call stack to yield, or put the put/delete transactions into a
    queue and write them later with twisted. That will need a transaction
    log to ensure we don't lose anything.
    Making the whole call stack yield is troublesome because other tasks
    can come in on the side and start modifying things, which could be bad.
"""
CONNECT_RETRY_INTERVAL_SEC = 1
RETRY_BACKOFF = [0.05, 0.1, 0.2, 0.5, 1, 2, 5]
def __init__(self, host, port, path_prefix):
self.log = structlog.get_logger()
self._etcd = etcd3.client(host=host, port=port)
self.host = host
self.port = port
self._path_prefix = path_prefix
self._cache = {}
self.retries = 0
def make_path(self, key):
return '{}/{}'.format(self._path_prefix, key)
def __getitem__(self, key):
if key in self._cache:
return self._cache[key]
(value, meta) = self._kv_get(self.make_path(key))
if value is not None:
self._cache[key] = value
return value
else:
raise KeyError(key)
def __contains__(self, key):
if key in self._cache:
return True
(value, meta) = self._kv_get(self.make_path(key))
if value is not None:
self._cache[key] = value
return True
else:
return False
def __setitem__(self, key, value):
try:
assert isinstance(value, basestring)
self._cache[key] = value
self._kv_put(self.make_path(key), value)
except Exception, e:
self.log.exception('cannot-set-item', e=e)
def __delitem__(self, key):
self._cache.pop(key, None)
self._kv_delete(self.make_path(key))
@inlineCallbacks
def _backoff(self, msg):
wait_time = self.RETRY_BACKOFF[min(self.retries,
len(self.RETRY_BACKOFF) - 1)]
self.retries += 1
self.log.error(msg, retry_in=wait_time)
yield asleep(wait_time)
def _redo_etcd_connection(self):
self._etcd = etcd3.client(host=self.host, port=self.port)
self._cache.clear()
def _clear_backoff(self):
if self.retries:
self.log.info('reconnected-to-etcd', after_retries=self.retries)
self.retries = 0
def _get_etcd(self):
return self._etcd
# Proxy methods for etcd with retry support
def _kv_get(self, *args, **kw):
return self._retry('GET', *args, **kw)
def _kv_put(self, *args, **kw):
return self._retry('PUT', *args, **kw)
def _kv_delete(self, *args, **kw):
return self._retry('DELETE', *args, **kw)
def _retry(self, operation, *args, **kw):
# etcd data sometimes contains non-utf8 sequences, replace
self.log.debug('backend-op',
operation=operation,
args=map(lambda x : unicode(x,'utf8','replace'), args),
kw=kw)
while 1:
try:
etcd = self._get_etcd()
self.log.debug('etcd', etcd=etcd, operation=operation,
args=map(lambda x : unicode(x,'utf8','replace'), args))
if operation == 'GET':
(value, meta) = etcd.get(*args, **kw)
result = (value, meta)
elif operation == 'PUT':
result = etcd.put(*args, **kw)
elif operation == 'DELETE':
result = etcd.delete(*args, **kw)
else:
# Default case - consider operation as a function call
result = operation(*args, **kw)
self._clear_backoff()
break
except Exception, e:
self.log.exception(e)
self._backoff('unknown-error-with-etcd')
self._redo_etcd_connection()
return result
def load_backend(store_id, store_prefix, args):
""" Return the kv store backend based on the command line arguments
"""
def load_consul_store():
instance_core_store_prefix = '{}/{}'.format(store_prefix, store_id)
host, port = args.consul.split(':', 1)
return ConsulStore(host, int(port), instance_core_store_prefix)
def load_etcd_store():
instance_core_store_prefix = '{}/{}'.format(store_prefix, store_id)
host, port = args.etcd.split(':', 1)
return EtcdStore(host, int(port), instance_core_store_prefix)
loaders = {
'none': lambda: None,
'consul': load_consul_store,
'etcd': load_etcd_store
}
return loaders[args.backend]()
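# A minimal usage sketch (hypothetical values; the real CLI wiring lives in
# the voltha entry points, so treat the Namespace fields below as assumptions):
#
#   from argparse import Namespace
#   args = Namespace(backend='consul', consul='localhost:8500')
#   store = load_backend('0001', 'service/voltha/data/core', args)
#   store['config'] = 'hello'    # writes through to consul
#   print store['config']        # subsequent reads are served from the cache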
|
{
"content_hash": "dff4daa0d3d1ec1a6cc2adc3110b065b",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 79,
"avg_line_length": 33.81818181818182,
"alnum_prop": 0.551505376344086,
"repo_name": "opencord/voltha",
"id": "d9063488ab7745358e86935804ea0c276a37a85b",
"size": "9898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "voltha/core/config/config_backend.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "30265"
},
{
"name": "Dockerfile",
"bytes": "2881"
},
{
"name": "Go",
"bytes": "181529"
},
{
"name": "Jinja",
"bytes": "25855"
},
{
"name": "Makefile",
"bytes": "76329"
},
{
"name": "Python",
"bytes": "9758796"
},
{
"name": "RobotFramework",
"bytes": "10188"
},
{
"name": "Ruby",
"bytes": "1126"
},
{
"name": "Shell",
"bytes": "758475"
},
{
"name": "XSLT",
"bytes": "175917"
}
],
"symlink_target": ""
}
|
"""Changes to start the implementation of the version 2
Revision ID: 415746eb9f6
Revises: None
Create Date: 2014-10-23 16:00:47.940216
"""
# revision identifiers, used by Alembic.
revision = '415746eb9f6'
down_revision = '166ff2dcc48d'
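# This revision is normally applied or reverted through the Alembic CLI,
# e.g. (assuming the project's alembic.ini is configured):
#   alembic upgrade 415746eb9f6
#   alembic downgrade 166ff2dcc48d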
from alembic import op, context
from sqlalchemy import Column, ForeignKey, Table, MetaData
from sqlalchemy.types import Integer, Boolean, Unicode, Float
def upgrade():
schema = context.get_context().config.get_main_option('schema')
engine = op.get_bind().engine
if op.get_context().dialect.has_table(
engine, 'interface', schema=schema
): # pragma: nocover
return
op.drop_table('user_functionality', schema=schema)
op.create_table(
'interface',
Column(
'id', Integer, primary_key=True
),
Column('name', Unicode),
Column('description', Unicode),
schema=schema,
)
op.create_table(
'interface_layer',
Column(
'interface_id', Integer, ForeignKey(schema + '.interface.id'), primary_key=True
),
Column(
'layer_id', Integer, ForeignKey(schema + '.layer.id'), primary_key=True
),
schema=schema,
)
op.create_table(
'interface_theme',
Column(
'interface_id', Integer, ForeignKey(schema + '.interface.id'), primary_key=True
),
Column(
'theme_id', Integer, ForeignKey(schema + '.theme.id'), primary_key=True
),
schema=schema,
)
op.create_table(
'layerv1',
Column(
'id', Integer, ForeignKey(schema + '.layer.id'), primary_key=True
),
Column("is_checked", Boolean, default=True),
Column("icon", Unicode),
Column("layer_type", Unicode(12)),
Column("url", Unicode),
Column("image_type", Unicode(10)),
Column("style", Unicode),
Column("dimensions", Unicode),
Column("matrix_set", Unicode),
Column("wms_url", Unicode),
Column("wms_layers", Unicode),
Column("query_layers", Unicode),
Column("kml", Unicode),
Column("is_single_tile", Boolean),
Column("legend", Boolean, default=True),
Column("legend_image", Unicode),
Column("legend_rule", Unicode),
Column("is_legend_expanded", Boolean, default=False),
Column("min_resolution", Float),
Column("max_resolution", Float),
Column("disclaimer", Unicode),
Column("identifier_attribute_field", Unicode),
Column("exclude_properties", Unicode),
Column("time_mode", Unicode(8)),
schema=schema,
)
op.execute(
"UPDATE ONLY %(schema)s.treeitem SET type = 'layerv1' "
"WHERE type='layer'" % {'schema': schema}
)
op.execute(
'INSERT INTO %(schema)s.layerv1 ('
'id, is_checked, icon, layer_type, url, image_type, style, dimensions, matrix_set, '
'wms_url, wms_layers, query_layers, kml, is_single_tile, legend, legend_image, legend_rule, '
'is_legend_expanded, min_resolution, max_resolution, disclaimer, identifier_attribute_field, '
'exclude_properties, time_mode) '
'(SELECT '
'id, "isChecked" AS is_checked, icon, "layerType" AS layer_type, url, "imageType" AS image_type, '
'style, dimensions, "matrixSet" AS matrix_set, "wmsUrl" AS wms_url, "wmsLayers" AS wms_layers, '
'"queryLayers" AS query_layers, kml, "isSingleTile" AS is_single_tile, legend, '
'"legendImage" AS legend_image, "legendRule" AS legend_rule, '
'"isLegendExpanded" AS is_legend_expanded, "minResolution" AS min_resolution, '
'"maxResolution" AS max_resolution, disclaimer, '
'"identifierAttributeField" AS identifier_attribute_field, '
'"excludeProperties" AS exclude_properties, "timeMode" AS time_mode '
'FROM %(schema)s.layer)' % {'schema': schema}
)
op.drop_column('layer', 'isChecked', schema=schema)
op.drop_column('layer', 'icon', schema=schema)
op.drop_column('layer', 'layerType', schema=schema)
op.drop_column('layer', 'url', schema=schema)
op.drop_column('layer', 'imageType', schema=schema)
op.drop_column('layer', 'style', schema=schema)
op.drop_column('layer', 'dimensions', schema=schema)
op.drop_column('layer', 'matrixSet', schema=schema)
op.drop_column('layer', 'wmsUrl', schema=schema)
op.drop_column('layer', 'wmsLayers', schema=schema)
op.drop_column('layer', 'queryLayers', schema=schema)
op.drop_column('layer', 'kml', schema=schema)
op.drop_column('layer', 'isSingleTile', schema=schema)
op.drop_column('layer', 'legend', schema=schema)
op.drop_column('layer', 'legendImage', schema=schema)
op.drop_column('layer', 'legendRule', schema=schema)
op.drop_column('layer', 'isLegendExpanded', schema=schema)
op.drop_column('layer', 'minResolution', schema=schema)
op.drop_column('layer', 'maxResolution', schema=schema)
op.drop_column('layer', 'disclaimer', schema=schema)
op.drop_column('layer', 'identifierAttributeField', schema=schema)
op.drop_column('layer', 'excludeProperties', schema=schema)
op.drop_column('layer', 'timeMode', schema=schema)
interface = Table(
'interface', MetaData(),
Column('name', Unicode),
schema=schema,
)
op.bulk_insert(interface, [
{'name': 'main'},
{'name': 'mobile'},
{'name': 'edit'},
{'name': 'routing'},
])
op.execute(
'INSERT INTO %(schema)s.interface_layer (layer_id, interface_id) '
'(SELECT l.id AS layer_id, i.id AS interface_id '
'FROM %(schema)s.layer AS l, %(schema)s.interface AS i '
'WHERE i.name in (\'main\', \'edit\', \'routing\') AND l."inDesktopViewer")' % {'schema': schema}
)
op.execute(
'INSERT INTO %(schema)s.interface_layer (layer_id, interface_id) '
'(SELECT l.id AS layer_id, i.id AS interface_id '
'FROM %(schema)s.layer AS l, %(schema)s.interface AS i '
'WHERE i.name = \'mobile\' AND l."inMobileViewer")' % {'schema': schema}
)
op.execute(
'INSERT INTO %(schema)s.interface_theme (theme_id, interface_id) '
'(SELECT l.id AS theme_id, i.id AS interface_id '
'FROM %(schema)s.theme AS l, %(schema)s.interface AS i '
'WHERE i.name in (\'main\', \'edit\', \'routing\') AND l."inDesktopViewer")' % {'schema': schema}
)
op.execute(
'INSERT INTO %(schema)s.interface_theme (theme_id, interface_id) '
'(SELECT l.id AS theme_id, i.id AS interface_id '
'FROM %(schema)s.theme AS l, %(schema)s.interface AS i '
'WHERE i.name = \'mobile\' AND l."inMobileViewer")' % {'schema': schema}
)
op.drop_column('layer', 'inMobileViewer', schema=schema)
op.drop_column('layer', 'inDesktopViewer', schema=schema)
op.alter_column('layer', 'geoTable', new_column_name='geo_table', schema=schema)
op.drop_column('theme', 'inMobileViewer', schema=schema)
op.drop_column('theme', 'inDesktopViewer', schema=schema)
op.alter_column('treeitem', 'metadataURL', new_column_name='metadata_url', schema=schema)
op.alter_column('layergroup', 'isExpanded', new_column_name='is_expanded', schema=schema)
op.alter_column('layergroup', 'isInternalWMS', new_column_name='is_internal_wms', schema=schema)
op.alter_column('layergroup', 'isBaseLayer', new_column_name='is_base_layer', schema=schema)
op.create_table(
'layer_internal_wms',
Column(
'id', Integer, ForeignKey(schema + '.layer.id'), primary_key=True
),
Column('layer', Unicode),
Column('image_type', Unicode(10)),
Column('style', Unicode),
Column('time_mode', Unicode(8)),
schema=schema,
)
op.create_table(
'layer_external_wms',
Column(
'id', Integer, ForeignKey(schema + '.layer.id'), primary_key=True
),
Column('url', Unicode),
Column('layer', Unicode),
Column('image_type', Unicode(10)),
Column('style', Unicode),
Column('is_single_tile', Boolean),
Column('time_mode', Unicode(8)),
schema=schema,
)
op.create_table(
'layer_wmts',
Column(
'id', Integer, ForeignKey(schema + '.layer.id'), primary_key=True,
),
Column('url', Unicode),
Column('layer', Unicode),
Column('style', Unicode),
Column('matrix_set', Unicode),
schema=schema,
)
op.create_table(
'ui_metadata',
Column(
'id', Integer, primary_key=True
),
Column('name', Unicode),
Column('value', Unicode),
Column('description', Unicode),
Column('item_id', Integer, ForeignKey(schema + '.treeitem.id'), nullable=False),
schema=schema,
)
op.create_table(
'wmts_dimension',
Column(
'id', Integer, primary_key=True
),
Column('name', Unicode),
Column('value', Unicode),
Column('description', Unicode),
Column('layer_id', Integer, ForeignKey(schema + '.layer_wmts.id'), nullable=False),
schema=schema,
)
def downgrade():
schema = context.get_context().config.get_main_option('schema')
op.drop_table('wmts_dimension', schema=schema)
op.drop_table('ui_metadata', schema=schema)
op.drop_table('layer_wmts', schema=schema)
op.drop_table('layer_external_wms', schema=schema)
op.drop_table('layer_internal_wms', schema=schema)
op.add_column('layer', Column('inMobileViewer', Boolean, default=False), schema=schema)
op.add_column('layer', Column('inDesktopViewer', Boolean, default=True), schema=schema)
op.alter_column('layer', 'geo_table', new_column_name='geoTable', schema=schema)
op.add_column('theme', Column('inMobileViewer', Boolean, default=False), schema=schema)
op.add_column('theme', Column('inDesktopViewer', Boolean, default=True), schema=schema)
op.alter_column('treeitem', 'metadata_url', new_column_name='metadataURL', schema=schema)
op.alter_column('layergroup', 'is_expanded', new_column_name='isExpanded', schema=schema)
op.alter_column('layergroup', 'is_internal_wms', new_column_name='isInternalWMS', schema=schema)
op.alter_column('layergroup', 'is_base_layer', new_column_name='isBaseLayer', schema=schema)
op.execute(
'UPDATE ONLY %(schema)s.theme AS t '
'SET "inDesktopViewer" = FALSE' % {'schema': schema}
)
op.execute(
'UPDATE ONLY %(schema)s.layer AS t '
'SET "inDesktopViewer" = FALSE' % {'schema': schema}
)
op.execute(
'UPDATE ONLY %(schema)s.theme AS t '
'SET "inMobileViewer" = TRUE '
'FROM %(schema)s.interface AS i, %(schema)s.interface_theme AS it '
'WHERE i.name = \'mobile\' AND i.id = it.interface_id AND it.theme_id = t.id' % {'schema': schema}
)
op.execute(
'UPDATE ONLY %(schema)s.theme AS t '
'SET "inDesktopViewer" = TRUE '
'FROM %(schema)s.interface AS i, %(schema)s.interface_theme AS it '
'WHERE i.name = \'main\' AND i.id = it.interface_id AND it.theme_id = t.id' % {'schema': schema}
)
op.execute(
'UPDATE ONLY %(schema)s.layer AS l '
'SET "inMobileViewer" = TRUE '
'FROM %(schema)s.interface AS i, %(schema)s.interface_layer AS il '
'WHERE i.name = \'mobile\' AND i.id = il.interface_id AND il.layer_id = l.id' % {'schema': schema}
)
op.execute(
'UPDATE ONLY %(schema)s.layer AS l '
'SET "inDesktopViewer" = TRUE '
'FROM %(schema)s.interface AS i, %(schema)s.interface_layer AS il '
'WHERE i.name = \'main\' AND i.id = il.interface_id AND il.layer_id = l.id' % {'schema': schema}
)
op.add_column('layer', Column('timeMode', Unicode(8)), schema=schema)
op.add_column('layer', Column('excludeProperties', Unicode), schema=schema)
op.add_column('layer', Column('identifierAttributeField', Unicode), schema=schema)
op.add_column('layer', Column('disclaimer', Unicode), schema=schema)
op.add_column('layer', Column('maxResolution', Float), schema=schema)
op.add_column('layer', Column('minResolution', Float), schema=schema)
op.add_column('layer', Column('isLegendExpanded', Boolean, default=False), schema=schema)
op.add_column('layer', Column('legendRule', Unicode), schema=schema)
op.add_column('layer', Column('legendImage', Unicode), schema=schema)
op.add_column('layer', Column('legend', Boolean, default=True), schema=schema)
op.add_column('layer', Column('isSingleTile', Boolean, default=False), schema=schema)
op.add_column('layer', Column('kml', Unicode), schema=schema)
op.add_column('layer', Column('queryLayers', Unicode), schema=schema)
op.add_column('layer', Column('wmsLayers', Unicode), schema=schema)
op.add_column('layer', Column('wmsUrl', Unicode), schema=schema)
op.add_column('layer', Column('matrixSet', Unicode), schema=schema)
op.add_column('layer', Column('dimensions', Unicode), schema=schema)
op.add_column('layer', Column('style', Unicode), schema=schema)
op.add_column('layer', Column('imageType', Unicode(10)), schema=schema)
op.add_column('layer', Column('url', Unicode), schema=schema)
op.add_column('layer', Column('layerType', Unicode(12)), schema=schema)
op.add_column('layer', Column('icon', Unicode), schema=schema)
op.add_column('layer', Column('isChecked', Boolean, default=True), schema=schema)
op.execute(
'UPDATE %(schema)s.layer AS l SET ('
'id, "isChecked", icon, "layerType", url, "imageType", style, dimensions, "matrixSet", '
'"wmsUrl", "wmsLayers", "queryLayers", kml, "isSingleTile", legend, "legendImage", "legendRule", '
'"isLegendExpanded", "minResolution", "maxResolution", disclaimer, "identifierAttributeField", '
'"excludeProperties", "timeMode"'
') = ('
'o.id, o.is_checked, o.icon, o.layer_type, o.url, o.image_type, o.style, o.dimensions, o.matrix_set, '
'o.wms_url, o.wms_layers, o.query_layers, o.kml, o.is_single_tile, o.legend, o.legend_image, o.legend_rule, '
'o.is_legend_expanded, o.min_resolution, o.max_resolution, o.disclaimer, o.identifier_attribute_field, '
'o.exclude_properties, o.time_mode '
') FROM %(schema)s.layerv1 AS o WHERE o.id = l.id' % {'schema': schema}
)
op.drop_table('layerv1', schema=schema)
op.drop_table('interface_theme', schema=schema)
op.drop_table('interface_layer', schema=schema)
op.drop_table('interface', schema=schema)
op.create_table(
'user_functionality',
Column(
'user_id', Integer,
ForeignKey(schema + '.user.id'), primary_key=True
),
Column(
'functionality_id', Integer,
ForeignKey(schema + '.functionality.id'), primary_key=True
),
schema=schema,
)
|
{
"content_hash": "82d5914b495b74d830075d138695a357",
"timestamp": "",
"source": "github",
"line_count": 363,
"max_line_length": 117,
"avg_line_length": 41.421487603305785,
"alnum_prop": 0.6174514498536845,
"repo_name": "tsauerwein/c2cgeoportal",
"id": "19d43040382b25c9e589dd22202a9dbe6d768cee",
"size": "16624",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "c2cgeoportal/scaffolds/update/CONST_alembic/versions/415746eb9f6_changes_for_v2.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "3701979"
},
{
"name": "JavaScript",
"bytes": "12750450"
},
{
"name": "Makefile",
"bytes": "11077"
},
{
"name": "Python",
"bytes": "615585"
},
{
"name": "Ruby",
"bytes": "2164"
},
{
"name": "Shell",
"bytes": "4476"
}
],
"symlink_target": ""
}
|
"""
NOTE: This file is only used for backward compatibility tests with new dist_utils.py in
st2common/tests/unit/test_dist_utils.py.
DO NOT USE THIS FILE ANYWHERE ELSE!
"""
from __future__ import absolute_import
import os
import re
import sys
from distutils.version import StrictVersion
# NOTE: This script can't rely on any 3rd party dependency so we need to use this code here
PY3 = sys.version_info[0] == 3
if PY3:
text_type = str
else:
text_type = unicode # noqa # pylint: disable=E0602
GET_PIP = "curl https://bootstrap.pypa.io/get-pip.py | python"
try:
import pip
from pip import __version__ as pip_version
except ImportError as e:
print("Failed to import pip: %s" % (text_type(e)))
print("")
print("Download pip:\n%s" % (GET_PIP))
sys.exit(1)
try:
# pip < 10.0
from pip.req import parse_requirements
except ImportError:
# pip >= 10.0
try:
from pip._internal.req.req_file import parse_requirements
except ImportError as e:
print("Failed to import parse_requirements from pip: %s" % (text_type(e)))
print("Using pip: %s" % (str(pip_version)))
sys.exit(1)
__all__ = [
"check_pip_version",
"fetch_requirements",
"apply_vagrant_workaround",
"get_version_string",
"parse_version_string",
]
def check_pip_version(min_version="6.0.0"):
"""
Ensure that a minimum supported version of pip is installed.
"""
if StrictVersion(pip.__version__) < StrictVersion(min_version):
print(
"Upgrade pip, your version '{0}' "
"is outdated. Minimum required version is '{1}':\n{2}".format(
pip.__version__, min_version, GET_PIP
)
)
sys.exit(1)
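# StrictVersion compares numerically, which is the whole point here: a plain
# string comparison gets multi-digit components wrong, e.g.
#   StrictVersion('9.0.1') < StrictVersion('10.0.0')  -> True
#   '9.0.1' < '10.0.0'                                -> False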
def fetch_requirements(requirements_file_path):
"""
    Return a tuple (requirements, links) by parsing the provided requirements file.
"""
links = []
reqs = []
for req in parse_requirements(requirements_file_path, session=False):
# Note: req.url was used before 9.0.0 and req.link is used in all the recent versions
link = getattr(req, "link", getattr(req, "url", None))
if link:
links.append(str(link))
reqs.append(str(req.req))
return (reqs, links)
def apply_vagrant_workaround():
"""
    Detect if the script is being executed inside vagrant and, if it is,
    delete the "os.link" attribute.
Note: Without this workaround, setup.py sdist will fail when running inside a shared directory
(nfs / virtualbox shared folders).
"""
if os.environ.get("USER", None) == "vagrant":
del os.link
def get_version_string(init_file):
"""
    Read the __version__ string from an init file.
"""
with open(init_file, "r") as fp:
content = fp.read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", content, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string in %s." % (init_file))
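# For example, a file containing the line
#   __version__ = '1.2.3'
# makes get_version_string() return '1.2.3'.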
# alias for get_version_string
parse_version_string = get_version_string
|
{
"content_hash": "719afb4c0a17637048e7b82bb7d16895",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 98,
"avg_line_length": 27.883928571428573,
"alnum_prop": 0.6295228946525776,
"repo_name": "Plexxi/st2",
"id": "da38f6edbf4f0c2458645e9f5ebc30c99791bf94",
"size": "3775",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "scripts/dist_utils_old.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "JavaScript",
"bytes": "444"
},
{
"name": "Jinja",
"bytes": "174532"
},
{
"name": "Makefile",
"bytes": "75242"
},
{
"name": "PowerShell",
"bytes": "856"
},
{
"name": "Python",
"bytes": "6453910"
},
{
"name": "Shell",
"bytes": "93607"
},
{
"name": "Starlark",
"bytes": "7236"
}
],
"symlink_target": ""
}
|
from flask import Blueprint, render_template, redirect, session, request
import MySQLdb as mdb
import json
import hashlib
blueprint = Blueprint('admin', __name__, url_prefix='/admin', static_folder='../static', template_folder='../templates')
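# The blueprint is self-contained; a hypothetical application factory would
# mount it with something like (wiring assumed, not shown in this module):
#   app.register_blueprint(blueprint)   # serves everything under /admin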
#database query functions
def getHours(): #gets fec hours
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select * from hours"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows
def getReservations(): #gets fec reservations
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select * from reservations where date > CURDATE()"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
x = 0
while x < len(rows):
rows[x]["details"] = unicode(rows[x]["details"], "utf-8")
x += 1
return rows
def deleteReservation(i): #deletes fec reservations
    con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
    # parameterized query keeps the request-supplied id out of the SQL string
    query = "delete from reservations where id = %s"
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query, (i,))
def updateDay(day, start, end, o): #updates fec hours
    con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
    query = "update hours set start = %s, end = %s, open = %s where day = %s"
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query, (start, end, o, day))
def updatePassword(password):
    con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
    query = "update password set password = %s"
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query, (hashlib.md5(password).hexdigest(),))
def updatePeople(people):
    con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
    query = "update people set people = %s"
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query, (people,))
def updatePhone(phone):
    con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
    query = "update phone set phone = %s"
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query, (str(phone),))
def updateEmail(email):
    con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
    query = "update email set email = %s"
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query, (email,))
def getPassword():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select password from password"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows[0]["password"]
def getPeople():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select people from people"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows[0]["people"]
def getPhone():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select phone from phone"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows[0]["phone"]
def getEmail():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select email from email"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows[0]["email"]
def getReservationsByMonth():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select months.id, months.month, count(reservations.id) count from months left outer join reservations on months.id = month(reservations.date) group by months.id order by months.id asc;"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows
def getReservationsByDay():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select days.id, days.day, count(reservations.id) count from days left outer join reservations on days.id = dayofweek(reservations.date) group by days.id order by days.id asc;"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows
def getPackagesByType():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select packages.id - 1 id, packages.package, count(reservations.id) count from packages left outer join reservations on packages.id - 1 = reservations.package group by packages.id order by packages.id asc;"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows
def getReservationsByCounty():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "SELECT county, COUNT(*) count FROM reservations where county <> 'NULL' GROUP BY county ORDER BY count asc;"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return rows
def getMessages():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
con.set_character_set('utf8')
query = "select *, DATE_FORMAT(ts, '%c/%e/%Y') smalldate,DATE_FORMAT(ts, '%l:%i %p') time, DATE_FORMAT(ts, '%M %e, %Y') date from messages order by ts desc;"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute('SET NAMES utf8;')
cur.execute('SET CHARACTER SET utf8;')
cur.execute('SET character_set_connection=utf8;')
cur.execute(query)
rows = cur.fetchall()
x = 0
while x < len(rows):
rows[x]["message"] = unicode(rows[x]["message"], "utf-8")
x += 1
return rows
def numberOfMessages():
con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
query = "select count(*) count from messages"
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute(query)
rows = cur.fetchall()
return int(rows[0]["count"])
def deleteMessage(i):
    con = mdb.connect('localhost', 'root', 'visibilitymatters', 'fbla')
    # parameterized query keeps the request-supplied id out of the SQL string
    query = "delete from messages where id = %s"
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query, (i,))
#session functions
def login():
session["admin"] = True
def logout():
session.pop("admin")
@blueprint.route('/')
def indexRoute():
if "admin" in session:
return render_template("admin_pages/index.html", page="home", messageNum=numberOfMessages(), reservationsByMonth=getReservationsByMonth(), reservationsByDay=getReservationsByDay(), packagesByType=getPackagesByType(), reservationsByCounty=getReservationsByCounty())
else:
return redirect("/admin/login")
@blueprint.route('/hours')
def formRoute():
if "admin" in session:
return render_template("admin_pages/hours.html", page="hours", messageNum=numberOfMessages(), hours=getHours())
else:
return redirect("/admin/login")
@blueprint.route('/reservations')
def reservationsRoute():
if "admin" in session:
return render_template("admin_pages/reservations.html", page="reservations", messageNum=numberOfMessages(), reservations=getReservations())
else:
return redirect("/admin/login")
@blueprint.route('/settings')
def settingsRoute():
if "admin" in session:
return render_template("admin_pages/settings.html", page="settings", messageNum=numberOfMessages(), people=getPeople(), phone=getPhone(), email=getEmail())
else:
return redirect("/admin/login")
@blueprint.route('/messages')
def messagesRoute():
if "admin" in session:
return render_template("admin_pages/messages.html", page="messages", messageNum=numberOfMessages(), messages=getMessages())
else:
return redirect("/admin/login")
#login routes
@blueprint.route('/login', methods=["GET", "POST"])
def loginRoute():
if "admin" not in session:
if request.method == "GET":
            # an error banner is shown when redirected back with ?password=false
            error = "password" in request.args
return render_template("admin_pages/login.html", page="login", error=error)
if request.method == "POST":
if hashlib.md5(request.form["password"]).hexdigest() == getPassword():
login()
return redirect("/admin")
else:
return redirect("/admin/login?password=false")
else:
return redirect("/admin")
@blueprint.route('/logout')
def logoutRoute():
logout()
return redirect("/admin/login")
#api routes
@blueprint.route('/api/updatehours', methods=["POST"])
def apiUpdateHoursRoute():
if "admin" in session:
hours = json.loads(request.form["hours"])
updateDay("mon", hours["mon"]["start"], hours["mon"]["end"], hours["mon"]["open"])
updateDay("tue", hours["tue"]["start"], hours["tue"]["end"], hours["tue"]["open"])
updateDay("wed", hours["wed"]["start"], hours["wed"]["end"], hours["wed"]["open"])
updateDay("thu", hours["thu"]["start"], hours["thu"]["end"], hours["thu"]["open"])
updateDay("fri", hours["fri"]["start"], hours["fri"]["end"], hours["fri"]["open"])
updateDay("sat", hours["sat"]["start"], hours["sat"]["end"], hours["sat"]["open"])
updateDay("sun", hours["sun"]["start"], hours["sun"]["end"], hours["sun"]["open"])
return "true"
else:
return "false"
@blueprint.route('/api/deletereservation', methods=["POST"])
def apiDeleteReservationRoute():
if "admin" in session:
deleteReservation(request.form["id"])
return "true"
else:
return "false"
@blueprint.route('/api/updatepassword', methods=["POST"])
def apiUpdatePasswordRoute():
if "admin" in session:
updatePassword(request.form["password"])
return "true"
else:
return "false"
@blueprint.route('/api/updatepeople', methods=["POST"])
def apiUpdatePeopleRoute():
if "admin" in session:
updatePeople(request.form["people"])
return "true"
else:
return "false"
@blueprint.route('/api/updatephone', methods=["POST"])
def apiUpdatePhoneRoute():
if "admin" in session:
updatePhone(request.form["phone"])
return "true"
else:
return "false"
@blueprint.route('/api/updateemail', methods=["POST"])
def apiUpdateEmailRoute():
if "admin" in session:
updateEmail(request.form["email"])
return "true"
else:
return "false"
@blueprint.route('/api/deletemessage', methods=["POST"])
def apiDeleteMessageRoute():
if "admin" in session:
deleteMessage(request.form["id"])
return "true"
else:
return "false"
|
{
"content_hash": "6150f5c517c6829e3a51be9c151f4626",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 266,
"avg_line_length": 32.76051779935275,
"alnum_prop": 0.6956435839178109,
"repo_name": "rileymjohnson/fbla",
"id": "ad91dbd061f41092e1199a577103a89267035bf0",
"size": "10140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/admin/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "271399"
},
{
"name": "HTML",
"bytes": "117948"
},
{
"name": "JavaScript",
"bytes": "454805"
},
{
"name": "Python",
"bytes": "24252"
}
],
"symlink_target": ""
}
|
from util import *
import sys
# Usage: z3-lib-dir benchmark-dir
test_cs(sys.argv[1], sys.argv[2], ext="cs", timeout_duration=60.0)
exit(0)
|
{
"content_hash": "f4ce3c0486b2c6d54a5b00d76fb2745d",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 66,
"avg_line_length": 27.8,
"alnum_prop": 0.7050359712230215,
"repo_name": "dstaple/z3test",
"id": "58ff9b27d0a5eb5898e9c3e78bc884b57b03514d",
"size": "183",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "scripts/test_cs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "21980"
},
{
"name": "Batchfile",
"bytes": "4009"
},
{
"name": "C#",
"bytes": "758062"
},
{
"name": "C++",
"bytes": "9623"
},
{
"name": "CSS",
"bytes": "3966"
},
{
"name": "Python",
"bytes": "131312"
},
{
"name": "SMT",
"bytes": "6844416"
},
{
"name": "Shell",
"bytes": "2179"
}
],
"symlink_target": ""
}
|
import os
from stash.tests.stashtest import StashTestCase
class Sha256sumTests(StashTestCase):
"""tests for the sha256sum command."""
def setUp(self):
"""setup the tests"""
self.cwd = self.get_data_path()
StashTestCase.setUp(self)
def get_data_path(self):
"""return the data/ sibling path"""
return os.path.abspath(os.path.join(os.path.dirname(__file__), "data"))
def test_help(self):
"""test sha256sum --help"""
output = self.run_command("sha256sum --help", exitcode=0)
# check for code words in output
self.assertIn("sha256sum", output)
self.assertIn("-h", output)
self.assertIn("-c", output)
def test_filehash(self):
"""tests the hashes of the files in data/"""
fp = self.get_data_path()
for fn in os.listdir(fp):
if "." in fn:
# file used for something else
continue
expected_hash = fn
fullp = os.path.join(fp, fn)
output = self.run_command("sha256sum " + fullp, exitcode=0)
result = output.split(" ")[0]
self.assertEqual(result, expected_hash)
def test_checkhash(self):
"""test sha256sum -c"""
output = self.run_command("sha256sum -c results.sha256sum", exitcode=0)
self.assertIn("Pass", output)
self.assertNotIn("Fail", output)
def test_checkhash_fail(self):
"""test failure sha256sum -c with invalid data"""
output = self.run_command("sha256sum -c wrong_results.sha256sum", exitcode=1)
self.assertIn("Pass", output) # some files should have the correct hash
self.assertIn("Fail", output)
def test_hash_stdin_implicit(self):
"""test hashing of stdin without arg"""
output = self.run_command("echo test | sha256sum", exitcode=0).replace("\n", "")
expected = "f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2"
self.assertEqual(output, expected)
def test_hash_stdin_explicit(self):
"""test hashing of stdin with '-' arg"""
output = self.run_command("echo test | sha256sum -", exitcode=0).replace("\n", "")
expected = "f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2"
self.assertEqual(output, expected)
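# The expected digests above can be reproduced with the standard library, e.g.:
#   import hashlib
#   hashlib.sha256(b"test\n").hexdigest()
#   # -> 'f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2'
# (echo appends a trailing newline, which is why it is part of the hashed data)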
|
{
"content_hash": "1f7c772fd1fcec68abca4549f66af5c0",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 90,
"avg_line_length": 38.19672131147541,
"alnum_prop": 0.6158798283261803,
"repo_name": "ywangd/stash",
"id": "2a56141e64c1a5ad101ec63453c05fcb832060fe",
"size": "2354",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sha256sum/test_sha256sum.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "939583"
},
{
"name": "Shell",
"bytes": "1648"
}
],
"symlink_target": ""
}
|
import copy
import logging
import google.oauth2.service_account
from ibis_bigquery.client import BigQueryClient
from data_validation import clients, consts, state_manager
from data_validation.result_handlers.bigquery import BigQueryResultHandler
from data_validation.result_handlers.text import TextResultHandler
from data_validation.validation_builder import ValidationBuilder
class ConfigManager(object):
_config: dict = None
_source_conn = None
_target_conn = None
_state_manager = None
source_client = None
target_client = None
def __init__(self, config, source_client=None, target_client=None, verbose=False):
"""Initialize a ConfigManager client which supplies the
source and target queries to run.
Args:
config (Dict): The Validation config supplied
source_client (IbisClient): The Ibis client for the source DB
target_client (IbisClient): The Ibis client for the target DB
verbose (Bool): If verbose, the Data Validation client will print queries run
"""
self._state_manager = state_manager.StateManager()
self._config = config
self.source_client = source_client or clients.get_data_client(
self.get_source_connection()
)
self.target_client = target_client or clients.get_data_client(
self.get_target_connection()
)
self.verbose = verbose
if self.validation_type not in consts.CONFIG_TYPES:
raise ValueError(f"Unknown Configuration Type: {self.validation_type}")
@property
def config(self):
"""Return config object."""
return self._config
def get_source_connection(self):
"""Return source connection object."""
if not self._source_conn:
if self._config.get(consts.CONFIG_SOURCE_CONN):
self._source_conn = self._config.get(consts.CONFIG_SOURCE_CONN)
else:
conn_name = self._config.get(consts.CONFIG_SOURCE_CONN_NAME)
self._source_conn = self._state_manager.get_connection_config(conn_name)
return self._source_conn
def get_target_connection(self):
"""Return target connection object."""
if not self._target_conn:
if self._config.get(consts.CONFIG_TARGET_CONN):
self._target_conn = self._config.get(consts.CONFIG_TARGET_CONN)
else:
conn_name = self._config.get(consts.CONFIG_TARGET_CONN_NAME)
self._target_conn = self._state_manager.get_connection_config(conn_name)
return self._target_conn
@property
def validation_type(self):
"""Return string validation type (Column|Schema)."""
return self._config[consts.CONFIG_TYPE]
def use_random_rows(self):
"""Return if the validation should use a random row filter."""
return self._config.get(consts.CONFIG_USE_RANDOM_ROWS) or False
def random_row_batch_size(self):
"""Return if the validation should use a random row filter."""
return (
self._config.get(consts.CONFIG_RANDOM_ROW_BATCH_SIZE)
or consts.DEFAULT_NUM_RANDOM_ROWS
)
def get_random_row_batch_size(self):
"""Return number of random rows or None."""
return self.random_row_batch_size() if self.use_random_rows() else None
def process_in_memory(self):
"""Return whether to process in memory or on a remote platform."""
return True
@property
def max_recursive_query_size(self):
"""Return Aggregates from Config"""
return self._config.get(consts.CONFIG_MAX_RECURSIVE_QUERY_SIZE, 50000)
@property
def aggregates(self):
"""Return Aggregates from Config"""
return self._config.get(consts.CONFIG_AGGREGATES, [])
def append_aggregates(self, aggregate_configs):
"""Append aggregate configs to existing config."""
self._config[consts.CONFIG_AGGREGATES] = self.aggregates + aggregate_configs
@property
def calculated_fields(self):
return self._config.get(consts.CONFIG_CALCULATED_FIELDS, [])
def append_calculated_fields(self, calculated_configs):
self._config[consts.CONFIG_CALCULATED_FIELDS] = (
self.calculated_fields + calculated_configs
)
@property
def query_groups(self):
"""Return Query Groups from Config"""
return self._config.get(consts.CONFIG_GROUPED_COLUMNS, [])
def append_query_groups(self, grouped_column_configs):
"""Append grouped configs to existing config."""
self._config[consts.CONFIG_GROUPED_COLUMNS] = (
self.query_groups + grouped_column_configs
)
@property
def custom_query_type(self):
"""Return custom query type from config"""
return self._config.get(consts.CONFIG_CUSTOM_QUERY_TYPE, "")
def append_custom_query_type(self, custom_query_type):
"""Append custom query type config to existing config."""
self._config[consts.CONFIG_CUSTOM_QUERY_TYPE] = (
self.custom_query_type + custom_query_type
)
@property
def source_query_file(self):
"""Return SQL Query File from Config"""
return self._config.get(consts.CONFIG_SOURCE_QUERY_FILE, [])
def append_source_query_file(self, query_file_configs):
"""Append grouped configs to existing config."""
self._config[consts.CONFIG_SOURCE_QUERY_FILE] = (
self.source_query_file + query_file_configs
)
@property
def target_query_file(self):
"""Return SQL Query File from Config"""
return self._config.get(consts.CONFIG_TARGET_QUERY_FILE, [])
def append_target_query_file(self, query_file_configs):
"""Append grouped configs to existing config."""
self._config[consts.CONFIG_TARGET_QUERY_FILE] = (
self.target_query_file + query_file_configs
)
@property
def primary_keys(self):
"""Return Primary keys from Config"""
return self._config.get(consts.CONFIG_PRIMARY_KEYS, [])
def append_primary_keys(self, primary_key_configs):
"""Append primary key configs to existing config."""
self._config[consts.CONFIG_PRIMARY_KEYS] = (
self.primary_keys + primary_key_configs
)
def get_primary_keys_list(self):
"""Return list of primary key column names"""
return [key[consts.CONFIG_SOURCE_COLUMN] for key in self.primary_keys]
@property
def comparison_fields(self):
"""Return fields from Config"""
return self._config.get(consts.CONFIG_COMPARISON_FIELDS, [])
def append_comparison_fields(self, field_configs):
"""Append field configs to existing config."""
self._config[consts.CONFIG_COMPARISON_FIELDS] = (
self.comparison_fields + field_configs
)
@property
def filters(self):
"""Return Filters from Config"""
return self._config.get(consts.CONFIG_FILTERS, [])
@property
def source_schema(self):
"""Return string value of source schema."""
if self.source_client._source_type == "FileSystem":
return None
return self._config.get(consts.CONFIG_SCHEMA_NAME, None)
@property
def source_table(self):
"""Return string value of source table."""
return self._config[consts.CONFIG_TABLE_NAME]
@property
def target_schema(self):
"""Return string value of target schema."""
if self.target_client._source_type == "FileSystem":
return None
return self._config.get(consts.CONFIG_TARGET_SCHEMA_NAME, self.source_schema)
@property
def target_table(self):
"""Return string value of target table."""
return self._config.get(
consts.CONFIG_TARGET_TABLE_NAME, self._config[consts.CONFIG_TABLE_NAME]
)
@property
def full_target_table(self):
"""Return string value of fully qualified target table."""
if self.target_schema:
return self.target_schema + "." + self.target_table
else:
return self.target_table
@property
def full_source_table(self):
"""Return string value of target table."""
if self.source_schema:
return self.source_schema + "." + self.source_table
else:
return self.source_table
@property
def labels(self):
"""Return labels."""
return self._config.get(consts.CONFIG_LABELS, [])
@property
def result_handler_config(self):
"""Return int limit for query executions."""
return self._config.get(consts.CONFIG_RESULT_HANDLER) or {}
@property
def query_limit(self):
"""Return int limit for query executions."""
return self._config.get(consts.CONFIG_LIMIT)
@property
def threshold(self):
"""Return threshold from Config"""
return self._config.get(consts.CONFIG_THRESHOLD, 0.0)
@property
def source_query(self):
return self._config.get(consts.CONFIG_SOURCE_QUERY, None)
def append_source_query(self, source_query):
self._config["source_query"] = source_query
@property
def target_query(self):
return self._config.get(consts.CONFIG_TARGET_QUERY, None)
def append_target_query(self, target_query):
self._config["target_query"] = target_query
@property
def exclusion_columns(self):
"""Return the exclusion columns from Config"""
return self._config.get(consts.CONFIG_EXCLUSION_COLUMNS, [])
@property
def filter_status(self):
"""Return filter status list from Config"""
return self._config.get(consts.CONFIG_FILTER_STATUS, None)
def append_exclusion_columns(self, column_configs):
"""Append exclusion columns to existing config."""
self._config[consts.CONFIG_EXCLUSION_COLUMNS] = (
self.exclusion_columns + column_configs
)
def get_source_ibis_table(self):
"""Return IbisTable from source."""
if not hasattr(self, "_source_ibis_table"):
self._source_ibis_table = clients.get_ibis_table(
self.source_client, self.source_schema, self.source_table
)
return self._source_ibis_table
def get_source_ibis_table_from_query(self):
"""Return IbisTable from source."""
self._source_ibis_table = clients.get_ibis_query(
self.source_client, self.source_query
)
return self._source_ibis_table
def get_source_ibis_calculated_table(self, depth=None):
"""Return mutated IbisTable from source
n: Int the depth of subquery requested"""
if self.validation_type == consts.CUSTOM_QUERY:
table = self.get_source_ibis_table_from_query()
else:
table = self.get_source_ibis_table()
vb = ValidationBuilder(self)
calculated_table = table.mutate(
vb.source_builder.compile_calculated_fields(table, n=depth)
)
return calculated_table
def get_target_ibis_table(self):
"""Return IbisTable from target."""
if not hasattr(self, "_target_ibis_table"):
self._target_ibis_table = clients.get_ibis_table(
self.target_client, self.target_schema, self.target_table
)
return self._target_ibis_table
def get_target_ibis_table_from_query(self):
"""Return IbisTable from source."""
self._target_ibis_table = clients.get_ibis_query(
self.target_client, self.target_query
)
return self._target_ibis_table
def get_target_ibis_calculated_table(self, depth=None):
"""Return mutated IbisTable from target
n: Int the depth of subquery requested"""
if self.validation_type == consts.CUSTOM_QUERY:
table = self.get_target_ibis_table_from_query()
else:
table = self.get_target_ibis_table()
vb = ValidationBuilder(self)
calculated_table = table.mutate(
vb.target_builder.compile_calculated_fields(table, n=depth)
)
return calculated_table
def get_yaml_validation_block(self):
"""Return Dict object formatted for a Yaml file."""
config = copy.deepcopy(self.config)
config.pop(consts.CONFIG_SOURCE_CONN, None)
config.pop(consts.CONFIG_TARGET_CONN, None)
config.pop(consts.CONFIG_SOURCE_CONN_NAME, None)
config.pop(consts.CONFIG_TARGET_CONN_NAME, None)
config.pop(consts.CONFIG_RESULT_HANDLER, None)
return config
def get_result_handler(self):
"""Return ResultHandler instance from supplied config."""
if not self.result_handler_config:
if self.config[consts.CONFIG_TYPE] == consts.SCHEMA_VALIDATION:
cols_filter_list = consts.SCHEMA_VALIDATION_COLUMN_FILTER_LIST
else:
cols_filter_list = consts.COLUMN_FILTER_LIST
# handler that display results either to output or in a file
return TextResultHandler(
self._config.get(consts.CONFIG_FORMAT, "table"),
self.filter_status,
cols_filter_list,
)
result_type = self.result_handler_config[consts.CONFIG_TYPE]
if result_type == "BigQuery":
project_id = self.result_handler_config[consts.PROJECT_ID]
table_id = self.result_handler_config[consts.TABLE_ID]
key_path = self.result_handler_config.get(
consts.GOOGLE_SERVICE_ACCOUNT_KEY_PATH
)
if key_path:
credentials = (
google.oauth2.service_account.Credentials.from_service_account_file(
key_path
)
)
else:
credentials = None
return BigQueryResultHandler.get_handler_for_project(
project_id,
self.filter_status,
table_id=table_id,
credentials=credentials,
)
else:
raise ValueError(f"Unknown ResultHandler Class: {result_type}")
@staticmethod
def build_config_manager(
config_type,
source_conn_name,
target_conn_name,
table_obj,
labels,
threshold,
format,
use_random_rows=None,
random_row_batch_size=None,
source_client=None,
target_client=None,
result_handler_config=None,
filter_config=None,
filter_status=None,
verbose=False,
):
        """Return a ConfigManager instance with available config."""
        if isinstance(filter_config, dict):
            filter_config = [filter_config]
config = {
consts.CONFIG_TYPE: config_type,
consts.CONFIG_SOURCE_CONN_NAME: source_conn_name,
consts.CONFIG_TARGET_CONN_NAME: target_conn_name,
consts.CONFIG_TABLE_NAME: table_obj.get(consts.CONFIG_TABLE_NAME, None),
consts.CONFIG_SCHEMA_NAME: table_obj.get(consts.CONFIG_SCHEMA_NAME, None),
consts.CONFIG_TARGET_SCHEMA_NAME: table_obj.get(
consts.CONFIG_TARGET_SCHEMA_NAME,
table_obj.get(consts.CONFIG_SCHEMA_NAME, None),
),
consts.CONFIG_TARGET_TABLE_NAME: table_obj.get(
consts.CONFIG_TARGET_TABLE_NAME,
table_obj.get(consts.CONFIG_TABLE_NAME, None),
),
consts.CONFIG_LABELS: labels,
consts.CONFIG_THRESHOLD: threshold,
consts.CONFIG_FORMAT: format,
consts.CONFIG_RESULT_HANDLER: result_handler_config,
consts.CONFIG_FILTERS: filter_config,
consts.CONFIG_USE_RANDOM_ROWS: use_random_rows,
consts.CONFIG_RANDOM_ROW_BATCH_SIZE: random_row_batch_size,
consts.CONFIG_FILTER_STATUS: filter_status,
}
return ConfigManager(
config,
source_client=source_client,
target_client=target_client,
verbose=verbose,
)
def build_config_comparison_fields(self, fields, depth=None):
"""Return list of field config objects."""
field_configs = []
for field in fields:
column_config = {
consts.CONFIG_SOURCE_COLUMN: field.casefold(),
consts.CONFIG_TARGET_COLUMN: field.casefold(),
consts.CONFIG_FIELD_ALIAS: field,
consts.CONFIG_CAST: None,
}
field_configs.append(column_config)
return field_configs
def build_column_configs(self, columns):
"""Return list of grouped column config objects."""
column_configs = []
source_table = self.get_source_ibis_calculated_table()
target_table = self.get_target_ibis_calculated_table()
casefold_source_columns = {x.casefold(): str(x) for x in source_table.columns}
casefold_target_columns = {x.casefold(): str(x) for x in target_table.columns}
for column in columns:
if column.casefold() not in casefold_source_columns:
raise ValueError(f"Grouped Column DNE in source: {column}")
if column.casefold() not in casefold_target_columns:
raise ValueError(f"Grouped Column DNE in target: {column}")
column_config = {
consts.CONFIG_SOURCE_COLUMN: casefold_source_columns[column.casefold()],
consts.CONFIG_TARGET_COLUMN: casefold_target_columns[column.casefold()],
consts.CONFIG_FIELD_ALIAS: column,
consts.CONFIG_CAST: None,
}
column_configs.append(column_config)
return column_configs
def build_config_count_aggregate(self):
"""Return dict aggregate for COUNT(*)."""
aggregate_config = {
consts.CONFIG_SOURCE_COLUMN: None,
consts.CONFIG_TARGET_COLUMN: None,
consts.CONFIG_FIELD_ALIAS: "count",
consts.CONFIG_TYPE: "count",
}
return aggregate_config
def build_and_append_pre_agg_calc_config(
self, column, calc_func, cast_type=None, depth=0
):
"""Create calculated field config used as a pre-aggregation step. Appends to calulated fields if does not already exist and returns created config."""
calculated_config = {
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: [column],
consts.CONFIG_CALCULATED_TARGET_COLUMNS: [column],
consts.CONFIG_FIELD_ALIAS: f"{calc_func}__{column}",
consts.CONFIG_TYPE: calc_func,
consts.CONFIG_DEPTH: depth,
}
if calc_func == "cast" and cast_type is not None:
calculated_config[consts.CONFIG_DEFAULT_CAST] = cast_type
calculated_config[
consts.CONFIG_FIELD_ALIAS
] = f"{calc_func}_{cast_type}__{column}"
existing_calc_fields = [
config[consts.CONFIG_FIELD_ALIAS] for config in self.calculated_fields
]
if calculated_config[consts.CONFIG_FIELD_ALIAS] not in existing_calc_fields:
self.append_calculated_fields([calculated_config])
return calculated_config
def append_pre_agg_calc_field(self, column, agg_type, column_type):
"""Append calculated field for length(string) or epoch_seconds(timestamp) for preprocessing before column validation aggregation."""
depth, cast_type = 0, None
if column_type == "string":
calc_func = "length"
elif column_type == "timestamp":
if isinstance(self.source_client, BigQueryClient) or isinstance(
self.target_client, BigQueryClient
):
calc_func = "cast"
cast_type = "timestamp"
pre_calculated_config = self.build_and_append_pre_agg_calc_config(
column, calc_func, cast_type, depth
)
column = pre_calculated_config[consts.CONFIG_FIELD_ALIAS]
depth = 1
calc_func = "epoch_seconds"
elif column_type == "int32":
calc_func = "cast"
cast_type = "int64"
else:
raise ValueError(f"Unsupported column type: {column_type}")
calculated_config = self.build_and_append_pre_agg_calc_config(
column, calc_func, cast_type, depth
)
aggregate_config = {
consts.CONFIG_SOURCE_COLUMN: f"{calculated_config[consts.CONFIG_FIELD_ALIAS]}",
consts.CONFIG_TARGET_COLUMN: f"{calculated_config[consts.CONFIG_FIELD_ALIAS]}",
consts.CONFIG_FIELD_ALIAS: f"{agg_type}__{calculated_config[consts.CONFIG_FIELD_ALIAS]}",
consts.CONFIG_TYPE: agg_type,
}
return aggregate_config
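    # Illustrative sketch (not part of the original module): for a string
    # column "name" with agg_type "min", append_pre_agg_calc_field registers a
    # "length" calculated field and returns an aggregate over its alias:
    #
    #   calculated: {..., field_alias: "length__name", type: "length", depth: 0}
    #   aggregate:  {source/target column: "length__name",
    #                field_alias: "min__length__name", type: "min"}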
def build_config_column_aggregates(
self, agg_type, arg_value, supported_types, cast_to_bigint=False
):
"""Return list of aggregate objects of given agg_type."""
aggregate_configs = []
source_table = self.get_source_ibis_calculated_table()
target_table = self.get_target_ibis_calculated_table()
casefold_source_columns = {x.casefold(): str(x) for x in source_table.columns}
casefold_target_columns = {x.casefold(): str(x) for x in target_table.columns}
if arg_value and supported_types:
supported_types.append("string")
allowlist_columns = arg_value or casefold_source_columns
for column in casefold_source_columns:
# Get column type and remove precision/scale attributes
column_type_str = str(source_table[casefold_source_columns[column]].type())
column_type = column_type_str.split("(")[0]
if column not in allowlist_columns:
continue
elif column not in casefold_target_columns:
logging.warning(
f"Skipping {agg_type} on {column} as column is not present in target table"
)
continue
elif supported_types and column_type not in supported_types:
if self.verbose:
logging.info(
f"Skipping {agg_type} on {column} due to data type: {column_type}"
)
continue
if (
column_type == "string"
or (cast_to_bigint and column_type == "int32")
or (
column_type == "timestamp"
and agg_type
in (
"sum",
"avg",
"bit_xor",
) # For timestamps: do not convert to epoch seconds for min/max
)
):
aggregate_config = self.append_pre_agg_calc_field(
column, agg_type, column_type
)
else:
aggregate_config = {
consts.CONFIG_SOURCE_COLUMN: casefold_source_columns[column],
consts.CONFIG_TARGET_COLUMN: casefold_target_columns[column],
consts.CONFIG_FIELD_ALIAS: f"{agg_type}__{column}",
consts.CONFIG_TYPE: agg_type,
}
aggregate_configs.append(aggregate_config)
return aggregate_configs
def build_config_calculated_fields(
self, reference, calc_type, alias, depth, supported_types, arg_value=None
):
"""Returns list of calculated fields"""
source_table = self.get_source_ibis_calculated_table(depth=depth)
target_table = self.get_target_ibis_calculated_table(depth=depth)
casefold_source_columns = {x.casefold(): str(x) for x in source_table.columns}
casefold_target_columns = {x.casefold(): str(x) for x in target_table.columns}
allowlist_columns = arg_value or casefold_source_columns
for column in casefold_source_columns:
column_type_str = str(source_table[casefold_source_columns[column]].type())
column_type = column_type_str.split("(")[0]
if column not in allowlist_columns:
continue
elif column not in casefold_target_columns:
logging.info(
f"Skipping {calc_type} on {column} as column is not present in target table"
)
continue
elif supported_types and column_type not in supported_types:
if self.verbose:
msg = f"Skipping {calc_type} on {column} due to data type: {column_type}"
logging.info(msg)
continue
calculated_config = {
consts.CONFIG_CALCULATED_SOURCE_COLUMNS: reference,
consts.CONFIG_CALCULATED_TARGET_COLUMNS: reference,
consts.CONFIG_FIELD_ALIAS: alias,
consts.CONFIG_TYPE: calc_type,
consts.CONFIG_DEPTH: depth,
}
return calculated_config
def _build_dependent_aliases(self, calc_type, col_list=None):
"""This is a utility function for determining the required depth of all fields"""
order_of_operations = []
if col_list is None:
source_table = self.get_source_ibis_calculated_table()
casefold_source_columns = {
x.casefold(): str(x) for x in source_table.columns
}
else:
casefold_source_columns = {x.casefold(): str(x) for x in col_list}
if calc_type == "hash":
order_of_operations = [
"cast",
"ifnull",
"rstrip",
"upper",
"concat",
"hash",
]
if calc_type == "concat":
order_of_operations = [
"cast",
"ifnull",
"rstrip",
"upper",
"concat",
]
column_aliases = {}
col_names = []
for i, calc in enumerate(order_of_operations):
if i == 0:
previous_level = [x for x in casefold_source_columns.values()]
else:
previous_level = [k for k, v in column_aliases.items() if v == i - 1]
if calc in ["concat", "hash"]:
col = {}
col["reference"] = previous_level
col["name"] = f"{calc}__all"
col["calc_type"] = calc
col["depth"] = i
name = col["name"]
                # Capture every alias from the previous level under a single alias (e.g. concat__all).
column_aliases[name] = i
col_names.append(col)
else:
                # Iterate over the previous level's manifest of column aliases.
                for column in previous_level:
col = {}
col["reference"] = [column]
col["name"] = f"{calc}__" + column
col["calc_type"] = calc
col["depth"] = i
name = col["name"]
column_aliases[name] = i
col_names.append(col)
return col_names
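    # Illustrative sketch (not part of the original module): for
    # _build_dependent_aliases("hash", ["a"]) the returned manifest walks the
    # order of operations one depth per step:
    #
    #   depth 0: name "cast__a",                        reference ["a"]
    #   depth 1: name "ifnull__cast__a",                reference ["cast__a"]
    #   depth 2: name "rstrip__ifnull__cast__a",        reference ["ifnull__cast__a"]
    #   depth 3: name "upper__rstrip__ifnull__cast__a", reference ["rstrip__ifnull__cast__a"]
    #   depth 4: name "concat__all",                    reference [all depth-3 aliases]
    #   depth 5: name "hash__all",                      reference ["concat__all"]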
def get_query_from_file(self, filename):
"""Return query from input file"""
query = ""
try:
file = open(filename, "r")
query = file.read()
query = query.rstrip(";\n")
except IOError:
logging.warning("Cannot read query file: ", filename)
if not query or query.isspace():
raise ValueError(
"Expected file with sql query, got empty file or file with white spaces. "
f"input file: {filename}"
)
file.close()
return query
|
{
"content_hash": "90d11c6e220321db809e18520552289b",
"timestamp": "",
"source": "github",
"line_count": 742,
"max_line_length": 158,
"avg_line_length": 38.014824797843666,
"alnum_prop": 0.5871237636047789,
"repo_name": "GoogleCloudPlatform/professional-services-data-validator",
"id": "d9f224cb51bb76fefbeca9fff0420d7b49bf0aaa",
"size": "28783",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "data_validation/config_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HCL",
"bytes": "2694"
},
{
"name": "Python",
"bytes": "396721"
},
{
"name": "Shell",
"bytes": "9068"
}
],
"symlink_target": ""
}
|
import cPickle
import gzip
import os
import astropy.units as u
from astropy.coordinates import SkyCoord
from XtDac.data_files import get_data_file_path
class ChandraSourceCatalog(object):
def __init__(self):
# Find the catalog data file
catalog_file = get_data_file_path('chandra_csc_1.1.pickle.gz')
# Read the chandra source catalog from the CSV file
f = gzip.GzipFile(catalog_file)
data = cPickle.load(f)
self._catalog = data['data_frame']
self._sky_coords = data['sky_coords']
def _compute_distances(self, ra, dec):
        # Build the SkyCoord for the cone center
cone_center = SkyCoord(ra=ra, dec=dec, unit='deg')
# Find all sources within the requested cone
distances = self._sky_coords.separation(cone_center).to(u.arcmin)
return distances
def cone_search(self, ra, dec, radius, unit='arcmin'):
"""
Find sources within the given radius of the given position
:param ra: R.A. of position
:param dec: Dec of position
        :param radius: radius of the search cone
:param unit: units to use for the radius (default: arcmin)
:return: a pandas DataFrame containing the sources within the given radius
"""
distances = self._compute_distances(ra, dec) # arcmin
idx = distances <= radius * u.Unit(unit)
# Return the results
# Copy the array, to avoid returning a slice instead
results = self._catalog.copy().loc[idx]
# Add the distance column
results['distance'] = distances[idx]
return results
def find_closest_source(self, ra, dec):
"""
        Find the closest source to the given position
        :param ra: R.A. of position
        :param dec: Dec of position
        :return: the catalog row of the closest source
"""
distances = self._compute_distances(ra, dec) # arcmin
temp_catalog = self._catalog.copy()
temp_catalog['distance'] = distances
src_id = temp_catalog['distance'].argmin()
return temp_catalog.loc[src_id, :]
def find_variable_sources(self, ra, dec, radius, unit='arcmin', column='var_flag'):
"""
Find all variable sources within the given cone
:param ra: R.A. of position
:param dec: Dec of position
        :param radius: radius of the search cone
        :param unit: units to use for the radius (default: arcmin)
        :param column: name of the boolean column flagging variability (default: var_flag)
        :return: a pandas DataFrame containing the variable sources within the given radius
"""
# Get all sources within the cone
temp_results = self.cone_search(ra, dec, radius, unit=unit)
# Now select only the variable sources
idx = temp_results[column] == True
# Get a copy
results = temp_results.copy().loc[idx]
return results
def find_closest_variable_source(self, ra, dec, column='var_flag'):
"""
        Find the closest variable source to the given position
        :param ra: R.A. of position
        :param dec: Dec of position
        :param column: name of the boolean column flagging variability (default: var_flag)
        :return: the catalog row of the closest variable source
"""
distances = self._compute_distances(ra, dec) # arcmin
temp_catalog = self._catalog.copy()
temp_catalog['distance'] = distances
# Select only variable sources
idx = temp_catalog[column] == True
variable_sources = temp_catalog.copy().loc[idx]
src_id = variable_sources['distance'].argmin()
return variable_sources.loc[src_id, :]
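# Illustrative usage sketch (not part of the original module); the coordinates
# below are arbitrary assumptions for demonstration.
if __name__ == "__main__":
    csc = ChandraSourceCatalog()
    # All catalog sources within 5 arcmin of the given position
    nearby = csc.cone_search(ra=83.63, dec=22.01, radius=5.0, unit='arcmin')
    # Variable sources only, and the single closest variable source
    variable = csc.find_variable_sources(ra=83.63, dec=22.01, radius=5.0)
    closest = csc.find_closest_variable_source(ra=83.63, dec=22.01)
    print(closest['distance'])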
|
{
"content_hash": "93c205815c98a526dd426c40d99655b7",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 91,
"avg_line_length": 26.40625,
"alnum_prop": 0.6085798816568048,
"repo_name": "giacomov/XtDac",
"id": "2e875360ef0004d00db48f3b3809b392442fa9c9",
"size": "3380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "XtDac/ChandraUtils/chandra_catalog.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "337802"
}
],
"symlink_target": ""
}
|
from lite_boolean_formulae import L
def test_get_literals_from_literal():
assert L("x").get_literals() == frozenset(("x",))
def test_get_literals_from_formula():
assert ((L("x") & L("y")) | L("z")).get_literals() == frozenset(("x", "y", "z"))
|
{
"content_hash": "ce8e6de8aa5cf14f13cbc009a9063adf",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 85,
"avg_line_length": 28.444444444444443,
"alnum_prop": 0.59375,
"repo_name": "aubreystarktoller/lite-boolean-formulae",
"id": "788bedb6f4abe47fdddeb78d11243547f7b545f7",
"size": "256",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_get_literals.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1057"
},
{
"name": "Python",
"bytes": "19572"
}
],
"symlink_target": ""
}
|
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base import serialize
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class SyncListItemTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(index=1).fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items/1',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
'''
))
actual = self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(index=1).fetch()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(index=1).delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items/1',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(index=1).delete()
self.assertTrue(actual)
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.create(data={})
values = {'Data': serialize.object({}), }
self.holodeck.assert_has_request(Request(
'post',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items',
data=values,
))
def test_create_response(self):
self.holodeck.mock(Response(
201,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
'''
))
actual = self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.create(data={})
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.list()
self.holodeck.assert_has_request(Request(
'get',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items',
))
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"items": [],
"meta": {
"first_page_url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0",
"key": "items",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0"
}
}
'''
))
actual = self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.list()
self.assertIsNotNone(actual)
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"items": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
],
"meta": {
"first_page_url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0",
"key": "items",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items?From=from&Bounds=inclusive&Order=asc&PageSize=50&Page=0"
}
}
'''
))
actual = self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items.list()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(index=1).update(data={})
values = {'Data': serialize.object({}), }
self.holodeck.assert_has_request(Request(
'post',
'https://preview.twilio.com/Sync/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Lists/ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Items/1',
data=values,
))
def test_update_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"created_by": "created_by",
"data": {},
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"index": 100,
"list_sid": "ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"revision": "revision",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"url": "https://preview.twilio.com/Sync/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Lists/ESaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Items/100"
}
'''
))
actual = self.client.preview.sync.services(sid="ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_lists(sid="ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_list_items(index=1).update(data={})
self.assertIsNotNone(actual)
|
{
"content_hash": "319a05ad1f7a3dcb3df738d01432c3cd",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 219,
"avg_line_length": 43.45762711864407,
"alnum_prop": 0.5513845553822153,
"repo_name": "tysonholub/twilio-python",
"id": "5ebb704ca701ae35567556a204c226d7fda03aa8",
"size": "10271",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration/preview/sync/service/sync_list/test_sync_list_item.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "173"
},
{
"name": "Makefile",
"bytes": "2081"
},
{
"name": "Python",
"bytes": "8063586"
}
],
"symlink_target": ""
}
|
import sys
import os
import fcntl
import subprocess
from optparse import OptionParser
import select
# Mwa-ha-ha, this is the easiest way. Hardly portable to Windows, but who cares?
TAILF_COMMAND = ['/usr/bin/tail', '-F', '-n']
def tailf_init(filename, start_count):
process = subprocess.Popen(
TAILF_COMMAND + [str(start_count), filename],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# set non-blocking mode for file
fl = fcntl.fcntl(process.stdout, fcntl.F_GETFL)
fcntl.fcntl(process.stdout, fcntl.F_SETFL, fl | os.O_NONBLOCK)
fl = fcntl.fcntl(process.stderr, fcntl.F_GETFL)
fcntl.fcntl(process.stderr, fcntl.F_SETFL, fl | os.O_NONBLOCK)
return process
def tailf(filename, start_count=0, ignore_stderr=True):
process = tailf_init(filename, start_count)
buf = ''
while True:
reads, writes, errors = select.select([process.stdout, process.stderr], [], [
process.stdout, process.stderr], 0.1)
if process.stdout in reads:
buf += process.stdout.read()
lines = buf.split('\n')
if lines[-1] == '':
# whole line received
buf = ''
else:
buf = lines[-1]
lines = lines[:-1]
if lines:
for line in lines:
if ignore_stderr:
yield line
else:
yield (line, None)
if process.stderr in reads:
stderr_input = process.stderr.read()
if not ignore_stderr:
yield (None, stderr_input)
if process.stderr in errors or process.stdout in errors:
print "Error received. Errors: ", errors
process = tailf_init(filename)
if __name__ == "__main__":
parser = OptionParser(
usage=u"%prog <filename>\nWatch for file tail (with retry) and process all incoming data")
parser.add_option("-n", "--lines", dest="start_count", type="int",
                      default=0, help="Output last N lines (default: %default)")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("Please provide filename as argument")
for line in tailf(args[0], options.start_count):
print line
|
{
"content_hash": "d0a45d7d905944b6cfa9f62312e35e2f",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 98,
"avg_line_length": 30.623376623376622,
"alnum_prop": 0.5712468193384224,
"repo_name": "shadowleaves/acr",
"id": "ace90534c6a4c2676f7214b9c296350438c8cda2",
"size": "2381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tailf/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15029"
},
{
"name": "Shell",
"bytes": "1019"
}
],
"symlink_target": ""
}
|
"""Open metadata information in a text editor to let the user edit it.
"""
from __future__ import division, absolute_import, print_function
from beets import plugins
from beets import util
from beets import ui
from beets.dbcore import types
from beets.importer import action
from beets.ui.commands import _do_query, PromptChoice
from copy import deepcopy
import subprocess
import yaml
from tempfile import NamedTemporaryFile
import os
import six
# These "safe" types can avoid the format/parse cycle that most fields go
# through: they are safe to edit with native YAML types.
SAFE_TYPES = (types.Float, types.Integer, types.Boolean)
class ParseError(Exception):
"""The modified file is unreadable. The user should be offered a chance to
fix the error.
"""
def edit(filename, log):
"""Open `filename` in a text editor.
"""
cmd = util.shlex_split(util.editor_command())
cmd.append(filename)
log.debug(u'invoking editor command: {!r}', cmd)
try:
subprocess.call(cmd)
except OSError as exc:
raise ui.UserError(u'could not run editor command {!r}: {}'.format(
cmd[0], exc
))
def dump(arg):
"""Dump a sequence of dictionaries as YAML for editing.
"""
return yaml.safe_dump_all(
arg,
allow_unicode=True,
default_flow_style=False,
)
def load(s):
"""Read a sequence of YAML documents back to a list of dictionaries
with string keys.
Can raise a `ParseError`.
"""
try:
out = []
        # safe_load_all avoids executing arbitrary YAML tags from the editor.
        for d in yaml.safe_load_all(s):
if not isinstance(d, dict):
raise ParseError(
u'each entry must be a dictionary; found {}'.format(
type(d).__name__
)
)
# Convert all keys to strings. They started out as strings,
# but the user may have inadvertently messed this up.
out.append({six.text_type(k): v for k, v in d.items()})
except yaml.YAMLError as e:
raise ParseError(u'invalid YAML: {}'.format(e))
return out
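# Illustrative round trip (not part of the original module): `dump` and `load`
# are inverses for plain dictionaries.
#
#   >>> docs = [{u'title': u'Foo', u'track': 1}]
#   >>> load(dump(docs)) == docs
#   True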
def _safe_value(obj, key, value):
"""Check whether the `value` is safe to represent in YAML and trust as
returned from parsed YAML.
This ensures that values do not change their type when the user edits their
YAML representation.
"""
typ = obj._type(key)
return isinstance(typ, SAFE_TYPES) and isinstance(value, typ.model_type)
def flatten(obj, fields):
"""Represent `obj`, a `dbcore.Model` object, as a dictionary for
serialization. Only include the given `fields` if provided;
otherwise, include everything.
The resulting dictionary's keys are strings and the values are
safely YAML-serializable types.
"""
# Format each value.
d = {}
for key in obj.keys():
value = obj[key]
if _safe_value(obj, key, value):
# A safe value that is faithfully representable in YAML.
d[key] = value
else:
# A value that should be edited as a string.
d[key] = obj.formatted()[key]
# Possibly filter field names.
if fields:
return {k: v for k, v in d.items() if k in fields}
else:
return d
def apply_(obj, data):
"""Set the fields of a `dbcore.Model` object according to a
dictionary.
This is the opposite of `flatten`. The `data` dictionary should have
strings as values.
"""
for key, value in data.items():
if _safe_value(obj, key, value):
# A safe value *stayed* represented as a safe type. Assign it
# directly.
obj[key] = value
else:
# Either the field was stringified originally or the user changed
# it from a safe type to an unsafe one. Parse it as a string.
obj.set_parse(key, six.text_type(value))
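# Illustrative sketch (not part of the original module): `flatten` and `apply_`
# bracket the edit cycle. A hypothetical Item `obj` is serialized, edited as
# YAML, and the parsed result is written back onto the same object:
#
#   data = flatten(obj, fields={'title', 'track', 'id'})
#   edited = load(edit_yaml_somehow(dump([data])))  # hypothetical editor step
#   apply_(obj, edited[0])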
class EditPlugin(plugins.BeetsPlugin):
def __init__(self):
super(EditPlugin, self).__init__()
self.config.add({
# The default fields to edit.
'albumfields': 'album albumartist',
'itemfields': 'track title artist album',
# Silently ignore any changes to these fields.
'ignore_fields': 'id path',
})
self.register_listener('before_choose_candidate',
self.before_choose_candidate_listener)
def commands(self):
edit_command = ui.Subcommand(
'edit',
help=u'interactively edit metadata'
)
edit_command.parser.add_option(
u'-f', u'--field',
metavar='FIELD',
action='append',
help=u'edit this field also',
)
edit_command.parser.add_option(
u'--all',
action='store_true', dest='all',
help=u'edit all fields',
)
edit_command.parser.add_album_option()
edit_command.func = self._edit_command
return [edit_command]
def _edit_command(self, lib, opts, args):
"""The CLI command function for the `beet edit` command.
"""
# Get the objects to edit.
query = ui.decargs(args)
items, albums = _do_query(lib, query, opts.album, False)
objs = albums if opts.album else items
if not objs:
ui.print_(u'Nothing to edit.')
return
# Get the fields to edit.
if opts.all:
fields = None
else:
fields = self._get_fields(opts.album, opts.field)
self.edit(opts.album, objs, fields)
def _get_fields(self, album, extra):
"""Get the set of fields to edit.
"""
# Start with the configured base fields.
if album:
fields = self.config['albumfields'].as_str_seq()
else:
fields = self.config['itemfields'].as_str_seq()
# Add the requested extra fields.
if extra:
fields += extra
# Ensure we always have the `id` field for identification.
fields.append('id')
return set(fields)
def edit(self, album, objs, fields):
"""The core editor function.
- `album`: A flag indicating whether we're editing Items or Albums.
- `objs`: The `Item`s or `Album`s to edit.
- `fields`: The set of field names to edit (or None to edit
everything).
"""
# Present the YAML to the user and let her change it.
success = self.edit_objects(objs, fields)
# Save the new data.
if success:
self.save_changes(objs)
def edit_objects(self, objs, fields):
"""Dump a set of Model objects to a file as text, ask the user
to edit it, and apply any changes to the objects.
Return a boolean indicating whether the edit succeeded.
"""
# Get the content to edit as raw data structures.
old_data = [flatten(o, fields) for o in objs]
# Set up a temporary file with the initial data for editing.
new = NamedTemporaryFile(mode='w', suffix='.yaml', delete=False)
old_str = dump(old_data)
new.write(old_str)
new.close()
# Loop until we have parseable data and the user confirms.
try:
while True:
# Ask the user to edit the data.
edit(new.name, self._log)
# Read the data back after editing and check whether anything
# changed.
with open(new.name) as f:
new_str = f.read()
if new_str == old_str:
ui.print_(u"No changes; aborting.")
return False
# Parse the updated data.
try:
new_data = load(new_str)
except ParseError as e:
ui.print_(u"Could not read data: {}".format(e))
if ui.input_yn(u"Edit again to fix? (Y/n)", True):
continue
else:
return False
# Show the changes.
# If the objects are not on the DB yet, we need a copy of their
# original state for show_model_changes.
objs_old = [deepcopy(obj) if not obj._db else None
for obj in objs]
self.apply_data(objs, old_data, new_data)
changed = False
for obj, obj_old in zip(objs, objs_old):
changed |= ui.show_model_changes(obj, obj_old)
if not changed:
ui.print_(u'No changes to apply.')
return False
# Confirm the changes.
choice = ui.input_options(
(u'continue Editing', u'apply', u'cancel')
)
if choice == u'a': # Apply.
return True
elif choice == u'c': # Cancel.
return False
elif choice == u'e': # Keep editing.
# Reset the temporary changes to the objects.
for obj in objs:
obj.read()
continue
# Remove the temporary file before returning.
finally:
os.remove(new.name)
def apply_data(self, objs, old_data, new_data):
"""Take potentially-updated data and apply it to a set of Model
objects.
The objects are not written back to the database, so the changes
are temporary.
"""
if len(old_data) != len(new_data):
self._log.warn(u'number of objects changed from {} to {}',
len(old_data), len(new_data))
obj_by_id = {o.id: o for o in objs}
ignore_fields = self.config['ignore_fields'].as_str_seq()
for old_dict, new_dict in zip(old_data, new_data):
# Prohibit any changes to forbidden fields to avoid
# clobbering `id` and such by mistake.
forbidden = False
for key in ignore_fields:
if old_dict.get(key) != new_dict.get(key):
self._log.warn(u'ignoring object whose {} changed', key)
forbidden = True
break
if forbidden:
continue
id_ = int(old_dict['id'])
apply_(obj_by_id[id_], new_dict)
def save_changes(self, objs):
"""Save a list of updated Model objects to the database.
"""
# Save to the database and possibly write tags.
for ob in objs:
if ob._dirty:
self._log.debug(u'saving changes to {}', ob)
ob.try_sync(ui.should_write(), ui.should_move())
# Methods for interactive importer execution.
def before_choose_candidate_listener(self, session, task):
"""Append an "Edit" choice and an "edit Candidates" choice (if
there are candidates) to the interactive importer prompt.
"""
choices = [PromptChoice('d', 'eDit', self.importer_edit)]
if task.candidates:
choices.append(PromptChoice('c', 'edit Candidates',
self.importer_edit_candidate))
return choices
def importer_edit(self, session, task):
"""Callback for invoking the functionality during an interactive
import session on the *original* item tags.
"""
# Assign temporary ids to the Items.
for i, obj in enumerate(task.items):
obj.id = i + 1
# Present the YAML to the user and let her change it.
fields = self._get_fields(album=False, extra=[])
success = self.edit_objects(task.items, fields)
# Remove temporary ids.
for obj in task.items:
obj.id = None
# Save the new data.
if success:
# Return action.RETAG, which makes the importer write the tags
# to the files if needed without re-applying metadata.
return action.RETAG
else:
# Edit cancelled / no edits made. Revert changes.
for obj in task.items:
obj.read()
def importer_edit_candidate(self, session, task):
"""Callback for invoking the functionality during an interactive
import session on a *candidate*. The candidate's metadata is
applied to the original items.
"""
# Prompt the user for a candidate.
sel = ui.input_options([], numrange=(1, len(task.candidates)))
# Force applying the candidate on the items.
task.match = task.candidates[sel - 1]
task.apply_metadata()
return self.importer_edit(session, task)
|
{
"content_hash": "965740a768bfd3566782c1e89bd6da3b",
"timestamp": "",
"source": "github",
"line_count": 379,
"max_line_length": 79,
"avg_line_length": 33.733509234828496,
"alnum_prop": 0.5590144700821275,
"repo_name": "jcoady9/beets",
"id": "4a55e59ed55ed39765718f9329474a0b416eb738",
"size": "13415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "beetsplug/edit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2951"
},
{
"name": "HTML",
"bytes": "3307"
},
{
"name": "JavaScript",
"bytes": "85950"
},
{
"name": "Python",
"bytes": "1767900"
},
{
"name": "Shell",
"bytes": "7413"
}
],
"symlink_target": ""
}
|
import base64
import datetime
import urlparse
import uuid
import iso8601
from lxml import etree
from oslo.config import cfg
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import ips
from nova.api.openstack.compute import servers
from nova.api.openstack.compute import views
from nova.api.openstack import extensions
from nova.api.openstack import xmlutil
from nova.compute import api as compute_api
from nova.compute import instance_types
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.network import manager
from nova.network.quantumv2 import api as quantum_api
from nova.openstack.common import jsonutils
from nova.openstack.common import policy as common_policy
from nova.openstack.common import rpc
from nova import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_network
from nova.tests.image import fake
from nova.tests import matchers
from nova.tests import utils
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')
CONF.import_opt('scheduler_topic', 'nova.scheduler.rpcapi')
FAKE_UUID = fakes.FAKE_UUID
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
XPATH_NS = {
'atom': 'http://www.w3.org/2005/Atom',
'ns': 'http://docs.openstack.org/compute/api/v1.1'
}
INSTANCE_IDS = {FAKE_UUID: 1}
def fake_gen_uuid():
return FAKE_UUID
def return_servers_empty(context, *args, **kwargs):
return []
def return_security_group(context, instance_id, security_group_id):
pass
def instance_update(context, instance_uuid, values):
inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
name=values.get('display_name'))
return (inst, inst)
def fake_compute_api(cls, req, id):
return True
class MockSetAdminPassword(object):
def __init__(self):
self.instance_id = None
self.password = None
def __call__(self, context, instance_id, password):
self.instance_id = instance_id
self.password = password
class Base64ValidationTest(test.TestCase):
def setUp(self):
super(Base64ValidationTest, self).setUp()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def test_decode_base64(self):
value = "A random string"
result = self.controller._decode_base64(base64.b64encode(value))
self.assertEqual(result, value)
def test_decode_base64_binary(self):
value = "\x00\x12\x75\x99"
result = self.controller._decode_base64(base64.b64encode(value))
self.assertEqual(result, value)
def test_decode_base64_whitespace(self):
value = "A random string"
encoded = base64.b64encode(value)
white = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:])
result = self.controller._decode_base64(white)
self.assertEqual(result, value)
def test_decode_base64_invalid(self):
invalid = "A random string"
result = self.controller._decode_base64(invalid)
self.assertEqual(result, None)
def test_decode_base64_illegal_bytes(self):
value = "A random string"
encoded = base64.b64encode(value)
white = ">\x01%s*%s()" % (encoded[:2], encoded[2:])
result = self.controller._decode_base64(white)
self.assertEqual(result, None)
class QuantumV2Subclass(quantum_api.API):
"""Used to ensure that API handles subclasses properly."""
pass
class ServersControllerTest(test.TestCase):
def setUp(self):
super(ServersControllerTest, self).setUp()
self.flags(verbose=True, use_ipv6=False)
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
return_server = fakes.fake_instance_get()
return_servers = fakes.fake_instance_get_all_by_filters()
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers)
self.stubs.Set(db, 'instance_get_by_uuid',
return_server)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'instance_update_and_get_original',
instance_update)
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
self.ips_controller = ips.Controller()
policy.reset()
policy.init()
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs,
spectacular=True)
def test_can_check_loaded_extensions(self):
self.ext_mgr.extensions = {'os-fake': None}
self.assertTrue(self.controller.ext_mgr.is_loaded('os-fake'))
self.assertFalse(self.controller.ext_mgr.is_loaded('os-not-loaded'))
def test_requested_networks_prefix(self):
uuid = 'br-00000000-0000-0000-0000-000000000000'
requested_networks = [{'uuid': uuid}]
res = self.controller._get_requested_networks(requested_networks)
self.assertTrue((uuid, None) in res)
def test_requested_networks_quantumv2_enabled_with_port(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_requested_networks_quantumv2_enabled_with_network(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(network, None, None)])
def test_requested_networks_quantumv2_enabled_with_network_and_port(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_requested_networks_quantumv2_disabled_with_port(self):
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
def test_requested_networks_api_enabled_with_v2_subclass(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_requested_networks_quantumv2_subclass_with_port(self):
cls = 'nova.tests.api.openstack.compute.test_servers.QuantumV2Subclass'
self.flags(network_api_class=cls)
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_get_server_by_uuid(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
def test_unique_host_id(self):
"""Create two servers with the same host and different
        project_ids and check that the hostIds are unique"""
def return_instance_with_host(self, *args):
project_id = str(uuid.uuid4())
return fakes.stub_instance(id=1, uuid=FAKE_UUID,
project_id=project_id,
host='fake_host')
self.stubs.Set(db, 'instance_get_by_uuid',
return_instance_with_host)
self.stubs.Set(db, 'instance_get',
return_instance_with_host)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
server1 = self.controller.show(req, FAKE_UUID)
server2 = self.controller.show(req, FAKE_UUID)
self.assertNotEqual(server1['server']['hostId'],
server2['server']['hostId'])
def test_get_server_by_id(self):
self.flags(use_ipv6=True)
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/1"
uuid = FAKE_UUID
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "server1",
"status": "BUILD",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {
"seq": "1",
},
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % uuid,
},
],
}
}
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_with_active_status_by_id(self):
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/1"
new_return_server = fakes.fake_instance_get(
vm_state=vm_states.ACTIVE, progress=100)
self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
uuid = FAKE_UUID
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "server1",
"status": "ACTIVE",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {
"seq": "1",
},
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % uuid,
},
],
}
}
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_with_id_image_ref_by_id(self):
image_ref = "10"
image_bookmark = "http://localhost/fake/images/10"
flavor_id = "1"
flavor_bookmark = "http://localhost/fake/flavors/1"
new_return_server = fakes.fake_instance_get(
vm_state=vm_states.ACTIVE, image_ref=image_ref,
flavor_id=flavor_id, progress=100)
self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
uuid = FAKE_UUID
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "server1",
"status": "ACTIVE",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {
"seq": "1",
},
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % uuid,
},
],
}
}
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_addresses_from_cache(self):
pub0 = ('172.19.0.1', '172.19.0.2',)
pub1 = ('1.2.3.4',)
pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
priv0 = ('192.168.0.3', '192.168.0.4',)
def _ip(ip):
return {'address': ip, 'type': 'fixed'}
nw_cache = [
{'address': 'aa:aa:aa:aa:aa:aa',
'id': 1,
'network': {'bridge': 'br0',
'id': 1,
'label': 'public',
'subnets': [{'cidr': '172.19.0.0/24',
'ips': [_ip(ip) for ip in pub0]},
{'cidr': '1.2.3.0/16',
'ips': [_ip(ip) for ip in pub1]},
{'cidr': 'b33f::/64',
'ips': [_ip(ip) for ip in pub2]}]}},
{'address': 'bb:bb:bb:bb:bb:bb',
'id': 2,
'network': {'bridge': 'br1',
'id': 2,
'label': 'private',
'subnets': [{'cidr': '192.168.0.0/24',
'ips': [_ip(ip) for ip in priv0]}]}}]
return_server = fakes.fake_instance_get(nw_cache=nw_cache)
self.stubs.Set(db, 'instance_get_by_uuid', return_server)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s/ips' % FAKE_UUID)
res_dict = self.ips_controller.index(req, FAKE_UUID)
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3'},
{'version': 4, 'addr': '192.168.0.4'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1'},
{'version': 4, 'addr': '172.19.0.2'},
{'version': 4, 'addr': '1.2.3.4'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa'},
],
},
}
self.assertThat(res_dict, matchers.DictMatches(expected))
def test_get_server_addresses_nonexistent_network(self):
url = '/v2/fake/servers/%s/ips/network_0' % FAKE_UUID
req = fakes.HTTPRequest.blank(url)
self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
req, FAKE_UUID, 'network_0')
def test_get_server_addresses_nonexistent_server(self):
def fake_instance_get(*args, **kwargs):
raise exception.InstanceNotFound(instance_id='fake')
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
server_id = str(uuid.uuid4())
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s/ips' % server_id)
self.assertRaises(webob.exc.HTTPNotFound,
self.ips_controller.index, req, server_id)
def test_get_server_list_empty(self):
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers_empty)
req = fakes.HTTPRequest.blank('/v2/fake/servers')
res_dict = self.controller.index(req)
num_servers = len(res_dict['servers'])
self.assertEqual(0, num_servers)
def test_get_server_list_with_reservation_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?reservation_id=foo')
res_dict = self.controller.index(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_empty(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_details(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers')
res_dict = self.controller.index(req)
self.assertEqual(len(res_dict['servers']), 5)
for i, s in enumerate(res_dict['servers']):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['name'], 'server%d' % (i + 1))
self.assertEqual(s.get('image', None), None)
expected_links = [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % s['id'],
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % s['id'],
},
]
self.assertEqual(s['links'], expected_links)
def test_get_servers_with_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=3')
res_dict = self.controller.index(req)
servers = res_dict['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in xrange(len(servers))])
servers_links = res_dict['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected_params = {'limit': ['3'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected_params))
def test_get_servers_with_limit_bad_value(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_server_details_empty(self):
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers_empty)
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail')
res_dict = self.controller.index(req)
num_servers = len(res_dict['servers'])
self.assertEqual(0, num_servers)
def test_get_server_details_with_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?limit=3')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in xrange(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_server_details_with_limit_bad_value(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.detail, req)
def test_get_server_details_with_limit_and_other_params(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail'
'?limit=3&blah=2:t')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in xrange(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'], 'blah': ['2:t'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_servers_with_too_big_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=30')
res_dict = self.controller.index(req)
self.assertTrue('servers_links' not in res_dict)
def test_get_servers_with_bad_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_marker(self):
url = '/v2/fake/servers?marker=%s' % fakes.get_fake_uuid(2)
req = fakes.HTTPRequest.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
def test_get_servers_with_limit_and_marker(self):
url = '/v2/fake/servers?limit=2&marker=%s' % fakes.get_fake_uuid(1)
req = fakes.HTTPRequest.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
def test_get_servers_with_bad_marker(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=2&marker=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_bad_option(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?unknownoption=whee')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_image(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('image' in search_opts)
self.assertEqual(search_opts['image'], '12345')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?image=12345')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_tenant_id_filter_converts_to_project_id_for_admin(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
self.assertEqual(filters['project_id'], 'fake')
self.assertFalse(filters.get('tenant_id'))
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?tenant_id=fake',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('servers' in res)
def test_admin_restricted_tenant(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
self.assertEqual(filters['project_id'], 'fake')
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('servers' in res)
def test_all_tenants_pass_policy(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
self.assertTrue('project_id' not in filters)
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
rules = {
"compute:get_all_tenants":
common_policy.parse_rule("project_id:fake"),
"compute:get_all":
common_policy.parse_rule("project_id:fake"),
}
common_policy.set_rules(common_policy.Rules(rules))
req = fakes.HTTPRequest.blank('/v2/fake/servers?all_tenants=1')
res = self.controller.index(req)
self.assertTrue('servers' in res)
def test_all_tenants_fail_policy(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
return [fakes.stub_instance(100)]
rules = {
"compute:get_all_tenants":
common_policy.parse_rule("project_id:non_fake"),
"compute:get_all":
common_policy.parse_rule("project_id:fake"),
}
common_policy.set_rules(common_policy.Rules(rules))
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?all_tenants=1')
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_get_servers_allows_flavor(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('flavor' in search_opts)
# flavor is an integer ID
self.assertEqual(search_opts['flavor'], '12345')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?flavor=12345')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_with_bad_flavor(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?flavor=abcde')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_allows_status(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('vm_state' in search_opts)
self.assertEqual(search_opts['vm_state'], vm_states.ACTIVE)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=active')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_invalid_status(self):
# Test getting servers by invalid status.
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_deleted_status_as_user(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=deleted',
use_admin_context=False)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.detail, req)
def test_get_servers_deleted_status_as_admin(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertTrue('vm_state' in search_opts)
self.assertEqual(search_opts['vm_state'], 'deleted')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=deleted',
use_admin_context=True)
servers = self.controller.detail(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_name(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('name' in search_opts)
self.assertEqual(search_opts['name'], 'whee.*')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?name=whee.*')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_changes_since(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('changes-since' in search_opts)
changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
tzinfo=iso8601.iso8601.UTC)
self.assertEqual(search_opts['changes-since'], changes_since)
self.assertTrue('deleted' not in search_opts)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
params = 'changes-since=2011-01-24T17:08:01Z'
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % params)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
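    # For reference, the controller is assumed to parse the query value
    # with the iso8601 library, roughly like this (a sketch, not the
    # controller's actual code):
    #
    #     changes_since = iso8601.parse_date('2011-01-24T17:08:01Z')
    #     # -> datetime.datetime(2011, 1, 24, 17, 8, 1,
    #     #                      tzinfo=iso8601.iso8601.UTC)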
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % params)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
def test_get_servers_admin_filters_as_user(self):
"""Test getting servers by admin-only or unknown options when
context is not admin. Make sure the admin and unknown options
are stripped before they get to compute_api.get_all()
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
# Allowed by user
self.assertTrue('name' in search_opts)
# OSAPI converts status to vm_state
self.assertTrue('vm_state' in search_opts)
# Allowed only by admins with admin API on
self.assertFalse('ip' in search_opts)
self.assertFalse('unknown_option' in search_opts)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % query_str)
res = self.controller.index(req)
servers = res['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
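    # A hedged sketch of the option stripping asserted above; the names
    # here are illustrative, not the controller's real implementation:
    #
    #     allowed = ('name', 'status', 'image', 'flavor', 'changes-since')
    #     search_opts = dict((k, v) for k, v in search_opts.items()
    #                        if k in allowed)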
def test_get_servers_admin_options_as_admin(self):
"""Test getting servers by admin-only or unknown options when
context is admin. All options should be passed
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
# Allowed by user
self.assertTrue('name' in search_opts)
# OSAPI converts status to vm_state
self.assertTrue('vm_state' in search_opts)
# Allowed only by admins with admin API on
self.assertTrue('ip' in search_opts)
self.assertTrue('unknown_option' in search_opts)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % query_str,
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_admin_allows_ip(self):
"""Test getting servers by ip with admin_api enabled and
admin context
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('ip' in search_opts)
self.assertEqual(search_opts['ip'], '10\..*')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?ip=10\..*',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_admin_allows_ip6(self):
"""Test getting servers by ip6 with admin_api enabled and
admin context
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('ip6' in search_opts)
self.assertEqual(search_opts['ip6'], 'ffff.*')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?ip6=ffff.*',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_update_server_all_attributes(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(name='server_test',
access_ipv4='0.0.0.0',
access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {
'name': 'server_test',
'accessIPv4': '0.0.0.0',
'accessIPv6': 'beef::0123',
}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
self.assertEqual(res_dict['server']['accessIPv4'], '0.0.0.0')
self.assertEqual(res_dict['server']['accessIPv6'], 'beef::0123')
def test_update_server_invalid_xml_raises_lookup(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/xml'
        # XML request whose bogus encoding ("TF-8") raises LookupError
req.body = """<?xml version="1.0" encoding="TF-8"?>
<metadata
xmlns="http://docs.openstack.org/compute/api/v1.1"
key="Label"></meta>"""
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
def test_update_server_invalid_xml_raises_expat(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/xml'
        # XML request whose mismatched closing tag raises ExpatError
req.body = """<?xml version="1.0" encoding="UTF-8"?>
<metadata
xmlns="http://docs.openstack.org/compute/api/v1.1"
key="Label"></meta>"""
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
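    # For reference, the request body above fails stdlib XML parsing the
    # same way (a sketch; the API maps the error to a 400 response):
    #
    #     from xml.dom import minidom
    #     minidom.parseString(req.body)  # expat.ExpatError: mismatched tag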
def test_update_server_name(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(name='server_test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'server_test'}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_server_name_too_long(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(name='server_test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'x' * 256}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_access_ipv4(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': '0.0.0.0'}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv4'], '0.0.0.0')
def test_update_server_access_ipv4_bad_format(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': 'bad_format'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_access_ipv4_none(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': None}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv4'], '')
def test_update_server_access_ipv4_blank(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': ''}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv4'], '')
def test_update_server_access_ipv6(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': 'beef::0123'}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv6'], 'beef::0123')
def test_update_server_access_ipv6_bad_format(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': 'bad_format'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_access_ipv6_none(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': None}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv6'], '')
def test_update_server_access_ipv6_blank(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': ''}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv6'], '')
def test_update_server_personality(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {
'server': {
'personality': []
}
}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req, FAKE_UUID, body)
def test_update_server_adminPass_ignored(self):
inst_dict = dict(name='server_test', adminPass='bacon')
body = dict(server=inst_dict)
def server_update(context, id, params):
filtered_dict = {
'display_name': 'server_test',
}
self.assertEqual(params, filtered_dict)
filtered_dict['uuid'] = id
return filtered_dict
self.stubs.Set(db, 'instance_update', server_update)
# FIXME (comstud)
# self.stubs.Set(db, 'instance_get',
# return_server_with_attributes(name='server_test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = "application/json"
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_server_not_found(self):
def fake_get(*args, **kwargs):
raise exception.InstanceNotFound(instance_id='fake')
self.stubs.Set(compute_api.API, 'get', fake_get)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'server_test'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_not_found_on_update(self):
def fake_update(*args, **kwargs):
raise exception.InstanceNotFound(instance_id='fake')
self.stubs.Set(compute_api.API, 'update', fake_update)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'server_test'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
req, FAKE_UUID, body)
def test_rebuild_instance_with_access_ipv4_bad_format(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = 'bad_format'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_blank_metadata_key(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '0.0.0.0'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_metadata_key_too_long(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '0.0.0.0'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
('a' * 260): 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_metadata_value_too_long(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '0.0.0.0'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'key1': ('a' * 260),
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller._action_rebuild, req, FAKE_UUID, body)
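    # Both rebuild tests above use 260-character keys/values; metadata
    # keys and values are assumed to be capped at 255 characters, which
    # is why the API answers 413 (RequestEntityTooLarge) rather than 400.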
def test_rebuild_instance_fails_when_min_ram_too_small(self):
# make min_ram larger than our instance ram size
def fake_get_image(self, context, image_href):
return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="4096", min_disk="10")
self.stubs.Set(compute_api.API, '_get_image',
fake_get_image)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_fails_when_min_disk_too_small(self):
# make min_disk larger than our instance disk size
def fake_get_image(self, context, image_href):
return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="128", min_disk="100000")
self.stubs.Set(compute_api.API, '_get_image',
fake_get_image)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_access_ipv6_bad_format(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '1.2.3.4'
access_ipv6 = 'bad_format'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_get_all_server_details(self):
expected_flavor = {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": 'http://localhost/fake/flavors/1',
},
],
}
expected_image = {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": 'http://localhost/fake/images/10',
},
],
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail')
res_dict = self.controller.detail(req)
for i, s in enumerate(res_dict['servers']):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['hostId'], '')
self.assertEqual(s['name'], 'server%d' % (i + 1))
self.assertEqual(s['image'], expected_image)
self.assertEqual(s['flavor'], expected_flavor)
self.assertEqual(s['status'], 'BUILD')
self.assertEqual(s['metadata']['seq'], str(i + 1))
def test_get_all_server_details_with_host(self):
        """We want to make sure that if two instances are on the same
        host, then they return the same hostId. If two instances are on
        different hosts, they should return different hostIds. In this
        test, there are 5 instances - 2 on one host and 3 on another.
        """
def return_servers_with_host(context, *args, **kwargs):
return [fakes.stub_instance(i + 1, 'fake', 'fake', host=i % 2,
uuid=fakes.get_fake_uuid(i))
for i in xrange(5)]
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers_with_host)
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail')
res_dict = self.controller.detail(req)
server_list = res_dict['servers']
host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
self.assertTrue(host_ids[0] and host_ids[1])
self.assertNotEqual(host_ids[0], host_ids[1])
for i, s in enumerate(server_list):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['hostId'], host_ids[i % 2])
self.assertEqual(s['name'], 'server%d' % (i + 1))
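    # hostId is assumed to be an opaque per-host hash, so real hostnames
    # are never exposed through the API; the assertions above only
    # require that it is non-empty, stable per host, and distinct across
    # hosts.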
def _delete_server_instance(self, uuid=FAKE_UUID):
fakes.stub_out_instance_quota(self.stubs, 0, 10)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
req.method = 'DELETE'
self.server_delete_called = False
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
self.controller.delete(req, uuid)
def test_delete_server_instance(self):
self._delete_server_instance()
self.assertEqual(self.server_delete_called, True)
def test_delete_server_instance_not_found(self):
self.assertRaises(webob.exc.HTTPNotFound,
self._delete_server_instance,
uuid='non-existent-uuid')
def test_delete_server_instance_while_building(self):
fakes.stub_out_instance_quota(self.stubs, 0, 10)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'DELETE'
self.server_delete_called = False
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
self.controller.delete(req, FAKE_UUID)
self.assertEqual(self.server_delete_called, True)
def test_delete_server_instance_while_resize(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'DELETE'
self.server_delete_called = False
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
task_state=task_states.RESIZE_PREP))
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
self.controller.delete(req, FAKE_UUID)
        # Delete should be allowed in any case, even during resizing,
        # because the resize may get stuck.
self.assertEqual(self.server_delete_called, True)
class ServerStatusTest(test.TestCase):
def setUp(self):
super(ServerStatusTest, self).setUp()
fakes.stub_out_nw_api(self.stubs)
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def _get_with_state(self, vm_state, task_state=None):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_state,
task_state=task_state))
request = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
return self.controller.show(request, FAKE_UUID)
def test_active(self):
response = self._get_with_state(vm_states.ACTIVE)
self.assertEqual(response['server']['status'], 'ACTIVE')
def test_reboot(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBOOTING)
self.assertEqual(response['server']['status'], 'REBOOT')
def test_reboot_hard(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBOOTING_HARD)
self.assertEqual(response['server']['status'], 'HARD_REBOOT')
def test_rebuild(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBUILDING)
self.assertEqual(response['server']['status'], 'REBUILD')
def test_rebuild_error(self):
response = self._get_with_state(vm_states.ERROR)
self.assertEqual(response['server']['status'], 'ERROR')
def test_resize(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.RESIZE_PREP)
self.assertEqual(response['server']['status'], 'RESIZE')
def test_verify_resize(self):
response = self._get_with_state(vm_states.RESIZED, None)
self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')
def test_revert_resize(self):
response = self._get_with_state(vm_states.RESIZED,
task_states.RESIZE_REVERTING)
self.assertEqual(response['server']['status'], 'REVERT_RESIZE')
def test_password_update(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.UPDATING_PASSWORD)
self.assertEqual(response['server']['status'], 'PASSWORD')
def test_stopped(self):
response = self._get_with_state(vm_states.STOPPED)
self.assertEqual(response['server']['status'], 'SHUTOFF')
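    # Summary of the (vm_state, task_state) -> API status mapping pinned
    # down by the tests above (derived from the assertions themselves):
    #
    #     ACTIVE  + None              -> ACTIVE
    #     ACTIVE  + REBOOTING         -> REBOOT
    #     ACTIVE  + REBOOTING_HARD    -> HARD_REBOOT
    #     ACTIVE  + REBUILDING        -> REBUILD
    #     ERROR   + None              -> ERROR
    #     ACTIVE  + RESIZE_PREP       -> RESIZE
    #     RESIZED + None              -> VERIFY_RESIZE
    #     RESIZED + RESIZE_REVERTING  -> REVERT_RESIZE
    #     ACTIVE  + UPDATING_PASSWORD -> PASSWORD
    #     STOPPED + None              -> SHUTOFF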
class ServersControllerCreateTest(test.TestCase):
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTest, self).setUp()
self.flags(verbose=True,
enable_instance_password=True)
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def instance_create(context, inst):
inst_type = instance_types.get_instance_type_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/images/%s' % image_uuid
self.instance_cache_num += 1
instance = {
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'uuid': FAKE_UUID,
'instance_type': dict(inst_type),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': 'fead::1234',
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': 'fake',
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
"config_drive": None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
}
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def instance_update(context, uuid, values):
instance = self.instance_cache_by_uuid[uuid]
instance.update(values)
return instance
def rpc_call_wrapper(context, topic, msg, timeout=None):
"""Stub out the scheduler creating the instance entry."""
if (topic == CONF.scheduler_topic and
msg['method'] == 'run_instance'):
request_spec = msg['args']['request_spec']
num_instances = request_spec.get('num_instances', 1)
instances = []
for x in xrange(num_instances):
instances.append(instance_create(context,
request_spec['instance_properties']))
return instances
def server_update(context, instance_uuid, params):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
def fake_method(*args, **kwargs):
pass
def project_get_networks(context, user_id):
return dict(id='1', host='localhost')
def queue_get_for(context, *args):
return 'network_topic'
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'project_get_networks',
project_get_networks)
self.stubs.Set(db, 'instance_create', instance_create)
self.stubs.Set(db, 'instance_system_metadata_update',
fake_method)
self.stubs.Set(db, 'instance_get', instance_get)
self.stubs.Set(db, 'instance_update', instance_update)
self.stubs.Set(rpc, 'cast', fake_method)
self.stubs.Set(rpc, 'call', rpc_call_wrapper)
self.stubs.Set(db, 'instance_update_and_get_original',
server_update)
self.stubs.Set(rpc, 'queue_get_for', queue_get_for)
self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
fake_method)
def _check_admin_pass_len(self, server_dict):
"""utility function - check server_dict for adminPass length."""
self.assertEqual(CONF.password_length,
len(server_dict["adminPass"]))
def _check_admin_pass_missing(self, server_dict):
"""utility function - check server_dict for absence of adminPass."""
self.assertTrue("adminPass" not in server_dict)
def _test_create_instance(self):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
body = dict(server=dict(
name='server_test', imageRef=image_uuid, flavorRef=2,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
server = self.controller.create(req, body).obj['server']
self._check_admin_pass_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_server_bad_image_href(self):
image_href = 1
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_negative_min(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_negative_max(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_alpha_min(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_alpha_max(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_multiple_instances(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': []
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_pass_len(res["server"])
def test_create_multiple_instances_pass_disabled(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
self.flags(enable_instance_password=False)
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': []
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_pass_missing(res["server"])
def test_create_multiple_instances_resv_id_return(self):
"""Test creating multiple instances with asking for
reservation_id
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': [],
'return_reservation_id': True
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body)
reservation_id = res.obj.get('reservation_id')
self.assertNotEqual(reservation_id, "")
self.assertNotEqual(reservation_id, None)
self.assertTrue(len(reservation_id) > 1)
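        # reservation ids in this codebase are assumed to look like
        # 'r-xxxxxxxx'; the assertions above deliberately require only a
        # non-empty string, since the exact format is an implementation
        # detail.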
def test_create_multiple_instances_with_multiple_volume_bdm(self):
"""
Test that a BadRequest is raised if multiple instances
are requested with a list of block device mappings for volumes.
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
min_count = 2
bdm = [{'device_name': 'foo1', 'volume_id': 'vol-xxxx'},
{'device_name': 'foo2', 'volume_id': 'vol-yyyy'}
]
params = {
'block_device_mapping': bdm,
'min_count': min_count
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(len(kwargs['block_device_mapping']), 2)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params, no_image=True)
def test_create_multiple_instances_with_single_volume_bdm(self):
"""
Test that a BadRequest is raised if multiple instances
are requested to boot from a single volume.
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
min_count = 2
bdm = [{'device_name': 'foo1', 'volume_id': 'vol-xxxx'}]
params = {
'block_device_mapping': bdm,
'min_count': min_count
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['block_device_mapping']['volume_id'],
'vol-xxxx')
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params, no_image=True)
def test_create_multiple_instance_with_non_integer_max_count(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': 2.5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': []
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_multiple_instance_with_non_integer_min_count(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2.5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': []
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_image_ref_is_bookmark(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_image_ref_is_invalid(self):
image_uuid = 'this_is_not_a_valid_uuid'
image_href = 'http://localhost/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance_no_key_pair(self):
fakes.stub_out_key_pair_funcs(self.stubs, have_key_pair=False)
self._test_create_instance()
def _test_create_extra(self, params, no_image=False):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
if no_image:
server.pop('imageRef', None)
server.update(params)
body = dict(server=server)
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
server = self.controller.create(req, body).obj['server']
def test_create_instance_with_security_group_enabled(self):
self.ext_mgr.extensions = {'os-security-groups': 'fake'}
group = 'foo'
old_create = compute_api.API.create
def sec_group_get(ctx, proj, name):
if name == group:
return True
else:
raise exception.SecurityGroupNotFoundForProject(
project_id=proj, security_group_id=name)
def create(*args, **kwargs):
self.assertEqual(kwargs['security_group'], [group])
return old_create(*args, **kwargs)
self.stubs.Set(db, 'security_group_get_by_name', sec_group_get)
# negative test
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra,
{'security_groups': [{'name': 'bogus'}]})
# positive test - extra assert in create path
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra({'security_groups': [{'name': group}]})
def test_create_instance_with_security_group_disabled(self):
group = 'foo'
params = {'security_groups': [{'name': group}]}
old_create = compute_api.API.create
def create(*args, **kwargs):
# NOTE(vish): if the security groups extension is not
# enabled, then security groups passed in
# are ignored.
self.assertEqual(kwargs['security_group'], ['default'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_disk_config_enabled(self):
self.ext_mgr.extensions = {'OS-DCF': 'fake'}
# NOTE(vish): the extension converts OS-DCF:disk_config into
# auto_disk_config, so we are testing with
        #             the internal value
params = {'auto_disk_config': 'AUTO'}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['auto_disk_config'], 'AUTO')
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_disk_config_disabled(self):
params = {'auto_disk_config': True}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['auto_disk_config'], False)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_scheduler_hints_enabled(self):
self.ext_mgr.extensions = {'OS-SCH-HNT': 'fake'}
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], hints)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_scheduler_hints_disabled(self):
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], {})
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_volumes_enabled(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_volumes_enabled_no_image(self):
"""
Test that the create will fail if there is no image
and no bdms supplied in the request
"""
self.ext_mgr.extensions = {'os-volumes': 'fake'}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {}, no_image=True)
def test_create_instance_with_volumes_enabled_and_bdms_no_image(self):
"""
Test that the create works if there is no image supplied but
os-volumes extension is enabled and bdms are supplied
"""
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params, no_image=True)
def test_create_instance_with_volumes_disabled(self):
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_device_name_not_string(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'delete_on_termination': 1,
'device_name': 123,
'volume_size': 1,
'volume_id': '11111111-1111-1111-1111-111111111111'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_device_name_empty(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'delete_on_termination': 1,
'device_name': '',
'volume_size': 1,
'volume_id': '11111111-1111-1111-1111-111111111111'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_device_name_too_long(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'delete_on_termination': 1,
'device_name': 'a' * 256,
'volume_size': 1,
'volume_id': '11111111-1111-1111-1111-111111111111'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_space_in_device_name(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'delete_on_termination': 1,
'device_name': 'vd a',
'volume_size': 1,
'volume_id': '11111111-1111-1111-1111-111111111111'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_bdm_delete_on_termination(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'device_name': 'foo1', 'delete_on_termination': 1},
{'device_name': 'foo2', 'delete_on_termination': True},
{'device_name': 'foo3', 'delete_on_termination': 'invalid'},
{'device_name': 'foo4', 'delete_on_termination': 0},
{'device_name': 'foo5', 'delete_on_termination': False}]
        expected_bdm = [
{'device_name': 'foo1', 'delete_on_termination': True},
{'device_name': 'foo2', 'delete_on_termination': True},
{'device_name': 'foo3', 'delete_on_termination': False},
{'device_name': 'foo4', 'delete_on_termination': False},
{'device_name': 'foo5', 'delete_on_termination': False}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], expected_bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
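    # The expected mapping above implies the API coerces
    # delete_on_termination with a bool-from-string style helper: only
    # recognized truthy values (1, True, 'True', ...) become True, and
    # anything unrecognized (e.g. 'invalid') falls back to False.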
def test_create_instance_with_user_data_enabled(self):
self.ext_mgr.extensions = {'os-user-data': 'fake'}
user_data = 'fake'
params = {'user_data': user_data}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['user_data'], user_data)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_user_data_disabled(self):
user_data = 'fake'
params = {'user_data': user_data}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['user_data'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_keypairs_enabled(self):
self.ext_mgr.extensions = {'os-keypairs': 'fake'}
key_name = 'green'
params = {'key_name': key_name}
old_create = compute_api.API.create
# NOTE(sdague): key pair goes back to the database,
# so we need to stub it out for tests
def key_pair_get(context, user_id, name):
return {'public_key': 'FAKE_KEY',
'fingerprint': 'FAKE_FINGERPRINT',
'name': name}
def create(*args, **kwargs):
self.assertEqual(kwargs['key_name'], key_name)
return old_create(*args, **kwargs)
self.stubs.Set(db, 'key_pair_get', key_pair_get)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_keypairs_disabled(self):
key_name = 'green'
params = {'key_name': key_name}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['key_name'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_availability_zone_enabled(self):
self.ext_mgr.extensions = {'os-availability-zone': 'fake'}
availability_zone = 'fake'
params = {'availability_zone': availability_zone}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['availability_zone'], availability_zone)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_availability_zone_disabled(self):
availability_zone = 'fake'
params = {'availability_zone': availability_zone}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['availability_zone'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_multiple_create_enabled(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
min_count = 2
max_count = 3
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['max_count'], 3)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_multiple_create_disabled(self):
min_count = 2
max_count = 3
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 1)
self.assertEqual(kwargs['max_count'], 1)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_networks_enabled(self):
self.ext_mgr.extensions = {'os-networks': 'fake'}
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None)]
self.assertEqual(kwargs['requested_networks'], result)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_networks_disabled_quantumv2(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
None)]
self.assertEqual(kwargs['requested_networks'], result)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
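    # Note the shape difference asserted above: with nova-network the
    # requested network is normalized to (uuid, fixed_ip) 2-tuples, while
    # the quantum v2 path yields 3-tuples whose third element is assumed
    # to be the port id.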
def test_create_instance_with_networks_disabled(self):
self.ext_mgr.extensions = {}
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['requested_networks'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_access_ip(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = '1.2.3.4'
access_ipv6 = 'fead::1234'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_access_ip_pass_disabled(self):
        # Test with admin passwords disabled. See lp bug 921814.
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = '1.2.3.4'
access_ipv6 = 'fead::1234'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_bad_format_access_ip_v4(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = 'bad_format'
access_ipv6 = 'fead::1234'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance_bad_format_access_ip_v6(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = '1.2.3.4'
access_ipv6 = 'bad_format'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance_name_too_long(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'X' * 256,
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_pass_disabled(self):
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_too_much_metadata(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
'vote': 'fiddletown',
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.create, req, body)
def test_create_instance_metadata_key_too_long(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
('a' * 260): '12345',
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.create, req, body)
def test_create_instance_metadata_value_too_long(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'key1': ('a' * 260),
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.create, req, body)
def test_create_instance_metadata_key_blank(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'': '12345',
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_invalid_key_name(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/flavors/3'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
key_name='nonexistentkey'))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_valid_key_name(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/flavors/3'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
key_name='key'))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_pass_len(res["server"])
def test_create_instance_invalid_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/asdf'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_invalid_flavor_id_int(self):
image_href = 'http://localhost/v2/fake/images/2'
flavor_ref = -1
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_bad_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/17'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_with_config_drive(self):
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
'config_drive': "true",
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_config_drive_as_id(self):
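        # config_drive also accepts an image id/href as its value, not
        # just a boolean string; the create should still succeed.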
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
'config_drive': image_href,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_bad_config_drive(self):
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
'config_drive': 'asdf',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_without_config_drive(self):
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_config_drive_disabled(self):
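        # With the os-config-drive extension not enabled, any config_drive
        # value in the request should reach compute_api.create as None.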
config_drive = [{'config_drive': 'foo'}]
params = {'config_drive': config_drive}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['config_drive'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_bad_href(self):
image_href = 'asdf'
flavor_ref = 'http://localhost/v2/flavors/3'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_local_href(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': flavor_ref,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_admin_pass(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': 3,
'adminPass': 'testpass',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(server['adminPass'], body['server']['adminPass'])
def test_create_instance_admin_pass_pass_disabled(self):
self.flags(enable_instance_password=False)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': 3,
'adminPass': 'testpass',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertTrue('adminPass' in body['server'])
self.assertTrue('adminPass' not in server)
def test_create_instance_admin_pass_empty(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': 3,
'adminPass': '',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = "application/json"
# The fact that the action doesn't raise is enough validation
self.controller.create(req, body)
def test_create_instance_invalid_personality(self):
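        # Stub compute_api.create to raise UnicodeDecodeError, simulating
        # personality file contents that fail to decode; the controller
        # should translate this into a 400.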
def fake_create(*args, **kwargs):
codec = 'utf8'
content = 'b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA=='
start_position = 19
end_position = 20
msg = 'invalid start byte'
raise UnicodeDecodeError(codec, content, start_position,
end_position, msg)
self.stubs.Set(compute_api.API,
'create',
fake_create)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': flavor_ref,
'personality': [
{
"path": "/etc/banner.txt",
"contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_location(self):
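        # The create response should carry a Location header that matches
        # the new server's self link.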
selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
bookhref = 'http://localhost/fake/servers/%s' % FAKE_UUID
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
robj = self.controller.create(req, body)
self.assertEqual(robj['Location'], selfhref)
def _do_test_create_instance_above_quota(self, resource, allowed, quota,
expected_msg):
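        # Helper: stub the quota engine so that creating one more instance
        # exceeds the given resource quota, then check the 413 explanation.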
fakes.stub_out_instance_quota(self.stubs, allowed, quota, resource)
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
body = dict(server=dict(
name='server_test', imageRef=image_uuid, flavorRef=3,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
try:
            self.controller.create(req, body).obj['server']
self.fail('expected quota to be exceeded')
except webob.exc.HTTPRequestEntityTooLarge as e:
self.assertEquals(e.explanation, expected_msg)
def test_create_instance_above_quota_instances(self):
msg = _('Quota exceeded for instances: Requested 1, but'
' already used 10 of 10 instances')
self._do_test_create_instance_above_quota('instances', 0, 10, msg)
def test_create_instance_above_quota_ram(self):
msg = _('Quota exceeded for ram: Requested 4096, but'
' already used 8192 of 10240 ram')
self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
def test_create_instance_above_quota_cores(self):
msg = _('Quota exceeded for cores: Requested 2, but'
' already used 9 of 10 cores')
self._do_test_create_instance_above_quota('cores', 1, 10, msg)
class TestServerCreateRequestXMLDeserializer(test.TestCase):
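    # Exercises servers.CreateDeserializer against the XML request bodies
    # accepted by the v2 server create API.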
def setUp(self):
super(TestServerCreateRequestXMLDeserializer, self).setUp()
self.deserializer = servers.CreateDeserializer()
def test_minimal_request(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
},
}
self.assertEquals(request['body'], expected)
def test_request_with_alternate_namespace_prefix(self):
serial_request = """
<ns2:server xmlns:ns2="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<ns2:metadata><ns2:meta key="hello">world</ns2:meta></ns2:metadata>
</ns2:server>
"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
'metadata': {"hello": "world"},
},
}
self.assertEquals(request['body'], expected)
def test_request_with_scheduler_hints_and_alternate_namespace_prefix(self):
serial_request = """
<ns2:server xmlns:ns2="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<ns2:metadata><ns2:meta key="hello">world</ns2:meta></ns2:metadata>
<os:scheduler_hints
xmlns:os="http://docs.openstack.org/compute/ext/scheduler-hints/api/v2">
<hypervisor>xen</hypervisor>
<near>eb999657-dd6b-464e-8713-95c532ac3b18</near>
</os:scheduler_hints>
</ns2:server>
"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
'OS-SCH-HNT:scheduler_hints': {
'hypervisor': ['xen'],
'near': ['eb999657-dd6b-464e-8713-95c532ac3b18']
},
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"metadata": {
"hello": "world"
}
}
}
self.assertEquals(request['body'], expected)
def test_access_ipv4(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
accessIPv4="1.2.3.4"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"accessIPv4": "1.2.3.4",
},
}
self.assertEquals(request['body'], expected)
def test_access_ipv6(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
accessIPv6="fead::1234"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"accessIPv6": "fead::1234",
},
}
self.assertEquals(request['body'], expected)
def test_access_ip(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
accessIPv4="1.2.3.4"
accessIPv6="fead::1234"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
},
}
self.assertEquals(request['body'], expected)
def test_admin_pass(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
adminPass="1234"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"adminPass": "1234",
},
}
self.assertEquals(request['body'], expected)
def test_image_link(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="http://localhost:8774/v2/images/2"
flavorRef="3"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "http://localhost:8774/v2/images/2",
"flavorRef": "3",
},
}
self.assertEquals(request['body'], expected)
def test_flavor_link(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="http://localhost:8774/v2/flavors/3"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "http://localhost:8774/v2/flavors/3",
},
}
self.assertEquals(request['body'], expected)
def test_empty_metadata_personality(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<metadata/>
<personality/>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"metadata": {},
"personality": [],
},
}
self.assertEquals(request['body'], expected)
def test_multiple_metadata_items(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<metadata>
<meta key="one">two</meta>
<meta key="open">snack</meta>
</metadata>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"metadata": {"one": "two", "open": "snack"},
},
}
self.assertEquals(request['body'], expected)
def test_multiple_personality_files(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<personality>
<file path="/etc/banner.txt">MQ==</file>
<file path="/etc/hosts">Mg==</file>
</personality>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"personality": [
{"path": "/etc/banner.txt", "contents": "MQ=="},
{"path": "/etc/hosts", "contents": "Mg=="},
],
},
}
self.assertThat(request['body'], matchers.DictMatches(expected))
def test_spec_request(self):
image_bookmark_link = ("http://servers.api.openstack.org/1234/"
"images/52415800-8b69-11e0-9b19-734f6f006e54")
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
imageRef="%s"
flavorRef="52415800-8b69-11e0-9b19-734f1195ff37"
name="new-server-test">
<metadata>
<meta key="My Server Name">Apache1</meta>
</metadata>
<personality>
<file path="/etc/banner.txt">Mg==</file>
</personality>
</server>""" % (image_bookmark_link)
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": ("http://servers.api.openstack.org/1234/"
"images/52415800-8b69-11e0-9b19-734f6f006e54"),
"flavorRef": "52415800-8b69-11e0-9b19-734f1195ff37",
"metadata": {"My Server Name": "Apache1"},
"personality": [
{
"path": "/etc/banner.txt",
"contents": "Mg==",
},
],
},
}
self.assertEquals(request['body'], expected)
def test_request_with_empty_networks(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks/>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_two_networks(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
<network uuid="2" fixed_ip="10.0.2.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"},
{"uuid": "2", "fixed_ip": "10.0.2.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_second_network_node_ignored(self):
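        # Only the first <networks> element is honored; a second sibling
        # <networks> node is silently dropped.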
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
</networks>
<networks>
<network uuid="2" fixed_ip="10.0.2.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_missing_id(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network fixed_ip="10.0.1.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_missing_fixed_ip(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_empty_id(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="" fixed_ip="10.0.1.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "", "fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_empty_fixed_ip(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip=""/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": ""}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_networks_duplicate_ids(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
<network uuid="1" fixed_ip="10.0.2.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"},
{"uuid": "1", "fixed_ip": "10.0.2.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_availability_zone(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1"
availability_zone="some_zone:some_host">
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"availability_zone": "some_zone:some_host",
}}
self.assertEquals(request['body'], expected)
def test_request_with_multiple_create_args(self):
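        # min_count and max_count stay strings after deserialization, while
        # return_reservation_id is coerced to a boolean.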
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1"
min_count="1" max_count="3" return_reservation_id="True">
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"min_count": "1",
"max_count": "3",
"return_reservation_id": True,
}}
self.assertEquals(request['body'], expected)
def test_request_with_disk_config(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
xmlns:OS-DCF="http://docs.openstack.org/compute/ext/disk_config/api/v1.1"
name="new-server-test" imageRef="1" flavorRef="1"
OS-DCF:diskConfig="AUTO">
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"OS-DCF:diskConfig": "AUTO",
}}
self.assertEquals(request['body'], expected)
def test_request_with_scheduler_hints(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
xmlns:OS-SCH-HNT=
"http://docs.openstack.org/compute/ext/scheduler-hints/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<OS-SCH-HNT:scheduler_hints>
<different_host>
7329b667-50c7-46a6-b913-cb2a09dfeee0
</different_host>
<different_host>
f31efb24-34d2-43e1-8b44-316052956a39
</different_host>
</OS-SCH-HNT:scheduler_hints>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"OS-SCH-HNT:scheduler_hints": {
"different_host": [
"7329b667-50c7-46a6-b913-cb2a09dfeee0",
"f31efb24-34d2-43e1-8b44-316052956a39",
]
}
}}
self.assertEquals(request['body'], expected)
def test_request_with_block_device_mapping(self):
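        # delete_on_termination and no_device attributes are coerced from
        # their string forms to booleans.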
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<block_device_mapping>
<mapping volume_id="7329b667-50c7-46a6-b913-cb2a09dfeee0"
device_name="/dev/vda" virtual_name="root"
delete_on_termination="False" />
<mapping snapshot_id="f31efb24-34d2-43e1-8b44-316052956a39"
device_name="/dev/vdb" virtual_name="ephemeral0"
delete_on_termination="False" />
<mapping device_name="/dev/vdc" no_device="True" />
</block_device_mapping>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"block_device_mapping": [
{
"volume_id": "7329b667-50c7-46a6-b913-cb2a09dfeee0",
"device_name": "/dev/vda",
"virtual_name": "root",
"delete_on_termination": False,
},
{
"snapshot_id": "f31efb24-34d2-43e1-8b44-316052956a39",
"device_name": "/dev/vdb",
"virtual_name": "ephemeral0",
"delete_on_termination": False,
},
{
"device_name": "/dev/vdc",
"no_device": True,
},
]
}}
self.assertEquals(request['body'], expected)
def test_request_with_config_drive(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="config_drive_test"
imageRef="1"
flavorRef="1"
config_drive="true"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "config_drive_test",
"imageRef": "1",
"flavorRef": "1",
"config_drive": "true"
},
}
self.assertEquals(request['body'], expected)
def test_corrupt_xml(self):
"""Should throw a 400 error on corrupt xml."""
self.assertRaises(
exception.MalformedRequestBody,
self.deserializer.deserialize,
utils.killer_xml_body())
class TestServerActionRequestXMLDeserializer(test.TestCase):
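    # Exercises servers.ActionDeserializer for the rebuild and resize
    # server actions, including disk_config compatibility handling.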
def setUp(self):
super(TestServerActionRequestXMLDeserializer, self).setUp()
self.deserializer = servers.ActionDeserializer()
def test_rebuild_request(self):
serial_request = """
<rebuild xmlns="http://docs.openstack.org/compute/api/v1.1"
xmlns:OS-DCF="http://docs.openstack.org/compute/ext/disk_config/api/v1.1"
OS-DCF:diskConfig="MANUAL" imageRef="1"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"rebuild": {
"imageRef": "1",
"OS-DCF:diskConfig": "MANUAL",
},
}
self.assertEquals(request['body'], expected)
def test_rebuild_request_auto_disk_config_compat(self):
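        # The legacy auto_disk_config attribute is mapped to the
        # OS-DCF:diskConfig key for backwards compatibility.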
serial_request = """
<rebuild xmlns="http://docs.openstack.org/compute/api/v1.1"
xmlns:OS-DCF="http://docs.openstack.org/compute/ext/disk_config/api/v1.1"
auto_disk_config="MANUAL" imageRef="1"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"rebuild": {
"imageRef": "1",
"OS-DCF:diskConfig": "MANUAL",
},
}
self.assertEquals(request['body'], expected)
def test_resize_request(self):
serial_request = """
<resize xmlns="http://docs.openstack.org/compute/api/v1.1"
xmlns:OS-DCF="http://docs.openstack.org/compute/ext/disk_config/api/v1.1"
OS-DCF:diskConfig="MANUAL" flavorRef="1"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"resize": {
"flavorRef": "1",
"OS-DCF:diskConfig": "MANUAL",
},
}
self.assertEquals(request['body'], expected)
def test_resize_request_auto_disk_config_compat(self):
serial_request = """
<resize xmlns="http://docs.openstack.org/compute/api/v1.1"
xmlns:OS-DCF="http://docs.openstack.org/compute/ext/disk_config/api/v1.1"
auto_disk_config="MANUAL" flavorRef="1"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"resize": {
"flavorRef": "1",
"OS-DCF:diskConfig": "MANUAL",
},
}
self.assertEquals(request['body'], expected)
class TestAddressesXMLSerialization(test.TestCase):
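    # Verifies XML serialization of the addresses index and per-network
    # show views.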
index_serializer = ips.AddressesTemplate()
show_serializer = ips.NetworkTemplate()
def test_xml_declaration(self):
fixture = {
'network_2': [
{'addr': '192.168.0.1', 'version': 4},
{'addr': 'fe80::beef', 'version': 6},
],
}
output = self.show_serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_show(self):
fixture = {
'network_2': [
{'addr': '192.168.0.1', 'version': 4},
{'addr': 'fe80::beef', 'version': 6},
],
}
output = self.show_serializer.serialize(fixture)
root = etree.XML(output)
network = fixture['network_2']
self.assertEqual(str(root.get('id')), 'network_2')
ip_elems = root.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_index(self):
fixture = {
'addresses': {
'network_1': [
{'addr': '192.168.0.3', 'version': 4},
{'addr': '192.168.0.5', 'version': 4},
],
'network_2': [
{'addr': '192.168.0.1', 'version': 4},
{'addr': 'fe80::beef', 'version': 6},
],
},
}
output = self.index_serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'addresses')
addresses_dict = fixture['addresses']
network_elems = root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
class ServersViewBuilderTest(test.TestCase):
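    # Verifies the JSON view builder output for basic and detailed server
    # views, including fault, access IP, and metadata handling.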
def setUp(self):
super(ServersViewBuilderTest, self).setUp()
self.flags(use_ipv6=True)
self.instance = fakes.stub_instance(
id=1,
image_ref="5",
uuid="deadbeef-feed-edee-beef-d0ea7beefedd",
display_name="test_server",
include_fake_metadata=False)
privates = ['172.19.0.1']
publics = ['192.168.0.3']
public6s = ['b33f::fdee:ddff:fecc:bbaa']
def nw_info(*args, **kwargs):
return [(None, {'label': 'public',
'ips': [dict(ip=ip) for ip in publics],
'ip6s': [dict(ip=ip) for ip in public6s]}),
(None, {'label': 'private',
'ips': [dict(ip=ip) for ip in privates]})]
def floaters(*args, **kwargs):
return []
fakes.stub_out_nw_api_get_instance_nw_info(self.stubs, nw_info)
fakes.stub_out_nw_api_get_floating_ips_by_fixed_address(self.stubs,
floaters)
self.uuid = self.instance['uuid']
self.view_builder = views.servers.ViewBuilder()
self.request = fakes.HTTPRequest.blank("/v2")
def test_get_flavor_valid_instance_type(self):
flavor_bookmark = "http://localhost/fake/flavors/1"
expected = {"id": "1",
"links": [{"rel": "bookmark",
"href": flavor_bookmark}]}
result = self.view_builder._get_flavor(self.request, self.instance)
self.assertEqual(result, expected)
def test_build_server(self):
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_with_project_id(self):
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" %
self.uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % self.uuid,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail(self):
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_no_image(self):
self.instance["image_ref"] = ""
output = self.view_builder.show(self.request, self.instance)
self.assertEqual(output['server']['image'], "")
def test_build_server_detail_with_fault(self):
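        # A 404 fault exposes its details even to a non-admin context;
        # compare with the 500-fault tests below.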
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 404,
'instance_uuid': self.uuid,
'message': "HTTPNotFound",
'details': "Stock details for test",
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"name": "test_server",
"status": "ERROR",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
"fault": {
"code": 404,
"created": "2010-10-10T12:00:00Z",
"message": "HTTPNotFound",
"details": "Stock details for test",
},
}
}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault_no_details_not_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 500,
'instance_uuid': self.uuid,
'message': "Error",
'details': 'Stock details for test',
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 500,
'instance_uuid': self.uuid,
'message': "Error",
'details': 'Stock details for test',
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error",
'details': 'Stock details for test'}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 500,
'instance_uuid': self.uuid,
'message': "Error",
'details': '',
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_but_active(self):
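        # A recorded fault is omitted from the view while the instance is
        # ACTIVE rather than in ERROR state.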
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.instance['fault'] = {
'code': 404,
'instance_uuid': self.uuid,
'message': "HTTPNotFound",
'details': "Stock details for test",
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
output = self.view_builder.show(self.request, self.instance)
self.assertFalse('fault' in output['server'])
def test_build_server_detail_active_status(self):
        # Set the vm_state to ACTIVE and progress to 100 for a finished build.
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "test_server",
"status": "ACTIVE",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_accessipv4(self):
self.instance['access_ip_v4'] = '1.2.3.4'
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"accessIPv4": "1.2.3.4",
"accessIPv6": "",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_accessipv6(self):
self.instance['access_ip_v6'] = 'fead::1234'
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"accessIPv4": "",
"accessIPv6": "fead::1234",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_metadata(self):
metadata = []
metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
self.instance['metadata'] = metadata
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {"Open": "Stack"},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
class ServerXMLSerializationTest(test.TestCase):
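    # Verifies the XML serializers for server show, create, index, and
    # detail responses against hand-built fixtures.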
TIMESTAMP = "2010-10-11T10:30:22Z"
SERVER_HREF = 'http://localhost/v2/servers/%s' % FAKE_UUID
SERVER_NEXT = 'http://localhost/v2/servers?limit=%s&marker=%s'
SERVER_BOOKMARK = 'http://localhost/servers/%s' % FAKE_UUID
IMAGE_BOOKMARK = 'http://localhost/images/5'
FLAVOR_BOOKMARK = 'http://localhost/flavors/1'
def test_xml_declaration(self):
serializer = servers.ServerTemplate()
fixture = {
"server": {
'id': FAKE_UUID,
'user_id': 'fake_user_id',
'tenant_id': 'fake_tenant_id',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_show(self):
serializer = servers.ServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_create(self):
serializer = servers.FullServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": "e4d909c290d0fb1ca068ffaddf22cbd0",
"adminPass": "test_password",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6', 'adminPass']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_index(self):
serializer = servers.MinimalServersTemplate()
uuid1 = fakes.get_fake_uuid(1)
uuid2 = fakes.get_fake_uuid(2)
expected_server_href = 'http://localhost/v2/servers/%s' % uuid1
expected_server_bookmark = 'http://localhost/servers/%s' % uuid1
expected_server_href_2 = 'http://localhost/v2/servers/%s' % uuid2
expected_server_bookmark_2 = 'http://localhost/servers/%s' % uuid2
fixture = {"servers": [
{
"id": fakes.get_fake_uuid(1),
"name": "test_server",
'links': [
{
'href': expected_server_href,
'rel': 'self',
},
{
'href': expected_server_bookmark,
'rel': 'bookmark',
},
],
},
{
"id": fakes.get_fake_uuid(2),
"name": "test_server_2",
'links': [
{
'href': expected_server_href_2,
'rel': 'self',
},
{
'href': expected_server_bookmark_2,
'rel': 'bookmark',
},
],
},
]}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'servers_index')
server_elems = root.findall('{0}server'.format(NS))
self.assertEqual(len(server_elems), 2)
for i, server_elem in enumerate(server_elems):
server_dict = fixture['servers'][i]
for key in ['name', 'id']:
self.assertEqual(server_elem.get(key), str(server_dict[key]))
link_nodes = server_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_index_with_servers_links(self):
serializer = servers.MinimalServersTemplate()
uuid1 = fakes.get_fake_uuid(1)
uuid2 = fakes.get_fake_uuid(2)
expected_server_href = 'http://localhost/v2/servers/%s' % uuid1
expected_server_next = self.SERVER_NEXT % (2, 2)
expected_server_bookmark = 'http://localhost/servers/%s' % uuid1
expected_server_href_2 = 'http://localhost/v2/servers/%s' % uuid2
expected_server_bookmark_2 = 'http://localhost/servers/%s' % uuid2
fixture = {"servers": [
{
"id": fakes.get_fake_uuid(1),
"name": "test_server",
'links': [
{
'href': expected_server_href,
'rel': 'self',
},
{
'href': expected_server_bookmark,
'rel': 'bookmark',
},
],
},
{
"id": fakes.get_fake_uuid(2),
"name": "test_server_2",
'links': [
{
'href': expected_server_href_2,
'rel': 'self',
},
{
'href': expected_server_bookmark_2,
'rel': 'bookmark',
},
],
},
],
"servers_links": [
{
'rel': 'next',
'href': expected_server_next,
},
]}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'servers_index')
server_elems = root.findall('{0}server'.format(NS))
self.assertEqual(len(server_elems), 2)
for i, server_elem in enumerate(server_elems):
server_dict = fixture['servers'][i]
for key in ['name', 'id']:
self.assertEqual(server_elem.get(key), str(server_dict[key]))
link_nodes = server_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
# Check servers_links
servers_links = root.findall('{0}link'.format(ATOMNS))
for i, link in enumerate(fixture['servers_links']):
for key, value in link.items():
self.assertEqual(servers_links[i].get(key), value)
def test_detail(self):
serializer = servers.ServersTemplate()
uuid1 = fakes.get_fake_uuid(1)
expected_server_href = 'http://localhost/v2/servers/%s' % uuid1
expected_server_bookmark = 'http://localhost/servers/%s' % uuid1
expected_image_bookmark = self.IMAGE_BOOKMARK
expected_flavor_bookmark = self.FLAVOR_BOOKMARK
uuid2 = fakes.get_fake_uuid(2)
expected_server_href_2 = 'http://localhost/v2/servers/%s' % uuid2
expected_server_bookmark_2 = 'http://localhost/servers/%s' % uuid2
fixture = {"servers": [
{
"id": fakes.get_fake_uuid(1),
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": expected_image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": expected_flavor_bookmark,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
},
"metadata": {
"Number": "1",
},
"links": [
{
"href": expected_server_href,
"rel": "self",
},
{
"href": expected_server_bookmark,
"rel": "bookmark",
},
],
},
{
"id": fakes.get_fake_uuid(2),
"user_id": 'fake',
"tenant_id": 'fake',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 100,
"name": "test_server_2",
"status": "ACTIVE",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": expected_image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": expected_flavor_bookmark,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
},
"metadata": {
"Number": "2",
},
"links": [
{
"href": expected_server_href_2,
"rel": "self",
},
{
"href": expected_server_bookmark_2,
"rel": "bookmark",
},
],
},
]}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'servers')
server_elems = root.findall('{0}server'.format(NS))
self.assertEqual(len(server_elems), 2)
for i, server_elem in enumerate(server_elems):
server_dict = fixture['servers'][i]
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6']:
self.assertEqual(server_elem.get(key), str(server_dict[key]))
link_nodes = server_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = server_elem.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(),
str(meta_value))
image_root = server_elem.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = server_elem.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'),
server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = server_elem.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_update(self):
serializer = servers.ServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
"fault": {
"code": 500,
"created": self.TIMESTAMP,
"message": "Error Message",
"details": "Fault details",
}
}
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
fault_root = root.find('{0}fault'.format(NS))
fault_dict = server_dict['fault']
self.assertEqual(fault_root.get("code"), str(fault_dict["code"]))
self.assertEqual(fault_root.get("created"), fault_dict["created"])
msg_elem = fault_root.find('{0}message'.format(NS))
self.assertEqual(msg_elem.text, fault_dict["message"])
det_elem = fault_root.find('{0}details'.format(NS))
self.assertEqual(det_elem.text, fault_dict["details"])
def test_action(self):
serializer = servers.FullServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": "e4d909c290d0fb1ca068ffaddf22cbd0",
"adminPass": "test_password",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6', 'adminPass']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
class ServersAllExtensionsTestCase(test.TestCase):
"""
Servers tests using default API router with all extensions enabled.
The intent here is to catch cases where extensions end up throwing
an exception because of a malformed request before the core API
gets a chance to validate the request and return a 422 response.
For example, ServerDiskConfigController extends servers.Controller:
@wsgi.extends
def create(self, req, body):
if 'server' in body:
self._set_disk_config(body['server'])
resp_obj = (yield)
self._show(req, resp_obj)
we want to ensure that the extension isn't barfing on an invalid
body.
"""
def setUp(self):
super(ServersAllExtensionsTestCase, self).setUp()
self.app = compute.APIRouter()
def test_create_missing_server(self):
# Test create with malformed body.
def fake_create(*args, **kwargs):
raise test.TestingException("Should not reach the compute API.")
self.stubs.Set(compute_api.API, 'create', fake_create)
req = fakes.HTTPRequest.blank('/fake/servers')
req.method = 'POST'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(422, res.status_int)
def test_update_missing_server(self):
        # Test update with malformed body.
def fake_update(*args, **kwargs):
raise test.TestingException("Should not reach the compute API.")
        self.stubs.Set(compute_api.API, 'update', fake_update)
req = fakes.HTTPRequest.blank('/fake/servers/1')
req.method = 'PUT'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(422, res.status_int)
class ServersUnprocessableEntityTestCase(test.TestCase):
"""
    Tests of places where we raise 422 Unprocessable Entity.
"""
def setUp(self):
super(ServersUnprocessableEntityTestCase, self).setUp()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def _unprocessable_server_create(self, body):
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.create, req, body)
def test_create_server_no_body(self):
self._unprocessable_server_create(body=None)
def test_create_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._unprocessable_server_create(body=body)
def test_create_server_malformed_entity(self):
body = {'server': 'string'}
self._unprocessable_server_create(body=body)
def _unprocessable_server_update(self, body):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.update, req, FAKE_UUID, body)
def test_update_server_no_body(self):
self._unprocessable_server_update(body=None)
def test_update_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._unprocessable_server_update(body=body)
    def test_update_server_malformed_entity(self):
body = {'server': 'string'}
self._unprocessable_server_update(body=body)
|
{
"content_hash": "a4554f33b251f72f5651eb7bdb129b71",
"timestamp": "",
"source": "github",
"line_count": 5570,
"max_line_length": 79,
"avg_line_length": 38.49533213644524,
"alnum_prop": 0.5030664260163512,
"repo_name": "jessicalucci/NovaOrc",
"id": "5a0c8a9017f15972cdbdade6c030fceb5d904e61",
"size": "215151",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/api/openstack/compute/test_servers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from tests import CloudBaseTest
from .blog import main
class BlogTestCase(CloudBaseTest):
"""Simple test case that ensures the blog code doesn't throw any errors."""
def test_main(self):
main(self.project_id)
|
{
"content_hash": "bee779b7d8d3fcbf4ba9fad6d889c7d7",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 79,
"avg_line_length": 22.9,
"alnum_prop": 0.7161572052401747,
"repo_name": "JPO1/python-docs-samples",
"id": "722ef624f2d0a0f982bf8725a97f54e43856339e",
"size": "805",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "blog/introduction_to_data_models_in_cloud_datastore/blog_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "150111"
},
{
"name": "Shell",
"bytes": "814"
}
],
"symlink_target": ""
}
|
from collections import defaultdict
from functools import partial
import numpy as np
from matplotlib import rcParams
from matplotlib.artist import Artist
from matplotlib.axes import Axes, subplot_class_factory
from matplotlib.transforms import Affine2D, Bbox, Transform
import astropy.units as u
from astropy.coordinates import BaseCoordinateFrame, SkyCoord
from astropy.utils import minversion
from astropy.utils.compat.optional_deps import HAS_PIL
from astropy.wcs import WCS
from astropy.wcs.wcsapi import BaseHighLevelWCS, BaseLowLevelWCS
from .coordinates_map import CoordinatesMap
from .frame import RectangularFrame, RectangularFrame1D
from .transforms import CoordinateTransform
from .utils import get_coord_meta, transform_contour_set_inplace
from .wcsapi import IDENTITY, transform_coord_meta_from_wcs
__all__ = ["WCSAxes", "WCSAxesSubplot"]
VISUAL_PROPERTIES = ["facecolor", "edgecolor", "linewidth", "alpha", "linestyle"]
class _WCSAxesArtist(Artist):
"""This is a dummy artist to enforce the correct z-order of axis ticks,
tick labels, and gridlines.
FIXME: This is a bit of a hack. ``Axes.draw`` sorts the artists by zorder
and then renders them in sequence. For normal Matplotlib axes, the ticks,
tick labels, and gridlines are included in this list of artists and hence
are automatically drawn in the correct order. However, ``WCSAxes`` disables
the native ticks, labels, and gridlines. Instead, ``WCSAxes.draw`` renders
ersatz ticks, labels, and gridlines by explicitly calling the functions
``CoordinateHelper._draw_ticks``, ``CoordinateHelper._draw_grid``, etc.
This hack would not be necessary if ``WCSAxes`` drew ticks, tick labels,
    and gridlines in the standard way."""
def draw(self, renderer, *args, **kwargs):
self.axes.draw_wcsaxes(renderer)
class WCSAxes(Axes):
"""
The main axes class that can be used to show world coordinates from a WCS.
Parameters
----------
fig : `~matplotlib.figure.Figure`
The figure to add the axes to
*args
``*args`` can be a single ``(left, bottom, width, height)``
rectangle or a single `matplotlib.transforms.Bbox`. This specifies
the rectangle (in figure coordinates) where the Axes is positioned.
``*args`` can also consist of three numbers or a single three-digit
number; in the latter case, the digits are considered as
independent numbers. The numbers are interpreted as ``(nrows,
ncols, index)``: ``(nrows, ncols)`` specifies the size of an array
of subplots, and ``index`` is the 1-based index of the subplot
being created. Finally, ``*args`` can also directly be a
`matplotlib.gridspec.SubplotSpec` instance.
wcs : :class:`~astropy.wcs.WCS`, optional
The WCS for the data. If this is specified, ``transform`` cannot be
specified.
transform : `~matplotlib.transforms.Transform`, optional
The transform for the data. If this is specified, ``wcs`` cannot be
specified.
coord_meta : dict, optional
A dictionary providing additional metadata when ``transform`` is
specified. This should include the keys ``type``, ``wrap``, and
``unit``. Each of these should be a list with as many items as the
dimension of the WCS. The ``type`` entries should be one of
``longitude``, ``latitude``, or ``scalar``, the ``wrap`` entries should
give, for the longitude, the angle at which the coordinate wraps (and
`None` otherwise), and the ``unit`` should give the unit of the
coordinates as :class:`~astropy.units.Unit` instances. This can
optionally also include a ``format_unit`` entry giving the units to use
for the tick labels (if not specified, this defaults to ``unit``).
transData : `~matplotlib.transforms.Transform`, optional
Can be used to override the default data -> pixel mapping.
slices : tuple, optional
For WCS transformations with more than two dimensions, we need to
choose which dimensions are being shown in the 2D image. The slice
should contain one ``x`` entry, one ``y`` entry, and the rest of the
values should be integers indicating the slice through the data. The
order of the items in the slice should be the same as the order of the
dimensions in the :class:`~astropy.wcs.WCS`, and the opposite of the
order of the dimensions in Numpy. For example, ``(50, 'x', 'y')`` means
that the first WCS dimension (last Numpy dimension) will be sliced at
an index of 50, the second WCS and Numpy dimension will be shown on the
x axis, and the final WCS dimension (first Numpy dimension) will be
shown on the y-axis (and therefore the data will be plotted using
``data[:, :, 50].transpose()``)
frame_class : type, optional
The class for the frame, which should be a subclass of
:class:`~astropy.visualization.wcsaxes.frame.BaseFrame`. The default is to use a
:class:`~astropy.visualization.wcsaxes.frame.RectangularFrame`
"""
def __init__(
self,
fig,
*args,
wcs=None,
transform=None,
coord_meta=None,
transData=None,
slices=None,
frame_class=None,
**kwargs,
):
""" """
super().__init__(fig, *args, **kwargs)
self._bboxes = []
if frame_class is not None:
self.frame_class = frame_class
elif wcs is not None and (
wcs.pixel_n_dim == 1 or (slices is not None and "y" not in slices)
):
self.frame_class = RectangularFrame1D
else:
self.frame_class = RectangularFrame
        if transData is not None:
# User wants to override the transform for the final
# data->pixel mapping
self.transData = transData
self.reset_wcs(
wcs=wcs, slices=slices, transform=transform, coord_meta=coord_meta
)
self._hide_parent_artists()
self.format_coord = self._display_world_coords
self._display_coords_index = 0
fig.canvas.mpl_connect("key_press_event", self._set_cursor_prefs)
self.patch = self.coords.frame.patch
self._wcsaxesartist = _WCSAxesArtist()
self.add_artist(self._wcsaxesartist)
self._drawn = False
def _display_world_coords(self, x, y):
if not self._drawn:
return ""
if self._display_coords_index == -1:
return f"{x} {y} (pixel)"
pixel = np.array([x, y])
coords = self._all_coords[self._display_coords_index]
world = coords._transform.transform(np.array([pixel]))[0]
coord_strings = []
for idx, coord in enumerate(coords):
if coord.coord_index is not None:
coord_strings.append(
coord.format_coord(world[coord.coord_index], format="ascii")
)
coord_string = " ".join(coord_strings)
if self._display_coords_index == 0:
system = "world"
else:
system = f"world, overlay {self._display_coords_index}"
coord_string = f"{coord_string} ({system})"
return coord_string
def _set_cursor_prefs(self, event, **kwargs):
if event.key == "w":
self._display_coords_index += 1
if self._display_coords_index + 1 > len(self._all_coords):
self._display_coords_index = -1
def _hide_parent_artists(self):
# Turn off spines and current axes
for s in self.spines.values():
s.set_visible(False)
self.xaxis.set_visible(False)
if self.frame_class is not RectangularFrame1D:
self.yaxis.set_visible(False)
# We now overload ``imshow`` because we need to make sure that origin is
# set to ``lower`` for all images, which means that we need to flip RGB
# images.
def imshow(self, X, *args, **kwargs):
"""
Wrapper to Matplotlib's :meth:`~matplotlib.axes.Axes.imshow`.
If an RGB image is passed as a PIL object, it will be flipped
vertically and ``origin`` will be set to ``lower``, since WCS
transformations - like FITS files - assume that the origin is the lower
left pixel of the image (whereas RGB images have the origin in the top
left).
All arguments are passed to :meth:`~matplotlib.axes.Axes.imshow`.
"""
origin = kwargs.pop("origin", "lower")
# plt.imshow passes origin as None, which we should default to lower.
if origin is None:
origin = "lower"
elif origin == "upper":
raise ValueError("Cannot use images with origin='upper' in WCSAxes.")
if HAS_PIL:
from PIL.Image import Image
if minversion("PIL", "9.1"):
from PIL.Image import Transpose
FLIP_TOP_BOTTOM = Transpose.FLIP_TOP_BOTTOM
else:
from PIL.Image import FLIP_TOP_BOTTOM
if isinstance(X, Image) or hasattr(X, "getpixel"):
X = X.transpose(FLIP_TOP_BOTTOM)
return super().imshow(X, *args, origin=origin, **kwargs)
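    # A minimal usage sketch of the wrapper above; ``ax`` and ``data`` are
    # illustrative names, not defined in this module:
    #
    #     ax.imshow(data, cmap='gray')     # origin defaults to 'lower'
    #     ax.imshow(data, origin='upper')  # raises ValueError in WCSAxes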
def contour(self, *args, **kwargs):
"""
Plot contours.
This is a custom implementation of :meth:`~matplotlib.axes.Axes.contour`
which applies the transform (if specified) to all contours in one go for
performance rather than to each contour line individually. All
positional and keyword arguments are the same as for
:meth:`~matplotlib.axes.Axes.contour`.
"""
# In Matplotlib, when calling contour() with a transform, each
# individual path in the contour map is transformed separately. However,
# this is much too slow for us since each call to the transforms results
# in an Astropy coordinate transformation, which has a non-negligible
# overhead - therefore a better approach is to override contour(), call
# the Matplotlib one with no transform, then apply the transform in one
# go to all the segments that make up the contour map.
transform = kwargs.pop("transform", None)
cset = super().contour(*args, **kwargs)
if transform is not None:
# The transform passed to self.contour will normally include
# a transData component at the end, but we can remove that since
# we are already working in data space.
transform = transform - self.transData
transform_contour_set_inplace(cset, transform)
return cset
def contourf(self, *args, **kwargs):
"""
Plot filled contours.
This is a custom implementation of :meth:`~matplotlib.axes.Axes.contourf`
which applies the transform (if specified) to all contours in one go for
performance rather than to each contour line individually. All
positional and keyword arguments are the same as for
:meth:`~matplotlib.axes.Axes.contourf`.
"""
# See notes for contour above.
transform = kwargs.pop("transform", None)
cset = super().contourf(*args, **kwargs)
if transform is not None:
# The transform passed to self.contour will normally include
# a transData component at the end, but we can remove that since
# we are already working in data space.
transform = transform - self.transData
transform_contour_set_inplace(cset, transform)
return cset
def _transform_plot_args(self, *args, **kwargs):
"""
Apply transformations to arguments to ``plot_coord`` and
``scatter_coord``
"""
if isinstance(args[0], (SkyCoord, BaseCoordinateFrame)):
# Extract the frame from the first argument.
frame0 = args[0]
if isinstance(frame0, SkyCoord):
frame0 = frame0.frame
native_frame = self._transform_pixel2world.frame_out
# Transform to the native frame of the plot
frame0 = frame0.transform_to(native_frame)
plot_data = []
for coord in self.coords:
if coord.coord_type == "longitude":
plot_data.append(frame0.spherical.lon.to_value(u.deg))
elif coord.coord_type == "latitude":
plot_data.append(frame0.spherical.lat.to_value(u.deg))
else:
raise NotImplementedError(
"Coordinates cannot be plotted with this "
"method because the WCS does not represent longitude/latitude."
)
if "transform" in kwargs.keys():
raise TypeError(
"The 'transform' keyword argument is not allowed,"
" as it is automatically determined by the input coordinate frame."
)
transform = self.get_transform(native_frame)
kwargs.update({"transform": transform})
args = tuple(plot_data) + args[1:]
return args, kwargs
def plot_coord(self, *args, **kwargs):
"""
Plot `~astropy.coordinates.SkyCoord` or
`~astropy.coordinates.BaseCoordinateFrame` objects onto the axes.
The first argument to
:meth:`~astropy.visualization.wcsaxes.WCSAxes.plot_coord` should be a
coordinate, which will then be converted to the first two parameters to
`matplotlib.axes.Axes.plot`. All other arguments are the same as
`matplotlib.axes.Axes.plot`. If not specified a ``transform`` keyword
argument will be created based on the coordinate.
Parameters
----------
coordinate : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The coordinate object to plot on the axes. This is converted to the
first two arguments to `matplotlib.axes.Axes.plot`.
See Also
--------
matplotlib.axes.Axes.plot :
This method is called from this function with all arguments passed to it.
"""
args, kwargs = self._transform_plot_args(*args, **kwargs)
return super().plot(*args, **kwargs)
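    # A minimal usage sketch; the coordinate values below are illustrative:
    #
    #     from astropy.coordinates import SkyCoord
    #     import astropy.units as u
    #     coords = SkyCoord(ra=[10, 20] * u.deg, dec=[-5, 5] * u.deg)
    #     ax.plot_coord(coords, 'o')  # transform is derived from the frame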
def scatter_coord(self, *args, **kwargs):
"""
Scatter `~astropy.coordinates.SkyCoord` or
`~astropy.coordinates.BaseCoordinateFrame` objects onto the axes.
The first argument to
:meth:`~astropy.visualization.wcsaxes.WCSAxes.scatter_coord` should be a
coordinate, which will then be converted to the first two parameters to
`matplotlib.axes.Axes.scatter`. All other arguments are the same as
`matplotlib.axes.Axes.scatter`. If not specified a ``transform``
keyword argument will be created based on the coordinate.
Parameters
----------
coordinate : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The coordinate object to scatter on the axes. This is converted to
the first two arguments to `matplotlib.axes.Axes.scatter`.
See Also
--------
matplotlib.axes.Axes.scatter : This method is called from this function with all arguments passed to it.
"""
args, kwargs = self._transform_plot_args(*args, **kwargs)
return super().scatter(*args, **kwargs)
def reset_wcs(self, wcs=None, slices=None, transform=None, coord_meta=None):
"""
Reset the current Axes, to use a new WCS object.
"""
        # Here we determine all the coordinate axes that should be shown.
if wcs is None and transform is None:
self.wcs = IDENTITY
else:
            # We now force a call to 'set', which ensures the WCS object is
# consistent, which will only be important if the WCS has been set
# by hand. For example if the user sets a celestial WCS by hand and
# forgets to set the units, WCS.wcs.set() will do this.
if wcs is not None:
# Check if the WCS object is an instance of `astropy.wcs.WCS`
# This check is necessary as only `astropy.wcs.WCS` supports
# wcs.set() method
if isinstance(wcs, WCS):
wcs.wcs.set()
if isinstance(wcs, BaseHighLevelWCS):
wcs = wcs.low_level_wcs
self.wcs = wcs
# If we are making a new WCS, we need to preserve the path object since
# it may already be used by objects that have been plotted, and we need
# to continue updating it. CoordinatesMap will create a new frame
# instance, but we can tell that instance to keep using the old path.
if hasattr(self, "coords"):
previous_frame = {
"path": self.coords.frame._path,
"color": self.coords.frame.get_color(),
"linewidth": self.coords.frame.get_linewidth(),
}
else:
previous_frame = {"path": None}
if self.wcs is not None:
transform, coord_meta = transform_coord_meta_from_wcs(
self.wcs, self.frame_class, slices=slices
)
self.coords = CoordinatesMap(
self,
transform=transform,
coord_meta=coord_meta,
frame_class=self.frame_class,
previous_frame_path=previous_frame["path"],
)
self._transform_pixel2world = transform
if previous_frame["path"] is not None:
self.coords.frame.set_color(previous_frame["color"])
self.coords.frame.set_linewidth(previous_frame["linewidth"])
self._all_coords = [self.coords]
# Common default settings for Rectangular Frame
for ind, pos in enumerate(
coord_meta.get("default_axislabel_position", ["b", "l"])
):
self.coords[ind].set_axislabel_position(pos)
for ind, pos in enumerate(
coord_meta.get("default_ticklabel_position", ["b", "l"])
):
self.coords[ind].set_ticklabel_position(pos)
for ind, pos in enumerate(
coord_meta.get("default_ticks_position", ["bltr", "bltr"])
):
self.coords[ind].set_ticks_position(pos)
if rcParams["axes.grid"]:
self.grid()
def draw_wcsaxes(self, renderer):
if not self.axison:
return
        # Here we need to find out the range of all coordinates, and update
        # the range for each coordinate axis. For now, just assume it covers
        # the whole sky.
self._bboxes = []
# This generates a structure like [coords][axis] = [...]
ticklabels_bbox = defaultdict(partial(defaultdict, list))
visible_ticks = []
for coords in self._all_coords:
# Draw grids
coords.frame.update()
for coord in coords:
coord._draw_grid(renderer)
for coords in self._all_coords:
# Draw tick labels
for coord in coords:
coord._draw_ticks(
renderer,
bboxes=self._bboxes,
ticklabels_bbox=ticklabels_bbox[coord],
)
visible_ticks.extend(coord.ticklabels.get_visible_axes())
for coords in self._all_coords:
# Draw axis labels
for coord in coords:
coord._draw_axislabels(
renderer,
bboxes=self._bboxes,
ticklabels_bbox=ticklabels_bbox,
visible_ticks=visible_ticks,
)
self.coords.frame.draw(renderer)
def draw(self, renderer, **kwargs):
"""Draw the axes."""
# Before we do any drawing, we need to remove any existing grid lines
# drawn with contours, otherwise if we try and remove the contours
# part way through drawing, we end up with the issue mentioned in
# https://github.com/astropy/astropy/issues/12446
for coords in self._all_coords:
for coord in coords:
coord._clear_grid_contour()
# In Axes.draw, the following code can result in the xlim and ylim
        # values changing, so we need to call this here to make sure that
# the limits are correct before we update the patch.
locator = self.get_axes_locator()
if locator:
pos = locator(self, renderer)
self.apply_aspect(pos)
else:
self.apply_aspect()
if self._axisbelow is True:
self._wcsaxesartist.set_zorder(0.5)
elif self._axisbelow is False:
self._wcsaxesartist.set_zorder(2.5)
else:
# 'line': above patches, below lines
self._wcsaxesartist.set_zorder(1.5)
        # We need to make sure that the frame path is up to date
self.coords.frame._update_patch_path()
super().draw(renderer, **kwargs)
self._drawn = True
# Matplotlib internally sometimes calls set_xlabel(label=...).
def set_xlabel(self, xlabel=None, labelpad=1, loc=None, **kwargs):
"""Set x-label."""
if xlabel is None:
xlabel = kwargs.pop("label", None)
if xlabel is None:
raise TypeError(
"set_xlabel() missing 1 required positional argument: 'xlabel'"
)
for coord in self.coords:
if (
"b" in coord.axislabels.get_visible_axes()
or "h" in coord.axislabels.get_visible_axes()
):
coord.set_axislabel(xlabel, minpad=labelpad, **kwargs)
break
def set_ylabel(self, ylabel=None, labelpad=1, loc=None, **kwargs):
"""Set y-label"""
if ylabel is None:
ylabel = kwargs.pop("label", None)
if ylabel is None:
raise TypeError(
"set_ylabel() missing 1 required positional argument: 'ylabel'"
)
if self.frame_class is RectangularFrame1D:
return super().set_ylabel(ylabel, labelpad=labelpad, **kwargs)
for coord in self.coords:
if (
"l" in coord.axislabels.get_visible_axes()
or "c" in coord.axislabels.get_visible_axes()
):
coord.set_axislabel(ylabel, minpad=labelpad, **kwargs)
break
def get_xlabel(self):
for coord in self.coords:
if (
"b" in coord.axislabels.get_visible_axes()
or "h" in coord.axislabels.get_visible_axes()
):
return coord.get_axislabel()
def get_ylabel(self):
if self.frame_class is RectangularFrame1D:
return super().get_ylabel()
for coord in self.coords:
if (
"l" in coord.axislabels.get_visible_axes()
or "c" in coord.axislabels.get_visible_axes()
):
return coord.get_axislabel()
def get_coords_overlay(self, frame, coord_meta=None):
# Here we can't use get_transform because that deals with
# pixel-to-pixel transformations when passing a WCS object.
if isinstance(frame, WCS):
transform, coord_meta = transform_coord_meta_from_wcs(
frame, self.frame_class
)
else:
transform = self._get_transform_no_transdata(frame)
if coord_meta is None:
coord_meta = get_coord_meta(frame)
coords = CoordinatesMap(
self,
transform=transform,
coord_meta=coord_meta,
frame_class=self.frame_class,
)
self._all_coords.append(coords)
# Common settings for overlay
coords[0].set_axislabel_position("t")
coords[1].set_axislabel_position("r")
coords[0].set_ticklabel_position("t")
coords[1].set_ticklabel_position("r")
self.overlay_coords = coords
return coords
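    # A minimal usage sketch, assuming ``ax`` is a WCSAxes showing a
    # celestial (e.g. ICRS) image:
    #
    #     overlay = ax.get_coords_overlay('galactic')
    #     overlay.grid(color='white', linestyle='dotted')
    #     overlay[0].set_axislabel('Galactic Longitude')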
def get_transform(self, frame):
"""
Return a transform from the specified frame to display coordinates.
        The returned transform includes the Axes' transData, so it can be
        passed directly as the ``transform`` keyword argument to Matplotlib
        plotting methods.
Parameters
----------
frame : :class:`~astropy.wcs.WCS` or :class:`~matplotlib.transforms.Transform` or str
The ``frame`` parameter can have several possible types:
* :class:`~astropy.wcs.WCS` instance: assumed to be a
transformation from pixel to world coordinates, where the
world coordinates are the same as those in the WCS
transformation used for this ``WCSAxes`` instance. This is
used for example to show contours, since this involves
plotting an array in pixel coordinates that are not the
final data coordinate and have to be transformed to the
common world coordinate system first.
* :class:`~matplotlib.transforms.Transform` instance: it is
assumed to be a transform to the world coordinates that are
part of the WCS used to instantiate this ``WCSAxes``
instance.
* ``'pixel'`` or ``'world'``: return a transformation that
allows users to plot in pixel/data coordinates (essentially
an identity transform) and ``world`` (the default
world-to-pixel transformation used to instantiate the
``WCSAxes`` instance).
* ``'fk5'`` or ``'galactic'``: return a transformation from
the specified frame to the pixel/data coordinates.
* :class:`~astropy.coordinates.BaseCoordinateFrame` instance.
"""
return self._get_transform_no_transdata(frame).inverted() + self.transData
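    # A minimal usage sketch; ``l``, ``b``, ``array`` and ``other_wcs`` are
    # illustrative names:
    #
    #     ax.scatter(l, b, transform=ax.get_transform('galactic'))
    #     ax.contour(array, transform=ax.get_transform(other_wcs))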
def _get_transform_no_transdata(self, frame):
"""
Return a transform from data to the specified frame
"""
if isinstance(frame, (BaseLowLevelWCS, BaseHighLevelWCS)):
if isinstance(frame, BaseHighLevelWCS):
frame = frame.low_level_wcs
transform, coord_meta = transform_coord_meta_from_wcs(
frame, self.frame_class
)
transform_world2pixel = transform.inverted()
if self._transform_pixel2world.frame_out == transform_world2pixel.frame_in:
return self._transform_pixel2world + transform_world2pixel
else:
return (
self._transform_pixel2world
+ CoordinateTransform(
self._transform_pixel2world.frame_out,
transform_world2pixel.frame_in,
)
+ transform_world2pixel
)
elif isinstance(frame, str) and frame == "pixel":
return Affine2D()
elif isinstance(frame, Transform):
return self._transform_pixel2world + frame
else:
if isinstance(frame, str) and frame == "world":
return self._transform_pixel2world
else:
coordinate_transform = CoordinateTransform(
self._transform_pixel2world.frame_out, frame
)
if coordinate_transform.same_frames:
return self._transform_pixel2world
else:
return self._transform_pixel2world + coordinate_transform
def get_tightbbox(self, renderer, *args, **kwargs):
# FIXME: we should determine what to do with the extra arguments here.
# Note that the expected signature of this method is different in
# Matplotlib 3.x compared to 2.x, but we only support 3.x now.
if not self.get_visible():
return
# Do a draw to populate the self._bboxes list
self.draw_wcsaxes(renderer)
bb = [b for b in self._bboxes if b and (b.width != 0 or b.height != 0)]
bb.append(super().get_tightbbox(renderer, *args, **kwargs))
if bb:
_bbox = Bbox.union(bb)
return _bbox
else:
return self.get_window_extent(renderer)
def grid(self, b=None, axis="both", *, which="major", **kwargs):
"""
Plot gridlines for both coordinates.
Standard matplotlib appearance options (color, alpha, etc.) can be
        passed as keyword arguments. This behaves like `matplotlib.axes.Axes.grid`
except that if no arguments are specified, the grid is shown rather
than toggled.
Parameters
----------
b : bool
Whether to show the gridlines.
axis : 'both', 'x', 'y'
Which axis to turn the gridlines on/off for.
which : str
Currently only ``'major'`` is supported.
"""
if not hasattr(self, "coords"):
return
if which != "major":
raise NotImplementedError(
"Plotting the grid for the minor ticks is not supported."
)
if axis == "both":
self.coords.grid(draw_grid=b, **kwargs)
elif axis == "x":
self.coords[0].grid(draw_grid=b, **kwargs)
elif axis == "y":
self.coords[1].grid(draw_grid=b, **kwargs)
else:
raise ValueError("axis should be one of x/y/both")
def tick_params(self, axis="both", **kwargs):
"""
Method to set the tick and tick label parameters in the same way as the
:meth:`~matplotlib.axes.Axes.tick_params` method in Matplotlib.
This is provided for convenience, but the recommended API is to use
:meth:`~astropy.visualization.wcsaxes.CoordinateHelper.set_ticks`,
:meth:`~astropy.visualization.wcsaxes.CoordinateHelper.set_ticklabel`,
:meth:`~astropy.visualization.wcsaxes.CoordinateHelper.set_ticks_position`,
:meth:`~astropy.visualization.wcsaxes.CoordinateHelper.set_ticklabel_position`,
and :meth:`~astropy.visualization.wcsaxes.CoordinateHelper.grid`.
Parameters
----------
axis : int or str, optional
Which axis to apply the parameters to. This defaults to 'both'
but this can also be set to an `int` or `str` that refers to the
axis to apply it to, following the valid values that can index
        ``ax.coords``. Note that ``'x'`` and ``'y'`` are also accepted in
the case of rectangular axes.
which : {'both', 'major', 'minor'}, optional
        Which ticks to apply the settings to. By default, settings are
applied to both major and minor ticks. Note that if ``'minor'`` is
specified, only the length of the ticks can be set currently.
direction : {'in', 'out'}, optional
Puts ticks inside the axes, or outside the axes.
length : float, optional
Tick length in points.
width : float, optional
Tick width in points.
color : color, optional
Tick color (accepts any valid Matplotlib color)
pad : float, optional
Distance in points between tick and label.
labelsize : float or str, optional
Tick label font size in points or as a string (e.g., 'large').
labelcolor : color, optional
Tick label color (accepts any valid Matplotlib color)
colors : color, optional
Changes the tick color and the label color to the same value
(accepts any valid Matplotlib color).
bottom, top, left, right : bool, optional
Where to draw the ticks. Note that this can only be given if a
specific coordinate is specified via the ``axis`` argument, and it
will not work correctly if the frame is not rectangular.
labelbottom, labeltop, labelleft, labelright : bool, optional
Where to draw the tick labels. Note that this can only be given if a
specific coordinate is specified via the ``axis`` argument, and it
will not work correctly if the frame is not rectangular.
grid_color : color, optional
The color of the grid lines (accepts any valid Matplotlib color).
grid_alpha : float, optional
Transparency of grid lines: 0 (transparent) to 1 (opaque).
grid_linewidth : float, optional
Width of grid lines in points.
grid_linestyle : str, optional
The style of the grid lines (accepts any valid Matplotlib line
style).
"""
if not hasattr(self, "coords"):
# Axes haven't been fully initialized yet, so just ignore, as
# Axes.__init__ calls this method
return
if axis == "both":
for pos in ("bottom", "left", "top", "right"):
if pos in kwargs:
raise ValueError(f"Cannot specify {pos}= when axis='both'")
if "label" + pos in kwargs:
raise ValueError(f"Cannot specify label{pos}= when axis='both'")
for coord in self.coords:
coord.tick_params(**kwargs)
elif axis in self.coords:
self.coords[axis].tick_params(**kwargs)
elif axis in ("x", "y") and self.frame_class is RectangularFrame:
spine = "b" if axis == "x" else "l"
for coord in self.coords:
if spine in coord.axislabels.get_visible_axes():
coord.tick_params(**kwargs)
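    # A minimal usage sketch for the method above:
    #
    #     ax.tick_params(direction='in', colors='white')  # both coordinates
    #     ax.tick_params(axis='x', labelbottom=False)     # rectangular axes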
# In the following, we put the generated subplot class in a temporary class and
# we then inherit it - if we don't do this, the generated class appears to
# belong in matplotlib, not in WCSAxes, from the API's point of view.
class WCSAxesSubplot(subplot_class_factory(WCSAxes)):
"""
    A subplot class for WCSAxes.
"""
pass
|
{
"content_hash": "39de674cff499a886bc872a4a09091c5",
"timestamp": "",
"source": "github",
"line_count": 859,
"max_line_length": 112,
"avg_line_length": 39.84167636786962,
"alnum_prop": 0.5970663861617578,
"repo_name": "astropy/astropy",
"id": "884fcd5bb5c5561005e1f395bc176d7b09e0e091",
"size": "34289",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "astropy/visualization/wcsaxes/core.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11039709"
},
{
"name": "C++",
"bytes": "47001"
},
{
"name": "Cython",
"bytes": "79917"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Lex",
"bytes": "183333"
},
{
"name": "M4",
"bytes": "18757"
},
{
"name": "Makefile",
"bytes": "52508"
},
{
"name": "Python",
"bytes": "12402561"
},
{
"name": "Shell",
"bytes": "17024"
},
{
"name": "TeX",
"bytes": "853"
}
],
"symlink_target": ""
}
|
import os
from enum import Enum
from typing import Sequence, Union
from annotypes import Anno, Array
from packaging.version import Version
from malcolm.core import IncompatibleError, Table
# If things don't get new frames in this time (seconds), consider them
# stalled and raise
FRAME_TIMEOUT = 60
class AttributeDatasetType(Enum):
"""Used to signal from a detector driver that it is producing an NDAttribute
that should be published to the user, and what its NeXus type is"""
#: Primary data that is directly relevant to the user, like a transmission
#: diode.
DETECTOR = "detector"
#: Data that only makes sense when considered with detector data, like a
#: measure of beam current with an ion chamber
MONITOR = "monitor"
#: Readback position of a motor that is taking part in the experiment
POSITION = "position"
class DataType(Enum):
"""The datatype that should be used for the NDAttribute"""
INT = "INT" #: int32
DOUBLE = "DOUBLE" #: float64
STRING = "STRING" #: string
DBRNATIVE = "DBR_NATIVE" #: Whatever native type the PV has
class SourceType(Enum):
"""Where to get the NDAttribute data from"""
PARAM = "paramAttribute" #: From an asyn parameter of this driver
PV = "PVAttribute" #: From a PV name
class StatisticsName(Enum):
"""The types of statistics calculated by the areaDetector NDPluginStats"""
MIN = "MIN_VALUE" #: Minimum counts in any element
MIN_X = "MIN_X" #: X position of minimum counts
MIN_Y = "MIN_Y" #: Y position of minimum counts
MAX = "MAX_VALUE" #: Maximum counts in any element
MAX_X = "MAX_X" #: X position of maximum counts
MAX_Y = "MAX_Y" #: Y position of maximum counts
MEAN = "MEAN_VALUE" #: Mean counts of all elements
SIGMA = "SIGMA_VALUE" #: Sigma of all elements
SUM = "TOTAL" #: Sum of all elements
NET = "NET" #: Sum of all elements not in background region
with Anno("Is the IOC this part connects to running on Windows?"):
APartRunsOnWindows = bool
with Anno("NDAttribute name to be exported"):
AAttributeNames = Union[Array[str]]
with Anno(
"source ID for attribute (PV name for PVAttribute,"
+ "asyn param name for paramAttribute)"
):
ASourceIds = Union[Array[str]]
with Anno("PV descriptions"):
ADescriptions = Union[Array[str]]
with Anno("Types of attribute dataset"):
AAttributeTypes = Union[Array[AttributeDatasetType]]
with Anno("Type of attribute source"):
ASourceTypes = Union[Array[SourceType]]
with Anno("Type of attribute data"):
ADataTypes = Union[Array[DataType]]
UAttributeNames = Union[AAttributeNames, Sequence[str]]
USourceIds = Union[ASourceIds, Sequence[str]]
UDescriptions = Union[ADescriptions, Sequence[str]]
UAttributeTypes = Union[AAttributeTypes, Sequence[AttributeDatasetType]]
UDataTypes = Union[ADataTypes, Sequence[DataType]]
USourceTypes = Union[ASourceTypes, Sequence[SourceType]]
with Anno("Minimum required version for compatibility"):
AVersionRequirement = str
class ExtraAttributesTable(Table):
# Allow CamelCase as arguments will be serialized
# noinspection PyPep8Naming
def __init__(
self,
name: UAttributeNames,
sourceId: USourceIds,
description: UDescriptions,
sourceType: USourceTypes,
dataType: UDataTypes,
datasetType: UAttributeTypes,
) -> None:
self.name = AAttributeNames(name)
self.sourceId = ASourceIds(sourceId)
self.description = ADescriptions(description)
self.sourceType = ASourceTypes(sourceType)
self.dataType = ADataTypes(dataType)
self.datasetType = AAttributeTypes(datasetType)
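# A minimal construction sketch for the table above; all values are
# illustrative, not taken from any real IOC:
#
#     table = ExtraAttributesTable(
#         name=["I0"],
#         sourceId=["BL00X:ION:I0"],
#         description=["Ion chamber current"],
#         sourceType=[SourceType.PV],
#         dataType=[DataType.DOUBLE],
#         datasetType=[AttributeDatasetType.MONITOR],
#     )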
def make_xml_filename(file_dir, mri, suffix="attributes"):
"""Return a Block-specific filename for attribute or layout XML file"""
return os.path.join(file_dir, f"{mri.replace(':', '_')}-{suffix}.xml")
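# For example (hypothetical mri), make_xml_filename("/tmp", "BL00X:DET")
# returns "/tmp/BL00X_DET-attributes.xml", since ":" is not filename-safe.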
def check_driver_version(driver: str, required: AVersionRequirement):
driver_version = Version(driver)
required_version = Version(required)
if (
required_version.major != driver_version.major
or driver_version.minor < required_version.minor
):
raise (
IncompatibleError(
f"Detector driver v{driver_version} detected. "
f"Malcolm requires v{required_version}"
)
)
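# Worked examples of the compatibility rule above (hypothetical versions):
#     check_driver_version("2.5", "2.2")  # passes: same major, newer minor
#     check_driver_version("2.1", "2.2")  # raises: minor version too old
#     check_driver_version("3.0", "2.2")  # raises: major version differs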
|
{
"content_hash": "bc671e08ebd8c260833e989438c1641d",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 80,
"avg_line_length": 34.44881889763779,
"alnum_prop": 0.6907428571428571,
"repo_name": "dls-controls/pymalcolm",
"id": "0064f253b497e5013da75f602decb2845ae038c3",
"size": "4375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "malcolm/modules/ADCore/util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "549"
},
{
"name": "Python",
"bytes": "1583458"
},
{
"name": "Shell",
"bytes": "580"
}
],
"symlink_target": ""
}
|
"""Management script."""
import os
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, Server, Shell
from flask_script.commands import Clean, ShowUrls
from flaskiwsapp.app import create_app
from flaskiwsapp.database import db
from flaskiwsapp.users.models.user import User
from flaskiwsapp.snippets.helpers import register_token_auth
from flaskiwsapp.settings.prodConfig import ProdConfig
from flaskiwsapp.settings.devConfig import DevConfig
from flaskiwsapp.users.controllers.userControllers import update_user, create_user
from flaskiwsapp.snippets.exceptions.userExceptions import UserExistsException
from flaskiwsapp.users.controllers.roleControllers import create_role
from flaskiwsapp.snippets.constants import ROLE_CLIENT, ROLE_EMPLOYEE
from flaskiwsapp.snippets.exceptions.roleExceptions import RoleExistsException
CONFIG = ProdConfig if os.environ.get('IWS_BE') == 'prod' else DevConfig
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
app = create_app(CONFIG)
manager = Manager(app)
migrate = Migrate(app, db)
jwt = register_token_auth(app)
def _make_context():
"""Return context dict for a shell session so you can access app, db, and
the User model by default."""
return {'app': app, 'db': db, 'User': User}
@manager.command
def test():
"""run the tests."""
import pytest
exit_code = pytest.main([TEST_PATH, '--verbose'])
return exit_code
@manager.command
def create_admin():
"""Create a default admin user to get access to the admin panel."""
try:
user = create_user('admin@example.com', 'admin')
update_user(user.id, {'admin': True, 'active': True})
except UserExistsException:
print('Admin user already exists. Try to login with: \n',
              'email: admin@example.com \n',
'password: admin')
@manager.command
def init_roles():
"""Create a default user roles."""
try:
create_role(ROLE_CLIENT)
create_role(ROLE_EMPLOYEE)
except RoleExistsException as e:
print(e.message)
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
manager.add_command('urls', ShowUrls())
manager.add_command('clean', Clean())
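# Typical invocations, assuming this module is saved as manage.py:
#     python manage.py db upgrade     # apply flask-migrate migrations
#     python manage.py create_admin   # seed the default admin user
#     python manage.py init_roles     # create the client/employee roles
#     python manage.py server         # start the development server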
if __name__ == '__main__':
manager.run()
|
{
"content_hash": "51f21e7cdd9da18c3ec2beceb0bb0877",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 82,
"avg_line_length": 32.19178082191781,
"alnum_prop": 0.7217021276595744,
"repo_name": "rafasis1986/EngineeringMidLevel",
"id": "03c3f1400764c6ed0776e883020110977a9afde0",
"size": "2396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2736"
},
{
"name": "HTML",
"bytes": "56955"
},
{
"name": "JavaScript",
"bytes": "8835"
},
{
"name": "Mako",
"bytes": "436"
},
{
"name": "Nginx",
"bytes": "1468"
},
{
"name": "Python",
"bytes": "125961"
},
{
"name": "Shell",
"bytes": "7203"
},
{
"name": "TypeScript",
"bytes": "71432"
}
],
"symlink_target": ""
}
|
from __future__ import division, absolute_import, print_function
import sys
import warnings
import functools
import numpy as np
from numpy.core.multiarray_tests import array_indexing
from itertools import product
from numpy.testing import *
try:
cdll = np.ctypeslib.load_library('multiarray', np.core.multiarray.__file__)
_HAS_CTYPE = True
except ImportError:
_HAS_CTYPE = False
class TestIndexing(TestCase):
def test_none_index(self):
# `None` index adds newaxis
a = np.array([1, 2, 3])
assert_equal(a[None], a[np.newaxis])
assert_equal(a[None].ndim, a.ndim + 1)
def test_empty_tuple_index(self):
# Empty tuple index creates a view
a = np.array([1, 2, 3])
assert_equal(a[()], a)
assert_(a[()].base is a)
a = np.array(0)
assert_(isinstance(a[()], np.int_))
# Regression, it needs to fall through integer and fancy indexing
        # cases, so we need the with statement to ignore the non-integer error.
with warnings.catch_warnings():
warnings.filterwarnings('ignore', '', DeprecationWarning)
a = np.array([1.])
assert_(isinstance(a[0.], np.float_))
a = np.array([np.array(1)], dtype=object)
assert_(isinstance(a[0.], np.ndarray))
def test_same_kind_index_casting(self):
# Indexes should be cast with same-kind and not safe, even if
# that is somewhat unsafe. So test various different code paths.
index = np.arange(5)
u_index = index.astype(np.uintp)
arr = np.arange(10)
assert_array_equal(arr[index], arr[u_index])
arr[u_index] = np.arange(5)
assert_array_equal(arr, np.arange(10))
arr = np.arange(10).reshape(5, 2)
assert_array_equal(arr[index], arr[u_index])
arr[u_index] = np.arange(5)[:,None]
assert_array_equal(arr, np.arange(5)[:,None].repeat(2, axis=1))
arr = np.arange(25).reshape(5, 5)
assert_array_equal(arr[u_index, u_index], arr[index, index])
def test_empty_fancy_index(self):
# Empty list index creates an empty array
# with the same dtype (but with weird shape)
a = np.array([1, 2, 3])
assert_equal(a[[]], [])
assert_equal(a[[]].dtype, a.dtype)
b = np.array([], dtype=np.intp)
        assert_equal(a[b], [])
        assert_equal(a[b].dtype, a.dtype)
b = np.array([])
assert_raises(IndexError, a.__getitem__, b)
def test_ellipsis_index(self):
        # Ellipsis index creates a view of the array
a = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
assert_equal(a[...], a)
        assert_(a[...].base is a)  # `a[...]` was `a` in numpy <1.9.
# Slicing with ellipsis can skip an
# arbitrary number of dimensions
assert_equal(a[0, ...], a[0])
assert_equal(a[0, ...], a[0,:])
assert_equal(a[..., 0], a[:, 0])
# Slicing with ellipsis always results
# in an array, not a scalar
assert_equal(a[0, ..., 1], np.array(2))
# Assignment with `(Ellipsis,)` on 0-d arrays
b = np.array(1)
b[(Ellipsis,)] = 2
assert_equal(b, 2)
def test_single_int_index(self):
# Single integer index selects one row
a = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
assert_equal(a[0], [1, 2, 3])
assert_equal(a[-1], [7, 8, 9])
# Index out of bounds produces IndexError
assert_raises(IndexError, a.__getitem__, 1<<30)
# Index overflow produces IndexError
assert_raises(IndexError, a.__getitem__, 1<<64)
def test_single_bool_index(self):
# Single boolean index
a = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
# Python boolean converts to integer
# These are being deprecated (and test in test_deprecations)
#assert_equal(a[True], a[1])
#assert_equal(a[False], a[0])
# Same with NumPy boolean scalar
        # Before DEPRECATE, this is an error (as always, but it warns about
        # the future change):
assert_raises(IndexError, a.__getitem__, np.array(True))
assert_raises(IndexError, a.__getitem__, np.array(False))
# After DEPRECATE, this behaviour can be enabled:
#assert_equal(a[np.array(True)], a[None])
        #assert_equal(a[np.array(False)], a[None][0:0])
def test_boolean_indexing_onedim(self):
# Indexing a 2-dimensional array with
# boolean array of length one
a = np.array([[ 0., 0., 0.]])
b = np.array([ True], dtype=bool)
assert_equal(a[b], a)
# boolean assignment
a[b] = 1.
assert_equal(a, [[1., 1., 1.]])
def test_boolean_assignment_value_mismatch(self):
# A boolean assignment should fail when the shape of the values
# cannot be broadcasted to the subscription. (see also gh-3458)
a = np.arange(4)
def f(a, v):
a[a > -1] = v
assert_raises(ValueError, f, a, [])
assert_raises(ValueError, f, a, [1, 2, 3])
assert_raises(ValueError, f, a[:1], [1, 2, 3])
def test_boolean_indexing_twodim(self):
# Indexing a 2-dimensional array with
# 2-dimensional boolean array
a = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
b = np.array([[ True, False, True],
[False, True, False],
[ True, False, True]])
assert_equal(a[b], [1, 3, 5, 7, 9])
assert_equal(a[b[1]], [[4, 5, 6]])
assert_equal(a[b[0]], a[b[2]])
# boolean assignment
a[b] = 0
assert_equal(a, [[0, 2, 0],
[4, 0, 6],
[0, 8, 0]])
def test_reverse_strides_and_subspace_bufferinit(self):
# This tests that the strides are not reversed for simple and
# subspace fancy indexing.
a = np.ones(5)
b = np.zeros(5, dtype=np.intp)[::-1]
c = np.arange(5)[::-1]
a[b] = c
# If the strides are not reversed, the 0 in the arange comes last.
assert_equal(a[0], 0)
        # This also tests that the subspace buffer is initialized:
a = np.ones((5, 2))
c = np.arange(10).reshape(5, 2)[::-1]
a[b, :] = c
assert_equal(a[0], [0, 1])
def test_reversed_strides_result_allocation(self):
# Test a bug when calculating the output strides for a result array
# when the subspace size was 1 (and test other cases as well)
a = np.arange(10)[:, None]
i = np.arange(10)[::-1]
assert_array_equal(a[i], a[i.copy('C')])
        a = np.arange(20).reshape(-1, 2)
        assert_array_equal(a[i], a[i.copy('C')])
def test_uncontiguous_subspace_assignment(self):
        # During development there was a bug activating skip logic
# based on ndim instead of size.
a = np.full((3, 4, 2), -1)
b = np.full((3, 4, 2), -1)
a[[0, 1]] = np.arange(2 * 4 * 2).reshape(2, 4, 2).T
b[[0, 1]] = np.arange(2 * 4 * 2).reshape(2, 4, 2).T.copy()
assert_equal(a, b)
def test_too_many_fancy_indices_special_case(self):
# Just documents behaviour, this is a small limitation.
a = np.ones((1,) * 32) # 32 is NPY_MAXDIMS
assert_raises(IndexError, a.__getitem__, (np.array([0]),) * 32)
def test_scalar_array_bool(self):
        # NumPy bools can be used as a boolean index (Python ones cannot, yet)
a = np.array(1)
assert_equal(a[np.bool_(True)], a[np.array(True)])
assert_equal(a[np.bool_(False)], a[np.array(False)])
# After deprecating bools as integers:
#a = np.array([0,1,2])
#assert_equal(a[True, :], a[None, :])
#assert_equal(a[:, True], a[:, None])
#
#assert_(not np.may_share_memory(a, a[True, :]))
def test_everything_returns_views(self):
# Before `...` would return a itself.
a = np.arange(5)
assert_(a is not a[()])
assert_(a is not a[...])
assert_(a is not a[:])
def test_broaderrors_indexing(self):
a = np.zeros((5, 5))
assert_raises(IndexError, a.__getitem__, ([0, 1], [0, 1, 2]))
assert_raises(IndexError, a.__setitem__, ([0, 1], [0, 1, 2]), 0)
def test_trivial_fancy_out_of_bounds(self):
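        # Even the fast path for trivial (1-D contiguous intp) fancy
        # indexing must bounds-check every index.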
a = np.zeros(5)
ind = np.ones(20, dtype=np.intp)
ind[-1] = 10
assert_raises(IndexError, a.__getitem__, ind)
assert_raises(IndexError, a.__setitem__, ind, 0)
ind = np.ones(20, dtype=np.intp)
ind[0] = 11
assert_raises(IndexError, a.__getitem__, ind)
assert_raises(IndexError, a.__setitem__, ind, 0)
def test_nonbaseclass_values(self):
class SubClass(np.ndarray):
def __array_finalize__(self, old):
# Have array finalize do funny things
self.fill(99)
a = np.zeros((5, 5))
s = a.copy().view(type=SubClass)
s.fill(1)
a[[0, 1, 2, 3, 4], :] = s
assert_((a == 1).all())
# Subspace is last, so transposing might want to finalize
a[:, [0, 1, 2, 3, 4]] = s
assert_((a == 1).all())
a.fill(0)
a[...] = s
assert_((a == 1).all())
def test_memory_order(self):
# This is not necessary to preserve. Memory layouts for
# more complex indices are not as simple.
a = np.arange(10)
b = np.arange(10).reshape(5,2).T
assert_(a[b].flags.f_contiguous)
# Takes a different implementation branch:
a = a.reshape(-1, 1)
assert_(a[b, 0].flags.f_contiguous)
def test_scalar_return_type(self):
        # Full scalar indices should return scalars, and object
        # arrays should not call PyArray_Return on their items.
class Zero(object):
# The most basic valid indexing
def __index__(self):
return 0
z = Zero()
class ArrayLike(object):
# Simple array, should behave like the array
def __array__(self):
return np.array(0)
a = np.zeros(())
assert_(isinstance(a[()], np.float_))
a = np.zeros(1)
assert_(isinstance(a[z], np.float_))
a = np.zeros((1, 1))
assert_(isinstance(a[z, np.array(0)], np.float_))
assert_(isinstance(a[z, ArrayLike()], np.float_))
# And object arrays do not call it too often:
b = np.array(0)
a = np.array(0, dtype=object)
a[()] = b
assert_(isinstance(a[()], np.ndarray))
a = np.array([b, None])
assert_(isinstance(a[z], np.ndarray))
a = np.array([[b, None]])
assert_(isinstance(a[z, np.array(0)], np.ndarray))
assert_(isinstance(a[z, ArrayLike()], np.ndarray))
def test_small_regressions(self):
# Reference count of intp for index checks
a = np.array([0])
refcount = sys.getrefcount(np.dtype(np.intp))
        # item setting always checks indices in a separate function:
a[np.array([0], dtype=np.intp)] = 1
a[np.array([0], dtype=np.uint8)] = 1
assert_raises(IndexError, a.__setitem__,
np.array([1], dtype=np.intp), 1)
assert_raises(IndexError, a.__setitem__,
np.array([1], dtype=np.uint8), 1)
assert_equal(sys.getrefcount(np.dtype(np.intp)), refcount)
def test_unaligned(self):
v = (np.zeros(64, dtype=np.int8) + ord('a'))[1:-7]
d = v.view(np.dtype("S8"))
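        # Slicing one byte off the front makes the 8-byte string view
        # start at an odd address, so `d` is unaligned.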
# unaligned source
x = (np.zeros(16, dtype=np.int8) + ord('a'))[1:-7]
x = x.view(np.dtype("S8"))
x[...] = np.array("b" * 8, dtype="S")
b = np.arange(d.size)
        # trivial
assert_equal(d[b], d)
d[b] = x
# nontrivial
# unaligned index array
b = np.zeros(d.size + 1).view(np.int8)[1:-(np.intp(0).itemsize - 1)]
b = b.view(np.intp)[:d.size]
b[...] = np.arange(d.size)
assert_equal(d[b.astype(np.int16)], d)
d[b.astype(np.int16)] = x
# boolean
d[b % 2 == 0]
d[b % 2 == 0] = x[::2]
class TestFieldIndexing(TestCase):
def test_scalar_return_type(self):
# Field access on an array should return an array, even if it
# is 0-d.
a = np.zeros((), [('a','f8')])
assert_(isinstance(a['a'], np.ndarray))
assert_(isinstance(a[['a']], np.ndarray))
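        # Note (added for clarity): a['a'] selects a single field while
        # a[['a']] selects a list of fields; both must stay ndarrays even
        # for 0-d structured arrays.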
class TestBroadcastedAssignments(TestCase):
def assign(self, a, ind, val):
a[ind] = val
return a
def test_prepending_ones(self):
a = np.zeros((3, 2))
a[...] = np.ones((1, 3, 2))
# Fancy with subspace with and without transpose
a[[0, 1, 2], :] = np.ones((1, 3, 2))
a[:, [0, 1]] = np.ones((1, 3, 2))
# Fancy without subspace (with broadcasting)
a[[[0], [1], [2]], [0, 1]] = np.ones((1, 3, 2))
def test_prepend_not_one(self):
assign = self.assign
s_ = np.s_
a = np.zeros(5)
# Too large and not only ones.
assert_raises(ValueError, assign, a, s_[...], np.ones((2, 1)))
assert_raises(ValueError, assign, a, s_[[1, 2, 3],], np.ones((2, 1)))
assert_raises(ValueError, assign, a, s_[[[1], [2]],], np.ones((2,2,1)))
def test_simple_broadcasting_errors(self):
assign = self.assign
s_ = np.s_
a = np.zeros((5, 1))
assert_raises(ValueError, assign, a, s_[...], np.zeros((5, 2)))
assert_raises(ValueError, assign, a, s_[...], np.zeros((5, 0)))
assert_raises(ValueError, assign, a, s_[:, [0]], np.zeros((5, 2)))
assert_raises(ValueError, assign, a, s_[:, [0]], np.zeros((5, 0)))
assert_raises(ValueError, assign, a, s_[[0], :], np.zeros((2, 1)))
def test_index_is_larger(self):
# Simple case of fancy index broadcasting of the index.
a = np.zeros((5, 5))
a[[[0], [1], [2]], [0, 1, 2]] = [2, 3, 4]
assert_((a[:3, :3] == [2, 3, 4]).all())
def test_broadcast_subspace(self):
a = np.zeros((100, 100))
v = np.arange(100)[:,None]
b = np.arange(100)[::-1]
a[b] = v
assert_((a[::-1] == v).all())
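        # The (100, 1) column vector broadcasts across the subspace, so
        # each selected row a[b[i]] is filled with the constant v[i].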
class TestSubclasses(TestCase):
def test_basic(self):
class SubClass(np.ndarray):
pass
s = np.arange(5).view(SubClass)
assert_(isinstance(s[:3], SubClass))
assert_(s[:3].base is s)
assert_(isinstance(s[[0, 1, 2]], SubClass))
assert_(isinstance(s[s > 0], SubClass))
def test_matrix_fancy(self):
        # The matrix class messes with the shape. While this is always
        # weird (getitem is not used, matrix has no setitem and knows
        # nothing about fancy indexing), this tests gh-3110.
m = np.matrix([[1, 2], [3, 4]])
assert_(isinstance(m[[0,1,0], :], np.matrix))
        # gh-3110. The transpose is currently needed because matrices
        # do *not* support dimension fixing for fancy indexing correctly.
x = np.asmatrix(np.arange(50).reshape(5,10))
assert_equal(x[:2, np.array(-1)], x[:2, -1].T)
def test_finalize_gets_full_info(self):
# Array finalize should be called on the filled array.
class SubClass(np.ndarray):
def __array_finalize__(self, old):
self.finalize_status = np.array(self)
self.old = old
s = np.arange(10).view(SubClass)
new_s = s[:3]
assert_array_equal(new_s.finalize_status, new_s)
assert_array_equal(new_s.old, s)
new_s = s[[0,1,2,3]]
assert_array_equal(new_s.finalize_status, new_s)
assert_array_equal(new_s.old, s)
new_s = s[s > 0]
assert_array_equal(new_s.finalize_status, new_s)
assert_array_equal(new_s.old, s)
class TestFancyIndexingEquivalence(TestCase):
def test_object_assign(self):
# Check that the field and object special case using copyto is active.
# The right hand side cannot be converted to an array here.
a = np.arange(5, dtype=object)
b = a.copy()
a[:3] = [1, (1,2), 3]
b[[0, 1, 2]] = [1, (1,2), 3]
assert_array_equal(a, b)
        # test the same for subspace fancy indexing
b = np.arange(5, dtype=object)[None, :]
b[[0], :3] = [[1, (1,2), 3]]
assert_array_equal(a, b[0])
def test_cast_equivalence(self):
# Yes, normal slicing uses unsafe casting.
a = np.arange(5)
b = a.copy()
a[:3] = np.array(['2', '-3', '-1'])
b[[0, 2, 1]] = np.array(['2', '-1', '-3'])
assert_array_equal(a, b)
# test the same for subspace fancy indexing
b = np.arange(5)[None, :]
b[[0], :3] = np.array([['2', '-3', '-1']])
assert_array_equal(a, b[0])
class TestMultiIndexingAutomated(TestCase):
"""
    These tests use code to mimic the C-code indexing for selection.
    NOTE: * This still lacks tests for complex item setting.
          * If you change the behavior of indexing, you might want to
            modify these tests to try more combinations.
          * Behavior was written to match numpy version 1.8. (though a
            first version matched 1.7.)
          * Only tuple indices are supported by the mimicking code.
            (and tested as of writing this)
          * Error types should match most of the time as long as there
            is only one error. For multiple errors, what gets raised
            will usually not be the same one. They are *not* tested.
"""
def setUp(self):
self.a = np.arange(np.prod([3, 1, 5, 6])).reshape(3, 1, 5, 6)
self.b = np.empty((3, 0, 5, 6))
self.complex_indices = ['skip', Ellipsis,
0,
# Boolean indices, up to 3-d for some special cases of eating up
# dimensions, also need to test all False
np.array(False),
np.array([True, False, False]),
np.array([[True, False], [False, True]]),
np.array([[[False, False], [False, False]]]),
# Some slices:
slice(-5, 5, 2),
slice(1, 1, 100),
slice(4, -1, -2),
slice(None, None, -3),
# Some Fancy indexes:
np.empty((0, 1, 1), dtype=np.intp), # empty broadcastable
np.array([0, 1, -2]),
np.array([[2], [0], [1]]),
np.array([[0, -1], [0, 1]], dtype=np.dtype('intp').newbyteorder()),
np.array([2, -1], dtype=np.int8),
np.zeros([1]*31, dtype=int), # trigger too large array.
np.array([0., 1.])] # invalid datatype
# Some simpler indices that still cover a bit more
self.simple_indices = [Ellipsis, None, -1, [1], np.array([True]), 'skip']
# Very simple ones to fill the rest:
self.fill_indices = [slice(None, None), 0]
def _get_multi_index(self, arr, indices):
"""Mimic multi dimensional indexing.
Parameters
----------
arr : ndarray
Array to be indexed.
indices : tuple of index objects
Returns
-------
out : ndarray
An array equivalent to the indexing operation (but always a copy).
`arr[indices]` should be identical.
        no_copy : bool
            Whether the indexing operation could avoid a copy. If this is
            `True`, `np.may_share_memory(arr, arr[indices])` should be `True`
            (with some exceptions for scalars and possibly 0-d arrays).
Notes
-----
        While the function mostly mimics normal indexing, the errors it
        raises generally do not match those of normal indexing.
"""
in_indices = list(indices)
indices = []
# if False, this is a fancy or boolean index
no_copy = True
# number of fancy/scalar indexes that are not consecutive
num_fancy = 0
# number of dimensions indexed by a "fancy" index
fancy_dim = 0
# NOTE: This is a funny twist (and probably OK to change).
        # The boolean array has illegal indices, but this is
        # allowed if the broadcast fancy indices are 0-sized.
# This variable is to catch that case.
error_unless_broadcast_to_empty = False
# We need to handle Ellipsis and make arrays from indices, also
# check if this is fancy indexing (set no_copy).
ndim = 0
ellipsis_pos = None # define here mostly to replace all but first.
for i, indx in enumerate(in_indices):
if indx is None:
continue
if isinstance(indx, np.ndarray) and indx.dtype == bool:
no_copy = False
if indx.ndim == 0:
raise IndexError
# boolean indices can have higher dimensions
ndim += indx.ndim
fancy_dim += indx.ndim
continue
if indx is Ellipsis:
if ellipsis_pos is None:
ellipsis_pos = i
continue # do not increment ndim counter
raise IndexError
if isinstance(indx, slice):
ndim += 1
continue
if not isinstance(indx, np.ndarray):
# This could be open for changes in numpy.
# numpy should maybe raise an error if casting to intp
# is not safe. It rejects np.array([1., 2.]) but not
                # [1., 2.] as an index (same for e.g. np.take).
# (Note the importance of empty lists if changing this here)
indx = np.array(indx, dtype=np.intp)
in_indices[i] = indx
elif indx.dtype.kind != 'b' and indx.dtype.kind != 'i':
raise IndexError('arrays used as indices must be of integer (or boolean) type')
if indx.ndim != 0:
no_copy = False
ndim += 1
fancy_dim += 1
if arr.ndim - ndim < 0:
            # We can't take more dimensions than we have, not even for 0-d
            # arrays, since a[()] makes sense but a[(),] does not. We will
            # raise an error later on, unless a broadcasting error occurs first.
raise IndexError
        if ndim == 0 and None not in in_indices:
            # We have no indices or only an Ellipsis; this is legal.
return arr.copy(), no_copy
if ellipsis_pos is not None:
in_indices[ellipsis_pos:ellipsis_pos+1] = [slice(None, None)] * (arr.ndim - ndim)
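            # e.g. for a 4-d array, (0, Ellipsis, 0) becomes
            # (0, slice(None), slice(None), 0).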
for ax, indx in enumerate(in_indices):
if isinstance(indx, slice):
                # convert to an index array anyway:
indx = np.arange(*indx.indices(arr.shape[ax]))
indices.append(['s', indx])
continue
elif indx is None:
# this is like taking a slice with one element from a new axis:
indices.append(['n', np.array([0], dtype=np.intp)])
arr = arr.reshape((arr.shape[:ax] + (1,) + arr.shape[ax:]))
continue
if isinstance(indx, np.ndarray) and indx.dtype == bool:
# This may be open for improvement in numpy.
# numpy should probably cast boolean lists to boolean indices
# instead of intp!
                # NumPy supports a boolean index with
                # non-matching shape as long as the True values are not
                # out of bounds. NumPy should maybe not allow this
                # (at least not for arrays larger than the original one).
try:
flat_indx = np.ravel_multi_index(np.nonzero(indx),
arr.shape[ax:ax+indx.ndim], mode='raise')
                except Exception:
error_unless_broadcast_to_empty = True
# fill with 0s instead, and raise error later
flat_indx = np.array([0]*indx.sum(), dtype=np.intp)
# concatenate axis into a single one:
if indx.ndim != 0:
arr = arr.reshape((arr.shape[:ax]
+ (np.prod(arr.shape[ax:ax+indx.ndim]),)
+ arr.shape[ax+indx.ndim:]))
indx = flat_indx
else:
                    # This could be changed; a 0-d boolean index can
                    # make sense (even outside the 0-d indexed array case).
                    # Note that originally this could be interpreted as an
                    # integer in the full-integer special case.
raise IndexError
else:
# If the index is a singleton, the bounds check is done
# before the broadcasting. This used to be different in <1.9
if indx.ndim == 0:
if indx >= arr.shape[ax] or indx < -arr.shape[ax]:
raise IndexError
if indx.ndim == 0:
                # The index is a scalar. This check used to be twofold: if
                # fancy indexing was active, the check was done later, possibly
                # after broadcasting it away (1.7 or earlier). Now it is
                # always done here.
if indx >= arr.shape[ax] or indx < - arr.shape[ax]:
raise IndexError
if len(indices) > 0 and indices[-1][0] == 'f' and ax != ellipsis_pos:
# NOTE: There could still have been a 0-sized Ellipsis
# between them. Checked that with ellipsis_pos.
indices[-1].append(indx)
else:
# We have a fancy index that is not after an existing one.
                # NOTE: A 0-d array triggers this as well, while one
                # may expect it not to, since a scalar would not be
                # considered fancy indexing.
num_fancy += 1
indices.append(['f', indx])
if num_fancy > 1 and not no_copy:
            # We have to move all fancy indexes to the front.
new_indices = indices[:]
axes = list(range(arr.ndim))
fancy_axes = []
new_indices.insert(0, ['f'])
ni = 0
ai = 0
for indx in indices:
ni += 1
if indx[0] == 'f':
new_indices[0].extend(indx[1:])
del new_indices[ni]
ni -= 1
for ax in range(ai, ai + len(indx[1:])):
fancy_axes.append(ax)
axes.remove(ax)
ai += len(indx) - 1 # axis we are at
indices = new_indices
# and now we need to transpose arr:
arr = arr.transpose(*(fancy_axes + axes))
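            # e.g. a[ind1, :, ind2] with separated 1-d fancy indices puts
            # the broadcast fancy dimension first in the result.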
# We only have one 'f' index now and arr is transposed accordingly.
# Now handle newaxes by reshaping...
ax = 0
for indx in indices:
if indx[0] == 'f':
if len(indx) == 1:
continue
# First of all, reshape arr to combine fancy axes into one:
orig_shape = arr.shape
orig_slice = orig_shape[ax:ax + len(indx[1:])]
arr = arr.reshape((arr.shape[:ax]
+ (np.prod(orig_slice).astype(int),)
+ arr.shape[ax + len(indx[1:]):]))
# Check if broadcasting works
if len(indx[1:]) != 1:
res = np.broadcast(*indx[1:]) # raises ValueError...
else:
res = indx[1]
                # Unfortunately the indices might be out of bounds, so check
                # that first and use mode='wrap' afterwards. However, only if
                # there are any indices...
if res.size != 0:
if error_unless_broadcast_to_empty:
raise IndexError
for _indx, _size in zip(indx[1:], orig_slice):
if _indx.size == 0:
continue
if np.any(_indx >= _size) or np.any(_indx < -_size):
raise IndexError
if len(indx[1:]) == len(orig_slice):
                    if np.prod(orig_slice) == 0:
                        # Workaround for a crash or IndexError with 'wrap'
                        # in some 0-sized cases.
                        try:
                            mi = np.ravel_multi_index(indx[1:], orig_slice, mode='raise')
                        except Exception:
                            # This happens with 0-sized orig_slice (sometimes?);
                            # here it is a ValueError, but indexing gives an
                            # IndexError:
                            raise IndexError('invalid index into 0-sized')
else:
mi = np.ravel_multi_index(indx[1:], orig_slice, mode='wrap')
else:
# Maybe never happens...
raise ValueError
arr = arr.take(mi.ravel(), axis=ax)
arr = arr.reshape((arr.shape[:ax]
+ mi.shape
+ arr.shape[ax+1:]))
ax += mi.ndim
continue
# If we are here, we have a 1D array for take:
arr = arr.take(indx[1], axis=ax)
ax += 1
return arr, no_copy
def _check_multi_index(self, arr, index):
"""Check a multi index item getting and simple setting.
Parameters
----------
arr : ndarray
Array to be indexed, must be a reshaped arange.
index : tuple of indexing objects
Index being tested.
"""
# Test item getting
try:
mimic_get, no_copy = self._get_multi_index(arr, index)
        except Exception:
prev_refcount = sys.getrefcount(arr)
assert_raises(Exception, arr.__getitem__, index)
assert_raises(Exception, arr.__setitem__, index, 0)
assert_equal(prev_refcount, sys.getrefcount(arr))
return
self._compare_index_result(arr, index, mimic_get, no_copy)
def _check_single_index(self, arr, index):
"""Check a single index item getting and simple setting.
Parameters
----------
arr : ndarray
Array to be indexed, must be an arange.
index : indexing object
Index being tested. Must be a single index and not a tuple
of indexing objects (see also `_check_multi_index`).
"""
try:
mimic_get, no_copy = self._get_multi_index(arr, (index,))
        except Exception:
prev_refcount = sys.getrefcount(arr)
assert_raises(Exception, arr.__getitem__, index)
assert_raises(Exception, arr.__setitem__, index, 0)
assert_equal(prev_refcount, sys.getrefcount(arr))
return
self._compare_index_result(arr, index, mimic_get, no_copy)
def _compare_index_result(self, arr, index, mimic_get, no_copy):
"""Compare mimicked result to indexing result.
"""
arr = arr.copy()
indexed_arr = arr[index]
assert_array_equal(indexed_arr, mimic_get)
        # Check if we got a view, unless it is a 0-sized or 0-d array
        # (then it is not a view, and that does not matter).
if indexed_arr.size != 0 and indexed_arr.ndim != 0:
assert_(np.may_share_memory(indexed_arr, arr) == no_copy)
# Check reference count of the original array
if no_copy:
# refcount increases by one:
assert_equal(sys.getrefcount(arr), 3)
else:
assert_equal(sys.getrefcount(arr), 2)
# Test non-broadcast setitem:
b = arr.copy()
b[index] = mimic_get + 1000
if b.size == 0:
return # nothing to compare here...
if no_copy and indexed_arr.ndim != 0:
# change indexed_arr in-place to manipulate original:
indexed_arr += 1000
assert_array_equal(arr, b)
return
# Use the fact that the array is originally an arange:
arr.flat[indexed_arr.ravel()] += 1000
assert_array_equal(arr, b)
def test_boolean(self):
a = np.array(5)
assert_equal(a[np.array(True)], 5)
a[np.array(True)] = 1
assert_equal(a, 1)
        # NOTE: This is different from normal broadcasting: arr[boolean_array]
        # works like a multi-index, which means it is aligned to the left.
        # This is probably correct for consistency with arr[boolean_array,];
        # also, no broadcasting is done at all.
self._check_multi_index(self.a, (np.zeros_like(self.a, dtype=bool),))
self._check_multi_index(self.a, (np.zeros_like(self.a, dtype=bool)[..., 0],))
self._check_multi_index(self.a, (np.zeros_like(self.a, dtype=bool)[None, ...],))
def test_multidim(self):
# Automatically test combinations with complex indexes on 2nd (or 1st)
# spot and the simple ones in one other spot.
with warnings.catch_warnings():
            # This is so that np.array(True) is not accepted in a full-integer
            # index, when running the file separately.
warnings.filterwarnings('error', '', DeprecationWarning)
for simple_pos in [0, 2, 3]:
tocheck = [self.fill_indices, self.complex_indices,
self.fill_indices, self.fill_indices]
tocheck[simple_pos] = self.simple_indices
for index in product(*tocheck):
index = tuple(i for i in index if i != 'skip')
self._check_multi_index(self.a, index)
self._check_multi_index(self.b, index)
# Check very simple item getting:
self._check_multi_index(self.a, (0, 0, 0, 0))
self._check_multi_index(self.b, (0, 0, 0, 0))
# Also check (simple cases of) too many indices:
assert_raises(IndexError, self.a.__getitem__, (0, 0, 0, 0, 0))
assert_raises(IndexError, self.a.__setitem__, (0, 0, 0, 0, 0), 0)
assert_raises(IndexError, self.a.__getitem__, (0, 0, [1], 0, 0))
assert_raises(IndexError, self.a.__setitem__, (0, 0, [1], 0, 0), 0)
def test_1d(self):
a = np.arange(10)
with warnings.catch_warnings():
warnings.filterwarnings('error', '', DeprecationWarning)
for index in self.complex_indices:
self._check_single_index(a, index)
class TestCApiAccess(TestCase):
def test_getitem(self):
subscript = functools.partial(array_indexing, 0)
# 0-d arrays don't work:
assert_raises(IndexError, subscript, np.ones(()), 0)
# Out of bound values:
assert_raises(IndexError, subscript, np.ones(10), 11)
assert_raises(IndexError, subscript, np.ones(10), -11)
assert_raises(IndexError, subscript, np.ones((10, 10)), 11)
assert_raises(IndexError, subscript, np.ones((10, 10)), -11)
a = np.arange(10)
assert_array_equal(a[4], subscript(a, 4))
a = a.reshape(5, 2)
assert_array_equal(a[-4], subscript(a, -4))
def test_setitem(self):
assign = functools.partial(array_indexing, 1)
# Deletion is impossible:
assert_raises(ValueError, assign, np.ones(10), 0)
# 0-d arrays don't work:
assert_raises(IndexError, assign, np.ones(()), 0, 0)
# Out of bound values:
assert_raises(IndexError, assign, np.ones(10), 11, 0)
assert_raises(IndexError, assign, np.ones(10), -11, 0)
assert_raises(IndexError, assign, np.ones((10, 10)), 11, 0)
assert_raises(IndexError, assign, np.ones((10, 10)), -11, 0)
a = np.arange(10)
assign(a, 4, 10)
assert_(a[4] == 10)
a = a.reshape(5, 2)
assign(a, 4, 10)
assert_array_equal(a[-1], [10, 10])
if __name__ == "__main__":
run_module_suite()
|
{
"content_hash": "e268c60a5bd107bd88f467e20c301ba6",
"timestamp": "",
"source": "github",
"line_count": 964,
"max_line_length": 95,
"avg_line_length": 37.78423236514523,
"alnum_prop": 0.5255051614320229,
"repo_name": "techtonik/numpy",
"id": "6b0b0a0b52f24cfb938755db7ee769e106e103f0",
"size": "36424",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "numpy/core/tests/test_indexing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from contourpy import contour_generator
from .bench_base import BenchBase
from .util_bench import corner_masks, datasets, fill_types, problem_sizes, thread_counts
class BenchFilledThreaded(BenchBase):
params = (
["threaded"], datasets(), fill_types(), corner_masks(), problem_sizes(), [40],
thread_counts())
param_names = (
"name", "dataset", "fill_type", "corner_mask", "n", "total_chunk_count", "thread_count")
def setup(self, name, dataset, fill_type, corner_mask, n, total_chunk_count, thread_count):
self.set_xyz_and_levels(dataset, n, corner_mask != "no mask")
def time_filled_threaded(
self, name, dataset, fill_type, corner_mask, n, total_chunk_count, thread_count):
if corner_mask == "no mask":
corner_mask = False
cont_gen = contour_generator(
self.x, self.y, self.z, name=name, fill_type=fill_type, corner_mask=corner_mask,
total_chunk_count=total_chunk_count, thread_count=thread_count)
for i in range(len(self.levels)-1):
cont_gen.filled(self.levels[i], self.levels[i+1])
|
{
"content_hash": "fda4cc83f59571416e5299394e4c4023",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 96,
"avg_line_length": 44.88,
"alnum_prop": 0.642602495543672,
"repo_name": "contourpy/contourpy",
"id": "5b1995ebb64f90884ea41a433bd7fa756b7944ac",
"size": "1122",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "benchmarks/benchmarks/bench_filled_threaded.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "319913"
},
{
"name": "Python",
"bytes": "205756"
}
],
"symlink_target": ""
}
|
"""Tests for the AVM Fritz!Box integration."""
from __future__ import annotations
from typing import Any
from unittest.mock import Mock
from homeassistant.components.fritzbox.const import DOMAIN
from homeassistant.core import HomeAssistant
from .const import (
CONF_FAKE_AIN,
CONF_FAKE_MANUFACTURER,
CONF_FAKE_NAME,
CONF_FAKE_PRODUCTNAME,
)
from tests.common import MockConfigEntry
async def setup_config_entry(
hass: HomeAssistant,
data: dict[str, Any],
unique_id: str = "any",
    device: Mock | None = None,
    fritz: Mock | None = None,
) -> bool:
"""Do setup of a MockConfigEntry."""
entry = MockConfigEntry(
domain=DOMAIN,
data=data,
unique_id=unique_id,
)
entry.add_to_hass(hass)
if device is not None and fritz is not None:
fritz().get_devices.return_value = [device]
result = await hass.config_entries.async_setup(entry.entry_id)
if device is not None:
await hass.async_block_till_done()
return result
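# Hypothetical usage sketch (fixture names and data keys are illustrative
# only, not part of this module):
#
#     device = FritzDeviceSwitchMock()
#     assert await setup_config_entry(
#         hass, data={"host": "10.0.0.1"}, device=device, fritz=fritz
#     )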
class FritzDeviceBaseMock(Mock):
"""base mock of a AVM Fritz!Box binary sensor device."""
ain = CONF_FAKE_AIN
manufacturer = CONF_FAKE_MANUFACTURER
name = CONF_FAKE_NAME
productname = CONF_FAKE_PRODUCTNAME
class FritzDeviceBinarySensorMock(FritzDeviceBaseMock):
"""Mock of a AVM Fritz!Box binary sensor device."""
alert_state = "fake_state"
battery_level = 23
fw_version = "1.2.3"
has_alarm = True
has_powermeter = False
has_switch = False
has_temperature_sensor = False
has_thermostat = False
present = True
class FritzDeviceClimateMock(FritzDeviceBaseMock):
"""Mock of a AVM Fritz!Box climate device."""
actual_temperature = 18.0
alert_state = "fake_state"
battery_level = 23
battery_low = True
comfort_temperature = 22.0
device_lock = "fake_locked_device"
eco_temperature = 16.0
fw_version = "1.2.3"
has_alarm = False
has_powermeter = False
has_switch = False
has_temperature_sensor = False
has_thermostat = True
holiday_active = "fake_holiday"
lock = "fake_locked"
present = True
summer_active = "fake_summer"
target_temperature = 19.5
window_open = "fake_window"
class FritzDeviceSensorMock(FritzDeviceBaseMock):
"""Mock of a AVM Fritz!Box sensor device."""
battery_level = 23
device_lock = "fake_locked_device"
fw_version = "1.2.3"
has_alarm = False
has_powermeter = False
has_switch = False
has_temperature_sensor = True
has_thermostat = False
lock = "fake_locked"
present = True
temperature = 1.23
class FritzDeviceSwitchMock(FritzDeviceBaseMock):
"""Mock of a AVM Fritz!Box switch device."""
battery_level = None
device_lock = "fake_locked_device"
energy = 1234
fw_version = "1.2.3"
has_alarm = False
has_powermeter = True
has_switch = True
has_temperature_sensor = True
has_thermostat = False
switch_state = "fake_state"
lock = "fake_locked"
power = 5678
present = True
temperature = 1.23
|
{
"content_hash": "400db295b3c7afda02295d72b0d67495",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 66,
"avg_line_length": 25.33884297520661,
"alnum_prop": 0.6634050880626223,
"repo_name": "sander76/home-assistant",
"id": "da6bd982d9d1015bef9ef52e778e396da12ef0a5",
"size": "3066",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/fritzbox/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "36548768"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
"""
Needed for adding an app to a project.
Created on Apr 10, 2014
@author: CarolinaFernandez
"""
|
{
"content_hash": "253f7dd99498fbffa5c9731593eaef31",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 36,
"avg_line_length": 16.166666666666668,
"alnum_prop": 0.7216494845360825,
"repo_name": "ict-felix/stack",
"id": "a814eb20eb6d234fe9c4605917cd16bf5d3ff618",
"size": "97",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vt_manager/src/python/vt_manager/common/commands/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "337811"
},
{
"name": "Elixir",
"bytes": "17243"
},
{
"name": "Emacs Lisp",
"bytes": "1098"
},
{
"name": "Groff",
"bytes": "1735"
},
{
"name": "HTML",
"bytes": "660363"
},
{
"name": "Java",
"bytes": "18362"
},
{
"name": "JavaScript",
"bytes": "838960"
},
{
"name": "Makefile",
"bytes": "11581"
},
{
"name": "Perl",
"bytes": "5416"
},
{
"name": "Python",
"bytes": "8073455"
},
{
"name": "Shell",
"bytes": "259720"
}
],
"symlink_target": ""
}
|