code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
# Pinned SHA-256 digests for prebuilt binaries mirrored in S3, keyed by the
# exact download URL. Downloaders look up the URL here and verify the
# artifact's checksum before trusting it.
S3_SHA256_HASHES = {
    "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.26.0/bazelisk-darwin": "a8c966e9ae6983b1e1c0116313ff523a862076d81b20add23da825b58610c1b3",
    "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.26.0/bazelisk-darwin-amd64": "5c77f33f91dd3df119d192175100cb5b50302eb7ee37859cbab79e10a76ccce8",
    "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.26.0/bazelisk-darwin-arm64": "d1ca9911cc19e1f17483f93956908334f2b7f3dd13f20853417b68fc3c3eb370",
    "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.26.0/bazelisk-linux-amd64": "6539c12842ad76966f3d493e8f80d67caa84ec4a000e220d5459833c967c12bc",
    "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.26.0/bazelisk-linux-arm64": "54f85ef4c23393f835252cc882e5fea596e8ef3c4c2056b059f8067cd19f0351",
    "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.26.0/bazelisk-windows-amd64.exe": "023734f33ed6b9c6d65468fe20bb2c5fb32473ccb8aca2fc5bf1521e61ce1622",
    "https://mdb-build-public.s3.amazonaws.com/bazelisk-binaries/v1.26.0/bazelisk-windows-arm64.exe": "99ea5997df128b33c34ba93bad26882af4aabf8c26d50e704b9b651d291fae76",
    "https://mdb-build-public.s3.amazonaws.com/buildozer/v7.3.1/buildozer-darwin-amd64": "854c9583efc166602276802658cef3f224d60898cfaa60630b33d328db3b0de2",
    "https://mdb-build-public.s3.amazonaws.com/buildozer/v7.3.1/buildozer-darwin-arm64": "31b1bfe20d7d5444be217af78f94c5c43799cdf847c6ce69794b7bf3319c5364",
    "https://mdb-build-public.s3.amazonaws.com/buildozer/v7.3.1/buildozer-linux-amd64": "3305e287b3fcc68b9a35fd8515ee617452cd4e018f9e6886b6c7cdbcba8710d4",
    "https://mdb-build-public.s3.amazonaws.com/buildozer/v7.3.1/buildozer-linux-arm64": "0b5a2a717ac4fc911e1fec8d92af71dbb4fe95b10e5213da0cc3d56cea64a328",
    "https://mdb-build-public.s3.amazonaws.com/buildozer/v7.3.1/buildozer-windows-amd64.exe": "58d41ce53257c5594c9bc86d769f580909269f68de114297f46284fbb9023dcf",
    "https://mdb-build-public.s3.amazonaws.com/ruff/0.6.9/ruff-aarch64-apple-darwin.tar.gz": "b94562393a4bf23f1a48521f5495a8e48de885b7c173bd7ea8206d6d09921633",
    "https://mdb-build-public.s3.amazonaws.com/ruff/0.6.9/ruff-aarch64-unknown-linux-musl.tar.gz": "73df3729a3381d0918e4640aac4b2653c542f74c7b7843dee8310e2c877e6f2e",
    "https://mdb-build-public.s3.amazonaws.com/ruff/0.6.9/ruff-powerpc64le-unknown-linux-gnu.tar.gz": "6eedb853553ee52309e9519af775b3359a12227ec342404b6a033308cdd48b1b",
    "https://mdb-build-public.s3.amazonaws.com/ruff/0.6.9/ruff-s390x-unknown-linux-gnu.tar.gz": "b4f93af861c1b3e1956df08e0d9f20b7e55cd7beb37c9df09b659908e920ebe6",
    "https://mdb-build-public.s3.amazonaws.com/ruff/0.6.9/ruff-x86_64-apple-darwin.tar.gz": "34aa37643e30dcb81a3c0e011c3a8df552465ea7580ba92ca727a3b7c6de25d1",
    "https://mdb-build-public.s3.amazonaws.com/ruff/0.6.9/ruff-x86_64-pc-windows-msvc.zip": "9d10e1282c5f695b2130cf593d55e37266513fc6d497edc4a30a6ed6d8ba4067",
    "https://mdb-build-public.s3.amazonaws.com/ruff/0.6.9/ruff-x86_64-unknown-linux-musl.tar.gz": "39a1cd878962ebc88322b4f6d33cae2292454563028f93a3f1f8ce58e3025b07",
    "https://mdb-build-public.s3.amazonaws.com/fd-binaries/v10.3.0/fd-darwin-amd64": "e3936d70c47bf8439797aa2c6c1ddff868424ff6bc418fc8501e819a2d58ccad",
    "https://mdb-build-public.s3.amazonaws.com/fd-binaries/v10.3.0/fd-darwin-arm64": "14134aadba85ab2cfe4494d0f44253145f897b77a23fd4d7df2cb4929b53c786",
    "https://mdb-build-public.s3.amazonaws.com/fd-binaries/v10.3.0/fd-linux-amd64": "9f48273b6c780a5f4f084ef30bc67d98cbd7d10c55c4605cf3a6ee29b741af87",
    "https://mdb-build-public.s3.amazonaws.com/fd-binaries/v10.3.0/fd-linux-arm64": "d0fc407937b8a8aec44f3a80b4a08219ba61dde234590358abb168b44478d493",
    "https://mdb-build-public.s3.amazonaws.com/fd-binaries/v10.3.0/fd-windows-amd64.exe": "fd3d4853da7a319a604e1cb03ede88cbf584edd12b89a0991871fb4d9cd3ba5b",
    "https://mdb-build-public.s3.amazonaws.com/db-contrib-tool-binaries/v2.2.3/db-contrib-tool_v2.2.3_darwin_x64.gz": "42dcc92c2914214783ddec659a157dcef0aadc1a03bd29730c69511d8ad84912",
    "https://mdb-build-public.s3.amazonaws.com/db-contrib-tool-binaries/v2.2.3/db-contrib-tool_v2.2.3_linux_arm64.gz": "000189ea41fc498a9090d39cb1d0fbf426dfe6ba119dcd60cab802bcb261bd4d",
    "https://mdb-build-public.s3.amazonaws.com/db-contrib-tool-binaries/v2.2.3/db-contrib-tool_v2.2.3_linux_s390x.gz": "de5e149c041b4f982b72579e499f47ac16bac6c7df6c5326d7d225e00c8a5a40",
    "https://mdb-build-public.s3.amazonaws.com/db-contrib-tool-binaries/v2.2.3/db-contrib-tool_v2.2.3_linux_x64.gz": "c7dd52b4dc706f6ee6a2f553271f4f57a6013cf3914363802427aaf38732d2ec",
    "https://mdb-build-public.s3.amazonaws.com/db-contrib-tool-binaries/v2.2.3/db-contrib-tool_v2.2.3_rhel8_ppc64le.gz": "0f6a380bd881d2423195d1338389d4ed305b6ad2fff6e773776f40896e4d58a8",
    "https://mdb-build-public.s3.amazonaws.com/db-contrib-tool-binaries/v2.2.3/db-contrib-tool_v2.2.3_rhel9_ppc64le.gz": "92ae51c9ee0b343fc6723e0f6b9d529a3f93e1f4f54042193b77c762b8911c4e",
    "https://mdb-build-public.s3.amazonaws.com/db-contrib-tool-binaries/v2.2.3/db-contrib-tool_v2.2.3_windows_x64.exe.gz": "da881cf80ab10ae98ade5fd7ea43337b26b5f674fabdbd11c5d1644804a8f089",
    "https://mdb-build-public.s3.amazonaws.com/rg-binaries/v15.1.0/rg-manylinux2014-aarch64": "746beac19b27b866546ff43c5f629327409b933856adeec9652d3853d1658a01",
    "https://mdb-build-public.s3.amazonaws.com/rg-binaries/v15.1.0/rg-manylinux2014-x86_64": "6ebf46fc6d69d90cb767abdf850b504e2541a5fd72d6efbd4397c0b7d0dae06d",
    "https://mdb-build-public.s3.amazonaws.com/rg-binaries/v15.1.0/rg-manylinux2014-ppc64le": "2755083296eb66b5f7eb90dea74ced923f8fec1837e84a6bff5b878c923e4542",
    "https://mdb-build-public.s3.amazonaws.com/rg-binaries/v15.1.0/rg-manylinux2014-s390x": "d018b9a755293ec16289e55b37c4b92d4a51e73fbe7aa3414c1809901e7bde04",
    "https://mdb-build-public.s3.amazonaws.com/rg-binaries/v15.1.0/rg-macos-universal2": "eb65e7234928e13db25fe75fdcafd798871f248a9763b49821eab6cd469b2832",
    "https://mdb-build-public.s3.amazonaws.com/rg-binaries/v15.1.0/rg-windows-x86_64.exe": "bc3a0a1771ad0b44e5319e0edd0dd8bb8544b6f8ca80a6caa2273f41efe1117b",
    "https://mongot-extension.s3.amazonaws.com/release/mongot-extension-0.0.0-amazon2023-x86_64.tgz": "0755418443d1a069ff328c63ca0fd22aee52ef3b7757ee2fa9a84f55ad98ab25",
    "https://mongot-extension.s3.amazonaws.com/release/mongot-extension-0.0.0-amazon2023-aarch64.tgz": "41364b9c87b087f55b9272bf124fe172fd51e1fa806c415d1162f350e0eb15a9",
    "https://mongot-extension.s3.amazonaws.com/release/mongot-extension-0.0.0-amazon2-x86_64.tgz": "6abaef106a4cddb14023e74b6806f50962042e48e4540f96a1df37eaf62ca2a3",
    "https://mongot-extension.s3.amazonaws.com/release/mongot-extension-0.0.0-amazon2-aarch64.tgz": "ee8a40f84b96d75af7304817ac221bf672d18ab427a5a9f9374da0556fc56b0a",
} | python | github | https://github.com/mongodb/mongo | buildscripts/s3_binary/hashes.py |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo (OpenERP) addon manifest: a bare dictionary literal of module
# metadata read by the module loader. This file contains no executable code.
{
    'name': 'Timesheets',
    'version': '1.0',
    'category': 'Human Resources',
    'sequence': 24,
    'summary': 'Timesheets, Attendances, Activities',
    'description': """
Record and validate timesheets and attendances easily
=====================================================
This application supplies a new screen enabling you to manage both attendances (Sign in/Sign out) and your work encoding (timesheet) by period. Timesheet entries are made by employees each day. At the end of the defined period, employees validate their sheet and the manager must then approve his team's entries. Periods are defined in the company forms and you can set them to run monthly or weekly.
The complete timesheet validation process is:
---------------------------------------------
* Draft sheet
* Confirmation at the end of the period by the employee
* Validation by the project manager
The validation can be configured in the company:
------------------------------------------------
* Period size (Day, Week, Month)
* Maximal difference between timesheet and attendances
""",
    'author': 'OpenERP SA',
    'website': 'https://www.odoo.com/page/employees',
    # Modules that must be installed before this one.
    'depends': ['hr_timesheet', 'hr_timesheet_invoice'],
    # Data files loaded on install/upgrade; load order matters (security
    # rules first, then views/workflows/reports).
    'data': [
        'security/ir.model.access.csv',
        'security/hr_timesheet_sheet_security.xml',
        'hr_timesheet_sheet_view.xml',
        'hr_timesheet_workflow.xml',
        'report/hr_timesheet_report_view.xml',
        'wizard/hr_timesheet_current_view.xml',
        'hr_timesheet_sheet_data.xml',
        'res_config_view.xml',
        'views/hr_timesheet_sheet.xml',
    ],
    'demo': ['hr_timesheet_sheet_demo.xml'],
    'test': ['test/test_hr_timesheet_sheet.yml'],
    'installable': True,
    # Not auto-installed when dependencies are present; shown as an "app".
    'auto_install': False,
    'application': True,
    'qweb': ['static/src/xml/timesheet.xml',],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | unknown | codeparrot/codeparrot-clean | | |
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\DependencyInjection;
use Psr\Container\ContainerInterface;
use Symfony\Component\DependencyInjection\Exception\ServiceNotFoundException;
/**
 * Turns public and "container.reversible" services back to their ids.
 *
 * @author Nicolas Grekas <p@tchwork.com>
 */
final class ReverseContainer
{
    // Closure bound into the container's scope; maps a service object back
    // to its id by scanning the container's internal instance maps.
    private \Closure $getServiceId;

    public function __construct(
        private Container $serviceContainer,
        private ContainerInterface $reversibleLocator,
        private string $tagName = 'container.reversible',
    ) {
        // Bind to Container::class so the closure may read the protected
        // $services / $privates maps. array_search(..., true) compares by
        // object identity; "?: null" normalizes a false (not found) result.
        $this->getServiceId = \Closure::bind(fn (object $service): ?string => array_search($service, $this->services, true) ?: array_search($service, $this->privates, true) ?: null, $serviceContainer, Container::class);
    }

    /**
     * Returns the id of the passed object when it exists as a service.
     *
     * To be reversible, services need to be either public or be tagged with "container.reversible".
     */
    public function getId(object $service): ?string
    {
        if ($this->serviceContainer === $service) {
            return 'service_container';
        }

        if (null === $id = ($this->getServiceId)($service)) {
            return null;
        }

        // Only expose ids that are public (known to the container) or
        // explicitly reversible (present in the locator).
        if ($this->serviceContainer->has($id) || $this->reversibleLocator->has($id)) {
            return $id;
        }

        return null;
    }

    /**
     * @throws ServiceNotFoundException When the service is not reversible
     */
    public function getService(string $id): object
    {
        if ($this->reversibleLocator->has($id)) {
            return $this->reversibleLocator->get($id);
        }

        if (isset($this->serviceContainer->getRemovedIds()[$id])) {
            // The id belonged to a private service removed at compile time;
            // raise an actionable error instead of a generic "not found".
            throw new ServiceNotFoundException($id, null, null, [], \sprintf('The "%s" service is private and cannot be accessed by reference. You should either make it public, or tag it as "%s".', $id, $this->tagName));
        }

        return $this->serviceContainer->get($id);
    }
} | php | github | https://github.com/symfony/symfony | src/Symfony/Component/DependencyInjection/ReverseContainer.php |
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.v3 import test_servers
class ServerUsageSampleJsonTest(test_servers.ServersSampleBase):
    """API sample tests for the os-server-usage extension.

    Boots a server in setUp, then checks that the "show" and "detail"
    responses match the stored API sample templates.
    """

    extension_name = 'os-server-usage'

    def setUp(self):
        """setUp method for server usage."""
        super(ServerUsageSampleJsonTest, self).setUp()
        # Boot one server so the show/detail responses have content.
        self.uuid = self._post_server()

    def test_show(self):
        # GET a single server; id and hostid vary per run, so they are
        # substituted into the sample template as regexes.
        response = self._do_get('servers/%s' % self.uuid)
        subs = self._get_regexes()
        subs['id'] = self.uuid
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('server-get-resp', subs, response, 200)

    def test_details(self):
        # Same check against the detailed server listing.
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['id'] = self.uuid
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('servers-detail-resp', subs, response, 200) | unknown | codeparrot/codeparrot-clean | | |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Live variable analysis.
See https://en.wikipedia.org/wiki/Live_variable_analysis for a definition of
the following idioms: live variable, live in, live out, which are used
throughout this file.
This analysis attaches the following:
* symbols that are live at the exit of control flow statements
* symbols that are live at the entry of control flow statements
Requires activity analysis.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import cfg
from tensorflow.python.autograph.pyct import transformer
from tensorflow.python.autograph.pyct.static_analysis import annos
class Analyzer(cfg.GraphVisitor):
  """CFG visitor that performs liveness analysis at statement level."""

  def __init__(self, graph):
    super(Analyzer, self).__init__(graph)
    # This allows communicating that nodes generate extra symbols,
    # e.g. those that a function definition closes over.
    self.extra_gen = {}

  def init_state(self, _):
    # Initial liveness state for every CFG node: no symbols live.
    return set()

  def visit_node(self, node):
    # One backward-dataflow step for a single CFG node:
    #   live_in = gen | (live_out - kill)
    # Returns True when the node's in-state changed, which tells the
    # fixed-point iteration (visit_reverse) to keep going.
    prev_live_in = self.in_[node]
    if anno.hasanno(node.ast_node, anno.Static.SCOPE):
      node_scope = anno.getanno(node.ast_node, anno.Static.SCOPE)
      # gen: symbols this node reads, plus any extra symbols attributed to
      # it (e.g. the free variables of a nested function definition).
      gen = node_scope.read | self.extra_gen.get(node.ast_node, frozenset())
      # TODO(mdan): verify whether composites' parents need to be added.
      # E.g. whether x needs to be added if x.y is live. Theoretically the
      # activity analysis should have both so that wouldn't be needed.
      kill = node_scope.modified | node_scope.deleted
      # live_out: union of the in-states of all successor nodes.
      live_out = set()
      for n in node.next:
        live_out |= self.in_[n]
      live_in = gen | (live_out - kill)
    else:
      # Nodes that don't have a scope annotation are assumed not to touch any
      # symbols.
      # This Name node below is a literal name, e.g. False
      assert isinstance(node.ast_node,
                        (gast.Name, gast.Continue, gast.Break, gast.Pass,
                         gast.Global, gast.Nonlocal)), type(node.ast_node)
      live_out = set()
      for n in node.next:
        live_out |= self.in_[n]
      live_in = live_out
    self.in_[node] = live_in
    self.out[node] = live_out
    # TODO(mdan): Move this to the superclass?
    return prev_live_in != live_in
class WholeTreeAnalyzer(transformer.Base):
  """Runs liveness analysis on each of the functions defined in the AST.

  If a function defined other local functions, those will have separate CFGs.
  However, dataflow analysis needs to tie up these CFGs to properly emulate the
  effect of closures. In the case of liveness, the parent function's live
  variables must account for the variables that are live at the entry of each
  subfunction. For example:

    def foo():
      # baz is live here
      def bar():
        print(baz)

  This analyzer runs liveness analysis on each individual function, accounting
  for the effect above.
  """

  def __init__(self, source_info, graphs):
    super(WholeTreeAnalyzer, self).__init__(source_info)
    # Mapping of FunctionDef AST node -> cfg.Graph, supplied by the caller.
    self.graphs = graphs
    # Analyzer of the function currently being visited; None at module level.
    self.current_analyzer = None
    # Mapping of FunctionDef AST node -> its completed Analyzer.
    self.analyzers = {}

  def visit_FunctionDef(self, node):
    parent_analyzer = self.current_analyzer
    subgraph = self.graphs[node]

    # Postorder tree processing makes this a bit complicated:
    #  1. construct an analyzer object and put it on stack
    #  2. recursively walk the subtree; this will initialize the analyzer's
    #     in_ state properly (done in a block below)
    #  3. run the final analysis
    analyzer = Analyzer(subgraph)
    self.current_analyzer = analyzer
    node = self.generic_visit(node)
    analyzer.visit_reverse()

    if parent_analyzer is not None:
      # Wire the state between the two subgraphs' analyzers.
      child_in_state = analyzer.in_[subgraph.entry]
      # Exception: symbols modified in the child function are local to it
      body_scope = anno.getanno(node, annos.NodeAnno.BODY_SCOPE)
      for qn in body_scope.modified:
        # Note: a function modifying the symbol doesn't make that symbol
        # live at the function's entry. In fact when that happens it is
        # probably a case of undefined assignment, like this:
        #
        #   bar = 0
        #   def foo():
        #     print(bar)  # bar is undefined here!
        #     bar = 1
        #
        # Hence we use discard and not remove below.
        child_in_state.discard(qn)
      # Symbols live at the child's entry count as "generated" by the
      # FunctionDef node in the parent's own analysis (closure effect).
      parent_analyzer.extra_gen[node] = frozenset(child_in_state,)

    self.analyzers[node] = analyzer
    self.current_analyzer = parent_analyzer
    return node
class Annotator(transformer.Base):
  """AST visitor that annotates each control flow block with live symbols."""

  # Note: additional nodes may be added as needed.

  def __init__(self, source_info, cross_function_analyzer):
    super(Annotator, self).__init__(source_info)
    # WholeTreeAnalyzer that has already been run over the same tree; it
    # holds one completed Analyzer per function.
    self.cross_function_analyzer = cross_function_analyzer
    self.current_analyzer = None

  def visit(self, node):
    node = super(Annotator, self).visit(node)
    # Attach LIVE_VARS_IN to every statement that maps to a CFG node.
    if (self.current_analyzer is not None and
        isinstance(node, gast.stmt) and
        node in self.current_analyzer.graph.index):
      cfg_node = self.current_analyzer.graph.index[node]
      anno.setanno(node, anno.Static.LIVE_VARS_IN,
                   frozenset(self.current_analyzer.in_[cfg_node]))
    return node

  def visit_FunctionDef(self, node):
    # Swap in the analyzer of the function being entered; restore on exit.
    parent_analyzer = self.current_analyzer
    self.current_analyzer = self.cross_function_analyzer.analyzers[node]
    node = self.generic_visit(node)
    self.current_analyzer = parent_analyzer
    return node

  def _block_statement_live_out(self, node):
    # Live-out of a block statement: union of live-in over the statements
    # that may execute immediately after the whole block.
    successors = self.current_analyzer.graph.stmt_next[node]
    stmt_live_out = set()
    for s in successors:
      stmt_live_out.update(self.current_analyzer.in_[s])
    anno.setanno(node, anno.Static.LIVE_VARS_OUT, frozenset(stmt_live_out))
    return node

  def _block_statement_live_in(self, node, entry_node):
    # Live-in of a block statement, taken from its entry node (e.g. the
    # test of an `if`) or copied from an already-annotated block statement.
    if entry_node in self.current_analyzer.graph.index:
      cfg_node = self.current_analyzer.graph.index[entry_node]
      stmt_live_in = frozenset(self.current_analyzer.in_[cfg_node])
    else:
      assert anno.hasanno(entry_node, anno.Static.LIVE_VARS_IN), (
          'If not matching a CFG node, must be a block statement:'
          ' {}'.format(entry_node))
      stmt_live_in = anno.getanno(entry_node, anno.Static.LIVE_VARS_IN)
    anno.setanno(node, anno.Static.LIVE_VARS_IN, stmt_live_in)
    return node

  def visit_If(self, node):
    node = self.generic_visit(node)
    node = self._block_statement_live_out(node)
    return self._block_statement_live_in(node, node.test)

  def visit_For(self, node):
    node = self.generic_visit(node)
    node = self._block_statement_live_out(node)
    return self._block_statement_live_in(node, node.iter)

  def visit_While(self, node):
    node = self.generic_visit(node)
    node = self._block_statement_live_out(node)
    return self._block_statement_live_in(node, node.test)

  def visit_Try(self, node):
    node = self.generic_visit(node)
    node = self._block_statement_live_out(node)
    # A Try has no test expression; use its first body statement as entry.
    return self._block_statement_live_in(node, node.body[0])

  def visit_ExceptHandler(self, node):
    node = self.generic_visit(node)
    node = self._block_statement_live_out(node)
    return self._block_statement_live_in(node, node.body[0])

  def visit_With(self, node):
    node = self.generic_visit(node)
    return self._block_statement_live_in(node, node.items[0])

  def visit_Expr(self, node):
    node = self.generic_visit(node)
    cfg_node = self.current_analyzer.graph.index[node]
    anno.setanno(node, anno.Static.LIVE_VARS_OUT,
                 frozenset(self.current_analyzer.out[cfg_node]))
    return node
def resolve(node, source_info, graphs):
  """Resolves the live symbols at the exit of control flow statements.

  Args:
    node: ast.AST
    source_info: transformer.SourceInfo
    graphs: Dict[ast.FunctionDef, cfg.Graph]
  Returns:
    ast.AST
  """
  # Pass 1: run liveness analysis per function, wiring closures together.
  cross_function_analyzer = WholeTreeAnalyzer(source_info, graphs)
  node = cross_function_analyzer.visit(node)
  # Pass 2: write the results back onto the AST as annotations.
  visitor = Annotator(source_info, cross_function_analyzer)
  node = visitor.visit(node)
  return node | unknown | codeparrot/codeparrot-clean | | |
from __future__ import division, unicode_literals
from future.builtins import int
from datetime import datetime, timedelta
try:
from urllib.parse import urlencode
except ImportError: # Python 2
from urllib import urlencode
try:
from urllib.request import Request, urlopen
except ImportError: # Python 2
from urllib2 import Request, urlopen
import django
from django.contrib.auth import get_permission_codename
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.forms import EmailField, URLField, Textarea
from django.template import RequestContext
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
import mezzanine
from mezzanine.conf import settings
from mezzanine.utils.importing import import_dotted_path
from mezzanine.utils.sites import has_site_permission
def is_editable(obj, request):
    """
    Return ``True`` if the object is editable for the request.

    A custom ``is_editable`` handler on the object takes precedence;
    otherwise the logged-in user must hold site permission as well as
    the object's model-level "change" permission.
    """
    if not hasattr(obj, "is_editable"):
        opts = obj._meta
        perm = "%s.%s" % (opts.app_label,
                          get_permission_codename("change", opts))
        user = request.user
        return (user.is_authenticated() and
                has_site_permission(user) and
                user.has_perm(perm))
    return obj.is_editable(request)
def ip_for_request(request):
    """
    Return the client IP address for ``request``.

    The ``HTTP_X_FORWARDED_FOR`` header is checked first, since the app
    will generally sit behind a public web server; ``REMOTE_ADDR`` is the
    fallback. Only the first (left-most) address of a comma-separated
    proxy chain is returned.
    """
    headers = request.META
    forwarded = headers.get("HTTP_X_FORWARDED_FOR", headers["REMOTE_ADDR"])
    first_address, _, _ = forwarded.partition(",")
    return first_address
def is_spam_akismet(request, form, url):
    """
    Identifies form data as being spam, using the http://akismet.com
    service. The Akismet API key should be specified in the
    ``AKISMET_API_KEY`` setting. This function is the default spam
    handler defined in the ``SPAM_FILTERS`` setting.

    The name, email, url and comment fields are all guessed from the
    form fields:

    * name: First field labelled "Name", also taking i18n into account.
    * email: First ``EmailField`` field.
    * url: First ``URLField`` field.
    * comment: First field with a ``Textarea`` widget.

    If the actual comment can't be extracted, spam checking is passed.

    The referrer field expects a hidden form field to pass the referrer
    through, since the HTTP_REFERER will be the URL the form is posted
    from. The hidden referrer field is made available by default with
    the ``{% fields_for %}`` templatetag used for rendering form fields.
    """
    if not settings.AKISMET_API_KEY:
        return False
    protocol = "http" if not request.is_secure() else "https"
    host = protocol + "://" + request.get_host()
    data = {
        "blog": host,
        "user_ip": ip_for_request(request),
        "user_agent": request.META.get("HTTP_USER_AGENT", ""),
        "referrer": request.POST.get("referrer", ""),
        "permalink": host + url,
        "comment_type": "comment" if "comment" in request.POST else "form",
    }
    # Guess the Akismet fields from the form's fields (see docstring).
    for name, field in form.fields.items():
        data_field = None
        if field.label and field.label.lower() in ("name", _("Name").lower()):
            data_field = "comment_author"
        elif isinstance(field, EmailField):
            data_field = "comment_author_email"
        elif isinstance(field, URLField):
            data_field = "comment_author_url"
        elif isinstance(field.widget, Textarea):
            data_field = "comment_content"
        if data_field and not data.get(data_field):
            cleaned_data = form.cleaned_data.get(name)
            try:
                data[data_field] = cleaned_data.encode('utf-8')
            except UnicodeEncodeError:
                data[data_field] = cleaned_data
    if not data.get("comment_content"):
        return False
    api_url = ("http://%s.rest.akismet.com/1.1/comment-check" %
               settings.AKISMET_API_KEY)
    versions = (django.get_version(), mezzanine.__version__)
    headers = {"User-Agent": "Django/%s | Mezzanine/%s" % versions}
    # BUGFIX: on Python 3, urlopen() requires the POST body as bytes
    # (urlencode returns text), and read() returns bytes, which would never
    # compare equal to the str "true". Encode the body and decode the
    # response; both are safe no-ops on Python 2 since the urlencoded
    # payload and Akismet's reply are ASCII-only.
    try:
        post_data = urlencode(data).encode("utf-8")
        response = urlopen(Request(api_url, post_data, headers)).read()
    except Exception:
        # Best-effort: treat any network/API failure as "not spam".
        return False
    return response.decode("utf-8") == "true"
def is_spam(request, form, url):
    """
    Main entry point for spam handling - called from the comment view and
    page processor for ``mezzanine.forms``, to check if posted content is
    spam. Spam filters are configured via the ``SPAM_FILTERS`` setting.
    """
    # Short-circuits on the first filter that flags the submission as spam
    # (returning True); yields None when no filter matches.
    return next(
        (True for filter_path in settings.SPAM_FILTERS
         if import_dotted_path(filter_path)(request, form, url)),
        None,
    )
def paginate(objects, page_num, per_page, max_paging_links):
    """
    Return the page of ``objects`` for ``page_num``, with an extra
    ``visible_page_range`` attribute: the page numbers to render, trimmed
    to at most ``max_paging_links`` entries centred on the current page.
    """
    if not per_page:
        # Pagination disabled: hand back a bare paginator, as callers expect.
        return Paginator(objects, 0)
    paginator = Paginator(objects, per_page)
    try:
        number = int(page_num)
    except ValueError:
        number = 1
    try:
        page = paginator.page(number)
    except (EmptyPage, InvalidPage):
        # Out-of-range page numbers fall back to the last page.
        page = paginator.page(paginator.num_pages)
    visible = page.paginator.page_range
    if len(visible) > max_paging_links:
        midpoint = max(0, page.number - (max_paging_links // 2) - 1)
        start = min(page.paginator.num_pages - max_paging_links, midpoint)
        visible = visible[start:start + max_paging_links]
    page.visible_page_range = visible
    return page
def render(request, templates, dictionary=None, context_instance=None,
           **kwargs):
    """
    Mimics ``django.shortcuts.render`` but uses a ``TemplateResponse`` so
    that ``mezzanine.core.middleware.TemplateForDeviceMiddleware`` can
    still swap templates per device.
    """
    data = dictionary or {}
    if context_instance is None:
        context_instance = RequestContext(request, data)
    else:
        context_instance.update(data)
    return TemplateResponse(request, templates, context_instance, **kwargs)
def set_cookie(response, name, value, expiry_seconds=None, secure=False):
    """
    Set cookie wrapper that allows number of seconds to be given as the
    expiry time, and ensures values are correctly encoded.
    """
    if expiry_seconds is None:
        expiry_seconds = 90 * 24 * 60 * 60  # Default to 90 days.
    # Cookie "expires" values use the fixed RFC-style GMT date format.
    expires = datetime.strftime(datetime.utcnow() +
                                timedelta(seconds=expiry_seconds),
                                "%a, %d-%b-%Y %H:%M:%S GMT")
    # Django doesn't seem to support unicode cookie keys correctly on
    # Python 2. Work around by encoding it. See
    # https://code.djangoproject.com/ticket/19802
    try:
        response.set_cookie(name, value, expires=expires, secure=secure)
    except (KeyError, TypeError):
        response.set_cookie(name.encode('utf-8'), value, expires=expires,
                            secure=secure) | unknown | codeparrot/codeparrot-clean | | |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package util
import (
"net"
"net/url"
"strconv"
"k8s.io/apimachinery/pkg/util/validation"
"k8s.io/klog/v2"
netutils "k8s.io/utils/net"
kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm"
"k8s.io/kubernetes/cmd/kubeadm/app/util/errors"
)
// GetControlPlaneEndpoint returns a properly formatted endpoint for the control plane built according to the following rules:
// - If the controlPlaneEndpoint is defined, use it.
// - If the controlPlaneEndpoint is defined but without a port number, the controlPlaneEndpoint + localEndpoint.BindPort is used.
// - Otherwise, in case the controlPlaneEndpoint is not defined, use the localEndpoint.AdvertiseAddress + the localEndpoint.BindPort.
func GetControlPlaneEndpoint(controlPlaneEndpoint string, localEndpoint *kubeadmapi.APIEndpoint) (string, error) {
	// get the URL of the local endpoint; computed first so that an invalid
	// local endpoint fails fast even when controlPlaneEndpoint is set
	localAPIEndpoint, err := GetLocalAPIEndpoint(localEndpoint)
	if err != nil {
		return "", err
	}

	// if the controlplane endpoint is defined
	if len(controlPlaneEndpoint) > 0 {
		// parse the controlplane endpoint
		var host, port string
		var err error
		if host, port, err = ParseHostPort(controlPlaneEndpoint); err != nil {
			return "", errors.Wrapf(err, "invalid value %q given for controlPlaneEndpoint", controlPlaneEndpoint)
		}

		// if a port is provided within the controlPlaneAddress warn the users we are using it, else use the bindport
		localEndpointPort := strconv.Itoa(int(localEndpoint.BindPort))
		if port != "" {
			if port != localEndpointPort {
				klog.Warning("[endpoint] WARNING: port specified in controlPlaneEndpoint overrides bindPort in the controlplane address")
			}
		} else {
			port = localEndpointPort
		}

		// overrides the control-plane url using the controlPlaneAddress (and eventually the bindport)
		return formatURL(host, port).String(), nil
	}

	return localAPIEndpoint, nil
}
// GetLocalAPIEndpoint parses an APIEndpoint and returns it as a string,
// or returns an error in case it cannot be parsed.
func GetLocalAPIEndpoint(localEndpoint *kubeadmapi.APIEndpoint) (string, error) {
	// get the URL of the local endpoint
	localEndpointIP, localEndpointPort, err := parseAPIEndpoint(localEndpoint)
	if err != nil {
		return "", err
	}
	// render as "https://host:port" (IPv6 addresses get bracketed by formatURL)
	url := formatURL(localEndpointIP.String(), localEndpointPort)
	return url.String(), nil
}
// ParseHostPort parses a network address of the form "host:port", "ipv4:port", "[ipv6]:port" into host and port;
// ":port" can be eventually omitted.
// If the string is not a valid representation of network address, ParseHostPort returns an error.
func ParseHostPort(hostport string) (string, string, error) {
	var host, port string
	var err error

	// try to split host and port
	if host, port, err = net.SplitHostPort(hostport); err != nil {
		// if SplitHostPort returns an error, the entire hostport is considered as host
		host = hostport
	}

	// if port is defined, parse and validate it
	if port != "" {
		if _, err := ParsePort(port); err != nil {
			return "", "", errors.Errorf("hostport %s: port %s must be a valid number between 1 and 65535, inclusive", hostport, port)
		}
	}

	// if host is a valid IP, returns it
	if ip := netutils.ParseIPSloppy(host); ip != nil {
		return host, port, nil
	}

	// if host is a validate RFC-1123 subdomain, returns it
	if errs := validation.IsDNS1123Subdomain(host); len(errs) == 0 {
		return host, port, nil
	}

	// host is neither an IP nor a DNS-1123 subdomain (e.g. a bracketed IPv6
	// literal without a port lands here)
	return "", "", errors.Errorf("hostport %s: host '%s' must be a valid IP address or a valid RFC-1123 DNS subdomain", hostport, host)
}
// ParsePort parses a string representing a TCP port.
// If the string is not a valid representation of a TCP port, ParsePort returns an error.
func ParsePort(port string) (int, error) {
	// NOTE(review): the boolean argument to netutils.ParsePort presumably
	// allows 0; the explicit 1..65535 range check below is what rejects it.
	portInt, err := netutils.ParsePort(port, true)
	if err == nil && (1 <= portInt && portInt <= 65535) {
		return portInt, nil
	}

	return 0, errors.New("port must be a valid number between 1 and 65535, inclusive")
}
// parseAPIEndpoint parses an APIEndpoint and returns the AdvertiseAddress as net.IP and the BindPort as string.
// If the BindPort or AdvertiseAddress are invalid it returns an error.
func parseAPIEndpoint(localEndpoint *kubeadmapi.APIEndpoint) (net.IP, string, error) {
	// parse the bind port
	bindPortString := strconv.Itoa(int(localEndpoint.BindPort))
	if _, err := ParsePort(bindPortString); err != nil {
		// NOTE(review): %q on a numeric BindPort renders a rune literal
		// (e.g. '\u01f4'), not the number; %d would read better — confirm
		// before changing the message.
		return nil, "", errors.Wrapf(err, "invalid value %q given for api.bindPort", localEndpoint.BindPort)
	}

	// parse the AdvertiseAddress
	var ip = netutils.ParseIPSloppy(localEndpoint.AdvertiseAddress)
	if ip == nil {
		return nil, "", errors.Errorf("invalid value `%s` given for api.advertiseAddress", localEndpoint.AdvertiseAddress)
	}

	return ip, bindPortString, nil
}
// formatURL takes a host and a port string and creates a net.URL using https scheme
func formatURL(host, port string) *url.URL {
	hostport := net.JoinHostPort(host, port)
	return &url.URL{
		Host:   hostport,
		Scheme: "https",
	}
}
from typing import Any, cast
import pytest
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import AIMessage
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableConfig
from langchain_tests.integration_tests import ChatModelIntegrationTests
from pydantic import BaseModel
from langchain.chat_models import init_chat_model
class Multiply(BaseModel):
    """Product of two ints."""

    # Operands for the multiplication tool call.
    x: int
    y: int
@pytest.mark.requires("langchain_openai", "langchain_anthropic")
async def test_init_chat_model_chain() -> None:
    """Smoke-test a runtime-configurable chat model inside a prompt chain."""
    configurable_model = init_chat_model(
        "gpt-4o", configurable_fields="any", config_prefix="bar"
    )
    tool_model = configurable_model.bind_tools([Multiply])
    configured_model = tool_model.with_config(
        RunnableConfig(tags=["foo"]),
        configurable={"bar_model": "claude-sonnet-4-5-20250929"},
    )
    prompt = ChatPromptTemplate.from_messages([("system", "foo"), ("human", "{input}")])
    chain = prompt | configured_model
    # Synchronous invocation returns a chat message.
    result = chain.invoke({"input": "bar"})
    assert isinstance(result, AIMessage)
    # Async event streaming yields at least one event.
    collected = [event async for event in chain.astream_events({"input": "bar"}, version="v2")]
    assert collected
class TestStandard(ChatModelIntegrationTests):
    """Run the standard chat-model integration test suite against init_chat_model."""

    @property
    def chat_model_class(self) -> type[BaseChatModel]:
        # init_chat_model is a factory function, not a class; cast it so the
        # suite can instantiate it as if it were a model class.
        return cast("type[BaseChatModel]", init_chat_model)

    @property
    def chat_model_params(self) -> dict[str, Any]:
        # Default to gpt-4o with every field runtime-configurable.
        return {"model": "gpt-4o", "configurable_fields": "any"}

    @property
    def supports_image_inputs(self) -> bool:
        return True

    @property
    def has_tool_calling(self) -> bool:
        return True

    @property
    def has_structured_output(self) -> bool:
        return True | python | github | https://github.com/langchain-ai/langchain | libs/langchain_v1/tests/integration_tests/chat_models/test_base.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import time
# DOCUMENTATION/EXAMPLES are YAML blocks consumed by ansible-doc; they describe
# the module interface only and have no runtime effect.
DOCUMENTATION = '''
---
module: copy
version_added: "historical"
short_description: Copies files to remote locations.
description:
     - The M(copy) module copies a file on the local box to remote locations. Use the M(fetch) module to copy files from remote locations to the local box.
options:
  src:
    description:
      - Local path to a file to copy to the remote server; can be absolute or relative.
        If path is a directory, it is copied recursively. In this case, if path ends
        with "/", only inside contents of that directory are copied to destination.
        Otherwise, if it does not end with "/", the directory itself with all contents
        is copied. This behavior is similar to Rsync.
    required: false
    default: null
    aliases: []
  content:
    version_added: "1.1"
    description:
      - When used instead of 'src', sets the contents of a file directly to the specified value.
    required: false
    default: null
  dest:
    description:
      - Remote absolute path where the file should be copied to. If src is a directory,
        this must be a directory too.
    required: true
    default: null
  backup:
    description:
      - Create a backup file including the timestamp information so you can get
        the original file back if you somehow clobbered it incorrectly.
    version_added: "0.7"
    required: false
    choices: [ "yes", "no" ]
    default: "no"
  force:
    description:
      - the default is C(yes), which will replace the remote file when contents
        are different than the source. If C(no), the file will only be transferred
        if the destination does not exist.
    version_added: "1.1"
    required: false
    choices: [ "yes", "no" ]
    default: "yes"
    aliases: [ "thirsty" ]
  validate:
    description:
      - The validation command to run before copying into place. The path to the file to
        validate is passed in via '%s' which must be present as in the visudo example below.
        The command is passed securely so shell features like expansion and pipes won't work.
    required: false
    default: ""
    version_added: "1.2"
  directory_mode:
    description:
      - When doing a recursive copy set the mode for the directories. If this is not set we will use the system
        defaults. The mode is only set on directories which are newly created, and will not affect those that
        already existed.
    required: false
    version_added: "1.5"
extends_documentation_fragment: files
author: Michael DeHaan
notes:
   - The "copy" module recursively copy facility does not scale to lots (>hundreds) of files.
     For alternative, see synchronize module, which is a wrapper around rsync.
'''

EXAMPLES = '''
# Example from Ansible Playbooks
- copy: src=/srv/myfiles/foo.conf dest=/etc/foo.conf owner=foo group=foo mode=0644

# The same example as above, but using a symbolic mode equivalent to 0644
- copy: src=/srv/myfiles/foo.conf dest=/etc/foo.conf owner=foo group=foo mode="u=rw,g=r,o=r"

# Another symbolic mode example, adding some permissions and removing others
- copy: src=/srv/myfiles/foo.conf dest=/etc/foo.conf owner=foo group=foo mode="u+rw,g-wx,o-rwx"

# Copy a new "ntp.conf" file into place, backing up the original if it differs from the copied version
- copy: src=/mine/ntp.conf dest=/etc/ntp.conf owner=root group=root mode=644 backup=yes

# Copy a new "sudoers" file into place, after passing validation with visudo
- copy: src=/mine/sudoers dest=/etc/sudoers validate='visudo -cf %s'
'''
# RETURN documents the facts this module reports; the body must be valid YAML
# for ansible-doc. Fixes: stray trailing comma after the md5sum sample, the
# malformed "uid: 100" key (the value belongs under sample:), and the state
# description that had been copy-pasted from mode.
RETURN = '''
dest:
    description: destination file/path
    returned: success
    type: string
    sample: "/path/to/file.txt"
src:
    description: source file used for the copy on the target machine
    returned: changed
    type: string
    sample: "/home/httpd/.ansible/tmp/ansible-tmp-1423796390.97-147729857856000/source"
md5sum:
    description: md5 checksum of the file after running copy
    returned: when supported
    type: string
    sample: "2a5aeecc61dc98c4d780b14b330e3282"
checksum:
    description: checksum of the file after running copy
    returned: success
    type: string
    sample: "6e642bb8dd5c2e027bf21dd923337cbb4214f827"
backup_file:
    description: name of backup file created
    returned: changed and if backup=yes
    type: string
    sample: "/path/to/file.txt.2015-02-12@22:09~"
gid:
    description: group id of the file, after execution
    returned: success
    type: int
    sample: 100
group:
    description: group of the file, after execution
    returned: success
    type: string
    sample: "httpd"
owner:
    description: owner of the file, after execution
    returned: success
    type: string
    sample: "httpd"
uid:
    description: owner id of the file, after execution
    returned: success
    type: int
    sample: 100
mode:
    description: permissions of the target, after execution
    returned: success
    type: string
    sample: "0644"
size:
    description: size of the target, after execution
    returned: success
    type: int
    sample: 1220
state:
    description: state of the target, after execution
    returned: success
    type: string
    sample: "file"
'''
def split_pre_existing_dir(dirname):
    '''
    Return the deepest pre-existing ancestor of `dirname` together with the
    list of path components that still need to be created beneath it.
    '''
    head, tail = os.path.split(dirname)
    if os.path.exists(head):
        # Base case: the parent already exists, only `tail` must be created.
        return (head, [tail])
    # Recurse towards the filesystem root, then record this component as
    # one more directory to create.
    (pre_existing_dir, new_directory_list) = split_pre_existing_dir(head)
    new_directory_list.append(tail)
    return (pre_existing_dir, new_directory_list)
def adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed):
    '''
    Walk the new directories list and make sure that permissions are as we would expect
    '''
    # Iterative form of the original recursion: consume the component list
    # front-to-back, descending one directory per step and applying the
    # common file attributes at each level.
    current_dir = pre_existing_dir
    while new_directory_list:
        current_dir = os.path.join(current_dir, new_directory_list.pop(0))
        directory_args['path'] = current_dir
        changed = module.set_fs_attributes_if_different(directory_args, changed)
    return changed
def main():
    '''Entry point: copy the already-transferred `src` file into place at `dest`.'''
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = dict(
            src = dict(required=False),
            original_basename = dict(required=False), # used to handle 'dest is a directory' via template, a slight hack
            content = dict(required=False, no_log=True),
            dest = dict(required=True),
            backup = dict(default=False, type='bool'),
            force = dict(default=True, aliases=['thirsty'], type='bool'),
            validate = dict(required=False, type='str'),
            directory_mode = dict(required=False)
        ),
        add_file_common_args=True,
        supports_check_mode=True,
    )

    src = os.path.expanduser(module.params['src'])
    dest = os.path.expanduser(module.params['dest'])
    backup = module.params['backup']
    force = module.params['force']
    original_basename = module.params.get('original_basename',None)
    validate = module.params.get('validate',None)
    # 'follow' comes from the file-common argument set (add_file_common_args=True).
    follow = module.params['follow']

    # The action plugin has already placed src on the remote host; a missing
    # file means the transfer itself failed.
    if not os.path.exists(src):
        module.fail_json(msg="Source %s failed to transfer" % (src))
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source %s not readable" % (src))

    checksum_src = module.sha1(src)
    checksum_dest = None
    # Backwards compat only. This will be None in FIPS mode
    try:
        md5sum_src = module.md5(src)
    except ValueError:
        md5sum_src = None

    changed = False

    # Special handling for recursive copy - create intermediate dirs
    if original_basename and dest.endswith("/"):
        dest = os.path.join(dest, original_basename)
        dirname = os.path.dirname(dest)
        if not os.path.exists(dirname) and os.path.isabs(dirname):
            (pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
            os.makedirs(dirname)
            directory_args = module.load_file_common_arguments(module.params)
            directory_mode = module.params["directory_mode"]
            if directory_mode is not None:
                directory_args['mode'] = directory_mode
            else:
                directory_args['mode'] = None
            # Apply ownership/mode only to the directories that were just created.
            adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)

    if os.path.exists(dest):
        if os.path.islink(dest) and follow:
            dest = os.path.realpath(dest)
        if not force:
            # force=no: never overwrite an existing destination.
            module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
        if (os.path.isdir(dest)):
            # Copying into a directory: append the (possibly original) basename.
            basename = os.path.basename(src)
            if original_basename:
                basename = original_basename
            dest = os.path.join(dest, basename)
        if os.access(dest, os.R_OK):
            checksum_dest = module.sha1(dest)
    else:
        if not os.path.exists(os.path.dirname(dest)):
            try:
                # os.path.exists() can return false in some
                # circumstances where the directory does not have
                # the execute bit for the current user set, in
                # which case the stat() call will raise an OSError
                os.stat(os.path.dirname(dest))
            except OSError, e:
                if "permission denied" in str(e).lower():
                    module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
                module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))
        if not os.access(os.path.dirname(dest), os.W_OK):
            module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))

    backup_file = None
    # Copy only when content differs, or when converting a symlink to a file.
    if checksum_src != checksum_dest or os.path.islink(dest):
        try:
            if backup:
                if os.path.exists(dest):
                    backup_file = module.backup_local(dest)
            # allow for conversion from symlink.
            if os.path.islink(dest):
                os.unlink(dest)
                open(dest, 'w').close()
            if validate:
                if "%s" not in validate:
                    module.fail_json(msg="validate must contain %%s: %s" % (validate))
                (rc,out,err) = module.run_command(validate % src)
                if rc != 0:
                    module.fail_json(msg="failed to validate: rc:%s error:%s" % (rc,err))
            module.atomic_move(src, dest)
        except IOError:
            module.fail_json(msg="failed to copy: %s to %s" % (src, dest))
        changed = True
    else:
        changed = False

    res_args = dict(
        dest = dest, src = src, md5sum = md5sum_src, checksum = checksum_src, changed = changed
    )
    if backup_file:
        res_args['backup_file'] = backup_file

    # Hand remaining attribute management (owner/group/mode/...) to the
    # file-common machinery against the final destination path.
    module.params['dest'] = dest
    file_args = module.load_file_common_arguments(module.params)
    res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])

    module.exit_json(**res_args)
# import module snippets (the wildcard import supplies AnsibleModule at runtime)
from ansible.module_utils.basic import *
main() | unknown | codeparrot/codeparrot-clean | |
from django.db.backends.mysql.base import DatabaseOperations
from django.contrib.gis.db.backends.adapter import WKTAdapter
from django.contrib.gis.db.backends.base import BaseSpatialOperations
from django.utils import six
class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
    """GeoDjango spatial operations for the MySQL backend (MBR-based lookups)."""

    compiler_module = 'django.contrib.gis.db.backends.mysql.compiler'
    mysql = True
    name = 'mysql'
    # SQL fragments used to read/construct geometries.
    select = 'AsText(%s)'
    from_wkb = 'GeomFromWKB'
    from_text = 'GeomFromText'

    Adapter = WKTAdapter
    Adaptor = Adapter # Backwards-compatibility alias.

    # Lookup-name -> MySQL MBR (minimum bounding rectangle) function mapping.
    geometry_functions = {
        'bbcontains' : 'MBRContains', # For consistency w/PostGIS API
        'bboverlaps' : 'MBROverlaps', # .. ..
        'contained' : 'MBRWithin', # .. ..
        'contains' : 'MBRContains',
        'disjoint' : 'MBRDisjoint',
        'equals' : 'MBREqual',
        'exact' : 'MBREqual',
        'intersects' : 'MBRIntersects',
        'overlaps' : 'MBROverlaps',
        'same_as' : 'MBREqual',
        'touches' : 'MBRTouches',
        'within' : 'MBRWithin',
        }

    # All valid GIS lookup terms for this backend (functions above plus isnull).
    gis_terms = dict([(term, None) for term in list(geometry_functions) + ['isnull']])

    def geo_db_type(self, f):
        # MySQL uses the geometry type name directly as the column type.
        return f.geom_type

    def get_geom_placeholder(self, value, srid):
        """
        The placeholder here has to include MySQL's WKT constructor. Because
        MySQL does not support spatial transformations, there is no need to
        modify the placeholder based on the contents of the given value.
        """
        if hasattr(value, 'expression'):
            placeholder = self.get_expression_column(value)
        else:
            placeholder = '%s(%%s)' % self.from_text
        return placeholder

    def spatial_lookup_sql(self, lvalue, lookup_type, value, field, qn):
        """Return the MBR-function SQL for a spatial lookup, or IS [NOT] NULL."""
        alias, col, db_type = lvalue

        geo_col = '%s.%s' % (qn(alias), qn(col))

        lookup_info = self.geometry_functions.get(lookup_type, False)
        if lookup_info:
            return "%s(%s, %s)" % (lookup_info, geo_col,
                                   self.get_geom_placeholder(value, field.srid))

        # TODO: Is this really necessary? MySQL can't handle NULL geometries
        # in its spatial indexes anyways.
        if lookup_type == 'isnull':
            return "%s IS %sNULL" % (geo_col, (not value and 'NOT ' or ''))

        raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type)) | unknown | codeparrot/codeparrot-clean | |
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_useraccountprofile
author: Gaurav Rastogi (@grastogi23) <grastogi@avinetworks.com>
short_description: Module for setup of UserAccountProfile Avi RESTful Object
description:
- This module is used to configure UserAccountProfile object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
account_lock_timeout:
description:
- Lock timeout period (in minutes).
- Default is 30 minutes.
- Default value when not specified in API or module is interpreted by Avi Controller as 30.
credentials_timeout_threshold:
description:
- The time period after which credentials expire.
- Default is 180 days.
- Default value when not specified in API or module is interpreted by Avi Controller as 180.
max_concurrent_sessions:
description:
- Maximum number of concurrent sessions allowed.
- There are unlimited sessions by default.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.
max_login_failure_count:
description:
- Number of login attempts before lockout.
- Default is 3 attempts.
- Default value when not specified in API or module is interpreted by Avi Controller as 3.
max_password_history_count:
description:
- Maximum number of passwords to be maintained in the password history.
- Default is 4 passwords.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.
name:
description:
- Name of the object.
required: true
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create UserAccountProfile object
avi_useraccountprofile:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_useraccountprofile
"""
RETURN = '''
obj:
description: UserAccountProfile (api/useraccountprofile) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
# The Avi SDK glue is optional at import time; main() reports a helpful
# error when it is missing instead of crashing on import.
try:
    from ansible.module_utils.network.avi.avi import (
        avi_common_argument_spec, avi_ansible_api, HAS_AVI)
except ImportError:
    HAS_AVI = False
def main():
    """Build the module argument spec and delegate to the generic Avi API layer."""
    argument_specs = dict(
        state=dict(default='present',
                   choices=['absent', 'present']),
        avi_api_update_method=dict(default='put',
                                   choices=['put', 'patch']),
        avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
        account_lock_timeout=dict(type='int',),
        credentials_timeout_threshold=dict(type='int',),
        max_concurrent_sessions=dict(type='int',),
        max_login_failure_count=dict(type='int',),
        max_password_history_count=dict(type='int',),
        name=dict(type='str', required=True),
        url=dict(type='str',),
        uuid=dict(type='str',),
    )
    # Merge in the shared Avi connection options (controller, username, ...).
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) or requests is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    # Delegate create/update/delete of the useraccountprofile object to the SDK glue.
    return avi_ansible_api(module, 'useraccountprofile',
                           set([]))
if __name__ == '__main__':
    # Entry point when executed by Ansible.
    main() | unknown | codeparrot/codeparrot-clean | |
#!/usr/bin/env python
'''
DataFlash Logging Module
June 2015
ArduPilot supports transmission of DataFlash logs over MavLink.
This module pokes the UAV to start sending logs, and stores them in a local directory.
The relevant code in the ArduPilot code base can be found in libraries/DataFlash/DataFlash_MAVLink.*
'''
import logging
import os
import os.path
import threading
import types
import sys
from pymavlink import mavutil
import random
import errno
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
import time
from MAVProxy.modules.lib import mp_settings
class dataflash_logger(mp_module.MPModule):
    '''MAVProxy module that captures DataFlash logs streamed over MAVLink
    (REMOTE_LOG_DATA_BLOCK messages) into local .BIN files, ACKing received
    blocks and NACKing/re-requesting missing ones.'''

    def __init__(self, mpstate):
        """Initialise module. We start poking the UAV for messages after this is called"""
        super(dataflash_logger, self).__init__(mpstate, "dataflash_logger", "logging of mavlink dataflash messages")
        self.new_log_started = False   # True once a data block has arrived and a log file is open
        self.stopped = False           # set/cleared via the "dataflash_logger stop/start" commands
        self.time_last_start_packet_sent = 0
        self.time_last_stop_packet_sent = 0
        self.dataflash_dir = self._dataflash_dir(mpstate)
        self.log_settings = mp_settings.MPSettings(
            [ ('verbose', bool, False),
          ])
        self.add_command('dataflash_logger', self.cmd_dataflash_logger, "dataflash logging control", ['status','start','stop','set (LOGSETTING)'])
        self.add_completion_function('(LOGSETTING)', self.log_settings.completion)

    def usage(self):
        '''show help on a command line options'''
        return "Usage: dataflash_logger <status|start|stop|set>"

    def cmd_dataflash_logger(self, args):
        '''control behaviour of the module'''
        if len(args) == 0:
            print (self.usage())
        elif args[0] == "status":
            print (self.status())
        elif args[0] == "stop":
            self.new_log_started = False
            self.stopped = True
        elif args[0] == "start":
            self.stopped = False
        elif args[0] == "set":
            self.log_settings.command(args[1:])
        else:
            print (self.usage())

    def _dataflash_dir(self, mpstate):
        '''returns directory path to store DF logs in. May be relative'''
        if mpstate.settings.state_basedir is None:
            ret = 'dataflash'
        else:
            ret = os.path.join(mpstate.settings.state_basedir,'dataflash')

        # Best-effort creation; an already-existing directory is fine.
        try:
            os.makedirs(ret)
        except OSError as e:
            if e.errno != errno.EEXIST:
                print("DFLogger: OSError making (%s): %s" % (ret, str(e)))
        except Exception as e:
            print("DFLogger: Unknown exception making (%s): %s" % (ret, str(e)))

        return ret

    def new_log_filepath(self):
        '''returns a filepath to a log which does not currently exist and is suitable for DF logging'''
        # LASTLOG.TXT holds the most recently used log number, mirroring the
        # autopilot's own on-board numbering scheme.
        lastlog_filename = os.path.join(self.dataflash_dir,'LASTLOG.TXT')
        if os.path.exists(lastlog_filename) and os.stat(lastlog_filename).st_size != 0:
            fh = open(lastlog_filename,'rb')
            log_cnt = int(fh.read()) + 1
            fh.close()
        else:
            log_cnt = 1

        self.lastlog_file = open(lastlog_filename,'w+b')
        self.lastlog_file.write(log_cnt.__str__())
        self.lastlog_file.close()

        return os.path.join(self.dataflash_dir, '%u.BIN' % (log_cnt,));

    def start_new_log(self):
        '''open a new dataflash log, reset state'''
        filename = self.new_log_filepath()
        self.block_cnt = 0
        self.logfile = open(filename, 'w+b')
        print("DFLogger: logging started (%s)" % (filename))
        self.prev_cnt = 0
        self.download = 0
        self.prev_download = 0
        self.last_idle_status_printed_time = time.time()
        self.last_status_time = time.time()
        # Bookkeeping for the ACK/NACK retransmission protocol:
        self.missing_blocks = {}
        self.acking_blocks = {}
        self.blocks_to_ack_and_nack = []
        self.missing_found = 0
        self.abandoned = 0

    def status(self):
        '''returns information about module'''
        transfered = self.download - self.prev_download
        now = time.time()
        interval = now - self.last_status_time
        self.last_status_time = now
        return("DFLogger: %(state)s Rate(%(interval)ds):%(rate).3fkB/s Block:%(block_cnt)d Missing:%(missing)d Fixed:%(fixed)d Abandoned:%(abandoned)d" %
               {"interval": interval,
                "rate": transfered/(interval*1000),
                "block_cnt": self.block_cnt,
                "missing": len(self.missing_blocks),
                "fixed": self.missing_found,
                "abandoned": self.abandoned,
                "state": "Inactive" if self.stopped else "Active"
               })

    def idle_print_status(self):
        '''print out statistics every 10 seconds from idle loop'''
        now = time.time()
        if (now - self.last_idle_status_printed_time) >= 10:
            print (self.status())
            self.last_idle_status_printed_time = now
            self.prev_download = self.download

    def idle_send_acks_and_nacks(self):
        '''Send packets to UAV in idle loop'''
        # Rate-limit outgoing status packets per idle pass.
        max_blocks_to_send = 10
        blocks_sent = 0
        i=0
        now = time.time()
        while i < len(self.blocks_to_ack_and_nack) and blocks_sent < max_blocks_to_send:
#            print("ACKLIST: %s" % ([x[1] for x in self.blocks_to_ack_and_nack],))
            stuff = self.blocks_to_ack_and_nack[i]
            [master, block, status, first_sent, last_sent] = stuff

            if status == 1:
                # status == 1: ACK a received block, then drop it from the queue.
#                print("DFLogger: ACKing block (%d)" % (block,))
                self.master.mav.remote_log_block_status_send(block,status)
                blocks_sent += 1
                del self.acking_blocks[block]
                del self.blocks_to_ack_and_nack[i]
                continue

            if block not in self.missing_blocks:
                # we've received this block now
                del self.blocks_to_ack_and_nack[i]
                continue

            # give up on packet if we have seen one with a much higher
            # number:
            if self.block_cnt - block > 200 or \
               now - first_sent > 60:
                print("DFLogger: Abandoning block (%d)" % (block,))
                del self.blocks_to_ack_and_nack[i]
                del self.missing_blocks[block]
                self.abandoned += 1
                continue

            i += 1

            # only send each nack every-so-often:
            if last_sent is not None:
                if now - last_sent < 0.1:
                    continue

            print("DFLogger: NACKing block (%d)" % (block,))
            self.master.mav.remote_log_block_status_send(block,status)
            blocks_sent += 1
            stuff[4] = now

    def idle_task_started(self):
        '''called in idle task only when logging is started'''
        if self.log_settings.verbose:
            self.idle_print_status()
        self.idle_send_acks_and_nacks()

    def idle_task(self):
        '''MAVProxy idle callback: only does work while a log is being received.'''
        if self.new_log_started == True:
            self.idle_task_started()

    def mavlink_packet(self, m):
        '''handle REMOTE_LOG_DATA_BLOCK packets'''
        now = time.time()
        if m.get_type() == 'REMOTE_LOG_DATA_BLOCK':
            if self.stopped:
                # send a stop packet every second until the other end gets the idea:
                if now - self.time_last_stop_packet_sent > 1:
                    if self.log_settings.verbose:
                        print("DFLogger: Sending stop packet")
                    self.master.mav.remote_log_block_status_send(mavutil.mavlink.MAV_REMOTE_LOG_DATA_BLOCK_STOP,1)
                return
#            if random.random() < 0.1: # drop 1 packet in 10
#                return
            if not self.new_log_started:
                if self.log_settings.verbose:
                    print("DFLogger: Received data packet - starting new log")
                self.start_new_log()
                self.new_log_started = True
            if self.new_log_started == True:
                # Write the block at its absolute offset so blocks may arrive
                # out of order.
                size = m.block_size
                data = ''.join(str(chr(x)) for x in m.data[:size])
                ofs = size*(m.block_cnt)
                self.logfile.seek(ofs)
                self.logfile.write(data)

                if m.block_cnt in self.missing_blocks:
                    if self.log_settings.verbose:
                        print("DFLogger: Received missing block: %d" % (m.block_cnt,))
                    del self.missing_blocks[m.block_cnt]
                    self.missing_found += 1
                    self.blocks_to_ack_and_nack.append([self.master,m.block_cnt,1,now,None])
                    self.acking_blocks[m.block_cnt] = 1
#                    print("DFLogger: missing blocks: %s" % (str(self.missing_blocks),))
                else:
                    # ACK the block we just got:
                    if m.block_cnt in self.acking_blocks:
                        # already acking this one; we probably sent
                        # multiple nacks and received this one
                        # multiple times
                        pass
                    else:
                        self.blocks_to_ack_and_nack.append([self.master,m.block_cnt,1,now,None])
                        self.acking_blocks[m.block_cnt] = 1
                    # NACK any blocks we haven't seen and should have:
                    if(m.block_cnt - self.block_cnt > 1):
                        for block in range(self.block_cnt+1, m.block_cnt):
                            if block not in self.missing_blocks and \
                               block not in self.acking_blocks:
                                self.missing_blocks[block] = 1
                                if self.log_settings.verbose:
                                    print ("DFLogger: setting %d for nacking" % (block,))
                                self.blocks_to_ack_and_nack.append([self.master,block,0,now,None])
                    #print "\nmissed blocks: ",self.missing_blocks
                    if self.block_cnt < m.block_cnt:
                        self.block_cnt = m.block_cnt
                self.download += size
        elif not self.new_log_started and not self.stopped:
            # send a start packet every second until the other end gets the idea:
            if now - self.time_last_start_packet_sent > 1:
                if self.log_settings.verbose:
                    print("DFLogger: Sending start packet")
                self.master.mav.remote_log_block_status_send(mavutil.mavlink.MAV_REMOTE_LOG_DATA_BLOCK_START,1)
                self.time_last_start_packet_sent = now
def init(mpstate):
    '''initialise module (MAVProxy module-loader entry point)'''
    return dataflash_logger(mpstate) | unknown | codeparrot/codeparrot-clean | |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Add the deferred-rendering bookkeeping columns to wiki_document."""
        # Adding field 'Document.rendered_html'
        db.add_column('wiki_document', 'rendered_html', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)

        # Adding field 'Document.rendered_errors'
        db.add_column('wiki_document', 'rendered_errors', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)

        # Adding field 'Document.defer_rendering'
        db.add_column('wiki_document', 'defer_rendering', self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True), keep_default=False)

        # Adding field 'Document.render_scheduled_at'
        db.add_column('wiki_document', 'render_scheduled_at', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True), keep_default=False)

        # Adding field 'Document.render_started_at'
        db.add_column('wiki_document', 'render_started_at', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True), keep_default=False)

        # Adding field 'Document.last_rendered_at'
        db.add_column('wiki_document', 'last_rendered_at', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True), keep_default=False)
def backwards(self, orm):
    """Drop the columns added by forwards(), in the same order."""
    # Deleting field 'Document.rendered_html'
    db.delete_column('wiki_document', 'rendered_html')
    # Deleting field 'Document.rendered_errors'
    db.delete_column('wiki_document', 'rendered_errors')
    # Deleting field 'Document.defer_rendering'
    db.delete_column('wiki_document', 'defer_rendering')
    # Deleting field 'Document.render_scheduled_at'
    db.delete_column('wiki_document', 'render_scheduled_at')
    # Deleting field 'Document.render_started_at'
    db.delete_column('wiki_document', 'render_started_at')
    # Deleting field 'Document.last_rendered_at'
    db.delete_column('wiki_document', 'last_rendered_at')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tidings.watch': {
'Meta': {'object_name': 'Watch'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'wiki.document': {
'Meta': {'unique_together': "(('parent', 'locale'), ('slug', 'locale'))", 'object_name': 'Document'},
'category': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'current_revision': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'current_for+'", 'null': 'True', 'to': "orm['wiki.Revision']"}),
'defer_rendering': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'html': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_localizable': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_template': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'last_rendered_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'locale': ('kuma.core.fields.LocaleField', [], {'default': "'en-US'", 'max_length': '7', 'db_index': 'True'}),
'mindtouch_page_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'translations'", 'null': 'True', 'to': "orm['wiki.Document']"}),
'parent_topic': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['wiki.Document']"}),
'related_documents': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['wiki.Document']", 'through': "orm['wiki.RelatedDocument']", 'symmetrical': 'False'}),
'render_scheduled_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'render_started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'rendered_errors': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rendered_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'wiki.documenttag': {
'Meta': {'object_name': 'DocumentTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'wiki.editortoolbar': {
'Meta': {'object_name': 'EditorToolbar'},
'code': ('django.db.models.fields.TextField', [], {'max_length': '2000'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_toolbars'", 'to': "orm['auth.User']"}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'wiki.firefoxversion': {
'Meta': {'unique_together': "(('item_id', 'document'),)", 'object_name': 'FirefoxVersion'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'firefox_version_set'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.IntegerField', [], {})
},
'wiki.helpfulvote': {
'Meta': {'object_name': 'HelpfulVote'},
'anonymous_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'null': 'True', 'to': "orm['auth.User']"}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'to': "orm['wiki.Document']"}),
'helpful': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user_agent': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'wiki.operatingsystem': {
'Meta': {'unique_together': "(('item_id', 'document'),)", 'object_name': 'OperatingSystem'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'operating_system_set'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.IntegerField', [], {})
},
'wiki.relateddocument': {
'Meta': {'ordering': "['-in_common']", 'object_name': 'RelatedDocument'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_from'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_common': ('django.db.models.fields.IntegerField', [], {}),
'related': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'related_to'", 'to': "orm['wiki.Document']"})
},
'wiki.reviewtag': {
'Meta': {'object_name': 'ReviewTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'wiki.reviewtaggedrevision': {
'Meta': {'object_name': 'ReviewTaggedRevision'},
'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Revision']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.ReviewTag']"})
},
'wiki.revision': {
'Meta': {'object_name': 'Revision'},
'based_on': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Revision']", 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'content': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_revisions'", 'to': "orm['auth.User']"}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_mindtouch_migration': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'mindtouch_old_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'reviewed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'reviewer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reviewed_revisions'", 'null': 'True', 'to': "orm['auth.User']"}),
'show_toc': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'significance': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'})
},
'wiki.taggeddocument': {
'Meta': {'object_name': 'TaggedDocument'},
'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Document']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.DocumentTag']"})
}
}
complete_apps = ['wiki']
import logging
import re
import sys
import time
import warnings
from contextlib import contextmanager
from functools import wraps
from unittest import TestCase, skipIf, skipUnless
from xml.dom.minidom import Node, parseString
from django.apps import apps
from django.apps.registry import Apps
from django.conf import UserSettingsHolder, settings
from django.core import mail
from django.core.signals import request_started
from django.db import reset_queries
from django.db.models.options import Options
from django.http import request
from django.template import Template
from django.test.signals import setting_changed, template_rendered
from django.urls import get_script_prefix, set_script_prefix
from django.utils import six
from django.utils.decorators import available_attrs
from django.utils.encoding import force_str
from django.utils.translation import deactivate
try:
import jinja2
except ImportError:
jinja2 = None
# Public API of this module; helpers not listed here are internal.
__all__ = (
    'Approximate', 'ContextList', 'isolate_lru_cache', 'get_runner',
    'modify_settings', 'override_settings',
    'requires_tz_support',
    'setup_test_environment', 'teardown_test_environment',
)
# True when the OS lets the process change its time zone (tzset is
# POSIX-only; absent on Windows).
TZ_SUPPORT = hasattr(time, 'tzset')
class Approximate(object):
    """
    Wrapper that compares equal to any value within ``places`` decimal
    places of ``val`` (in the spirit of ``assertAlmostEqual``).
    """
    def __init__(self, val, places=7):
        self.val = val
        self.places = places

    def __repr__(self):
        return repr(self.val)

    def __eq__(self, other):
        if self.val == other:
            return True
        return round(abs(self.val - other), self.places) == 0

    def __ne__(self, other):
        # Python 2 (which this six-based module supports) does not derive
        # ``!=`` from ``__eq__``; define it explicitly so inequality is
        # consistent on both versions.
        return not self == other
class ContextList(list):
    """A wrapper that provides direct key access to context items contained
    in a list of context objects.
    """
    def __getitem__(self, key):
        if isinstance(key, six.string_types):
            # String keys: search each subcontext in order and return the
            # first match, mirroring template context variable lookup.
            for subcontext in self:
                if key in subcontext:
                    return subcontext[key]
            raise KeyError(key)
        else:
            # Integer and slice keys keep normal list behavior.
            return super(ContextList, self).__getitem__(key)

    def __contains__(self, key):
        try:
            self[key]
        except KeyError:
            return False
        return True

    def keys(self):
        """
        Return the flattened set of keys of all subcontexts.
        """
        keys = set()
        for subcontext in self:
            # Renamed from `dict` to avoid shadowing the builtin.
            for context_dict in subcontext:
                keys |= set(context_dict.keys())
        return keys
def instrumented_test_render(self, context):
    """
    An instrumented Template render method, providing a signal
    that can be intercepted by the test system Client
    """
    # Fire the signal before delegating so listeners observe the context
    # exactly as it is about to be rendered.
    template_rendered.send(sender=self, template=self, context=context)
    return self.nodelist.render(context)
def setup_test_environment():
    """
    Perform global pre-test setup, such as installing the instrumented template
    renderer and setting the email backend to the locmem email backend.
    """
    # Swap in the signal-emitting render method, keeping the original so
    # teardown_test_environment() can restore it.
    Template._original_render = Template._render
    Template._render = instrumented_test_render
    # Storing previous values in the settings module itself is problematic.
    # Store them in arbitrary (but related) modules instead. See #20636.
    mail._original_email_backend = settings.EMAIL_BACKEND
    settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
    request._original_allowed_hosts = settings.ALLOWED_HOSTS
    settings.ALLOWED_HOSTS = ['*']
    # Fresh outbox for the locmem backend to collect sent messages into.
    mail.outbox = []
    # Deactivate any active translation so tests start from the default
    # language.
    deactivate()
def teardown_test_environment():
    """
    Perform any global post-test teardown, such as restoring the original
    template renderer and restoring the email sending functions.
    """
    # Each restore mirrors a patch made in setup_test_environment(); the
    # saved attributes are deleted so a second teardown fails loudly rather
    # than silently restoring stale values.
    Template._render = Template._original_render
    del Template._original_render
    settings.EMAIL_BACKEND = mail._original_email_backend
    del mail._original_email_backend
    settings.ALLOWED_HOSTS = request._original_allowed_hosts
    del request._original_allowed_hosts
    del mail.outbox
def get_runner(settings, test_runner_class=None):
    """Import and return the test-runner class named by a dotted path,
    defaulting to ``settings.TEST_RUNNER``."""
    if not test_runner_class:
        test_runner_class = settings.TEST_RUNNER
    module_path, _, class_name = test_runner_class.rpartition('.')
    if not module_path:
        # Allow for Python 2.5 relative paths
        module_path = '.'
    module = __import__(module_path, {}, {}, force_str(class_name))
    return getattr(module, class_name)
class TestContextDecorator(object):
    """
    A base class that can either be used as a context manager during tests
    or as a test function or unittest.TestCase subclass decorator to perform
    temporary alterations.
    `attr_name`: attribute assigned the return value of enable() if used as
    a class decorator.
    `kwarg_name`: keyword argument passing the return value of enable() if
    used as a function decorator.
    """
    def __init__(self, attr_name=None, kwarg_name=None):
        self.attr_name = attr_name
        self.kwarg_name = kwarg_name

    def enable(self):
        # Subclasses apply the temporary alteration here; the return value
        # (if any) is exposed to the decorated test.
        raise NotImplementedError

    def disable(self):
        # Subclasses revert whatever enable() changed.
        raise NotImplementedError

    def __enter__(self):
        return self.enable()

    def __exit__(self, exc_type, exc_value, traceback):
        self.disable()

    def decorate_class(self, cls):
        if issubclass(cls, TestCase):
            decorated_setUp = cls.setUp
            decorated_tearDown = cls.tearDown

            # Wrap setUp/tearDown so the alteration is active for the whole
            # lifetime of each test method.
            def setUp(inner_self):
                context = self.enable()
                if self.attr_name:
                    setattr(inner_self, self.attr_name, context)
                decorated_setUp(inner_self)

            def tearDown(inner_self):
                decorated_tearDown(inner_self)
                self.disable()
            cls.setUp = setUp
            cls.tearDown = tearDown
            return cls
        raise TypeError('Can only decorate subclasses of unittest.TestCase')

    def decorate_callable(self, func):
        @wraps(func, assigned=available_attrs(func))
        def inner(*args, **kwargs):
            # Re-enter the context manager on every call so each invocation
            # gets a fresh enable()/disable() cycle.
            with self as context:
                if self.kwarg_name:
                    kwargs[self.kwarg_name] = context
                return func(*args, **kwargs)
        return inner

    def __call__(self, decorated):
        if isinstance(decorated, type):
            return self.decorate_class(decorated)
        elif callable(decorated):
            return self.decorate_callable(decorated)
        raise TypeError('Cannot decorate object of type %s' % type(decorated))
class override_settings(TestContextDecorator):
    """
    Acts as either a decorator or a context manager. If it's a decorator it
    takes a function and returns a wrapped function. If it's a contextmanager
    it's used with the ``with`` statement. In either event entering/exiting
    are called before and after, respectively, the function/block is executed.
    """
    def __init__(self, **kwargs):
        # Each keyword argument is a setting name mapped to its override
        # value.
        self.options = kwargs
        super(override_settings, self).__init__()

    def enable(self):
        # Keep this code at the beginning to leave the settings unchanged
        # in case it raises an exception because INSTALLED_APPS is invalid.
        if 'INSTALLED_APPS' in self.options:
            try:
                apps.set_installed_apps(self.options['INSTALLED_APPS'])
            except Exception:
                apps.unset_installed_apps()
                raise
        # Layer the overrides on top of the current settings instead of
        # mutating them, so disable() can simply restore the old wrapper.
        override = UserSettingsHolder(settings._wrapped)
        for key, new_value in self.options.items():
            setattr(override, key, new_value)
        self.wrapped = settings._wrapped
        settings._wrapped = override
        for key, new_value in self.options.items():
            setting_changed.send(sender=settings._wrapped.__class__,
                                 setting=key, value=new_value, enter=True)

    def disable(self):
        if 'INSTALLED_APPS' in self.options:
            apps.unset_installed_apps()
        settings._wrapped = self.wrapped
        del self.wrapped
        for key in self.options:
            # The restored value may be a default or another active
            # override, so re-read it from settings.
            new_value = getattr(settings, key, None)
            setting_changed.send(sender=settings._wrapped.__class__,
                                 setting=key, value=new_value, enter=False)

    def save_options(self, test_func):
        if test_func._overridden_settings is None:
            test_func._overridden_settings = self.options
        else:
            # Duplicate dict to prevent subclasses from altering their parent.
            test_func._overridden_settings = dict(
                test_func._overridden_settings, **self.options)

    def decorate_class(self, cls):
        from django.test import SimpleTestCase
        if not issubclass(cls, SimpleTestCase):
            raise ValueError(
                "Only subclasses of Django SimpleTestCase can be decorated "
                "with override_settings")
        # SimpleTestCase applies _overridden_settings itself in setUpClass,
        # so only record the options here rather than wrapping setUp.
        self.save_options(cls)
        return cls
class modify_settings(override_settings):
    """
    Like override_settings, but makes it possible to append, prepend or remove
    items instead of redefining the entire list.
    """
    def __init__(self, *args, **kwargs):
        if args:
            # Hack used when instantiating from SimpleTestCase.setUpClass.
            assert not kwargs
            self.operations = args[0]
        else:
            assert not args
            self.operations = list(kwargs.items())
        # Deliberately skips override_settings.__init__ (which would record
        # the kwargs as setting overrides); only TestContextDecorator's
        # initialisation is wanted here.
        super(override_settings, self).__init__()

    def save_options(self, test_func):
        if test_func._modified_settings is None:
            test_func._modified_settings = self.operations
        else:
            # Duplicate list to prevent subclasses from altering their parent.
            test_func._modified_settings = list(
                test_func._modified_settings) + self.operations

    def enable(self):
        self.options = {}
        for name, operations in self.operations:
            try:
                # When called from SimpleTestCase.setUpClass, values may be
                # overridden several times; cumulate changes.
                value = self.options[name]
            except KeyError:
                value = list(getattr(settings, name, []))
            for action, items in operations.items():
                # items may be a single value or an iterable.
                if isinstance(items, six.string_types):
                    items = [items]
                if action == 'append':
                    value = value + [item for item in items if item not in value]
                elif action == 'prepend':
                    value = [item for item in items if item not in value] + value
                elif action == 'remove':
                    value = [item for item in value if item not in items]
                else:
                    raise ValueError("Unsupported action: %s" % action)
            self.options[name] = value
        # Apply the computed lists through override_settings.enable().
        super(modify_settings, self).enable()
class override_system_checks(TestContextDecorator):
    """
    Acts as a decorator. Overrides list of registered system checks.
    Useful when you override `INSTALLED_APPS`, e.g. if you exclude `auth` app,
    you also need to exclude its system checks.
    """
    def __init__(self, new_checks, deployment_checks=None):
        from django.core.checks.registry import registry
        self.registry = registry
        self.new_checks = new_checks
        self.deployment_checks = deployment_checks
        super(override_system_checks, self).__init__()

    def enable(self):
        self.old_checks = self.registry.registered_checks
        self.registry.registered_checks = self.new_checks
        # Deployment checks are saved unconditionally so disable() can
        # always restore them, but only replaced when an override was given.
        self.old_deployment_checks = self.registry.deployment_checks
        if self.deployment_checks is not None:
            self.registry.deployment_checks = self.deployment_checks

    def disable(self):
        self.registry.registered_checks = self.old_checks
        self.registry.deployment_checks = self.old_deployment_checks
def compare_xml(want, got):
    """Tries to do a 'xml-comparison' of want and got. Plain string
    comparison doesn't always work because, for example, attribute
    ordering should not be important. Comment nodes are not considered in the
    comparison. Leading and trailing whitespace is ignored on both chunks.
    Based on https://github.com/lxml/lxml/blob/master/src/lxml/doctestcompare.py
    """
    _norm_whitespace_re = re.compile(r'[ \t\n][ \t\n]+')

    def norm_whitespace(v):
        # Collapse runs of whitespace into a single space.
        return _norm_whitespace_re.sub(' ', v)

    def child_text(element):
        # Concatenated text of the element's direct text children only.
        return ''.join(c.data for c in element.childNodes
                       if c.nodeType == Node.TEXT_NODE)

    def children(element):
        return [c for c in element.childNodes
                if c.nodeType == Node.ELEMENT_NODE]

    def norm_child_text(element):
        return norm_whitespace(child_text(element))

    def attrs_dict(element):
        return dict(element.attributes.items())

    def check_element(want_element, got_element):
        # Recursive structural comparison: tag name, normalized text,
        # attributes (order-insensitive via dict), then child elements
        # pairwise in document order.
        if want_element.tagName != got_element.tagName:
            return False
        if norm_child_text(want_element) != norm_child_text(got_element):
            return False
        if attrs_dict(want_element) != attrs_dict(got_element):
            return False
        want_children = children(want_element)
        got_children = children(got_element)
        if len(want_children) != len(got_children):
            return False
        for want, got in zip(want_children, got_children):
            if not check_element(want, got):
                return False
        return True

    def first_node(document):
        # Skip leading comment nodes so they don't affect the comparison.
        for node in document.childNodes:
            if node.nodeType != Node.COMMENT_NODE:
                return node

    want, got = strip_quotes(want, got)
    want = want.strip().replace('\\n', '\n')
    got = got.strip().replace('\\n', '\n')
    # If the string is not a complete xml document, we may need to add a
    # root element. This allow us to compare fragments, like "<foo/><bar/>"
    if not want.startswith('<?xml'):
        wrapper = '<root>%s</root>'
        want = wrapper % want
        got = wrapper % got
    # Parse the want and got strings, and compare the parsings.
    want_root = first_node(parseString(want))
    got_root = first_node(parseString(got))
    return check_element(want_root, got_root)
def strip_quotes(want, got):
    """
    Strip matching quotes from doctest output values, e.g. mapping
    ``"'foo'"`` to ``"foo"`` and ``'u"foo"'`` to ``"foo"``.
    Both strings must be quoted in the same fashion (plain or u-prefixed)
    for anything to be stripped; otherwise both are returned unchanged.
    """
    def quote_kind(s):
        # 2 for a u-prefixed quoted literal, 1 for a plain quoted literal,
        # 0 otherwise. The value doubles as the index of the opening
        # quote's successor, i.e. where the payload starts.
        s = s.strip()
        if len(s) >= 3 and s[0] == 'u' and s[1] == s[-1] and s[1] in ('"', "'"):
            return 2
        if len(s) >= 2 and s[0] == s[-1] and s[0] in ('"', "'"):
            return 1
        return 0

    want_kind = quote_kind(want)
    if want_kind and want_kind == quote_kind(got):
        want = want.strip()[want_kind:-1]
        got = got.strip()[want_kind:-1]
    return want, got
def str_prefix(s):
    # Interpolate the '%(_)s' placeholder with 'u' on Python 2 (where text
    # reprs look like u'...') and '' on Python 3, so expected-output strings
    # can be written once for both versions.
    return s % {'_': '' if six.PY3 else 'u'}
class CaptureQueriesContext(object):
    """
    Context manager that captures queries executed by the specified connection.
    """
    def __init__(self, connection):
        self.connection = connection

    def __iter__(self):
        return iter(self.captured_queries)

    def __getitem__(self, index):
        return self.captured_queries[index]

    def __len__(self):
        return len(self.captured_queries)

    @property
    def captured_queries(self):
        # final_queries is None while the context is still open; a None
        # slice end means "through the most recent query", which is the
        # desired live view.
        return self.connection.queries[self.initial_queries:self.final_queries]

    def __enter__(self):
        # Force query logging even when DEBUG is off, remembering the
        # previous flag so __exit__ can restore it.
        self.force_debug_cursor = self.connection.force_debug_cursor
        self.connection.force_debug_cursor = True
        self.initial_queries = len(self.connection.queries_log)
        self.final_queries = None
        # Prevent the request_started signal from clearing the query log
        # mid-capture.
        request_started.disconnect(reset_queries)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.force_debug_cursor = self.force_debug_cursor
        request_started.connect(reset_queries)
        if exc_type is not None:
            return
        self.final_queries = len(self.connection.queries_log)
class ignore_warnings(TestContextDecorator):
    """
    Decorator/context manager that silences warnings matching the given
    ``warnings.filterwarnings`` keyword arguments while active.
    """
    def __init__(self, **kwargs):
        self.ignore_kwargs = kwargs
        # 'message'/'module' filters require the full filterwarnings() API;
        # otherwise the simpler simplefilter() suffices.
        if 'message' in kwargs or 'module' in kwargs:
            self.filter_func = warnings.filterwarnings
        else:
            self.filter_func = warnings.simplefilter
        super(ignore_warnings, self).__init__()

    def enable(self):
        # Enter catch_warnings manually (not via `with`) because enable()
        # and disable() happen in separate calls.
        self.catch_warnings = warnings.catch_warnings()
        self.catch_warnings.__enter__()
        self.filter_func('ignore', **self.ignore_kwargs)

    def disable(self):
        self.catch_warnings.__exit__(*sys.exc_info())
@contextmanager
def patch_logger(logger_name, log_level, log_kwargs=False):
    """
    Temporarily replace the ``log_level`` method of the named logger with a
    recorder, yielding the list of messages received. Each entry is the
    %-formatted message, or a ``(message, kwargs)`` pair when ``log_kwargs``
    is true.
    """
    recorded = []

    def record(msg, *args, **kwargs):
        rendered = msg % args
        recorded.append((rendered, kwargs) if log_kwargs else rendered)

    target = logging.getLogger(logger_name)
    original = getattr(target, log_level)
    setattr(target, log_level, record)
    try:
        yield recorded
    finally:
        setattr(target, log_level, original)
# On OSes that don't provide tzset (Windows), we can't set the timezone
# in which the program runs. As a consequence, we must skip tests that
# don't enforce a specific timezone (with timezone.override or equivalent),
# or attempt to interpret naive datetimes in the default timezone.
# Usable as a decorator on a test function or class.
requires_tz_support = skipUnless(
    TZ_SUPPORT,
    "This test relies on the ability to run a program in an arbitrary "
    "time zone, but your operating system isn't able to do that."
)
@contextmanager
def extend_sys_path(*paths):
    """Temporarily append *paths* to sys.path, restoring the original
    search path (including its order) on exit."""
    saved_path = list(sys.path)
    sys.path.extend(paths)
    try:
        yield
    finally:
        sys.path = saved_path
@contextmanager
def isolate_lru_cache(lru_cache_object):
    """Run the wrapped block with *lru_cache_object* emptied, clearing it
    again on exit so cached entries never leak across tests."""
    lru_cache_object.cache_clear()
    try:
        yield
    finally:
        lru_cache_object.cache_clear()
@contextmanager
def captured_output(stream_name):
    """Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.
    Note: This function and the following ``captured_std*`` are copied
    from CPython's ``test.support`` module."""
    original = getattr(sys, stream_name)
    setattr(sys, stream_name, six.StringIO())
    try:
        # Re-read the attribute rather than yielding a local so the caller
        # sees exactly what sys currently exposes.
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, original)
def captured_stdout():
    """Capture the output of sys.stdout:
    with captured_stdout() as stdout:
        print("hello")
    self.assertEqual(stdout.getvalue(), "hello\n")
    """
    # Thin wrapper over captured_output().
    return captured_output("stdout")
def captured_stderr():
    """Capture the output of sys.stderr:
    with captured_stderr() as stderr:
        print("hello", file=sys.stderr)
    self.assertEqual(stderr.getvalue(), "hello\n")
    """
    # Thin wrapper over captured_output().
    return captured_output("stderr")
def captured_stdin():
    """Capture the input to sys.stdin:
    with captured_stdin() as stdin:
        stdin.write('hello\n')
        stdin.seek(0)
        # call test code that consumes from sys.stdin
        captured = input()
    self.assertEqual(captured, "hello")
    """
    # Thin wrapper over captured_output().
    return captured_output("stdin")
def reset_warning_registry():
    """
    Clear warning registry for all modules. This is required in some tests
    because of a bug in Python that prevents warnings.simplefilter("always")
    from always making warnings appear: http://bugs.python.org/issue4180
    The bug was fixed in Python 3.4.2.
    """
    for module in list(sys.modules.values()):
        if hasattr(module, "__warningregistry__"):
            module.__warningregistry__.clear()
@contextmanager
def freeze_time(t):
    """
    Context manager to temporarily freeze time.time() at the value *t*.
    This temporarily modifies the time function of the time module; modules
    which imported the function directly (``from time import time``) are
    unaffected. Not a public API — it just reduces repetition in Django's
    test suite.
    """
    saved_time_func = time.time
    time.time = lambda: t
    try:
        yield
    finally:
        time.time = saved_time_func
def require_jinja2(test_func):
    """
    Decorator to enable a Jinja2 template engine in addition to the regular
    Django template engine for a test or skip it if Jinja2 isn't available.
    """
    test_func = skipIf(jinja2 is None, "this test requires jinja2")(test_func)
    # Register both backends; keep_trailing_newline makes Jinja2 output
    # match Django template output so the same assertions fit either engine.
    test_func = override_settings(TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
    }, {
        'BACKEND': 'django.template.backends.jinja2.Jinja2',
        'APP_DIRS': True,
        'OPTIONS': {'keep_trailing_newline': True},
    }])(test_func)
    return test_func
class override_script_prefix(TestContextDecorator):
    """
    Decorator or context manager to temporary override the script prefix.
    """
    def __init__(self, prefix):
        self.prefix = prefix
        super(override_script_prefix, self).__init__()

    def enable(self):
        # Remember the current prefix so disable() can restore it.
        self.old_prefix = get_script_prefix()
        set_script_prefix(self.prefix)

    def disable(self):
        set_script_prefix(self.old_prefix)
class LoggingCaptureMixin(object):
    """
    Capture the output from the 'django' logger and store it on the class's
    logger_output attribute.
    """
    def setUp(self):
        self.logger = logging.getLogger('django')
        # NOTE(review): assumes the 'django' logger has at least one handler
        # exposing a ``stream`` attribute — confirm the logging config
        # before reusing this mixin elsewhere.
        self.old_stream = self.logger.handlers[0].stream
        self.logger_output = six.StringIO()
        self.logger.handlers[0].stream = self.logger_output

    def tearDown(self):
        self.logger.handlers[0].stream = self.old_stream
class isolate_apps(TestContextDecorator):
    """
    Act as either a decorator or a context manager to register models defined
    in its wrapped context to an isolated registry.
    The list of installed apps the isolated registry should contain must be
    passed as arguments.
    Two optional keyword arguments can be specified:
    `attr_name`: attribute assigned the isolated registry if used as a class
    decorator.
    `kwarg_name`: keyword argument passing the isolated registry if used as a
    function decorator.
    """
    def __init__(self, *installed_apps, **kwargs):
        self.installed_apps = installed_apps
        super(isolate_apps, self).__init__(**kwargs)

    def enable(self):
        # Swap the registry that newly-defined model classes default to, so
        # models created inside the context don't pollute the global app
        # registry.
        self.old_apps = Options.default_apps
        apps = Apps(self.installed_apps)
        setattr(Options, 'default_apps', apps)
        return apps

    def disable(self):
        setattr(Options, 'default_apps', self.old_apps)
def tag(*tags):
    """
    Decorator to add tags to a test class or method.
    Tags accumulate when the decorator is stacked, so ``@tag('slow')``
    above ``@tag('db')`` yields ``{'slow', 'db'}``.
    """
    def decorator(obj):
        if hasattr(obj, 'tags'):
            # Merge with tags applied by an earlier (inner) @tag decorator
            # instead of silently discarding them. union() returns a new
            # set, so an inherited class attribute is never mutated.
            obj.tags = obj.tags.union(tags)
        else:
            setattr(obj, 'tags', set(tags))
        return obj
    return decorator
# Copyright 2009-2016, Damian Johnson and The Tor Project
# See LICENSE for licensing information
"""
Top panel for every page, containing basic system and tor related information.
This expands the information it presents to two columns if there's room
available.
"""
import os
import time
import stem
import stem.control
import stem.util.proc
import stem.util.str_tools
import stem.util.system
import nyx.controller
import nyx.curses
import nyx.panel
import nyx.popups
import nyx.tracker
from stem.util import conf, log
from nyx import msg, tor_controller
from nyx.curses import RED, GREEN, YELLOW, CYAN, WHITE, BOLD, HIGHLIGHT
MIN_DUAL_COL_WIDTH = 141  # minimum width where we'll show two columns
SHOW_FD_THRESHOLD = 60  # show file descriptor usage if usage is over this percentage
UPDATE_RATE = 5  # rate in seconds at which we refresh
CONFIG = conf.config_dict('nyx', {
    'attr.flag_colors': {},  # per-flag color overrides used by _draw_flags()
    'attr.version_status_colors': {},  # per-status color overrides used by _draw_platform_section()
    'tor.chroot': '',  # chroot path passed to controller.reconnect()
})
class HeaderPanel(nyx.panel.DaemonPanel):
    """
    Top area containing tor settings and system information.

    Redraws itself every UPDATE_RATE seconds and whenever the controller's
    connection status changes (via reset_listener).
    """

    def __init__(self):
        nyx.panel.DaemonPanel.__init__(self, 'header', UPDATE_RATE)

        self._vals = Sampling.create()  # latest snapshot of tor/system attributes
        self._last_width = nyx.curses.screen_size().width
        self._reported_inactive = False  # whether we've already logged the relay as unresponsive

        # optional message overriding the usage line at the bottom of the header
        self._message = None
        self._message_attr = []

        tor_controller().add_status_listener(self.reset_listener)

    def show_message(self, message = None, *attr, **kwargs):
        """
        Sets the message displayed at the bottom of the header. If not called with
        anything it clears the override.

        :param str message: message to be displayed
        :param list attr: text attributes to apply
        :param int max_wait: seconds to wait for user input, no limit if **None**

        :returns: :class:`~nyx.curses.KeyInput` user pressed if provided a
          **max_wait**, **None** otherwise or if prompt was canceled
        """

        self._message = message
        self._message_attr = attr
        self.redraw(True)

        if 'max_wait' in kwargs:
            user_input = nyx.curses.key_input(kwargs['max_wait'])
            self.show_message()  # clear override
            return user_input

    def is_wide(self):
        """
        True if we should show two columns of information, False otherwise.
        """

        return self._last_width >= MIN_DUAL_COL_WIDTH

    def get_height(self):
        """
        Provides the height of the content, which is dynamically determined by the
        panel's maximum width.
        """

        # Relays get extra rows (flags, exit policy, fd usage); the wide
        # layout packs the same content into fewer rows.

        if self._vals.is_relay:
            return 5 if self.is_wide() else 7
        else:
            return 4 if self.is_wide() else 5

    def send_newnym(self):
        """
        Requests a new identity and provides a visual queue.
        """

        controller = tor_controller()

        if not controller.is_newnym_available():
            return

        controller.signal(stem.Signal.NEWNYM)

        # If we're wide then the newnym label in this panel will give an
        # indication that the signal was sent. Otherwise use a msg.

        if not self.is_wide():
            self.show_message('Requesting a new identity', HIGHLIGHT, max_wait = 1)

    def key_handlers(self):
        # Returns handlers for 'n' (new identity) and 'r' (reconnect).

        def _reconnect():
            if self._vals.is_connected:
                return

            controller = tor_controller()
            self.show_message('Reconnecting...', HIGHLIGHT)

            try:
                try:
                    controller.reconnect(chroot_path = CONFIG['tor.chroot'])
                except stem.connection.MissingPassword:
                    # controller is password protected, prompt for it
                    password = nyx.controller.input_prompt('Controller Password: ')

                    if password:
                        controller.authenticate(password)

                log.notice("Reconnected to Tor's control port")
                self.show_message('Tor reconnected', HIGHLIGHT, max_wait = 1)
            except Exception as exc:
                self.show_message('Unable to reconnect (%s)' % exc, HIGHLIGHT, max_wait = 3)
                controller.close()

        return (
            nyx.panel.KeyHandler('n', action = self.send_newnym),
            nyx.panel.KeyHandler('r', action = _reconnect),
        )

    def draw(self, subwindow):
        vals = self._vals  # local reference to avoid concurrency concerns
        self._last_width = subwindow.width
        is_wide = self.is_wide()

        # space available for content
        # NOTE(review): '/' here relies on Python 2 integer division

        left_width = max(subwindow.width / 2, 77) if is_wide else subwindow.width
        right_width = subwindow.width - left_width

        pause_time = self.get_pause_time() if self.is_paused() else None

        _draw_platform_section(subwindow, 0, 0, left_width, vals)

        if vals.is_connected:
            _draw_ports_section(subwindow, 0, 1, left_width, vals)
        else:
            _draw_disconnected(subwindow, 0, 1, vals.last_heartbeat)

        if is_wide:
            # two-column layout: resources on the right, relay info below
            _draw_resource_usage(subwindow, left_width, 0, right_width, vals, pause_time)

            if vals.is_relay:
                _draw_fingerprint_and_fd_usage(subwindow, left_width, 1, right_width, vals)
                _draw_flags(subwindow, 0, 2, vals.flags)
                _draw_exit_policy(subwindow, left_width, 2, vals.exit_policy)
            elif vals.is_connected:
                _draw_newnym_option(subwindow, left_width, 1, vals.newnym_wait)
        else:
            # single-column layout stacks everything vertically
            _draw_resource_usage(subwindow, 0, 2, left_width, vals, pause_time)

            if vals.is_relay:
                _draw_fingerprint_and_fd_usage(subwindow, 0, 3, left_width, vals)
                _draw_flags(subwindow, 0, 4, vals.flags)

        _draw_status(subwindow, 0, self.get_height() - 1, self.is_paused(), self._message, *self._message_attr)

    def reset_listener(self, controller, event_type, _):
        # Controller status callback: resample and note closed connections.
        self._update()

        if event_type == stem.control.State.CLOSED:
            log.notice('Tor control port closed')

    def _update(self):
        # Resample attributes and emit one-time warnings about file
        # descriptor exhaustion and heartbeat responsiveness.
        self._vals = Sampling.create(self._vals)

        if self._vals.fd_used and self._vals.fd_limit != -1:
            fd_percent = 100 * self._vals.fd_used / self._vals.fd_limit

            if fd_percent >= 90:
                log_msg = msg('panel.header.fd_used_at_ninety_percent', percentage = fd_percent)
                log.log_once('fd_used_at_ninety_percent', log.WARN, log_msg)
                # suppress the milder 60% warning once we've warned at 90%
                log.DEDUPLICATION_MESSAGE_IDS.add('fd_used_at_sixty_percent')
            elif fd_percent >= 60:
                log_msg = msg('panel.header.fd_used_at_sixty_percent', percentage = fd_percent)
                log.log_once('fd_used_at_sixty_percent', log.NOTICE, log_msg)

        if self._vals.is_connected:
            # flag the relay as unresponsive after 10s without a heartbeat
            if not self._reported_inactive and (time.time() - self._vals.last_heartbeat) >= 10:
                self._reported_inactive = True
                log.notice('Relay unresponsive (last heartbeat: %s)' % time.ctime(self._vals.last_heartbeat))
            elif self._reported_inactive and (time.time() - self._vals.last_heartbeat) < 10:
                self._reported_inactive = False
                log.notice('Relay resumed')

        self.redraw(True)
class Sampling(object):
    """
    Immutable snapshot of the tor and system attributes the header panel
    renders. Attributes are exposed both as instance attributes and via
    format() string substitution.
    """

    def __init__(self, **attr):
        self._attr = attr  # retained for str.format() substitution in format()

        for key, value in attr.items():
            setattr(self, key, value)

    @staticmethod
    def create(last_sampling = None):
        """
        Samples the controller and system for a fresh snapshot.

        :param Sampling last_sampling: previous snapshot, used to compute
          nyx's cpu usage delta (0.0 when absent)

        :returns: new :class:`Sampling` instance
        """

        controller = tor_controller()
        retrieved = time.time()
        pid = controller.get_pid('')
        tor_resources = nyx.tracker.get_resource_tracker().get_value()
        # our own cpu time: user + system + children user times, plus syscall time
        nyx_total_cpu_time = sum(os.times()[:3], stem.util.system.SYSTEM_CALL_TIME)

        or_listeners = controller.get_listeners(stem.control.Listener.OR, [])
        control_listeners = controller.get_listeners(stem.control.Listener.CONTROL, [])

        # determine how the control port is protected

        if controller.get_conf('HashedControlPassword', None):
            auth_type = 'password'
        elif controller.get_conf('CookieAuthentication', None) == '1':
            auth_type = 'cookie'
        else:
            auth_type = 'open'

        try:
            fd_used = stem.util.proc.file_descriptors_used(pid)
        except IOError:
            fd_used = None  # proc contents unavailable

        if last_sampling:
            # cpu usage is the delta since our last sampling
            nyx_cpu_delta = nyx_total_cpu_time - last_sampling.nyx_total_cpu_time
            nyx_time_delta = retrieved - last_sampling.retrieved
            nyx_cpu = nyx_cpu_delta / nyx_time_delta
        else:
            nyx_cpu = 0.0

        attr = {
            'retrieved': retrieved,
            'is_connected': controller.is_alive(),
            'connection_time': controller.connection_time(),
            'last_heartbeat': controller.get_latest_heartbeat(),

            'fingerprint': controller.get_info('fingerprint', 'Unknown'),
            'nickname': controller.get_conf('Nickname', ''),
            'newnym_wait': controller.get_newnym_wait(),
            'exit_policy': controller.get_exit_policy(None),
            'flags': getattr(controller.get_network_status(default = None), 'flags', []),

            'version': str(controller.get_version('Unknown')).split()[0],
            'version_status': controller.get_info('status/version/current', 'Unknown'),

            # prefer the OR listener's address unless it's the unspecified 0.0.0.0
            'address': or_listeners[0][0] if (or_listeners and or_listeners[0][0] != '0.0.0.0') else controller.get_info('address', 'Unknown'),
            'or_port': or_listeners[0][1] if or_listeners else '',
            'dir_port': controller.get_conf('DirPort', '0'),
            'control_port': str(control_listeners[0][1]) if control_listeners else None,
            'socket_path': controller.get_conf('ControlSocket', None),
            'is_relay': bool(or_listeners),
            'auth_type': auth_type,

            'pid': pid,
            'start_time': stem.util.system.start_time(pid),
            'fd_limit': int(controller.get_info('process/descriptor-limit', '-1')),
            'fd_used': fd_used,

            'nyx_total_cpu_time': nyx_total_cpu_time,
            'tor_cpu': '%0.1f' % (100 * tor_resources.cpu_sample),
            'nyx_cpu': '%0.1f' % (nyx_cpu),
            'memory': stem.util.str_tools.size_label(tor_resources.memory_bytes) if tor_resources.memory_bytes > 0 else 0,
            'memory_percent': '%0.1f' % (100 * tor_resources.memory_percent),
            'hostname': os.uname()[1],
            'platform': '%s %s' % (os.uname()[0], os.uname()[2]),  # [platform name] [version]
        }

        return Sampling(**attr)

    def format(self, message, crop_width = None):
        """
        Expands '{attribute}' placeholders in the message with our sampled
        values, optionally cropping the result to crop_width characters.
        """

        formatted_msg = message.format(**self._attr)

        if crop_width is not None:
            formatted_msg = stem.util.str_tools.crop(formatted_msg, crop_width)

        return formatted_msg
def _draw_platform_section(subwindow, x, y, width, vals):
    """
    Section providing the user's hostname, platform, and version information...

      nyx - odin (Linux 3.5.0-52-generic)        Tor 0.2.5.1-alpha-dev (unrecommended)
      |------ platform (40 characters) ------|   |----------- tor version -----------|
    """

    initial_x, space_left = x, min(width, 40)  # platform column capped at 40 chars

    x = subwindow.addstr(x, y, vals.format('nyx - {hostname}', space_left))
    space_left -= x - initial_x

    if space_left >= 10:
        # ' (%s)' wrapper costs 3 characters beyond the platform string itself
        subwindow.addstr(x, y, ' (%s)' % vals.format('{platform}', space_left - 3))

    # tor version column begins at a fixed offset of 43
    x, space_left = initial_x + 43, width - 43

    if vals.version != 'Unknown' and space_left >= 10:
        x = subwindow.addstr(x, y, vals.format('Tor {version}', space_left))
        space_left -= x - 43 - initial_x

        # ' (status)' suffix needs 7 extra chars of room
        if space_left >= 7 + len(vals.version_status):
            version_color = CONFIG['attr.version_status_colors'].get(vals.version_status, WHITE)

            x = subwindow.addstr(x, y, ' (')
            x = subwindow.addstr(x, y, vals.version_status, version_color)
            subwindow.addstr(x, y, ')')
def _draw_ports_section(subwindow, x, y, width, vals):
    """
    Section providing our nickname, address, and port information...

      Unnamed - 0.0.0.0:7000, Control Port (cookie): 9051
    """

    if not vals.is_relay:
        x = subwindow.addstr(x, y, 'Relaying Disabled', CYAN)
    else:
        x = subwindow.addstr(x, y, vals.format('{nickname} - {address}:{or_port}'))

        if vals.dir_port != '0':
            x = subwindow.addstr(x, y, vals.format(', Dir Port: {dir_port}'))

    if vals.control_port:
        # show the auth type in color if there's room for the longer label
        if width >= x + 19 + len(vals.control_port) + len(vals.auth_type):
            auth_color = RED if vals.auth_type == 'open' else GREEN

            x = subwindow.addstr(x, y, ', Control Port (')
            x = subwindow.addstr(x, y, vals.auth_type, auth_color)
            subwindow.addstr(x, y, vals.format('): {control_port}'))
        else:
            subwindow.addstr(x, y, vals.format(', Control Port: {control_port}'))
    elif vals.socket_path:
        subwindow.addstr(x, y, vals.format(', Control Socket: {socket_path}'))
def _draw_disconnected(subwindow, x, y, last_heartbeat):
    """
    Message indicating that tor is disconnected...

      Tor Disconnected (15:21 07/13/2014, press r to reconnect)
    """

    heartbeat_label = time.strftime('%H:%M %m/%d/%Y', time.localtime(last_heartbeat))

    x = subwindow.addstr(x, y, 'Tor Disconnected', RED, BOLD)
    subwindow.addstr(x, y, ' (%s, press r to reconnect)' % heartbeat_label)
def _draw_resource_usage(subwindow, x, y, width, vals, pause_time):
"""
System resource usage of the tor process...
cpu: 0.0% tor, 1.0% nyx mem: 0 (0.0%) pid: 16329 uptime: 12-20:42:07
"""
if vals.start_time:
if not vals.is_connected:
now = vals.connection_time
elif pause_time:
now = pause_time
else:
now = time.time()
uptime = stem.util.str_tools.short_time_label(now - vals.start_time)
else:
uptime = ''
sys_fields = (
(0, vals.format('cpu: {tor_cpu}% tor, {nyx_cpu}% nyx')),
(27, vals.format('mem: {memory} ({memory_percent}%)')),
(47, vals.format('pid: {pid}')),
(59, 'uptime: %s' % uptime),
)
for (start, label) in sys_fields:
if width >= start + len(label):
subwindow.addstr(x + start, y, label)
else:
break
def _draw_fingerprint_and_fd_usage(subwindow, x, y, width, vals):
    """
    Presents our fingerprint, and our file descriptor usage if we're running
    out...

      fingerprint: 1A94D1A794FCB2F8B6CBC179EF8FDD4008A98D3B, file desc: 900 / 1000 (90%)
    """

    initial_x, space_left = x, width

    x = subwindow.addstr(x, y, vals.format('fingerprint: {fingerprint}', width))
    space_left -= x - initial_x

    # only shown when usage exceeds SHOW_FD_THRESHOLD and the limit is known
    if space_left >= 30 and vals.fd_used and vals.fd_limit != -1:
        fd_percent = 100 * vals.fd_used / vals.fd_limit

        if fd_percent >= SHOW_FD_THRESHOLD:
            # escalate the color as we approach the descriptor limit
            if fd_percent >= 95:
                percentage_format = (RED, BOLD)
            elif fd_percent >= 90:
                percentage_format = (RED,)
            elif fd_percent >= 60:
                percentage_format = (YELLOW,)
            else:
                percentage_format = ()

            # use the abbreviated label when space is tight
            x = subwindow.addstr(x, y, ', file descriptors' if space_left >= 37 else ', file desc')
            x = subwindow.addstr(x, y, vals.format(': {fd_used} / {fd_limit} ('))
            x = subwindow.addstr(x, y, '%i%%' % fd_percent, *percentage_format)
            subwindow.addstr(x, y, ')')
def _draw_flags(subwindow, x, y, flags):
    """
    Presents flags held by our relay...

      flags: Running, Valid
    """

    x = subwindow.addstr(x, y, 'flags: ')

    if not flags:
        subwindow.addstr(x, y, 'none', CYAN, BOLD)
        return

    last_index = len(flags) - 1

    for i, flag in enumerate(flags):
        color = CONFIG['attr.flag_colors'].get(flag, WHITE)
        x = subwindow.addstr(x, y, flag, color, BOLD)

        if i != last_index:
            x = subwindow.addstr(x, y, ', ')
def _draw_exit_policy(subwindow, x, y, exit_policy):
    """
    Presents our exit policy...

      exit policy: reject *:*
    """

    x = subwindow.addstr(x, y, 'exit policy: ')

    if not exit_policy:
        return

    rules = list(exit_policy.strip_private().strip_default())

    for i, rule in enumerate(rules):
        color = GREEN if rule.is_accept else RED
        x = subwindow.addstr(x, y, str(rule), color, BOLD)

        if i != len(rules) - 1:
            x = subwindow.addstr(x, y, ', ')

    if exit_policy.has_default():
        if rules:
            x = subwindow.addstr(x, y, ', ')

        subwindow.addstr(x, y, '<default>', CYAN, BOLD)
def _draw_newnym_option(subwindow, x, y, newnym_wait):
"""
Provide a notice for requiesting a new identity, and time until it's next
available if in the process of building circuits.
"""
if newnym_wait == 0:
subwindow.addstr(x, y, "press 'n' for a new identity")
else:
plural = 's' if newnym_wait > 1 else ''
subwindow.addstr(x, y, 'building circuits, available again in %i second%s' % (newnym_wait, plural))
def _draw_status(subwindow, x, y, is_paused, message, *attr):
    """
    Provides general usage information or a custom message.
    """

    if message:
        subwindow.addstr(x, y, message, *attr)
    elif is_paused:
        subwindow.addstr(x, y, 'Paused', HIGHLIGHT)
    else:
        controller = nyx.controller.get_controller()
        usage = 'page %i / %i - m: menu, p: pause, h: page help, q: quit' % (controller.get_page() + 1, controller.get_page_count())
        subwindow.addstr(x, y, usage)
<?php
namespace Illuminate\Contracts\Broadcasting;
/**
 * Contract for broadcast drivers: authorizing channel access for incoming
 * requests and pushing events out to channels.
 */
interface Broadcaster
{
    /**
     * Authenticate the incoming request for a given channel.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return mixed
     */
    public function auth($request);

    /**
     * Return the valid authentication response.
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  mixed  $result
     * @return mixed
     */
    public function validAuthenticationResponse($request, $result);

    /**
     * Broadcast the given event.
     *
     * @param  array  $channels
     * @param  string  $event
     * @param  array  $payload
     * @return void
     *
     * @throws \Illuminate\Broadcasting\BroadcastException
     */
    public function broadcast(array $channels, $event, array $payload = []);
}
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Unit tests for sparse binary matrix."""
import cPickle
import os
import numpy
import unittest2 as unittest
from nupic.bindings.math import SM32, SM_01_32_32
_RGEN = numpy.random.RandomState(37)  # fixed seed so failures are reproducible


def error(msg):
    """Report a test-check failure by printing it.

    :param msg: description of the failed check

    NOTE(review): this legacy helper only prints, so unittest still reports
    success even when a check fails -- callers should eventually migrate to
    self.assert* methods. The parameter was renamed from ``str`` to avoid
    shadowing the builtin, and the print was parenthesized so it emits the
    same 'Error: <msg>' line on both Python 2 and 3.
    """
    print('Error: %s' % msg)
class UnitTests(unittest.TestCase):
def setUp(self):
    # Template instance; tests use self.Matrix.__class__ to build fresh
    # sparse 0/1 matrices of arbitrary width.
    self.Matrix = SM_01_32_32(1)
def test_construction(self):
    """Exercises the constructors: width-only, copy, numpy array, CSR string, and SM."""
    # width-only constructor yields zero rows
    a = self.Matrix.__class__(4)
    if a.nRows() != 0 or a.nCols() != 4:
        error('constructor 1')
    b = self.Matrix.__class__(a)
    if b.nRows() != 0 or b.nCols() != 4:
        error('constructor 2A')
    if (a.toDense() != b.toDense()).any():
        error('constructor 2B')
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(n)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))  # force an all-zero row to cover that edge case
    for i in range(m):
        a.appendSparseRow(numpy.where(x[i] > 0)[0].tolist())
    b = self.Matrix.__class__(a)
    if (a.toDense() != b.toDense()).any():
        error('copy constructor')
    c = self.Matrix.__class__(x)
    if (c.toDense() != x).any():
        error('constructor from numpy array')
    s = c.toCSR()
    d = self.Matrix.__class__(s)
    if (d.toDense() != x).any():
        error('constructor from csr string')
    # Test construction from a SM
    a = _RGEN.randint(0,10,(3,4))
    a[2] = 0
    a[:,3] = 0
    a = SM32(a)
    b = SM_01_32_32(a)
    a = a.toDense()
    w = numpy.where(a > 0)
    a[w] = 1  # binarize: the 0/1 matrix keeps only the sparsity pattern
    if (a != b.toDense()).any():
        error('construction from SM')
def testAccessors(self):
    """Checks nRows/nCols/nNonZeros*, getRowSparse, capacity and compact."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0  # force an all-zero row
    a = self.Matrix.__class__(n)
    # NOTE(review): stray Python 2 debug print of the version strings
    print a.getVersion(), a.getVersion(True)
    if a.nRows() != 0:
        error('nRows 1')
    if a.nCols() != n:
        error('nCols 1')
    for i in range(m):
        a.appendSparseRow(numpy.where(x[i] > 0)[0].tolist())
    if a.nRows() != m:
        error('nRows 2')
    if a.nCols() != n:
        error('nCols 2')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('nNonZeros')
    for i in range(m):
        if a.nNonZerosOnRow(i) != x.sum(axis=1)[i]:
            error('nNonZerosOnRow')
    if (a.nNonZerosPerRow() != x.sum(axis=1)).any():
        error('nNonZerosPerRow')
    if (a.nNonZerosPerCol() != x.sum(axis=0)).any():
        error('nNonZerosPerCol')
    for i in range(m):
        # rebuild a dense row from the sparse indices and compare
        y = numpy.zeros((n))
        for j in a.getRowSparse(i):
            y[j] = 1
        if (y != x[i]).any():
            error('getRowSparse')
    if a.capacity() < a.nNonZeros():
        error('capacity')
    m = _RGEN.randint(100,200)
    n = _RGEN.randint(100,200)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    # compact() should never grow the memory footprint
    m1 = a.nBytes()
    a.compact()
    m2 = a.nBytes()
    if (m2 > m1):
        error('compact')
def testCopy(self):
    """copy() should make the destination matrix equal to the source."""
    rows = _RGEN.randint(1, 10)
    cols = _RGEN.randint(5, 10)

    dense = _RGEN.randint(0, 2, (rows, cols))
    dense[rows / 2] = numpy.zeros((cols))  # include an all-zero row

    source = self.Matrix.__class__(dense)
    target = self.Matrix.__class__(1)
    target.copy(source)

    if source != target:
        error('copy')
def testClear(self):
    """clear() should leave an empty matrix with zero capacity."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    a.clear()
    if a.capacity() != 0:
        error('clear /capacity')
    if a.nRows() != 0:
        error('clear /nRows')
    if a.nCols() != 0:
        error('clear /nCols')
    if a.nNonZeros() != 0:
        error('clear /nNonZeros')
def testResize(self):
    """Exercises resize(): to empty, to larger, and to smaller shapes."""
    # 1. Resize to 0,0 (equivalent to clear)
    m = _RGEN.randint(4,10)
    n = _RGEN.randint(6,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    a.resize(0,0)
    if a.capacity() != 0:
        error('resize to 0,0 /capacity')
    if a.nRows() != 0:
        error('resize to 0,0 /nRows')
    if a.nCols() != 0:
        error('resize to 0,0 /nCols')
    if a.nNonZeros() != 0:
        error('resize to 0,0 /nNonZeros')
    # 2. Resize to larger size
    m = _RGEN.randint(4,10)
    n = _RGEN.randint(6,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    # 2.1 More rows only -- growing must preserve all non-zeros
    old_nrows = a.nRows()
    old_ncols = a.nCols()
    old_nnzr = a.nNonZeros()
    a.resize(2*a.nRows(),a.nCols())
    if a.nRows() != 2*old_nrows or a.nCols() != old_ncols:
        error('resize to more rows, 1')
    if a.nNonZeros() != old_nnzr:
        error('resize to more rows, 2')
    # 2.2 More cols only
    old_nrows = a.nRows()
    a.resize(a.nRows(), 2*a.nCols())
    if a.nRows() != old_nrows or a.nCols() != 2*old_ncols:
        error('resize to more cols, 1')
    if a.nNonZeros() != old_nnzr:
        error('resize to more cols, 2')
    # 2.3 More rows and cols
    m = _RGEN.randint(4,10)
    n = _RGEN.randint(6,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    old_nrows = a.nRows()
    old_ncols = a.nCols()
    old_nnzr = a.nNonZeros()
    a.resize(2*a.nRows(),2*a.nCols())
    if a.nRows() != 2*old_nrows or a.nCols() != 2*old_ncols:
        error('resize to more rows and cols, 1')
    if a.nNonZeros() != old_nnzr:
        error('resize to more rows and cols, 2')
    # 3. Resize to smaller size -- shrinking drops the truncated non-zeros
    m = _RGEN.randint(10,20)
    n = _RGEN.randint(10,20)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    # 3.1 Less rows only
    old_nrows = a.nRows()
    old_ncols = a.nCols()
    old_nnzr = a.nNonZeros()
    a.resize(a.nRows()/2,a.nCols())
    if a.nRows() != old_nrows/2 or a.nCols() != old_ncols:
        error('resize to less rows, 1')
    if a.nNonZeros() != numpy.sum(x[:old_nrows/2]):
        error('resize to less rows, 2')
    # 2.2 Less cols only
    old_nrows = a.nRows()
    a.resize(a.nRows(), a.nCols()/2)
    if a.nRows() != old_nrows or a.nCols() != old_ncols/2:
        error('resize to less cols, 1')
    if a.nNonZeros() != numpy.sum(x[:a.nRows(),:old_ncols/2]):
        error('resize to less cols, 2')
    # 2.3 Less rows and cols
    m = _RGEN.randint(10,20)
    n = _RGEN.randint(10,20)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    old_nrows = a.nRows()
    old_ncols = a.nCols()
    old_nnzr = a.nNonZeros()
    a.resize(a.nRows()/2,a.nCols()/2)
    if a.nRows() != old_nrows/2 or a.nCols() != old_ncols/2:
        error('resize to less rows and cols, 1')
    if a.nNonZeros() != numpy.sum(x[:old_nrows/2,:old_ncols/2]):
        error('resize to less rows and cols, 2')
def testEquals(self):
    """Matrices built from the same array compare equal; a one-bit change breaks equality."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(n)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))
    a = self.Matrix.__class__(x)
    b = self.Matrix.__class__(x)
    if a != b:
        error('equals 1')
    # flipping a single entry must make them unequal
    b.set(m/2, n/2, 1)
    if a == b:
        error('equals 2')
def testSetOnRow(self):
    """Checks set() with a list of column indices and setForAllRows().

    NOTE(review): this method was previously named testSet, a name that is
    redefined later in the class -- the later definition silently shadowed
    this one so it never ran. Renamed so unittest discovers and runs it.
    """
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))
    a = self.Matrix.__class__(x)
    # set several entries of one row to 1
    a.set(m/2, [0, 2, 4], 1)
    x[m/2,0] = 1
    x[m/2,2] = 1
    x[m/2,4] = 1
    if (a != x).any():
        error('set on row 1')
    # ... and back to 0
    a.set(m/2, [0,2,4], 0)
    x[m/2,0] = 0
    x[m/2,2] = 0
    x[m/2,4] = 0
    if (a != x).any():
        error('set on row 2')  # was a duplicated 'set on row 1' message
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))
    a = self.Matrix.__class__(x)
    # setForAllRows applies the column list to every row
    a.setForAllRows([0,2,4], 1)
    for i in range(m):
        x[i,0] = 1
        x[i,2] = 1
        x[i,4] = 1
    if (a != x).any():
        error('set for all rows')
def testGetAllNonZeros(self):
    """getAllNonZeros() in both return styles matches numpy.where on the dense array."""
    for i in range(10):
        m = _RGEN.randint(2,10)
        n = _RGEN.randint(2,10)
        a = _RGEN.randint(0,2,(m,n))
        a[_RGEN.randint(0,m)] = 0  # empty row
        a[:,_RGEN.randint(0,n)] = 0  # empty column
        sm = self.Matrix.__class__(a)
        ans_ind = numpy.where(a > 0)
        ans_val = a[ans_ind]
        ans = [(i,j,v) for i,j,v in zip(ans_ind[0], ans_ind[1], ans_val)]
        # Returns one list of pairs by default
        all_nz = sm.getAllNonZeros()
        for x,y in zip(all_nz, ans):
            if x[0] != y[0] or x[1] != y[1]:
                error('getAllNonZeros 1 list of pairs')
        # Test option to return 2 lists instead of 1 list of pairs
        all_nz2 = sm.getAllNonZeros(True)
        for i in range(len(ans_val)):
            if all_nz2[0][i] != ans_ind[0][i] or all_nz2[1][i] != ans_ind[1][i]:
                error('getAllNonZeros 2 lists')
def testSetAllNonZeros(self):
    """setAllNonZeros() works with sorted indices and, via the flag, unsorted duplicates."""
    for i in range(10):
        m = _RGEN.randint(2,10)
        n = _RGEN.randint(2,10)
        a = _RGEN.randint(0,2,(m,n))
        a[_RGEN.randint(0,m)] = 0
        a[:,_RGEN.randint(0,n)] = 0
        a[0,0] = 1
        a[m-1] = 0  # trailing empty row
        a[:,n-1] = 0  # trailing empty column
        nz = numpy.where(a > 0)
        sm = self.Matrix.__class__(1)
        # Assumes lexicographic order of the indices by default
        sm.setAllNonZeros(a.shape[0], a.shape[1], nz[0],nz[1])
        if (sm.toDense() != a).any():
            error('setAllNonZeros, in order')
        # Test when values come in out of (lexicographic) order
        # and with duplicates
        p = _RGEN.permutation(len(nz[0]))
        nz_i2,nz_j2 = [],[]
        for i in p:
            nz_i2.append(nz[0][i])
            nz_j2.append(nz[1][i])
        for i in p:  # second pass duplicates every index pair
            nz_i2.append(nz[0][i])
            nz_j2.append(nz[1][i])
        sm2 = self.Matrix.__class__(1)
        # False tells the matrix the input isn't sorted/unique
        sm2.setAllNonZeros(a.shape[0], a.shape[1], nz_i2,nz_j2, False)
        if (sm2.toDense() != a).any():
            error('setAllNonZeros, out of order')
def testGetCol(self):
    """getCol() returns each column as stored in the source dense array."""
    for i in range(10):
        m = _RGEN.randint(2,10)
        n = _RGEN.randint(2,10)
        a = _RGEN.randint(0,2,(m,n)).astype(numpy.float32)
        a[_RGEN.randint(0,m)] = 0
        a[:,_RGEN.randint(0,n)] = 0
        a[0,0] = 1
        a[m/2] = 0
        a[:,n/2] = 0
        sm = self.Matrix.__class__(a)
        for j in range(n):
            if (sm.getCol(j) != a[:,j]).any():
                error('getCol')
def testSetSlice(self):
    """setSlice() overwrites a rectangular region, from a sparse matrix or numpy array."""
    # With a sparse matrix
    for i in range(10):
        m = _RGEN.randint(10,20)
        n = _RGEN.randint(10,20)
        a = _RGEN.randint(0,2,(m,n)).astype(numpy.float32)
        a[_RGEN.randint(0,m)] = 0
        a[:,_RGEN.randint(0,n)] = 0
        a[0,0] = 1
        a[m/2] = 0
        a[:,n/2] = 0
        sm = self.Matrix.__class__(a)
        b = _RGEN.randint(0,2,(m/4,n/4)).astype(numpy.float32)
        slice = self.Matrix.__class__(b)
        # upper-left corner of the slice, kept inside the matrix
        x,y = _RGEN.randint(0,m/2), _RGEN.randint(0,n/2)
        sm.setSlice(x,y,slice)
        ans = numpy.array(a)
        for i in range(b.shape[0]):
            for j in range(b.shape[1]):
                ans[x+i,y+j] = slice.get(i,j)
        if (sm.toDense() != ans).any():
            error('setSlice')
    # With a numpy array
    for i in range(10):
        m = _RGEN.randint(10,20)
        n = _RGEN.randint(10,20)
        a = _RGEN.randint(0,2,(m,n)).astype(numpy.float32)
        a[_RGEN.randint(0,m)] = 0
        a[:,_RGEN.randint(0,n)] = 0
        a[numpy.where(a < 25)] = 0
        a[0,0] = 1
        a[m/2] = 0
        a[:,n/2] = 0
        sm = self.Matrix.__class__(a)
        slice = _RGEN.randint(0,2,(m/4,n/4)).astype(numpy.float32)
        x,y = _RGEN.randint(0,m/2), _RGEN.randint(0,n/2)
        sm.setSlice(x,y,slice)
        ans = numpy.array(a)
        for i in range(slice.shape[0]):
            for j in range(slice.shape[1]):
                ans[x+i,y+j] = slice[i,j]
        if (sm.toDense() != ans).any():
            error('setSlice/dense')
def testNNonZerosPerBox(self):
    """nNonZerosPerBox() counts non-zeros in each quadrant correctly."""
    for i in range(10):
        m = _RGEN.randint(2,10)
        n = _RGEN.randint(2,10)
        a = _RGEN.randint(0,2,(m,n)).astype(numpy.float32)
        a[_RGEN.randint(0,m)] = 0
        a[:,_RGEN.randint(0,n)] = 0
        a[0,0] = 1
        a[m/2] = 0
        a[:,n/2] = 0
        sm = self.Matrix.__class__(a)
        # split the matrix into 2x2 boxes at the midpoints
        nnzpb = sm.nNonZerosPerBox([m/2, m], [n/2, n])
        ans = numpy.zeros((2,2))
        ans[0,0] = numpy.sum(a[:m/2,:n/2])
        ans[0,1] = numpy.sum(a[:m/2,n/2:])
        ans[1,0] = numpy.sum(a[m/2:,:n/2])
        ans[1,1] = numpy.sum(a[m/2:,n/2:])
        if (nnzpb.toDense() != ans).any():
            error('nNonZerosPerBox')
def testAppendSparseRow(self):
    """appendSparseRow() reproduces the dense source and updates the counters."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(n)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))  # include an empty row
    for i in range(m):
        a.appendSparseRow(numpy.where(x[i] > 0)[0].tolist())
    if (a.toDense() != x).any():
        error('appendSparseRow')
    if a.nRows() != m:
        error('appendSparseRow nRows')
    if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
        error('appendSparseRow nNonZerosPerRow')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('appendSparseRow nNonZeros')
def testAppendDenseRow(self):
    """appendDenseRow() reproduces the dense source and updates the counters."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(n)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))  # include an empty row
    for i in range(m):
        a.appendDenseRow(x[i])
    if (a.toDense() != x).any():
        error('appendDenseRow')
    if a.nRows() != m:
        error('appendDenseRow nRows')
    if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
        error('appendDenseRow nNonZerosPerRow')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('appendDenseRow nNonZeros')
def testReplaceSparseRow(self):
    """replaceSparseRow() swaps each row's contents and keeps counters consistent."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(n)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))
    for i in range(m):
        a.appendSparseRow(numpy.where(x[i] > 0)[0].tolist())
    for i in range(m):
        # replace each row with fresh random contents
        x[i] = _RGEN.randint(0,2,(n))
        a.replaceSparseRow(i, numpy.where(x[i] > 0)[0].tolist())
    if (a.toDense() != x).any():
        error('replaceSparseRow')
    if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
        error('replaceSparseRow nNonZerosPerRow')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('replaceSparseRow nNonZeros')
    if a.nRows() != m:
        error('replaceSparseRow nRows')
def testFindRowSparse(self):
    """findRowSparse() locates a row matching the given sparse indices."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(n)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))
    for i in range(m):
        a.appendSparseRow(numpy.where(x[i] > 0)[0].tolist())
    for i in range(m):
        w = a.findRowSparse(numpy.where(x[i] > 0)[0].tolist())
        # duplicate rows may match a different index; compare content instead
        if (x[w] != x[i]).any():
            error('findRowSparse')
def testFindRowDense(self):
    """findRowDense() locates a row matching the given dense row."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))
    a = self.Matrix.__class__(x)
    for i in range(m):
        w = a.findRowDense(x[i])
        # duplicate rows may match a different index; compare content instead
        if (x[w] != x[i]).any():
            error('findRowDense')
def testGet(self):
    """Every entry reported by get() matches the dense source array."""
    rows = _RGEN.randint(1, 10)
    cols = _RGEN.randint(5, 10)

    dense = _RGEN.randint(0, 2, (rows, cols))
    dense[rows / 2] = numpy.zeros((cols))  # include an empty row

    mat = self.Matrix.__class__(cols)
    mat.fromDense(dense)

    for r in range(rows):
        for c in range(cols):
            if mat.get(r, c) != dense[r, c]:
                error('get')
def testSet(self):
    """Element-wise set(), including clamping of values > 1 down to 1."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(x)
    for i in range(m):
        for j in range(n):
            v = _RGEN.randint(0,2)
            a.set(i,j,v)
            x[i,j] = v
    if (a.toDense() != x).any():
        error('set')
    # setting a value of 2 stores 1 -- the matrix is binary
    a.set(0,n-1,2)
    x[0,n-1] = 1
    if (a.toDense() != x).any():
        error('set 2')
    x[m/2] = 0
    a.fromDense(x)
    # fill a previously empty row one entry at a time
    for j in range(n):
        a.set(m/2,j,1)
    x[m/2] = 1
    if (a.toDense() != x).any():
        error('set 3')
def testSetRangeToZero(self):
    """setRangeToZero() clears [begin, end) on a row, including degenerate ranges."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    a = self.Matrix.__class__(n)
    a.fromDense(x)
    for i in range(m):
        begin = _RGEN.randint(0,n)
        end = _RGEN.randint(begin, n+1)
        a.setRangeToZero(i, begin, end)
        x[i][begin:end] = 0
    if (a.toDense() != x).any():
        error('setRangeToZero 1')
    # empty ranges are no-ops
    a.setRangeToZero(0, 0, 0)
    if (a.toDense() != x).any():
        error('setRangeToZero 2')
    a.setRangeToZero(0, n, n)
    if (a.toDense() != x).any():
        error('setRangeToZero 3')
    a.setRangeToZero(0, 3, 3)
    if (a.toDense() != x).any():
        error('setRangeToZero 4')
    # full-row range clears the whole row
    a.setRangeToZero(0, 0, n)
    x[0] = 0
    if (a.toDense() != x).any():
        error('setRangeToZero 5')
def testSetRangeToOne(self):
    """setRangeToOne() fills [begin, end) on a row, including degenerate ranges."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))
    a = self.Matrix.__class__(n)
    a.fromDense(x)
    for i in range(m):
        begin = _RGEN.randint(0,n)
        end = _RGEN.randint(begin, n+1)
        a.setRangeToOne(i, begin, end)
        x[i][begin:end] = 1
    if (a.toDense() != x).any():
        error('setRangeToOne 1')
    # empty ranges are no-ops
    a.setRangeToOne(0, 0, 0)
    if (a.toDense() != x).any():
        error('setRangeToOne 2')
    a.setRangeToOne(0, n, n)
    if (a.toDense() != x).any():
        error('setRangeToOne 3')
    a.setRangeToOne(0, 3, 3)
    if (a.toDense() != x).any():
        error('setRangeToOne 4')
    # full-row range fills the whole row
    a.setRangeToOne(0, 0, n)
    x[0] = 1
    if (a.toDense() != x).any():
        error('setRangeToOne 5')
def testTranspose(self):
    """transpose() matches numpy's transpose and is its own inverse."""
    for k in range(10):
        m = _RGEN.randint(4,10)
        n = _RGEN.randint(5,10)
        a = self.Matrix.__class__(n)
        x = _RGEN.randint(0,2,(m,n))
        x[m/2] = 0
        x[:n/2] = 0
        for i in range(m):
            a.appendDenseRow(x[i])
        a.transpose()
        if (a.toDense() != numpy.transpose(x)).any():
            error('numpy.transpose')
        if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=0)).any():
            error('numpy.transpose nNonZerosPerRow')
        if a.nNonZeros() != len(numpy.where(x > 0)[0]):
            error('numpy.transpose nNonZeros')
        # transposing again restores the original
        a.transpose()
        if (a.toDense() != x).any():
            error('numpy.transpose 2')
        if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
            error('numpy.transpose nNonZerosPerRow 2')
        if a.nNonZeros() != len(numpy.where(x > 0)[0]):
            error('numpy.transpose nNonZeros 2')
def testCSR(self):
    """toCSR()/fromCSR() round-trips the matrix exactly."""
    m = _RGEN.randint(10,20)
    n = _RGEN.randint(10,20)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0
    x[:,n/2] = 0
    a = self.Matrix.__class__(x)
    csr = a.toCSR()
    b = self.Matrix.__class__(1)
    b.fromCSR(csr)
    if (a.toDense() != b.toDense()).any():
        error('toCSR/fromCSR')
    if (numpy.array(a.nNonZerosPerRow()) != numpy.array(b.nNonZerosPerRow())).any():
        error('toCSR/fromCSR nNonZerosPerRow')
    if b.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('toCSR/fromCSR nNonZeros')
def testGetstateSetstate(self):
    """Round-trip through __getstate__/__setstate__ must preserve equality."""
    m = _RGEN.randint(10,20)
    n = _RGEN.randint(10,20)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0          # empty row
    x[:,n/2] = 0        # empty column
    a = self.Matrix.__class__(x)
    s = a.__getstate__()
    b = self.Matrix.__class__(1)
    b.__setstate__(s)
    if a != b:
        # Fixed typo in the failure label ('__geststate__' -> '__getstate__').
        error('__getstate__/__setstate__')
def testCSRToFromFile(self):
    """Round-trip through CSRSaveToFile/CSRLoadFromFile must preserve equality.

    The temp file is now removed in a finally block so a failing
    comparison (error() raising) no longer leaks 'test_csr2.txt'.
    """
    m = _RGEN.randint(10,20)
    n = _RGEN.randint(10,20)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0          # empty row
    x[:,n/2] = 0        # empty column
    a = self.Matrix.__class__(x)
    try:
        a.CSRSaveToFile('test_csr2.txt')
        b = self.Matrix.__class__(1)
        b.CSRLoadFromFile('test_csr2.txt')
        if a != b:
            error('CSRSaveToFile/CSRLoadFromFile')
    finally:
        os.unlink('test_csr2.txt')
def testCSRSize(self):
    """CSRSize() must predict the exact on-disk size of CSRSaveToFile.

    After each check, 1000 random entries are zeroed so the next round
    exercises a sparser matrix.
    """
    for k in range(5):
        m = _RGEN.randint(10,100)
        n = _RGEN.randint(10,100)
        x = _RGEN.randint(0,100,(m,n))
        x[m/2] = 0      # force one empty row
        a = self.Matrix.__class__(x)
        for i in range(10):
            s_estimated = a.CSRSize()
            a.CSRSaveToFile('test_csr.txt')
            s_real = os.path.getsize('test_csr.txt')
            if s_estimated != s_real:
                error('CSRSize')
            # Sparsify before re-checking the size estimate.
            for j in range(1000):
                a.set(_RGEN.randint(0,m),_RGEN.randint(0,n), 0)
    os.unlink('test_csr.txt')
def testBinary(self):
    """Round-trip through binarySaveToFile/binaryLoadFromFile.

    The temp file is now removed in a finally block so a failing
    comparison (error() raising) no longer leaks 'test_binary.bin'.
    """
    m = _RGEN.randint(10,20)
    n = _RGEN.randint(10,20)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = 0          # empty row
    x[:,n/2] = 0        # empty column
    a = self.Matrix.__class__(x)
    try:
        a.binarySaveToFile('test_binary.bin')
        b = self.Matrix.__class__(1)
        b.binaryLoadFromFile('test_binary.bin')
        if a != b:
            error('binarySaveToFile/binaryLoadFromFile')
    finally:
        os.unlink('test_binary.bin')
def testToFromSparseVector(self):
    """fromSparseVector/toSparseVector round-trip against a flat index list.

    The matrix is also reloaded with a second, differently-shaped x to
    make sure fromSparseVector can be reused on the same object.
    """
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(1)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))       # force an empty row
    # Flatten to build the sparse (flat-index) representation.
    x = x.reshape((m*n))
    indices = numpy.where(x > 0)[0].tolist()
    a.fromSparseVector(m, n, indices)
    x = x.reshape((m,n))
    if (a.toDense() != x).any():
        error('fromSparseVector')
    if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
        error('fromSparseVector nNonZerosPerRow')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('fromSparseVector nNonZeros')
    x = x.reshape(m*n)
    y = a.toSparseVector()
    if (y != numpy.where(x > 0)[0].tolist()).any():
        error('toSparseVector')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        # Fixed label: this is the first toSparseVector nNonZeros check,
        # it previously duplicated the ' 2' label of the later one.
        error('toSparseVector nNonZeros')
    # Need to make sure the same matrix can go through
    # fromSparseVector again with a different x
    x = _RGEN.randint(0,2,(n,m))
    x = x.reshape((m*n))
    indices = numpy.where(x > 0)[0].tolist()
    a.fromSparseVector(n, m, indices)
    x = x.reshape((n, m))
    if (a.toDense() != x).any():
        error('fromSparseVector 2')
    if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
        error('fromSparseVector nNonZerosPerRow 2')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('fromSparseVector nNonZeros 2')
    x = x.reshape(m*n)
    y = a.toSparseVector()
    if (y != numpy.where(x > 0)[0].tolist()).any():
        error('toSparseVector 2')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('toSparseVector nNonZeros 2')
def testToFromDense(self):
    """fromDense/toDense round-trip, including reloading with a new shape."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))       # force an empty row
    a = self.Matrix.__class__(1)
    a.fromDense(x)
    if (a.toDense() != x).any():
        error('fromDense')
    if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
        error('fromDense nNonZerosPerRow')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('fromDense nNonZeros')
    # Need to make sure the same matrix can go
    # through another fromDense with a different x
    x = _RGEN.randint(0,2,(n,m))
    a.fromDense(x)
    if (a.toDense() != x).any():
        error('fromDense 2')
    if (numpy.array(a.nNonZerosPerRow()) != x.sum(axis=1)).any():
        error('fromDense nNonZerosPerRow 2')
    if a.nNonZeros() != len(numpy.where(x > 0)[0]):
        error('fromDense nNonZeros 2')
def testRowToFromDense(self):
    """rowFromDense/rowToDense must round-trip each row individually."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))       # force an empty row
    b = numpy.zeros((m,n))
    a = self.Matrix.__class__(n)
    for i in range(m):
        # Append a placeholder row, then overwrite it from dense data.
        a.appendDenseRow(numpy.zeros((n)))
        a.rowFromDense(i, x[i])
    if (a.toDense() != x).any():
        error('rowFromDense')
    for i in range(m):
        b[i] = a.rowToDense(i)
    if (b != x).any():
        error('rowToDense')
def testLogicalNot(self):
    """logicalNot() must flip every bit: result equals 1 - x."""
    m = _RGEN.randint(1,10)
    n = _RGEN.randint(5,10)
    a = self.Matrix.__class__(n)
    x = _RGEN.randint(0,2,(m,n))
    x[m/2] = numpy.zeros((n))       # force an empty row
    for i in range(m):
        a.appendSparseRow(numpy.where(x[i] > 0)[0].tolist())
    a.logicalNot()
    y = 1 - x
    if (a.toDense() != y).any():
        error('logicalNot')
def testLogicalOr(self):
    """Smoke-test logicalOr on fixed patterns (visual/crash test only).

    No numeric assertions: results are printed only when show is True.
    Python 2 print statements are used throughout this file.
    """
    show = False
    a = self.Matrix.__class__(1)
    a.fromDense([[0,0,1,1,1,0,0],
                 [0,1,0,0,0,1,0],
                 [0,1,0,0,0,1,0],
                 [0,0,1,1,1,0,0]])
    b = self.Matrix.__class__(1)
    b.fromDense([[0,0,0,0,0,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,0,0,0,0,0]])
    a.logicalOr(b)
    if show: print a
    # OR with itself must be a no-op.
    a.logicalOr(a)
    if show: print a
    a = self.Matrix.__class__(1)
    a.fromDense([[0,0,1,1,1,0,0],
                 [0,1,0,0,0,1,0],
                 [0,1,0,0,0,1,0],
                 [0,0,1,1,1,0,0]])
    b = self.Matrix.__class__(1)
    b.fromDense([[0,0,0,0,0,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,0,0,0,0,0]])
    b.logicalNot()
    if show: print b
    b.logicalOr(a)
    if show: print b
    # OR of an inverted frame with its interior (inside()).
    a = self.Matrix.__class__([[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
                              ,[1,1,1,1,1,0,0,0,0,0,0,0,0,0,1]
                              ,[1,1,1,1,0,0,0,0,0,0,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,1,1,1,1,1,0,0,1]
                              ,[1,0,0,0,1,1,1,1,1,1,1,1,0,0,1]
                              ,[1,0,0,0,0,1,1,1,1,1,1,0,0,0,1]
                              ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                              ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                              ,[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]])
    a.logicalNot()
    if show: print a
    b = self.Matrix.__class__(a)
    b.inside()
    if show: print b
    a.logicalOr(b)
    if show: print a
def testLogicalAnd(self):
    """Smoke-test logicalAnd on fixed patterns (visual/crash test only).

    No numeric assertions: results are printed only when show is True.
    """
    show = False
    a = self.Matrix.__class__(1)
    a.fromDense([[0,0,1,1,1,0,0],
                 [0,1,0,0,0,1,0],
                 [0,1,0,0,0,1,0],
                 [0,0,1,1,1,0,0]])
    b = self.Matrix.__class__(1)
    b.fromDense([[0,0,0,0,0,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,0,0,0,0,0]])
    a.logicalAnd(b)
    if show: print a
    # AND with itself must be a no-op.
    a.logicalAnd(a)
    if show: print a
    a = self.Matrix.__class__(1)
    a.fromDense([[0,0,1,1,1,0,0],
                 [0,1,0,0,0,1,0],
                 [0,1,0,0,0,1,0],
                 [0,0,1,1,1,0,0]])
    b = self.Matrix.__class__(1)
    b.fromDense([[0,0,0,0,0,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,1,1,1,0,0],
                 [0,0,0,0,0,0,0]])
    b.logicalNot()
    if show: print b
    b.logicalAnd(a)
    if show: print b
def testOverlap(self):
    """overlap(v) must return, per row, the count of shared non-zeros with v.

    ans[i][j] is the expected overlap between row i (as the argument)
    and row j of the matrix.
    """
    x = [[0,1,1,0,0,1],
         [1,1,1,1,1,1],
         [0,0,0,0,0,0],
         [1,0,1,0,1,0],
         [1,1,1,0,0,0],
         [0,0,0,1,1,1],
         [1,1,0,0,1,1]]
    ans = [[3,3,0,1,2,1,2],
           [3,6,0,3,3,3,4],
           [0,0,0,0,0,0,0],
           [1,3,0,3,2,1,2],
           [2,3,0,2,3,0,2],
           [1,3,0,1,0,3,2],
           [2,4,0,2,2,2,4]]
    a = self.Matrix.__class__(1)
    a.fromDense(x)
    for xv,yv in zip(x,ans):
        y = a.overlap(xv)
        if (y != yv).any():
            error('overlap')
def testMaxAllowedOverlap(self):
    """maxAllowedOverlap(maxDistance, v) against a pure-Python reference.

    A candidate is rejected when any row overlaps v by more than
    (1 - maxDistance) * max(rowSum, sum(v)).
    """
    for i in range(10):
        m = _RGEN.randint(5,10)
        maxDistance = .5
        n = _RGEN.randint(10,20)
        x = _RGEN.randint(0,2,(m,n))
        a = self.Matrix.__class__(1)
        a.fromDense(x)
        # Renamed the inner loop variable (was 'i', shadowing the outer loop).
        for trial in range(10):
            coinc = _RGEN.randint(0,2,(n))
            overlaps = a.overlap(coinc)
            longSums = numpy.maximum(a.rowSums(), coinc.sum())
            maxAllowedOverlaps = (1.0 - maxDistance) * longSums
            py_accepted = True
            if (overlaps > maxAllowedOverlaps).any():
                py_accepted = False
            if a.maxAllowedOverlap(maxDistance, coinc) != py_accepted:
                error('maxAllowedOverlap')
def testSubtract(self):
    """b AND (NOT a) must implement binary matrix subtraction b - a."""
    dense_a = numpy.array([[0,1,0],
                           [1,0,1],
                           [0,1,0]])
    dense_b = numpy.array([[1,1,1],
                           [1,0,1],
                           [1,1,1]])
    # Reference result computed with plain numpy arithmetic.
    expected = dense_b - dense_a
    sm_a = self.Matrix.__class__(dense_a)
    sm_b = self.Matrix.__class__(dense_b)
    sm_a.logicalNot()
    sm_b.logicalAnd(sm_a)
    if (expected != sm_b.toDense()).any():
        error('subtract')
def testInsideAndEdges(self):
    """Smoke-test inside() and edges(2) on a gallery of shapes.

    There are no numeric assertions here: before/after pictures are
    printed side by side only when show is True, so this is a
    visual/crash test.
    """
    show = False
    def printSideBySide(before, after):
        # Render both matrices as '#'/'.' art, row by row.
        for i in range(before.nRows()):
            line = ''
            for j in range(before.nCols()):
                line += '#' if before.get(i,j) == 1 else '.'
            line += ' -> '
            for j in range(before.nCols()):
                line += '#' if after.get(i,j) == 1 else '.'
            print line
        print
    def sideBySide(a, edges=False):
        # Copy a, apply edges(2) or inside(), optionally display both.
        a = self.Matrix.__class__(a)
        orig = self.Matrix.__class__(a)
        if edges:
            a.edges(2)
        else:
            a.inside()
        if show:
            printSideBySide(orig, a)
    # Run the whole gallery once for inside() and once for edges(2).
    for edges in [False, True]:
        sideBySide([[0,0,0,0,0,0],
                    [0,0,0,0,0,0],
                    [0,0,0,0,0,0]], edges)
        sideBySide([[1,1,1,1,1,1],
                    [1,0,0,0,0,1],
                    [1,1,1,1,1,1]], edges)
        sideBySide([[1,1,1,1,1,1],
                    [1,1,0,0,0,1],
                    [1,1,1,1,1,1]], edges)
        sideBySide([[1,1,1,1,1,1],
                    [1,1,0,0,1,1],
                    [1,1,1,1,1,1]], edges)
        sideBySide([[1,1,1,1,1,1],
                    [1,1,1,0,0,1],
                    [1,1,1,1,1,1]], edges)
        sideBySide([[1,1,1,1,1,1],
                    [1,1,1,0,1,1],
                    [1,1,1,1,1,1]], edges)
        sideBySide([[1,1,1,1,1,1],
                    [1,1,1,1,1,1],
                    [1,1,1,1,1,1]], edges)
        sideBySide([[0,0,0,0,0,0,0,0],
                    [0,1,1,1,1,1,1,0],
                    [0,1,0,0,0,0,1,0],
                    [0,1,1,1,1,1,1,0],
                    [0,0,0,0,0,0,0,0]], edges)
        sideBySide([[0,0,0,0,0,0,0,0],
                    [0,1,1,1,1,1,1,0],
                    [0,1,0,0,0,0,1,0],
                    [0,1,0,0,0,0,1,0],
                    [0,1,1,1,1,1,1,0],
                    [0,0,0,0,0,0,0,0]], edges)
        sideBySide([[0,0,0,0,0,0,0,0],
                    [0,1,1,1,1,1,1,0],
                    [0,1,1,0,0,0,1,0],
                    [0,1,1,0,0,0,1,0],
                    [0,1,1,1,1,1,1,0],
                    [0,0,0,0,0,0,0,0]], edges)
        sideBySide([[0,0,0,0,0,0,0,0],
                    [0,1,1,1,1,1,1,0],
                    [0,1,1,1,0,0,1,0],
                    [0,1,1,1,0,0,1,0],
                    [0,1,1,1,1,1,1,0],
                    [0,0,0,0,0,0,0,0]], edges)
        sideBySide([[1,1,1,1,1,1,0],
                    [1,1,1,1,0,0,1],
                    [1,1,0,0,0,0,1],
                    [0,1,1,1,1,1,0]], edges)
        sideBySide([[0,0,1,1,1,0,0],
                    [0,1,0,0,0,1,0],
                    [0,1,0,0,0,1,0],
                    [0,0,1,1,1,0,0]], edges)
        sideBySide([[0,0,1,1,1,0,0],
                    [0,1,0,1,0,1,0],
                    [0,1,0,1,0,1,0],
                    [0,0,1,1,1,0,0]], edges)
        sideBySide([[0,0,1,1,1,0,0],
                    [0,1,0,0,0,0,0],
                    [0,1,0,0,0,0,0],
                    [0,0,1,1,1,0,0]], edges)
        sideBySide([[0,0,1,1,1,1,0],
                    [0,1,0,1,0,0,0],
                    [0,1,0,1,0,0,0],
                    [0,0,1,1,1,1,0]], edges)
        sideBySide([[1,1,1,1,1,1,0],
                    [1,1,0,0,0,1,1],
                    [1,0,0,0,0,0,1],
                    [0,1,0,0,1,1,0]], edges)
        sideBySide([[1,1,1,1,1,1,0],
                    [1,1,1,0,0,1,1],
                    [1,1,0,0,0,0,1],
                    [0,1,1,1,1,1,0]], edges)
        sideBySide([[ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,0,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,1,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,1,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,1,0,0],
                    [ 0,0,0,0,1,1,1,1,1,1,1,1,1,0,0],
                    [ 0,0,0,0,1,1,1,1,1,1,1,1,1,0,0],
                    [ 0,0,0,0,1,1,1,1,1,1,1,1,1,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,0,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,0,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,0,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,0,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,0,0,0],
                    [ 0,0,0,1,1,1,1,1,1,1,1,1,0,0,0],
                    [ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]], edges)
        sideBySide([[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
                   ,[1,1,1,1,1,0,0,0,0,0,0,0,0,0,1]
                   ,[1,1,1,1,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,1,1,1,1,1,0,0,1]
                   ,[1,0,0,0,1,1,1,1,1,1,1,1,0,0,1]
                   ,[1,0,0,0,0,1,1,1,1,1,1,0,0,0,1]
                   ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                   ,[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]], edges)
        # Same shape, inverted, passed as an already-built matrix.
        a = self.Matrix.__class__([[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
                                  ,[1,1,1,1,1,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,1,1,1,0,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,1,1,1,1,1,0,0,1]
                                  ,[1,0,0,0,1,1,1,1,1,1,1,1,0,0,1]
                                  ,[1,0,0,0,0,1,1,1,1,1,1,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                                  ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                                  ,[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]])
        a.logicalNot()
        sideBySide(a, edges)
        sideBySide([[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
                   ,[1,1,1,1,1,0,0,0,0,0,0,0,0,0,1]
                   ,[1,1,1,1,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,1,1,1,0,0,0,1]
                   ,[1,0,0,0,0,0,1,1,1,0,1,0,0,0,1]
                   ,[1,0,0,0,0,0,1,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,1,1,1,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,1,1,1,1,1,0,0,1]
                   ,[1,0,0,0,1,1,1,1,1,1,1,1,0,0,1]
                   ,[1,0,0,0,0,1,1,1,1,1,1,0,0,0,1]
                   ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                   ,[1,0,0,0,0,0,0,1,1,1,0,0,0,0,1]
                   ,[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]], edges)
        sideBySide([[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
                    [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
                    [1,1,1,1,1,0,0,0,0,0,0,0,1,1,1],
                    [1,1,1,1,0,0,0,0,0,0,0,0,0,0,1],
                    [1,1,1,0,0,0,0,0,0,0,0,0,0,0,1],
                    [1,1,0,0,0,0,0,0,0,0,0,0,0,0,1],
                    [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                    [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                    [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                    [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                    [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                    [1,1,0,0,0,0,0,0,0,0,0,0,0,1,1],
                    [1,1,1,1,1,1,0,0,0,0,0,0,1,1,1],
                    [1,1,1,1,1,1,0,0,0,0,0,0,1,1,1],
                    [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]], edges)
        a = self.Matrix.__class__([[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
                                   [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
                                   [1,1,1,1,1,0,0,0,0,0,0,0,1,1,1],
                                   [1,1,1,1,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,1,1,0,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,1,0,0,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,0,0,0,0,0,0,0,0,0,0,0,0,0,1],
                                   [1,1,0,0,0,0,0,0,0,0,0,0,0,1,1],
                                   [1,1,1,1,1,1,0,0,0,0,0,0,1,1,1],
                                   [1,1,1,1,1,1,0,0,0,0,0,0,1,1,1],
                                   [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]])
        a.logicalNot()
        sideBySide(a, edges)
def testRightVecSumAtNZ(self):
    # Regular matrix vector product, on the right side, all the values in the
    # matrix being 1. The fast version doesn't allocate memory for the result
    # and uses a pre-allocated buffer instead.
    for i in range(10):
        m = _RGEN.randint(1,10)
        n = _RGEN.randint(5,10)
        mat = _RGEN.randint(0,2,(m,n))
        mat[m/2] = numpy.zeros((n))     # force an empty row
        a = self.Matrix.__class__(1)
        a.fromDense(mat)
        x = _RGEN.lognormal(size=n).astype(numpy.float32)
        y = a.rightVecSumAtNZ(x)
        # Reference: plain dot product, compared with a float tolerance.
        answer = numpy.dot(mat, x)
        if (max(y - answer) > 1e-5).any():
            error('rightVecSumAtNZ')
        # The _fast variant must match the allocating variant exactly.
        y2 = numpy.zeros((m)).astype(numpy.float32)
        a.rightVecSumAtNZ_fast(x, y2)
        if (y != y2).any():
            error('rightVecSumAtNZ_fast')
def testRightVecArgMaxAtNZ(self):
    """rightVecArgMaxAtNZ(x) must return, per row, the argmax of x over NZ cols.

    The reference loop used to reuse the name 'a' (the matrix under test)
    as the running-max accumulator, clobbering the matrix object; the
    accumulator is now a separate local, 'best'.
    """
    for k in range(10):
        m = _RGEN.randint(1,10)
        n = _RGEN.randint(5,10)
        mat = _RGEN.randint(0,2,(m,n))
        mat[m/2] = numpy.zeros((n))     # force an empty row
        a = self.Matrix.__class__(1)
        a.fromDense(mat)
        x = _RGEN.lognormal(size=n).astype(numpy.float32)
        y = a.rightVecArgMaxAtNZ(x)
        answer = numpy.zeros(m)
        for i in xrange(m):
            best = 0
            for j in xrange(n):
                if mat[i,j] > 0:
                    if x[j] > best:
                        best = x[j]
                        answer[i] = j
        if (y != answer).any():
            error('rightVecArgMaxAtNZ')
def testLeftVecSumAtNZ(self):
    # Regular vector matrix product, on the left side, with all the values in the
    # matrix being 1. The fast version doesn't allocate memory for the result
    # and uses a pre-allocated buffer instead.
    for i in range(10):
        m = _RGEN.randint(1,10)
        n = _RGEN.randint(5,10)
        mat = _RGEN.randint(0,2,(m,n))
        mat[m/2] = numpy.zeros((n))     # force an empty row
        a = self.Matrix.__class__(1)
        a.fromDense(mat)
        x = _RGEN.lognormal(size=m).astype(numpy.float32)
        y = a.leftVecSumAtNZ(x)
        answer = numpy.dot(x, mat)
        if (max(y - answer) > 1e-5).any():
            error('leftVecSumAtNZ')
        y2 = numpy.zeros((n)).astype(numpy.float32)
        a.leftVecSumAtNZ_fast(x, y2)
        if (y != y2).any():
            # Fixed copy-pasted failure label (was 'rightVecSumAtNZ_fast').
            error('leftVecSumAtNZ_fast')
def testCompact(self):
    """compact() must shrink capacity down to exactly the non-zero count."""
    nrows = _RGEN.randint(1,100)
    ncols = _RGEN.randint(5,100)
    dense = _RGEN.randint(0,2,(nrows,ncols))
    dense[nrows/2] = numpy.zeros((ncols))   # force an empty row
    sm = self.Matrix.__class__(1)
    sm.fromDense(dense)
    needed = sm.nNonZeros()
    sm.compact()
    if sm.capacity() != needed:
        error('compact')
def testPickling(self):
    """Round-trip through cPickle.dump/load must preserve the dense contents.

    The original leaked both file handles (open() without close) and the
    temp file itself when the comparison failed; both are fixed with
    'with' blocks and a finally clause. The load now also uses binary
    mode to match the 'wb' used when dumping.
    """
    m = _RGEN.randint(1,100)
    n = _RGEN.randint(5,100)
    mat = _RGEN.randint(0,2,(m,n))
    mat[m/2] = numpy.zeros((n))     # force an empty row
    a = self.Matrix.__class__(1)
    a.fromDense(mat)
    try:
        with open('test.bin', 'wb') as f:
            cPickle.dump(a, f)
        with open('test.bin', 'rb') as f:
            b = cPickle.load(f)
        if (a.toDense() != b.toDense()).any():
            error('pickling')
    finally:
        os.unlink('test.bin')
def testMinHammingDistance(self):
    """minHammingDistance(sparse_x) must return (closest row, its distance).

    A brute-force Hamming distance over the dense matrix is used as the
    reference. Ties resolve to the first (lowest-index) row, matching
    the strict '<' in the reference loop.
    """
    m = _RGEN.randint(5,10)
    n = _RGEN.randint(5,10)
    mat = _RGEN.randint(0,2,(m,n))
    mat[m/2] = numpy.zeros((n))     # force an empty row
    a = self.Matrix.__class__(mat)
    for i in range(10):
        x = _RGEN.randint(0,2,(n))
        # Sparse (index-list) form of the query vector.
        sparse_x = []
        for i in range(n):
            if x[i] == 1:
                sparse_x.append(i)
        min_row, min_d = 0, 9999
        for i in range(m):
            d = 0
            for j in range(n):
                if (x[j] == 1 and mat[i,j] == 0) \
                        or (x[j] == 0 and mat[i,j] == 1):
                    d += 1
            if d < min_d:
                min_d = d
                min_row = i
        r = a.minHammingDistance(sparse_x)
        if r[0] != min_row or r[1] != min_d:
            error('minHammingDistance')
def testFirstRowCloserThan(self):
    """firstRowCloserThan(sparse_x, d) must return the first row with
    Hamming distance < d, or nRows() when no row qualifies.
    """
    m = _RGEN.randint(5,10)
    n = _RGEN.randint(5,10)
    mat = _RGEN.randint(0,2,(m,n))
    mat[m/2] = numpy.zeros((n))     # force an empty row
    a = self.Matrix.__class__(mat)
    for i in range(10):
        x = _RGEN.randint(0,2,(n))
        # Sparse (index-list) form of the query vector.
        sparse_x = []
        for i in range(n):
            if x[i] == 1:
                sparse_x.append(i)
        # m is the sentinel for "no row closer than the threshold".
        min_row = m
        for i in range(m):
            d = 0
            for j in range(n):
                if (x[j] == 1 and mat[i,j] == 0) \
                        or (x[j] == 0 and mat[i,j] == 1):
                    d += 1
            if d < 4:
                min_row = i
                break
        r = a.firstRowCloserThan(sparse_x, 4)
        if r != min_row:
            error('firstRowCloserThan')
def testVecMaxProd(self):
    """vecMaxProd(x) must return, per row, max of x over non-zero columns."""
    m = _RGEN.randint(5,10)
    n = _RGEN.randint(5,10)
    mat = _RGEN.randint(0,2,(m,n))
    mat[m/2] = numpy.zeros((n))     # force an empty row
    a = self.Matrix.__class__(mat)
    for i in range(10):
        x = _RGEN.lognormal(1,2,(n))
        y = a.vecMaxProd(x)
        # Brute-force reference: per-row max of x restricted to NZ columns.
        truth = numpy.zeros((m))
        for j in range(m):
            max_v = 0
            for k in range(n):
                if mat[j,k] > 0 and x[k] > max_v:
                    max_v = x[k]
            truth[j] = max_v
        if max(y - truth) > 1e-4:
            error('vecMaxProd')
def testLeftDenseMatSumAtNZ(self):
    """leftDenseMatSumAtNZ(b) must equal numpy.dot(b, a) for a 0/1 matrix a."""
    for i in range(10):
        a = _RGEN.randint(0,2,(12,13))
        m = self.Matrix.__class__(a)
        b = _RGEN.randint(0,10,(11,12))
        c = m.leftDenseMatSumAtNZ(b)
        d = numpy.dot(b,a)
        if (c != d).any():
            # Dump the operands to ease debugging before failing.
            print m
            print a
            print c
            print d
            error('leftDenseMatSumAtNZ')
def testLeftDenseMatMaxAtNZ(self):
    """leftDenseMatMaxAtNZ(b): like a matrix product but taking the max
    (instead of the sum) over the non-zero positions of each column.
    """
    for i in range(10):
        a = _RGEN.randint(0,2,(6,4))
        b = _RGEN.randint(0,10,(5,6))
        # Brute-force reference: max over products restricted to NZ entries.
        c = numpy.zeros((b.shape[0],a.shape[1])).astype(numpy.int32)
        for rowIdx in range(b.shape[0]):
            for colIdx in range(a.shape[1]):
                elements = (b[rowIdx] * a[:,colIdx])[a[:,colIdx] > 0]
                if len(elements) > 0:
                    c[rowIdx,colIdx] = elements.max()
        d = self.Matrix.__class__(a).leftDenseMatMaxAtNZ(b).astype(numpy.int32)
        if (c != d).any():
            error('leftDenseMatMaxAtNZ')
def testZeroRowsIndicator(self):
    """zeroRowsIndicator() must return (count, indicator) of all-zero rows,
    and be the exact complement of nonZeroRowsIndicator().
    """
    for i in range(10):
        m = _RGEN.randint(10, 20)
        n = _RGEN.randint(10, 20)
        a = _RGEN.randint(0,100,(m,n))
        a[numpy.where(a < 80)] = 0      # make the matrix sparse
        # Randomly force either a zero row or a fully non-zero column.
        if _RGEN.randint(0,100) > 50:
            a[_RGEN.randint(0,m)] = 0
        elif _RGEN.randint(0,100) > 50:
            for k in range(m):
                a[k,0] = 1
        b = self.Matrix.__class__(a)
        ans_v = a.sum(axis=1) == 0
        ans_c = ans_v.sum()
        c,v = b.zeroRowsIndicator()
        if c != ans_c or (ans_v != v).any():
            error('zeroRowsIndicator 1')
        # The two indicators must partition the rows.
        c2,v2 = b.nonZeroRowsIndicator()
        if c + c2 != m:
            error('zeroRowsIndicator 2')
        for j in range(m):
            if v[j] == v2[j]:
                error('zeroRowsIndicator 3')
def testNonZeroRowsIndicator(self):
    """nonZeroRowsIndicator() must return (count, indicator) of rows with
    at least one non-zero, and complement zeroRowsIndicator().
    """
    for i in range(10):
        m = _RGEN.randint(10, 20)
        n = _RGEN.randint(10, 20)
        a = _RGEN.randint(0,100,(m,n))
        a[numpy.where(a < 80)] = 0      # make the matrix sparse
        # Randomly force either a zero row or a fully non-zero column.
        if _RGEN.randint(0,100) > 50:
            a[_RGEN.randint(0,m)] = 0
        elif _RGEN.randint(0,100) > 50:
            for k in range(m):
                a[k,0] = 1
        b = self.Matrix.__class__(a)
        ans_v = a.sum(axis=1) != 0
        ans_c = ans_v.sum()
        c,v = b.nonZeroRowsIndicator()
        if c != ans_c or (ans_v != v).any():
            error('nonZeroRowsIndicator 1')
        # The two indicators must partition the rows.
        c2,v2 = b.zeroRowsIndicator()
        if c + c2 != m:
            error('nonZeroRowsIndicator 2')
        for j in range(m):
            if v[j] == v2[j]:
                error('nonZeroRowsIndicator 3')
def testAppendSparseCol(self):
    """appendEmptyCols/appendSparseCol must grow nCols and keep nRows."""
    m = _RGEN.randint(10,20)
    n = _RGEN.randint(10,20)
    x = _RGEN.randint(0,2,(m,n))
    a = self.Matrix.__class__(x)
    a.appendEmptyCols(3)
    if a.nRows() != m or a.nCols() != n + 3:
        error('appendEmptyCols 1')
    # Column given as a sorted list of row indices (half of the rows).
    x = _RGEN.permutation(m)[:m/2].astype('int32')
    a.appendSparseCol(x)
    if a.nRows() != m or a.nCols() != n + 4:
        error('appendSparseCol 1')
@unittest.skip("Not currently using...")
def testScalability(self):
# Make sure we can create a long matrix
a = self.Matrix.__class__(2)
for i in range(200000):
a.appendDenseRow([1,1])
a.CSRSaveToFile('test.txt')
b = self.Matrix.__class__(1)
b.CSRLoadFromFile('test.txt')
if (a.toDense() != b.toDense()).any():
error('scalability 1')
print 'Preparing'
n = 10000
a = self.Matrix.__class__(n)
mat = _RGEN.randint(0,100,(20000,n))
x = []
for row in mat:
x += [numpy.where(row > 90)[0]]
print 'Evaluating'
for i in range(len(x)):
if i % 100 == 0:
print i
if a.findRowSparse(x[i]) == a.nRows():
a.appendSparseRow(x[i])
if __name__ == "__main__":
unittest.main() | unknown | codeparrot/codeparrot-clean | ||
"""Prettyprinter by Jurjen Bos.
(I hate spammers: mail me at pietjepuk314 at the reverse of ku.oc.oohay).
All objects have a method that create a "stringPict",
that can be used in the str method for pretty printing.
Updates by Jason Gedge (email <my last name> at cs mun ca)
- terminal_string() method
- minor fixes and changes (mostly to prettyForm)
TODO:
- Allow left/center/right alignment options for above/below and
top/center/bottom alignment options for left/right
"""
from __future__ import print_function, division
from .pretty_symbology import hobj, vobj, xsym, xobj, pretty_use_unicode
from sympy.core.compatibility import u, string_types, range
class stringPict(object):
    """An ASCII picture.
    The pictures are represented as a list of equal length strings.
    """
    #special value for stringPict.below
    LINE = 'line'

    def __init__(self, s, baseline=0):
        """Initialize from string.
        Multiline strings are centered.
        """
        #picture is a string that just can be printed
        self.picture = stringPict.equalLengths(s.splitlines())
        #baseline is the line number of the "base line"
        self.baseline = baseline
        self.binding = None

    @staticmethod
    def equalLengths(lines):
        # empty lines
        if not lines:
            return ['']
        width = max(len(line) for line in lines)
        return [line.center(width) for line in lines]

    def height(self):
        """The height of the picture in characters."""
        return len(self.picture)

    def width(self):
        """The width of the picture in characters."""
        return len(self.picture[0])

    @staticmethod
    def next(*args):
        """Put a string of stringPicts next to each other.
        Returns string, baseline arguments for stringPict.
        """
        #convert everything to stringPicts
        objects = []
        for arg in args:
            if isinstance(arg, string_types):
                arg = stringPict(arg)
            objects.append(arg)
        #make a list of pictures, with equal height and baseline
        newBaseline = max(obj.baseline for obj in objects)
        newHeightBelowBaseline = max(
            obj.height() - obj.baseline
            for obj in objects)
        newHeight = newBaseline + newHeightBelowBaseline
        pictures = []
        for obj in objects:
            oneEmptyLine = [' '*obj.width()]
            basePadding = newBaseline - obj.baseline
            totalPadding = newHeight - obj.height()
            pictures.append(
                oneEmptyLine * basePadding +
                obj.picture +
                oneEmptyLine * (totalPadding - basePadding))
        result = [''.join(lines) for lines in zip(*pictures)]
        return '\n'.join(result), newBaseline

    def right(self, *args):
        r"""Put pictures next to this one.
        Returns string, baseline arguments for stringPict.
        (Multiline) strings are allowed, and are given a baseline of 0.
        Examples
        ========
        >>> from sympy.printing.pretty.stringpict import stringPict
        >>> print(stringPict("10").right(" + ",stringPict("1\r-\r2",1))[0])
             1
        10 + -
             2
        """
        return stringPict.next(self, *args)

    def left(self, *args):
        """Put pictures (left to right) at left.
        Returns string, baseline arguments for stringPict.
        """
        return stringPict.next(*(args + (self,)))

    @staticmethod
    def stack(*args):
        """Put pictures on top of each other,
        from top to bottom.
        Returns string, baseline arguments for stringPict.
        The baseline is the baseline of the second picture.
        Everything is centered.
        Baseline is the baseline of the second picture.
        Strings are allowed.
        The special value stringPict.LINE is a row of '-' extended to the width.
        """
        #convert everything to stringPicts; keep LINE
        objects = []
        for arg in args:
            if arg is not stringPict.LINE and isinstance(arg, string_types):
                arg = stringPict(arg)
            objects.append(arg)
        #compute new width
        newWidth = max(
            obj.width()
            for obj in objects
            if obj is not stringPict.LINE)
        lineObj = stringPict(hobj('-', newWidth))
        #replace LINE with proper lines
        for i, obj in enumerate(objects):
            if obj is stringPict.LINE:
                objects[i] = lineObj
        #stack the pictures, and center the result
        newPicture = []
        for obj in objects:
            newPicture.extend(obj.picture)
        newPicture = [line.center(newWidth) for line in newPicture]
        newBaseline = objects[0].height() + objects[1].baseline
        return '\n'.join(newPicture), newBaseline

    def below(self, *args):
        """Put pictures under this picture.
        Returns string, baseline arguments for stringPict.
        Baseline is baseline of top picture
        Examples
        ========
        >>> from sympy.printing.pretty.stringpict import stringPict
        >>> print(stringPict("x+3").below(
        ...       stringPict.LINE, '3')[0]) #doctest: +NORMALIZE_WHITESPACE
        x+3
        ---
         3
        """
        s, baseline = stringPict.stack(self, *args)
        return s, self.baseline

    def above(self, *args):
        """Put pictures above this picture.
        Returns string, baseline arguments for stringPict.
        Baseline is baseline of bottom picture.
        """
        string, baseline = stringPict.stack(*(args + (self,)))
        baseline = len(string.splitlines()) - self.height() + self.baseline
        return string, baseline

    def parens(self, left='(', right=')', ifascii_nougly=False):
        """Put parentheses around self.
        Returns string, baseline arguments for stringPict.
        left or right can be None or empty string which means 'no paren from
        that side'
        """
        h = self.height()
        b = self.baseline
        # XXX this is a hack -- ascii parens are ugly!
        if ifascii_nougly and not pretty_use_unicode():
            h = 1
            b = 0
        res = self
        if left:
            lparen = stringPict(vobj(left, h), baseline=b)
            res = stringPict(*lparen.right(self))
        if right:
            rparen = stringPict(vobj(right, h), baseline=b)
            res = stringPict(*res.right(rparen))
        return ('\n'.join(res.picture), res.baseline)

    def leftslash(self):
        """Precede object by a slash of the proper size.
        """
        # XXX not used anywhere ?
        height = max(
            self.baseline,
            self.height() - 1 - self.baseline)*2 + 1
        slash = '\n'.join(
            ' '*(height - i - 1) + xobj('/', 1) + ' '*i
            for i in range(height)
        )
        return self.left(stringPict(slash, height//2))

    def root(self, n=None):
        """Produce a nice root symbol.
        Produces ugly results for big n inserts.
        """
        # XXX not used anywhere
        # XXX duplicate of root drawing in pretty.py
        #put line over expression
        result = self.above('_'*self.width())
        #construct right half of root symbol
        height = self.height()
        slash = '\n'.join(
            ' ' * (height - i - 1) + '/' + ' ' * i
            for i in range(height)
        )
        slash = stringPict(slash, height - 1)
        #left half of root symbol
        if height > 2:
            downline = stringPict('\\ \n \\', 1)
        else:
            downline = stringPict('\\')
        #put n on top, as low as possible
        if n is not None and n.width() > downline.width():
            downline = downline.left(' '*(n.width() - downline.width()))
            downline = downline.above(n)
        #build root symbol
        root = downline.right(slash)
        #glue it on at the proper height
        #normally, the root symbol is as high as self
        #which is one less than result
        #this moves the root symbol one down
        #if the root became higher, the baseline has to grow too
        root.baseline = result.baseline - result.height() + root.height()
        return result.left(root)

    def render(self, * args, **kwargs):
        """Return the string form of self.
        Unless the argument line_break is set to False, it will
        break the expression in a form that can be printed
        on the terminal without being broken up.
        """
        if kwargs["wrap_line"] is False:
            return "\n".join(self.picture)
        if kwargs["num_columns"] is not None:
            # Read the argument num_columns if it is not None
            ncols = kwargs["num_columns"]
        else:
            # Attempt to get a terminal width
            ncols = self.terminal_width()
        ncols -= 2
        if ncols <= 0:
            ncols = 78
        # If smaller than the terminal width, no need to correct
        if self.width() <= ncols:
            return type(self.picture[0])(self)
        # for one-line pictures we don't need v-spacers. on the other hand, for
        # multiline-pictures, we need v-spacers between blocks, compare:
        #
        # 2 2 3 | a*c*e + a*c*f + a*d | a*c*e + a*c*f + a*d | 3.14159265358979323
        # 6*x *y + 4*x*y + | | *e + a*d*f + b*c*e | 84626433832795
        # | *e + a*d*f + b*c*e | + b*c*f + b*d*e + b |
        # 3 4 4 | | *d*f |
        # 4*y*x + x + y | + b*c*f + b*d*e + b | |
        # | | |
        # | *d*f
        i = 0
        svals = []
        do_vspacers = (self.height() > 1)
        while i < self.width():
            svals.extend([ sval[i:i + ncols] for sval in self.picture ])
            if do_vspacers:
                svals.append("")  # a vertical spacer
            i += ncols
        if svals[-1] == '':
            del svals[-1]  # Get rid of the last spacer
        return "\n".join(svals)

    def terminal_width(self):
        """Return the terminal width if possible, otherwise return 0.
        """
        ncols = 0
        try:
            import curses
            import io
            try:
                curses.setupterm()
                ncols = curses.tigetnum('cols')
            except AttributeError:
                # windows curses doesn't implement setupterm or tigetnum
                # code below from
                # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440694
                from ctypes import windll, create_string_buffer
                # stdin handle is -10
                # stdout handle is -11
                # stderr handle is -12
                h = windll.kernel32.GetStdHandle(-12)
                csbi = create_string_buffer(22)
                res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
                if res:
                    import struct
                    (bufx, bufy, curx, cury, wattr,
                     left, top, right, bottom, maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
                    ncols = right - left + 1
            except curses.error:
                pass
            except io.UnsupportedOperation:
                pass
        except (ImportError, TypeError):
            pass
        return ncols

    def __eq__(self, o):
        if isinstance(o, str):
            return '\n'.join(self.picture) == o
        elif isinstance(o, stringPict):
            return o.picture == self.picture
        return False

    def __hash__(self):
        return super(stringPict, self).__hash__()

    def __str__(self):
        return str.join('\n', self.picture)

    def __unicode__(self):
        return unicode.join(u('\n'), self.picture)

    def __repr__(self):
        return "stringPict(%r,%d)" % ('\n'.join(self.picture), self.baseline)

    def __getitem__(self, index):
        return self.picture[index]

    def __len__(self):
        # BUG FIX: this used to return len(self.s), but no 'self.s'
        # attribute is ever stored -- __init__ only keeps self.picture
        # and self.baseline -- so len() always raised AttributeError.
        # The length of a picture is its number of lines.
        return len(self.picture)
class prettyForm(stringPict):
"""
Extension of the stringPict class that knows about basic math applications,
optimizing double minus signs.
"Binding" is interpreted as follows::
ATOM this is an atom: never needs to be parenthesized
FUNC this is a function application: parenthesize if added (?)
DIV this is a division: make wider division if divided
POW this is a power: only parenthesize if exponent
MUL this is a multiplication: parenthesize if powered
ADD this is an addition: parenthesize if multiplied or powered
NEG this is a negative number: optimize if added, parenthesize if
multiplied or powered
OPEN this is an open object: parenthesize if added, multiplied, or
powered (example: Piecewise)
"""
ATOM, FUNC, DIV, POW, MUL, ADD, NEG, OPEN = range(8)
def __init__(self, s, baseline=0, binding=0, unicode=None):
"""Initialize from stringPict and binding power."""
stringPict.__init__(self, s, baseline)
self.binding = binding
self.unicode = unicode or s
# Note: code to handle subtraction is in _print_Add
def __add__(self, *others):
"""Make a pretty addition.
Addition of negative numbers is simplified.
"""
arg = self
if arg.binding > prettyForm.NEG:
arg = stringPict(*arg.parens())
result = [arg]
for arg in others:
#add parentheses for weak binders
if arg.binding > prettyForm.NEG:
arg = stringPict(*arg.parens())
#use existing minus sign if available
if arg.binding != prettyForm.NEG:
result.append(' + ')
result.append(arg)
return prettyForm(binding=prettyForm.ADD, *stringPict.next(*result))
def __div__(self, den, slashed=False):
"""Make a pretty division; stacked or slashed.
"""
if slashed:
raise NotImplementedError("Can't do slashed fraction yet")
num = self
if num.binding == prettyForm.DIV:
num = stringPict(*num.parens())
if den.binding == prettyForm.DIV:
den = stringPict(*den.parens())
if num.binding==prettyForm.NEG:
num = num.right(" ")[0]
return prettyForm(binding=prettyForm.DIV, *stringPict.stack(
num,
stringPict.LINE,
den))
def __truediv__(self, o):
return self.__div__(o)
def __mul__(self, *others):
"""Make a pretty multiplication.
Parentheses are needed around +, - and neg.
"""
if len(others) == 0:
return self # We aren't actually multiplying... So nothing to do here.
args = self
if args.binding > prettyForm.MUL:
arg = stringPict(*args.parens())
result = [args]
for arg in others:
result.append(xsym('*'))
#add parentheses for weak binders
if arg.binding > prettyForm.MUL:
arg = stringPict(*arg.parens())
result.append(arg)
len_res = len(result)
for i in range(len_res):
if i < len_res - 1 and result[i] == '-1' and result[i + 1] == xsym('*'):
# substitute -1 by -, like in -1*x -> -x
result.pop(i)
result.pop(i)
result.insert(i, '-')
if result[0][0] == '-':
# if there is a - sign in front of all
# This test was failing to catch a prettyForm.__mul__(prettyForm("-1", 0, 6)) being negative
bin = prettyForm.NEG
if result[0] == '-':
right = result[1]
if right.picture[right.baseline][0] == '-':
result[0] = '- '
else:
bin = prettyForm.MUL
return prettyForm(binding=bin, *stringPict.next(*result))
def __repr__(self):
return "prettyForm(%r,%d,%d)" % (
'\n'.join(self.picture),
self.baseline,
self.binding)
    def __pow__(self, b):
        """Make a pretty power.

        Function applications may render the exponent inline on the
        function name (sin**2 -> "sin**2 (x)" style) when the exponent is a
        single line; everything else stacks the exponent above the base.
        """
        a = self
        use_inline_func_form = False
        if b.binding == prettyForm.POW:
            b = stringPict(*b.parens())
        if a.binding > prettyForm.FUNC:
            # Weakly-binding bases need parentheses under an exponent.
            a = stringPict(*a.parens())
        elif a.binding == prettyForm.FUNC:
            # heuristic for when to use inline power
            if b.height() > 1:
                a = stringPict(*a.parens())
            else:
                use_inline_func_form = True
        if use_inline_func_form:
            #         2
            #  sin  + + (x)
            # Exponent rides on the function name, args appended after.
            # NOTE(review): relies on a.prettyFunc/a.prettyArgs being set,
            # which only FUNC-bound prettyForms carry -- confirm.
            b.baseline = a.prettyFunc.baseline + b.height()
            func = stringPict(*a.prettyFunc.right(b))
            return prettyForm(*func.right(a.prettyArgs))
        else:
            #      2    <-- top
            # (x+y)     <-- bot
            # Pad both pictures to a common width, then stack.
            top = stringPict(*b.left(' '*a.width()))
            bot = stringPict(*a.right(' '*b.width()))
            return prettyForm(binding=prettyForm.POW, *bot.above(top))
simpleFunctions = ["sin", "cos", "tan"]
@staticmethod
def apply(function, *args):
"""Functions of one or more variables.
"""
if function in prettyForm.simpleFunctions:
#simple function: use only space if possible
assert len(
args) == 1, "Simple function %s must have 1 argument" % function
arg = args[0].__pretty__()
if arg.binding <= prettyForm.DIV:
#optimization: no parentheses necessary
return prettyForm(binding=prettyForm.FUNC, *arg.left(function + ' '))
argumentList = []
for arg in args:
argumentList.append(',')
argumentList.append(arg.__pretty__())
argumentList = stringPict(*stringPict.next(*argumentList[1:]))
argumentList = stringPict(*argumentList.parens())
return prettyForm(binding=prettyForm.ATOM, *argumentList.left(function)) | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Andrew Kofink <ajkofink@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: repository_set
version_added: 1.0.0
short_description: Enable/disable Red Hat Repositories available through subscriptions
description:
- Enable/disable Red Hat Repositories that are available through subscriptions
author: "Andrew Kofink (@akofink)"
options:
name:
description:
- Name of the repository set
required: false
type: str
product:
description:
- Name of the parent product
required: false
type: str
label:
description:
- Label of the repository set, can be used in place of I(name) & I(product)
required: false
type: str
repositories:
description:
- Release version and base architecture of the repositories to enable.
- Some repository sets require only I(basearch) or only I(releasever) to be set.
- See the examples how you can obtain this information using M(theforeman.foreman.resource_info).
- Required when I(all_repositories) is unset or C(false).
required: false
type: list
elements: dict
suboptions:
basearch:
description:
- Basearch of the repository to enable.
type: str
releasever:
description:
- Releasever of the repository to enable.
type: str
all_repositories:
description:
- Affect all available repositories in the repository set instead of listing them in I(repositories).
- Required when I(repositories) is unset or an empty list.
required: false
type: bool
state:
description:
- Whether the repositories are enabled or not
required: false
choices:
- 'enabled'
- 'disabled'
default: enabled
type: str
extends_documentation_fragment:
- theforeman.foreman.foreman
- theforeman.foreman.foreman.organization
'''
EXAMPLES = '''
- name: "Enable RHEL 7 RPMs repositories"
theforeman.foreman.repository_set:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
name: "Red Hat Enterprise Linux 7 Server (RPMs)"
organization: "Default Organization"
product: "Red Hat Enterprise Linux Server"
repositories:
- releasever: "7.0"
basearch: "x86_64"
- releasever: "7.1"
basearch: "x86_64"
- releasever: "7.2"
basearch: "x86_64"
- releasever: "7.3"
basearch: "x86_64"
state: enabled
- name: "Enable RHEL 7 RPMs repositories with label"
theforeman.foreman.repository_set:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
label: rhel-7-server-rpms
repositories:
- releasever: "7.0"
basearch: "x86_64"
- releasever: "7.1"
basearch: "x86_64"
- releasever: "7.2"
basearch: "x86_64"
- releasever: "7.3"
basearch: "x86_64"
state: enabled
- name: "Disable RHEL 7 Extras RPMs repository"
theforeman.foreman.repository_set:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
name: Red Hat Enterprise Linux 7 Server - Extras (RPMs)
organization: "Default Organization"
product: Red Hat Enterprise Linux Server
state: disabled
repositories:
- basearch: x86_64
- name: "Enable RHEL 8 BaseOS RPMs repository with label"
theforeman.foreman.repository_set:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
label: rhel-8-for-x86_64-baseos-rpms
repositories:
- releasever: "8"
- name: "Enable Red Hat Virtualization Manager RPMs repository with label"
theforeman.foreman.repository_set:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
label: "rhel-7-server-rhv-4.2-manager-rpms"
repositories:
- basearch: x86_64
state: enabled
- name: "Enable Red Hat Virtualization Manager RPMs repository without specifying basearch"
theforeman.foreman.repository_set:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
label: "rhel-7-server-rhv-4.2-manager-rpms"
all_repositories: true
state: enabled
- name: "Search for possible repository sets of a product"
theforeman.foreman.resource_info:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
resource: repository_sets
search: product_name="Red Hat Virtualization Manager"
register: data
- name: "Output found repository sets, see the contentUrl section for possible repository substitutions"
debug:
var: data
- name: "Search for possible repository sets by label"
theforeman.foreman.resource_info:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
resource: repository_sets
search: label=rhel-7-server-rhv-4.2-manager-rpms
register: data
- name: "Output found repository sets, see the contentUrl section for possible repository substitutions"
debug:
var: data
- name: Enable set with and without all_repositories at the same time
theforeman.foreman.repository_set:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
label: "{{ item.label }}"
repositories: "{{ item.repositories | default(omit) }}"
all_repositories: "{{ item.repositories is not defined }}"
state: enabled
loop:
- label: rhel-7-server-rpms
repositories:
- releasever: "7Server"
basearch: "x86_64"
- label: rhel-7-server-rhv-4.2-manager-rpms
'''
RETURN = '''
entity:
description: Final state of the affected entities grouped by their type.
returned: success
type: dict
contains:
repository_sets:
description: List of repository sets.
type: list
elements: dict
'''
from ansible_collections.theforeman.foreman.plugins.module_utils.foreman_helper import KatelloEntityAnsibleModule
def get_desired_repos(desired_substitutions, available_repos):
    """Return the available repos whose substitutions match a desired set.

    Matches are collected in the order of *desired_substitutions*, so a
    repo can appear once for every substitution dict that selects it.
    """
    matches = []
    for wanted in desired_substitutions:
        matches.extend(repo for repo in available_repos
                       if repo['substitutions'] == wanted)
    return matches
def record_repository_set_state(module, record_data, repo, state_before, state_after):
    """Record one repository's before/after state on the module's diff."""
    before = dict(record_data, repo_name=repo, state=state_before)
    after = dict(before, state=state_after)
    module.record_before('repository_sets', before)
    module.record_after('repository_sets', after)
    module.record_after_full('repository_sets', after)
class KatelloRepositorySetModule(KatelloEntityAnsibleModule):
    """Katello module class for repository sets; all behaviour is inherited."""
    pass
def main():
    """Entry point: enable/disable repositories of one Red Hat repository set."""
    module = KatelloRepositorySetModule(
        foreman_spec=dict(
            product=dict(type='entity', scope=['organization']),
            name=dict(),
            label=dict(),
            repositories=dict(type='list', elements='dict', options=dict(
                basearch=dict(),
                releasever=dict(),
            )),
            all_repositories=dict(type='bool'),
        ),
        argument_spec=dict(
            state=dict(default='enabled', choices=['disabled', 'enabled']),
        ),
        required_one_of=[
            ['label', 'name'],
            ['repositories', 'all_repositories'],
        ],
        required_if=[
            ['all_repositories', False, ['repositories']],
            ['repositories', [], ['all_repositories']],
        ],
    )
    repositories = module.foreman_params.get('repositories', [])
    with module.api_connection():
        scope = module.scope_for('organization')
        # record_data identifies the repo set in diff output and errors.
        record_data = {}
        if 'product' in module.foreman_params:
            record_data['product'] = module.foreman_params['product']
            scope.update(module.scope_for('product'))
        # Look the set up by label when given, else by name (+product scope).
        if 'label' in module.foreman_params:
            search = 'label="{0}"'.format(module.foreman_params['label'])
            repo_set = module.find_resource('repository_sets', search=search, params=scope)
            record_data['label'] = module.foreman_params['label']
        else:
            repo_set = module.find_resource_by_name('repository_sets', name=module.foreman_params['name'], params=scope)
            record_data['name'] = module.foreman_params['name']
        module.set_entity('entity', repo_set)
        repo_set_scope = {'id': repo_set['id'], 'product_id': repo_set['product']['id']}
        repo_set_scope.update(scope)
        # Available repos must be read even in check mode to compute the diff.
        available_repos = module.resource_action('repository_sets', 'available_repositories', params=repo_set_scope, ignore_check_mode=True)
        available_repos = available_repos['results']
        current_repos = repo_set['repositories']
        if not module.foreman_params.get('all_repositories', False):
            desired_repos = get_desired_repos(repositories, available_repos)
        else:
            desired_repos = available_repos[:]
        current_repo_names = set(map(lambda repo: repo['name'], current_repos))
        desired_repo_names = set(map(lambda repo: repo['repo_name'], desired_repos))
        # Every requested substitution must resolve to exactly one repo;
        # otherwise fail with the search/found/available details.
        if not module.foreman_params.get('all_repositories', False) and len(repositories) != len(desired_repo_names):
            repo_set_identification = ' '.join(['{0}: {1}'.format(k, v) for (k, v) in record_data.items()])
            available_repo_details = [{'name': repo['repo_name'], 'repositories': repo['substitutions']} for repo in available_repos]
            desired_repo_details = [{'name': repo['repo_name'], 'repositories': repo['substitutions']} for repo in desired_repos]
            search_details = record_data.copy()
            search_details['repositories'] = repositories
            error_msg = "Desired repositories are not available on the repository set {0}.\nSearched: {1}\nFound: {2}\nAvailable: {3}".format(
                repo_set_identification, search_details, desired_repo_details, available_repo_details)
            module.fail_json(msg=error_msg)
        if module.state == 'enabled':
            # Enable only repos that are desired but not yet enabled.
            for repo in desired_repo_names - current_repo_names:
                repo_to_enable = next((r for r in available_repos if r['repo_name'] == repo))
                repo_change_params = repo_to_enable['substitutions'].copy()
                repo_change_params.update(repo_set_scope)
                record_repository_set_state(module, record_data, repo, 'disabled', 'enabled')
                module.resource_action('repository_sets', 'enable', params=repo_change_params)
        elif module.state == 'disabled':
            # Disable only repos that are both currently enabled and desired.
            for repo in current_repo_names & desired_repo_names:
                repo_to_disable = next((r for r in available_repos if r['repo_name'] == repo))
                repo_change_params = repo_to_disable['substitutions'].copy()
                repo_change_params.update(repo_set_scope)
                record_repository_set_state(module, record_data, repo, 'enabled', 'disabled')
                module.resource_action('repository_sets', 'disable', params=repo_change_params)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
__author__ = 'sysferland'
from BeautifulSoup import BeautifulSoup
from difflib import SequenceMatcher as SM
from ffvideo import VideoStream
from ffvideo import DecoderError
from ffvideo import NoMoreData
import urllib2, ordereddict, os, sys, cymysql, time, re, hashlib, socket, subprocess, signal
def select_season_id(season, conn):
    """Look up a season's primary key by name; return 0 when not found."""
    cursor = conn.cursor()
    cursor.execute("SELECT `id` FROM `tongue`.`seasons` WHERE `season_name` = %s", season)
    match = cursor.fetchone()
    return match[0] if match else 0
def check_feeds(ffserver_IP, ffserver_port, sql_host, sql_u, sql_p):
    """Forever poll an ffserver status page and mirror feed state into MySQL.

    Scrapes http://<ip>:<port>/stat.html (port is passed as a string),
    splits the HTML tables into Streams / Feeds / Connections, matches
    connections back to streams, and upserts each feed's in-use state into
    `tongue`.`feeds`. Runs an infinite loop; never returns.
    """
    sconn = cymysql.connect(host=sql_host, user=sql_u, passwd=sql_p, db="tongue")
    cur = sconn.cursor()
    while 1:
        sconn.commit()
        time.sleep(0.5)
        try:
            html = urllib2.urlopen("http://"+ffserver_IP+":"+ffserver_port+"/stat.html")
            html.getcode()
        except urllib2.URLError:
            print "ffserver is offline."
            continue
        soup = BeautifulSoup(html.read())
        i = 0
        ii_f = 0
        jump = 0  # number of upcoming text cells to skip (table headers/links)
        stats = {"stats": {}}
        all = soup.findAll("table")  # NOTE: shadows the builtin 'all'
        len_supply = len(all)
        for supply in all:
            i += 1
            if i == 1:
                ii = -1
                # Streams: first table on the page.
                stats['stats'].update({"Streams": {}})
                for row in supply.findAll("tr"):
                    iii = 0
                    ii += 1
                    if ii == 0:
                        continue  # skip the header row
                    for item in row.findAll(text=True):
                        if str(item) == " ":
                            continue
                        if jump > 0:
                            jump -= 1
                            continue
                        # Exact-match (ratio 1.0) against nav links marks the
                        # start of a 7-cell header run to skip.
                        comp = SM(None, str(item), "index.html").ratio()
                        comp2 = SM(None, str(item), "stat.html").ratio()
                        if comp == 1.0 or comp2 == 1.0:
                            jump = 7
                            continue
                        if ii not in stats['stats']['Streams']:
                            stats['stats']['Streams'].update({ii: {}})
                        stats['stats']['Streams'][ii].update({iii: item})
                        iii += 1
            else:
                if i != len_supply:
                    # Feeds: every middle table.
                    ii_ = -1
                    if "Feeds" not in stats['stats']:
                        stats['stats'].update({"Feeds": {}})
                    stats['stats']['Feeds'].update({ii_f: {}})
                    for row in supply.findAll("tr"):
                        iii = 0
                        ii_ += 1
                        if ii_ == 0:
                            continue
                        for item in row.findAll(text=True):
                            if str(item) == " ":
                                continue
                            if jump > 0:
                                jump -= 1
                                continue
                            comp = SM(None, str(item), "Stream").ratio()
                            if comp == 1.0:
                                jump = 4
                                continue
                            if ii_ not in stats['stats']['Feeds'][ii_f]:
                                stats['stats']['Feeds'][ii_f].update({ii_: {}})
                            stats['stats']['Feeds'][ii_f][ii_].update({iii: item})
                            iii += 1
                    ii_f += 1
                else:
                    ii = -1
                    # Connections: the last table.
                    stats['stats'].update({"Connections": {}})
                    for row in supply.findAll("tr"):
                        iii = 0
                        ii += 1
                        if ii == 0:
                            continue
                        for item in row.findAll(text=True):
                            if str(item) == " ":
                                continue
                            if jump > 0:
                                #jump -= 1
                                continue
                            comp = SM(None, str(item), "index.html").ratio()
                            comp2 = SM(None, str(item), "stat.html").ratio()
                            if comp == 1.0 or comp2 == 1.0:
                                continue
                            if ii not in stats['stats']['Connections']:
                                stats['stats']['Connections'].update({ii: {}})
                            stats['stats']['Connections'][ii].update({iii: item})
                            iii += 1
        # Re-index connections, dropping incomplete rows (< 8 cells).
        # NOTE(review): deleting while iterating .items() is safe on
        # Python 2 only, where items() returns a list copy.
        temp = {}
        i = 0
        #print stats['stats']['Connections']
        for key, conn in stats['stats']['Connections'].items():
            if len(conn) < 8:
                del stats['stats']['Connections'][key]
                continue;
            temp.update({i: {}})
            temp[i].update(conn)
            i += 1
        del stats['stats']['Connections']
        stats['stats']['Connections'] = {}
        stats['stats']['Connections'].update(temp)
        # Connections whose feed cell contains "(...)" are internal and skipped.
        used_streams = {}
        i = 0
        for conn in stats['stats']['Connections'].values():
            re1='.*?' # Non-greedy match on filler
            re2='(\\(.*\\))' # Round Braces 1
            rg = re.compile(re1+re2,re.IGNORECASE|re.DOTALL)
            m = rg.search(conn[1])
            if m:
                continue
            used_streams.update({i: {'feed':conn[1], 'dest':conn[2], 'http_stat': conn[4], 'sent': conn[7]}})
            i += 1
        used_streams = ordereddict.OrderedDict(sorted(used_streams.items(), key=lambda t: t[1]))
        # Upsert each stream: in-use rows get connection details, idle rows
        # are reset. INSERT first; duplicate-key errors fall back to UPDATE.
        # NOTE(review): the inner loop variable 'key' shadows the outer one.
        for key, stream in stats['stats']['Streams'].items():
            #print "--------------"
            flag = 0
            for key, used in used_streams.items():
                if SM(None, stream[0], used['feed']).ratio() == 1.0:
                    #print "Feed " + str(stream[9]).strip() + " in use, Insert/Update its Data"
                    flag = 1
                    try:
                        cur.execute("INSERT INTO `tongue`.`feeds` (`feed`, `feed_server`, `in_use`, `dest`, `http_stat`, `sent`) VALUES (%s, %s, 1, %s, %s, %s)", (str(stream[9]).strip(), ffserver_IP+":"+ffserver_port, str(used['dest']), str(used['http_stat']).strip(), str(used['sent']).strip()))
                        sys.stdout.write("(+)")
                        sys.stdout.flush()
                    except cymysql.MySQLError:
                        cur.execute("UPDATE `tongue`.`feeds` SET `in_use` = 1, `dest` = %s, `http_stat` = %s, `sent` = %s WHERE `feed` = %s", (str(used['dest']), str(used['http_stat']).strip(), str(used['sent']).strip(), str(stream[9]).strip() ))
                        sys.stdout.write("[+]")
                        sys.stdout.flush()
                    sconn.commit()
                    del used_streams[key]
                    break
            if flag == 0:
                #print "Insert/Update Unused Feed: "+str(stream[9]).strip()
                try:
                    cur.execute("INSERT INTO `tongue`.`feeds` (`feed`, `feed_server`, `in_use`, `dest`, `http_stat`, `sent`) VALUES (%s, %s, 0, '', '', '')", (str(stream[9]).strip(), ffserver_IP+":"+ffserver_port ))
                    sys.stdout.write("+")
                    sys.stdout.flush()
                except cymysql.MySQLError:
                    cur.execute("UPDATE `tongue`.`feeds` SET `feed` = %s, `in_use` = 0, `dest` = '', `http_stat` = '', `sent` = '' where `feed` = %s", (str(stream[9]).strip() , str(stream[9]).strip()))
                    sys.stdout.write("-")
                    sys.stdout.flush()
                sconn.commit()
        continue
def select_show_id(show, conn):
    """Look up a show's primary key by name; return 0 when not found."""
    cursor = conn.cursor()
    cursor.execute("SELECT `id` FROM `tongue`.`shows` WHERE `show_name` = %s", show)
    match = cursor.fetchone()
    return match[0] if match else 0
def insert_show(show, conn):
    """Create a show row named *show*; return its auto-increment id."""
    cursor = conn.cursor()
    cursor.execute("INSERT INTO `tongue`.`shows` (`id`, `show_name`) VALUES ('', %s)", show)
    conn.commit()
    return cursor.lastrowid
def insert_season(season, show_id, conn):
    """Create a season row linked to *show_id*; return the new row id.

    Both values are stringified before binding, matching the schema's
    text columns.
    """
    conn.commit()
    cursor = conn.cursor()
    params = (str(season), str(show_id))
    cursor.execute("INSERT INTO `tongue`.`seasons` (`id`, `season_name`, `show_id`) VALUES ('', %s, %s)", params)
    conn.commit()
    return cursor.lastrowid
def prep_sql_movies(Movies_mnt, conn):
    """Forever scan the movies mount and register new files in movie_files.

    Every 15 minutes, walks *Movies_mnt*, probes each unseen video with
    ffvideo for codec/runtime/dimensions, detects raw DVD folders
    (VIDEO_TS) and cd1/cd2 multi-part groups, then inserts a row keyed by
    a sha256 of the full path. Runs an infinite loop; never returns.
    """
    cur = conn.cursor()
    #print [name for name in os.listdir(Movies_mnt) if os.path.isdir(Movies_mnt)]
    mnt_path = Movies_mnt.split('/')
    mnt_num = (len(mnt_path) - 1)
    while 1:
        dvd_flag = 0
        grouped = 0
        group = 1
        prev_folder = ""
        for root, dirs, files in os.walk(Movies_mnt):
            path = root.split('/')
            #print path
            root_num = (len(path) - 1)
            sub = os.path.basename(root)
            if prev_folder != sub:
                grouped = 0
            for file in files:
                filepath = root+"/"+file
                #print filepath
                # Identity key: hash of the absolute path.
                path_hash = hashlib.sha256(filepath).hexdigest()
                file_parts = file.split(".")
                ext = file_parts[-1].lower()
                # Skip sidecar/metadata files.
                if ext == "nfo" or ext == "jpg" or ext == "jpeg" or ext == "srt" or ext == "ifo" or ext == "bup" or ext == "nzb" or ext == "idx" or ext == "sfv" or ext == "txt" or ext == "db" or ext == "DS_Store":
                    continue
                cur.execute("SELECT `id` FROM `tongue`.`movie_files` WHERE `path_hash` = %s LIMIT 1", str(path_hash))
                row = cur.fetchone()
                #print filepath
                #print row
                if not row:
                    del file_parts[-1]
                    file_name_no_ext = "-".join(file_parts)
                else:
                    continue  # already registered
                #print len(path)*'---'
                if mnt_num == root_num-1:
                    #print "In root: " + root
                    ii = 0
                else:
                    #print "Sub of root: " + sub + " : " + file
                    if sub.upper() == "VIDEO_TS":
                        #print "Is DVD Video: " + file
                        if dvd_flag == 1:
                            print "flag already set"
                            #continue
                        else:
                            dvd_flag = 1
                        # Raw DVDs are registered under the folder name.
                        file = sub
                    else:
                        dvd_flag = 0
                lower_file = file.lower()
                # cd1/cd2 siblings in the same folder form one group.
                if "cd1" in lower_file or "cd2" in lower_file:
                    #print lower_file
                    if prev_folder != sub:
                        prev_folder = sub
                        grouped = 1
                        group += 1
                        #print
                else:
                    grouped = 0
                if not dvd_flag:
                    try:
                        vs = VideoStream(filepath)
                    except DecoderError:
                        #pass
                        codec = "unknown"
                        length = 0
                        dimensions = "0x0"
                        print "Decoder Error :("
                    except NoMoreData:
                        codec = "unknown"
                        length = 0
                        dimensions = "0x0"
                        print "File corrupt??"
                    else:
                        frame = vs.get_frame_at_sec(200)
                        codec = vs.codec_name
                        # Derive h:mm:ss from the float duration; the regex
                        # pulls the fractional digits of the minutes value.
                        hours = vs.duration/3600
                        minuets = (vs.duration/60) - (int(hours) * 60)
                        rg = re.compile('.*?\\d+.*?(\\d+)',re.IGNORECASE|re.DOTALL)
                        m = rg.search(str(minuets))
                        seconds = int(float("0."+ m.group(1)) * 60)
                        # print vs.duration, minuets, hours
                        length = "%dh:%02d:%02d" % (hours, minuets, seconds)
                        dimensions = "%dx%d" % (vs.frame_width, vs.frame_height)
                else:
                    codec = "RAWDVD"
                    length = 0
                    dimensions = "0x0"
                # Shell-style escaping baked into the stored paths/names.
                fullpath = filepath.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\(")
                file_ = file.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\(")
                #print "DVD Flag: " + str(dvd_flag)
                if dvd_flag == 1:
                    # Use the DVD's parent folder as the display name.
                    file_ = path[-2]
                if grouped == 0:
                    group_ins = 0
                else:
                    group_ins = group
                #print grouped, group_ins
                try:
                    cur.execute("INSERT INTO `tongue`.`movie_files` (`id`, `fullpath`, `filename`, `path_hash`, `grouped`, `group`, `dvd_raw`, `runtime`, `dimensions`, `codec`) VALUES (NULL, %s, %s, %s, %s, %s, %s, %s, %s, %s)", (fullpath.replace(os.path.dirname(root) ,""), file_, str(path_hash), grouped, group_ins, dvd_flag, length, dimensions, codec))
                except cymysql.MySQLError, e:
                    print e
                sys.stdout.write("`")
                sys.stdout.flush()
                conn.commit()
                #print len(path)*'---', file
        time.sleep(900)
def prep_sql_shows(Shows_mnt, conn):
    """Forever scan the shows mount and register new episodes in video_files.

    Every 15 minutes, walks *Shows_mnt*, reconstructs each file's path
    chain up to the mount root, hashes it for identity, probes the video
    with ffvideo and creates show/season rows as needed before inserting
    the episode. Runs an infinite loop; never returns.
    """
    cur = conn.cursor()
    while 1:
        ii = 0  # progress-dot column counter
        for (dirpath, dirnames, filenames) in os.walk(os.path.normpath(Shows_mnt)):
            if filenames:
                for file in filenames:
                    if ".DS_Store" in file:
                        continue
                    file_parts = file.split(".")
                    ext = file_parts[-1].lower()
                    del file_parts[-1]
                    # Skip sidecar/metadata files.
                    if ext == "nfo" or ext == "jpg" or ext == "jpeg" or ext == "srt" or ext == "ifo" or ext == "bup" or ext == "nzb" or ext == "idx" or ext == "sfv" or ext == "txt" or ext == "db" or ext == "part":
                        continue
                    # Build [file, parent, grandparent, ...] up to the mount
                    # root; the joined string is the identity hash input.
                    paths = []
                    parse = ""
                    i = 0
                    while Shows_mnt != parse:
                        if i == 0:
                            paths.append(file)
                            parse = os.path.dirname(os.path.normpath(dirpath + "/" + file))
                        else:
                            if parse in paths:
                                parse = os.path.dirname(parse)
                        i += 1
                        paths.append(parse)
                    #print paths
                    path_string = ''.join(paths)
                    path_hash = hashlib.sha256(path_string).hexdigest()
                    #print path_hash
                    cur.execute("SELECT `id` FROM `tongue`.`video_files` WHERE `path_hash` = %s LIMIT 1", str(path_hash))
                    row = cur.fetchone()
                    #print row
                    if not row:
                        if ext != "rm":
                            filepath = dirpath+"/"+file
                            print filepath
                            file_parts = file.split(".")
                            ext = file_parts[-1].lower()
                            del file_parts[-1]
                            file_name_no_ext = "-".join(file_parts)
                            try:
                                vs = VideoStream(filepath)
                            except DecoderError:
                                #pass
                                codec = "unknown"
                                length = 0
                                dimensions = "0x0"
                                print "Decoder Error :("
                            except NoMoreData:
                                codec = "unknown"
                                length = 0
                                dimensions = "0x0"
                                print "File corrupt??"
                            else:
                                # Short clips are sampled at their midpoint.
                                if vs.duration < 20:
                                    frame = vs.get_frame_at_sec(vs.duration/2)
                                else:
                                    frame = vs.get_frame_at_sec(20)
                                codec = vs.codec_name
                                hours = vs.duration/3600
                                minuets = (vs.duration/60) - (int(hours) * 60)
                                rg = re.compile('.*?\\d+.*?(\\d+)',re.IGNORECASE|re.DOTALL)
                                m = rg.search(str(minuets))
                                seconds = int(float("0."+ m.group(1)) * 60)
                                # print vs.duration, minuets, hours
                                length = "%dh:%02d:%02d" % (hours, minuets, seconds)
                                dimensions = "%dx%d" % (vs.frame_width, vs.frame_height)
                            if paths:
                                plen = len(paths)
                                if plen == 3:
                                    # show/<file> layout: folder is the show.
                                    # NOTE(review): season_folder reuses
                                    # paths[plen-2] (same as show) -- verify.
                                    video = str(paths[0])
                                    show_folder = str(os.path.basename(paths[plen-2]))
                                    show_id = select_show_id(show_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), conn)
                                    if show_id == 0:
                                        show_id = insert_show(show_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), conn)
                                    season_folder = str(os.path.basename(paths[plen-2]))
                                    season_id = select_season_id(season_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), conn)
                                    if season_id == 0:
                                        season_id = insert_season(season_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), show_id, conn)
                                    try:
                                        cur.execute("INSERT INTO `tongue`.`video_files` (`id`, `video`, `season_id`, `show_id`, `path_hash`, `runtime`, `dimensions`) VALUES (NULL, %s, %s, %s, %s, %s, %s )", (video.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), season_id, show_id, str(path_hash), length, dimensions))
                                    except cymysql.MySQLError, e:
                                        print e
                                    else:
                                        #print paths[0]+"|=|"+os.path.basename(paths[plen-2])+"|=|"+os.path.basename(paths[plen-2])
                                        #print show_id, season_id
                                        # Progress dots, wrapped at 100 cols.
                                        if ii == 100:
                                            sys.stdout.write("\n")
                                            ii = 0
                                        ii += 1
                                        sys.stdout.write(".")
                                        sys.stdout.flush()
                                else:
                                    # show/season/<file> layout.
                                    video = str(paths[0])
                                    show_folder = str(os.path.basename(paths[plen-2]))
                                    show_id = select_show_id(show_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), conn)
                                    if show_id == 0:
                                        show_id = insert_show(show_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), conn)
                                    season_folder = str(paths[plen-3].strip(Shows_mnt))
                                    season_id = select_season_id(season_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), conn)
                                    if season_id == 0:
                                        season_id = insert_season(season_folder.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), show_id, conn)
                                    try:
                                        cur.execute("INSERT INTO `tongue`.`video_files` (`id`, `video`, `season_id`, `show_id`, `path_hash`) VALUES (NULL, %s, %s, %s, %s )", (video.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\-").replace("&", "\&").replace(")", "\)").replace("(", "\("), season_id, show_id, str(path_hash)))
                                    except cymysql.MySQLError, e:
                                        print e
                                    sys.stdout.write(".")
                                    sys.stdout.flush()
                    conn.commit()
        time.sleep(900)
def fetch_waiting(conn):
    """Peek at the oldest queued playback request.

    Reads the head of `waiting`, then joins it against the matching media
    table (video_files or movie_files). Returns the joined row tuple with
    the source table name appended, or 0 when the queue is empty or the
    queued row cannot be resolved.

    Fix: the unresolved-row path previously fell off the end and returned
    an implicit None; it now returns 0 like the empty-queue path (both are
    falsy, so truthiness-checking callers are unaffected).
    """
    conn.commit()
    cur = conn.cursor()
    cur.execute("SELECT `table` FROM `waiting` ORDER BY `id` LIMIT 1")
    table_ = cur.fetchone()
    if not table_:
        return 0
    if table_[0] == "video_files":
        cur.execute("SELECT `waiting`.`id`, `waiting`.`feed`, `waiting`.`feed_server`, `waiting`.`seek`, `shows`.`show_name`"+
                    ", `seasons`.`season_name`, `video_files`.`video` FROM `tongue`.`waiting`, `tongue`.`video_files`, `tongue`.`seasons`, "+
                    "`tongue`.`shows` WHERE `video_files`.`id` = `waiting`.`video_id` AND `seasons`.`id` = `video_files`.`season_id` AND "+
                    "`shows`.`id` = `video_files`.`show_id` ORDER BY `waiting`.`id` ASC LIMIT 1")
    elif table_[0] == "movie_files":
        cur.execute("SELECT `waiting`.`id`, `waiting`.`video_id`, `waiting`.`feed`, `waiting`.`feed_server`, `waiting`.`seek`, `movie_files`.`fullpath`, `movie_files`.`group`, `movie_files`.`grouped`, `movie_files`.`dvd_raw` FROM `tongue`.`waiting`, `tongue`.`movie_files` WHERE `movie_files`.`id` = `waiting`.`video_id` ORDER BY `waiting`.`id` ASC LIMIT 1")
    row = cur.fetchone()
    if row:
        # Tag the row with its source table so the caller can dispatch.
        return row + (table_[0],)
    return 0
def remove_waiting(id, conn):
    """Delete one waiting-queue row by its (int-coerced) primary key."""
    conn.commit()
    cursor = conn.cursor()
    cursor.execute("DELETE FROM `tongue`.`waiting` WHERE `id` = %s", int(id))
    conn.commit()
def get_unused_feeds(conn):
    """Return every feed row not currently in use, or 0 when there are none."""
    conn.commit()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM `tongue`.`feeds` WHERE `in_use` != 1")
    rows = cursor.fetchall()
    return rows if rows else 0
def clean_threads(unused_feeds, threads):
    """Kill streamer processes whose feed is no longer in use.

    For each tracked thread whose feed appears in an unused-feed row,
    finds the matching PIDs via ps+grep and SIGKILLs them. Returns the
    list of thread keys that were reaped so the caller can drop them.
    """
    rm = []
    procs = []
    # NOTE(review): 'procs' is never cleared between threads, so PIDs
    # collected for an earlier match are killed again on later matches;
    # os.kill on an already-dead PID raises OSError. Verify intent.
    for thread in threads:
        #continue
        for feed in unused_feeds:
            if threads[thread]['feed'] in feed:
                #print thread['proc'].pid
                # Turn "feedX" into "[f]eedX" so grep does not match its
                # own command line in the ps output.
                first_letter = str(threads[thread]['feed'])[0:1]
                alteredFeed = "["+first_letter+"]"+str(threads[thread]['feed'][1:])
                #find all PIDs for the feed : ps ax|grep [f]eed%.ffm
                ps = subprocess.check_output("ps -eo pid,command|grep "+alteredFeed, shell=True).split("\n")
                #print ps
                for proc in ps:
                    proc = proc.lstrip().rstrip()
                    if proc == "":
                        continue
                    #print proc
                    #print proc.split(" ")[0]
                    procs.append(proc.split(" ")[0])
                #print "----------------------"
                # Reverse so newer (child) processes die before parents.
                procs.reverse()
                for proc in procs:
                    #print "Killed: "+str(proc)
                    os.kill(int(proc), signal.SIGKILL)
                sys.stdout.write("<-"+str(thread)+">")
                sys.stdout.flush()
                rm.append(thread)
    return rm
def tongue_socket(HOST, PORT): # A socket process that just listens and responds with what was sent to it.
    """Minimal blocking TCP echo server; loops forever, never returns."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # init socket class
    s.bind((HOST, PORT)) # Bind to IP and Port
    s.listen(1) # Tell it to listen on that IP and Port
    while 1:
        conn, addr = s.accept() # Accept connection
        print 'Connected by', addr
        data = conn.recv(1024) # Look for 1k of data
        # NOTE(review): the empty-data path 'continue's without closing
        # the accepted connection, leaking one socket per empty read.
        if not data: continue # if data is empty ignore
        conn.send(data) # if data is there, send it back
        conn.close() # not sure how you would get here seeing how there is no break out of the while loop
def play_file(feed, feed_server, seek, fullpath, bin_path):
    """Launch a detached StreamThread worker that pushes *fullpath* to the
    given ffserver feed, starting *seek* seconds in. Always returns 0."""
    command = 'python StreamThread.py -feed '+str(feed)+' -ffserver '+str(feed_server)+' -seek '+str(seek)+' -source '+str(fullpath)+' -binpath '+str(bin_path)
    command = command.replace("&", "\&")
    print command
    # NOTE(review): shell=True with interpolated paths is shell-injection
    # prone if *fullpath* ever contains untrusted text; an argument list
    # with shell=False would be safer.
    subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return 0
# -*- coding: utf-8 -*-
"""
Pull pangrams from a variety of world languages for template testing.
A pangram is a phrase that includes every letter of an alphabet.
The list is drawn from the Web site of Markus Kuhn.
http://www.cl.cam.ac.uk/~mgk25/ucs/examples/quickbrown.txt
"""
# Language-code -> pangram mapping. NOTE(review): several values use
# backslash line continuations inside the string literal, so the exact
# characters around each line break are part of the data — preserve them.
PANGRAMS = {
    'en': 'The quick brown fox jumps over the lazy dog.',
    'da': 'Quizdeltagerne spiste jordbær med fløde, mens cirkusklovnen Wolther\
spillede på xylofon.',
    'de': 'Falsches Üben von Xylophonmusik quält jeden größeren Zwerg.',
    'el': 'Γαζέες καὶ μυρτιὲς δὲν θὰ βρῶ πιὰ στὸ χρυσαφὶ ξέφωτο.',
    'es': 'El pingüino Wenceslao hizo kilómetros bajo exhaustiva lluvia y \
frío, añoraba a su querido cachorro.',
    'fr': "Portez ce vieux whisky au juge blond qui fume sur son île \
intérieure, à côté de l'alcôve ovoïde, où les bûches se consument dans l'âtre,\
ce qui lui permet de penser à la cænogenèse de l'être dont il est question \
dans la cause ambiguë entendue à Moÿ, dans un capharnaüm qui, pense-t-il, \
diminue çà et là la qualité de son œuvre.",
    'ga': "D'fhuascail Íosa, Úrmhac na hÓighe Beannaithe, pór Éava agus \
Ádhaimh.",
    'hu': 'Árvíztűrő tükörfúrógép.',
    'is': 'Kæmi ný öxi hér ykist þjófum nú bæði víl og ádrepa.',
    'jp': """'いろはにほへとちりぬるを
わかよたれそつねならむ
うゐのおくやまけふこえて
あさきゆめみしゑひもせす""",
    'he': '? דג סקרן שט בים מאוכזב ולפתע מצא לו חברה איך הקליטה.',
    'pl': 'Pchnąć w tę łódź jeża lub ośm skrzyń fig.',
    'ru': 'чащах юга жил бы цитрус? Да, но фальшивый экземпляр!',
    'tr': 'Pijamalı hasta, yağız şoföre çabucak güvendi.'
}
def get_pangram(language='en'):
    """Return the pangram for *language* (default English).

    Raises KeyError for a language code not present in PANGRAMS.
    """
    pangram = PANGRAMS[language]
    return pangram
def get_html(pangram):
    """Wrap *pangram* in a <p> element, converting newlines to <br> tags."""
    wrapped = '<p>%s</p>' % pangram
    return wrapped.replace("\n", "<br>")
# -*- coding: utf-8 -*-
"""upload_docs
Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""
from base64 import standard_b64encode
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import socket
import zipfile
import tempfile
import shutil
import itertools
import functools
from setuptools.extern import six
from setuptools.extern.six.moves import http_client, urllib
from pkg_resources import iter_entry_points
from .upload import upload
def _encode(s):
    """UTF-8 encode *s*; on Python 3, round-trip lone surrogates safely."""
    error_policy = 'surrogateescape' if six.PY3 else 'strict'
    return s.encode('utf-8', error_policy)
class upload_docs(upload):
    """Distutils command: zip built docs and upload them to pythonhosted."""

    # override the default repository as upload_docs isn't
    # supported by Warehouse (and won't be).
    DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'

    description = 'Upload documentation to PyPI'

    # distutils option table: (long name, short name, help text).
    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server'),
        ('upload-dir=', None, 'directory to upload'),
    ]

    # No extra boolean flags beyond the base upload command's.
    boolean_options = upload.boolean_options
    def has_sphinx(self):
        # Predicate for sub_commands: True when no explicit upload dir was
        # given AND a 'build_sphinx' command is installed via entry points.
        # Falls off the end (returns None, falsy) in every other case.
        if self.upload_dir is None:
            for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
                return True

    # Run build_sphinx first whenever the predicate above is truthy.
    sub_commands = [('build_sphinx', has_sphinx)]
def initialize_options(self):
upload.initialize_options(self)
self.upload_dir = None
self.target_dir = None
def finalize_options(self):
log.warn("Upload_docs command is deprecated. Use RTD instead.")
upload.finalize_options(self)
if self.upload_dir is None:
if self.has_sphinx():
build_sphinx = self.get_finalized_command('build_sphinx')
self.target_dir = build_sphinx.builder_target_dir
else:
build = self.get_finalized_command('build')
self.target_dir = os.path.join(build.build_base, 'docs')
else:
self.ensure_dirname('upload_dir')
self.target_dir = self.upload_dir
self.announce('Using upload directory %s' % self.target_dir)
def create_zipfile(self, filename):
zip_file = zipfile.ZipFile(filename, "w")
try:
self.mkpath(self.target_dir) # just in case
for root, dirs, files in os.walk(self.target_dir):
if root == self.target_dir and not files:
raise DistutilsOptionError(
"no files found in upload directory '%s'"
% self.target_dir)
for name in files:
full = os.path.join(root, name)
relative = root[len(self.target_dir):].lstrip(os.path.sep)
dest = os.path.join(relative, name)
zip_file.write(full, dest)
finally:
zip_file.close()
def run(self):
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
tmp_dir = tempfile.mkdtemp()
name = self.distribution.metadata.get_name()
zip_file = os.path.join(tmp_dir, "%s.zip" % name)
try:
self.create_zipfile(zip_file)
self.upload_file(zip_file)
finally:
shutil.rmtree(tmp_dir)
@staticmethod
def _build_part(item, sep_boundary):
key, values = item
title = '\nContent-Disposition: form-data; name="%s"' % key
# handle multiple entries for the same name
if not isinstance(values, list):
values = [values]
for value in values:
if isinstance(value, tuple):
title += '; filename="%s"' % value[0]
value = value[1]
else:
value = _encode(value)
yield sep_boundary
yield _encode(title)
yield b"\n\n"
yield value
if value and value[-1:] == b'\r':
yield b'\n' # write an extra newline (lurve Macs)
@classmethod
def _build_multipart(cls, data):
"""
Build up the MIME payload for the POST data
"""
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b'\n--' + boundary
end_boundary = sep_boundary + b'--'
end_items = end_boundary, b"\n",
builder = functools.partial(
cls._build_part,
sep_boundary=sep_boundary,
)
part_groups = map(builder, data.items())
parts = itertools.chain.from_iterable(part_groups)
body_items = itertools.chain(parts, end_items)
content_type = 'multipart/form-data; boundary=%s' % boundary
return b''.join(body_items), content_type
def upload_file(self, filename):
with open(filename, 'rb') as f:
content = f.read()
meta = self.distribution.metadata
data = {
':action': 'doc_upload',
'name': meta.get_name(),
'content': (os.path.basename(filename), content),
}
# set up the authentication
credentials = _encode(self.username + ':' + self.password)
credentials = standard_b64encode(credentials)
if six.PY3:
credentials = credentials.decode('ascii')
auth = "Basic " + credentials
body, ct = self._build_multipart(data)
self.announce("Submitting documentation to %s" % (self.repository),
log.INFO)
# build the Request
# We can't use urllib2 since we need to send the Basic
# auth right with the first request
schema, netloc, url, params, query, fragments = \
urllib.parse.urlparse(self.repository)
assert not params and not query and not fragments
if schema == 'http':
conn = http_client.HTTPConnection(netloc)
elif schema == 'https':
conn = http_client.HTTPSConnection(netloc)
else:
raise AssertionError("unsupported schema " + schema)
data = ''
try:
conn.connect()
conn.putrequest("POST", url)
content_type = ct
conn.putheader('Content-type', content_type)
conn.putheader('Content-length', str(len(body)))
conn.putheader('Authorization', auth)
conn.endheaders()
conn.send(body)
except socket.error as e:
self.announce(str(e), log.ERROR)
return
r = conn.getresponse()
if r.status == 200:
self.announce('Server response (%s): %s' % (r.status, r.reason),
log.INFO)
elif r.status == 301:
location = r.getheader('Location')
if location is None:
location = 'https://pythonhosted.org/%s/' % meta.get_name()
self.announce('Upload successful. Visit %s' % location,
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
log.ERROR)
if self.show_response:
print('-' * 75, r.read(), '-' * 75) | unknown | codeparrot/codeparrot-clean | ||
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import strutils
from heat.common.i18n import _
def extract_bool(name, value):
    """Convert any true/false string to its corresponding boolean value.

    Value is case insensitive.
    """
    # Validate up front so the error mentions the parameter name and
    # the accepted values, rather than the generic strutils message.
    normalized = str(value).lower()
    if normalized not in ('true', 'false'):
        raise ValueError(_('Unrecognized value "%(value)s" for "%(name)s", '
                           'acceptable values are: true, false.')
                         % {'value': value, 'name': name})
    return strutils.bool_from_string(value, strict=True)
def extract_int(name, value, allow_zero=True, allow_negative=False):
    """Convert *value* to int, enforcing the zero/negative policies.

    Returns None when value is None; raises ValueError with a
    parameter-aware message for non-integer, disallowed-zero, or
    disallowed-negative input.
    """
    if value is None:
        return None

    if not strutils.is_int_like(value):
        raise ValueError(_("Only integer is acceptable by "
                           "'%(name)s'.") % {'name': name})

    # Zero is special-cased so the dedicated message fires before the
    # generic conversion below.
    if value in ('0', 0):
        if not allow_zero:
            raise ValueError(_("Only non-zero integer is acceptable by "
                               "'%(name)s'.") % {'name': name})
        return int(value)

    try:
        result = int(value)
    except (TypeError, ValueError):
        raise ValueError(_("Value '%(value)s' is invalid for '%(name)s' "
                           "which only accepts integer.") %
                         {'name': name, 'value': value})

    if allow_negative is False and result < 0:
        raise ValueError(_("Value '%(value)s' is invalid for '%(name)s' "
                           "which only accepts non-negative integer.") %
                         {'name': name, 'value': value})

    return result
def extract_tags(subject):
    """Split a comma-delimited tag string into a list of tags.

    Raises ValueError if any tag exceeds 80 characters.
    """
    tags = subject.split(',')
    oversized = [tag for tag in tags if len(tag) > 80]
    if oversized:
        # Report the first offending tag, matching iteration order.
        raise ValueError(_('Invalid tag, "%s" is longer than 80 '
                           'characters') % oversized[0])
    return tags
def extract_template_type(subject):
    """Normalize and validate a template type.

    Returns the lowercased type; only 'cfn' and 'hot' are accepted.
    """
    template_type = subject.lower()
    if template_type in ('cfn', 'hot'):
        return template_type
    raise ValueError(_('Invalid template type "%(value)s", valid '
                       'types are: cfn, hot.') %
                     {'value': subject})
/* Copyright 2017 - 2021 J.Rieck (based on R. Thomas's work)
* Copyright 2017 - 2025 Quarkslab
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "LIEF/Visitor.hpp"
#include "LIEF/utils.hpp"
#include "LIEF/MachO/RPathCommand.hpp"
#include "MachO/Structures.hpp"
namespace LIEF {
namespace MachO {
// Construct a fresh LC_RPATH command for *path*.
RPathCommand::RPathCommand(std::string path) :
  LoadCommand::LoadCommand(LoadCommand::TYPE::RPATH, 0),
  path_(std::move(path))
{
  // cmdsize must cover the fixed rpath_command struct plus the
  // NUL-terminated path, rounded up to an 8-byte boundary.
  size_ = align(sizeof(details::rpath_command) + path_.size() + 1, sizeof(uint64_t));
  original_data_.resize(size_);
}
// Construct from a raw LC_RPATH command parsed out of a binary.
// rpath.path holds the offset of the path string within the command.
RPathCommand::RPathCommand(const details::rpath_command& rpath) :
  LoadCommand::LoadCommand{LoadCommand::TYPE(rpath.cmd), rpath.cmdsize},
  path_offset_(rpath.path)
{}
// Visitor-pattern hook: double-dispatch to Visitor::visit for this type.
void RPathCommand::accept(Visitor& visitor) const {
  visitor.visit(*this);
}
// Print the generic load-command header, then the rpath payload.
std::ostream& RPathCommand::print(std::ostream& os) const {
  LoadCommand::print(os) << '\n';
  os << path();
  return os;
}
}
} | cpp | github | https://github.com/nodejs/node | deps/LIEF/src/MachO/RPathCommand.cpp |
# frozen_string_literal: true
require "mail"
module ActionMailbox
# The +InboundEmail+ is an Active Record that keeps a reference to the raw email stored in Active Storage
# and tracks the status of processing. By default, incoming emails will go through the following lifecycle:
#
# * Pending: Just received by one of the ingress controllers and scheduled for routing.
# * Processing: During active processing, while a specific mailbox is running its #process method.
# * Delivered: Successfully processed by the specific mailbox.
# * Failed: An exception was raised during the specific mailbox's execution of the +#process+ method.
# * Bounced: Rejected processing by the specific mailbox and bounced to sender.
#
# Once the +InboundEmail+ has reached the status of being either +delivered+, +failed+, or +bounced+,
# it'll count as having been +#processed?+. Once processed, the +InboundEmail+ will be scheduled for
# automatic incineration at a later point.
#
# When working with an +InboundEmail+, you'll usually interact with the parsed version of the source,
# which is available as a +Mail+ object from +#mail+. But you can also access the raw source directly
# using the +#source+ method.
#
# Examples:
#
# inbound_email.mail.from # => 'david@loudthinking.com'
# inbound_email.source # Returns the full rfc822 source of the email as text
class InboundEmail < Record
  include Incineratable, MessageId, Routable

  has_one_attached :raw_email, service: ActionMailbox.storage_service
  enum :status, %i[ pending processing delivered failed bounced ]

  # Parsed +Mail+ object built from the raw source; memoized per instance.
  def mail
    @mail ||= Mail.from_source(source)
  end

  # Full rfc822 source, downloaded from Active Storage once and memoized.
  def source
    @source ||= raw_email.download
  end

  # True once the email reached a terminal status (delivered/failed/bounced).
  def processed?
    delivered? || failed? || bounced?
  end

  def instrumentation_payload # :nodoc:
    {
      id: id,
      message_id: message_id,
      status: status
    }
  end
end
end
ActiveSupport.run_load_hooks :action_mailbox_inbound_email, ActionMailbox::InboundEmail | ruby | github | https://github.com/rails/rails | actionmailbox/app/models/action_mailbox/inbound_email.rb |
#!/usr/bin/env python2.7
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""End snippets defaults if they were untouched by the user.
We want to do this to mask out protobuf-defined defaults which will be in effect
in that case. A common case is a URL that points to a Google server.
Another one is that authentication is on by default, which is undesirable.
We do allow users to see and set these fields, they can point urls at Google,
but it's usually the user who doesn't want that.
"""
# Maps end-snippet field paths to the neutral value applied when the user
# left the field untouched, masking the protobuf-defined defaults
# (typically Google-hosted URLs) described in the module docstring.
masked_snippets = {
    # [7] active.
    "end_snippet.reverse_geocoder_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [11] active. empty string by default.
    "end_snippet.default_web_page_intl_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [14] active.
    "end_snippet.user_guide_intl_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [15] active.
    "end_snippet.support_center_intl_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [17] active.
    "end_snippet.support_answer_intl_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [18] active.
    "end_snippet.support_topic_intl_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [19] active.
    "end_snippet.support_request_intl_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [20] active.
    "end_snippet.earth_intl_url.value":
        "",
    # [32] The time machine options.
    # Note: option is not available to user, but located in masked snippets,
    # since it may be set by Fusion. So, we mask time machine option if it is
    # not set.
    # active
    "end_snippet.time_machine_options.server_url":
        "",
    # active
    "end_snippet.time_machine_options.is_timemachine":
        False,
    # [40] active.
    # If not specified, default values in the client will be used.
    "end_snippet.bbs_server_info.name.value":
        "",
    "end_snippet.bbs_server_info.base_url.value":
        "",
    "end_snippet.bbs_server_info.post_wizard_path.value":
        "",
    "end_snippet.bbs_server_info.file_submit_path.value":
        "",
    # [46] active.
    # If empty, service will be unavailable.
    # This should be set to empty for EC clients to disable connection to google
    # services.
    "end_snippet.elevation_service_base_url":
        "",
    # [47] unnecessary. [default = 500].
    # "end_snippet.elevation_profile_query_delay": 500,
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [48] active.
    # If not specified, this URL is built from user_guide_intl_url as
    # user_guide_intl_url + "tutorials/index.html".
    "end_snippet.tutorial_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [49] active.
    # If not specified, this URL is built from user_guide_intl_url as
    # user_guide_intl_url + "ug_keyboard.html".
    "end_snippet.keyboard_shortcuts_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [50] active.
    # If not specified, this URL is built from support_answer_intl_url
    "end_snippet.release_notes_url.value":
        "",
    # [54] active. URL of a page that will be shown when a KML search is
    # performed.
    # Note: for GEE, set to local path in order to override default settings.
    # Note: for GEE, set by default to kmlrender since we don't need to support
    # any searchlets or oneboxes.
    "end_snippet.search_config.kml_search_url.value":
        "/earth/client/kmlrender/index_$[hl].html",
    # [54] active. URL of a page that will be shown when KML is rendered in
    # the search panel.
    # Note: for GEE, set to local path in order to override default settings.
    "end_snippet.search_config.kml_render_url.value":
        "/earth/client/kmlrender/index_$[hl].html",
    # [54] active. URL of a page that will be displayed if a network error or
    # other local error occurs while performing a search.
    "end_snippet.search_config.error_page_url.value":
        "about:blank",
    # [54] active. URL of a page that will be shown when
    # the search history is requested.
    "end_snippet.search_config.search_history_url.value":
        "about:blank",
    # [57] active.
    # This should be set to empty for EC clients to disable connection to
    # google services. If nothing is specified, the client uses
    # "http://maps.google.com/".
    "end_snippet.google_maps_url.value":
        "",
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [59] active.
    "end_snippet.privacy_policy_url.value":
        "",
    # [63] active.
    "end_snippet.show_signin_button":
        False,
    # TODO: move to hard_masked_snippets (make not available
    # for user)?
    # [64] active.
    "end_snippet.startup_tips_intl_url.value":
        "",
}
"""Test the config manager."""
import asyncio
from datetime import timedelta
import pytest
from homeassistant import config_entries, data_entry_flow, loader
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.setup import async_setup_component
from homeassistant.util import dt
from tests.async_mock import AsyncMock, patch
from tests.common import (
MockConfigEntry,
MockEntity,
MockModule,
MockPlatform,
async_fire_time_changed,
mock_coro,
mock_entity_platform,
mock_integration,
mock_registry,
)
@pytest.fixture(autouse=True)
def mock_handlers():
    """Mock config flows."""

    class MockFlowHandler(config_entries.ConfigFlow):
        """Define a mock flow handler."""

        VERSION = 1

    # Register the mock handler for both test domains for the duration
    # of each test (autouse), restoring HANDLERS afterwards.
    with patch.dict(
        config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler}
    ):
        yield
@pytest.fixture
def manager(hass):
    """Fixture of a loaded config manager."""
    manager = config_entries.ConfigEntries(hass, {})
    manager._entries = []
    # Neutralize the store's stop-listener registration so tests don't
    # leak event listeners on hass.
    manager._store._async_ensure_stop_listener = lambda: None
    hass.config_entries = manager
    return manager
async def test_call_setup_entry(hass):
    """Test we call <component>.setup_entry."""
    entry = MockConfigEntry(domain="comp")
    entry.add_to_hass(hass)

    mock_setup_entry = AsyncMock(return_value=True)
    mock_migrate_entry = AsyncMock(return_value=True)

    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    result = await async_setup_component(hass, "comp", {})
    assert result
    # Entry version matches the handler's VERSION, so setup runs
    # without any migration.
    assert len(mock_migrate_entry.mock_calls) == 0
    assert len(mock_setup_entry.mock_calls) == 1
    assert entry.state == config_entries.ENTRY_STATE_LOADED
async def test_call_async_migrate_entry(hass):
    """Test we call <component>.async_migrate_entry when version mismatch."""
    entry = MockConfigEntry(domain="comp")
    # Stored version 2 vs handler VERSION 1 forces a migration attempt.
    entry.version = 2
    entry.add_to_hass(hass)

    mock_migrate_entry = AsyncMock(return_value=True)
    mock_setup_entry = AsyncMock(return_value=True)

    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    result = await async_setup_component(hass, "comp", {})
    assert result
    # Successful migration is followed by normal setup.
    assert len(mock_migrate_entry.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
    assert entry.state == config_entries.ENTRY_STATE_LOADED
async def test_call_async_migrate_entry_failure_false(hass):
    """Test migration fails if returns false."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)

    # Migration reports failure by returning False.
    mock_migrate_entry = AsyncMock(return_value=False)
    mock_setup_entry = AsyncMock(return_value=True)

    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    result = await async_setup_component(hass, "comp", {})
    assert result
    # Setup must be skipped entirely when migration fails.
    assert len(mock_migrate_entry.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state == config_entries.ENTRY_STATE_MIGRATION_ERROR
async def test_call_async_migrate_entry_failure_exception(hass):
    """Test migration fails if exception raised."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)

    # Migration raising should be caught and mapped to MIGRATION_ERROR.
    mock_migrate_entry = AsyncMock(side_effect=Exception)
    mock_setup_entry = AsyncMock(return_value=True)

    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    result = await async_setup_component(hass, "comp", {})
    assert result
    assert len(mock_migrate_entry.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state == config_entries.ENTRY_STATE_MIGRATION_ERROR
async def test_call_async_migrate_entry_failure_not_bool(hass):
    """Test migration fails if boolean not returned."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)

    # None (no explicit return) is treated as a failed migration.
    mock_migrate_entry = AsyncMock(return_value=None)
    mock_setup_entry = AsyncMock(return_value=True)

    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    result = await async_setup_component(hass, "comp", {})
    assert result
    assert len(mock_migrate_entry.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state == config_entries.ENTRY_STATE_MIGRATION_ERROR
async def test_call_async_migrate_entry_failure_not_supported(hass):
    """Test migration fails if async_migrate_entry not implemented."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)

    mock_setup_entry = AsyncMock(return_value=True)

    # Integration deliberately provides no async_migrate_entry.
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)

    result = await async_setup_component(hass, "comp", {})
    assert result
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state == config_entries.ENTRY_STATE_MIGRATION_ERROR
async def test_remove_entry(hass, manager):
    """Test that we can remove an entry."""

    async def mock_setup_entry(hass, entry):
        """Mock setting up entry."""
        # Forward to the light platform so an entity gets created.
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, "light")
        )
        return True

    async def mock_unload_entry(hass, entry):
        """Mock unloading an entry."""
        result = await hass.config_entries.async_forward_entry_unload(entry, "light")
        assert result
        return result

    mock_remove_entry = AsyncMock(return_value=None)

    entity = MockEntity(unique_id="1234", name="Test Entity")

    async def mock_setup_entry_platform(hass, entry, async_add_entities):
        """Mock setting up platform."""
        async_add_entities([entity])

    mock_integration(
        hass,
        MockModule(
            "test",
            async_setup_entry=mock_setup_entry,
            async_unload_entry=mock_unload_entry,
            async_remove_entry=mock_remove_entry,
        ),
    )
    mock_entity_platform(
        hass, "light.test", MockPlatform(async_setup_entry=mock_setup_entry_platform)
    )
    mock_entity_platform(hass, "config_flow.test", None)

    # Surround the target entry with unrelated ones to verify only
    # "test2" is removed.
    MockConfigEntry(domain="test_other", entry_id="test1").add_to_manager(manager)
    entry = MockConfigEntry(domain="test", entry_id="test2")
    entry.add_to_manager(manager)
    MockConfigEntry(domain="test_other", entry_id="test3").add_to_manager(manager)

    # Check all config entries exist
    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]

    # Setup entry
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    # Check entity state got added
    assert hass.states.get("light.test_entity") is not None
    assert len(hass.states.async_all()) == 1

    # Check entity got added to entity registry
    ent_reg = await hass.helpers.entity_registry.async_get_registry()
    assert len(ent_reg.entities) == 1
    entity_entry = list(ent_reg.entities.values())[0]
    assert entity_entry.config_entry_id == entry.entry_id

    # Remove entry
    result = await manager.async_remove("test2")
    await hass.async_block_till_done()

    # Check that unload went well and so no need to restart
    assert result == {"require_restart": False}

    # Check the remove callback was invoked.
    assert mock_remove_entry.call_count == 1

    # Check that config entry was removed.
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]

    # Check that entity state has been removed
    assert hass.states.get("light.test_entity") is None
    assert len(hass.states.async_all()) == 0

    # Check that entity registry entry has been removed
    entity_entry_list = list(ent_reg.entities.values())
    assert not entity_entry_list
async def test_remove_entry_handles_callback_error(hass, manager):
    """Test that exceptions in the remove callback are handled."""
    mock_setup_entry = AsyncMock(return_value=True)
    mock_unload_entry = AsyncMock(return_value=True)
    mock_remove_entry = AsyncMock(return_value=None)

    mock_integration(
        hass,
        MockModule(
            "test",
            async_setup_entry=mock_setup_entry,
            async_unload_entry=mock_unload_entry,
            async_remove_entry=mock_remove_entry,
        ),
    )

    entry = MockConfigEntry(domain="test", entry_id="test1")
    entry.add_to_manager(manager)

    # Check all config entries exist
    assert [item.entry_id for item in manager.async_entries()] == ["test1"]

    # Setup entry
    await entry.async_setup(hass)
    await hass.async_block_till_done()

    # Remove entry
    result = await manager.async_remove("test1")
    await hass.async_block_till_done()
    # Check that unload went well and so no need to restart
    assert result == {"require_restart": False}
    # Check the remove callback was invoked.
    assert mock_remove_entry.call_count == 1
    # Check that config entry was removed.
    assert [item.entry_id for item in manager.async_entries()] == []
async def test_remove_entry_raises(hass, manager):
    """Test if a component raises while removing entry."""

    async def mock_unload_entry(hass, entry):
        """Mock unload entry function."""
        raise Exception("BROKEN")

    mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry))

    MockConfigEntry(domain="test", entry_id="test1").add_to_manager(manager)
    # Only a LOADED entry triggers the (raising) unload path.
    MockConfigEntry(
        domain="comp", entry_id="test2", state=config_entries.ENTRY_STATE_LOADED
    ).add_to_manager(manager)
    MockConfigEntry(domain="test", entry_id="test3").add_to_manager(manager)

    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]

    result = await manager.async_remove("test2")

    # Failed unload means a restart is required, but the entry is
    # still removed from the manager.
    assert result == {"require_restart": True}
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]
async def test_remove_entry_if_not_loaded(hass, manager):
    """Test that we can remove an entry that is not loaded."""
    mock_unload_entry = AsyncMock(return_value=True)

    mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry))

    MockConfigEntry(domain="test", entry_id="test1").add_to_manager(manager)
    MockConfigEntry(domain="comp", entry_id="test2").add_to_manager(manager)
    MockConfigEntry(domain="test", entry_id="test3").add_to_manager(manager)

    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]

    result = await manager.async_remove("test2")

    assert result == {"require_restart": False}
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]

    # Entry was never loaded, so unload must not be called.
    assert len(mock_unload_entry.mock_calls) == 0
async def test_remove_entry_if_integration_deleted(hass, manager):
    """Test that we can remove an entry when the integration is deleted."""
    mock_unload_entry = AsyncMock(return_value=True)

    # Note: the "comp" integration is intentionally NOT registered.
    MockConfigEntry(domain="test", entry_id="test1").add_to_manager(manager)
    MockConfigEntry(domain="comp", entry_id="test2").add_to_manager(manager)
    MockConfigEntry(domain="test", entry_id="test3").add_to_manager(manager)

    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]

    result = await manager.async_remove("test2")

    assert result == {"require_restart": False}
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]
    assert len(mock_unload_entry.mock_calls) == 0
async def test_add_entry_calls_setup_entry(hass, manager):
    """Test we call setup_config_entry."""
    mock_setup_entry = AsyncMock(return_value=True)

    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_user(self, user_input=None):
            """Test user step."""
            return self.async_create_entry(title="title", data={"token": "supersecret"})

    # The bogus "beer" handler verifies unrelated HANDLERS entries are ignored.
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()

    assert len(mock_setup_entry.mock_calls) == 1
    # mock_calls[0][1] is the positional-args tuple of the first call.
    p_hass, p_entry = mock_setup_entry.mock_calls[0][1]
    assert p_hass is hass
    assert p_entry.data == {"token": "supersecret"}
async def test_entries_gets_entries(manager):
    """Test entries are filtered by domain."""
    MockConfigEntry(domain="test").add_to_manager(manager)
    entry1 = MockConfigEntry(domain="test2")
    entry1.add_to_manager(manager)
    entry2 = MockConfigEntry(domain="test2")
    entry2.add_to_manager(manager)

    # Only the two "test2" entries come back, in insertion order.
    assert manager.async_entries("test2") == [entry1, entry2]
async def test_domains_gets_uniques(manager):
    """Test we only return each domain once."""
    MockConfigEntry(domain="test").add_to_manager(manager)
    MockConfigEntry(domain="test2").add_to_manager(manager)
    MockConfigEntry(domain="test2").add_to_manager(manager)
    MockConfigEntry(domain="test").add_to_manager(manager)
    MockConfigEntry(domain="test3").add_to_manager(manager)

    # Deduplicated, preserving first-seen order.
    assert manager.async_domains() == ["test", "test2", "test3"]
async def test_saving_and_loading(hass):
    """Test that we're saving and loading correctly."""
    mock_integration(
        hass, MockModule("test", async_setup_entry=lambda *args: mock_coro(True))
    )
    mock_entity_platform(hass, "config_flow.test", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 5
        CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL

        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("unique")
            return self.async_create_entry(title="Test Title", data={"token": "abcd"})

    with patch.dict(config_entries.HANDLERS, {"test": TestFlow}):
        await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_USER}
        )

    # Second entry with different version/connection class to verify
    # those fields round-trip through storage too.
    class Test2Flow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 3
        CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH

        async def async_step_user(self, user_input=None):
            """Test user step."""
            return self.async_create_entry(
                title="Test 2 Title", data={"username": "bla"}
            )

    with patch("homeassistant.config_entries.HANDLERS.get", return_value=Test2Flow):
        await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_USER}
        )

    assert len(hass.config_entries.async_entries()) == 2

    # To trigger the call_later
    async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=1))
    # To execute the save
    await hass.async_block_till_done()

    # Now load written data in new config manager
    manager = config_entries.ConfigEntries(hass, {})
    await manager.async_initialize()

    assert len(manager.async_entries()) == 2

    # Ensure same order
    for orig, loaded in zip(
        hass.config_entries.async_entries(), manager.async_entries()
    ):
        assert orig.version == loaded.version
        assert orig.domain == loaded.domain
        assert orig.title == loaded.title
        assert orig.data == loaded.data
        assert orig.source == loaded.source
        assert orig.connection_class == loaded.connection_class
        assert orig.unique_id == loaded.unique_id
async def test_forward_entry_sets_up_component(hass):
    """Test we setup the component entry is forwarded to."""
    entry = MockConfigEntry(domain="original")

    mock_original_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass, MockModule("original", async_setup_entry=mock_original_setup_entry)
    )

    mock_forwarded_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass, MockModule("forwarded", async_setup_entry=mock_forwarded_setup_entry)
    )

    await hass.config_entries.async_forward_entry_setup(entry, "forwarded")
    # Only the forwarded-to component's setup runs, not the origin's.
    assert len(mock_original_setup_entry.mock_calls) == 0
    assert len(mock_forwarded_setup_entry.mock_calls) == 1
async def test_forward_entry_does_not_setup_entry_if_setup_fails(hass):
    """Test we do not set up entry if component setup fails."""
    entry = MockConfigEntry(domain="original")

    # Component-level async_setup fails, so async_setup_entry must not run.
    mock_setup = AsyncMock(return_value=False)
    mock_setup_entry = AsyncMock()

    mock_integration(
        hass,
        MockModule(
            "forwarded", async_setup=mock_setup, async_setup_entry=mock_setup_entry
        ),
    )

    await hass.config_entries.async_forward_entry_setup(entry, "forwarded")
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 0
async def test_discovery_notification(hass):
    """Test that we create/dismiss a notification when source is discovery."""
    mock_integration(hass, MockModule("test"))
    mock_entity_platform(hass, "config_flow.test", None)
    await async_setup_component(hass, "persistent_notification", {})

    with patch.dict(config_entries.HANDLERS):

        class TestFlow(config_entries.ConfigFlow, domain="test"):
            """Test flow."""

            VERSION = 5

            async def async_step_discovery(self, discovery_info):
                """Test discovery step."""
                return self.async_show_form(step_id="discovery_confirm")

            async def async_step_discovery_confirm(self, discovery_info):
                """Test discovery confirm step."""
                return self.async_create_entry(
                    title="Test Title", data={"token": "abcd"}
                )

        result = await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_DISCOVERY}
        )

        # While the flow shows a form, the discovery notification exists.
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is not None

        result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY

        # Once the entry is created, the notification is dismissed.
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is None
async def test_discovery_notification_not_created(hass):
    """Test that we do not create a notification when discovery is aborted."""
    mock_integration(hass, MockModule("test"))
    mock_entity_platform(hass, "config_flow.test", None)
    await async_setup_component(hass, "persistent_notification", {})
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 5
        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            # Abort immediately, so the flow never reaches a user-visible step.
            return self.async_abort(reason="test")
    with patch.dict(config_entries.HANDLERS, {"test": TestFlow}):
        await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_DISCOVERY}
        )
    await hass.async_block_till_done()
    # An aborted discovery flow must not leave a notification behind.
    state = hass.states.get("persistent_notification.config_entry_discovery")
    assert state is None
async def test_loading_default_config(hass):
    """Test loading the default config."""
    entries_manager = config_entries.ConfigEntries(hass, {})
    # Simulate a fresh install: no storage file exists on disk.
    with patch("homeassistant.util.json.open", side_effect=FileNotFoundError):
        await entries_manager.async_initialize()
    # With nothing persisted, initialization yields zero entries.
    assert not entries_manager.async_entries()
async def test_updating_entry_data(manager):
    """Test that we can update an entry data."""
    config_entry = MockConfigEntry(
        domain="test",
        data={"first": True},
        state=config_entries.ENTRY_STATE_SETUP_ERROR,
    )
    config_entry.add_to_manager(manager)
    # Updating without a data kwarg leaves the existing data untouched.
    manager.async_update_entry(config_entry)
    assert config_entry.data == {"first": True}
    # Passing data replaces the dict wholesale rather than merging.
    manager.async_update_entry(config_entry, data={"second": True})
    assert config_entry.data == {"second": True}
async def test_updating_entry_system_options(manager):
    """Test that we can update an entry's system options."""
    entry = MockConfigEntry(
        domain="test",
        data={"first": True},
        state=config_entries.ENTRY_STATE_SETUP_ERROR,
        system_options={"disable_new_entities": True},
    )
    entry.add_to_manager(manager)
    assert entry.system_options.disable_new_entities
    # system_options.update mutates the option object in place.
    entry.system_options.update(disable_new_entities=False)
    assert not entry.system_options.disable_new_entities
async def test_update_entry_options_and_trigger_listener(hass, manager):
    """Test that we can update entry options and trigger listener."""
    entry = MockConfigEntry(domain="test", options={"first": True})
    entry.add_to_manager(manager)
    async def update_listener(hass, entry):
        """Test function."""
        # The listener observes the already-updated options.
        assert entry.options == {"second": True}
    entry.add_update_listener(update_listener)
    manager.async_update_entry(entry, options={"second": True})
    assert entry.options == {"second": True}
async def test_setup_raise_not_ready(hass, caplog):
    """Test a setup raising not ready."""
    entry = MockConfigEntry(domain="test")
    mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady)
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)
    # Capture the retry that setup schedules via async_call_later.
    with patch("homeassistant.helpers.event.async_call_later") as mock_call:
        await entry.async_setup(hass)
    assert len(mock_call.mock_calls) == 1
    assert "Config entry for test not ready yet" in caplog.text
    # Inspect the scheduled retry: (hass, delay, callback).
    p_hass, p_wait_time, p_setup = mock_call.mock_calls[0][1]
    assert p_hass is hass
    assert p_wait_time == 5
    assert entry.state == config_entries.ENTRY_STATE_SETUP_RETRY
    # Let the next attempt succeed and run the scheduled retry callback.
    mock_setup_entry.side_effect = None
    mock_setup_entry.return_value = True
    await p_setup(None)
    assert entry.state == config_entries.ENTRY_STATE_LOADED
async def test_setup_retrying_during_unload(hass):
    """Test if we unload an entry that is in retry mode."""
    entry = MockConfigEntry(domain="test")
    mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady)
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)
    with patch("homeassistant.helpers.event.async_call_later") as mock_call:
        await entry.async_setup(hass)
    assert entry.state == config_entries.ENTRY_STATE_SETUP_RETRY
    # mock_call.return_value is the cancel callback returned by
    # async_call_later; it must not have been invoked yet.
    assert len(mock_call.return_value.mock_calls) == 0
    await entry.async_unload(hass)
    assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED
    # Unloading a retrying entry cancels the pending retry timer.
    assert len(mock_call.return_value.mock_calls) == 1
async def test_entry_options(hass, manager):
    """Test that we can set options on an entry."""
    entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
    entry.add_to_manager(manager)
    class TestFlow:
        """Test flow."""
        @staticmethod
        @callback
        def async_get_options_flow(config_entry):
            """Test options flow."""
            class OptionsFlowHandler(data_entry_flow.FlowHandler):
                """Test options flow handler."""
            return OptionsFlowHandler()
    config_entries.HANDLERS["test"] = TestFlow()
    flow = await manager.options.async_create_flow(
        entry.entry_id, context={"source": "test"}, data=None
    )
    flow.handler = entry.entry_id  # Used to keep reference to config entry
    await manager.options.async_finish_flow(flow, {"data": {"second": True}})
    # Finishing an options flow only touches options, never data.
    assert entry.data == {"first": True}
    assert entry.options == {"second": True}
async def test_entry_setup_succeed(hass, manager):
    """Test that we can setup an entry."""
    entry = MockConfigEntry(domain="comp", state=config_entries.ENTRY_STATE_NOT_LOADED)
    entry.add_to_hass(hass)
    setup_mock = AsyncMock(return_value=True)
    setup_entry_mock = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule("comp", async_setup=setup_mock, async_setup_entry=setup_entry_mock),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    assert await manager.async_setup(entry.entry_id)
    # Both the component setup and the entry setup hook ran exactly once.
    assert len(setup_mock.mock_calls) == 1
    assert len(setup_entry_mock.mock_calls) == 1
    assert entry.state == config_entries.ENTRY_STATE_LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ENTRY_STATE_LOADED,
        config_entries.ENTRY_STATE_SETUP_ERROR,
        config_entries.ENTRY_STATE_MIGRATION_ERROR,
        config_entries.ENTRY_STATE_SETUP_RETRY,
        config_entries.ENTRY_STATE_FAILED_UNLOAD,
    ),
)
async def test_entry_setup_invalid_state(hass, manager, state):
    """Test that we cannot setup an entry with invalid state."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    mock_setup = AsyncMock(return_value=True)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule("comp", async_setup=mock_setup, async_setup_entry=mock_setup_entry),
    )
    # Only NOT_LOADED entries may be set up; anything else raises.
    with pytest.raises(config_entries.OperationNotAllowed):
        assert await manager.async_setup(entry.entry_id)
    # Nothing was called and the state is untouched.
    assert len(mock_setup.mock_calls) == 0
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state == state
async def test_entry_unload_succeed(hass, manager):
    """Test that we can unload an entry."""
    entry = MockConfigEntry(domain="comp", state=config_entries.ENTRY_STATE_LOADED)
    entry.add_to_hass(hass)
    unload_mock = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_unload_entry=unload_mock))
    assert await manager.async_unload(entry.entry_id)
    # The unload hook ran once and the entry ended up unloaded.
    assert len(unload_mock.mock_calls) == 1
    assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ENTRY_STATE_NOT_LOADED,
        config_entries.ENTRY_STATE_SETUP_ERROR,
        config_entries.ENTRY_STATE_SETUP_RETRY,
    ),
)
async def test_entry_unload_failed_to_load(hass, manager, state):
    """Test unloading an entry that never loaded skips the unload hook."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_unload_entry=async_unload_entry))
    assert await manager.async_unload(entry.entry_id)
    # Nothing was loaded, so there is nothing for the integration to unload.
    assert len(async_unload_entry.mock_calls) == 0
    assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ENTRY_STATE_MIGRATION_ERROR,
        config_entries.ENTRY_STATE_FAILED_UNLOAD,
    ),
)
async def test_entry_unload_invalid_state(hass, manager, state):
    """Test that we cannot unload an entry with invalid state."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_unload_entry=async_unload_entry))
    with pytest.raises(config_entries.OperationNotAllowed):
        assert await manager.async_unload(entry.entry_id)
    # Nothing was called and the state is untouched.
    assert len(async_unload_entry.mock_calls) == 0
    assert entry.state == state
async def test_entry_reload_succeed(hass, manager):
    """Test that we can reload an entry."""
    entry = MockConfigEntry(domain="comp", state=config_entries.ENTRY_STATE_LOADED)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    assert await manager.async_reload(entry.entry_id)
    # Reload of a loaded entry is unload followed by a fresh setup.
    assert len(async_unload_entry.mock_calls) == 1
    assert len(async_setup.mock_calls) == 1
    assert len(async_setup_entry.mock_calls) == 1
    assert entry.state == config_entries.ENTRY_STATE_LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ENTRY_STATE_NOT_LOADED,
        config_entries.ENTRY_STATE_SETUP_ERROR,
        config_entries.ENTRY_STATE_SETUP_RETRY,
    ),
)
async def test_entry_reload_not_loaded(hass, manager, state):
    """Test reloading an entry that is not loaded sets it up without unloading."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    assert await manager.async_reload(entry.entry_id)
    # The entry was not loaded, so reload skips the unload step.
    assert len(async_unload_entry.mock_calls) == 0
    assert len(async_setup.mock_calls) == 1
    assert len(async_setup_entry.mock_calls) == 1
    assert entry.state == config_entries.ENTRY_STATE_LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ENTRY_STATE_MIGRATION_ERROR,
        config_entries.ENTRY_STATE_FAILED_UNLOAD,
    ),
)
async def test_entry_reload_error(hass, manager, state):
    """Test that reloading an entry in an error state is not allowed."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    with pytest.raises(config_entries.OperationNotAllowed):
        assert await manager.async_reload(entry.entry_id)
    # No hooks were invoked and the state is untouched.
    assert len(async_unload_entry.mock_calls) == 0
    assert len(async_setup.mock_calls) == 0
    assert len(async_setup_entry.mock_calls) == 0
    assert entry.state == state
async def test_init_custom_integration(hass):
    """Test initializing flow for custom integration."""
    # A custom integration (custom_components namespace) without a config
    # flow should yield UnknownHandler rather than silently loading one.
    integration = loader.Integration(
        hass,
        "custom_components.hue",
        None,
        {"name": "Hue", "dependencies": [], "requirements": [], "domain": "hue"},
    )
    with pytest.raises(data_entry_flow.UnknownHandler):
        with patch(
            "homeassistant.loader.async_get_integration", return_value=integration,
        ):
            await hass.config_entries.flow.async_init("bla")
async def test_support_entry_unload(hass):
    """Test unloading entry."""
    # "light" implements async_unload_entry; "auth" does not.
    supports_light = await config_entries.support_entry_unload(hass, "light")
    supports_auth = await config_entries.support_entry_unload(hass, "auth")
    assert supports_light
    assert not supports_auth
async def test_reload_entry_entity_registry_ignores_no_entry(hass):
    """Test reloading entry in entity registry skips if no config entry linked."""
    handler = config_entries.EntityRegistryDisabledHandler(hass)
    registry = mock_registry(hass)
    # Test we ignore entities without config entry
    entry = registry.async_get_or_create("light", "hue", "123")
    registry.async_update_entity(entry.entity_id, disabled_by="user")
    await hass.async_block_till_done()
    # No config entry linked, so the handler neither records a change
    # nor schedules a delayed reload.
    assert not handler.changed
    assert handler._remove_call_later is None
async def test_reload_entry_entity_registry_works(hass):
    """Test we schedule an entry to be reloaded if disabled_by is updated."""
    handler = config_entries.EntityRegistryDisabledHandler(hass)
    handler.async_setup()
    registry = mock_registry(hass)
    config_entry = MockConfigEntry(
        domain="comp", state=config_entries.ENTRY_STATE_LOADED
    )
    config_entry.add_to_hass(hass)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_unload_entry=mock_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Only changing disabled_by should update trigger
    entity_entry = registry.async_get_or_create(
        "light", "hue", "123", config_entry=config_entry
    )
    registry.async_update_entity(entity_entry.entity_id, name="yo")
    await hass.async_block_till_done()
    assert not handler.changed
    assert handler._remove_call_later is None
    # Disable entity, we should not do anything, only act when enabled.
    registry.async_update_entity(entity_entry.entity_id, disabled_by="user")
    await hass.async_block_till_done()
    assert not handler.changed
    assert handler._remove_call_later is None
    # Enable entity, check we are reloading config entry.
    registry.async_update_entity(entity_entry.entity_id, disabled_by=None)
    await hass.async_block_till_done()
    assert handler.changed == {config_entry.entry_id}
    assert handler._remove_call_later is not None
    # Advance time past the debounce delay so the scheduled reload fires.
    async_fire_time_changed(
        hass,
        dt.utcnow()
        + timedelta(
            seconds=config_entries.EntityRegistryDisabledHandler.RELOAD_AFTER_UPDATE_DELAY
            + 1
        ),
    )
    await hass.async_block_till_done()
    # The reload unloaded the entry once (unload is the first reload step).
    assert len(mock_unload_entry.mock_calls) == 1
# NOTE(review): "unqiue" is a typo for "unique" in the test name; renaming
# would change the pytest test ID, so it is only flagged here.
async def test_unqiue_id_persisted(hass, manager):
    """Test that a unique ID is stored in the config entry."""
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            return self.async_create_entry(title="mock-title", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
    assert len(mock_setup_entry.mock_calls) == 1
    # The created entry carries the unique ID set during the flow.
    p_hass, p_entry = mock_setup_entry.mock_calls[0][1]
    assert p_hass is hass
    assert p_entry.unique_id == "mock-unique-id"
async def test_unique_id_existing_entry(hass, manager):
    """Test that we remove an entry if there already is an entry with unique ID."""
    hass.config.components.add("comp")
    # Pre-existing loaded entry with the same unique ID as the new flow.
    MockConfigEntry(
        domain="comp",
        state=config_entries.ENTRY_STATE_LOADED,
        unique_id="mock-unique-id",
    ).add_to_hass(hass)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    async_remove_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
            async_remove_entry=async_remove_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            # async_set_unique_id returns the existing matching entry, if any.
            existing_entry = await self.async_set_unique_id("mock-unique-id")
            assert existing_entry is not None
            return self.async_create_entry(title="mock-title", data={"via": "flow"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # The old entry was unloaded and removed; only the new one remains.
    entries = hass.config_entries.async_entries("comp")
    assert len(entries) == 1
    assert entries[0].data == {"via": "flow"}
    assert len(async_setup_entry.mock_calls) == 1
    assert len(async_unload_entry.mock_calls) == 1
    assert len(async_remove_entry.mock_calls) == 1
async def test_unique_id_update_existing_entry(hass, manager):
    """Test that we update an entry if there already is an entry with unique ID."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        data={"additional": "data", "host": "0.0.0.0"},
        unique_id="mock-unique-id",
    )
    entry.add_to_hass(hass)
    mock_integration(
        hass, MockModule("comp"),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            # Abort, but first merge the updates into the existing entry.
            await self._abort_if_unique_id_configured(updates={"host": "1.1.1.1"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == "abort"
        assert result["reason"] == "already_configured"
        # "host" was updated in place; unrelated keys were preserved.
        assert entry.data["host"] == "1.1.1.1"
        assert entry.data["additional"] == "data"
async def test_unique_id_not_update_existing_entry(hass, manager):
    """Test that we do not update an entry if existing entry has the data."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        data={"additional": "data", "host": "0.0.0.0"},
        unique_id="mock-unique-id",
    )
    entry.add_to_hass(hass)
    mock_integration(
        hass, MockModule("comp"),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            # The "updates" match the stored data exactly, so no write occurs.
            await self._abort_if_unique_id_configured(updates={"host": "0.0.0.0"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch(
        "homeassistant.config_entries.ConfigEntries.async_update_entry"
    ) as async_update_entry:
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == "abort"
        assert result["reason"] == "already_configured"
        assert entry.data["host"] == "0.0.0.0"
        assert entry.data["additional"] == "data"
        # No-op updates must not trigger async_update_entry at all.
        assert len(async_update_entry.mock_calls) == 0
async def test_unique_id_in_progress(hass, manager):
    """Test that we abort if there is already a flow in progress with same unique id."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            return self.async_show_form(step_id="discovery")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        # Will be canceled
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
    # The second flow aborts because the unique ID is already being claimed.
    assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result2["reason"] == "already_in_progress"
async def test_finish_flow_aborts_progress(hass, manager):
    """Test that when finishing a flow, we abort other flows in progress with unique ID."""
    mock_integration(
        hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            # raise_on_progress=False lets a second flow start for the
            # same unique ID instead of aborting immediately.
            await self.async_set_unique_id("mock-unique-id", raise_on_progress=False)
            if user_input is None:
                return self.async_show_form(step_id="discovery")
            return self.async_create_entry(title="yo", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        # Will finish and cancel other one.
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}, data={}
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # Finishing the second flow aborted the first; nothing is in progress.
    assert len(hass.config_entries.flow.async_progress()) == 0
async def test_unique_id_ignore(hass, manager):
    """Test that we can ignore flows that are in progress and have a unique ID."""
    async_setup_entry = AsyncMock(return_value=False)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user flow."""
            await self.async_set_unique_id("mock-unique-id")
            return self.async_show_form(step_id="discovery")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        result2 = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id"},
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # NOTE(review): commented-out assertion kept from the original; it is
    # unverified whether ignoring also cancels the in-progress user flow.
    # assert len(hass.config_entries.flow.async_progress()) == 0
    # We should never set up an ignored entry.
    assert len(async_setup_entry.mock_calls) == 0
    entry = hass.config_entries.async_entries("comp")[0]
    assert entry.source == "ignore"
    assert entry.unique_id == "mock-unique-id"
async def test_unignore_step_form(hass, manager):
    """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_unignore(self, user_input):
            """Test unignore step."""
            unique_id = user_input["unique_id"]
            await self.async_set_unique_id(unique_id)
            # Show a form: the rediscovered flow stays in progress.
            return self.async_show_form(step_id="discovery")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == "ignore"
        assert entry.unique_id == "mock-unique-id"
        assert entry.domain == "comp"
        # Removing the ignored entry triggers the unignore step asynchronously.
        await manager.async_remove(entry.entry_id)
        # Right after removal there shouldn't be an entry or active flows
        assert len(hass.config_entries.async_entries("comp")) == 0
        assert len(hass.config_entries.flow.async_progress()) == 0
        # But after a 'tick' the unignore step has run and we can see an active flow again.
        await hass.async_block_till_done()
        assert len(hass.config_entries.flow.async_progress()) == 1
        # and still not config entries
        assert len(hass.config_entries.async_entries("comp")) == 0
async def test_unignore_create_entry(hass, manager):
    """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_unignore(self, user_input):
            """Test unignore step."""
            unique_id = user_input["unique_id"]
            await self.async_set_unique_id(unique_id)
            # Create an entry directly: rediscovery completes in one step.
            return self.async_create_entry(title="yo", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == "ignore"
        assert entry.unique_id == "mock-unique-id"
        assert entry.domain == "comp"
        # Removing the ignored entry triggers the unignore step asynchronously.
        await manager.async_remove(entry.entry_id)
        # Right after removal there shouldn't be an entry or flow
        assert len(hass.config_entries.flow.async_progress()) == 0
        assert len(hass.config_entries.async_entries("comp")) == 0
        # But after a 'tick' the unignore step has run and we can see a config entry.
        await hass.async_block_till_done()
        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == "unignore"
        assert entry.unique_id == "mock-unique-id"
        assert entry.title == "yo"
        # And still no active flow
        assert len(hass.config_entries.flow.async_progress()) == 0
async def test_unignore_default_impl(hass, manager):
    """Test that rediscovery is a no-op by default."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        # No async_step_unignore: the base-class default should do nothing.
        VERSION = 1
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == "ignore"
        assert entry.unique_id == "mock-unique-id"
        assert entry.domain == "comp"
        await manager.async_remove(entry.entry_id)
        await hass.async_block_till_done()
        # Default unignore is a no-op: no entry recreated, no flow started.
        assert len(hass.config_entries.async_entries("comp")) == 0
        assert len(hass.config_entries.flow.async_progress()) == 0
async def test_partial_flows_hidden(hass, manager):
    """Test that flows that don't have a cur_step and haven't finished initing are hidden."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    await async_setup_component(hass, "persistent_notification", {})
    # A flag to test our assertion that `async_step_discovery` was called and is in its blocked state
    # This simulates if the step was e.g. doing network i/o
    discovery_started = asyncio.Event()
    # A flag to allow `async_step_discovery` to resume after we have verified the uninited flow is not
    # visible and has not triggered a discovery alert. This lets us control when the mocked network
    # i/o is complete.
    pause_discovery = asyncio.Event()
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            discovery_started.set()
            await pause_discovery.wait()
            return self.async_show_form(step_id="someform")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Start a config entry flow and wait for it to be blocked
        init_task = asyncio.ensure_future(
            manager.flow.async_init(
                "comp",
                context={"source": config_entries.SOURCE_DISCOVERY},
                data={"unique_id": "mock-unique-id"},
            )
        )
        await discovery_started.wait()
        # While it's blocked it shouldn't be visible or trigger discovery notifications
        assert len(hass.config_entries.flow.async_progress()) == 0
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is None
        # Let the flow init complete
        pause_discovery.set()
        # When it's complete it should now be visible in async_progress and have triggered
        # discovery notifications
        result = await init_task
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert len(hass.config_entries.flow.async_progress()) == 1
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is not None
async def test_async_setup_init_entry(hass):
    """Test a config entry being initialized during integration setup."""
    async def mock_async_setup(hass, config):
        """Mock setup."""
        # Kick off an import flow from within the component's own setup.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                "comp", context={"source": config_entries.SOURCE_IMPORT}, data={},
            )
        )
        return True
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    await async_setup_component(hass, "persistent_notification", {})
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_import(self, user_input):
            """Test import step creating entry."""
            return self.async_create_entry(title="title", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        assert await async_setup_component(hass, "comp", {})
        # Allow the task scheduled during setup to run to completion.
        await hass.async_block_till_done()
        assert len(async_setup_entry.mock_calls) == 1
        entries = hass.config_entries.async_entries("comp")
        assert len(entries) == 1
        assert entries[0].state == config_entries.ENTRY_STATE_LOADED
async def test_async_setup_update_entry(hass):
    """Test a config entry being updated during integration setup."""
    entry = MockConfigEntry(domain="comp", data={"value": "initial"})
    entry.add_to_hass(hass)
    async def mock_async_setup(hass, config):
        """Mock setup."""
        # Kick off an import flow from within the component's own setup.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                "comp", context={"source": config_entries.SOURCE_IMPORT}, data={},
            )
        )
        return True
    async def mock_async_setup_entry(hass, entry):
        """Mock setting up an entry."""
        # By the time the entry is set up, the import flow must have
        # already updated its data.
        assert entry.data["value"] == "updated"
        return True
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=mock_async_setup,
            async_setup_entry=mock_async_setup_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    await async_setup_component(hass, "persistent_notification", {})
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_import(self, user_input):
            """Test import step updating existing entry."""
            self.hass.config_entries.async_update_entry(
                entry, data={"value": "updated"}
            )
            return self.async_abort(reason="yo")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        assert await async_setup_component(hass, "comp", {})
        entries = hass.config_entries.async_entries("comp")
        assert len(entries) == 1
        assert entries[0].state == config_entries.ENTRY_STATE_LOADED
        assert entries[0].data == {"value": "updated"}
@pytest.mark.parametrize(
    "discovery_source",
    (
        config_entries.SOURCE_DISCOVERY,
        config_entries.SOURCE_SSDP,
        config_entries.SOURCE_HOMEKIT,
        config_entries.SOURCE_ZEROCONF,
        config_entries.SOURCE_HASSIO,
    ),
)
async def test_flow_with_default_discovery(hass, manager, discovery_source):
    """Test that finishing a default discovery flow removes the unique ID in the entry."""
    mock_integration(
        hass, MockModule("comp", async_setup_entry=AsyncMock(return_value=True)),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        # Only a user step is defined, so discovery falls back to the
        # default discovery handling (placeholder unique ID).
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            if user_input is None:
                return self.async_show_form(step_id="user")
            return self.async_create_entry(title="yo", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": discovery_source}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        # In progress, the flow holds the placeholder default-discovery ID.
        assert (
            flows[0]["context"]["unique_id"]
            == config_entries.DEFAULT_DISCOVERY_UNIQUE_ID
        )
        # Finish flow
        result2 = await manager.flow.async_configure(
            result["flow_id"], user_input={"fake": "data"}
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert len(hass.config_entries.flow.async_progress()) == 0
    entry = hass.config_entries.async_entries("comp")[0]
    assert entry.title == "yo"
    assert entry.source == discovery_source
    # The placeholder ID must not be persisted on the created entry.
    assert entry.unique_id is None
async def test_flow_with_default_discovery_with_unique_id(hass, manager):
    """Test discovery flow using the default discovery is ignored when unique ID is set."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            await self.async_set_unique_id("mock-unique-id")
            # This call should make no difference, as a unique ID is set
            await self._async_handle_discovery_without_unique_id()
            return self.async_show_form(step_id="mock")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
    # The explicit unique ID wins over the default-discovery placeholder.
    assert flows[0]["context"]["unique_id"] == "mock-unique-id"
async def test_default_discovery_abort_existing_entries(hass, manager):
    """Test that a flow without discovery implementation aborts when a config entry exists."""
    hass.config.components.add("comp")
    existing = MockConfigEntry(domain="comp", data={}, unique_id="mock-unique-id")
    existing.add_to_hass(hass)
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Config flow with no discovery step of its own."""

        VERSION = 1

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        outcome = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}
        )
        # Default discovery must refuse to start a flow for an already
        # configured domain.
        assert outcome["type"] == data_entry_flow.RESULT_TYPE_ABORT
        assert outcome["reason"] == "already_configured"
async def test_default_discovery_in_progress(hass, manager):
    """Test that a flow using default discovery can only be triggered once."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            # unique_id may be a real value (first init) or None (second init).
            await self.async_set_unique_id(discovery_info.get("unique_id"))
            await self._async_handle_discovery_without_unique_id()
            return self.async_show_form(step_id="mock")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_DISCOVERY},
            data={"unique_id": "mock-unique-id"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM

        # Second discovery without a unique ID
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
        )
        # The duplicate default-discovery flow is aborted immediately.
        assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT

        # Only the original flow (with the real unique ID) remains in progress.
        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        assert flows[0]["context"]["unique_id"] == "mock-unique-id"
async def test_default_discovery_abort_on_new_unique_flow(hass, manager):
    """Test that a flow using default discovery is aborted when a second flow with unique ID is created."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            # unique_id may be missing (default discovery) or a real value.
            await self.async_set_unique_id(discovery_info.get("unique_id"))
            await self._async_handle_discovery_without_unique_id()
            return self.async_show_form(step_id="mock")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # First discovery with default, no unique ID
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM

        # Second discovery brings in a unique ID
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_DISCOVERY},
            data={"unique_id": "mock-unique-id"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM

        # Ensure the first one is cancelled and we end up with just the last one
        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        assert flows[0]["context"]["unique_id"] == "mock-unique-id"
########################################################################
# $Header: /var/local/cvsroot/4Suite/Ft/Lib/DistExt/__init__.py,v 1.19.2.5 2006/10/30 20:39:16 jkloth Exp $
"""
Extensions to distutils to support building, installing, packaging 4Suite
Copyright 2005 Fourthought, Inc. (USA).
Detailed license and copyright information: http://4suite.org/COPYRIGHT
Project home, documentation, distributions: http://4suite.org/
"""
# Make sure that we are using the proper version of Distutils
# We make certain assumptions about the implementation
def EnsureVersion(version):
    """Checks Distutils version against specified version number"""
    # We need to use LooseVersions because of distutils in Python 2.1
    try:
        import distutils
    except ImportError:
        raise ImportError('Requires distutils v%s or newer.\n'
                          'No distutils found.' % version)
    from distutils import __version__
    from distutils.version import LooseVersion
    installed = LooseVersion(__version__)
    required = LooseVersion(version)
    # Raise only when the requirement is strictly newer than what is installed.
    if required > installed:
        raise ImportError('Requires distutils v%s or newer.\n'
                          'Found version %s.' % (required, installed))
    return
# This package assumes distutils 1.0.2+ internals; fail fast otherwise.
EnsureVersion('1.0.2')

# For convienence of script writers
from distutils.core import setup as _setup, DEBUG
from distutils.errors import DistutilsError
from distutils.extension import Extension
from Ft.Lib.DistExt import Structures
from Ft.Lib.DistExt.Structures import *

# Public API: our setup wrapper plus everything Structures exports.
__all__ = ['EnsureVersion', 'Extension', 'setup']
__all__.extend(Structures.__all__)
# Thin wrapper around distutils.core.setup that installs PackageManager as
# the default Distribution class and converts DistutilsError into a clean
# SystemExit message (unless distutils debugging is enabled).
# NOTE: Python 2 `except X, e` syntax - this module is Python 2 only.
def setup(**attrs):
    if 'distclass' not in attrs:
        # Imported lazily so merely importing this package stays cheap.
        from Ft.Lib.DistExt.PackageManager import PackageManager
        attrs['distclass'] = PackageManager
    # Only needed for Python 2.2
    try:
        return _setup(**attrs)
    except DistutilsError, error:
        if DEBUG: raise
        raise SystemExit('error: ' + str(error))
# Present the real setup() documentation to help() users.
setup.__doc__ = _setup.__doc__
# -- Fixup various compatibility issues --------------------------------
import sys, os, re
from distutils import sysconfig, util

# Fix broken Mac OSX Jaguar Python
if sys.platform.startswith('darwin') and util.get_platform().endswith('-ppc'):
    cfgvars = sysconfig.get_config_vars()
    # The Makefile vars are already expanded, replace all uses of LDFLAGS
    # (a ppc-only build must not pass '-arch i386' to the linker).
    for name in ('LDFLAGS', 'LDSHARED', 'BLDSHARED'):
        if cfgvars[name].find('-arch i386') != -1:
            cfgvars[name] = cfgvars[name].replace('-arch i386', '')
    # Avoid leaking loop temporaries into the module namespace.
    del name, cfgvars
# Backports for Python < 2.3: align Command.announce, compiler environment
# overrides and sysconfig helpers with their Python 2.3 behavior.
if sys.version < '2.3':
    # Make the announce method work the same for all versions.
    from distutils.cmd import Command
    def announce(self, msg, level=1):
        # NOTE(review): prints when level >= verbose, which is the reverse of
        # old distutils' `self.verbose >= level` check - confirm intent.
        if level >= self.verbose:
            print msg
            sys.stdout.flush()
    Command.announce = announce
    del announce

    # Allow environment vars to override settings from the Makefile
    # (using version from Python 2.3)
    def customize_compiler(compiler):
        """Do any platform-specific customization of a CCompiler instance.

        Mainly needed on Unix, so we can plug in the information that
        varies across Unices and is stored in Python's Makefile.
        """
        if compiler.compiler_type == "unix":
            (cc, cxx, opt, basecflags, ccshared, ldshared, so_ext) = \
                sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
                                          'CCSHARED', 'LDSHARED', 'SO')
            if os.environ.has_key('CC'):
                cc = os.environ['CC']
            if os.environ.has_key('CXX'):
                cxx = os.environ['CXX']
            if os.environ.has_key('CPP'):
                cpp = os.environ['CPP']
            else:
                cpp = cc + " -E"        # not always
            if os.environ.has_key('LDFLAGS'):
                ldshared = ldshared + ' ' + os.environ['LDFLAGS']
            if basecflags:
                opt = basecflags + ' ' + opt
            if os.environ.has_key('CFLAGS'):
                # CFLAGS affect both compiling and linking of shared objects.
                opt = opt + ' ' + os.environ['CFLAGS']
                ldshared = ldshared + ' ' + os.environ['CFLAGS']
            if os.environ.has_key('CPPFLAGS'):
                cpp = cpp + ' ' + os.environ['CPPFLAGS']
                opt = opt + ' ' + os.environ['CPPFLAGS']
                ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
            cc_cmd = cc + ' ' + opt
            compiler.set_executables(
                preprocessor=cpp,
                compiler=cc_cmd,
                compiler_so=cc_cmd + ' ' + ccshared,
                linker_so=ldshared,
                linker_exe=cc)
            compiler.shared_lib_extension = so_ext
    sysconfig.customize_compiler = customize_compiler
    del customize_compiler

    # sysconfig.get_python_version first appeared in Python 2.3.
    def get_python_version():
        return sys.version[:3]
    sysconfig.get_python_version = get_python_version
    del get_python_version
# -- Monkey-patch filelist pattern matching
from distutils import filelist
# Characters that a glob wildcard must never match: path separators, plus the
# characters Windows forbids in file names.
_special_chars = r'<>:"/\\|' if sys.platform == 'win32' else r'/'

def glob_to_re(pat, _specials=_special_chars):
    """Translate the shell glob *pat* into an anchored regular expression.

    Unlike ``fnmatch.translate``, ``*`` and ``?`` never cross a path
    separator (or other platform-special character).  An unterminated
    ``[`` is treated as a literal bracket.
    """
    regex_parts = []
    pos, length = 0, len(pat)
    while pos < length:
        ch = pat[pos]
        pos += 1
        if ch == '*':
            regex_parts.append('[^%s]*' % _specials)
        elif ch == '?':
            regex_parts.append('[^%s]' % _specials)
        elif ch == '[':
            # Find the closing ']' of the character class; a leading '!'
            # (negation) or an immediately following ']' is part of the class.
            end = pos
            if end < length and pat[end] == '!':
                end += 1
            if end < length and pat[end] == ']':
                end += 1
            while end < length and pat[end] != ']':
                end += 1
            if end >= length:
                # Unterminated class: match a literal '['.
                regex_parts.append('\\[')
            else:
                char_class = pat[pos:end].replace('\\', '\\\\')
                pos = end + 1
                if char_class[0] == '!':
                    char_class = '^' + char_class[1:]
                elif char_class[0] == '^':
                    char_class = '\\' + char_class
                regex_parts.append('[%s]' % char_class)
        else:
            regex_parts.append(re.escape(ch))
    return ''.join(regex_parts) + '$'
# Install the path-separator-aware translation in place of distutils' own,
# then drop the temporary module-level name.
filelist.glob_to_re = glob_to_re
del glob_to_re
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
class VodlockerIE(InfoExtractor):
    """Extractor for vodlocker.com video pages."""

    _VALID_URL = r'https?://(?:www\.)?vodlocker\.com/(?P<id>[0-9a-zA-Z]+)(?:\..*?)?'

    _TESTS = [{
        'url': 'http://vodlocker.com/e8wvyzz4sl42',
        'md5': 'ce0c2d18fa0735f1bd91b69b0e54aacf',
        'info_dict': {
            'id': 'e8wvyzz4sl42',
            'ext': 'mp4',
            'title': 'Germany vs Brazil',
            'thumbnail': 're:http://.*\.jpg',
        },
    }]

    def _real_extract(self, url):
        """Return id, title, thumbnail and the direct file URL for *url*."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        fields = self._hidden_inputs(webpage)
        # Some pages require submitting a hidden "download" form before the
        # real player page is served.  Use .get() so pages without an "op"
        # input do not crash with KeyError.
        if fields.get('op') == 'download1':
            self._sleep(3, video_id)  # they do detect when requests happen too fast!
            post = compat_urllib_parse.urlencode(fields)
            req = compat_urllib_request.Request(url, post)
            req.add_header('Content-type', 'application/x-www-form-urlencoded')
            webpage = self._download_webpage(
                req, video_id, 'Downloading video page')

        title = self._search_regex(
            r'id="file_title".*?>\s*(.*?)\s*<(?:br|span)', webpage, 'title')
        thumbnail = self._search_regex(
            r'image:\s*"(http[^\"]+)",', webpage, 'thumbnail')
        url = self._search_regex(
            r'file:\s*"(http[^\"]+)",', webpage, 'file url')

        # Single direct-download quality only.
        formats = [{
            'format_id': 'sd',
            'url': url,
        }]

        return {
            'id': video_id,
            'title': title,
            'thumbnail': thumbnail,
            'formats': formats,
        }
# Derive promotion expansions for an archive_dist_test_debug task: fetch the
# Evergreen task document, compute the CDN address for the debug symbols,
# download them, and emit the expansion / attachment files used downstream.
set -o errexit

# Pull the Evergreen task document for the task being promoted.
curl --fail-with-body \
  --header "Api-User: ${EVERGREEN_API_USER}" \
  --header "Api-Key: ${EVERGREEN_API_KEY}" \
  -L https://evergreen.mongodb.com/rest/v2/tasks/${PROMOTE_TASK_ID} \
  --output ./debug_task_data.json
echo ".................."
echo "archive_dist_test_debug task data"
echo ".................."
cat debug_task_data.json

# Locate the debug-symbols artifact; its extension decides whether we
# promote a .zip or a .tgz archive.
fetch_address=$(cat debug_task_data.json | jq -r '.artifacts[] | select(.name == "mongo-debugsymbols.tgz" or .name == "mongo-debugsymbols.zip") | .url')
if [[ "$fetch_address" =~ ".zip" ]]; then
  promote_extension="zip"
else
  promote_extension="tgz"
fi

# The caller may override the project identifier; otherwise read it from
# the task document.
if [ -z "$PROMOTE_PROJECT_IDENTIFIER" ]; then
  promote_project_identifier=$(cat debug_task_data.json | jq -r ".project_identifier")
else
  promote_project_identifier=$PROMOTE_PROJECT_IDENTIFIER
fi
promote_version_id=$(cat debug_task_data.json | jq -r ".version_id")
promote_build_id=$(cat debug_task_data.json | jq -r ".build_id")
promote_build_variant=$(cat debug_task_data.json | jq -r ".build_variant")

# The promoted task must depend on an archive_dist_test task; bail out
# early with a support pointer if the first dependency looks wrong.
promote_archive_dist_test_task_id=$(cat debug_task_data.json | jq -r '.depends_on[0].id')
if [[ ! "$promote_archive_dist_test_task_id" =~ "archive_dist_test" ]]; then
  echo "task '$promote_archive_dist_test_task_id' does not appear to be an archive_dist_test task, please report this issue in #ask-devprod-release-tools"
  exit 1
fi

# Final internal-CDN location for the promoted debug symbols.
artifact_address="https://internal-downloads.mongodb.com/server-custom-builds/${promote_project_identifier}/${promote_version_id}/${promote_build_variant}/${promote_build_id}/mongo-debugsymbols.${promote_extension}"

# Expansions file consumed by later Evergreen steps.
cat <<EOT >./debug-promote-expansions.yml
promote_archive_dist_test_task_id: "$promote_archive_dist_test_task_id"
promote_cdn_address_debug: "$artifact_address"
EOT
echo ".................."
echo "archive_dist_test_debug promote expansions"
echo ".................."
cat ./debug-promote-expansions.yml
echo ""
echo "The debug symbols will be accessible at '$artifact_address'"
echo ""

# Pull the symbols locally so a later step can upload them to the CDN.
echo "fetching debug symbols from $fetch_address"
curl --fail-with-body -L $fetch_address --output "mongo-debugsymbols.$promote_extension"

# JSON payload used to attach the public download link to the task.
attach_body=$(printf '[
  {
    "name": "Custom Build Debug Symbols URL",
    "link": "%s",
    "visibility": "public"
  }
]' "$artifact_address")
echo "$attach_body" >./attach-address-debug.json
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2012 Carlos Vercelino - CLVsol.net #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from osv import fields, osv
import pooler
from tools.translate import _
import sys
class create_test_report(osv.osv_memory):
    """Wizard: create a lab test report for the active dispensation record."""
    _name = 'oehealth.lab_test.create'

    def create_lab_test(self, cr, uid, ids, context=None):
        """Create the report, mark the request as 'tested' and open the report.

        Raises except_osv when the request is already in the 'tested' state,
        i.e. a report was created for it before.
        """
        if context is None:
            context = {}
        test_request_obj = self.pool.get('oehealth.dispensation')
        # NOTE(review): creating the report through 'oehealth.dispensation'
        # while the returned action targets 'oehealth.lab_test' looks
        # inconsistent - this should probably be
        # self.pool.get('oehealth.lab_test'); confirm before changing.
        lab_obj = self.pool.get('oehealth.dispensation')
        test_report_data = {}
        test_cases = []
        test_obj = test_request_obj.browse(cr, uid, context.get('active_id'), context=context)
        # A request is written to state 'tested' below once its report is
        # created, so an already-'tested' request must be rejected.  The
        # previous '!=' comparison was inverted and blocked every first run.
        if test_obj.state == 'tested':
            raise osv.except_osv(_('UserError'),_('Test Report already created.'))
        test_report_data['test'] = test_obj.name.id
        test_report_data['patient'] = test_obj.patient_id.id
        #test_report_data['requestor'] = test_obj.doctor_id.id
        test_report_data['date_requested'] = test_obj.date
        # Copy each criterion of the requested test into the report lines
        # (one2many triple: (0, 0, values) creates a new line).
        for criterion in test_obj.name.criteria:
            test_cases.append((0, 0, {'name': criterion.name,
                                      'sequence': criterion.sequence,
                                      'normal_range': criterion.normal_range,
                                      'unit': criterion.unit.id,
                                      }))
        test_report_data['criteria'] = test_cases
        lab_id = lab_obj.create(cr, uid, test_report_data, context=context)
        test_request_obj.write(cr, uid, context.get('active_id'), {'state': 'tested'})
        # Open the freshly created report in the Lab Test views.
        return {
            'domain': "[('id','=', " + str(lab_id) + ")]",
            'name': 'Lab Test Report',
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'oehealth.lab_test',
            'type': 'ir.actions.act_window'
        }
create_test_report()
#!/usr/bin/env ruby
module RubyVersion
  # Git tag for a release: "v4.0.0" style from 4.x on, the historical
  # underscore style ("v3_2_1") before that.
  def self.tag(version)
    if Integer(version.split('.', 2).first) >= 4
      "v#{version}"
    else
      "v#{version.tr('.-', '_')}"
    end
  end

  # Return the previous version to be used for release diff links.
  # For a ".0" version, it returns the previous ".0" version.
  # For a non-".0" version, it returns the previous teeny version.
  def self.previous(version)
    m = /\A(\d+)\.(\d+)\.(\d+)(?:-(?:preview|rc)\d+)?\z/.match(version)
    raise "unexpected version string '#{version}'" unless m

    major, minor, teeny = m.captures.map { |part| Integer(part) }
    if teeny != 0
      "#{major}.#{minor}.#{teeny - 1}"
    elsif minor != 0 # && teeny == 0
      "#{major}.#{minor - 1}.#{teeny}"
    else # minor == 0 && teeny == 0
      case major
      when 3 then "2.7.0"
      when 4 then "3.4.0"
      else
        raise "it doesn't know what is the previous version of '#{version}'"
      end
    end
  end
end
# Command-line interface: `ruby-version.rb <tag|previous|previous-tag> VERSION`.
if __FILE__ == $0
  case ARGV[0]
  when "tag"
    print RubyVersion.tag(ARGV[1])
  when "previous"
    print RubyVersion.previous(ARGV[1])
  when "previous-tag"
    print RubyVersion.tag(RubyVersion.previous(ARGV[1]))
  else
    # Previously this branch built the message as a bare expression, so an
    # unknown command printed nothing and exited 0; report it and fail.
    abort "#{$0}: unexpected command #{ARGV[0].inspect}"
  end
end
/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
admissionregistrationv1 "k8s.io/api/admissionregistration/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
)
// GroupName is the group name used in this package
const GroupName = "admissionregistration.k8s.io"

// SchemeGroupVersion is group version used to register these objects
var SchemeGroupVersion = schema.GroupVersion{Group: GroupName, Version: "v1"}

// Resource takes an unqualified resource and returns a Group qualified GroupResource
func Resource(resource string) schema.GroupResource {
	return SchemeGroupVersion.WithResource(resource).GroupResource()
}

var (
	// localSchemeBuilder aliases the external admissionregistration/v1
	// SchemeBuilder so both packages register into the same scheme.
	localSchemeBuilder = &admissionregistrationv1.SchemeBuilder

	// AddToScheme handler to add items to the schema
	AddToScheme = localSchemeBuilder.AddToScheme
)

func init() {
	// We only register manually written functions here. The registration of the
	// generated functions takes place in the generated files. The separation
	// makes the code compile even when the generated files are missing.
	localSchemeBuilder.Register(addDefaultingFuncs)
}
from __future__ import unicode_literals
from django.contrib.gis.geos import (
LinearRing, LineString, Point, Polygon, fromstr,
)
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import total_ordering
from django.utils.html import html_safe
@html_safe
@python_2_unicode_compatible
class GEvent(object):
    """
    A Python wrapper for the Google GEvent object.

    Events can be attached to any object derived from GOverlayBase with the
    add_event() call.

    For more information please see the Google Maps API Reference:
     http://code.google.com/apis/maps/documentation/reference.html#GEvent

    Example:

      from django.shortcuts import render_to_response
      from django.contrib.gis.maps.google import GoogleMap, GEvent, GPolyline

      def sample_request(request):
          polyline = GPolyline('LINESTRING(101 26, 112 26, 102 31)')
          event = GEvent('click',
                         'function() { location.href = "http://www.google.com"}')
          polyline.add_event(event)
          return render_to_response('mytemplate.html',
                                    {'google' : GoogleMap(polylines=[polyline])})
    """

    def __init__(self, event, action):
        """
        Initializes a GEvent object.

        Parameters:

          event:
            string for the event, such as 'click'. The event must be a valid
            event for the object in the Google Maps API.
            There is no validation of the event type within Django.

          action:
            string containing a Javascript function, such as
            'function() { location.href = "newurl";}'
            The string must be a valid Javascript function. Again there is no
            validation of the function within Django.
        """
        self.event = event
        self.action = action

    def __str__(self):
        "Returns the parameter part of a GEvent."
        # Rendered as: "<event>", <action> - i.e. the arguments of a JS
        # GEvent.addListener-style call.
        return '"%s", %s' % (self.event, self.action)
@html_safe
@python_2_unicode_compatible
class GOverlayBase(object):
    """Shared behavior for Google Maps overlay wrappers (events, JS output)."""

    def __init__(self):
        # GEvent instances attached via add_event().
        self.events = []

    def latlng_from_coords(self, coords):
        "Generates a JavaScript array of GLatLng objects for the given coordinates."
        pairs = ('new GLatLng(%s,%s)' % (y, x) for x, y in coords)
        return '[%s]' % ','.join(pairs)

    def add_event(self, event):
        "Attaches a GEvent to the overlay object."
        self.events.append(event)

    def __str__(self):
        "The string representation is the JavaScript API call."
        return '%s(%s)' % (type(self).__name__, self.js_params)
class GPolygon(GOverlayBase):
    """
    A Python wrapper for the Google GPolygon object. For more information
    please see the Google Maps API Reference:
    http://code.google.com/apis/maps/documentation/reference.html#GPolygon
    """

    def __init__(self, poly,
                 stroke_color='#0000ff', stroke_weight=2, stroke_opacity=1,
                 fill_color='#0000ff', fill_opacity=0.4):
        """
        The GPolygon object initializes on a GEOS Polygon or a parameter that
        may be instantiated into GEOS Polygon. Please note that this will not
        depict a Polygon's internal rings.

        Keyword Options:

          stroke_color:
            The color of the polygon outline. Defaults to '#0000ff' (blue).

          stroke_weight:
            The width of the polygon outline, in pixels. Defaults to 2.

          stroke_opacity:
            The opacity of the polygon outline, between 0 and 1. Defaults to 1.

          fill_color:
            The color of the polygon fill. Defaults to '#0000ff' (blue).

          fill_opacity:
            The opacity of the polygon fill. Defaults to 0.4.
        """
        # Coerce WKT/EWKT/HEX strings and coordinate sequences to a GEOS
        # Polygon before validating the type.
        if isinstance(poly, six.string_types):
            poly = fromstr(poly)
        if isinstance(poly, (tuple, list)):
            poly = Polygon(poly)
        if not isinstance(poly, Polygon):
            raise TypeError('GPolygon may only initialize on GEOS Polygons.')

        # Getting the envelope of the input polygon (used for automatically
        # determining the zoom level).
        self.envelope = poly.envelope

        # Translating the coordinates into a JavaScript array of
        # Google `GLatLng` objects (only the exterior ring is rendered).
        self.points = self.latlng_from_coords(poly.shell.coords)

        # Stroke settings.
        self.stroke_color, self.stroke_opacity, self.stroke_weight = stroke_color, stroke_opacity, stroke_weight

        # Fill settings.
        self.fill_color, self.fill_opacity = fill_color, fill_opacity

        super(GPolygon, self).__init__()

    @property
    def js_params(self):
        # Argument order matches the GPolygon JavaScript constructor.
        return '%s, "%s", %s, %s, "%s", %s' % (self.points, self.stroke_color, self.stroke_weight, self.stroke_opacity,
                                               self.fill_color, self.fill_opacity)
class GPolyline(GOverlayBase):
    """
    A Python wrapper for the Google GPolyline object. For more information
    please see the Google Maps API Reference:
    http://code.google.com/apis/maps/documentation/reference.html#GPolyline
    """

    def __init__(self, geom, color='#0000ff', weight=2, opacity=1):
        """
        The GPolyline object may be initialized on GEOS LineString, LinearRing,
        and Polygon objects (internal rings not supported) or a parameter that
        may be instantiated into one of the above geometries.

        Keyword Options:

          color:
            The color to use for the polyline. Defaults to '#0000ff' (blue).

          weight:
            The width of the polyline, in pixels. Defaults to 2.

          opacity:
            The opacity of the polyline, between 0 and 1. Defaults to 1.
        """
        # If a GEOS geometry isn't passed in, try to construct one.
        if isinstance(geom, six.string_types):
            geom = fromstr(geom)
        if isinstance(geom, (tuple, list)):
            geom = Polygon(geom)

        # Generating the lat/lng coordinate pairs.
        if isinstance(geom, (LineString, LinearRing)):
            self.latlngs = self.latlng_from_coords(geom.coords)
        elif isinstance(geom, Polygon):
            # Only the exterior ring of a polygon is drawn.
            self.latlngs = self.latlng_from_coords(geom.shell.coords)
        else:
            raise TypeError('GPolyline may only initialize on GEOS LineString, LinearRing, and/or Polygon geometries.')

        # Getting the envelope for automatic zoom determination.
        self.envelope = geom.envelope
        self.color, self.weight, self.opacity = color, weight, opacity
        super(GPolyline, self).__init__()

    @property
    def js_params(self):
        # Argument order matches the GPolyline JavaScript constructor.
        return '%s, "%s", %s, %s' % (self.latlngs, self.color, self.weight, self.opacity)
@total_ordering
class GIcon(object):
    """
    Creates a GIcon object to pass into a Gmarker object.

    The keyword arguments map to instance attributes of the same name. These,
    in turn, correspond to a subset of the attributes of the official GIcon
    javascript object:

    http://code.google.com/apis/maps/documentation/reference.html#GIcon

    Because a Google map often uses several different icons, a name field has
    been added to the required arguments.

    Required Arguments:
        varname:
            A string which will become the basis for the js variable name of
            the marker, for this reason, your code should assign a unique
            name for each GIcon you instantiate, otherwise there will be
            name space collisions in your javascript.

    Keyword Options:
        image:
            The url of the image to be used as the icon on the map defaults
            to 'G_DEFAULT_ICON'

        iconsize:
            a tuple representing the pixel size of the foreground (not the
            shadow) image of the icon, in the format: (width, height) ex.:

            GIcon('fast_food',
                  image="/media/icon/star.png",
                  iconsize=(15,10))

            Would indicate your custom icon was 15px wide and 10px height.

        shadow:
            the url of the image of the icon's shadow

        shadowsize:
            a tuple representing the pixel size of the shadow image, format is
            the same as ``iconsize``

        iconanchor:
            a tuple representing the pixel coordinate relative to the top left
            corner of the icon image at which this icon is anchored to the map.
            In (x, y) format. x increases to the right in the Google Maps
            coordinate system and y increases downwards in the Google Maps
            coordinate system.)

        infowindowanchor:
            The pixel coordinate relative to the top left corner of the icon
            image at which the info window is anchored to this icon.
    """

    def __init__(self, varname, image=None, iconsize=None,
                 shadow=None, shadowsize=None, iconanchor=None,
                 infowindowanchor=None):
        self.varname = varname
        self.image = image
        self.iconsize = iconsize
        self.shadow = shadow
        self.shadowsize = shadowsize
        self.iconanchor = iconanchor
        self.infowindowanchor = infowindowanchor

    def __eq__(self, other):
        # Icons are identified solely by their JS variable name.
        return self.varname == other.varname

    def __lt__(self, other):
        # total_ordering derives the remaining comparisons from __eq__/__lt__.
        return self.varname < other.varname

    def __hash__(self):
        # XOR with hash of GIcon type so that hash('varname') won't
        # equal hash(GIcon('varname')).
        return hash(self.__class__) ^ hash(self.varname)
class GMarker(GOverlayBase):
    """
    A Python wrapper for the Google GMarker object. For more information
    please see the Google Maps API Reference:
    http://code.google.com/apis/maps/documentation/reference.html#GMarker

    Example:

      from django.shortcuts import render_to_response
      from django.contrib.gis.maps.google.overlays import GMarker, GEvent

      def sample_request(request):
          marker = GMarker('POINT(101 26)')
          event = GEvent('click',
                         'function() { location.href = "http://www.google.com"}')
          marker.add_event(event)
          return render_to_response('mytemplate.html',
                                    {'google' : GoogleMap(markers=[marker])})
    """

    def __init__(self, geom, title=None, draggable=False, icon=None):
        """
        The GMarker object may initialize on GEOS Points or a parameter
        that may be instantiated into a GEOS point. Keyword options map to
        GMarkerOptions -- so far only the title option is supported.

        Keyword Options:
         title:
           Title option for GMarker, will be displayed as a tooltip.

         draggable:
           Draggable option for GMarker, disabled by default.
        """
        # If a GEOS geometry isn't passed in, try to construct one.
        if isinstance(geom, six.string_types):
            geom = fromstr(geom)
        if isinstance(geom, (tuple, list)):
            geom = Point(geom)
        if isinstance(geom, Point):
            self.latlng = self.latlng_from_coords(geom.coords)
        else:
            raise TypeError('GMarker may only initialize on GEOS Point geometry.')

        # Getting the envelope for automatic zoom determination.
        self.envelope = geom.envelope

        # TODO: Add support for more GMarkerOptions
        self.title = title
        self.draggable = draggable
        self.icon = icon
        super(GMarker, self).__init__()

    def latlng_from_coords(self, coords):
        # A marker has a single coordinate pair, not an array of them.
        return 'new GLatLng(%s,%s)' % (coords[1], coords[0])

    def options(self):
        # Build the GMarkerOptions JS object literal, emitting only the
        # options that were actually set.
        result = []
        if self.title:
            result.append('title: "%s"' % self.title)
        if self.icon:
            result.append('icon: %s' % self.icon.varname)
        if self.draggable:
            result.append('draggable: true')
        return '{%s}' % ','.join(result)

    @property
    def js_params(self):
        return '%s, %s' % (self.latlng, self.options())
# Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import base64
import binascii
import os
import re
import StringIO
from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
from boto.s3.keyfile import KeyFile
class Key(S3Key):
"""
Represents a key (object) in a GS bucket.
:ivar bucket: The parent :class:`boto.gs.bucket.Bucket`.
:ivar name: The name of this Key object.
:ivar metadata: A dictionary containing user metadata that you
wish to store with the object or that has been retrieved from
an existing object.
:ivar cache_control: The value of the `Cache-Control` HTTP header.
:ivar content_type: The value of the `Content-Type` HTTP header.
:ivar content_encoding: The value of the `Content-Encoding` HTTP header.
:ivar content_disposition: The value of the `Content-Disposition` HTTP
header.
:ivar content_language: The value of the `Content-Language` HTTP header.
:ivar etag: The `etag` associated with this object.
:ivar last_modified: The string timestamp representing the last
time this object was modified in GS.
:ivar owner: The ID of the owner of this object.
:ivar storage_class: The storage class of the object. Currently, one of:
STANDARD | DURABLE_REDUCED_AVAILABILITY.
:ivar md5: The MD5 hash of the contents of the object.
:ivar size: The size, in bytes, of the object.
:ivar generation: The generation number of the object.
:ivar meta_generation: The generation number of the object metadata.
:ivar encrypted: Whether the object is encrypted while at rest on
the server.
"""
generation = None
meta_generation = None
def __repr__(self):
if self.generation and self.meta_generation:
ver_str = '#%s.%s' % (self.generation, self.meta_generation)
else:
ver_str = ''
if self.bucket:
return '<Key: %s,%s%s>' % (self.bucket.name, self.name, ver_str)
else:
return '<Key: None,%s%s>' % (self.name, ver_str)
    def endElement(self, name, value, connection):
        """SAX handler: record the value of a completed XML element on self.

        Called while parsing a bucket-listing response to populate this
        Key's attributes.
        """
        if name == 'Key':
            self.name = value
        elif name == 'ETag':
            self.etag = value
        elif name == 'IsLatest':
            # Convert the textual flag into a real boolean.
            if value == 'true':
                self.is_latest = True
            else:
                self.is_latest = False
        elif name == 'LastModified':
            self.last_modified = value
        elif name == 'Size':
            self.size = int(value)
        elif name == 'StorageClass':
            self.storage_class = value
        elif name == 'Owner':
            # Owner is handled elsewhere; nothing to record here.
            pass
        elif name == 'VersionId':
            self.version_id = value
        elif name == 'Generation':
            self.generation = value
        elif name == 'MetaGeneration':
            self.meta_generation = value
        else:
            # Store any unrecognized element verbatim as an attribute.
            setattr(self, name, value)
    def handle_version_headers(self, resp, force=False):
        """Record the GS generation/metageneration headers from *resp*.

        *force* is accepted for signature compatibility with the S3 key
        class and is not used here.
        """
        self.meta_generation = resp.getheader('x-goog-metageneration', None)
        self.generation = resp.getheader('x-goog-generation', None)
    def get_file(self, fp, headers=None, cb=None, num_cb=10,
                 torrent=False, version_id=None, override_num_retries=None,
                 response_headers=None):
        """Download this key's contents into the open file object *fp*.

        When the key has a known generation, pin the download to it via a
        query argument so a concurrent overwrite cannot change what is
        fetched.  *torrent* and *version_id* are accepted for S3 signature
        compatibility but are not forwarded here.
        """
        query_args = None
        if self.generation:
            query_args = ['generation=%s' % self.generation]
        self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb,
                                override_num_retries=override_num_retries,
                                response_headers=response_headers,
                                query_args=query_args)
    def delete(self):
        """Delete this key from its bucket, pinned to this key's
        version_id/generation."""
        return self.bucket.delete_key(self.name, version_id=self.version_id,
                                      generation=self.generation)
def add_email_grant(self, permission, email_address, headers=None):
    """
    Convenience method that provides a quick way to add an email grant to a
    key. This method retrieves the current ACL, creates a new grant based on
    the parameters passed in, adds that grant to the ACL and then PUT's the
    new ACL back to GS.

    :type permission: string
    :param permission: The permission being granted. Should be one of:
        READ|FULL_CONTROL
        See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
        for more details on permissions.

    :type email_address: string
    :param email_address: The email address associated with the Google
        account to which you are granting the permission.

    :type headers: dict
    :param headers: (optional) Additional headers to pass with the ACL
        requests. Added (with a default of None) for consistency with
        add_group_email_grant; existing callers are unaffected.
    """
    acl = self.get_acl(headers=headers)
    acl.add_email_grant(permission, email_address)
    self.set_acl(acl, headers=headers)
def add_user_grant(self, permission, user_id, headers=None):
    """
    Convenience method that provides a quick way to add a canonical user
    grant to a key. This method retrieves the current ACL, creates a new
    grant based on the parameters passed in, adds that grant to the ACL and
    then PUT's the new ACL back to GS.

    :type permission: string
    :param permission: The permission being granted. Should be one of:
        READ|FULL_CONTROL
        See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
        for more details on permissions.

    :type user_id: string
    :param user_id: The canonical user id associated with the GS account to
        which you are granting the permission.

    :type headers: dict
    :param headers: (optional) Additional headers to pass with the ACL
        requests. Added (with a default of None) for consistency with
        add_group_email_grant; existing callers are unaffected.
    """
    acl = self.get_acl(headers=headers)
    acl.add_user_grant(permission, user_id)
    self.set_acl(acl, headers=headers)
def add_group_email_grant(self, permission, email_address, headers=None):
    """Grant *permission* to the Google Group identified by *email_address*.

    Fetches the current ACL, appends a group-email grant, and PUTs the
    ACL back to GS. ``permission`` should be one of READ|FULL_CONTROL; see
    http://code.google.com/apis/storage/docs/developer-guide.html#authorization
    for details. ``headers`` are passed through to both ACL requests.
    """
    current_acl = self.get_acl(headers=headers)
    current_acl.add_group_email_grant(permission, email_address)
    self.set_acl(current_acl, headers=headers)
def add_group_grant(self, permission, group_id, headers=None):
    """
    Convenience method that provides a quick way to add a canonical group
    grant to a key. This method retrieves the current ACL, creates a new
    grant based on the parameters passed in, adds that grant to the ACL and
    then PUT's the new ACL back to GS.

    :type permission: string
    :param permission: The permission being granted. Should be one of:
        READ|FULL_CONTROL
        See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
        for more details on permissions.

    :type group_id: string
    :param group_id: The canonical group id associated with the Google
        Groups account you are granting the permission to.

    :type headers: dict
    :param headers: (optional) Additional headers to pass with the ACL
        requests. Added (with a default of None) for consistency with
        add_group_email_grant; existing callers are unaffected.
    """
    acl = self.get_acl(headers=headers)
    acl.add_group_grant(permission, group_id)
    self.set_acl(acl, headers=headers)
def set_contents_from_file(self, fp, headers=None, replace=True,
                           cb=None, num_cb=10, policy=None, md5=None,
                           res_upload_handler=None, size=None, rewind=False,
                           if_generation=None):
    """
    Store an object in GS using the name of the Key object as the
    key in GS and the contents of the file pointed to by 'fp' as the
    contents.

    :type fp: file
    :param fp: the file whose contents are to be uploaded

    :type headers: dict
    :param headers: additional HTTP headers to be sent with the PUT request.

    :type replace: bool
    :param replace: If this parameter is False, the method will first check
        to see if an object exists in the bucket with the same key. If it
        does, it won't overwrite it. The default value is True which will
        overwrite the object.

    :type cb: function
    :param cb: a callback function that will be called to report progress
        on the upload. The callback should accept two integer parameters,
        the first representing the number of bytes that have been
        successfully transmitted to GS and the second representing the
        total number of bytes that need to be transmitted.

    :type num_cb: int
    :param num_cb: (optional) If a callback is specified with the cb
        parameter, this parameter determines the granularity of the
        callback by defining the maximum number of times the callback will
        be called during the file transfer.

    :type policy: :class:`boto.gs.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the new key
        in GS.

    :type md5: tuple
    :param md5: A tuple containing the hexdigest version of the MD5
        checksum of the file as the first element and the Base64-encoded
        version of the plain checksum as the second element. This is the
        same format returned by the compute_md5 method. If present it is
        used as the MD5 of the file (avoiding a second read); otherwise
        the checksum will be computed.

    :type res_upload_handler: ResumableUploadHandler
    :param res_upload_handler: If provided, this handler will perform the
        upload.

    :type size: int
    :param size: (optional) The Maximum number of bytes to read from the
        file pointer (fp). This is useful when uploading a file in
        multiple parts where you are splitting the file up into different
        ranges to be uploaded. If not specified, the default behaviour is
        to read all bytes from the file pointer. Less bytes may be
        available. Notes:

            1. The "size" parameter currently cannot be used when a
               resumable upload handler is given but is still useful for
               uploading part of a file as implemented by the parent class.
            2. At present Google Cloud Storage does not support multipart
               uploads.

    :type rewind: bool
    :param rewind: (optional) If True, the file pointer (fp) will be
        rewound to the start before any bytes are read from it. The
        default behaviour is False which reads from the current position
        of the file pointer (fp).

    :type if_generation: int
    :param if_generation: (optional) If set to a generation number, the
        object will only be written to if its current generation number is
        this value. If set to the value 0, the object will only be written
        if it doesn't already exist.

    :rtype: int
    :return: The number of bytes written to the key.

    TODO: At some point we should refactor the Bucket and Key classes,
    to move functionality common to all providers into a parent class,
    and provider-specific functionality into subclasses (rather than
    just overriding/sharing code the way it currently works).
    """
    provider = self.bucket.connection.provider
    if res_upload_handler and size:
        # could use size instead of file_length if provided but...
        raise BotoClientError(
            '"size" param not supported for resumable uploads.')
    headers = headers or {}
    if policy:
        headers[provider.acl_header] = policy
    if rewind:
        # caller requests reading from beginning of fp.
        fp.seek(0, os.SEEK_SET)
    else:
        # The following seek/tell/seek logic is intended
        # to detect applications using the older interface to
        # set_contents_from_file(), which automatically rewound the
        # file each time the Key was reused. This changed with commit
        # 14ee2d03f4665fe20d19a85286f78d39d924237e, to support uploads
        # split into multiple parts and uploaded in parallel, and at
        # the time of that commit this check was added because otherwise
        # older programs would get a success status and upload an empty
        # object. Unfortunately, it's very inefficient for fp's implemented
        # by KeyFile (used, for example, by gsutil when copying between
        # providers). So, we skip the check for the KeyFile case.
        # TODO: At some point consider removing this seek/tell/seek
        # logic, after enough time has passed that it's unlikely any
        # programs remain that assume the older auto-rewind interface.
        if not isinstance(fp, KeyFile):
            spos = fp.tell()
            fp.seek(0, os.SEEK_END)
            if fp.tell() == spos:
                fp.seek(0, os.SEEK_SET)
                if fp.tell() != spos:
                    # Raise an exception as this is likely a programming
                    # error whereby there is data before the fp but nothing
                    # after it.
                    fp.seek(spos)
                    raise AttributeError('fp is at EOF. Use rewind option '
                                         'or seek() to data start.')
            # seek back to the correct position.
            fp.seek(spos)
    if hasattr(fp, 'name'):
        self.path = fp.name
    if self.bucket is not None:  # PEP 8 identity test (was `!= None`)
        if isinstance(fp, KeyFile):
            # Avoid EOF seek for KeyFile case as it's very inefficient.
            key = fp.getkey()
            size = key.size - fp.tell()
            self.size = size
            # At present both GCS and S3 use MD5 for the etag for
            # non-multipart-uploaded objects. If the etag is 32 hex
            # chars use it as an MD5, to avoid having to read the file
            # twice while transferring.
            if re.match('^"[a-fA-F0-9]{32}"$', key.etag):
                etag = key.etag.strip('"')
                md5 = (etag, base64.b64encode(binascii.unhexlify(etag)))
        if size:
            self.size = size
        else:
            # If md5 is provided, we still need the size, so compute it
            # from the bytes between here and the end of the content.
            spos = fp.tell()
            fp.seek(0, os.SEEK_END)
            self.size = fp.tell() - spos
            fp.seek(spos)
            size = self.size
        if md5 is None:  # PEP 8 identity test (was `== None`)
            md5 = self.compute_md5(fp, size)
        self.md5 = md5[0]
        self.base64md5 = md5[1]
        if self.name is None:
            # No explicit key name: fall back to the content hash.
            self.name = self.md5
        if not replace:
            if self.bucket.lookup(self.name):
                return
        if if_generation is not None:
            headers['x-goog-if-generation-match'] = str(if_generation)
        if res_upload_handler:
            res_upload_handler.send_file(self, fp, headers, cb, num_cb)
        else:
            # Not a resumable transfer so use basic send_file mechanism.
            self.send_file(fp, headers, cb, num_cb, size=size)
def set_contents_from_filename(self, filename, headers=None, replace=True,
                               cb=None, num_cb=10, policy=None, md5=None,
                               reduced_redundancy=None,
                               res_upload_handler=None,
                               if_generation=None):
    """
    Store an object in GS using the name of the Key object as the
    key in GS and the contents of the file named by 'filename'.
    See set_contents_from_file method for details about the parameters.

    :type filename: string
    :param filename: The name of the file that you want to put onto GS.

    :type headers: dict
    :param headers: Additional headers to pass along with the request to GS.

    :type replace: bool
    :param replace: If True, replaces the contents of the file if it
        already exists.

    :type cb: function
    :param cb: (optional) a callback function that will be called to
        report progress on the transfer. The callback should accept two
        integer parameters: bytes transmitted so far and total bytes to
        be transmitted.

    :type num_cb: int
    :param num_cb: (optional) If a callback is specified with the cb
        parameter, this parameter determines the granularity of the
        callback by defining the maximum number of times the callback
        will be called during the file transfer.

    :type policy: :class:`boto.gs.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the new key
        in GS.

    :type md5: tuple
    :param md5: (hexdigest, base64) MD5 pair in the format returned by
        compute_md5; supplied to avoid recomputing the checksum.

    :param reduced_redundancy: accepted only for signature compatibility
        with the S3 Key class; it is not used by this method.

    :type res_upload_handler: ResumableUploadHandler
    :param res_upload_handler: If provided, this handler will perform the
        upload.

    :type if_generation: int
    :param if_generation: (optional) If set to a generation number, the
        object will only be written to if its current generation number is
        this value. If set to the value 0, the object will only be written
        if it doesn't already exist.
    """
    # Clear out any previously computed md5 hashes, since we are setting
    # the content.
    self.md5 = None
    self.base64md5 = None
    # Use a context manager so the file handle is closed even when the
    # upload raises; the previous code leaked fp on error.
    with open(filename, 'rb') as fp:
        self.set_contents_from_file(fp, headers, replace, cb, num_cb,
                                    policy, md5, res_upload_handler,
                                    if_generation=if_generation)
def set_contents_from_string(self, s, headers=None, replace=True,
                             cb=None, num_cb=10, policy=None, md5=None,
                             if_generation=None):
    """Upload the string *s* as this key's contents.

    Unicode input is encoded as UTF-8 before upload. See
    set_contents_from_file for the meaning of ``headers``, ``replace``,
    ``cb``, ``num_cb``, ``policy`` and ``md5``. ``if_generation``
    restricts the write to the given object generation (0 means "only
    write if the object does not already exist").

    Returns whatever set_contents_from_file returns (the number of bytes
    written).
    """
    # New content invalidates any previously computed checksums.
    self.md5 = None
    self.base64md5 = None
    if isinstance(s, unicode):
        s = s.encode("utf-8")
    buf = StringIO.StringIO(s)
    result = self.set_contents_from_file(buf, headers, replace, cb,
                                         num_cb, policy, md5,
                                         if_generation=if_generation)
    buf.close()
    return result
def set_contents_from_stream(self, *args, **kwargs):
    """
    Store an object using the name of the Key object as the key in
    cloud and the contents of the (non-seekable) data stream pointed to
    by 'fp' as the contents.

    Because the stream's total size is unknown, Content-Size and
    Content-MD5 cannot be sent in the header; the delay of calculating
    an MD5 is avoided at the cost of being unable to verify the
    integrity of the uploaded data.

    All positional/keyword arguments (fp, headers, replace, cb, num_cb,
    policy, reduced_redundancy, size) are forwarded to the parent
    class's set_contents_from_stream, with one GS-specific addition:

    :type if_generation: int
    :param if_generation: (optional) If set to a generation number, the
        object will only be written to if its current generation number is
        this value. If set to the value 0, the object will only be written
        if it doesn't already exist.
    """
    if_generation = kwargs.pop('if_generation', None)
    if if_generation is not None:
        # Use `or {}` rather than .get('headers', {}): a caller that
        # explicitly passed headers=None would otherwise get back None
        # here and crash on the item assignment below.
        headers = kwargs.get('headers') or {}
        headers['x-goog-if-generation-match'] = str(if_generation)
        kwargs['headers'] = headers
    super(Key, self).set_contents_from_stream(*args, **kwargs)
def set_acl(self, acl_or_str, headers=None, generation=None,
            if_generation=None, if_metageneration=None):
    """Set the ACL for this object.

    ``acl_or_str`` may be a canned ACL string (see
    :data:`~.gs.acl.CannedACLStrings`) or an ACL object. ``generation``
    targets a specific generation of a versioned object (defaults to the
    current one), while ``if_generation``/``if_metageneration`` make the
    update conditional on the object's current (meta)generation.
    Additional request headers go in ``headers``. No-op when the key has
    no bucket.
    """
    if self.bucket is None:
        return
    self.bucket.set_acl(acl_or_str, self.name, headers=headers,
                        generation=generation,
                        if_generation=if_generation,
                        if_metageneration=if_metageneration)
def get_acl(self, headers=None, generation=None):
    """Returns the ACL of this object.

    :param dict headers: Additional headers to set during the request.
    :param int generation: If specified, gets the ACL for a specific
        generation of a versioned object. If not specified, the current
        version is returned.
    :rtype: :class:`.gs.acl.ACL` (or None when the key has no bucket)
    """
    # PEP 8: identity test instead of the original `!= None` (E711).
    if self.bucket is not None:
        return self.bucket.get_acl(self.name, headers=headers,
                                   generation=generation)
def get_xml_acl(self, headers=None, generation=None):
    """Return this object's ACL as an XML string.

    ``generation`` selects a specific generation of a versioned object
    (defaults to the current one); ``headers`` are extra request
    headers. Returns None when the key has no bucket.
    """
    if self.bucket is None:
        return None
    return self.bucket.get_xml_acl(self.name, headers=headers,
                                   generation=generation)
def set_xml_acl(self, acl_str, headers=None, generation=None,
                if_generation=None, if_metageneration=None):
    """Set this object's ACL from an XML document string.

    :param acl_str: string containing the ACL XML.
    :param headers: optional dict of additional request headers.
    :param generation: target a specific generation of a versioned
        object (defaults to the current one).
    :param if_generation: only update if the object's current generation
        matches this value.
    :param if_metageneration: only update if the object's current
        metageneration matches this value.
    """
    if self.bucket is None:
        return None
    return self.bucket.set_xml_acl(acl_str, self.name, headers=headers,
                                   generation=generation,
                                   if_generation=if_generation,
                                   if_metageneration=if_metageneration)
def set_canned_acl(self, acl_str, headers=None, generation=None,
                   if_generation=None, if_metageneration=None):
    """Set this object's ACL using a predefined (canned) value.

    :param acl_str: a canned ACL string; see
        :data:`~.gs.acl.CannedACLStrings`.
    :param headers: optional dict of additional request headers.
    :param generation: target a specific generation of a versioned
        object (defaults to the current one).
    :param if_generation: only update if the object's current generation
        matches this value.
    :param if_metageneration: only update if the object's current
        metageneration matches this value.
    """
    if self.bucket is None:
        return None
    return self.bucket.set_canned_acl(acl_str,
                                      self.name,
                                      headers=headers,
                                      generation=generation,
                                      if_generation=if_generation,
                                      if_metageneration=if_metageneration)
# Copyright (c) 2015 Tanium Inc
#
# Generated from console.wsdl version 0.0.1
#
#
from .base import BaseType
class Action(BaseType):
    """SOAP wrapper for the Tanium console ``action`` object type."""

    _soap_tag = 'action'

    def __init__(self):
        BaseType.__init__(
            self,
            simple_properties={'id': int,
                               'name': str,
                               'comment': str,
                               'start_time': str,
                               'expiration_time': str,
                               'status': str,
                               'skip_lock_flag': int,
                               'expire_seconds': int,
                               'distribute_seconds': int,
                               'creation_time': str,
                               'stopped_flag': int,
                               'cache_row_id': int},
            complex_properties={'target_group': Group,
                                'action_group': Group,
                                'package_spec': PackageSpec,
                                'user': User,
                                'approver': User,
                                'history_saved_question': SavedQuestion,
                                'saved_action': SavedAction,
                                'metadata': MetadataList},
            list_properties={},
        )
        # Every declared property starts out unset; values are filled in
        # when the object is deserialized from a SOAP response.
        for prop in ('id', 'name', 'comment', 'start_time',
                     'expiration_time', 'status', 'skip_lock_flag',
                     'expire_seconds', 'distribute_seconds',
                     'creation_time', 'stopped_flag', 'cache_row_id',
                     'target_group', 'action_group', 'package_spec',
                     'user', 'approver', 'history_saved_question',
                     'saved_action', 'metadata'):
            setattr(self, prop, None)
# These imports are intentionally placed AFTER the class definition: the
# generated modules reference each other, so importing at the bottom avoids
# circular-import failures while still binding the names at module level
# before Action.__init__ (which evaluates them) can run.
# NOTE(review): `Group` and `User` are each imported twice (the generator
# emits one import per property that uses the type) -- redundant but harmless.
from group import Group
from group import Group
from package_spec import PackageSpec
from user import User
from user import User
from saved_question import SavedQuestion
from saved_action import SavedAction
from metadata_list import MetadataList
"""Message and message content types.
Includes message types for different roles (e.g., human, AI, system), as well as types
for message content blocks (e.g., text, image, audio) and tool calls.
"""
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
Annotation,
AnyMessage,
AudioContentBlock,
Citation,
ContentBlock,
DataContentBlock,
FileContentBlock,
HumanMessage,
ImageContentBlock,
InputTokenDetails,
InvalidToolCall,
MessageLikeRepresentation,
NonStandardAnnotation,
NonStandardContentBlock,
OutputTokenDetails,
PlainTextContentBlock,
ReasoningContentBlock,
RemoveMessage,
ServerToolCall,
ServerToolCallChunk,
ServerToolResult,
SystemMessage,
TextContentBlock,
ToolCall,
ToolCallChunk,
ToolMessage,
UsageMetadata,
VideoContentBlock,
trim_messages,
)
# Public API of this module: an alphabetized re-export list kept in
# one-to-one correspondence with the ``langchain_core.messages`` import above.
__all__ = [
    "AIMessage",
    "AIMessageChunk",
    "Annotation",
    "AnyMessage",
    "AudioContentBlock",
    "Citation",
    "ContentBlock",
    "DataContentBlock",
    "FileContentBlock",
    "HumanMessage",
    "ImageContentBlock",
    "InputTokenDetails",
    "InvalidToolCall",
    "MessageLikeRepresentation",
    "NonStandardAnnotation",
    "NonStandardContentBlock",
    "OutputTokenDetails",
    "PlainTextContentBlock",
    "ReasoningContentBlock",
    "RemoveMessage",
    "ServerToolCall",
    "ServerToolCallChunk",
    "ServerToolResult",
    "SystemMessage",
    "TextContentBlock",
    "ToolCall",
    "ToolCallChunk",
    "ToolMessage",
    "UsageMetadata",
    "VideoContentBlock",
    "trim_messages",
]
"""
The more times we visit a hypothesis, the more we decrease its prior
TODO: Try version where penalty decreases with time!
TODO: This currently only extends LOTHypotheses, since we have to handle casting
inside of h0 to WrapperClass. HOWEVER, we could make WrapperClass just dispatch the right methods
if they don't exist
"""
from MetropolisHastings import MHSampler
from collections import Counter
class TabooMCMC(MHSampler):
    """An MCMC sampler that applies a penalty for re-visiting hypotheses.

    NOTE: requires storing a visit count for every hypothesis seen, so
    memory grows with the number of distinct hypotheses.
    """

    def __init__(self, h0, data, penalty=1.0, **kwargs):
        MHSampler.__init__(self, h0, data, **kwargs)
        self.penalty = penalty
        self.seen = Counter()

    def internal_sample(self, h):
        """Record one more visit to hypothesis h."""
        self.seen[h] += 1

    def compute_posterior(self, h, data):
        """Wrap the base posterior with a visit-count term on the prior.

        NOTE(review): the term ``seen[h] * penalty`` is *added* to the
        prior score; since the module docstring says repeated visits
        should *decrease* the prior, callers presumably pass a negative
        ``penalty`` -- confirm before relying on the default of 1.0.
        """
        visit_term = self.seen[h] * self.penalty
        base_prior, base_likelihood = MHSampler.compute_posterior(self, h, data)
        return base_prior + visit_term, base_likelihood
if __name__ == "__main__":
    # Demo: run the taboo sampler on the number-word learning example and
    # print each visited hypothesis's knower-level pattern and scores.
    # (Python 2 syntax: `print` statement.)
    from LOTlib.Examples.Number.Model.Utilities import generate_data, NumberExpression, grammar, get_knower_pattern
    from LOTlib.Miscellaneous import q
    data = generate_data(500)
    h0 = NumberExpression(grammar)
    for h in TabooMCMC(h0, data, steps=10000):
        print q(get_knower_pattern(h)), h.posterior_score, h.prior, h.likelihood, q(h)
imports:
    - { resource: ../config/default.yml }
    - { resource: services.yml }

framework:
    mailer:
        # Null transport: outgoing mail is discarded in this test config.
        dsn: 'null://null'
        envelope:
            sender: sender@example.org
            recipients:
                - redirected@example.org
    # Enable the profiler so tests can inspect collected mail events.
    profiler: ~
#!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: azure_rm_networkinterface
version_added: "2.1"
short_description: Manage Azure network interfaces.
description:
- Create, update or delete a network interface. When creating a network interface you must provide the name of an
existing virtual network, the name of an existing subnet within the virtual network. A default security group
and public IP address will be created automatically, or you can provide the name of an existing security group
and public IP address. See the examples below for more details.
options:
resource_group:
description:
- Name of a resource group where the network interface exists or will be created.
required: true
name:
description:
- Name of the network interface.
required: true
state:
description:
- Assert the state of the network interface. Use 'present' to create or update an interface and
'absent' to delete an interface.
default: present
choices:
- absent
- present
required: false
location:
description:
- Valid azure location. Defaults to location of the resource group.
default: resource_group location
required: false
virtual_network_name:
description:
- Name of an existing virtual network with which the network interface will be associated. Required
when creating a network interface.
aliases:
- virtual_network
required: false
default: null
subnet_name:
description:
- Name of an existing subnet within the specified virtual network. Required when creating a network
interface
aliases:
- subnet
required: false
default: null
os_type:
description:
- Determines any rules to be added to a default security group. When creating a network interface, if no
security group name is provided, a default security group will be created. If the os_type is 'Windows',
a rule will be added allowing RDP access. If the os_type is 'Linux', a rule allowing SSH access will be
added.
choices:
- Windows
- Linux
default: Linux
required: false
private_ip_address:
description:
- Valid IPv4 address that falls within the specified subnet.
required: false
private_ip_allocation_method:
description:
- "Specify whether or not the assigned IP address is permanent. NOTE: when creating a network interface
specifying a value of 'Static' requires that a private_ip_address value be provided. You can update
the allocation method to 'Static' after a dynamic private ip address has been assigned."
default: Dynamic
choices:
- Dynamic
- Static
required: false
public_ip:
description:
- When creating a network interface, if no public IP address name is provided a default public IP
address will be created. Set to false, if you do not want a public IP address automatically created.
default: true
required: false
public_ip_address_name:
description:
- Name of an existing public IP address object to associate with the security group.
aliases:
- public_ip_address
- public_ip_name
required: false
default: null
public_ip_allocation_method:
description:
- If a public_ip_address_name is not provided, a default public IP address will be created. The allocation
method determines whether or not the public IP address assigned to the network interface is permanent.
choices:
- Dynamic
- Static
default: Dynamic
required: false
security_group_name:
description:
- Name of an existing security group with which to associate the network interface. If not provided, a
default security group will be created.
aliases:
- security_group
required: false
default: null
open_ports:
description:
- When a default security group is created for a Linux host a rule will be added allowing inbound TCP
connections to the default SSH port 22, and for a Windows host rules will be added allowing inbound
access to RDP ports 3389 and 5986. Override the default ports by providing a list of open ports.
type: list
required: false
default: null
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Create a network interface with minimal parameters
azure_rm_networkinterface:
name: nic001
resource_group: Testing
virtual_network_name: vnet001
subnet_name: subnet001
- name: Create a network interface with private IP address only (no Public IP)
azure_rm_networkinterface:
name: nic001
resource_group: Testing
virtual_network_name: vnet001
subnet_name: subnet001
public_ip: no
- name: Create a network interface for use in a Windows host (opens RDP port) with custom RDP port
azure_rm_networkinterface:
name: nic002
resource_group: Testing
virtual_network_name: vnet001
subnet_name: subnet001
os_type: Windows
rdp_port: 3399
- name: Create a network interface using existing security group and public IP
azure_rm_networkinterface:
name: nic003
resource_group: Testing
virtual_network_name: vnet001
subnet_name: subnet001
security_group_name: secgroup001
public_ip_address_name: publicip001
- name: Delete network interface
azure_rm_networkinterface:
resource_group: Testing
name: nic003
state: absent
'''
RETURN = '''
state:
description: The current state of the network interface.
returned: always
type: dict
sample: {
"dns_settings": {
"applied_dns_servers": [],
"dns_servers": [],
"internal_dns_name_label": null,
"internal_fqdn": null
},
"enable_ip_forwarding": false,
"etag": 'W/"be115a43-2148-4545-a324-f33ad444c926"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/nic003",
"ip_configuration": {
"name": "default",
"private_ip_address": "10.1.0.10",
"private_ip_allocation_method": "Static",
"public_ip_address": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/publicIPAddresses/publicip001",
"name": "publicip001"
},
"subnet": {}
},
"location": "eastus2",
"mac_address": null,
"name": "nic003",
"network_security_group": {},
"primary": null,
"provisioning_state": "Succeeded",
"tags": null,
"type": "Microsoft.Network/networkInterfaces"
}
'''
from ansible.module_utils.basic import *
from ansible.module_utils.azure_rm_common import *
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.network.models import NetworkInterface, NetworkInterfaceIPConfiguration, Subnet, \
PublicIPAddress, NetworkSecurityGroup
except ImportError:
# This is handled in azure_rm_common
pass
def nic_to_dict(nic):
    """Serialize an Azure NetworkInterface SDK object into a plain dict.

    Only the first entry of ``nic.ip_configurations`` is reported, matching
    the single-ip-configuration model this module manages.  Nested resources
    (security group, subnet, public IP) become empty dicts when absent.
    """
    primary_config = nic.ip_configurations[0]
    result = dict(
        id=nic.id,
        name=nic.name,
        type=nic.type,
        location=nic.location,
        tags=nic.tags,
        network_security_group=dict(),
        ip_configuration=dict(
            name=primary_config.name,
            private_ip_address=primary_config.private_ip_address,
            private_ip_allocation_method=primary_config.private_ip_allocation_method,
            subnet=dict(),
            public_ip_address=dict(),
        ),
        dns_settings=dict(
            dns_servers=nic.dns_settings.dns_servers,
            applied_dns_servers=nic.dns_settings.applied_dns_servers,
            internal_dns_name_label=nic.dns_settings.internal_dns_name_label,
            internal_fqdn=nic.dns_settings.internal_fqdn
        ),
        mac_address=nic.mac_address,
        primary=nic.primary,
        enable_ip_forwarding=nic.enable_ip_forwarding,
        provisioning_state=nic.provisioning_state,
        etag=nic.etag,
    )
    nsg = nic.network_security_group
    if nsg:
        # Resource names are recovered from the ARM resource id path.
        nsg_ids = azure_id_to_dict(nsg.id)
        result['network_security_group']['id'] = nsg.id
        result['network_security_group']['name'] = nsg_ids['networkSecurityGroups']
    if primary_config.subnet:
        subnet_ids = azure_id_to_dict(primary_config.subnet.id)
        result['ip_configuration']['subnet']['id'] = primary_config.subnet.id
        result['ip_configuration']['subnet']['virtual_network_name'] = subnet_ids['virtualNetworks']
        result['ip_configuration']['subnet']['name'] = subnet_ids['subnets']
    if primary_config.public_ip_address:
        pip_ids = azure_id_to_dict(primary_config.public_ip_address.id)
        result['ip_configuration']['public_ip_address']['id'] = primary_config.public_ip_address.id
        result['ip_configuration']['public_ip_address']['name'] = pip_ids['publicIPAddresses']
    return result
class AzureRMNetworkInterface(AzureRMModuleBase):
    """Ansible module implementation managing an Azure network interface.

    Creates, updates or deletes a NIC in a resource group, optionally
    attaching an existing (or auto-created default) network security group
    and public IP address.  Instantiation triggers execution via the
    AzureRMModuleBase constructor.
    """

    def __init__(self):
        self.module_arg_spec = dict(
            resource_group=dict(type='str', required=True),
            name=dict(type='str', required=True),
            location=dict(type='str'),
            security_group_name=dict(type='str', aliases=['security_group']),
            state=dict(default='present', choices=['present', 'absent']),
            private_ip_address=dict(type='str'),
            private_ip_allocation_method=dict(type='str', choices=['Dynamic', 'Static'], default='Dynamic'),
            public_ip_address_name=dict(type='str', aliases=['public_ip_address', 'public_ip_name']),
            public_ip=dict(type='bool', default=True),
            subnet_name=dict(type='str', aliases=['subnet']),
            virtual_network_name=dict(type='str', aliases=['virtual_network']),
            os_type=dict(type='str', choices=['Windows', 'Linux'], default='Linux'),
            open_ports=dict(type='list'),
            public_ip_allocation_method=dict(type='str', choices=['Dynamic', 'Static'], default='Dynamic'),
        )

        # Placeholders; exec_module() assigns the actual parameter values.
        # (The duplicate security_group_name assignment from the original
        # has been removed.)
        self.resource_group = None
        self.name = None
        self.location = None
        self.security_group_name = None
        self.private_ip_address = None
        self.private_ip_allocation_method = None
        self.public_ip_address_name = None
        self.state = None
        self.subnet_name = None
        self.tags = None
        self.virtual_network_name = None
        self.os_type = None
        self.open_ports = None
        self.public_ip_allocation_method = None
        self.public_ip = None

        self.results = dict(
            changed=False,
            state=dict(),
        )

        super(AzureRMNetworkInterface, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                      supports_check_mode=True)

    def exec_module(self, **kwargs):
        """Main execution path.

        Compares the requested state with the existing NIC (if any),
        records differences into self.results, and — unless running in
        check mode — performs the create/update/delete.

        :return: self.results dict with 'changed' flag and NIC 'state'
        """
        # list() wrapper: dict.keys() returns a view on Python 3 and cannot
        # be concatenated to a list directly (was a py2-only idiom).
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            setattr(self, key, kwargs[key])

        results = dict()
        changed = False
        nic = None
        subnet = None
        nsg = None
        pip = None

        resource_group = self.get_resource_group(self.resource_group)
        if not self.location:
            # Default the NIC location to the resource group's location.
            self.location = resource_group.location

        if self.state == 'present':
            # subnet and virtual network must be supplied as a pair.
            if self.virtual_network_name and not self.subnet_name:
                self.fail("Parameter error: a subnet is required when passing a virtual_network_name.")

            if self.subnet_name and not self.virtual_network_name:
                self.fail("Parameter error: virtual_network_name is required when passing a subnet value.")

            if self.virtual_network_name and self.subnet_name:
                subnet = self.get_subnet(self.virtual_network_name, self.subnet_name)

            if self.public_ip_address_name:
                pip = self.get_public_ip_address(self.public_ip_address_name)

            if self.security_group_name:
                nsg = self.get_security_group(self.security_group_name)

        try:
            self.log('Fetching network interface {0}'.format(self.name))
            nic = self.network_client.network_interfaces.get(self.resource_group, self.name)

            self.log('Network interface {0} exists'.format(self.name))
            self.check_provisioning_state(nic, self.state)
            results = nic_to_dict(nic)
            self.log(results, pretty_print=True)

            if self.state == 'present':
                # Merge requested settings into 'results', flagging changes.
                update_tags, results['tags'] = self.update_tags(results['tags'])
                if update_tags:
                    changed = True

                if self.private_ip_address:
                    if results['ip_configuration']['private_ip_address'] != self.private_ip_address:
                        self.log("CHANGED: network interface {0} private ip".format(self.name))
                        changed = True
                        results['ip_configuration']['private_ip_address'] = self.private_ip_address

                if self.public_ip_address_name:
                    if results['ip_configuration']['public_ip_address'].get('id') != pip.id:
                        self.log("CHANGED: network interface {0} public ip".format(self.name))
                        changed = True
                        results['ip_configuration']['public_ip_address']['id'] = pip.id
                        results['ip_configuration']['public_ip_address']['name'] = pip.name

                if self.security_group_name:
                    if results['network_security_group'].get('id') != nsg.id:
                        self.log("CHANGED: network interface {0} network security group".format(self.name))
                        changed = True
                        results['network_security_group']['id'] = nsg.id
                        results['network_security_group']['name'] = nsg.name

                if self.private_ip_allocation_method:
                    if results['ip_configuration']['private_ip_allocation_method'] != self.private_ip_allocation_method:
                        self.log("CHANGED: network interface {0} private ip allocation".format(self.name))
                        changed = True
                        results['ip_configuration']['private_ip_allocation_method'] = self.private_ip_allocation_method
                        if self.private_ip_allocation_method == 'Dynamic':
                            # Dynamic allocation: Azure assigns the address.
                            results['ip_configuration']['private_ip_address'] = None

                if self.subnet_name:
                    if results['ip_configuration']['subnet'].get('id') != subnet.id:
                        changed = True
                        self.log("CHANGED: network interface {0} subnet".format(self.name))
                        results['ip_configuration']['subnet']['id'] = subnet.id
                        results['ip_configuration']['subnet']['name'] = subnet.name
                        results['ip_configuration']['subnet']['virtual_network_name'] = self.virtual_network_name

            elif self.state == 'absent':
                self.log("CHANGED: network interface {0} exists but requested state is 'absent'".format(self.name))
                changed = True
        except CloudError:
            # NIC not found; only a change if it should exist.
            self.log('Network interface {0} does not exist'.format(self.name))
            if self.state == 'present':
                self.log("CHANGED: network interface {0} does not exist but requested state is "
                         "'present'".format(self.name))
                changed = True

        self.results['changed'] = changed
        self.results['state'] = results

        if self.check_mode:
            return self.results

        if changed:
            if self.state == 'present':
                if not nic:
                    # create network interface
                    self.log("Creating network interface {0}.".format(self.name))

                    # check required parameters
                    if not self.subnet_name:
                        self.fail("parameter error: subnet_name required when creating a network interface.")

                    if not self.virtual_network_name:
                        self.fail("parameter error: virtual_network_name required when creating a network interface.")

                    if not self.security_group_name:
                        # create default security group
                        nsg = self.create_default_securitygroup(self.resource_group, self.location, self.name,
                                                                self.os_type, self.open_ports)

                    if not pip and self.public_ip:
                        # create a default public_ip
                        pip = self.create_default_pip(self.resource_group, self.location, self.name,
                                                      self.public_ip_allocation_method)

                    nic = NetworkInterface(
                        location=self.location,
                        tags=self.tags,
                        ip_configurations=[
                            NetworkInterfaceIPConfiguration(
                                private_ip_allocation_method=self.private_ip_allocation_method,
                            )
                        ]
                    )
                    nic.ip_configurations[0].subnet = Subnet(id=subnet.id)
                    nic.ip_configurations[0].name = 'default'
                    nic.network_security_group = NetworkSecurityGroup(id=nsg.id,
                                                                      location=nsg.location,
                                                                      resource_guid=nsg.resource_guid)
                    if self.private_ip_address:
                        nic.ip_configurations[0].private_ip_address = self.private_ip_address

                    if pip:
                        nic.ip_configurations[0].public_ip_address = PublicIPAddress(
                            id=pip.id,
                            location=pip.location,
                            resource_guid=pip.resource_guid)
                else:
                    # Update: rebuild the NIC model from the merged 'results'.
                    self.log("Updating network interface {0}.".format(self.name))
                    nic = NetworkInterface(
                        id=results['id'],
                        location=results['location'],
                        tags=results['tags'],
                        ip_configurations=[
                            NetworkInterfaceIPConfiguration(
                                private_ip_allocation_method=
                                results['ip_configuration']['private_ip_allocation_method']
                            )
                        ]
                    )
                    subnet = self.get_subnet(results['ip_configuration']['subnet']['virtual_network_name'],
                                             results['ip_configuration']['subnet']['name'])
                    nic.ip_configurations[0].subnet = Subnet(id=subnet.id)
                    nic.ip_configurations[0].name = results['ip_configuration']['name']

                    if results['ip_configuration'].get('private_ip_address'):
                        nic.ip_configurations[0].private_ip_address = results['ip_configuration']['private_ip_address']

                    if results['ip_configuration']['public_ip_address'].get('id'):
                        pip = \
                            self.get_public_ip_address(results['ip_configuration']['public_ip_address']['name'])
                        nic.ip_configurations[0].public_ip_address = PublicIPAddress(
                            id=pip.id,
                            location=pip.location,
                            resource_guid=pip.resource_guid)

                    if results['network_security_group'].get('id'):
                        nsg = self.get_security_group(results['network_security_group']['name'])
                        nic.network_security_group = NetworkSecurityGroup(id=nsg.id,
                                                                          location=nsg.location,
                                                                          resource_guid=nsg.resource_guid)

                # See what actually gets sent to the API
                request = self.serialize_obj(nic, 'NetworkInterface')
                self.log(request, pretty_print=True)

                self.results['state'] = self.create_or_update_nic(nic)
            elif self.state == 'absent':
                self.log('Deleting network interface {0}'.format(self.name))
                self.delete_nic()

        return self.results

    def create_or_update_nic(self, nic):
        """Send the NIC model to Azure and return the resulting state dict."""
        try:
            poller = self.network_client.network_interfaces.create_or_update(self.resource_group, self.name, nic)
            new_nic = self.get_poller_result(poller)
        except Exception as exc:
            self.fail("Error creating or updating network interface {0} - {1}".format(self.name, str(exc)))
        return nic_to_dict(new_nic)

    def delete_nic(self):
        """Delete the NIC, blocking until the operation finishes."""
        try:
            poller = self.network_client.network_interfaces.delete(self.resource_group, self.name)
            self.get_poller_result(poller)
        except Exception as exc:
            self.fail("Error deleting network interface {0} - {1}".format(self.name, str(exc)))
        # Delete doesn't return anything. If we get this far, assume success
        self.results['state']['status'] = 'Deleted'
        return True

    def get_public_ip_address(self, name):
        """Fetch an existing public IP address resource by name."""
        self.log("Fetching public ip address {0}".format(name))
        try:
            public_ip = self.network_client.public_ip_addresses.get(self.resource_group, name)
        except Exception as exc:
            # Fix: report the requested name (was self.name, which hid which
            # public IP lookup actually failed during updates).
            self.fail("Error: fetching public ip address {0} - {1}".format(name, str(exc)))
        return public_ip

    def get_subnet(self, vnet_name, subnet_name):
        """Fetch an existing subnet within a virtual network."""
        self.log("Fetching subnet {0} in virtual network {1}".format(subnet_name, vnet_name))
        try:
            subnet = self.network_client.subnets.get(self.resource_group, vnet_name, subnet_name)
        except Exception as exc:
            self.fail("Error: fetching subnet {0} in virtual network {1} - {2}".format(subnet_name,
                                                                                       vnet_name,
                                                                                       str(exc)))
        return subnet

    def get_security_group(self, name):
        """Fetch an existing network security group by name."""
        self.log("Fetching security group {0}".format(name))
        try:
            nsg = self.network_client.network_security_groups.get(self.resource_group, name)
        except Exception as exc:
            self.fail("Error: fetching network security group {0} - {1}.".format(name, str(exc)))
        return nsg
def main():
    # Instantiating the module class runs the whole module; by the old-style
    # Ansible pattern the AzureRMModuleBase constructor presumably drives
    # exec_module() -- confirm in azure_rm_common.
    AzureRMNetworkInterface()
if __name__ == '__main__':
    main()
# -*- coding: utf-8 -*-
import locale
import os
from os import path
from flask import Flask, g
from flask_assets import Bundle, Environment
from flask_principal import Principal, identity_loaded
from nbx.jinjafilters import (cuitfmt_filter, dateformat_filter,
moneyfmt_filter, timeago_filter)
from nbx.models import User, db
from nbx.views import account, order, product, supplier
from webassets.filter import get_filter
# Adopt the host's default locale -- presumably consumed by the money/date
# Jinja filters imported above; verify in nbx.jinjafilters.
locale.setlocale(locale.LC_ALL, '')
__all__ = ['create_app']
# Fallback Flask application name when create_app() receives none.
DEFAULT_APPNAME = 'nbx'
def create_app(config=None, app_name=None):
    """Application factory: build the Flask app and wire every subsystem.

    :param config: optional configuration object passed to configure_app
    :param app_name: Flask application name; defaults to DEFAULT_APPNAME
    :return: the fully configured Flask application
    """
    app = Flask(app_name if app_name is not None else DEFAULT_APPNAME)
    configure_app(app, config)
    # Remaining subsystems only need the app itself; order preserved.
    for setup in (configure_jinja2, configure_webassets, configure_db,
                  configure_identity, configure_blueprints):
        setup(app)
    return app
def configure_app(app, config=None):
    """Load the Flask configuration and optionally enable the debug toolbar.

    Precedence: an explicit ``config`` object, then an importable
    ``localconfig.LocalConfig``, then an environment-selected default.
    """

    def _load_env_default_config():
        # DEV/TEST environment variables pick among the packaged profiles.
        if os.getenv('DEV') == 'yes':
            app.config.from_object('nbx.config.DevelopmentConfig')
            app.logger.info("Config: Development")
        elif os.getenv('TEST') == 'yes':
            app.config.from_object('nbx.config.TestConfig')
            app.logger.info("Config: Test")
        else:
            app.config.from_object('nbx.config.ProductionConfig')
            app.logger.info("Config: Production")

    if config is not None:
        app.config.from_object(config)
    else:
        # A developer-provided localconfig module wins over env defaults.
        try:
            app.config.from_object('localconfig.LocalConfig')
        except ImportError:
            _load_env_default_config()

    if os.getenv('FLASK_DEBUG') in ('1', 'True', 'TRUE'):
        # Imported lazily: flask_debugtoolbar is a debug-only dependency.
        from flask_debugtoolbar import DebugToolbarExtension
        DebugToolbarExtension(app)
def configure_jinja2(app):
    """Register Jinja2 extensions and the project's custom template filters."""
    # Jinja2 extensions
    for extension in ('jinja2.ext.i18n',
                      'jinja2.ext.do',
                      'jinja2.ext.loopcontrols'):
        app.jinja_options['extensions'].append(extension)
    # Jinja2 filters
    app.jinja_env.filters.update({
        'dateformat': dateformat_filter,
        'timeago': timeago_filter,
        'moneyfmt': moneyfmt_filter,
        'cuitfmt': cuitfmt_filter,
    })
def configure_webassets(app):
    """Define and register the site's JavaScript and CSS asset bundles."""
    assets = Environment(app)
    out_dir = app.config.get('ASSETS_OUTPUT_DIR')

    # Ensure the bundle output directory exists before webassets writes to it.
    full_out_dir = path.join(app.static_folder, out_dir)
    if not path.exists(full_out_dir):
        app.logger.info("Creating assets output folder")
        os.mkdir(full_out_dir)

    bootstrap_js = Bundle(
        'js/bootstrap/transition.js',
        'js/bootstrap/alert.js',
        'js/bootstrap/button.js',
        'js/bootstrap/carousel.js',
        'js/bootstrap/collapse.js',
        'js/bootstrap/dropdown.js',
        'js/bootstrap/modal.js',
        'js/bootstrap/tab.js',
        'js/bootstrap/affix.js',
        'js/bootstrap/scrollspy.js',
        'js/bootstrap/tooltip.js',
        'js/bootstrap/popover.js',
    )
    all_js = Bundle(
        Bundle('js/libs/jquery-3.1.1.js'),
        Bundle('js/libs/jquery.dataTables.js',
               'js/libs/dataTables.bootstrap.js'),
        bootstrap_js,
        filters='jsmin',
        output=path.join(out_dir, 'js_bundle.js')
    )

    # master.scss is compiled with 'style' on the include path; depends=
    # triggers a rebuild whenever any .scss partial changes.
    sass = get_filter('scss', load_paths=['style'])
    all_css = Bundle(
        Bundle('style/master.scss',
               filters=sass,
               depends=('style/*.scss',)),
        'style/datatables/dataTables.bootstrap.css',
        filters='autoprefixer, cssmin',
        output=path.join(out_dir, 'css_bundle.css'),
    )

    assets.register('js_bundle', all_js)
    assets.register('css_bundle', all_css)
def configure_db(app):
    # Bind the shared db object (imported from nbx.models) to this app --
    # the standard Flask extension init_app pattern.
    db.init_app(app)
def configure_identity(app):
    # Wire Flask-Principal into the app and keep g.user in sync.
    Principal(app)
    @identity_loaded.connect_via(app)
    def on_identity_loaded(sender, identity):
        # Resolve the User record whenever an identity is loaded.
        g.user = User.query.from_identity(identity)
    @app.before_request
    def authenticate():
        # NOTE(review): this resets g.user from g.identity.user (or None) on
        # every request; confirm it does not clobber the value assigned by
        # on_identity_loaded above.
        g.user = getattr(g.identity, 'user', None)
def configure_blueprints(app):
    """Mount each feature blueprint under its URL prefix."""
    mounted = (
        (supplier, '/suppliers'),
        (product, '/products'),
        (order, '/orders'),
        (account, '/accounts'),
    )
    for blueprint, prefix in mounted:
        app.register_blueprint(blueprint, url_prefix=prefix)
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// Printf variants that place their output in a C++ string.
//
// Usage:
// string result = strings::Printf("%d %s\n", 10, "hello");
// strings::Appendf(&result, "%d %s\n", 20, "there");
#ifndef TENSORFLOW_CORE_PLATFORM_STRINGPRINTF_H_
#define TENSORFLOW_CORE_PLATFORM_STRINGPRINTF_H_
#include <stdarg.h>
#include <string>
#include "tensorflow/core/platform/macros.h"
#include "tensorflow/core/platform/types.h"
#include "tsl/platform/stringprintf.h"
namespace tensorflow {
namespace strings {
// NOLINTBEGIN(misc-unused-using-decls)
using tsl::strings::Appendf;
using tsl::strings::Printf;
// NOLINTEND(misc-unused-using-decls)
} // namespace strings
} // namespace tensorflow
#endif // TENSORFLOW_CORE_PLATFORM_STRINGPRINTF_H_ | c | github | https://github.com/tensorflow/tensorflow | tensorflow/core/platform/stringprintf.h |
"""
Base classes supporting Libvirt Sandbox (lxc) container testing
:copyright: 2013 Red Hat Inc.
"""
import logging
import signal
import aexpect
class SandboxException(Exception):
    """
    Basic exception class for problems occurring in SandboxBase or subclasses

    :param message: human-readable description of the problem
    """

    def __init__(self, message):
        # Pass the message to Exception so args, repr() and pickling all
        # carry it (the original called super().__init__() with no args).
        super(SandboxException, self).__init__(message)
        # Kept for backward compatibility with callers reading .message.
        self.message = message

    def __str__(self):
        return self.message
# This is to allow us to alter back-end session management w/o affecting
# sandbox subclasses
class SandboxSession(object):
    """
    Connection instance to asynchronous I/O redirector process
    """

    # Assist with warning on re-use
    used = False

    def __init__(self):
        self.session = None  # created by new_session() / open_session()

    @property
    def connected(self):
        """
        Represents True/False value if background process was created/opened
        """
        return self.session is not None

    @property
    def session_id(self):
        """
        Returns unique & persistent identifier for the background process
        """
        if self.connected:
            return self.session.get_id()
        else:
            raise SandboxException("Can't get id of non-running sandbox "
                                   "session")

    def new_session(self, command):
        """
        Create and set new opaque session object

        :param command: command line to launch in the background
        """
        # Allow this to be called more than once w/o consequence
        self.close_session(warn_if_nonexist=self.used)
        self.session = aexpect.Expect(command, auto_close=False)
        self.used = True

    def open_session(self, a_id):
        """
        Restore connection to existing session identified by a_id
        """
        # Allow this to be called more than once w/o consequence
        self.close_session(warn_if_nonexist=self.used)
        # Fix: store the re-attached session; previously the aexpect.Expect
        # result was discarded, leaving the instance disconnected.
        self.session = aexpect.Expect(a_id=a_id)
        self.used = True

    def close_session(self, warn_if_nonexist=True):
        """
        Finalize assigned opaque session object
        """
        # Allow this to be called more than once w/o consequence
        if self.connected:
            self.session.close()
        else:
            if warn_if_nonexist:
                logging.warning("Closing nonexisting sandbox session")

    def kill_session(self, sig=signal.SIGTERM):
        """
        Send a signal to the opaque session object
        """
        if self.connected:
            self.session.kill(sig=sig)
        else:
            raise SandboxException("Can't send signal to inactive sandbox "
                                   "session")

    def send(self, a_string):
        """Send a_string to session"""
        if self.connected:
            self.session.send(a_string)
        else:
            raise SandboxException("Can't send to an inactive sandbox session")

    def recv(self):
        """Return combined stdout/stderr output received so far"""
        if self.connected:
            return self.session.get_output()
        else:
            raise SandboxException("Can't get output from finalized sandbox "
                                   "session")

    def recvout(self):
        """Return just stdout output"""
        # FIXME: aexpect combines stdout and stderr in a single pipe :(
        raise NotImplementedError

    def recverr(self):
        """Return just stderr output"""
        # FIXME: aexpect combines stdout and stderr in a single pipe :(
        raise NotImplementedError

    def exit_code(self):
        """Block, and return exit code from session"""
        if self.connected:
            return self.session.get_status()
        else:
            raise SandboxException("Can't get exit code from finalized sandbox "
                                   "session")

    def is_running(self):
        """Return True if exit_code() would block, None when disconnected"""
        if self.connected:
            return self.session.is_alive()
        else:
            return None

    def auto_clean(self, boolean):
        """Make session cleanup on GC if True"""
        if self.connected:
            self.session.auto_close = boolean
        else:
            raise SandboxException("Can't set auto_clean on disconnected "
                                   "sandbox session")
class SandboxBase(object):
    """
    Base operations for sandboxed command
    """

    # Provide unique instance number for each sandbox (tracked per class)
    instances = None

    def __init__(self, params):
        """
        Create a new sandbox interface instance based on this type from params

        :param params: object supporting object_params() per-class overrides
                       (presumably a virttest Params instance -- confirm)
        """
        # Un-pickling instances doesn't call init again
        if self.__class__.instances is None:
            self.__class__.instances = 1
        else:
            self.__class__.instances += 1
        # store a copy for use to avoid referencing class attribute
        self.identifier = self.__class__.instances
        # Allow global 'lvsb_*' keys to be overridden for specific subclass
        self.params = params.object_params(self.__class__.__name__)
        self.options = None  # opaque value consumed by make_command()
        # Aexpect has some well hidden bugs, private attribute hides
        # interface in case it changes from fixes or gets swapped out
        # entirely.
        self._session = SandboxSession()

    # Allow running sandboxes to persist across multiple tests if needed
    def __getstate__(self):
        """Serialize instance for pickling"""
        # Regular dictionary format for now, but could change later
        state = {'params': self.params,
                 'identifier': self.identifier,
                 'options': self.options}
        # Critical info. to re-connect to session when un-pickle
        if self._session.connected:
            state['session_id'] = self._session.session_id
        return state

    def __setstate__(self, state):
        """Actualize instance from state"""
        for key in ('identifier', 'params', 'options'):
            setattr(self, key, state[key])
        # Fix: dicts have no haskey() method (py2's has_key() is also gone in
        # py3); additionally, always rebuild _session so instances unpickled
        # without a running session still get a usable disconnected session.
        self._session = SandboxSession()
        if 'session_id' in state:
            self._session.open_session(state['session_id'])

    def run(self, extra=None):
        """
        Launch new sandbox as asynchronous background sandbox process

        :param extra: String of extra command-line to use but not store
        """
        logging.debug("Launching %s", self.make_sandbox_command_line())
        self._session.new_session(self.make_sandbox_command_line(extra))

    def stop(self):
        """Destroy but don't finalize asynchronous background sandbox process"""
        self._session.kill_session()

    def fini(self):
        """
        Finalize asynchronous background sandbox process (destroys state!)
        """
        self._session.close_session()

    def send(self, data):
        """Send data to asynchronous background sandbox process"""
        self._session.send(data)

    def recv(self):
        """
        Return stdout and stderr from asynchronous background sandbox process
        """
        return self._session.recv()

    def recvout(self):
        """
        Return only stdout from asynchronous background sandbox process
        """
        return self._session.recvout()

    def recverr(self):
        """
        Return only stderr from asynchronous background sandbox process
        """
        return self._session.recverr()

    def running(self):
        """
        Return True/False if asynchronous background sandbox process executing
        """
        return self._session.is_running()

    def exit_code(self):
        """
        Block until asynchronous background sandbox process ends, returning code
        """
        return self._session.exit_code()

    def auto_clean(self, boolean):
        """
        Change behavior of asynchronous background sandbox process on __del__
        """
        self._session.auto_clean(boolean)

    def make_sandbox_command_line(self, extra=None):
        """
        Return the fully formed command-line for the sandbox using self.options
        """
        # These are the abstract methods subclasses must override
        raise NotImplementedError
class SandboxCommandBase(SandboxBase):
    """
    Connection to a single new or existing sandboxed command
    """

    # Params key naming the sandbox binary to execute
    BINARY_PATH_PARAM = 'virt_sandbox_binary'

    # Cache generated name first time it is requested
    _name = None

    def __init__(self, params, name=None):
        """
        Initialize sandbox-command with params and name, autogenerate if None
        """
        if name is not None:
            self._name = name
        super(SandboxCommandBase, self).__init__(params)

    def __getstate__(self):
        """Serialize instance for pickling"""
        state = super(SandboxCommandBase, self).__getstate__()
        state['name'] = self._name
        return state

    def __setstate__(self, state):
        """Actualize instance from state"""
        self._name = state.pop('name')
        super(SandboxCommandBase, self).__setstate__(state)

    def _get_name(self):
        """
        Represent a unique sandbox name generated from class and identifier
        """
        # Use shortest possible unique names for instances to be easier
        # to track and make name-comparison fast when there are 10000's
        # of sandboxes. Only use upper-case letters from class name along
        # with instance identifier attribute.
        if self._name is None:
            class_name = self.__class__.__name__
            # Fix: str.translate(None, deletechars) was a py2-only API;
            # strip the lowercase letters portably instead.
            class_initials = ''.join(char for char in class_name
                                     if char not in
                                     'abcdefghijklmnopqrstuvwxyz')
            self._name = "%s_%d" % (class_initials, self.identifier)
        return self._name

    @staticmethod
    def _set_name(instance, value):
        # Fix: property setters are called with (instance, value); the old
        # one-argument signature raised TypeError instead of the intended
        # SandboxException.  Also renamed from __set_name__, which collides
        # with the PEP 487 descriptor protocol on Python 3.6+.
        del instance, value  # not used
        raise SandboxException("Name is read-only")

    @staticmethod
    def _del_name(instance):
        # Fix: property deleters receive the instance (see _set_name).
        del instance  # not used
        raise SandboxException("Name is read-only")

    name = property(_get_name, _set_name, _del_name)

    @staticmethod
    def flaten_options(options):
        """
        Convert a list of tuples into space-separated options+argument string
        """
        result_list = []
        for option, argument in options:
            # positional argument
            if option is None:
                if argument is not None:
                    result_list.append(argument)
                # both empty, ignore
            else:  # option is not None
                # --flag
                if argument is None:
                    # Fix: the flag itself must be emitted (the original
                    # appended the None argument, dropping every flag).
                    result_list.append(option)
                else:  # argument is not None
                    # --option argument or -o argument
                    result_list.append("%s %s" % (option, argument))
        if len(result_list) > 0:
            return " " + " ".join(result_list)
        else:  # they were all (None, None)
            return ""

    def make_sandbox_command_line(self, extra=None):
        """Return entire command-line string needed to start sandbox"""
        command = self.params[self.BINARY_PATH_PARAM]  # mandatory param
        if self.options is not None:
            command += self.flaten_options(self.options)
        if extra is not None:
            command += ' ' + extra
        return command

    def add_optarg(self, option, argument):
        """
        Add an option with an argument into the list of command line options
        """
        if self.options is None:
            self.options = []
        self.options.append((option, argument))

    def add_flag(self, option):
        """
        Add a flag into the list of command line options
        """
        # Tuple encoding required for flaten_options()
        self.add_optarg(option, None)

    def add_pos(self, argument):
        """
        Add a positional option into the list of command line options
        """
        # Tuple encoding required for flaten_options()
        self.add_optarg(None, argument)

    def add_mm(self):
        """
        Append a -- to the end of the current option list
        """
        self.add_pos('--')

    def list_long_options(self):
        """
        Return a list of all long options with an argument
        """
        # Guard against positional entries where opt is None.
        return [opt for opt, arg in (self.options or [])
                if opt is not None and opt.startswith('--')
                and arg is not None]

    def list_short_options(self):
        """
        Return a list of all short options with an argument
        """
        result = []
        for opt, arg in (self.options or []):
            if arg is None or opt is None:
                continue  # flag or positional
            if len(opt) > 1 and opt[0] == '-' and opt[1] != '-':
                result.append(opt)
        # Fix: the original built this list but never returned it.
        return result

    def list_flags(self):
        """
        Return a list of all flags (options without arguments)
        """
        return [opt for opt, arg in (self.options or [])
                if opt is not None and opt.startswith('--') and arg is None]

    def list_pos(self):
        """
        Return a list of all positional arguments
        """
        return [arg for opt, arg in (self.options or []) if opt is None]
# Instances are similar to a list-of-lists: multiple kinds (classes) of
# multiple sandbox executions.
class TestSandboxes(object):
    """
    Aggregate manager class of SandboxCommandBase or subclass instances
    """

    # The class of each sandbox instance to operate on
    SANDBOX_TYPE = SandboxCommandBase

    def __init__(self, params, env):
        """
        Create instance(s) of sandbox from a command

        :param params: test parameters; 'lvsb_*' keys configure the sandboxes
        :param env: test environment, kept for subclass before/after hooks
        """
        # public attribute for access to each sandbox execution
        self.sandboxes = []
        # Each sandbox type will object_params() itself
        self.params = params
        # In case a subclass wants to interface with tests before/after
        self.env = env
        # Parse out aggregate manager class-specific params
        pop = self.params.object_params(self.__class__.__name__)
        # Allows iterating over all sandboxes e.g. with for_each()
        self.count = int(pop.get('lvsb_count', '1'))
        # Simple-case is all sandboxes on the local host
        self.uri = pop.get('lvsb_uri', 'lxc:///')
        # The command to run inside the sandbox
        self.command = pop.get('lvsb_command')

    def init_sandboxes(self):
        """
        Create self.count Sandbox instances
        """
        # self.sandboxes probably empty, can't use for_each()
        # Fix: xrange() does not exist on Python 3; range() works on both.
        for _ in range(self.count):
            self.sandboxes.append(self.SANDBOX_TYPE(self.params))

    def for_each(self, do_something, *args, **dargs):
        """
        Iterate over all sandboxes, calling do_something on each

        :param do_something: Called with the sandbox and *args, **dargs
        :return: list of per-sandbox return values
        """
        # Simplify making the same call to every running sandbox
        return [do_something(sandbox, *args, **dargs)
                for sandbox in self.sandboxes]

    def are_running(self):
        """
        Return the number of sandbox processes still running
        """
        running = 0
        for is_running in self.for_each(lambda sb: sb.running()):
            if is_running:
                running += 1
        return running

    def are_failed(self):
        """
        Return the number of sandbox processes with non-zero exit codes
        """
        # Warning, this will block if self.are_running() > 0
        failed = 0
        for exit_code in self.for_each(lambda sb: sb.exit_code()):
            if exit_code != 0:
                failed += 1
        return failed
## This file is part of Scapy
## Copyright (C) 2017 Maxence Tury
## This program is published under a GPLv2 license
"""
TLS 1.3 key exchange logic.
"""
from __future__ import print_function
import math
from scapy.config import conf, crypto_validator
from scapy.error import warning
from scapy.fields import *
from scapy.packet import Packet, Raw, Padding
from scapy.layers.tls.cert import PubKeyRSA, PrivKeyRSA
from scapy.layers.tls.session import _GenericTLSSessionInheritance
from scapy.layers.tls.basefields import _tls_version, _TLSClientVersionField
from scapy.layers.tls.extensions import TLS_Ext_Unknown, _tls_ext
from scapy.layers.tls.crypto.pkcs1 import pkcs_i2osp, pkcs_os2ip
from scapy.layers.tls.crypto.groups import (_tls_named_ffdh_groups,
_tls_named_curves, _ffdh_groups,
_tls_named_groups)
if conf.crypto_valid:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dh, ec
if conf.crypto_valid_advanced:
from cryptography.hazmat.primitives.asymmetric import x25519
class KeyShareEntry(Packet):
    """
    When building from scratch, we create a DH private key, and when
    dissecting, we create a DH public key. Default group is secp256r1.
    """
    # Key material is kept out of scapy's field machinery.
    __slots__ = ["privkey", "pubkey"]
    name = "Key Share Entry"
    fields_desc = [ShortEnumField("group", None, _tls_named_groups),
                   FieldLenField("kxlen", None, length_of="key_exchange"),
                   StrLenField("key_exchange", "",
                               length_from=lambda pkt: pkt.kxlen) ]
    def __init__(self, *args, **kargs):
        # Slots must be initialized before Packet.__init__ runs.
        self.privkey = None
        self.pubkey = None
        super(KeyShareEntry, self).__init__(*args, **kargs)
    def do_build(self):
        """
        We need this hack, else 'self' would be replaced by __iter__.next().
        """
        tmp = self.explicit
        self.explicit = True
        b = super(KeyShareEntry, self).do_build()
        self.explicit = tmp
        return b
    @crypto_validator
    def create_privkey(self):
        """
        This is called by post_build() for key creation.

        Generates a fresh private key for the selected group and stores the
        serialized public part in self.key_exchange.  x448 is not handled;
        x25519 needs the 'advanced' crypto backend.
        """
        if self.group in _tls_named_ffdh_groups:
            params = _ffdh_groups[_tls_named_ffdh_groups[self.group]][0]
            privkey = params.generate_private_key()
            self.privkey = privkey
            pubkey = privkey.public_key()
            # NOTE(review): for FFDH the public value is stored as the
            # integer y (not encoded bytes) -- confirm downstream consumers.
            self.key_exchange = pubkey.public_numbers().y
        elif self.group in _tls_named_curves:
            if _tls_named_curves[self.group] == "x25519":
                if conf.crypto_valid_advanced:
                    privkey = x25519.X25519PrivateKey.generate()
                    self.privkey = privkey
                    pubkey = privkey.public_key()
                    self.key_exchange = pubkey.public_bytes()
            elif _tls_named_curves[self.group] != "x448":
                curve = ec._CURVE_TYPES[_tls_named_curves[self.group]]()
                privkey = ec.generate_private_key(curve, default_backend())
                self.privkey = privkey
                pubkey = privkey.public_key()
                self.key_exchange = pubkey.public_numbers().encode_point()
    def post_build(self, pkt, pay):
        # Fill defaults (group secp256r1, generated key, computed length)
        # and re-serialize the fixed-size header.
        if self.group is None:
            self.group = 23      # secp256r1
        if self.key_exchange == "":
            try:
                self.create_privkey()
            except ImportError:
                # No crypto backend available; send the empty key as-is.
                pass
        if self.kxlen is None:
            self.kxlen = len(self.key_exchange)
        group = struct.pack("!H", self.group)
        kxlen = struct.pack("!H", self.kxlen)
        return group + kxlen + self.key_exchange + pay
    @crypto_validator
    def register_pubkey(self):
        # Deserialize the peer's public value into a cryptography-library
        # public key object; mirrors the branches in create_privkey().
        if self.group in _tls_named_ffdh_groups:
            params = _ffdh_groups[_tls_named_ffdh_groups[self.group]][0]
            pn = params.parameter_numbers()
            public_numbers = dh.DHPublicNumbers(self.key_exchange, pn)
            self.pubkey = public_numbers.public_key(default_backend())
        elif self.group in _tls_named_curves:
            if _tls_named_curves[self.group] == "x25519":
                if conf.crypto_valid_advanced:
                    import_point = x25519.X25519PublicKey.from_public_bytes
                    self.pubkey = import_point(self.key_exchange)
            elif _tls_named_curves[self.group] != "x448":
                curve = ec._CURVE_TYPES[_tls_named_curves[self.group]]()
                import_point = ec.EllipticCurvePublicNumbers.from_encoded_point
                public_numbers = import_point(curve, self.key_exchange)
                self.pubkey = public_numbers.public_key(default_backend())
    def post_dissection(self, r):
        # Best-effort: skip pubkey registration when crypto is unavailable.
        try:
            self.register_pubkey()
        except ImportError:
            pass
    def extract_padding(self, s):
        # No payload inside an entry; hand trailing bytes back to the parent
        # (e.g. the PacketListField holding multiple entries).
        return "", s
class TLS_Ext_KeyShare_CH(TLS_Ext_Unknown):
    """
    key_share extension as it appears in a ClientHello: a list of
    KeyShareEntry, one per offered group.
    """
    name = "TLS Extension - Key Share (for ClientHello)"
    fields_desc = [ShortEnumField("type", 0x28, _tls_ext),
                   ShortField("len", None),
                   FieldLenField("client_shares_len", None,
                                 length_of="client_shares"),
                   PacketListField("client_shares", [], KeyShareEntry,
                                   length_from=lambda pkt: pkt.client_shares_len)]

    def post_build(self, pkt, pay):
        if not self.tls_session.frozen:
            privshares = self.tls_session.tls13_client_privshares
            for kse in self.client_shares:
                if kse.privkey:
                    # Key the share on the _tls_named_groups name: that is
                    # the key TLS_Ext_KeyShare_SH looks up, and unlike
                    # _tls_named_curves it also covers FFDH groups.
                    group_name = _tls_named_groups[kse.group]
                    if group_name in privshares:
                        print("Group %s used twice in the same ClientHello!" % kse.group)
                        break
                    privshares[group_name] = kse.privkey
        return super(TLS_Ext_KeyShare_CH, self).post_build(pkt, pay)

    def post_dissection(self, r):
        if not self.tls_session.frozen:
            for kse in self.client_shares:
                if kse.pubkey:
                    pubshares = self.tls_session.tls13_client_pubshares
                    # Same keying convention as post_build() above.
                    group_name = _tls_named_groups[kse.group]
                    if group_name in pubshares:
                        print("Group %s used twice in the same ClientHello!" % kse.group)
                        break
                    pubshares[group_name] = kse.pubkey
        return super(TLS_Ext_KeyShare_CH, self).post_dissection(r)
class TLS_Ext_KeyShare_HRR(TLS_Ext_Unknown):
    # In a HelloRetryRequest the key_share extension carries only the group
    # the server wants the client to retry with -- no key material.
    name = "TLS Extension - Key Share (for HelloRetryRequest)"
    fields_desc = [ShortEnumField("type", 0x28, _tls_ext),
                   ShortField("len", None),
                   ShortEnumField("selected_group", None, _tls_named_groups)]
class TLS_Ext_KeyShare_SH(TLS_Ext_Unknown):
    """
    key_share extension as it appears in a ServerHello: exactly one
    KeyShareEntry for the group selected by the server.
    """
    name = "TLS Extension - Key Share (for ServerHello)"
    fields_desc = [ShortEnumField("type", 0x28, _tls_ext),
                   ShortField("len", None),
                   PacketField("server_share", None, KeyShareEntry)]

    def post_build(self, pkt, pay):
        """
        Store the server private share in the session and, if the matching
        client public share is known, derive the (EC)DHE shared secret.
        """
        if not self.tls_session.frozen and self.server_share.privkey:
            # if there is a privkey, we assume the crypto library is ok
            privshare = self.tls_session.tls13_server_privshare
            if len(privshare) > 0:
                print("Server key share was already stored...?")
            group_name = _tls_named_groups[self.server_share.group]
            privshare[group_name] = self.server_share.privkey

            if group_name in self.tls_session.tls13_client_pubshares:
                privkey = self.server_share.privkey
                pubkey = self.tls_session.tls13_client_pubshares[group_name]
                # .values() instead of the Python 2-only .itervalues();
                # the module otherwise uses Python 3-compatible idioms.
                if group_name in _tls_named_ffdh_groups.values():
                    pms = privkey.exchange(pubkey)
                elif group_name in _tls_named_curves.values():
                    if group_name == "x25519":
                        pms = privkey.exchange(pubkey)
                    else:
                        pms = privkey.exchange(ec.ECDH(), pubkey)
                self.tls_session.tls13_dhe_secret = pms
        return super(TLS_Ext_KeyShare_SH, self).post_build(pkt, pay)

    def post_dissection(self, r):
        """
        Mirror of post_build() for the dissection side: store the server
        public share and derive the secret from the client private share.
        """
        if not self.tls_session.frozen and self.server_share.pubkey:
            # if there is a pubkey, we assume the crypto library is ok
            pubshare = self.tls_session.tls13_server_pubshare
            if len(pubshare) > 0:
                print("Server key share was already stored...?")
            group_name = _tls_named_groups[self.server_share.group]
            pubshare[group_name] = self.server_share.pubkey

            if group_name in self.tls_session.tls13_client_privshares:
                pubkey = self.server_share.pubkey
                privkey = self.tls_session.tls13_client_privshares[group_name]
                if group_name in _tls_named_ffdh_groups.values():
                    pms = privkey.exchange(pubkey)
                elif group_name in _tls_named_curves.values():
                    if group_name == "x25519":
                        pms = privkey.exchange(pubkey)
                    else:
                        pms = privkey.exchange(ec.ECDH(), pubkey)
                self.tls_session.tls13_dhe_secret = pms
        return super(TLS_Ext_KeyShare_SH, self).post_dissection(r)
# Dispatch table for the key_share extension: the wire format differs per
# handshake message. NOTE(review): keys 1/2/6 presumably match the handshake
# msgtype of ClientHello/ServerHello/HelloRetryRequest -- confirm at call site.
_tls_ext_keyshare_cls = { 1: TLS_Ext_KeyShare_CH,
                          2: TLS_Ext_KeyShare_SH,
                          6: TLS_Ext_KeyShare_HRR }
class Ticket(Packet):
    # Session-ticket layout from RFC 5077 section 4: a 16-byte key name,
    # a 16-byte IV, the encrypted state, then a 32-byte MAC.
    name = "Recommended Ticket Construction (from RFC 5077)"
    fields_desc = [StrFixedLenField("key_name", None, 16),
                   StrFixedLenField("iv", None, 16),
                   FieldLenField("encstatelen", None, length_of="encstate"),
                   StrLenField("encstate", "",
                               length_from=lambda pkt: pkt.encstatelen),
                   StrFixedLenField("mac", None, 32)]
class TicketField(PacketField):
    """
    A PacketField holding a Ticket whose size on the wire is bounded by an
    external length field (resolved through ``length_from``).
    """
    __slots__ = ["length_from"]

    def __init__(self, name, default, length_from=None, **kargs):
        self.length_from = length_from
        PacketField.__init__(self, name, default, Ticket, **kargs)

    def m2i(self, pkt, m):
        # Split the raw bytes at the announced length: the first part is the
        # ticket, anything beyond it becomes Padding.
        length = self.length_from(pkt)
        ticket_bytes = m[:length]
        remainder = m[length:]
        return self.cls(ticket_bytes) / Padding(remainder)
class PSKIdentity(Packet):
    # One entry of the pre_shared_key identities list: a ticket-shaped
    # identity plus its obfuscated age.
    name = "PSK Identity"
    fields_desc = [FieldLenField("identity_len", None,
                                 length_of="identity"),
                   TicketField("identity", "",
                               length_from=lambda pkt: pkt.identity_len),
                   IntField("obfuscated_ticket_age", 0)]
class PSKBinderEntry(Packet):
    # One binder value; note the one-byte (fmt="B") length prefix.
    name = "PSK Binder Entry"
    fields_desc = [FieldLenField("binder_len", None, fmt="B",
                                 length_of="binder"),
                   StrLenField("binder", "",
                               length_from=lambda pkt: pkt.binder_len)]
class TLS_Ext_PreSharedKey_CH(TLS_Ext_Unknown):
    # XXX define post_build and post_dissection methods
    name = "TLS Extension - Pre Shared Key (for ClientHello)"
    # Extension type is 0x29 (pre_shared_key), matching the ServerHello
    # variant below; the previous 0x28 was the key_share code point,
    # apparently copy-pasted from the key_share classes above.
    fields_desc = [ShortEnumField("type", 0x29, _tls_ext),
                   ShortField("len", None),
                   FieldLenField("identities_len", None,
                                 length_of="identities"),
                   PacketListField("identities", [], PSKIdentity,
                                   length_from=lambda pkt: pkt.identities_len),
                   FieldLenField("binders_len", None,
                                 length_of="binders"),
                   PacketListField("binders", [], PSKBinderEntry,
                                   length_from=lambda pkt: pkt.binders_len)]
class TLS_Ext_PreSharedKey_SH(TLS_Ext_Unknown):
    # The server answers with the index of the identity it selected from the
    # client's identities list.
    name = "TLS Extension - Pre Shared Key (for ServerHello)"
    fields_desc = [ShortEnumField("type", 0x29, _tls_ext),
                   ShortField("len", None),
                   ShortField("selected_identity", None)]
# Dispatch table for the pre_shared_key extension (1 = ClientHello variant,
# 2 = ServerHello variant).
_tls_ext_presharedkey_cls = { 1: TLS_Ext_PreSharedKey_CH,
                              2: TLS_Ext_PreSharedKey_SH }
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This module is also sponsored by E.T.A.I. (www.etai.fr)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vmware_guest_facts
short_description: Gather facts about a single VM
description:
- Gather facts about a single VM on a VMware ESX cluster
version_added: 2.3
author:
- Loic Blot (@nerzhul) <loic.blot@unix-experience.fr>
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
name:
description:
- Name of the VM to work with
required: True
name_match:
description:
- If multiple VMs matching the name, use the first or last found
default: 'first'
choices: ['first', 'last']
uuid:
description:
- UUID of the instance to manage if known, this is VMware's unique identifier.
- This is required if name is not supplied.
folder:
description:
- Destination folder, absolute path to find an existing guest.
- This is required if name is supplied.
datacenter:
description:
- Destination datacenter for the deploy operation
required: True
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Gather VM facts
vmware_guest_facts:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
validate_certs: no
uuid: 421e4592-c069-924d-ce20-7e7533fab926
delegate_to: localhost
register: facts
'''
RETURN = """
instance:
description: metadata about the virtual machine
returned: always
type: dict
sample: None
"""
import os
import time
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six import iteritems
from ansible.module_utils.vmware import connect_to_api, find_vm_by_id, gather_vm_facts
# simplejson is only a fallback for interpreters whose stdlib lacks json.
try:
    import json
except ImportError:
    import simplejson as json

# pyVmomi is optional at import time; its absence is reported through
# module.fail_json() in PyVmomiHelper.__init__ instead of an ImportError.
HAS_PYVMOMI = False
try:
    import pyVmomi
    from pyVmomi import vim
    HAS_PYVMOMI = True
except ImportError:
    pass
class PyVmomiHelper(object):
    """Thin wrapper around the vSphere API: locates a VM and collects facts."""

    def __init__(self, module):
        if not HAS_PYVMOMI:
            module.fail_json(msg='pyvmomi module required')
        self.module = module
        self.params = module.params
        self.content = connect_to_api(self.module)

    def getvm(self, name=None, uuid=None, folder=None):
        """
        Find a VM either directly by UUID, or by name inside the configured
        folder. Returns None when nothing matches.
        """
        if uuid:
            return find_vm_by_id(self.content, vm_id=uuid, vm_id_type="uuid")
        if not folder:
            return None

        # Build the absolute folder path to pass into the search method
        if not self.params['folder'].startswith('/'):
            self.module.fail_json(msg="Folder %(folder)s needs to be an absolute path, starting with '/'." % self.params)
        searchpath = '%(datacenter)s%(folder)s' % self.params

        # get all objects for this path ...
        folder_obj = self.content.searchIndex.FindByInventoryPath(searchpath)
        if not folder_obj:
            return None
        if isinstance(folder_obj, vim.Datacenter):
            folder_obj = folder_obj.vmFolder

        match = None
        for child in folder_obj.childEntity:
            if not isinstance(child, vim.VirtualMachine):
                continue
            if child.name == name:
                match = child
                # 'first' stops at the first hit; otherwise the last hit wins.
                if self.params['name_match'] == 'first':
                    break
        return match

    def gather_facts(self, vm):
        """Return the standard fact dictionary for the given VM object."""
        return gather_vm_facts(self.content, vm)
def get_obj(content, vimtype, name):
    """
    Return an object by name, if name is None the
    first found object is returned
    """
    container = content.viewManager.CreateContainerView(
        content.rootFolder, vimtype, True)
    found = None
    for candidate in container.view:
        # A falsy name means "take the first object of this type".
        if not name or candidate.name == name:
            found = candidate
            break
    container.Destroy()
    return found
def main():
    """Module entry point: parse arguments, locate the VM, return its facts."""
    module = AnsibleModule(
        argument_spec=dict(
            hostname=dict(
                type='str',
                default=os.environ.get('VMWARE_HOST')
            ),
            username=dict(
                type='str',
                default=os.environ.get('VMWARE_USER')
            ),
            password=dict(
                type='str', no_log=True,
                default=os.environ.get('VMWARE_PASSWORD')
            ),
            validate_certs=dict(required=False, type='bool', default=True),
            # NOTE(review): DOCUMENTATION says uuid may replace name, but
            # name is declared required=True here -- confirm which is intended.
            name=dict(required=True, type='str'),
            name_match=dict(required=False, type='str', default='first'),
            uuid=dict(required=False, type='str'),
            folder=dict(required=False, type='str', default='/vm'),
            datacenter=dict(required=True, type='str'),
        ),
    )

    # Prepend /vm if it was missing from the folder path, also strip trailing slashes
    if not module.params['folder'].startswith('/vm') and module.params['folder'].startswith('/'):
        module.params['folder'] = '/vm%(folder)s' % module.params
    module.params['folder'] = module.params['folder'].rstrip('/')

    pyv = PyVmomiHelper(module)
    # Check if the VM exists before continuing
    vm = pyv.getvm(name=module.params['name'],
                   folder=module.params['folder'],
                   uuid=module.params['uuid'])

    # VM already exists
    if vm:
        try:
            module.exit_json(instance=pyv.gather_facts(vm))
        except Exception:
            e = get_exception()
            module.fail_json(msg="Fact gather failed with exception %s" % e)
    else:
        module.fail_json(msg="Unable to gather facts for non-existing VM %(name)s" % module.params)


if __name__ == '__main__':
    main()
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineScaleSetSku(Model):
    """Describes an available virtual machine scale set sku.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar resource_type: The type of resource the sku applies to.
    :vartype resource_type: str
    :ivar sku: The Sku.
    :vartype sku: :class:`Sku
     <azure.mgmt.compute.compute.v2017_03_30.models.Sku>`
    :ivar capacity: Available scaling information.
    :vartype capacity: :class:`VirtualMachineScaleSetSkuCapacity
     <azure.mgmt.compute.compute.v2017_03_30.models.VirtualMachineScaleSetSkuCapacity>`
    """

    # Every property is declared read-only: the service populates them in
    # responses only. (AutoRest-generated; do not hand-edit the maps.)
    _validation = {
        'resource_type': {'readonly': True},
        'sku': {'readonly': True},
        'capacity': {'readonly': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest type names.
    _attribute_map = {
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'capacity': {'key': 'capacity', 'type': 'VirtualMachineScaleSetSkuCapacity'},
    }

    def __init__(self):
        # Left as None until deserialized from a service response.
        self.resource_type = None
        self.sku = None
        self.capacity = None
#include "test/jemalloc_test.h"
#include "jemalloc/internal/peak.h"
/*
 * Exercises the peak tracker.  peak_update() receives cumulative allocation
 * and deallocation byte counters; peak_max() reports the largest net
 * (alloc - dalloc) value observed since initialization or the most recent
 * peak_set_zero().
 */
TEST_BEGIN(test_peak) {
	peak_t peak = PEAK_INITIALIZER;
	expect_u64_eq(0, peak_max(&peak),
	    "Peak should be zero at initialization");
	peak_update(&peak, 100, 50);
	expect_u64_eq(50, peak_max(&peak),
	    "Missed update");
	/* Increasing dalloc only lowers the net count: peak must not move. */
	peak_update(&peak, 100, 100);
	expect_u64_eq(50, peak_max(&peak), "Dallocs shouldn't change peak");
	peak_update(&peak, 100, 200);
	expect_u64_eq(50, peak_max(&peak), "Dallocs shouldn't change peak");
	/* Net is back to 0; the old peak of 50 still stands. */
	peak_update(&peak, 200, 200);
	expect_u64_eq(50, peak_max(&peak), "Haven't reached peak again");
	peak_update(&peak, 300, 200);
	expect_u64_eq(100, peak_max(&peak), "Missed an update.");
	/* Re-baseline while net-positive; later peaks are relative to it. */
	peak_set_zero(&peak, 300, 200);
	expect_u64_eq(0, peak_max(&peak), "No effect from zeroing");
	peak_update(&peak, 300, 300);
	expect_u64_eq(0, peak_max(&peak), "Dalloc shouldn't change peak");
	peak_update(&peak, 400, 300);
	expect_u64_eq(0, peak_max(&peak), "Should still be net negative");
	peak_update(&peak, 500, 300);
	expect_u64_eq(100, peak_max(&peak), "Missed an update.");
	/*
	 * Above, we set to zero while a net allocator; let's try as a
	 * net-deallocator.
	 */
	peak_set_zero(&peak, 600, 700);
	expect_u64_eq(0, peak_max(&peak), "No effect from zeroing.");
	peak_update(&peak, 600, 800);
	expect_u64_eq(0, peak_max(&peak), "Dalloc shouldn't change peak.");
	peak_update(&peak, 700, 800);
	expect_u64_eq(0, peak_max(&peak), "Should still be net negative.");
	peak_update(&peak, 800, 800);
	expect_u64_eq(100, peak_max(&peak), "Missed an update.");
}
TEST_END
/* Entry point: run test_peak through the jemalloc test harness. */
int
main(void) {
	return test_no_reentrancy(
	    test_peak);
}
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides generic deployment steps for machines post boot.
"""
from __future__ import with_statement

import binascii
import os
import stat

from libcloud.utils.py3 import basestring, PY3
class Deployment(object):
    """
    Base class for deployment tasks.
    """

    def run(self, node, client):
        """
        Runs this deployment task on node using the client provided.

        :type node: :class:`Node`
        :keyword node: Node to operate one

        :type client: :class:`BaseSSHClient`
        :keyword client: Connected SSH client to use.

        :return: :class:`Node`
        """
        raise NotImplementedError(
            'run not implemented for this deployment')

    def _get_string_value(self, argument_name, argument_value):
        # Accept either a plain string or anything file-like exposing read().
        is_string = isinstance(argument_value, basestring)
        is_file_like = hasattr(argument_value, 'read')

        if not is_string and not is_file_like:
            raise TypeError('%s argument must be a string or a file-like '
                            'object' % (argument_name))

        if is_file_like:
            return argument_value.read()
        return argument_value
class SSHKeyDeployment(Deployment):
    """
    Installs a public SSH Key onto a server.
    """

    def __init__(self, key):
        """
        :type key: ``str`` or :class:`File` object
        :keyword key: Contents of the public key write or a file object which
                      can be read.
        """
        # Normalize file-like objects to their string contents up front.
        self.key = self._get_string_value(argument_name='key',
                                          argument_value=key)

    def run(self, node, client):
        """
        Installs SSH key into ``.ssh/authorized_keys``

        See also :class:`Deployment.run`
        """
        # mode='a' appends, so existing authorized keys are preserved.
        client.put(".ssh/authorized_keys", contents=self.key, mode='a')
        return node
class FileDeployment(Deployment):
    """
    Installs a file on the server.
    """

    def __init__(self, source, target):
        """
        :type source: ``str``
        :keyword source: Local path of file to be installed

        :type target: ``str``
        :keyword target: Path to install file on node
        """
        self.source = source
        self.target = target

    def run(self, node, client):
        """
        Upload the file, retaining permissions.

        See also :class:`Deployment.run`
        """
        # stat.S_IMODE extracts the permission bits (including any
        # setuid/setgid/sticky bits) directly, instead of slicing the string
        # form of oct() -- which silently drops the special bits on Python 2
        # while keeping them on Python 3.
        perms = stat.S_IMODE(os.stat(self.source).st_mode)

        with open(self.source, 'rb') as fp:
            content = fp.read()

        client.put(path=self.target, chmod=perms,
                   contents=content)
        return node
class ScriptDeployment(Deployment):
    """
    Runs an arbitrary shell script on the server.

    This step works by first writing the content of the shell script (script
    argument) in a \*.sh file on a remote server and then running that file.
    If you are running a non-shell script, make sure to put the appropriate
    shebang to the top of the script. You are also advised to do that even if
    you are running a plan shell script.
    """

    def __init__(self, script, args=None, name=None, delete=False):
        """
        :type script: ``str``
        :keyword script: Contents of the script to run.

        :type args: ``list``
        :keyword args: Optional command line arguments which get passed to the
                       deployment script file.

        :type name: ``str``
        :keyword name: Name of the script to upload it as, if not specified,
                       a random name will be chosen.

        :type delete: ``bool``
        :keyword delete: Whether to delete the script on completion.
        """
        script = self._get_string_value(argument_name='script',
                                        argument_value=script)

        self.script = script
        self.args = args or []
        # Populated by run(): output and exit code of the executed script.
        self.stdout = None
        self.stderr = None
        self.exit_status = None
        self.delete = delete
        self.name = name

        if self.name is None:
            # File is put under user's home directory
            # (~/libcloud_deployment_<random_string>.sh)
            random_string = binascii.hexlify(os.urandom(4))
            random_string = random_string.decode('ascii')
            self.name = 'libcloud_deployment_%s.sh' % (random_string)

    def run(self, node, client):
        """
        Uploads the shell script and then executes it.

        See also :class:`Deployment.run`
        """
        file_path = client.put(path=self.name, chmod=int('755', 8),
                               contents=self.script)

        # Pre-pend cwd if user specified a relative path
        if self.name[0] != '/':
            base_path = os.path.dirname(file_path)
            name = os.path.join(base_path, self.name)
        else:
            name = self.name

        # (previously there was a dead "cmd = name" assignment here that was
        # immediately overwritten by both branches below)
        if self.args:
            # Append arguments to the command
            cmd = '%s %s' % (name, ' '.join(self.args))
        else:
            cmd = name

        self.stdout, self.stderr, self.exit_status = client.run(cmd)

        if self.delete:
            # NOTE(review): deletion uses self.name (possibly relative)
            # rather than the absolute path executed above -- confirm the
            # client resolves both to the same file.
            client.delete(self.name)

        return node
class ScriptFileDeployment(ScriptDeployment):
    """
    Runs an arbitrary shell script from a local file on the server. Same as
    ScriptDeployment, except that you can pass in a path to the file instead of
    the script content.
    """

    def __init__(self, script_file, args=None, name=None, delete=False):
        """
        :type script_file: ``str``
        :keyword script_file: Path to a file containing the script to run.

        :type args: ``list``
        :keyword args: Optional command line arguments which get passed to the
                       deployment script file.

        :type name: ``str``
        :keyword name: Name of the script to upload it as, if not specified,
                       a random name will be chosen.

        :type delete: ``bool``
        :keyword delete: Whether to delete the script on completion.
        """
        # Read as bytes and decode explicitly on Python 3 so the parent class
        # always receives text content.
        with open(script_file, 'rb') as fp:
            content = fp.read()

        if PY3:
            content = content.decode('utf-8')

        super(ScriptFileDeployment, self).__init__(script=content,
                                                   args=args,
                                                   name=name,
                                                   delete=delete)
class MultiStepDeployment(Deployment):
    """
    Runs a chain of Deployment steps.
    """

    def __init__(self, add=None):
        """
        :type add: ``list``
        :keyword add: Deployment steps to add.
        """
        self.steps = []
        self.add(add)

    def add(self, add):
        """
        Add a deployment to this chain.

        :type add: Single :class:`Deployment` or a ``list`` of
                   :class:`Deployment`
        :keyword add: Adds this deployment to the others already in this
                      object.
        """
        if add is None:
            return
        # Accept either a single step or a list/tuple of steps.
        if isinstance(add, (list, tuple)):
            self.steps.extend(add)
        else:
            self.steps.append(add)

    def run(self, node, client):
        """
        Run each deployment that has been added.

        See also :class:`Deployment.run`
        """
        for step in self.steps:
            node = step.run(node, client)
        return node
/*
* Copyright 2010-2025 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.descriptors.symbols.descriptorBased.base
import com.intellij.psi.impl.compiled.ClsElementImpl
import org.jetbrains.kotlin.analysis.api.KaConstantInitializerValue
import org.jetbrains.kotlin.analysis.api.KaInitializerValue
import org.jetbrains.kotlin.analysis.api.KaNonConstantInitializerValue
import org.jetbrains.kotlin.analysis.api.annotations.KaAnnotation
import org.jetbrains.kotlin.analysis.api.annotations.KaAnnotationValue
import org.jetbrains.kotlin.analysis.api.annotations.KaNamedAnnotationValue
import org.jetbrains.kotlin.analysis.api.base.KaConstantValue
import org.jetbrains.kotlin.analysis.api.base.KaContextReceiver
import org.jetbrains.kotlin.analysis.api.descriptors.Fe10AnalysisContext
import org.jetbrains.kotlin.analysis.api.descriptors.symbols.KaFe10FileSymbol
import org.jetbrains.kotlin.analysis.api.descriptors.symbols.KaFe10PackageSymbol
import org.jetbrains.kotlin.analysis.api.descriptors.symbols.descriptorBased.*
import org.jetbrains.kotlin.analysis.api.descriptors.symbols.psiBased.*
import org.jetbrains.kotlin.analysis.api.descriptors.symbols.psiBased.base.KaFe10PsiSymbol
import org.jetbrains.kotlin.analysis.api.descriptors.types.*
import org.jetbrains.kotlin.analysis.api.impl.base.*
import org.jetbrains.kotlin.analysis.api.impl.base.annotations.*
import org.jetbrains.kotlin.analysis.api.impl.base.symbols.asKaSymbolModality
import org.jetbrains.kotlin.analysis.api.impl.base.types.KaBaseStarTypeProjection
import org.jetbrains.kotlin.analysis.api.impl.base.types.KaBaseTypeArgumentWithVariance
import org.jetbrains.kotlin.analysis.api.symbols.*
import org.jetbrains.kotlin.analysis.api.types.KaType
import org.jetbrains.kotlin.analysis.api.types.KaTypeProjection
import org.jetbrains.kotlin.analysis.api.types.symbol
import org.jetbrains.kotlin.analysis.utils.errors.unexpectedElementError
import org.jetbrains.kotlin.builtins.KotlinBuiltIns
import org.jetbrains.kotlin.builtins.StandardNames
import org.jetbrains.kotlin.builtins.functions.FunctionClassDescriptor
import org.jetbrains.kotlin.descriptors.*
import org.jetbrains.kotlin.descriptors.annotations.AnnotationDescriptor
import org.jetbrains.kotlin.descriptors.annotations.AnnotationUseSiteTarget
import org.jetbrains.kotlin.descriptors.impl.*
import org.jetbrains.kotlin.idea.KotlinLanguage
import org.jetbrains.kotlin.load.java.descriptors.JavaCallableMemberDescriptor
import org.jetbrains.kotlin.load.java.descriptors.JavaClassDescriptor
import org.jetbrains.kotlin.load.java.descriptors.JavaForKotlinOverridePropertyDescriptor
import org.jetbrains.kotlin.load.java.descriptors.JavaPropertyDescriptor
import org.jetbrains.kotlin.load.java.sources.JavaSourceElement
import org.jetbrains.kotlin.load.kotlin.toSourceElement
import org.jetbrains.kotlin.name.CallableId
import org.jetbrains.kotlin.name.ClassId
import org.jetbrains.kotlin.name.FqName
import org.jetbrains.kotlin.name.StandardClassIds
import org.jetbrains.kotlin.psi.KtCallElement
import org.jetbrains.kotlin.psi.KtExpression
import org.jetbrains.kotlin.renderer.DescriptorRenderer
import org.jetbrains.kotlin.resolve.DescriptorUtils
import org.jetbrains.kotlin.resolve.ImportedFromObjectCallableDescriptor
import org.jetbrains.kotlin.resolve.calls.inference.CapturedType
import org.jetbrains.kotlin.resolve.calls.tasks.isDynamic
import org.jetbrains.kotlin.resolve.constants.*
import org.jetbrains.kotlin.resolve.constants.evaluate.ConstantExpressionEvaluator
import org.jetbrains.kotlin.resolve.descriptorUtil.annotationClass
import org.jetbrains.kotlin.resolve.descriptorUtil.builtIns
import org.jetbrains.kotlin.resolve.lazy.descriptors.LazyAnnotationDescriptor
import org.jetbrains.kotlin.resolve.sam.SamConstructorDescriptor
import org.jetbrains.kotlin.resolve.scopes.receivers.ImplicitContextReceiver
import org.jetbrains.kotlin.resolve.source.PsiSourceElement
import org.jetbrains.kotlin.resolve.source.getPsi
import org.jetbrains.kotlin.synthetic.SyntheticJavaPropertyDescriptor
import org.jetbrains.kotlin.types.*
import org.jetbrains.kotlin.types.checker.NewCapturedType
import org.jetbrains.kotlin.types.checker.NewTypeVariableConstructor
import org.jetbrains.kotlin.types.error.ErrorType
import org.jetbrains.kotlin.types.error.ErrorTypeKind
import org.jetbrains.kotlin.types.error.ErrorUtils
/**
 * Maps a FE1.0 descriptor to the [KaSymbolLocation] the Analysis API reports
 * for it, mostly by inspecting the containing declaration.
 */
internal val DeclarationDescriptor.kaSymbolLocation: KaSymbolLocation
    get() = when {
        // Dynamic members are treated as class members.
        isDynamic() -> KaSymbolLocation.CLASS
        // Accessors always belong to their property.
        this is PropertyAccessorDescriptor -> KaSymbolLocation.PROPERTY
        // SAM constructors are synthetic top-level callables.
        this is SamConstructorDescriptor -> KaSymbolLocation.TOP_LEVEL
        else -> when (containingDeclaration) {
            is PackageFragmentDescriptor -> KaSymbolLocation.TOP_LEVEL
            is ClassDescriptor -> KaSymbolLocation.CLASS
            else -> KaSymbolLocation.LOCAL
        }
    }
/**
 * `true` when this member is a declared (non-fake-override) member that
 * overrides at least one other descriptor. Property accessors never count.
 */
internal val CallableMemberDescriptor.isExplicitOverride: Boolean
    get() {
        return (this !is PropertyAccessorDescriptor
                && kind != CallableMemberDescriptor.Kind.FAKE_OVERRIDE
                && overriddenDescriptors.isNotEmpty())
    }
/**
 * `true` for every class kind except concrete class-like kinds
 * (class, enum class, object, enum entry).
 */
internal val ClassDescriptor.isInterfaceLike: Boolean
    get() = kind != ClassKind.CLASS &&
            kind != ClassKind.ENUM_CLASS &&
            kind != ClassKind.OBJECT &&
            kind != ClassKind.ENUM_ENTRY
/**
 * Converts any FE1.0 descriptor to a [KaSymbol], or `null` when no mapping
 * exists. Enum entries are handled first: they are [ClassDescriptor]s but map
 * to a dedicated symbol kind.
 */
internal fun DeclarationDescriptor.toKtSymbol(analysisContext: Fe10AnalysisContext): KaSymbol? {
    if (this is ClassDescriptor && kind == ClassKind.ENUM_ENTRY) {
        return KaFe10DescEnumEntrySymbol(this, analysisContext)
    }

    return when (this) {
        is ClassifierDescriptor -> toKtClassifierSymbol(analysisContext)
        is ReceiverParameterDescriptor -> toKtReceiverParameterSymbol(analysisContext)
        is CallableDescriptor -> toKtCallableSymbol(analysisContext)
        is PackageViewDescriptor -> toKtPackageSymbol(analysisContext)
        else -> null
    }
}
/** Converts a classifier descriptor (type alias, type parameter, class) to its symbol. */
internal fun ClassifierDescriptor.toKtClassifierSymbol(analysisContext: Fe10AnalysisContext): KaClassifierSymbol? {
    return when (this) {
        is TypeAliasDescriptor -> KaFe10DescTypeAliasSymbol(this, analysisContext)
        is TypeParameterDescriptor -> KaFe10DescTypeParameterSymbol(this, analysisContext)
        is ClassDescriptor -> toKaClassSymbol(analysisContext)
        else -> null
    }
}
/** Builds a class symbol, distinguishing anonymous objects from named classes. */
internal fun ClassDescriptor.toKaClassSymbol(analysisContext: Fe10AnalysisContext): KaClassSymbol = when {
    DescriptorUtils.isAnonymousObject(this) -> KaFe10DescAnonymousObjectSymbol(this, analysisContext)
    else -> KaFe10DescNamedClassSymbol(this, analysisContext)
}
/** Wraps a package view into a package symbol keyed by its FQ name. */
internal fun PackageViewDescriptor.toKtPackageSymbol(analysisContext: Fe10AnalysisContext): KaPackageSymbol {
    return KaFe10PackageSymbol(fqName, analysisContext)
}
/** Wraps a receiver parameter descriptor into its symbol counterpart. */
internal fun ReceiverParameterDescriptor.toKtReceiverParameterSymbol(analysisContext: Fe10AnalysisContext): KaReceiverParameterSymbol {
    return KaFe10ReceiverParameterSymbol(this, analysisContext)
}
/**
 * Extracts the underlying FE1.0 [DeclarationDescriptor] from a [KaSymbol],
 * or `null` for symbol kinds that have no descriptor counterpart
 * (default setters, files, backing fields, class initializers, ...).
 */
internal fun KaSymbol.getDescriptor(): DeclarationDescriptor? {
    return when (this) {
        is KaFe10PsiSymbol<*, *> -> descriptor
        is KaFe10DescSymbol<*> -> descriptor
        is KaFe10DescSyntheticFieldSymbol -> descriptor
        is KaFe10PsiDefaultPropertyGetterSymbol -> descriptor
        is KaFe10PsiDefaultPropertySetterSymbol -> descriptor
        is KaFe10PsiDefaultSetterParameterSymbol -> descriptor
        is KaFe10DescDefaultPropertySetterSymbol -> null
        is KaFe10DynamicFunctionDescValueParameterSymbol -> null
        is KaFe10FileSymbol -> null
        is KaFe10DescDefaultPropertySetterSymbol.KaDefaultValueParameterSymbol -> descriptor
        is KaFe10PsiDefaultPropertySetterSymbol.KaDefaultValueParameterSymbol -> descriptor
        is KaFe10DescDefaultBackingFieldSymbol, is KaFe10PsiDefaultBackingFieldSymbol -> null
        is KaFe10PsiClassInitializerSymbol -> null
        else -> unexpectedElementError("KtSymbol", this)
    }
}
/**
 * Builds a constructor symbol; type-alias constructors are unwrapped to the
 * underlying class constructor first.
 */
internal fun ConstructorDescriptor.toKtConstructorSymbol(analysisContext: Fe10AnalysisContext): KaConstructorSymbol {
    if (this is TypeAliasConstructorDescriptor) {
        return this.underlyingConstructorDescriptor.toKtConstructorSymbol(analysisContext)
    }

    return KaFe10DescConstructorSymbol(this, analysisContext)
}
/**
 * Whether the parameter names of this callable can be relied upon: primary
 * annotation constructors always can; expect declarations and Java members
 * cannot; everything else defers to [hasStableParameterNames].
 */
internal val CallableMemberDescriptor.ktHasStableParameterNames: Boolean
    get() = when {
        this is ConstructorDescriptor && isPrimary && constructedClass.kind == ClassKind.ANNOTATION_CLASS -> true
        isExpect -> false
        else -> when (this) {
            is JavaCallableMemberDescriptor -> false
            else -> hasStableParameterNames()
        }
    }
/**
 * Converts a FE1.0 callable descriptor to the matching [KaCallableSymbol],
 * unwrapping fake overrides first.
 *
 * The branches are order-sensitive: several descriptor interfaces are
 * subtypes of each other (e.g. property accessors and SAM constructors are
 * also [FunctionDescriptor]s), so the more specific checks come first.
 */
internal fun CallableDescriptor.toKtCallableSymbol(analysisContext: Fe10AnalysisContext): KaCallableSymbol? {
    return when (val unwrapped = unwrapFakeOverrideIfNeeded()) {
        is ImportedFromObjectCallableDescriptor<*> -> unwrapped.callableFromObject.toKtCallableSymbol(analysisContext)
        is PropertyGetterDescriptor -> KaFe10DescPropertyGetterSymbol(unwrapped, analysisContext)
        is PropertySetterDescriptor -> KaFe10DescPropertySetterSymbol(unwrapped, analysisContext)
        is SamConstructorDescriptor -> KaFe10DescSamConstructorSymbol(unwrapped, analysisContext)
        is ConstructorDescriptor -> unwrapped.toKtConstructorSymbol(analysisContext)
        is FunctionDescriptor -> {
            if (DescriptorUtils.isAnonymousFunction(unwrapped)) {
                KaFe10DescAnonymousFunctionSymbol(unwrapped, analysisContext)
            } else {
                KaFe10DescNamedFunctionSymbol.build(unwrapped, analysisContext)
            }
        }
        is SyntheticFieldDescriptor -> KaFe10DescSyntheticFieldSymbol(unwrapped, analysisContext)
        is LocalVariableDescriptor -> KaFe10DescLocalVariableSymbol(unwrapped, analysisContext)
        is ValueParameterDescriptor -> KaFe10DescValueParameterSymbol(unwrapped, analysisContext)
        is SyntheticJavaPropertyDescriptor -> KaFe10DescSyntheticJavaPropertySymbol(unwrapped, analysisContext)
        is JavaForKotlinOverridePropertyDescriptor -> KaFe10DescSyntheticJavaPropertySymbolForOverride(unwrapped, analysisContext)
        is JavaPropertyDescriptor -> KaFe10DescJavaFieldSymbol(unwrapped, analysisContext)
        is PropertyDescriptorImpl -> KaFe10DescKotlinPropertySymbol(unwrapped, analysisContext)
        else -> null
    }
}
/**
 * This logic should be equivalent to
 * [org.jetbrains.kotlin.analysis.api.fir.KaSymbolByFirBuilder.unwrapSubstitutionOverrideIfNeeded]. But this method unwrap all fake
 * overrides that do not change the signature.
 */
internal fun CallableDescriptor.unwrapFakeOverrideIfNeeded(): CallableDescriptor {
    val useSiteUnwrapped = unwrapUseSiteSubstitutionOverride()
    // Only class members can be fake overrides; anything else is final here.
    if (useSiteUnwrapped !is CallableMemberDescriptor) return useSiteUnwrapped
    if (useSiteUnwrapped.kind.isReal) return useSiteUnwrapped
    // Intersection overrides (several overridden descriptors) are kept as is.
    val overriddenDescriptor = useSiteUnwrapped.overriddenDescriptors.singleOrNull()?.unwrapUseSiteSubstitutionOverride()
        ?: return useSiteUnwrapped
    // Stop unwrapping as soon as it would change the visible signature.
    if (hasTypeReferenceAffectingSignature(useSiteUnwrapped, overriddenDescriptor)) {
        return useSiteUnwrapped
    }
    return overriddenDescriptor.unwrapFakeOverrideIfNeeded()
}
/**
 * Returns `true` when [overriddenDescriptor]'s signature mentions a type parameter that is neither its own
 * nor inherited from an outer class of [descriptor]'s containing class — i.e. unwrapping would change the signature.
 */
private fun hasTypeReferenceAffectingSignature(
    descriptor: CallableMemberDescriptor,
    overriddenDescriptor: CallableMemberDescriptor
): Boolean {
    val outerTypeParameters = buildList {
        (descriptor.containingDeclaration as? ClassifierDescriptorWithTypeParameters)?.let { collectTypeParameters(it) }
    }
    val allowedTypeParameters = (overriddenDescriptor.typeParameters + outerTypeParameters).toSet()
    // Gather every type that participates in the signature, then check each lazily.
    val signatureTypes = buildList {
        overriddenDescriptor.returnType?.let { add(it) }
        overriddenDescriptor.extensionReceiverParameter?.type?.let { add(it) }
        overriddenDescriptor.valueParameters.mapTo(this) { it.type }
    }
    return signatureTypes.any { it.hasReferenceOtherThan(allowedTypeParameters) }
}
/**
 * Collects into the receiver the declared type parameters of every outer class of [innerClass],
 * walking outward while the classes are `inner`.
 */
private fun MutableList<TypeParameterDescriptor>.collectTypeParameters(innerClass: ClassifierDescriptorWithTypeParameters) {
    var current = innerClass
    while (current.isInner) {
        val outerClass = current.containingDeclaration as? ClassifierDescriptorWithTypeParameters ?: return
        addAll(outerClass.declaredTypeParameters)
        current = outerClass
    }
}
/**
 * Checks whether this type references any type parameter outside [allowedTypeParameterDescriptors],
 * either directly, through a type-parameter upper bound, or through a (non-star) type argument.
 */
private fun KotlinType.hasReferenceOtherThan(allowedTypeParameterDescriptors: Set<TypeParameterDescriptor>): Boolean {
    return when (this) {
        is SimpleType -> {
            val declarationDescriptor = constructor.declarationDescriptor
            // Only usages of type parameters matter here; plain class types in this position are fine.
            if (declarationDescriptor !is AbstractTypeParameterDescriptor) return false
            declarationDescriptor !in allowedTypeParameterDescriptors ||
                declarationDescriptor.upperBounds.any { it.hasReferenceOtherThan(allowedTypeParameterDescriptors) }
        }
        else -> arguments.any { typeProjection ->
            // A star projection type (lazily) built by type parameter will be yet another type with a star projection,
            // resulting in stack overflow if we keep checking allowed type parameter descriptors
            !typeProjection.isStarProjection &&
                typeProjection.type.hasReferenceOtherThan(allowedTypeParameterDescriptors)
        }
    }
}
/**
 * Use-site substitution overrides are tracked through [CallableDescriptor.getOriginal]. Note that overridden symbols are accessed through
 * [CallableDescriptor.getOverriddenDescriptors] instead, which is separate from [CallableDescriptor.getOriginal].
 */
@Suppress("UNCHECKED_CAST")
private fun <T : CallableDescriptor> T.unwrapUseSiteSubstitutionOverride(): T {
    var current: CallableDescriptor = this
    // Follow the `original` chain to its fixpoint. The condition must compare against
    // `current.original`, not the extension receiver's `original`: the previous form
    // (`original != current`) always read `this.original`, so it stopped after a single
    // unwrapping step and left chains of several substitutions only partially unwrapped.
    while (current.original != current) {
        current = current.original
    }
    return current as T
}
/**
 * Converts an FE 1.0 [KotlinType] into the corresponding Analysis API [KaType].
 * The branches are matched in order, so the more specific kinds must come first.
 */
internal fun KotlinType.toKtType(analysisContext: Fe10AnalysisContext): KaType {
    return when (val unwrappedType = unwrap()) {
        is DynamicType -> KaFe10DynamicType(unwrappedType, analysisContext)
        is FlexibleType -> KaFe10FlexibleType(unwrappedType, analysisContext)
        is DefinitelyNotNullType -> KaFe10DefinitelyNotNullType(unwrappedType, analysisContext)
        is ErrorType -> {
            // Unresolved references become class-error types; other error kinds stay generic error types.
            if (unwrappedType.kind.isUnresolved)
                KaFe10ClassErrorType(unwrappedType, analysisContext)
            else
                KaFe10ErrorType(unwrappedType, analysisContext)
        }
        is CapturedType -> KaFe10CapturedType(unwrappedType, analysisContext)
        is NewCapturedType -> KaFe10NewCapturedType(unwrappedType, analysisContext)
        is SimpleType -> {
            // A type-parameter usage is still a SimpleType, so it must be recognized before class types.
            val typeParameterDescriptor = TypeUtils.getTypeParameterDescriptorOrNull(unwrappedType)
            if (typeParameterDescriptor != null) {
                return KaFe10TypeParameterType(unwrappedType, typeParameterDescriptor, analysisContext)
            }
            val typeConstructor = unwrappedType.constructor
            // Inference type variables may still stand for an original type parameter.
            if (typeConstructor is NewTypeVariableConstructor) {
                val newTypeParameterDescriptor = typeConstructor.originalTypeParameter
                return if (newTypeParameterDescriptor != null) {
                    KaFe10TypeParameterType(unwrappedType, newTypeParameterDescriptor, analysisContext)
                } else {
                    KaFe10ClassErrorType(ErrorUtils.createErrorType(ErrorTypeKind.UNRESOLVED_TYPE_PARAMETER_TYPE), analysisContext)
                }
            }
            if (typeConstructor is IntersectionTypeConstructor) {
                return KaFe10IntersectionType(unwrappedType, typeConstructor.supertypes, analysisContext)
            }
            when (val typeDeclaration = typeConstructor.declarationDescriptor) {
                is FunctionClassDescriptor -> KaFe10FunctionType(unwrappedType, typeDeclaration, analysisContext)
                is ClassifierDescriptorWithTypeParameters -> KaFe10UsualClassType(unwrappedType, typeDeclaration, analysisContext)
                else -> {
                    // No declaration behind the constructor: surface an unresolved-class error type.
                    val errorType =
                        ErrorUtils.createErrorType(ErrorTypeKind.UNRESOLVED_CLASS_TYPE, typeConstructor, typeDeclaration.toString())
                    KaFe10ClassErrorType(errorType, analysisContext)
                }
            }
        }
    }
}
/** Converts an FE 1.0 [TypeProjection] to an Analysis API [KaTypeProjection]. */
internal fun TypeProjection.toKtTypeProjection(analysisContext: Fe10AnalysisContext): KaTypeProjection = when {
    isStarProjection -> KaBaseStarTypeProjection(analysisContext.token)
    else -> KaBaseTypeArgumentWithVariance(type.toKtType(analysisContext), projectionKind, analysisContext.token)
}
/** Wraps an FE 1.0 type parameter descriptor into its Analysis API symbol. */
internal fun TypeParameterDescriptor.toKtTypeParameter(analysisContext: Fe10AnalysisContext): KaTypeParameterSymbol =
    KaFe10DescTypeParameterSymbol(this, analysisContext)
/**
 * Computes the [KaSymbolOrigin] for this declaration: descriptor-kind-specific answers first,
 * then a fallback based on the source element and the containing file.
 */
internal fun DeclarationDescriptor.getSymbolOrigin(analysisContext: Fe10AnalysisContext): KaSymbolOrigin {
    when (this) {
        is SyntheticJavaPropertyDescriptor -> return KaSymbolOrigin.JAVA_SYNTHETIC_PROPERTY
        is SyntheticFieldDescriptor -> return KaSymbolOrigin.PROPERTY_BACKING_FIELD
        is SamConstructorDescriptor -> return KaSymbolOrigin.SAM_CONSTRUCTOR
        is JavaClassDescriptor, is JavaCallableMemberDescriptor -> return javaOrigin()
        is DeserializedDescriptor -> return KaSymbolOrigin.LIBRARY
        // An enum entry's synthetic class shares the origin of the containing enum entry.
        is EnumEntrySyntheticClassDescriptor -> return containingDeclaration.getSymbolOrigin(analysisContext)
        is CallableMemberDescriptor -> when (kind) {
            CallableMemberDescriptor.Kind.DELEGATION -> return KaSymbolOrigin.DELEGATED
            CallableMemberDescriptor.Kind.SYNTHESIZED -> return KaSymbolOrigin.SOURCE_MEMBER_GENERATED
            else -> {
                if (isDynamic()) {
                    return KaSymbolOrigin.JS_DYNAMIC
                }
            }
        }
    }
    // No kind-specific origin found: infer it from the source element.
    val sourceElement = this.toSourceElement
    if (sourceElement is JavaSourceElement) {
        return javaOrigin()
    }
    val psi = sourceElement.getPsi()
    if (psi != null) {
        // Non-Kotlin PSI means the declaration comes from Java.
        if (psi.language != KotlinLanguage.INSTANCE) {
            return javaOrigin()
        }
        val virtualFile = psi.containingFile.virtualFile
        return analysisContext.getOrigin(virtualFile)
    } else { // psi == null
        // Implicit lambda parameter
        if (this is ValueParameterDescriptor && this.name == StandardNames.IMPLICIT_LAMBDA_PARAMETER_NAME) {
            return KaSymbolOrigin.SOURCE_MEMBER_GENERATED
        }
    }
    return KaSymbolOrigin.SOURCE
}
/** Distinguishes compiled Java (library) declarations from Java source declarations. */
private fun DeclarationDescriptor.javaOrigin(): KaSymbolOrigin {
    // Compiled (.class) PSI, or no PSI at all, indicates a binary dependency.
    val sourcePsi = toSourceElement.getPsi()
    val isBinary = sourcePsi == null || sourcePsi is ClsElementImpl
    return if (isBinary) KaSymbolOrigin.JAVA_LIBRARY else KaSymbolOrigin.JAVA_SOURCE
}
@Suppress("DEPRECATION")
internal val KotlinType.ktNullability: org.jetbrains.kotlin.analysis.api.types.KaTypeNullability
    get() {
        // Flexible nullability maps to UNKNOWN; otherwise the marked nullability decides.
        if (this.isNullabilityFlexible()) {
            return org.jetbrains.kotlin.analysis.api.types.KaTypeNullability.UNKNOWN
        }
        return if (this.isMarkedNullable) {
            org.jetbrains.kotlin.analysis.api.types.KaTypeNullability.NULLABLE
        } else {
            org.jetbrains.kotlin.analysis.api.types.KaTypeNullability.NON_NULLABLE
        }
    }
/**
 * Maps FE 1.0 [DescriptorVisibilities] values to compiler [Visibilities];
 * any unrecognized visibility falls back to [Visibilities.Unknown].
 */
internal val DeclarationDescriptorWithVisibility.ktVisibility: Visibility
    get() = when (visibility) {
        DescriptorVisibilities.PUBLIC -> Visibilities.Public
        DescriptorVisibilities.PROTECTED -> Visibilities.Protected
        DescriptorVisibilities.INTERNAL -> Visibilities.Internal
        DescriptorVisibilities.PRIVATE -> Visibilities.Private
        DescriptorVisibilities.PRIVATE_TO_THIS -> Visibilities.PrivateToThis
        DescriptorVisibilities.LOCAL -> Visibilities.Local
        DescriptorVisibilities.INVISIBLE_FAKE -> Visibilities.InvisibleFake
        DescriptorVisibilities.INHERITED -> Visibilities.Inherited
        else -> Visibilities.Unknown
    }
/** Maps an FE 1.0 declaration's modality to [KaSymbolModality]. */
internal val DeclarationDescriptor.kaSymbolModality: KaSymbolModality
    get() {
        if (this !is MemberDescriptor) return KaSymbolModality.FINAL
        if (this.modality == Modality.OPEN) {
            val owner = this.containingDeclaration
            val inFinalClass = owner is ClassDescriptor && owner.modality == Modality.FINAL
            // Non-static open callables in final class are counted as final (to match FIR)
            if (inFinalClass && (this !is CallableMemberDescriptor || dispatchReceiverParameter != null)) {
                return KaSymbolModality.FINAL
            }
        }
        return this.modality.asKaSymbolModality
    }
/** Converts a compile-time constant, throwing for unsupported constant kinds. */
internal fun ConstantValue<*>.toKtConstantValue(): KaConstantValue =
    toKtConstantValueOrNull() ?: error("Unexpected constant value $value")
/**
 * Converts an FE 1.0 compile-time [ConstantValue] to an Analysis API [KaConstantValue],
 * or returns `null` for kinds that have no constant representation (e.g. arrays, annotations, KClass).
 */
internal fun ConstantValue<*>.toKtConstantValueOrNull(): KaConstantValue? {
    return when (this) {
        is ErrorValue.ErrorValueWithMessage -> KaErrorConstantValueImpl(message, sourcePsi = null)
        is BooleanValue -> KaBooleanConstantValueImpl(value, sourcePsi = null)
        is DoubleValue -> KaDoubleConstantValueImpl(value, sourcePsi = null)
        is FloatValue -> KaFloatConstantValueImpl(value, sourcePsi = null)
        is NullValue -> KaNullConstantValueImpl(sourcePsi = null)
        is StringValue -> KaStringConstantValueImpl(value, sourcePsi = null)
        is ByteValue -> KaByteConstantValueImpl(value, sourcePsi = null)
        is CharValue -> KaCharConstantValueImpl(value, sourcePsi = null)
        is IntValue -> KaIntConstantValueImpl(value, sourcePsi = null)
        is LongValue -> KaLongConstantValueImpl(value, sourcePsi = null)
        is ShortValue -> KaShortConstantValueImpl(value, sourcePsi = null)
        // Unsigned values are stored in their signed carrier and reinterpreted here.
        is UByteValue -> KaUnsignedByteConstantValueImpl(value.toUByte(), sourcePsi = null)
        is UIntValue -> KaUnsignedIntConstantValueImpl(value.toUInt(), sourcePsi = null)
        is ULongValue -> KaUnsignedLongConstantValueImpl(value.toULong(), sourcePsi = null)
        is UShortValue -> KaUnsignedShortConstantValueImpl(value.toUShort(), sourcePsi = null)
        else -> null
    }
}
/**
 * Compares two types for equality, descending through matching array dimensions
 * so that projection differences on array element types are ignored.
 */
internal tailrec fun KotlinBuiltIns.areSameArrayTypeIgnoringProjections(left: KotlinType, right: KotlinType): Boolean {
    val leftIsArray = KotlinBuiltIns.isArrayOrPrimitiveArray(left)
    val rightIsArray = KotlinBuiltIns.isArrayOrPrimitiveArray(right)
    if (leftIsArray != rightIsArray) return false
    if (!leftIsArray) return left == right
    return areSameArrayTypeIgnoringProjections(getArrayElementType(left), getArrayElementType(right))
}
/**
 * Converts array elements to [KaAnnotationValue]s, flattening spread components:
 * an element whose type equals [containingArrayType] is a nested spread array and is expanded in place.
 */
internal fun List<ConstantValue<*>>.expandArrayAnnotationValue(
    containingArrayType: KotlinType,
    analysisContext: Fe10AnalysisContext,
): List<KaAnnotationValue> = flatMap { constantValue: ConstantValue<*> ->
    val constantType = constantValue.getType(analysisContext.resolveSession.moduleDescriptor)
    if (analysisContext.builtIns.areSameArrayTypeIgnoringProjections(containingArrayType, constantType)) {
        // If an element in the array has the same type as the containing array, it's a spread component that needs
        // to be expanded here. (It should have the array element type instead.)
        (constantValue as ArrayValue).value.expandArrayAnnotationValue(containingArrayType, analysisContext)
    } else {
        listOf(constantValue.toKaAnnotationValue(analysisContext))
    }
}
/**
 * Converts an FE 1.0 compile-time [ConstantValue] used as an annotation argument
 * into an Analysis API [KaAnnotationValue].
 */
internal fun ConstantValue<*>.toKaAnnotationValue(analysisContext: Fe10AnalysisContext): KaAnnotationValue {
    val token = analysisContext.token
    return when (this) {
        is ArrayValue -> {
            val arrayType = getType(analysisContext.resolveSession.moduleDescriptor)
            KaArrayAnnotationValueImpl(value.expandArrayAnnotationValue(arrayType, analysisContext), sourcePsi = null, token)
        }
        is EnumValue -> KaEnumEntryAnnotationValueImpl(CallableId(enumClassId, enumEntryName), sourcePsi = null, token)
        is KClassValue -> when (val value = value) {
            is KClassValue.Value.LocalClass -> {
                val type = value.type.toKtType(analysisContext)
                val classId = value.type.unwrap().constructor.declarationDescriptor?.maybeLocalClassId
                KaClassLiteralAnnotationValueImpl(type, classId, sourcePsi = null, token)
            }
            is KClassValue.Value.NormalClass -> {
                val classLiteralInfo = resolveClassLiteral(value, analysisContext)
                if (classLiteralInfo != null) {
                    KaClassLiteralAnnotationValueImpl(classLiteralInfo.type, classLiteralInfo.classId, sourcePsi = null, token)
                } else {
                    // The referenced class could not be resolved: produce an error type but keep the class id.
                    val classId = if (value.arrayDimensions == 0) value.classId else StandardClassIds.Array
                    val type = ErrorUtils
                        .createErrorType(ErrorTypeKind.UNRESOLVED_TYPE, classId.asFqNameString())
                        .toKtType(analysisContext)
                    KaClassLiteralAnnotationValueImpl(type, classId, sourcePsi = null, token)
                }
            }
        }
        is AnnotationValue -> {
            KaNestedAnnotationAnnotationValueImpl(
                KaAnnotationImpl(
                    value.annotationClass?.classId,
                    psi = null,
                    useSiteTarget = null,
                    // Argument conversion is deferred via lazy.
                    lazyArguments = lazy { value.getKtNamedAnnotationArguments(analysisContext) },
                    constructorSymbol = null,
                    token = token
                ),
                token
            )
        }
        else -> {
            // All remaining cases are plain literal constants.
            KaConstantAnnotationValueImpl(toKtConstantValue(), token)
        }
    }
}
private class ClassLiteralResolutionResult(val type: KaType, val classId: ClassId)
/**
 * Resolves a [KClassValue.Value.NormalClass] literal to a concrete type and class id.
 * Returns `null` when the class (or the `Array` class, for array literals) cannot be found.
 */
private fun resolveClassLiteral(value: KClassValue.Value.NormalClass, analysisContext: Fe10AnalysisContext): ClassLiteralResolutionResult? {
    var descriptor = analysisContext.resolveSession.moduleDescriptor.findClassifierAcrossModuleDependencies(value.classId)
    if (descriptor is TypeAliasDescriptor) {
        // Expand type aliases to their underlying class.
        descriptor = descriptor.classDescriptor
    }
    if (descriptor !is ClassDescriptor) {
        return null
    }
    // Generic non-array class literals are not supported in K1
    val typeArguments = descriptor.typeConstructor.parameters.map { StarProjectionImpl(it) }
    var type: KotlinType = TypeUtils.substituteProjectionsForParameters(descriptor, typeArguments)
    var classId = value.classId
    if (value.arrayDimensions > 0) {
        val arrayDescriptor = analysisContext.resolveSession.moduleDescriptor.findClassAcrossModuleDependencies(StandardClassIds.Array)
            ?: return null
        // Wrap the element type in `Array<...>` once per dimension.
        repeat(value.arrayDimensions) {
            type = TypeUtils.substituteParameters(arrayDescriptor, listOf(type))
        }
        classId = StandardClassIds.Array
    }
    return ClassLiteralResolutionResult(type.toKtType(analysisContext), classId)
}
/** The [CallableId] of this member, or `null` when it is nested in a local declaration or enum entry. */
internal val CallableMemberDescriptor.callableIdIfNotLocal: CallableId?
    get() = calculateCallableId(allowLocal = false)
/**
 * Computes the [CallableId] of this callable by walking up its containing declarations.
 *
 * @param allowLocal when `false`, returns `null` for callables inside local declarations or enum entries;
 *   when `true`, such containers are recorded in [CallableId.pathToLocal] instead.
 */
internal fun CallableMemberDescriptor.calculateCallableId(allowLocal: Boolean): CallableId? {
    // Synthetic Java properties reuse the id of their getter, with the property's own name.
    if (this is SyntheticJavaPropertyDescriptor) {
        return getMethod.calculateCallableId(allowLocal)?.copy(callableName = name)
    }
    var current: DeclarationDescriptor = containingDeclaration
    // Containers are collected innermost-first, hence the `asReversed()` calls below.
    val localName = mutableListOf<String>()
    val className = mutableListOf<String>()
    while (true) {
        when (current) {
            is PackageFragmentDescriptor -> {
                return CallableId(
                    packageName = current.fqName,
                    className = if (className.isNotEmpty()) FqName.fromSegments(className.asReversed()) else null,
                    callableName = name,
                    pathToLocal = if (localName.isNotEmpty()) FqName.fromSegments(localName.asReversed()) else null
                )
            }
            is ModuleDescriptor -> {
                // Reached the module without seeing a package fragment: fall back to the root package.
                return CallableId(
                    packageName = FqName.ROOT,
                    className = if (className.isNotEmpty()) FqName.fromSegments(className.asReversed()) else null,
                    callableName = name,
                    pathToLocal = if (localName.isNotEmpty()) FqName.fromSegments(localName.asReversed()) else null
                )
            }
            is ClassDescriptor -> {
                if (current.kind == ClassKind.ENUM_ENTRY) {
                    // Members of enum entries are treated as local.
                    if (!allowLocal) {
                        return null
                    }
                    localName += current.name.asString()
                } else {
                    className += current.name.asString()
                }
            }
            is PropertyAccessorDescriptor -> {} // Filter out property accessors
            is CallableDescriptor -> {
                // Nested inside another callable: this is a local declaration.
                if (!allowLocal) {
                    return null
                }
                localName += current.name.asString()
            }
        }
        current = current.containingDeclaration ?: return null
    }
}
/** Callable id of the underlying getter for synthetic properties; `null` for everything else. */
internal val PropertyDescriptor.getterCallableIdIfNotLocal: CallableId?
    get() = (this as? SyntheticPropertyDescriptor)?.getMethod?.callableIdIfNotLocal
/** Callable id of the underlying setter for synthetic properties; `null` when absent or not synthetic. */
internal val PropertyDescriptor.setterCallableIdIfNotLocal: CallableId?
    get() = (this as? SyntheticPropertyDescriptor)?.setMethod?.callableIdIfNotLocal
/** Returns the FE 1.0 descriptor backing [symbol], or `null` when the symbol is not descriptor-based. */
internal fun getSymbolDescriptor(symbol: KaSymbol): DeclarationDescriptor? {
    return when (symbol) {
        is KaFe10DescSymbol<*> -> symbol.descriptor
        is KaFe10PsiSymbol<*, *> -> symbol.descriptor
        is KaFe10DescSyntheticFieldSymbol -> symbol.descriptor
        else -> null
    }
}
/** The non-local [ClassId] of this classifier, built recursively from its container; `null` for local classifiers. */
internal val ClassifierDescriptor.classId: ClassId?
    get() {
        return when (val container = containingDeclaration) {
            is PackageFragmentDescriptor -> ClassId(container.fqName, name)
            is ClassifierDescriptorWithTypeParameters -> container.classId?.createNestedClassId(name)
            else -> null
        }
    }
/** A best-effort [ClassId]: local classifiers get a top-level id in their package, marked with `isLocal = true`. */
internal val ClassifierDescriptor.maybeLocalClassId: ClassId
    get() = classId ?: ClassId(containingPackage() ?: FqName.ROOT, FqName.topLevel(this.name), isLocal = true)
/**
 * Computes the supertypes exposed through the Analysis API: `Any`/`Nothing` have none,
 * and a class without an explicit class supertype gets `Any` prepended.
 */
internal fun ClassDescriptor.computeSymbolSupertypes(): Collection<KotlinType> {
    val ownClassId = this.classId
    if (ownClassId == StandardClassIds.Any || ownClassId == StandardClassIds.Nothing) {
        return emptyList()
    }
    val declaredSupertypes = typeConstructor.supertypes
    if (isInterfaceLike) {
        return declaredSupertypes
    }
    val inheritsClass = declaredSupertypes.any { (it.constructor.declarationDescriptor as? ClassDescriptor)?.kind == ClassKind.CLASS }
    return if (inheritsClass) declaredSupertypes else listOf(builtIns.anyType) + declaredSupertypes
}
/** Renders a fully-qualified signature string used to re-identify this callable from a symbol pointer. */
internal fun CallableMemberDescriptor.getSymbolPointerSignature(): String =
    DescriptorRenderer.FQ_NAMES_IN_TYPES.render(this)
/**
 * Builds a [KaInitializerValue] for a property: a constant value when the compiler evaluated one,
 * a non-constant marker otherwise, or `null` when there is no initializer at all.
 */
internal fun createKtInitializerValue(
    initializer: KtExpression?,
    propertyDescriptor: PropertyDescriptor?,
    analysisContext: Fe10AnalysisContext,
): KaInitializerValue? {
    // The descriptor-provided compile-time value wins over re-evaluating the expression.
    val compileTimeInitializer = propertyDescriptor?.compileTimeInitializer
    if (compileTimeInitializer != null) {
        return KaConstantInitializerValue(compileTimeInitializer.toKtConstantValue(), initializer)
    }
    if (initializer == null) return null
    val bindingContext = analysisContext.analyze(initializer)
    val evaluator = ConstantExpressionEvaluator.getConstant(initializer, bindingContext)
    if (evaluator != null) {
        val evaluated = evaluator.toConstantValue(propertyDescriptor?.type ?: TypeUtils.NO_EXPECTED_TYPE).toKtConstantValue()
        return KaConstantInitializerValue(evaluated, initializer)
    }
    return KaNonConstantInitializerValue(initializer)
}
/**
 * Converts an FE 1.0 [AnnotationDescriptor] into an Analysis API [KaAnnotation].
 * Argument conversion is deferred via [lazy].
 */
internal fun AnnotationDescriptor.toKaAnnotation(analysisContext: Fe10AnalysisContext): KaAnnotation {
    return KaAnnotationImpl(
        classId = classIdForAnnotation,
        psi = psi,
        useSiteTarget = useSiteTarget,
        lazyArguments = lazy { getKtNamedAnnotationArguments(analysisContext) },
        constructorSymbol = null,
        token = analysisContext.token
    )
}
private val AnnotationDescriptor.psi: KtCallElement? get() = (source as? PsiSourceElement)?.psi as? KtCallElement
internal val AnnotationDescriptor.classIdForAnnotation: ClassId? get() = annotationClass?.maybeLocalClassId
// The use-site target (e.g. `@field:`) is only recoverable from source-based (lazy) annotations.
internal val AnnotationDescriptor.useSiteTarget: AnnotationUseSiteTarget?
    get() = (this as? LazyAnnotationDescriptor)?.annotationEntry?.useSiteTarget?.getAnnotationUseSiteTarget()
/** Converts every named annotation argument into an Analysis API value, preserving order. */
internal fun AnnotationDescriptor.getKtNamedAnnotationArguments(analysisContext: Fe10AnalysisContext): List<KaNamedAnnotationValue> {
    return allValueArguments.entries.map { (argumentName, constant) ->
        KaBaseNamedAnnotationValue(argumentName, constant.toKaAnnotationValue(analysisContext))
    }
}
/** Builds [KaContextReceiver]s for each context receiver parameter of this callable. */
internal fun CallableDescriptor.createContextReceivers(
    analysisContext: Fe10AnalysisContext
): List<KaContextReceiver> = contextReceiverParameters.map { parameter -> createContextReceiver(parameter, analysisContext) }
/** Represents this callable's context receivers as context-parameter symbols. */
internal fun CallableDescriptor.createContextParameters(analysisContext: Fe10AnalysisContext): List<KaContextParameterSymbol> =
    contextReceiverParameters.map { receiver -> KaFe10DescContextReceiverBasedContextParameterSymbol(receiver, analysisContext) }
/** Builds [KaContextReceiver]s for each context receiver declared on this class. */
internal fun ClassDescriptor.createContextReceivers(
    analysisContext: Fe10AnalysisContext
): List<KaContextReceiver> = contextReceivers.map { receiver -> createContextReceiver(receiver, analysisContext) }
/** Converts a single context receiver parameter into a [KaBaseContextReceiver]. */
private fun createContextReceiver(
    contextReceiver: ReceiverParameterDescriptor,
    analysisContext: Fe10AnalysisContext
): KaBaseContextReceiver {
    val type = contextReceiver.value.type.toKtType(analysisContext)
    return KaBaseContextReceiver(
        type,
        // Prefer the explicit label; fall back to the receiver type's own symbol name.
        // NOTE(review): the unchecked cast assumes every context receiver value is an ImplicitContextReceiver — confirm.
        (contextReceiver.value as ImplicitContextReceiver).customLabelName ?: type.symbol?.name,
        analysisContext.token
    )
}
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.db.models import get_model
from socialoauth import socialsites
from socialoauth.utils import import_oauth_class
from socialoauth.exception import SocialAPIError
from .models import SocialUser
from .app_settings import (
SOCIALOAUTH_SITES,
SOCIAL_LOGIN_USER_INFO_MODEL,
SOCIAL_LOGIN_DONE_REDIRECT_URL,
SOCIAL_LOGIN_ERROR_REDIRECT_URL,
)
socialsites.config(SOCIALOAUTH_SITES)
def social_login_callback(request, sitename):
    """OAuth callback endpoint for a social site.

    Exchanges the ``code`` query parameter for an access token, creates or
    refreshes the matching SocialUser and its user-info row, stores the uid
    in the session, and redirects to the configured done/error URL.
    """
    authorization_code = request.GET.get('code', None)
    if not authorization_code:
        # The user probably denied the authorization request.
        return HttpResponseRedirect(SOCIAL_LOGIN_ERROR_REDIRECT_URL)
    oauth_client = import_oauth_class(socialsites[sitename])()
    try:
        oauth_client.get_access_token(authorization_code)
    except SocialAPIError:
        # see social_oauth example and docs
        return HttpResponseRedirect(SOCIAL_LOGIN_ERROR_REDIRECT_URL)
    user_info_model = get_model(*SOCIAL_LOGIN_USER_INFO_MODEL.split('.'))
    try:
        social_user = SocialUser.objects.get(
            site_uid=oauth_client.uid, site_id=oauth_client.site_id
        )
        # Known user: refresh the cached username and avatar.
        user_info_model.objects.filter(user_id=social_user.user_id).update(
            username=oauth_client.name, avatar=oauth_client.avatar
        )
    except SocialUser.DoesNotExist:
        # First login from this site uid: create both records.
        social_user = SocialUser.objects.create(
            site_uid=oauth_client.uid, site_id=oauth_client.site_id
        )
        user_info_model.objects.create(
            user_id=social_user.user_id,
            username=oauth_client.name,
            avatar=oauth_client.avatar
        )
    # Remember the uid in the session so the user is logged in automatically next time.
    request.session['uid'] = social_user.user_id
    return HttpResponseRedirect(SOCIAL_LOGIN_DONE_REDIRECT_URL)
#!/usr/bin/python2.6
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles test distribution and results upload to app engine."""
import base64
import json
import math
import random
import time
import urllib
import urllib2
import zlib
import blobstore_upload
import client_logging
# Define the constants
_BLOBSTORE_UPLOAD_RETRIES = 3  # attempts for the blobstore image upload
_PIECES_UPLOAD_RETRIES = 3  # attempts for each layout-table piece upload
_MAX_WAIT_TIME = 3  # base wait (seconds) used by the exponential backoff
_TEST_DISTRIBUTION_SERVER = 'http://YOUR_APPENGINE_SERVER_HERE'
_FETCH_TEST_URL = _TEST_DISTRIBUTION_SERVER + '/distributor/accept_work_item'
_FINISH_TEST_URL = _TEST_DISTRIBUTION_SERVER + '/distributor/finish_work_item'
_RESULTS_SERVER = 'http://YOUR_APPENGINE_SERVER_HERE'
_RESULTS_UPLOAD_URL = _RESULTS_SERVER + '/putdata'
_LOG_UPLOAD_URL = _RESULTS_SERVER + '/distributor/upload_client_log'
LOGGER_NAME = 'appengine_communicator'
# Initialize the logger for this module
logger = client_logging.GetLogger(LOGGER_NAME)
class CommunicationError(Exception):
  """Raised when communication with the distributor or results server fails."""
class AuthCookie(object):
  """A data object that contains cookie dictionaries used to authenticate.

  Attributes:
    domain: A string representing the domain to authenticate on.
    cookies: A list of dictionaries that define the cookies to add to the
      browser in order to authenticate for a webpage.
  """
  def __init__(self, domain, cookies):
    # Plain data holder; no validation is performed on the cookie dicts.
    self.domain = domain
    self.cookies = cookies
class TestCase(object):
  """A data object describing one test case handed out by the distributor.

  Attributes:
    url: A string indicating the URL to run for the test.
    start_time: A string indicating the start time for the test.
    config: A dictionary that specifies various configuration settings for
      the test.
    test_key: An integer representing the key that identifies this test.
    auth_cookie: An AuthCookie with authentication data for the test case,
      or None when no auth domain/cookies were supplied.
  """
  def __init__(self, url, start_time, config, test_key, auth_domain=None,
               auth_cookies=None):
    self.url = url
    self.start_time = start_time
    self.config = config
    self.test_key = test_key
    # Authentication data is only attached when both pieces are present.
    if auth_domain and auth_cookies:
      self.auth_cookie = AuthCookie(auth_domain, auth_cookies)
    else:
      self.auth_cookie = None
class AppEngineCommunicator(object):
  """Handles communication with the test distributor and results servers.

  Attributes:
    _token: A string representing the token to use to pull tests from the
      distributor.
    _useragent: A string representing the useragent of the browser under test.
    _instance_id: A string representing a unique identifier for the machine
      instance.
    _current_test_case: A TestCase object representing the current test case.
    _log_uploaded: A boolean indicating whether the log file has been uploaded.
  """
  def __init__(self, token, useragent, instance_id):
    # Set up the attributes
    self._token = token
    self._useragent = useragent
    self._instance_id = instance_id
    self._current_test_case = None
    self._log_uploaded = False
  # TODO(user): Move this function into a shared utility module.
  @staticmethod
  def ExponentialBackoff(attempt, max_wait_time=_MAX_WAIT_TIME):
    """Wait a time that increases exponentially with the attempt number.

    Args:
      attempt: The most recent attempt number (starting at 0).
      max_wait_time: An optional int that specifies the max base time to wait
        in seconds.
    """
    # Randomized jitter (0.5-1.0 factor) avoids synchronized retries across clients.
    sleep_time = math.pow(2, attempt) * random.uniform(0.5, 1.0) * max_wait_time
    time.sleep(sleep_time)
  def FetchTest(self):
    """Fetch a new test from the test distributor.

    This function will not prevent you from fetching another test if you have a
    current test case that hasn't been finished. The old test case will be over
    written by the new test case.

    Returns:
      A TestCase object describing the test case that was fetched. If there are
      no more tests to run, None is returned.

    Raises:
      CommunicationError: There is an error in fetching the test.
    """
    # Fetch the test case from the test distributor.
    try:
      data = urllib.urlencode({
          'tokens': self._token, 'useragent': urllib.quote(self._useragent),
          'instance_id': self._instance_id})
      url_page = urllib2.urlopen(_FETCH_TEST_URL, data)
    except urllib2.URLError:
      self._LogAndRaiseException('Failed to fetch a test from app engine.')
    # Process the data from the test distributor.
    self._current_test_case = None
    try:
      test_dictionary = json.loads(url_page.read())
      # Check if there is a test available.
      if test_dictionary:
        test_config = json.loads(test_dictionary['config'])
        auth_domain = None
        auth_cookies = None
        if 'auth_domain' in test_config:
          auth_domain = test_config['auth_domain']
        if 'auth_cookies' in test_config:
          auth_cookies = test_config['auth_cookies']
        # NOTE(review): the [19:-1] slice presumably strips a fixed-length wrapper
        # around the URL in 'data_str' -- confirm against the distributor's format.
        self._current_test_case = TestCase(
            test_dictionary['data_str'][19:-1], test_dictionary['start_time'],
            test_config, test_dictionary['key'], auth_domain=auth_domain,
            auth_cookies=auth_cookies)
    except ValueError:
      logger.exception('Could not process the data from the test distributor.')
    return self._current_test_case
  def FinishTest(self, result):
    """Acknowledge that the current test case has been finished.

    Args:
      result: A string indicating the result of executing the test case.

    Raises:
      CommunicationError: There is an error communicating with
        the test distributor.
    """
    # Make sure there is a current test case to finish.
    if not self._current_test_case:
      return
    try:
      data = urllib.urlencode({'key': self._current_test_case.test_key,
                               'result': result,
                               'instance_id': self._instance_id})
      urllib2.urlopen(_FINISH_TEST_URL, data)
      # Only clear the current test case once the server has acknowledged it.
      self._current_test_case = None
    except urllib2.URLError:
      self._LogAndRaiseException('Failed acknowledging that the test finished.')
  def _LogAndRaiseException(self, message):
    """Log the current exception being handled and raise a new exception.

    Args:
      message: A string indicating the message to log and use with the new
        exception.

    Raises:
      CommunicationError: This exception is always raised using the given
        message.
    """
    logger.exception(message)
    raise CommunicationError(message)
  def UploadResults(self, nodes_table, layout_table, dynamic_content_table,
                    png, channel=''):
    """Upload the test case results to the results server.

    Args:
      nodes_table: A list representing the node results from the test case.
      layout_table: A list representing the layout results from the test case.
      dynamic_content_table: A list representing the dynamic content results
        from the test case.
      png: A string representing the binary data for a png image.
      channel: An optional string representing the channel for the browser.

    Raises:
      CommunicationError: The initial upload communication failed.
    """
    # Make sure there is a current test case to upload results for.
    if not self._current_test_case:
      return
    # Format the results data for uploading.
    suite_info = {
        'date': self._current_test_case.start_time,
        'key': self._current_test_case.test_key,
        'refBrowser': self._current_test_case.config['refBrowser'],
        'refBrowserChannel': self._current_test_case.config['refBrowserChannel']
    }
    data_to_send = {
        'userAgent': self._useragent,
        'url': self._current_test_case.url,
        # The nodes table is the largest payload, so it is compressed before encoding.
        'nodesTable': base64.b64encode(
            zlib.compress(json.dumps(nodes_table), 9)),
        'dynamicContentTable': json.dumps(dynamic_content_table),
        'width': self._current_test_case.config['width'],
        'height': self._current_test_case.config['height'],
        'channel': channel,
        'suiteInfo': json.dumps(suite_info),
        'instance_id': self._instance_id
    }
    # Upload the initial data.
    try:
      initial_send = urllib2.urlopen(
          _RESULTS_UPLOAD_URL, urllib.urlencode(data_to_send))
    except urllib2.URLError:
      self._LogAndRaiseException('Failed on the initial results upload.')
    response = initial_send.read()
    if not response:
      self._LogAndRaiseException(
          'Initial results upload did not provide continuation data.')
    # The server replies with an upload key and the number of pieces it expects.
    response = json.loads(response)
    upload_key = response['key'].encode('ascii')
    num_pieces = int(response['nPieces'])
    layout_table_length = len(layout_table)
    logger.info('Uploading the image to blobstore with key "%s".', upload_key)
    for attempt in range(_BLOBSTORE_UPLOAD_RETRIES):
      try:
        blobstore_upload.UploadImageToBlobstore(upload_key, png)
        break
      except blobstore_upload.BlobstoreUploadError:
        logger.exception('Blobstore upload failed, attempt %d.', attempt+1)
        AppEngineCommunicator.ExponentialBackoff(attempt)
    # Send the layout table in the requested number of pieces.
    logger.info('Uploading remaining results in %d pieces.', num_pieces)
    n_rows_per_piece = int(math.ceil(layout_table_length / (num_pieces * 1.0)))
    start = 0
    end = n_rows_per_piece
    for i in range(num_pieces):
      data_pieces_to_send = {
          'key': upload_key,
          'layoutTable': json.dumps(layout_table[start:end]),
          'i': i,
          'instance_id': self._instance_id
      }
      for attempt in range(_PIECES_UPLOAD_RETRIES):
        try:
          urllib2.urlopen(_RESULTS_UPLOAD_URL,
                          urllib.urlencode(data_pieces_to_send))
          break
        except urllib2.URLError:
          logger.exception('Piece "%d" upload failed, attempt %d.',
                           i, attempt+1)
          AppEngineCommunicator.ExponentialBackoff(attempt)
      # Advance the sliding window over the layout table.
      start = end
      end = min(end+n_rows_per_piece, len(layout_table))
  def UploadLog(self, log):
    """Upload the test case results to the results server.

    Args:
      log: A string representing the client log to upload.
    """
    # Upload the log data if this is our first upload.
    if self._log_uploaded:
      return
    try:
      urllib2.urlopen(_LOG_UPLOAD_URL, urllib.urlencode(
          {'log': base64.b64encode(zlib.compress(json.dumps(log), 9)),
           'instance_id': self._instance_id}))
      self._log_uploaded = True
    # NOTE(review): bare `except` also swallows KeyboardInterrupt/SystemExit and hides
    # the original error; consider narrowing to urllib2.URLError.
    except:
      raise CommunicationError('Failed to upload the client log.')
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from .addresses import Address, SavedAddress, SavedAddressRole, SavedAddressStatus
from .taxes import CustomerTaxGroup, Tax, TaxClass
from .attributes import Attribute, AttributeType, AttributeVisibility
from .categories import Category, CategoryVisibility, CategoryStatus
from .counters import Counter, CounterType
from .contacts import Contact, ContactGroup, CompanyContact, PersonContact, AnonymousContact, Gender, get_person_contact
from .methods import ShippingMethod, PaymentMethod, MethodType, MethodStatus
from .manufacturers import Manufacturer
from .orders import Order, OrderStatus, OrderStatusRole, OrderLogEntry, PaymentStatus, ShippingStatus
from .order_lines import OrderLine, OrderLineTax, OrderLineType
from .payments import Payment
from .persistent_cache import PersistentCacheEntry
from .products import (
Product, ProductMode, StockBehavior, ProductCrossSellType, ShippingMode,
ProductType, ProductCrossSell, ProductAttribute
)
from .product_media import ProductMedia, ProductMediaKind
from .product_shops import ShopProduct, ProductVisibility
from .product_variation import (
ProductVariationLinkStatus, ProductVariationVariable, ProductVariationVariableValue, ProductVariationResult
)
from .product_packages import ProductPackageLink
from .shops import Shop, ShopStatus
from .shipments import Shipment, ShipmentProduct
from .suppliers import Supplier, SupplierType
from .supplied_products import SuppliedProduct
from .units import SalesUnit
__all__ = [
"Address",
"AnonymousContact",
"Attribute",
"AttributeType",
"AttributeVisibility",
"Category",
"CategoryStatus",
"CategoryVisibility",
"CompanyContact",
"Contact",
"ContactGroup",
"Counter",
"CounterType",
"CustomerTaxGroup",
"get_person_contact",
"Gender",
"Manufacturer",
"MethodStatus",
"MethodType",
"Order",
"OrderLine",
"OrderLineTax",
"OrderLineType",
"OrderLogEntry",
"OrderStatus",
"OrderStatusRole",
"Payment",
"PaymentMethod",
"PaymentStatus",
"PersistentCacheEntry",
"PersonContact",
"Product",
"Product",
"ProductAttribute",
"ProductCrossSell",
"ProductCrossSellType",
"ProductMedia",
"ProductMediaKind",
"ProductMode",
"ProductPackageLink",
"ProductType",
"ProductVariationLinkStatus",
"ProductVariationResult",
"ProductVariationVariable",
"ProductVariationVariableValue",
"ProductVisibility",
"SalesUnit",
"SavedAddress",
"SavedAddressRole",
"SavedAddressStatus",
"Shipment",
"ShipmentProduct",
"ShippingMethod",
"ShippingMode",
"ShippingStatus",
"Shop",
"ShopProduct",
"ShopStatus",
"StockBehavior",
"SuppliedProduct",
"Supplier",
"SupplierType",
"Tax",
"TaxClass",
] | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The iosxr_l3_interfaces class
It is in this file where the current configuration (as dict)
is compared to the provided configuration (as dict) and the command set
necessary to bring the current configuration to it's desired end-state is
created
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.network.common.cfg.base import ConfigBase
from ansible.module_utils.network.common.utils import to_list
from ansible.module_utils.network.iosxr.facts.facts import Facts
from ansible.module_utils.network.iosxr.utils.utils import normalize_interface, dict_to_set
from ansible.module_utils.network.iosxr.utils.utils import remove_command_from_config_list, add_command_to_config_list
from ansible.module_utils.network.iosxr.utils.utils import filter_dict_having_none_value, remove_duplicate_interface
from ansible.module_utils.network.iosxr.utils.utils import validate_n_expand_ipv4, validate_ipv6
class L3_Interfaces(ConfigBase):
    """
    The iosxr_l3_interfaces class

    Compares the current device configuration (facts) against the
    user-provided configuration and generates the IOS-XR command set
    needed to reach the desired end state.
    """

    # Legacy fact subsets requested from the Facts collector
    # (the '!' prefix excludes a subset).
    gather_subset = [
        '!all',
        '!min',
    ]

    # Resource-module facts this module operates on.
    gather_network_resources = [
        'l3_interfaces',
    ]

    def get_l3_interfaces_facts(self):
        """ Get the 'facts' (the current configuration)

        :rtype: A dictionary
        :returns: The current configuration as a dictionary
        """
        facts, _warnings = Facts(self._module).get_facts(self.gather_subset, self.gather_network_resources)
        l3_interfaces_facts = facts['ansible_network_resources'].get('l3_interfaces')
        if not l3_interfaces_facts:
            # Normalize "no facts" to an empty list for the callers below.
            return []
        return l3_interfaces_facts

    def execute_module(self):
        """ Execute the module

        :rtype: A dictionary
        :returns: The result from module execution, including 'changed',
                  'commands', 'before'/'after' facts and any warnings.
        """
        result = {'changed': False}
        commands = list()
        warnings = list()

        existing_l3_interfaces_facts = self.get_l3_interfaces_facts()
        commands.extend(self.set_config(existing_l3_interfaces_facts))
        if commands:
            # In check mode the commands are reported but not pushed.
            if not self._module.check_mode:
                self._connection.edit_config(commands)
            result['changed'] = True
        result['commands'] = commands

        # Re-gather facts so 'after' reflects the device post-change.
        changed_l3_interfaces_facts = self.get_l3_interfaces_facts()

        result['before'] = existing_l3_interfaces_facts
        if result['changed']:
            result['after'] = changed_l3_interfaces_facts
        result['warnings'] = warnings
        return result

    def set_config(self, existing_l3_interfaces_facts):
        """ Collect the configuration from the args passed to the module,
            collect the current configuration (as a dict from facts)

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        want = self._module.params['config']
        have = existing_l3_interfaces_facts
        resp = self.set_state(want, have)
        return to_list(resp)

    def set_state(self, want, have):
        """ Select the appropriate function based on the state provided

        :param want: the desired configuration as a dictionary
        :param have: the current configuration as a dictionary
        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        commands = []

        state = self._module.params['state']
        # 'deleted' is the only state that may legitimately run without
        # any desired configuration.
        if state in ('overridden', 'merged', 'replaced') and not want:
            self._module.fail_json(msg='value of config parameter must not be empty for state {0}'.format(state))

        if state == 'overridden':
            commands = self._state_overridden(want, have, self._module)
        elif state == 'deleted':
            commands = self._state_deleted(want, have)
        elif state == 'merged':
            commands = self._state_merged(want, have, self._module)
        elif state == 'replaced':
            commands = self._state_replaced(want, have, self._module)
        return commands

    def _state_replaced(self, want, have, module):
        """ The command generator when state is replaced

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        commands = []

        for interface in want:
            interface['name'] = normalize_interface(interface['name'])
            for each in have:
                if each['name'] == interface['name']:
                    break
            else:
                # for/else: no matching device interface, so this is a pure
                # addition — configure it from scratch and move on.
                commands.extend(self._set_config(interface, dict(), module))
                continue
            # Clear attributes present on the device but absent from want,
            # then apply the wanted attributes.
            have_dict = filter_dict_having_none_value(interface, each)
            commands.extend(self._clear_config(dict(), have_dict))
            commands.extend(self._set_config(interface, each, module))
        # Remove the duplicate interface call
        commands = remove_duplicate_interface(commands)

        return commands

    def _state_overridden(self, want, have, module):
        """ The command generator when state is overridden

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        commands = []

        # Track which wanted interfaces matched an existing one.
        not_in_have = set()
        in_have = set()
        for each in have:
            for interface in want:
                interface['name'] = normalize_interface(interface['name'])
                if each['name'] == interface['name']:
                    in_have.add(interface['name'])
                    break
                elif interface['name'] != each['name']:
                    not_in_have.add(interface['name'])
            else:
                # We didn't find a matching desired state, which means we can
                # pretend we received an empty desired state.
                interface = dict(name=each['name'])
                kwargs = {'want': interface, 'have': each}
                commands.extend(self._clear_config(**kwargs))
                continue
            have_dict = filter_dict_having_none_value(interface, each)
            commands.extend(self._clear_config(dict(), have_dict))
            commands.extend(self._set_config(interface, each, module))
        # Add the want interface that's not already configured in have interface
        for each in (not_in_have - in_have):
            for every in want:
                interface = 'interface {0}'.format(every['name'])
                if each and interface not in commands:
                    commands.extend(self._set_config(every, {}, module))
        # Remove the duplicate interface call
        commands = remove_duplicate_interface(commands)

        return commands

    def _state_merged(self, want, have, module):
        """ The command generator when state is merged

        :rtype: A list
        :returns: the commands necessary to merge the provided into
                  the current configuration
        """
        commands = []

        for interface in want:
            interface['name'] = normalize_interface(interface['name'])
            for each in have:
                if each['name'] == interface['name']:
                    break
            else:
                # New interface: nothing to merge with, configure directly.
                commands.extend(self._set_config(interface, dict(), module))
                continue
            commands.extend(self._set_config(interface, each, module))

        return commands

    def _state_deleted(self, want, have):
        """ The command generator when state is deleted

        :rtype: A list
        :returns: the commands necessary to remove the current configuration
                  of the provided objects
        """
        commands = []

        if want:
            # Delete only the interfaces named in want.
            for interface in want:
                interface['name'] = normalize_interface(interface['name'])
                for each in have:
                    if each['name'] == interface['name']:
                        break
                    # NOTE(review): substring match — presumably to tolerate
                    # abbreviated interface names; confirm intent.
                    elif interface['name'] in each['name']:
                        break
                else:
                    # Not present on the device: nothing to delete.
                    continue
                interface = dict(name=interface['name'])
                commands.extend(self._clear_config(interface, each))
        else:
            # No want given: wipe L3 config from every interface.
            for each in have:
                want = dict()
                commands.extend(self._clear_config(want, each))

        return commands

    def verify_diff_again(self, want, have):
        """
        Verify the IPV4 difference again as sometimes due to
        change in order of set, set difference may result into change,
        when there's actually no difference between want and have

        :param want: want_dict IPV4
        :param have: have_dict IPV4
        :return: diff (True if a genuine difference exists)
        """
        diff = False
        for each in want:
            each_want = dict(each)
            for every in have:
                every_have = dict(every)
                # A want entry differing from a have entry of the same
                # "shape" (same number of keys) counts as a real diff.
                if each_want.get('address') != every_have.get('address') and \
                        each_want.get('secondary') != every_have.get('secondary') and \
                        len(each_want.keys()) == len(every_have.keys()):
                    diff = True
                    break
                elif each_want.get('address') != every_have.get('address') and len(each_want.keys()) == len(
                        every_have.keys()):
                    diff = True
                    break
            if diff:
                break

        return diff

    def _set_config(self, want, have, module):
        # Set the interface config based on the want and have config
        commands = []
        interface = 'interface ' + want['name']

        # To handle L3 IPV4 configuration: expand/validate every non-DHCP
        # address in place before diffing.
        if want.get("ipv4"):
            for each in want.get("ipv4"):
                if each.get('address') != 'dhcp':
                    ip_addr_want = validate_n_expand_ipv4(module, each)
                    each['address'] = ip_addr_want

        # Get the diff b/w want and have
        want_dict = dict_to_set(want)
        have_dict = dict_to_set(have)

        # To handle L3 IPV4 configuration
        want_ipv4 = dict(want_dict).get('ipv4')
        have_ipv4 = dict(have_dict).get('ipv4')
        if want_ipv4:
            if have_ipv4:
                diff_ipv4 = set(want_ipv4) - set(dict(have_dict).get('ipv4'))
                if diff_ipv4:
                    # Set difference can report order-only changes; double-check
                    # before emitting commands.
                    diff_ipv4 = diff_ipv4 if self.verify_diff_again(want_ipv4, have_ipv4) else ()
            else:
                diff_ipv4 = set(want_ipv4)
            for each in diff_ipv4:
                ipv4_dict = dict(each)
                if ipv4_dict.get('address') != 'dhcp':
                    cmd = "ipv4 address {0}".format(ipv4_dict['address'])
                    if ipv4_dict.get("secondary"):
                        cmd += " secondary"
                    add_command_to_config_list(interface, cmd, commands)

        # To handle L3 IPV6 configuration
        want_ipv6 = dict(want_dict).get('ipv6')
        have_ipv6 = dict(have_dict).get('ipv6')
        if want_ipv6:
            if have_ipv6:
                diff_ipv6 = set(want_ipv6) - set(have_ipv6)
            else:
                diff_ipv6 = set(want_ipv6)
            for each in diff_ipv6:
                ipv6_dict = dict(each)
                validate_ipv6(ipv6_dict.get('address'), module)
                cmd = "ipv6 address {0}".format(ipv6_dict.get('address'))
                add_command_to_config_list(interface, cmd, commands)

        return commands

    def _clear_config(self, want, have):
        # Delete the interface config based on the want and have config
        count = 0
        commands = []
        if want.get('name'):
            interface = 'interface ' + want['name']
        else:
            interface = 'interface ' + have['name']

        # Remove secondary addresses the device has but want no longer marks
        # as secondary. NOTE(review): 'count' indexes want's ipv4 list in
        # lockstep with have's — assumes equal lengths; confirm upstream.
        if have.get('ipv4') and want.get('ipv4'):
            for each in have.get('ipv4'):
                if each.get('secondary') and not (want.get('ipv4')[count].get('secondary')):
                    cmd = 'ipv4 address {0} secondary'.format(each.get('address'))
                    remove_command_from_config_list(interface, cmd, commands)
                count += 1
        # Wipe address families entirely when want omits them.
        if have.get('ipv4') and not (want.get('ipv4')):
            remove_command_from_config_list(interface, 'ipv4 address', commands)
        if have.get('ipv6') and not (want.get('ipv6')):
            remove_command_from_config_list(interface, 'ipv6 address', commands)

        return commands
// Copyright 2017 The Cockroach Authors.
//
// Use of this software is governed by the CockroachDB Software License
// included in the /LICENSE file.
package sql
import "github.com/cockroachdb/cockroach/pkg/sql/catalog/colinfo"
// ReqOrdering is the ordering that must be preserved by an operator when it is
// distributed. It is used to configure DistSQL with the orderings it needs to
// maintain when joining streams.
//
// Note: this is a type alias (not a defined type), so ReqOrdering and
// colinfo.ColumnOrdering are fully interchangeable.
type ReqOrdering = colinfo.ColumnOrdering
// planReqOrdering describes known ordering information for the rows generated by
// this node. The ordering information includes columns the output is ordered
// by and columns for which we know all rows have the same value.
func planReqOrdering(plan planNode) ReqOrdering {
switch n := plan.(type) {
case *limitNode:
return planReqOrdering(n.input)
case *max1RowNode:
return planReqOrdering(n.input)
case *saveTableNode:
return planReqOrdering(n.input)
case *deleteNode:
if n.run.rowsNeeded {
return planReqOrdering(n.input)
}
case *deleteSwapNode:
if n.run.rowsNeeded {
return planReqOrdering(n.input)
}
case *filterNode:
return n.reqOrdering
case *groupNode:
return n.reqOrdering
case *distinctNode:
return n.reqOrdering
case *indexJoinNode:
return n.reqOrdering
case *windowNode:
// TODO: window partitions can be ordered if the source is ordered
// appropriately.
case *vectorSearchNode:
case *vectorMutationSearchNode:
// TODO(drewk,mw5h): vector partition search could pass through the input
// ordering.
case *joinNode:
return n.reqOrdering
case *unionNode:
return n.reqOrdering
case *insertNode, *insertFastPathNode:
// TODO(knz): RETURNING is ordered by the PK.
case *updateNode, *updateSwapNode, *upsertNode:
// After an update, the original order may have been destroyed.
// For example, if the PK is updated by a SET expression.
// So we can't assume any ordering.
//
// TODO(knz/radu): this can be refined by an analysis which
// determines whether the columns that participate in the ordering
// of the source are being updated. If they are not, the source
// ordering can be propagated.
case *scanNode:
return n.reqOrdering
case *ordinalityNode:
return n.reqOrdering
case *renderNode:
return n.reqOrdering
case *sortNode:
return n.ordering
case *topKNode:
return n.ordering
case *lookupJoinNode:
return n.reqOrdering
case *invertedJoinNode:
return n.reqOrdering
case *zigzagJoinNode:
return n.reqOrdering
}
return nil
} | go | github | https://github.com/cockroachdb/cockroach | pkg/sql/plan_ordering.go |
# Resolve the directory this script lives in so prelude.sh can be sourced
# regardless of the caller's working directory.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
. "$DIR/prelude.sh"

# Fail on the first error; echo each command as it runs.
set -o errexit
set -o verbose

cd src

# activate_venv (from prelude.sh) makes $python point at the project venv.
activate_venv

command_invocation="$python buildscripts/monitor_build_status/cli.py"
# NOTE(review): is_patch is presumably an Evergreen expansion; only mainline
# (non-patch) builds send notifications.
if [ "${is_patch}" != "true" ]; then
    command_invocation="$command_invocation --notify"
fi

# Echo the exact command for build-log debugging, then run it.
echo "Verbatim monitor_build_status invocation: ${command_invocation}"
eval "${command_invocation}"
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["ResponseInputFile"]
class ResponseInputFile(BaseModel):
    """A file input to the model."""

    # NOTE(review): file_data, file_id and file_url look like alternative ways
    # of supplying the same file — confirm mutual exclusivity against the
    # OpenAPI spec before relying on it.

    type: Literal["input_file"]
    """The type of the input item. Always `input_file`."""

    file_data: Optional[str] = None
    """The content of the file to be sent to the model."""

    file_id: Optional[str] = None
    """The ID of the file to be sent to the model."""

    file_url: Optional[str] = None
    """The URL of the file to be sent to the model."""

    filename: Optional[str] = None
    """The name of the file to be sent to the model."""
import numpy as np
def lhc_unif(XB, NS, XI=None, maxits=10):
    ''' XS = lhc_unif(XB,NS,XI=None,maxits=10):
    Latin Hypercube Sampling with uniform density.
    Draws maxits candidate designs and keeps the one that maximizes the
    minimum pairwise L2 distance. Accepts an array of points to respect
    while sampling (they count toward the distance criterion and are
    included in the returned array).

    Inputs:
        XB     - ndim x 2 array of [lower,upper] bounds
        NS     - number of new points to sample
        XI     = None  - ni x ndim array of initial points to respect
        maxits = 10    - number of candidate designs to try

    Outputs:
        XS - (ni+ns) x ndim array of sampled points
    '''
    XB = np.atleast_2d(XB)
    ND = XB.shape[0]

    # initial points to respect
    if XI is None:
        XI = np.empty([0, ND])
    else:
        XI = np.atleast_2d(XI)

    XO = []
    # -inf (instead of 0) guarantees the first candidate design is kept, so a
    # non-empty result is returned even if all pairwise distances are zero
    mindiff = -np.inf

    # maximize minimum distance over maxits random designs
    for _ in range(maxits):
        S = np.zeros([NS, ND])
        for i_d in range(ND):
            # one stratified column per dimension: a uniform offset inside
            # each of the NS equal-width bins, bins visited in random order.
            # BUGFIX: np.random.random(NS) has shape (NS,); the original
            # np.random.random([1, NS]) produced shape (1, NS), which cannot
            # broadcast into the column S[:, i_d].
            S[:, i_d] = (np.random.random(NS) + np.random.permutation(NS)) / NS
        # scale the unit-cube design into the requested bounds
        XS = S * (XB[:, 1] - XB[:, 0]) + XB[:, 0]

        # add initial points
        XX = np.vstack([XI, XS])

        # keep the design with the best (largest) minimum distance
        vecdiff = vec_dist(XX)[0]
        if vecdiff > mindiff:
            mindiff = vecdiff
            XO = XX

    return XO


def vec_dist(X, P=None):
    ''' calculates distance between points in matrix X
    with each other, or optionally to given point P

    Inputs:
        X - nk x ndim array of points
        P = None - 1 x ndim point; if given, distances are measured to P

    Returns:
        (dmin, dmax, D) - min distance, max distance, and either the
        nk x nk pairwise-distance matrix (P is None, self-distances on the
        diagonal excluded from dmin) or the nk x 1 distance vector to P
    '''
    # distance matrix among X
    if P is None:
        nK = X.shape[0]
        # pairwise differences via broadcasting: diff[i, j, :] = X[i] - X[j]
        diff = X[:, None, :] - X[None, :, :]
        D = np.sqrt(np.sum(diff ** 2, axis=2))
        # mask the zero self-distances on the diagonal before taking the min
        diag_inf = np.diag(np.full(nK, np.inf))
        dmin = np.min(D + diag_inf)
        dmax = np.max(D)
    # distance vector to P
    else:
        assert P.shape[0] == 1, 'P must be a horizontal vector'
        D = np.sqrt(np.sum((X - P) ** 2, axis=1))[:, None]
        dmin = D.min()
        dmax = D.max()

    return (dmin, dmax, D)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Frontends for the message extraction functionality."""
from datetime import datetime
from distutils import log
from distutils.cmd import Command
from distutils.errors import DistutilsOptionError, DistutilsSetupError
from locale import getpreferredencoding
import logging
from optparse import OptionParser
import os
import shutil
import sys
import tempfile
from babel import __version__ as VERSION
from babel import Locale, localedata
from babel.compat import RawConfigParser, StringIO, string_types, u
from babel.core import UnknownLocaleError
from babel.messages.catalog import Catalog
from babel.messages.extract import extract_from_dir, DEFAULT_KEYWORDS, \
DEFAULT_MAPPING
from babel.messages.mofile import write_mo
from babel.messages.pofile import read_po, write_po
from babel.util import odict, LOCALTZ
__all__ = ['CommandLineInterface', 'compile_catalog', 'extract_messages',
'init_catalog', 'check_message_extractors', 'update_catalog']
__docformat__ = 'restructuredtext en'
class compile_catalog(Command):
"""Catalog compilation command for use in ``setup.py`` scripts.
If correctly installed, this command is available to Setuptools-using
setup scripts automatically. For projects using plain old ``distutils``,
the command needs to be registered explicitly in ``setup.py``::
from babel.messages.frontend import compile_catalog
setup(
...
cmdclass = {'compile_catalog': compile_catalog}
)
:since: version 0.9
:see: `Integrating new distutils commands <http://docs.python.org/dist/node32.html>`_
:see: `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
"""
description = 'compile message catalogs to binary MO files'
user_options = [
('domain=', 'D',
"domain of PO file (default 'messages')"),
('directory=', 'd',
'path to base directory containing the catalogs'),
('input-file=', 'i',
'name of the input file'),
('output-file=', 'o',
"name of the output file (default "
"'<output_dir>/<locale>/LC_MESSAGES/<domain>.po')"),
('locale=', 'l',
'locale of the catalog to compile'),
('use-fuzzy', 'f',
'also include fuzzy translations'),
('statistics', None,
'print statistics about translations')
]
boolean_options = ['use-fuzzy', 'statistics']
def initialize_options(self):
self.domain = 'messages'
self.directory = None
self.input_file = None
self.output_file = None
self.locale = None
self.use_fuzzy = False
self.statistics = False
def finalize_options(self):
if not self.input_file and not self.directory:
raise DistutilsOptionError('you must specify either the input file '
'or the base directory')
if not self.output_file and not self.directory:
raise DistutilsOptionError('you must specify either the input file '
'or the base directory')
def run(self):
po_files = []
mo_files = []
if not self.input_file:
if self.locale:
po_files.append((self.locale,
os.path.join(self.directory, self.locale,
'LC_MESSAGES',
self.domain + '.po')))
mo_files.append(os.path.join(self.directory, self.locale,
'LC_MESSAGES',
self.domain + '.mo'))
else:
for locale in os.listdir(self.directory):
po_file = os.path.join(self.directory, locale,
'LC_MESSAGES', self.domain + '.po')
if os.path.exists(po_file):
po_files.append((locale, po_file))
mo_files.append(os.path.join(self.directory, locale,
'LC_MESSAGES',
self.domain + '.mo'))
else:
po_files.append((self.locale, self.input_file))
if self.output_file:
mo_files.append(self.output_file)
else:
mo_files.append(os.path.join(self.directory, self.locale,
'LC_MESSAGES',
self.domain + '.mo'))
if not po_files:
raise DistutilsOptionError('no message catalogs found')
for idx, (locale, po_file) in enumerate(po_files):
mo_file = mo_files[idx]
infile = open(po_file, 'r')
try:
catalog = read_po(infile, locale)
finally:
infile.close()
if self.statistics:
translated = 0
for message in list(catalog)[1:]:
if message.string:
translated +=1
percentage = 0
if len(catalog):
percentage = translated * 100 // len(catalog)
log.info('%d of %d messages (%d%%) translated in %r',
translated, len(catalog), percentage, po_file)
if catalog.fuzzy and not self.use_fuzzy:
log.warn('catalog %r is marked as fuzzy, skipping', po_file)
continue
for message, errors in catalog.check():
for error in errors:
log.error('error: %s:%d: %s', po_file, message.lineno,
error)
log.info('compiling catalog %r to %r', po_file, mo_file)
outfile = open(mo_file, 'wb')
try:
write_mo(outfile, catalog, use_fuzzy=self.use_fuzzy)
finally:
outfile.close()
class extract_messages(Command):
"""Message extraction command for use in ``setup.py`` scripts.
If correctly installed, this command is available to Setuptools-using
setup scripts automatically. For projects using plain old ``distutils``,
the command needs to be registered explicitly in ``setup.py``::
from babel.messages.frontend import extract_messages
setup(
...
cmdclass = {'extract_messages': extract_messages}
)
:see: `Integrating new distutils commands <http://docs.python.org/dist/node32.html>`_
:see: `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
"""
description = 'extract localizable strings from the project code'
user_options = [
('charset=', None,
'charset to use in the output file'),
('keywords=', 'k',
'space-separated list of keywords to look for in addition to the '
'defaults'),
('no-default-keywords', None,
'do not include the default keywords'),
('mapping-file=', 'F',
'path to the mapping configuration file'),
('no-location', None,
'do not include location comments with filename and line number'),
('omit-header', None,
'do not include msgid "" entry in header'),
('output-file=', 'o',
'name of the output file'),
('width=', 'w',
'set output line width (default 76)'),
('no-wrap', None,
'do not break long message lines, longer than the output line width, '
'into several lines'),
('sort-output', None,
'generate sorted output (default False)'),
('sort-by-file', None,
'sort output by file location (default False)'),
('msgid-bugs-address=', None,
'set report address for msgid'),
('copyright-holder=', None,
'set copyright holder in output'),
('add-comments=', 'c',
'place comment block with TAG (or those preceding keyword lines) in '
'output file. Seperate multiple TAGs with commas(,)'),
('strip-comments', None,
'strip the comment TAGs from the comments.'),
('input-dirs=', None,
'directories that should be scanned for messages'),
]
boolean_options = [
'no-default-keywords', 'no-location', 'omit-header', 'no-wrap',
'sort-output', 'sort-by-file', 'strip-comments'
]
def initialize_options(self):
self.charset = 'utf-8'
self.keywords = ''
self._keywords = DEFAULT_KEYWORDS.copy()
self.no_default_keywords = False
self.mapping_file = None
self.no_location = False
self.omit_header = False
self.output_file = None
self.input_dirs = None
self.width = None
self.no_wrap = False
self.sort_output = False
self.sort_by_file = False
self.msgid_bugs_address = None
self.copyright_holder = None
self.add_comments = None
self._add_comments = []
self.strip_comments = False
def finalize_options(self):
if self.no_default_keywords and not self.keywords:
raise DistutilsOptionError('you must specify new keywords if you '
'disable the default ones')
if self.no_default_keywords:
self._keywords = {}
if self.keywords:
self._keywords.update(parse_keywords(self.keywords.split()))
if not self.output_file:
raise DistutilsOptionError('no output file specified')
if self.no_wrap and self.width:
raise DistutilsOptionError("'--no-wrap' and '--width' are mutually "
"exclusive")
if not self.no_wrap and not self.width:
self.width = 76
elif self.width is not None:
self.width = int(self.width)
if self.sort_output and self.sort_by_file:
raise DistutilsOptionError("'--sort-output' and '--sort-by-file' "
"are mutually exclusive")
if not self.input_dirs:
self.input_dirs = list(dict.fromkeys([k.split('.',1)[0]
for k in self.distribution.packages
]).keys())
if self.add_comments:
self._add_comments = self.add_comments.split(',')
def run(self):
mappings = self._get_mappings()
outfile = open(self.output_file, 'wb')
try:
catalog = Catalog(project=self.distribution.get_name(),
version=self.distribution.get_version(),
msgid_bugs_address=self.msgid_bugs_address,
copyright_holder=self.copyright_holder,
charset=self.charset)
for dirname, (method_map, options_map) in mappings.items():
def callback(filename, method, options):
if method == 'ignore':
return
filepath = os.path.normpath(os.path.join(dirname, filename))
optstr = ''
if options:
optstr = ' (%s)' % ', '.join(['%s="%s"' % (k, v) for
k, v in options.items()])
log.info('extracting messages from %s%s', filepath, optstr)
extracted = extract_from_dir(dirname, method_map, options_map,
keywords=self._keywords,
comment_tags=self._add_comments,
callback=callback,
strip_comment_tags=
self.strip_comments)
for filename, lineno, message, comments in extracted:
filepath = os.path.normpath(os.path.join(dirname, filename))
catalog.add(message, None, [(filepath, lineno)],
auto_comments=comments)
log.info('writing PO template file to %s' % self.output_file)
write_po(outfile, catalog, width=self.width,
no_location=self.no_location,
omit_header=self.omit_header,
sort_output=self.sort_output,
sort_by_file=self.sort_by_file)
finally:
outfile.close()
def _get_mappings(self):
mappings = {}
if self.mapping_file:
fileobj = open(self.mapping_file, 'U')
try:
method_map, options_map = parse_mapping(fileobj)
for dirname in self.input_dirs:
mappings[dirname] = method_map, options_map
finally:
fileobj.close()
elif getattr(self.distribution, 'message_extractors', None):
message_extractors = self.distribution.message_extractors
for dirname, mapping in message_extractors.items():
if isinstance(mapping, string_types):
method_map, options_map = parse_mapping(StringIO(mapping))
else:
method_map, options_map = [], {}
for pattern, method, options in mapping:
method_map.append((pattern, method))
options_map[pattern] = options or {}
mappings[dirname] = method_map, options_map
else:
for dirname in self.input_dirs:
mappings[dirname] = DEFAULT_MAPPING, {}
return mappings
def check_message_extractors(dist, name, value):
"""Validate the ``message_extractors`` keyword argument to ``setup()``.
:param dist: the distutils/setuptools ``Distribution`` object
:param name: the name of the keyword argument (should always be
"message_extractors")
:param value: the value of the keyword argument
:raise `DistutilsSetupError`: if the value is not valid
:see: `Adding setup() arguments
<http://peak.telecommunity.com/DevCenter/setuptools#adding-setup-arguments>`_
"""
assert name == 'message_extractors'
if not isinstance(value, dict):
raise DistutilsSetupError('the value of the "message_extractors" '
'parameter must be a dictionary')
class init_catalog(Command):
"""New catalog initialization command for use in ``setup.py`` scripts.
If correctly installed, this command is available to Setuptools-using
setup scripts automatically. For projects using plain old ``distutils``,
the command needs to be registered explicitly in ``setup.py``::
from babel.messages.frontend import init_catalog
setup(
...
cmdclass = {'init_catalog': init_catalog}
)
:see: `Integrating new distutils commands <http://docs.python.org/dist/node32.html>`_
:see: `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
"""
description = 'create a new catalog based on a POT file'
user_options = [
('domain=', 'D',
"domain of PO file (default 'messages')"),
('input-file=', 'i',
'name of the input file'),
('output-dir=', 'd',
'path to output directory'),
('output-file=', 'o',
"name of the output file (default "
"'<output_dir>/<locale>/LC_MESSAGES/<domain>.po')"),
('locale=', 'l',
'locale for the new localized catalog'),
]
def initialize_options(self):
self.output_dir = None
self.output_file = None
self.input_file = None
self.locale = None
self.domain = 'messages'
def finalize_options(self):
if not self.input_file:
raise DistutilsOptionError('you must specify the input file')
if not self.locale:
raise DistutilsOptionError('you must provide a locale for the '
'new catalog')
try:
self._locale = Locale.parse(self.locale)
except UnknownLocaleError:
raise DistutilsOptionError(sys.exc_info()[1])
if not self.output_file and not self.output_dir:
raise DistutilsOptionError('you must specify the output directory')
if not self.output_file:
self.output_file = os.path.join(self.output_dir, self.locale,
'LC_MESSAGES', self.domain + '.po')
if not os.path.exists(os.path.dirname(self.output_file)):
os.makedirs(os.path.dirname(self.output_file))
def run(self):
log.info('creating catalog %r based on %r', self.output_file,
self.input_file)
infile = open(self.input_file, 'r')
try:
# Although reading from the catalog template, read_po must be fed
# the locale in order to correcly calculate plurals
catalog = read_po(infile, locale=self.locale)
finally:
infile.close()
catalog.locale = self._locale
catalog.fuzzy = False
outfile = open(self.output_file, 'wb')
try:
write_po(outfile, catalog)
finally:
outfile.close()
class update_catalog(Command):
    """Catalog merging command for use in ``setup.py`` scripts.
    If correctly installed, this command is available to Setuptools-using
    setup scripts automatically. For projects using plain old ``distutils``,
    the command needs to be registered explicitly in ``setup.py``::
        from babel.messages.frontend import update_catalog
        setup(
            ...
            cmdclass = {'update_catalog': update_catalog}
        )
    :since: version 0.9
    :see: `Integrating new distutils commands <http://docs.python.org/dist/node32.html>`_
    :see: `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_
    """
    # distutils command metadata
    description = 'update message catalogs from a POT file'
    user_options = [
        ('domain=', 'D',
         "domain of PO file (default 'messages')"),
        ('input-file=', 'i',
         'name of the input file'),
        ('output-dir=', 'd',
         'path to base directory containing the catalogs'),
        ('output-file=', 'o',
         "name of the output file (default "
         "'<output_dir>/<locale>/LC_MESSAGES/<domain>.po')"),
        ('locale=', 'l',
         'locale of the catalog to compile'),
        ('ignore-obsolete=', None,
         'whether to omit obsolete messages from the output'),
        ('no-fuzzy-matching', 'N',
         'do not use fuzzy matching'),
        ('previous', None,
         'keep previous msgids of translated messages')
    ]
    boolean_options = ['ignore_obsolete', 'no_fuzzy_matching', 'previous']
    def initialize_options(self):
        # Defaults for every supported option
        self.domain = 'messages'
        self.input_file = None
        self.output_dir = None
        self.output_file = None
        self.locale = None
        self.ignore_obsolete = False
        self.no_fuzzy_matching = False
        self.previous = False
    def finalize_options(self):
        # Validate the option combination before running
        if not self.input_file:
            raise DistutilsOptionError('you must specify the input file')
        if not self.output_file and not self.output_dir:
            raise DistutilsOptionError('you must specify the output file or '
                                       'directory')
        if self.output_file and not self.locale:
            raise DistutilsOptionError('you must specify the locale')
        if self.no_fuzzy_matching and self.previous:
            # --previous only makes sense together with fuzzy matching
            self.previous = False
    def run(self):
        """Merge the POT template into each selected PO catalog."""
        # Build the list of (locale, po_file) pairs to update: either the
        # explicit output file, one locale under output_dir, or every
        # locale directory found under output_dir.
        po_files = []
        if not self.output_file:
            if self.locale:
                po_files.append((self.locale,
                                 os.path.join(self.output_dir, self.locale,
                                              'LC_MESSAGES',
                                              self.domain + '.po')))
            else:
                for locale in os.listdir(self.output_dir):
                    po_file = os.path.join(self.output_dir, locale,
                                           'LC_MESSAGES',
                                           self.domain + '.po')
                    if os.path.exists(po_file):
                        po_files.append((locale, po_file))
        else:
            po_files.append((self.locale, self.output_file))
        domain = self.domain
        if not domain:
            # Fall back to the template's base filename as the domain
            domain = os.path.splitext(os.path.basename(self.input_file))[0]
        infile = open(self.input_file, 'U')
        try:
            template = read_po(infile)
        finally:
            infile.close()
        if not po_files:
            raise DistutilsOptionError('no message catalogs found')
        for locale, filename in po_files:
            log.info('updating catalog %r based on %r', filename,
                     self.input_file)
            infile = open(filename, 'U')
            try:
                catalog = read_po(infile, locale=locale, domain=domain)
            finally:
                infile.close()
            catalog.update(template, self.no_fuzzy_matching)
            # Write to a temp file in the same directory and swap it into
            # place, so a failed write cannot corrupt the existing catalog
            tmpname = os.path.join(os.path.dirname(filename),
                                   tempfile.gettempprefix() +
                                   os.path.basename(filename))
            # NOTE(review): opened in text mode 'w' while the other commands
            # pass 'wb' to write_po output files -- confirm this is intended.
            tmpfile = open(tmpname, 'w')
            try:
                try:
                    write_po(tmpfile, catalog,
                             ignore_obsolete=self.ignore_obsolete,
                             include_previous=self.previous)
                finally:
                    tmpfile.close()
            except:
                # Don't leave a half-written temp file behind
                os.remove(tmpname)
                raise
            try:
                os.rename(tmpname, filename)
            except OSError:
                # We're probably on Windows, which doesn't support atomic
                # renames, at least not through Python
                # If the error is in fact due to a permissions problem, that
                # same error is going to be raised from one of the following
                # operations
                os.remove(filename)
                shutil.copy(tmpname, filename)
                os.remove(tmpname)
class CommandLineInterface(object):
    """Command-line interface.
    This class provides a simple command-line interface to the message
    extraction and PO file generation functionality.
    """
    # Usage template, filled in with the sub-command name and its arguments
    usage = '%%prog %s [options] %s'
    version = '%%prog %s' % VERSION
    # Sub-command name -> one-line description; each name matches a method
    # of the same name on this class (see run() dispatch below)
    commands = {
        'compile': 'compile message catalogs to MO files',
        'extract': 'extract messages from source files and generate a POT file',
        'init': 'create new message catalogs from a POT file',
        'update': 'update existing message catalogs from a POT file'
    }
    def run(self, argv=sys.argv):
        """Main entry point of the command-line interface.
        :param argv: list of arguments passed on the command-line
        """
        self.parser = OptionParser(usage=self.usage % ('command', '[args]'),
                                   version=self.version)
        self.parser.disable_interspersed_args()
        self.parser.print_help = self._help
        self.parser.add_option('--list-locales', dest='list_locales',
                               action='store_true',
                               help="print all known locales and exit")
        self.parser.add_option('-v', '--verbose', action='store_const',
                               dest='loglevel', const=logging.DEBUG,
                               help='print as much as possible')
        self.parser.add_option('-q', '--quiet', action='store_const',
                               dest='loglevel', const=logging.ERROR,
                               help='print as little as possible')
        self.parser.set_defaults(list_locales=False, loglevel=logging.INFO)
        options, args = self.parser.parse_args(argv[1:])
        self._configure_logging(options.loglevel)
        if options.list_locales:
            identifiers = sorted(localedata.locale_identifiers())
            longest = max([len(identifier) for identifier in identifiers])
            format = u('%%-%ds %%s') % (longest + 1)
            for identifier in identifiers:
                locale = Locale.parse(identifier)
                output = format % (identifier, locale.english_name)
                print(output.encode(sys.stdout.encoding or
                                    getpreferredencoding() or
                                    'ascii', 'replace'))
            return 0
        if not args:
            self.parser.error('no valid command or option passed. '
                              'Try the -h/--help option for more information.')
        cmdname = args[0]
        if cmdname not in self.commands:
            self.parser.error('unknown command "%s"' % cmdname)
        # Dispatch to the method named after the sub-command
        return getattr(self, cmdname)(args[1:])
    def _configure_logging(self, loglevel):
        """Set up the shared 'babel' logger to emit at *loglevel*."""
        self.log = logging.getLogger('babel')
        self.log.setLevel(loglevel)
        # Don't add a new handler for every instance initialization (#227),
        # that would cause duplicated output when CommandLineInterface is
        # used as a normal Python class.
        if self.log.handlers:
            handler = self.log.handlers[0]
        else:
            handler = logging.StreamHandler()
            self.log.addHandler(handler)
        handler.setLevel(loglevel)
        formatter = logging.Formatter('%(message)s')
        handler.setFormatter(formatter)
    def _help(self):
        """Print option help followed by the list of sub-commands."""
        print(self.parser.format_help())
        print("commands:")
        longest = max([len(command) for command in self.commands])
        format = "  %%-%ds %%s" % max(8, longest + 1)
        commands = sorted(self.commands.items())
        for name, description in commands:
            print(format % (name, description))
    def compile(self, argv):
        """Subcommand for compiling a message catalog to a MO file.
        :param argv: the command arguments
        :since: version 0.9
        """
        parser = OptionParser(usage=self.usage % ('compile', ''),
                              description=self.commands['compile'])
        parser.add_option('--domain', '-D', dest='domain',
                          help="domain of MO and PO files (default '%default')")
        parser.add_option('--directory', '-d', dest='directory',
                          metavar='DIR', help='base directory of catalog files')
        parser.add_option('--locale', '-l', dest='locale', metavar='LOCALE',
                          help='locale of the catalog')
        parser.add_option('--input-file', '-i', dest='input_file',
                          metavar='FILE', help='name of the input file')
        parser.add_option('--output-file', '-o', dest='output_file',
                          metavar='FILE',
                          help="name of the output file (default "
                               "'<output_dir>/<locale>/LC_MESSAGES/"
                               "<domain>.mo')")
        parser.add_option('--use-fuzzy', '-f', dest='use_fuzzy',
                          action='store_true',
                          help='also include fuzzy translations (default '
                               '%default)')
        parser.add_option('--statistics', dest='statistics',
                          action='store_true',
                          help='print statistics about translations')
        parser.set_defaults(domain='messages', use_fuzzy=False,
                            compile_all=False, statistics=False)
        options, args = parser.parse_args(argv)
        # Collect (locale, po_file) inputs and matching mo_file outputs
        po_files = []
        mo_files = []
        if not options.input_file:
            if not options.directory:
                parser.error('you must specify either the input file or the '
                             'base directory')
            if options.locale:
                po_files.append((options.locale,
                                 os.path.join(options.directory,
                                              options.locale, 'LC_MESSAGES',
                                              options.domain + '.po')))
                mo_files.append(os.path.join(options.directory, options.locale,
                                             'LC_MESSAGES',
                                             options.domain + '.mo'))
            else:
                for locale in os.listdir(options.directory):
                    po_file = os.path.join(options.directory, locale,
                                           'LC_MESSAGES', options.domain + '.po')
                    if os.path.exists(po_file):
                        po_files.append((locale, po_file))
                        mo_files.append(os.path.join(options.directory, locale,
                                                     'LC_MESSAGES',
                                                     options.domain + '.mo'))
        else:
            po_files.append((options.locale, options.input_file))
            if options.output_file:
                mo_files.append(options.output_file)
            else:
                if not options.directory:
                    parser.error('you must specify either the input file or '
                                 'the base directory')
                mo_files.append(os.path.join(options.directory, options.locale,
                                             'LC_MESSAGES',
                                             options.domain + '.mo'))
        if not po_files:
            parser.error('no message catalogs found')
        for idx, (locale, po_file) in enumerate(po_files):
            mo_file = mo_files[idx]
            infile = open(po_file, 'r')
            try:
                catalog = read_po(infile, locale)
            finally:
                infile.close()
            if options.statistics:
                translated = 0
                # Skip the header entry at index 0
                for message in list(catalog)[1:]:
                    if message.string:
                        translated +=1
                percentage = 0
                if len(catalog):
                    percentage = translated * 100 // len(catalog)
                self.log.info("%d of %d messages (%d%%) translated in %r",
                              translated, len(catalog), percentage, po_file)
            if catalog.fuzzy and not options.use_fuzzy:
                self.log.warn('catalog %r is marked as fuzzy, skipping',
                              po_file)
                continue
            # Report catalog consistency problems, but still compile
            for message, errors in catalog.check():
                for error in errors:
                    self.log.error('error: %s:%d: %s', po_file, message.lineno,
                                   error)
            self.log.info('compiling catalog %r to %r', po_file, mo_file)
            outfile = open(mo_file, 'wb')
            try:
                write_mo(outfile, catalog, use_fuzzy=options.use_fuzzy)
            finally:
                outfile.close()
    def extract(self, argv):
        """Subcommand for extracting messages from source files and generating
        a POT file.
        :param argv: the command arguments
        """
        parser = OptionParser(usage=self.usage % ('extract', 'dir1 <dir2> ...'),
                              description=self.commands['extract'])
        parser.add_option('--charset', dest='charset',
                          help='charset to use in the output (default '
                               '"%default")')
        parser.add_option('-k', '--keyword', dest='keywords', action='append',
                          help='keywords to look for in addition to the '
                               'defaults. You can specify multiple -k flags on '
                               'the command line.')
        parser.add_option('--no-default-keywords', dest='no_default_keywords',
                          action='store_true',
                          help="do not include the default keywords")
        parser.add_option('--mapping', '-F', dest='mapping_file',
                          help='path to the extraction mapping file')
        parser.add_option('--no-location', dest='no_location',
                          action='store_true',
                          help='do not include location comments with filename '
                               'and line number')
        parser.add_option('--omit-header', dest='omit_header',
                          action='store_true',
                          help='do not include msgid "" entry in header')
        parser.add_option('-o', '--output', dest='output',
                          help='path to the output POT file')
        parser.add_option('-w', '--width', dest='width', type='int',
                          help="set output line width (default 76)")
        parser.add_option('--no-wrap', dest='no_wrap', action = 'store_true',
                          help='do not break long message lines, longer than '
                               'the output line width, into several lines')
        parser.add_option('--sort-output', dest='sort_output',
                          action='store_true',
                          help='generate sorted output (default False)')
        parser.add_option('--sort-by-file', dest='sort_by_file',
                          action='store_true',
                          help='sort output by file location (default False)')
        parser.add_option('--msgid-bugs-address', dest='msgid_bugs_address',
                          metavar='EMAIL@ADDRESS',
                          help='set report address for msgid')
        parser.add_option('--copyright-holder', dest='copyright_holder',
                          help='set copyright holder in output')
        parser.add_option('--project', dest='project',
                          help='set project name in output')
        parser.add_option('--version', dest='version',
                          help='set project version in output')
        parser.add_option('--add-comments', '-c', dest='comment_tags',
                          metavar='TAG', action='append',
                          help='place comment block with TAG (or those '
                               'preceding keyword lines) in output file. One '
                               'TAG per argument call')
        parser.add_option('--strip-comment-tags', '-s',
                          dest='strip_comment_tags', action='store_true',
                          help='Strip the comment tags from the comments.')
        parser.set_defaults(charset='utf-8', keywords=[],
                            no_default_keywords=False, no_location=False,
                            omit_header = False, width=None, no_wrap=False,
                            sort_output=False, sort_by_file=False,
                            comment_tags=[], strip_comment_tags=False)
        options, args = parser.parse_args(argv)
        if not args:
            parser.error('incorrect number of arguments')
        # No -o (or '-') means the POT goes to stdout
        if options.output not in (None, '-'):
            outfile = open(options.output, 'wb')
        else:
            outfile = sys.stdout
        keywords = DEFAULT_KEYWORDS.copy()
        if options.no_default_keywords:
            if not options.keywords:
                parser.error('you must specify new keywords if you disable the '
                             'default ones')
            keywords = {}
        if options.keywords:
            keywords.update(parse_keywords(options.keywords))
        if options.mapping_file:
            fileobj = open(options.mapping_file, 'U')
            try:
                method_map, options_map = parse_mapping(fileobj)
            finally:
                fileobj.close()
        else:
            method_map = DEFAULT_MAPPING
            options_map = {}
        if options.width and options.no_wrap:
            parser.error("'--no-wrap' and '--width' are mutually exclusive.")
        elif not options.width and not options.no_wrap:
            options.width = 76
        if options.sort_output and options.sort_by_file:
            parser.error("'--sort-output' and '--sort-by-file' are mutually "
                         "exclusive")
        try:
            catalog = Catalog(project=options.project,
                              version=options.version,
                              msgid_bugs_address=options.msgid_bugs_address,
                              copyright_holder=options.copyright_holder,
                              charset=options.charset)
            for dirname in args:
                if not os.path.isdir(dirname):
                    parser.error('%r is not a directory' % dirname)
                def callback(filename, method, options):
                    # Log every file just before it is processed
                    if method == 'ignore':
                        return
                    filepath = os.path.normpath(os.path.join(dirname, filename))
                    optstr = ''
                    if options:
                        optstr = ' (%s)' % ', '.join(['%s="%s"' % (k, v) for
                                                      k, v in options.items()])
                    self.log.info('extracting messages from %s%s', filepath,
                                  optstr)
                extracted = extract_from_dir(dirname, method_map, options_map,
                                             keywords, options.comment_tags,
                                             callback=callback,
                                             strip_comment_tags=
                                                 options.strip_comment_tags)
                for filename, lineno, message, comments in extracted:
                    filepath = os.path.normpath(os.path.join(dirname, filename))
                    catalog.add(message, None, [(filepath, lineno)],
                                auto_comments=comments)
            if options.output not in (None, '-'):
                self.log.info('writing PO template file to %s' % options.output)
            write_po(outfile, catalog, width=options.width,
                     no_location=options.no_location,
                     omit_header=options.omit_header,
                     sort_output=options.sort_output,
                     sort_by_file=options.sort_by_file)
        finally:
            if options.output:
                outfile.close()
    def init(self, argv):
        """Subcommand for creating new message catalogs from a template.
        :param argv: the command arguments
        """
        parser = OptionParser(usage=self.usage % ('init', ''),
                              description=self.commands['init'])
        parser.add_option('--domain', '-D', dest='domain',
                          help="domain of PO file (default '%default')")
        parser.add_option('--input-file', '-i', dest='input_file',
                          metavar='FILE', help='name of the input file')
        parser.add_option('--output-dir', '-d', dest='output_dir',
                          metavar='DIR', help='path to output directory')
        parser.add_option('--output-file', '-o', dest='output_file',
                          metavar='FILE',
                          help="name of the output file (default "
                               "'<output_dir>/<locale>/LC_MESSAGES/"
                               "<domain>.po')")
        parser.add_option('--locale', '-l', dest='locale', metavar='LOCALE',
                          help='locale for the new localized catalog')
        parser.set_defaults(domain='messages')
        options, args = parser.parse_args(argv)
        if not options.locale:
            parser.error('you must provide a locale for the new catalog')
        try:
            locale = Locale.parse(options.locale)
        except UnknownLocaleError:
            parser.error(sys.exc_info()[1])
        if not options.input_file:
            parser.error('you must specify the input file')
        if not options.output_file and not options.output_dir:
            parser.error('you must specify the output file or directory')
        if not options.output_file:
            options.output_file = os.path.join(options.output_dir,
                                               options.locale, 'LC_MESSAGES',
                                               options.domain + '.po')
        if not os.path.exists(os.path.dirname(options.output_file)):
            os.makedirs(os.path.dirname(options.output_file))
        infile = open(options.input_file, 'r')
        try:
            # Although reading from the catalog template, read_po must be fed
            # the locale in order to correctly calculate plurals
            catalog = read_po(infile, locale=options.locale)
        finally:
            infile.close()
        catalog.locale = locale
        catalog.revision_date = datetime.now(LOCALTZ)
        self.log.info('creating catalog %r based on %r', options.output_file,
                      options.input_file)
        outfile = open(options.output_file, 'wb')
        try:
            write_po(outfile, catalog)
        finally:
            outfile.close()
    def update(self, argv):
        """Subcommand for updating existing message catalogs from a template.
        :param argv: the command arguments
        :since: version 0.9
        """
        parser = OptionParser(usage=self.usage % ('update', ''),
                              description=self.commands['update'])
        parser.add_option('--domain', '-D', dest='domain',
                          help="domain of PO file (default '%default')")
        parser.add_option('--input-file', '-i', dest='input_file',
                          metavar='FILE', help='name of the input file')
        parser.add_option('--output-dir', '-d', dest='output_dir',
                          metavar='DIR', help='path to output directory')
        parser.add_option('--output-file', '-o', dest='output_file',
                          metavar='FILE',
                          help="name of the output file (default "
                               "'<output_dir>/<locale>/LC_MESSAGES/"
                               "<domain>.po')")
        parser.add_option('--locale', '-l', dest='locale', metavar='LOCALE',
                          help='locale of the translations catalog')
        parser.add_option('--ignore-obsolete', dest='ignore_obsolete',
                          action='store_true',
                          help='do not include obsolete messages in the output '
                               '(default %default)')
        parser.add_option('--no-fuzzy-matching', '-N', dest='no_fuzzy_matching',
                          action='store_true',
                          help='do not use fuzzy matching (default %default)')
        parser.add_option('--previous', dest='previous', action='store_true',
                          help='keep previous msgids of translated messages '
                               '(default %default)')
        parser.set_defaults(domain='messages', ignore_obsolete=False,
                            no_fuzzy_matching=False, previous=False)
        options, args = parser.parse_args(argv)
        if not options.input_file:
            parser.error('you must specify the input file')
        if not options.output_file and not options.output_dir:
            parser.error('you must specify the output file or directory')
        if options.output_file and not options.locale:
            parser.error('you must specify the locale')
        if options.no_fuzzy_matching and options.previous:
            # --previous only makes sense together with fuzzy matching
            options.previous = False
        # Build the list of (locale, po_file) pairs to update
        po_files = []
        if not options.output_file:
            if options.locale:
                po_files.append((options.locale,
                                 os.path.join(options.output_dir,
                                              options.locale, 'LC_MESSAGES',
                                              options.domain + '.po')))
            else:
                for locale in os.listdir(options.output_dir):
                    po_file = os.path.join(options.output_dir, locale,
                                           'LC_MESSAGES',
                                           options.domain + '.po')
                    if os.path.exists(po_file):
                        po_files.append((locale, po_file))
        else:
            po_files.append((options.locale, options.output_file))
        domain = options.domain
        if not domain:
            # Fall back to the template's base filename as the domain
            domain = os.path.splitext(os.path.basename(options.input_file))[0]
        infile = open(options.input_file, 'U')
        try:
            template = read_po(infile)
        finally:
            infile.close()
        if not po_files:
            parser.error('no message catalogs found')
        for locale, filename in po_files:
            self.log.info('updating catalog %r based on %r', filename,
                          options.input_file)
            infile = open(filename, 'U')
            try:
                catalog = read_po(infile, locale=locale, domain=domain)
            finally:
                infile.close()
            catalog.update(template, options.no_fuzzy_matching)
            # Write to a temp file next to the target and swap it into place,
            # so a failed write cannot corrupt the existing catalog
            tmpname = os.path.join(os.path.dirname(filename),
                                   tempfile.gettempprefix() +
                                   os.path.basename(filename))
            tmpfile = open(tmpname, 'w')
            try:
                try:
                    write_po(tmpfile, catalog,
                             ignore_obsolete=options.ignore_obsolete,
                             include_previous=options.previous)
                finally:
                    tmpfile.close()
            except:
                # Don't leave a half-written temp file behind
                os.remove(tmpname)
                raise
            try:
                os.rename(tmpname, filename)
            except OSError:
                # We're probably on Windows, which doesn't support atomic
                # renames, at least not through Python
                # If the error is in fact due to a permissions problem, that
                # same error is going to be raised from one of the following
                # operations
                os.remove(filename)
                shutil.copy(tmpname, filename)
                os.remove(tmpname)
def main():
    """Console entry point: run the message command-line interface."""
    cli = CommandLineInterface()
    return cli.run(sys.argv)
def parse_mapping(fileobj, filename=None):
    """Parse an extraction method mapping from a file-like object.
    >>> buf = StringIO('''
    ... [extractors]
    ... custom = mypackage.module:myfunc
    ...
    ... # Python source files
    ... [python: **.py]
    ...
    ... # Genshi templates
    ... [genshi: **/templates/**.html]
    ... include_attrs =
    ... [genshi: **/templates/**.txt]
    ... template_class = genshi.template:TextTemplate
    ... encoding = latin-1
    ...
    ... # Some custom extractor
    ... [custom: **/custom/*.*]
    ... ''')
    >>> method_map, options_map = parse_mapping(buf)
    >>> len(method_map)
    4
    >>> method_map[0]
    ('**.py', 'python')
    >>> options_map['**.py']
    {}
    >>> method_map[1]
    ('**/templates/**.html', 'genshi')
    >>> options_map['**/templates/**.html']['include_attrs']
    ''
    >>> method_map[2]
    ('**/templates/**.txt', 'genshi')
    >>> options_map['**/templates/**.txt']['template_class']
    'genshi.template:TextTemplate'
    >>> options_map['**/templates/**.txt']['encoding']
    'latin-1'
    >>> method_map[3]
    ('**/custom/*.*', 'mypackage.module:myfunc')
    >>> options_map['**/custom/*.*']
    {}
    :param fileobj: a readable file-like object containing the configuration
                    text to parse
    :return: a `(method_map, options_map)` tuple
    :rtype: `tuple`
    :see: `extract_from_directory`
    """
    extractors = {}
    method_map = []
    options_map = {}
    parser = RawConfigParser()
    parser._sections = odict(parser._sections) # We need ordered sections
    parser.readfp(fileobj, filename)
    for section in parser.sections():
        if section == 'extractors':
            # The special [extractors] section declares method aliases
            extractors = dict(parser.items(section))
            continue
        # Every other section header has the form "method: pattern"
        method, pattern = [piece.strip() for piece in section.split(':', 1)]
        method_map.append((pattern, method))
        options_map[pattern] = dict(parser.items(section))
    if extractors:
        # Resolve method aliases to their fully-qualified callables
        method_map = [(pattern, extractors.get(method, method))
                      for pattern, method in method_map]
    return (method_map, options_map)
def parse_keywords(strings=()):
    """Parse keyword specifications from the given list of strings.

    Each string is either a bare function name, mapped to ``None`` (meaning
    all arguments are considered messages), or ``funcname:idx1,idx2,...``
    mapping the name to a tuple of 1-based argument indices. The first
    specification of a name wins; later duplicates are ignored.

    >>> kw = sorted(parse_keywords(['_', 'dgettext:2', 'dngettext:2,3']).items())
    >>> for keyword, indices in sorted(kw):
    ...     print((keyword, indices))
    ('_', None)
    ('dgettext', (2,))
    ('dngettext', (2, 3))

    :param strings: the keyword specifications to parse
    :return: mapping of function name to argument-index tuple (or ``None``)
    """
    # Default is an immutable tuple instead of the mutable [] to avoid the
    # shared-mutable-default-argument pitfall.
    keywords = {}
    for string in strings:
        if ':' in string:
            funcname, indices = string.split(':')
        else:
            funcname, indices = string, None
        if funcname not in keywords:
            if indices:
                indices = tuple(int(x) for x in indices.split(','))
            keywords[funcname] = indices
    return keywords
if __name__ == '__main__':
    # Allow invoking this module directly as a script.
    main()
#
# torrent.py
#
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
"""Internal Torrent class"""
import os
import time
import logging
from urllib import unquote
from urlparse import urlparse
from deluge._libtorrent import lt
import deluge.common
import deluge.component as component
from deluge.configmanager import ConfigManager, get_config_dir
from deluge.event import *
TORRENT_STATE = deluge.common.TORRENT_STATE
log = logging.getLogger(__name__)
def sanitize_filepath(filepath, folder=False):
    """
    Returns a sanitized filepath to pass to libotorrent rename_file().
    The filepath will have backslashes substituted along with whitespace
    padding and duplicate slashes stripped. If `folder` is True a trailing
    slash is appended to the returned filepath.
    """
    def _scrub(component):
        # Strip padding; a component made up only of dots (e.g. "..")
        # is dropped entirely.
        component = component.strip()
        return '' if component.replace('.', '') == '' else component

    if '\\' in filepath or '/' in filepath:
        components = filepath.replace('\\', '/').split('/')
        cleaned = '/'.join(c for c in map(_scrub, components) if c)
    else:
        cleaned = _scrub(filepath)
    return cleaned + '/' if folder is True else cleaned
class TorrentOptions(dict):
    """Dict of per-torrent options seeded from the global core config.

    Each per-torrent option key is initialized from its corresponding
    setting in core.conf (see options_conf_map below).
    """
    def __init__(self):
        config = ConfigManager("core.conf").config
        # Maps per-torrent option keys to their core.conf setting names
        options_conf_map = {
            "max_connections": "max_connections_per_torrent",
            "max_upload_slots": "max_upload_slots_per_torrent",
            "max_upload_speed": "max_upload_speed_per_torrent",
            "max_download_speed": "max_download_speed_per_torrent",
            "prioritize_first_last_pieces": "prioritize_first_last_pieces",
            "sequential_download": "sequential_download",
            "compact_allocation": "compact_allocation",
            "download_location": "download_location",
            "auto_managed": "auto_managed",
            "stop_at_ratio": "stop_seed_at_ratio",
            "stop_ratio": "stop_seed_ratio",
            "remove_at_ratio": "remove_seed_at_ratio",
            "move_completed": "move_completed",
            "move_completed_path": "move_completed_path",
            "add_paused": "add_paused",
            "shared": "shared"
        }
        for opt_k, conf_k in options_conf_map.iteritems():
            self[opt_k] = config[conf_k]
        # Per-file download priorities; populated later by the Torrent
        self["file_priorities"] = []
        # Renamed-file mapping; populated later as files are renamed
        self["mapped_files"] = {}
class Torrent(object):
"""Torrent holds information about torrents added to the libtorrent session.
"""
def __init__(self, handle, options, state=None, filename=None, magnet=None, owner=None):
    """Wrap a libtorrent handle and hold all per-torrent state and options.

    :param handle: the libtorrent torrent_handle for this torrent
    :param options: dict of options to apply (see TorrentOptions)
    :param state: previously saved state to restore, or None for a new add
    :param filename: the .torrent filename; defaults to the info-hash
    :param magnet: the magnet URI used to add this torrent, if any
    :param owner: the owning account (used only when no saved state)
    """
    log.debug("Creating torrent object %s", str(handle.info_hash()))
    # Get the core config
    self.config = ConfigManager("core.conf")
    self.rpcserver = component.get("RPCServer")
    # This dict holds previous status dicts returned for this torrent
    # We use this to return dicts that only contain changes from the previous
    # {session_id: status_dict, ...}
    self.prev_status = {}
    from twisted.internet.task import LoopingCall
    # Periodically drop cached status for sessions that went away
    self.prev_status_cleanup_loop = LoopingCall(self.cleanup_prev_status)
    self.prev_status_cleanup_loop.start(10)
    # Set the libtorrent handle
    self.handle = handle
    # Set the torrent_id for this torrent
    self.torrent_id = str(handle.info_hash())
    # Let's us know if we're waiting on a lt alert
    self.waiting_on_resume_data = False
    # Keep a list of file indexes we're waiting for file_rename alerts on
    # This also includes the old_folder and new_folder to know what signal to send
    # This is so we can send one folder_renamed signal instead of multiple
    # file_renamed signals.
    # [(old_folder, new_folder, [*indexes]), ...]
    self.waiting_on_folder_rename = []
    # We store the filename just in case we need to make a copy of the torrentfile
    if not filename:
        # If no filename was provided, then just use the infohash
        filename = self.torrent_id
    self.filename = filename
    # Store the magnet uri used to add this torrent if available
    self.magnet = magnet
    # Holds status info so that we don't need to keep getting it from lt
    self.status = self.handle.status()
    try:
        self.torrent_info = self.handle.get_torrent_info()
    except RuntimeError:
        # Metadata not available yet (e.g. a magnet-link add)
        self.torrent_info = None
    # Default total_uploaded to 0, this may be changed by the state
    self.total_uploaded = 0
    # Set the default options
    self.options = TorrentOptions()
    self.options.update(options)
    # We need to keep track if the torrent is finished in the state to prevent
    # some weird things on state load.
    self.is_finished = False
    # Load values from state if we have it
    if state:
        # This is for saving the total uploaded between sessions
        self.total_uploaded = state.total_uploaded
        # Set the trackers
        self.set_trackers(state.trackers)
        # Set the filename
        self.filename = state.filename
        self.is_finished = state.is_finished
    else:
        # Tracker list
        self.trackers = []
        # Create a list of trackers
        for value in self.handle.trackers():
            if lt.version_minor < 15:
                # Older libtorrent returns announce_entry objects; convert
                # them to plain dicts for uniform handling
                tracker = {}
                tracker["url"] = value.url
                tracker["tier"] = value.tier
            else:
                tracker = value
            self.trackers.append(tracker)
    # Various torrent options
    self.handle.resolve_countries(True)
    self.set_options(self.options)
    # Status message holds error info about the torrent
    self.statusmsg = "OK"
    # The torrents state
    self.update_state()
    # The tracker status
    self.tracker_status = ""
    # This gets updated when get_tracker_host is called
    self.tracker_host = None
    if state:
        self.time_added = state.time_added
    else:
        self.time_added = time.time()
    # Keep track of the owner
    if state:
        self.owner = state.owner
    else:
        self.owner = owner
    # Keep track of last seen complete
    if state:
        self._last_seen_complete = state.last_seen_complete or 0.0
    else:
        self._last_seen_complete = 0.0
    # Keep track if we're forcing a recheck of the torrent so that we can
    # re-pause it after its done if necessary
    self.forcing_recheck = False
    self.forcing_recheck_paused = False
    log.debug("Torrent object created.")
## Options methods ##
def set_options(self, options):
    """Apply a dict of torrent options.

    Options with a dedicated setter (see OPTIONS_FUNCS) are applied through
    it; all options, handled or not, are then stored in ``self.options``.

    :param options: mapping of option name to new value
    """
    OPTIONS_FUNCS = {
        # Functions used for setting options
        "auto_managed": self.set_auto_managed,
        "download_location": self.set_save_path,
        "file_priorities": self.set_file_priorities,
        "max_connections": self.handle.set_max_connections,
        "max_download_speed": self.set_max_download_speed,
        "max_upload_slots": self.handle.set_max_uploads,
        "max_upload_speed": self.set_max_upload_speed,
        "prioritize_first_last_pieces": self.set_prioritize_first_last,
        "sequential_download": self.set_sequential_download
    }
    for (key, value) in options.items():
        # `in` instead of the deprecated dict.has_key() (removed in Python 3)
        if key in OPTIONS_FUNCS:
            OPTIONS_FUNCS[key](value)
    self.options.update(options)
def get_options(self):
    """Return the dict of this torrent's current options."""
    return self.options
def get_name(self):
    """Return a displayable name for the torrent.

    Preference order: the top-level path component from the metadata's
    first file, then the metadata name, then the "dn" (display name)
    parameter of the magnet URI, and finally the info-hash string.
    """
    if self.handle.has_metadata():
        # Use the shared top-level folder name when there is one
        name = self.torrent_info.file_at(0).path.split("/", 1)[0]
        if not name:
            name = self.torrent_info.name()
        try:
            return name.decode("utf8", "ignore")
        except UnicodeDecodeError:
            return name
    elif self.magnet:
        try:
            keys = dict([k.split('=') for k in self.magnet.split('?')[-1].split('&')])
            name = keys.get('dn')
            if not name:
                return self.torrent_id
            name = unquote(name).replace('+', ' ')
            try:
                return name.decode("utf8", "ignore")
            except UnicodeDecodeError:
                return name
        except Exception:
            # Malformed magnet URI; narrowed from a bare `except:` so that
            # SystemExit/KeyboardInterrupt are no longer swallowed.
            pass
    return self.torrent_id
def set_owner(self, account):
    """Set the account that owns this torrent."""
    self.owner = account
def set_max_connections(self, max_connections):
    """Cap the number of peer connections this torrent may use."""
    limit = int(max_connections)
    self.options["max_connections"] = limit
    self.handle.set_max_connections(max_connections)
def set_max_upload_slots(self, max_slots):
    """Cap the number of upload slots (unchoked peers) for this torrent."""
    slots = int(max_slots)
    self.options["max_upload_slots"] = slots
    self.handle.set_max_uploads(max_slots)
def set_max_upload_speed(self, m_up_speed):
    """Set the upload rate limit in KiB/s; a negative value means unlimited."""
    self.options["max_upload_speed"] = m_up_speed
    # libtorrent expects bytes/s, with -1 meaning no limit
    limit = -1 if m_up_speed < 0 else int(m_up_speed * 1024)
    self.handle.set_upload_limit(limit)
def set_max_download_speed(self, m_down_speed):
    """Set the download rate limit in KiB/s; a negative value means unlimited."""
    self.options["max_download_speed"] = m_down_speed
    # libtorrent expects bytes/s, with -1 meaning no limit
    limit = -1 if m_down_speed < 0 else int(m_down_speed * 1024)
    self.handle.set_download_limit(limit)
def set_prioritize_first_last(self, prioritize):
    """Raise (or reset) the piece priority of each file's first/last pieces.

    :param prioritize: if truthy, boost the leading/trailing pieces of
                       every file; otherwise reset them to normal priority
    """
    self.options["prioritize_first_last_pieces"] = prioritize
    if self.handle.has_metadata():
        if self.options["compact_allocation"]:
            # Piece priorities are not honoured with compact allocation
            log.debug("Setting first/last priority with compact "
                      "allocation does not work!")
            return
        # Map each file path to the list of piece indexes that touch it
        paths = {}
        ti = self.handle.get_torrent_info()
        for n in range(ti.num_pieces()):
            slices = ti.map_block(n, 0, ti.piece_size(n))
            for slice in slices:
                fe = ti.file_at(slice.file_index)
                paths.setdefault(fe.path, []).append(n)
        priorities = self.handle.piece_priorities()
        for pieces in paths.itervalues():
            # NOTE(review): this is 200 // len(pieces), which *shrinks* as
            # the file grows; verify it was not meant to be 2% of
            # len(pieces) (i.e. len(pieces)*2/100).
            two_percent = 2*100/len(pieces)
            for piece in pieces[:two_percent]+pieces[-two_percent:]:
                # presumably 7 = highest piece priority, 1 = normal -- per
                # libtorrent's priority scale; confirm against lt docs
                priorities[piece] = prioritize and 7 or 1
        self.handle.prioritize_pieces(priorities)
def set_sequential_download(self, set_sequencial):
    """Enable or disable sequential (in-order) piece downloading."""
    value = set_sequencial
    self.options["sequential_download"] = value
    self.handle.set_sequential_download(value)
def set_auto_managed(self, auto_managed):
    """Turn libtorrent auto-management on or off for this torrent.

    The setting is recorded unconditionally, but not pushed to libtorrent
    while the torrent is user-paused (paused and not auto-managed).
    """
    self.options["auto_managed"] = auto_managed
    # De Morgan form of: not (is_paused() and not is_auto_managed())
    if not self.handle.is_paused() or self.handle.is_auto_managed():
        self.handle.auto_managed(auto_managed)
        self.update_state()
def set_stop_ratio(self, stop_ratio):
    """Set the seed ratio at which this torrent should stop seeding."""
    self.options["stop_ratio"] = stop_ratio
def set_stop_at_ratio(self, stop_at_ratio):
    """Enable/disable stopping this torrent when its seed ratio is reached."""
    self.options["stop_at_ratio"] = stop_at_ratio
def set_remove_at_ratio(self, remove_at_ratio):
    """Enable/disable removing this torrent when its seed ratio is reached."""
    self.options["remove_at_ratio"] = remove_at_ratio
def set_move_completed(self, move_completed):
    """Enable/disable moving this torrent's data when the download completes."""
    self.options["move_completed"] = move_completed
def set_move_completed_path(self, move_completed_path):
    """Set the destination path used when move-on-completed is enabled."""
    self.options["move_completed_path"] = move_completed_path
def set_file_priorities(self, file_priorities):
    """Set per-file download priorities (0 means 'Do Not Download').

    If the list length does not match the file count, or compact
    allocation is in use, libtorrent's current priorities are kept and
    only self.options is re-synced from the handle.
    """
    if len(file_priorities) != len(self.get_files()):
        log.debug("file_priorities len != num_files")
        self.options["file_priorities"] = self.handle.file_priorities()
        return

    if self.options["compact_allocation"]:
        log.debug("setting file priority with compact allocation does not work!")
        self.options["file_priorities"] = self.handle.file_priorities()
        return

    log.debug("setting %s's file priorities: %s", self.torrent_id, file_priorities)

    self.handle.prioritize_files(file_priorities)

    if 0 in self.options["file_priorities"]:
        # We have previously marked a file 'Do Not Download'
        # Check to see if we have changed any 0's to >0 and change state accordingly
        for index, priority in enumerate(self.options["file_priorities"]):
            if priority == 0 and file_priorities[index] > 0:
                # We have a changed 'Do Not Download' to a download priority
                self.is_finished = False
                self.update_state()
                break

    # Re-read from the handle so options reflects what libtorrent accepted.
    self.options["file_priorities"] = self.handle.file_priorities()
    if self.options["file_priorities"] != list(file_priorities):
        log.warning("File priorities were not set for this torrent")

    # Set the first/last priorities if needed
    self.set_prioritize_first_last(self.options["prioritize_first_last_pieces"])
def set_trackers(self, trackers):
    """Sets trackers.

    :param trackers: a list of {"url": ..., "tier": ...} dicts, or None
        to (re)load the tracker list from the libtorrent handle instead.
    """
    if trackers == None:
        # No list supplied: snapshot the handle's current trackers.
        trackers = []
        for value in self.handle.trackers():
            tracker = {}
            tracker["url"] = value.url
            tracker["tier"] = value.tier
            trackers.append(tracker)
        self.trackers = trackers
        self.tracker_host = None
        return

    log.debug("Setting trackers for %s: %s", self.torrent_id, trackers)
    tracker_list = []

    for tracker in trackers:
        new_entry = lt.announce_entry(str(tracker["url"]))
        new_entry.tier = tracker["tier"]
        tracker_list.append(new_entry)
    self.handle.replace_trackers(tracker_list)

    # Print out the trackers
    #for t in self.handle.trackers():
    #    log.debug("tier: %s tracker: %s", t["tier"], t["url"])
    # Set the tracker list in the torrent object
    self.trackers = trackers
    if len(trackers) > 0:
        # Force a re-announce if there is at least 1 tracker
        self.force_reannounce()
    # Invalidate the cached tracker host so it is recomputed on demand.
    self.tracker_host = None
### End Options methods ###
def set_save_path(self, save_path):
    """Record the download location for this torrent."""
    self.options.update(download_location=save_path)
def set_tracker_status(self, status):
    """Set the tracker status string, prefixed with the tracker host."""
    host = self.get_tracker_host()
    self.tracker_status = host + ": " + status
def update_state(self):
    """Updates the state based on what libtorrent's state for the torrent is"""
    # Set the initial state based on the lt state
    LTSTATE = deluge.common.LT_TORRENT_STATE
    ltstate = int(self.handle.status().state)

    # Set self.state to the ltstate right away just incase we don't hit some
    # of the logic below
    if ltstate in LTSTATE:
        self.state = LTSTATE[ltstate]
    else:
        # Unknown libtorrent state: fall back to its numeric value.
        self.state = str(ltstate)

    # NOTE(review): this lookup raises KeyError for an unknown ltstate even
    # though the branch above tolerates one — confirm all states are mapped.
    log.debug("set_state_based_on_ltstate: %s", deluge.common.LT_TORRENT_STATE[ltstate])
    log.debug("session.is_paused: %s", component.get("Core").session.is_paused())

    # First we check for an error from libtorrent, and set the state to that
    # if any occurred.
    if len(self.handle.status().error) > 0:
        # This is an error'd torrent
        self.state = "Error"
        self.set_status_message(self.handle.status().error)
        if self.handle.is_paused():
            # Stop auto-management so lt does not resume an error'd torrent.
            self.handle.auto_managed(False)
        return

    if ltstate == LTSTATE["Queued"] or ltstate == LTSTATE["Checking"]:
        if self.handle.is_paused():
            self.state = "Paused"
        else:
            self.state = "Checking"
        return
    elif ltstate == LTSTATE["Downloading"] or ltstate == LTSTATE["Downloading Metadata"]:
        self.state = "Downloading"
    elif ltstate == LTSTATE["Finished"] or ltstate == LTSTATE["Seeding"]:
        self.state = "Seeding"
    elif ltstate == LTSTATE["Allocating"]:
        self.state = "Allocating"

    # Override with Deluge-level notions of Queued/Paused: a torrent that lt
    # paused while auto-managed is "Queued"; a session pause or a manual
    # pause means "Paused".
    if self.handle.is_paused() and self.handle.is_auto_managed() and not component.get("Core").session.is_paused():
        self.state = "Queued"
    elif component.get("Core").session.is_paused() or (self.handle.is_paused() and not self.handle.is_auto_managed()):
        self.state = "Paused"
def set_state(self, state):
    """Set the torrent state from a state string ("Paused", "Seeding", ...).

    Unknown state strings are ignored and logged at debug level.
    """
    if state in TORRENT_STATE:
        self.state = state
    else:
        log.debug("Trying to set an invalid state %s", state)
def set_status_message(self, message):
    """Record *message* as this torrent's current status message."""
    self.statusmsg = message
def get_eta(self):
    """Return the estimated number of seconds until this torrent finishes.

    For a finished torrent configured to stop at a share ratio, this is
    the time until that ratio is reached; otherwise it is the time until
    all wanted data is downloaded.  Returns 0 when no estimate can be
    made (nothing left to do, or no transfer rate).
    """
    status = self.handle.status() if self.status == None else self.status

    if self.is_finished and self.options["stop_at_ratio"]:
        # Seeding: time remaining until the stop share ratio is reached.
        rate = status.upload_payload_rate
        if not rate:
            return 0
        target = status.all_time_download * self.options["stop_ratio"]
        return (target - status.all_time_upload) / rate

    remaining = status.total_wanted - status.total_wanted_done
    if remaining <= 0 or status.download_payload_rate == 0:
        return 0

    try:
        return remaining / status.download_payload_rate
    except ZeroDivisionError:
        return 0
def get_ratio(self):
    """Return the share ratio (uploaded / downloaded) for this torrent.

    Returns -1.0, signifying infinity, when nothing has been downloaded.
    """
    status = self.handle.status() if self.status == None else self.status
    if not status.total_done > 0:
        # Nothing downloaded yet: signal an infinite ratio.
        return -1.0
    return float(status.all_time_upload) / float(status.total_done)
def get_files(self):
    """Return a list of dicts describing the files in this torrent.

    Each dict carries the keys 'index', 'path', 'size' and 'offset'.
    An empty list is returned while no metadata is available.
    """
    if self.torrent_info == None and self.handle.has_metadata():
        torrent_info = self.handle.get_torrent_info()
    else:
        torrent_info = self.torrent_info

    if not torrent_info:
        return []

    return [{
        'index': index,
        'path': entry.path.decode("utf8", "ignore"),
        'size': entry.size,
        'offset': entry.offset,
    } for index, entry in enumerate(torrent_info.files())]
def get_peers(self):
    """Returns a list of peers and various information about them.

    Each entry is a dict with client, country, ip, progress, seed flag
    and payload transfer speeds.
    """
    ret = []
    peers = self.handle.get_peer_info()

    for peer in peers:
        # We do not want to report peers that are half-connected
        if peer.flags & peer.connecting or peer.flags & peer.handshake:
            continue

        try:
            client = str(peer.client).decode("utf-8")
        except UnicodeDecodeError:
            # latin-1 can decode any byte sequence, so this cannot fail.
            client = str(peer.client).decode("latin-1")

        # Make country a proper string
        country = str()
        for c in peer.country:
            # Replace non-letter characters so the code stays two chars wide.
            if not c.isalpha():
                country += " "
            else:
                country += c

        ret.append({
            "client": client,
            "country": country,
            "down_speed": peer.payload_down_speed,
            "ip": "%s:%s" % (peer.ip[0], peer.ip[1]),
            "progress": peer.progress,
            "seed": peer.flags & peer.seed,
            "up_speed": peer.payload_up_speed,
        })

    return ret
def get_queue_position(self):
    """Return this torrent's position in the libtorrent queue."""
    return self.handle.queue_position()
def get_file_progress(self):
    """Return per-file completion as a list of floats in [0.0, 1.0].

    Returns the scalar 0.0 when no metadata is available yet (historical
    behaviour kept for compatibility with existing callers).
    """
    if not self.handle.has_metadata():
        return 0.0

    progress = self.handle.file_progress()
    ret = []
    for index, entry in enumerate(self.get_files()):
        size = entry["size"]
        # Report 0.0 for zero-length files instead of dividing by zero.
        ret.append(float(progress[index]) / size if size else 0.0)
    return ret
def get_tracker_host(self):
    """Return the shortened hostname of the currently connected tracker.

    If no tracker is connected, the first tracker in self.trackers is
    used.  Raw IPv4 addresses are returned verbatim; DNS names are
    trimmed to their registrable part (e.g. "example.co.uk").  The
    result is cached in self.tracker_host; "" is returned when no
    tracker is known at all.
    """
    if self.tracker_host:
        return self.tracker_host

    if not self.status:
        self.status = self.handle.status()

    tracker = self.status.current_tracker
    if not tracker and self.trackers:
        tracker = self.trackers[0]["url"]

    if not tracker:
        return ""

    # urlparse does not understand the udp scheme, so pretend it is http.
    url = urlparse(tracker.replace("udp://", "http://"))
    host = (url.hostname or 'DHT')

    # Check if hostname is an IP address and just return it if that's the case
    import socket
    try:
        socket.inet_aton(host)
    except socket.error:
        pass
    else:
        # This is an IP address because an exception wasn't raised
        return host

    parts = host.split(".")
    if len(parts) > 2:
        # Keep three labels for second-level registries (e.g. example.co.uk).
        # Bug fix: the original tested `parts[-1] in ("uk")`, which is a
        # *substring* test against the string "uk", not tuple membership.
        if parts[-2] in ("co", "com", "net", "org") or parts[-1] in ("uk",):
            host = ".".join(parts[-3:])
        else:
            host = ".".join(parts[-2:])
    self.tracker_host = host
    return host
def get_last_seen_complete(self):
    """
    Returns the time a torrent was last seen complete, ie, with all pieces
    available.
    """
    # libtorrent >= 0.16 exposes this directly on the status object.
    if lt.version_minor > 15:
        return self.status.last_seen_complete
    # Older libtorrent: derive it from piece availability (cached ~60s).
    self.calculate_last_seen_complete()
    return self._last_seen_complete
def get_status(self, keys, diff=False):
    """
    Returns the status of the torrent based on the keys provided

    :param keys: the keys to get the status on; an empty list returns
        every available key
    :type keys: list of str
    :param diff: if True, will return a diff of the changes since the last
        call to get_status based on the session_id
    :type diff: bool

    :returns: a dictionary of the status keys and their values
    :rtype: dict

    """
    # Create the full dictionary
    self.status = self.handle.status()
    if self.handle.has_metadata():
        self.torrent_info = self.handle.get_torrent_info()

    # Adjust progress to be 0-100 value
    progress = self.status.progress * 100

    # Adjust status.distributed_copies to return a non-negative value
    distributed_copies = self.status.distributed_copies
    if distributed_copies < 0:
        distributed_copies = 0.0

    # Calculate the seeds:peers ratio
    if self.status.num_incomplete == 0:
        # Use -1.0 to signify infinity
        seeds_peers_ratio = -1.0
    else:
        seeds_peers_ratio = self.status.num_complete / float(self.status.num_incomplete)

    # Cheap values are computed eagerly here; expensive ones are deferred
    # through the 'fns' callables below and only evaluated when requested.
    full_status = {
        "active_time": self.status.active_time,
        "all_time_download": self.status.all_time_download,
        "compact": self.options["compact_allocation"],
        "distributed_copies": distributed_copies,
        "download_payload_rate": self.status.download_payload_rate,
        "file_priorities": self.options["file_priorities"],
        "hash": self.torrent_id,
        "is_auto_managed": self.options["auto_managed"],
        "is_finished": self.is_finished,
        "max_connections": self.options["max_connections"],
        "max_download_speed": self.options["max_download_speed"],
        "max_upload_slots": self.options["max_upload_slots"],
        "max_upload_speed": self.options["max_upload_speed"],
        "message": self.statusmsg,
        "move_on_completed_path": self.options["move_completed_path"],
        "move_on_completed": self.options["move_completed"],
        "move_completed_path": self.options["move_completed_path"],
        "move_completed": self.options["move_completed"],
        "next_announce": self.status.next_announce.seconds,
        "num_peers": self.status.num_peers - self.status.num_seeds,
        "num_seeds": self.status.num_seeds,
        "owner": self.owner,
        "paused": self.status.paused,
        "prioritize_first_last": self.options["prioritize_first_last_pieces"],
        "sequential_download": self.options["sequential_download"],
        "progress": progress,
        "shared": self.options["shared"],
        "remove_at_ratio": self.options["remove_at_ratio"],
        "save_path": self.options["download_location"],
        "seeding_time": self.status.seeding_time,
        "seeds_peers_ratio": seeds_peers_ratio,
        "seed_rank": self.status.seed_rank,
        "state": self.state,
        "stop_at_ratio": self.options["stop_at_ratio"],
        "stop_ratio": self.options["stop_ratio"],
        "time_added": self.time_added,
        "total_done": self.status.total_done,
        "total_payload_download": self.status.total_payload_download,
        "total_payload_upload": self.status.total_payload_upload,
        "total_peers": self.status.num_incomplete,
        "total_seeds": self.status.num_complete,
        "total_uploaded": self.status.all_time_upload,
        "total_wanted": self.status.total_wanted,
        "tracker": self.status.current_tracker,
        "trackers": self.trackers,
        "tracker_status": self.tracker_status,
        "upload_payload_rate": self.status.upload_payload_rate
    }

    # Accessors for keys that require torrent metadata; each returns a
    # neutral default while metadata is not yet available.
    def ti_comment():
        if self.handle.has_metadata():
            try:
                return self.torrent_info.comment().decode("utf8", "ignore")
            except UnicodeDecodeError:
                return self.torrent_info.comment()
        return ""
    def ti_priv():
        if self.handle.has_metadata():
            return self.torrent_info.priv()
        return False
    def ti_total_size():
        if self.handle.has_metadata():
            return self.torrent_info.total_size()
        return 0
    def ti_num_files():
        if self.handle.has_metadata():
            return self.torrent_info.num_files()
        return 0
    def ti_num_pieces():
        if self.handle.has_metadata():
            return self.torrent_info.num_pieces()
        return 0
    def ti_piece_length():
        if self.handle.has_metadata():
            return self.torrent_info.piece_length()
        return 0
    def ti_pieces_info():
        if self.handle.has_metadata():
            return self.get_pieces_info()
        return None

    # Deferred (potentially expensive) status keys.
    fns = {
        "comment": ti_comment,
        "eta": self.get_eta,
        "file_progress": self.get_file_progress,
        "files": self.get_files,
        "is_seed": self.handle.is_seed,
        "name": self.get_name,
        "num_files": ti_num_files,
        "num_pieces": ti_num_pieces,
        "pieces": ti_pieces_info,
        "peers": self.get_peers,
        "piece_length": ti_piece_length,
        "private": ti_priv,
        "queue": self.handle.queue_position,
        "ratio": self.get_ratio,
        "total_size": ti_total_size,
        "tracker_host": self.get_tracker_host,
        "last_seen_complete": self.get_last_seen_complete
    }

    # Create the desired status dictionary and return it
    status_dict = {}

    if len(keys) == 0:
        # No keys requested: return everything, evaluating all deferred keys.
        status_dict = full_status
        for key in fns:
            status_dict[key] = fns[key]()
    else:
        for key in keys:
            if key in full_status:
                status_dict[key] = full_status[key]
            elif key in fns:
                status_dict[key] = fns[key]()

    session_id = self.rpcserver.get_session_id()
    if diff:
        if session_id in self.prev_status:
            # We have a previous status dict, so lets make a diff
            status_diff = {}
            for key, value in status_dict.items():
                if key in self.prev_status[session_id]:
                    if value != self.prev_status[session_id][key]:
                        status_diff[key] = value
                else:
                    # New key since the last call: always include it.
                    status_diff[key] = value

            self.prev_status[session_id] = status_dict
            return status_diff

        # First call for this session: remember the full dict and return it.
        self.prev_status[session_id] = status_dict
        return status_dict

    return status_dict
def apply_options(self):
    """Applies the per-torrent options that are set.

    Reads the canonical values from self.options, which is where every
    set_* method in this class stores them; the previous code read
    stale self.max_* attributes instead, which is inconsistent with the
    rest of the class.
    """
    self.handle.set_max_connections(self.options["max_connections"])
    self.handle.set_max_uploads(self.options["max_upload_slots"])
    self.handle.set_upload_limit(int(self.options["max_upload_speed"] * 1024))
    self.handle.set_download_limit(int(self.options["max_download_speed"] * 1024))
    self.handle.prioritize_files(self.options["file_priorities"])
    self.handle.set_sequential_download(self.options["sequential_download"])
    self.handle.resolve_countries(True)
def pause(self):
    """Pause this torrent.

    Returns True on success, False if libtorrent failed to pause it.
    """
    # Turn off auto-management so the torrent will not be unpaused by lt queueing
    self.handle.auto_managed(False)
    if self.handle.is_paused():
        # This torrent was probably paused due to being auto managed by lt
        # Since we turned auto_managed off, we should update the state which should
        # show it as 'Paused'.  We need to emit a torrent_paused signal because
        # the torrent_paused alert from libtorrent will not be generated.
        self.update_state()
        component.get("EventManager").emit(TorrentStateChangedEvent(self.torrent_id, "Paused"))
    else:
        try:
            self.handle.pause()
        except Exception, e:
            log.debug("Unable to pause torrent: %s", e)
            return False

    return True
def resume(self):
    """Resumes this torrent.

    Does nothing when the torrent is paused by libtorrent's queue
    auto-management, or when it has already reached its stop share
    ratio.  Returns True otherwise.
    """
    if self.handle.is_paused() and self.handle.is_auto_managed():
        log.debug("Torrent is being auto-managed, cannot resume!")
        return
    else:
        # Reset the status message just in case of resuming an Error'd torrent
        self.set_status_message("OK")

        if self.handle.is_finished():
            # If the torrent has already reached it's 'stop_seed_ratio' then do not do anything
            if self.options["stop_at_ratio"]:
                if self.get_ratio() >= self.options["stop_ratio"]:
                    #XXX: This should just be returned in the RPC Response, no event
                    #self.signals.emit_event("torrent_resume_at_stop_ratio")
                    return

        if self.options["auto_managed"]:
            # This torrent is to be auto-managed by lt queueing
            self.handle.auto_managed(True)

        try:
            self.handle.resume()
        except:
            # Best-effort: libtorrent resume failures are deliberately ignored.
            pass

        return True
def connect_peer(self, ip, port):
    """adds manual peer

    Returns True when the connection attempt was handed to libtorrent,
    False on error (e.g. an unparsable port).
    """
    try:
        self.handle.connect_peer((ip, int(port)), 0)
    except Exception, e:
        log.debug("Unable to connect to peer: %s", e)
        return False
    return True
def move_storage(self, dest):
"""Move a torrent's storage location"""
if deluge.common.windows_check():
# Attempt to convert utf8 path to unicode
# Note: Inconsistent encoding for 'dest', needs future investigation
try:
dest_u = unicode(dest, "utf-8")
except TypeError:
# String is already unicode
dest_u = dest
else:
dest_u = dest
if not os.path.exists(dest_u):
try:
# Try to make the destination path if it doesn't exist
os.makedirs(dest_u)
except IOError, e:
log.exception(e)
log.error("Could not move storage for torrent %s since %s does "
"not exist and could not create the directory.",
self.torrent_id, dest_u)
return False
try:
self.handle.move_storage(dest_u)
except:
return False
return True
def save_resume_data(self):
    """Ask libtorrent to build fast-resume data for this torrent.

    The data itself arrives asynchronously in a libtorrent alert;
    waiting_on_resume_data records that we expect it.
    """
    self.handle.save_resume_data()
    self.waiting_on_resume_data = True
def write_torrentfile(self):
    """Writes the torrent file to the state dir (<config>/state/<id>.torrent)."""
    path = "%s/%s.torrent" % (
        os.path.join(get_config_dir(), "state"),
        self.torrent_id)
    log.debug("Writing torrent file: %s", path)
    try:
        self.torrent_info = self.handle.get_torrent_info()
        # Regenerate the file priorities
        self.set_file_priorities([])
        # Rebuild a minimal .torrent (just the bencoded info dict).
        md = lt.bdecode(self.torrent_info.metadata())
        torrent_file = {}
        torrent_file["info"] = md
        open(path, "wb").write(lt.bencode(torrent_file))
    except Exception, e:
        log.warning("Unable to save torrent file: %s", e)
def delete_torrentfile(self):
    """Deletes the .torrent file in the state directory."""
    path = "%s/%s.torrent" % (
        os.path.join(get_config_dir(), "state"),
        self.torrent_id)
    log.debug("Deleting torrent file: %s", path)
    try:
        os.remove(path)
    except Exception, e:
        # Missing file (or permissions) is not fatal; just log it.
        log.warning("Unable to delete the torrent file: %s", e)
def force_reannounce(self):
    """Force a tracker reannounce.

    Returns True on success, False if libtorrent raised.
    """
    try:
        self.handle.force_reannounce()
    except Exception, e:
        log.debug("Unable to force reannounce: %s", e)
        return False
    return True
def scrape_tracker(self):
    """Scrape the tracker.

    Returns True on success, False if libtorrent raised.
    """
    try:
        self.handle.scrape_tracker()
    except Exception, e:
        log.debug("Unable to scrape tracker: %s", e)
        return False
    return True
def force_recheck(self):
    """Forces a recheck of the torrents pieces.

    Remembers whether the torrent was paused beforehand so the recheck
    alert handler can restore that state.  Returns True on success.
    """
    paused = self.handle.is_paused()
    try:
        self.handle.force_recheck()
        # Resume right after starting the recheck; presumably needed so
        # the checking actually proceeds — TODO confirm against libtorrent.
        self.handle.resume()
    except Exception, e:
        log.debug("Unable to force recheck: %s", e)
        return False
    self.forcing_recheck = True
    self.forcing_recheck_paused = paused
    return True
def rename_files(self, filenames):
    """Renames files in the torrent.

    'filenames' is an iterable of (index, filename) pairs; each name is
    sanitized before being handed to libtorrent as UTF-8.
    """
    for index, new_name in filenames:
        sanitized = sanitize_filepath(new_name)
        self.handle.rename_file(index, sanitized.encode("utf-8"))
def rename_folder(self, folder, new_folder):
    """Renames a folder within a torrent.  This basically does a file rename
    on all of the folders children."""
    log.debug("attempting to rename folder: %s to %s", folder, new_folder)
    if len(new_folder) < 1:
        log.error("Attempting to rename a folder with an invalid folder name: %s", new_folder)
        return

    new_folder = sanitize_filepath(new_folder, folder=True)

    # Track (old, new, [pending file indexes]) so the rename-alert handler
    # can tell when every child of this folder has been renamed.
    wait_on_folder = (folder, new_folder, [])
    for f in self.get_files():
        if f["path"].startswith(folder):
            # Keep a list of filerenames we're waiting on
            wait_on_folder[2].append(f["index"])
            self.handle.rename_file(f["index"], f["path"].replace(folder, new_folder, 1).encode("utf-8"))
    self.waiting_on_folder_rename.append(wait_on_folder)
def cleanup_prev_status(self):
    """
    This method gets called to check the validity of the keys in the prev_status
    dict.  If the key (an RPC session id) is no longer valid, its entry is
    deleted so stale sessions do not leak cached status dicts.
    """
    # Iterate over a snapshot of the keys: we delete from the dict while
    # walking it, which is only safe on a copy (and required on Python 3,
    # where dict.keys() is a live view).
    for key in list(self.prev_status.keys()):
        if not self.rpcserver.is_session_valid(key):
            del self.prev_status[key]
def calculate_last_seen_complete(self):
    """Update _last_seen_complete if every piece is currently available.

    Used on libtorrent versions without native last_seen_complete
    support.  The result is cached for 60 seconds to avoid hammering
    piece_availability().
    """
    if self._last_seen_complete+60 > time.time():
        # Simple caching. Only calculate every 1 min at minimum
        return self._last_seen_complete

    availability = self.handle.piece_availability()
    # Bug fix: the old `if filter(lambda x: x<1, availability)` relied on
    # filter() returning a list; on Python 3 a filter object is always
    # truthy.  any() expresses the same test portably.
    if any(avail < 1 for avail in availability):
        # Torrent does not have all the pieces
        return

    log.trace("Torrent %s has all the pieces. Setting last seen complete.",
              self.torrent_id)
    self._last_seen_complete = time.time()
def get_pieces_info(self):
    """Return the state of every piece as a list ordered by piece index.

    State codes: 0 = missing (no known peer has it), 1 = available from
    a peer but not downloaded yet, 2 = currently downloading,
    3 = completed.
    """
    pieces = {}
    # First get the pieces availability.
    availability = self.handle.piece_availability()
    # Pieces currently being downloaded from connected peers.
    for peer_info in self.handle.get_peer_info():
        if peer_info.downloading_piece_index < 0:
            # No piece index, then we're not downloading anything from
            # this peer
            continue
        pieces[peer_info.downloading_piece_index] = 2

    # Now, the rest of the pieces
    for idx, piece in enumerate(self.handle.status().pieces):
        if idx in pieces:
            # Piece being downloaded, handled above
            continue
        elif piece:
            # Completed piece
            pieces[idx] = 3
        elif availability[idx] > 0:
            # Piece not downloaded nor being downloaded, but available
            pieces[idx] = 1
        else:
            # The piece is missing: no known peer has it, or it has not
            # been asked for so far.
            pieces[idx] = 0

    # Return only the piece states, ordered by piece index.
    # Bug fix: the old `pieces.keys(); .sort()` breaks on Python 3 where
    # keys() returns a view; sorted() works everywhere.
    return [pieces[idx] for idx in sorted(pieces)]
#!/usr/bin/env python
#
# Copyright 2010 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Prefer setuptools (needed for install_requires/entry_points support);
# fall back to plain distutils when setuptools is not available —
# NOTE(review): distutils.core.setup does not understand the
# setuptools-only keywords below; confirm the fallback is intentional.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Package metadata for the Closure Linter.  The console_scripts entry
# points expose the linter (gjslint) and the automatic style fixer
# (fixjsstyle) as command-line tools.
setup(name='closure_linter',
      version='2.3.14',
      description='Closure Linter',
      license='Apache',
      author='The Closure Linter Authors',
      author_email='opensource@google.com',
      url='http://code.google.com/p/closure-linter',
      install_requires=['python-gflags'],
      package_dir={'closure_linter': 'closure_linter'},
      packages=['closure_linter', 'closure_linter.common'],
      entry_points = {
          'console_scripts': [
              'gjslint = closure_linter.gjslint:main',
              'fixjsstyle = closure_linter.fixjsstyle:main'
          ]
      }
)
'''
Copyright (C) 2015 Dato, Inc.
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
'''
##\internal
"""@package graphlab.toolkits
Internal Toolkit Calling
"""
from ..connect import _get_metric_tracker
from ..connect import main as glconnect
import time
import logging
class ToolkitError(RuntimeError):
    """Raised when the graphlab server fails to execute a toolkit."""
    pass
def run(toolkit_name, options, verbose=True, show_progress=False):
    """
    Internal function to execute toolkit on the graphlab server.

    Parameters
    ----------
    toolkit_name : string
        The name of the toolkit.

    options : dict
        A map containing the required input for the toolkit function,
        for example: {'graph': g, 'reset_prob': 0.15}.

    verbose : bool
        If true, enable progress log from server.

    show_progress : bool
        If true, display progress plot.  (Currently unused.)

    Returns
    -------
    out : dict
        The toolkit specific model parameters.

    Raises
    ------
    RuntimeError
        Raises RuntimeError if the server fail executing the toolkit.
    """
    unity = glconnect.get_unity()
    if not verbose:
        glconnect.get_client().set_log_progress(False)

    # Time the toolkit execution so the runtime can be reported to metrics.
    # (The old code also computed an unused server address split and wrapped
    # this call in a no-op `try/except: raise`; both removed.)
    start_time = time.time()
    (success, message, params) = unity.run_toolkit(toolkit_name, options)
    end_time = time.time()

    if len(message) > 0:
        logging.getLogger(__name__).error("Toolkit error: " + message)

    # Record success/failure (plus runtime or the error message) in metrics.
    track_props = {}
    track_props['success'] = success

    if success:
        track_props['runtime'] = end_time - start_time
    else:
        if (len(message) > 0):
            track_props['message'] = message

    metric_name = 'toolkit.%s.executed' % (toolkit_name)
    _get_metric_tracker().track(metric_name, value=1, properties=track_props, send_sys_info=False)

    # set the verbose level back to default
    glconnect.get_client().set_log_progress(True)

    if success:
        return params
    else:
        raise ToolkitError(str(message))
#!/usr/bin/env python
#
# Copyright 2015 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# ignore-tidy-linelength
import sys
import os
import subprocess
import argparse
# usage: testparser.py [-h] [-p PARSER [PARSER ...]] -s SOURCE_DIR

# Parsers should read from stdin and return exit status 0 for a
# successful parse, and nonzero for an unsuccessful parse.
# Files under a 'parse-fail' directory are expected to be rejected;
# every other .rs file is expected to be accepted.

parser = argparse.ArgumentParser()
parser.add_argument('-p', '--parser', nargs='+')
parser.add_argument('-s', '--source-dir', nargs=1, required=True)
args = parser.parse_args(sys.argv[1:])

total = 0
ok = {}      # parser -> number of files with the expected verdict
bad = {}     # parser -> files where the verdict was wrong
for parser in args.parser:
    ok[parser] = 0
    bad[parser] = []
devnull = open(os.devnull, 'w')
print("\n")

for base, dirs, files in os.walk(args.source_dir[0]):
    for f in filter(lambda p: p.endswith('.rs'), files):
        p = os.path.join(base, f)
        parse_fail = 'parse-fail' in p
        if sys.version_info.major == 3:
            lines = open(p, encoding='utf-8').readlines()
        else:
            lines = open(p).readlines()
        if any('ignore-test' in line or 'ignore-lexer-test' in line for line in lines):
            continue
        total += 1
        for parser in args.parser:
            if subprocess.call(parser, stdin=open(p), stderr=subprocess.STDOUT, stdout=devnull) == 0:
                # Parser accepted the file.
                if parse_fail:
                    bad[parser].append(p)
                else:
                    ok[parser] += 1
            else:
                # Parser rejected the file.
                if parse_fail:
                    ok[parser] += 1
                else:
                    bad[parser].append(p)
        parser_stats = ', '.join(['{}: {}'.format(parser, ok[parser]) for parser in args.parser])
        # parser_stats is a summary string, not a filesystem path; the old
        # code wrapped it in os.path.relpath() by mistake.
        sys.stdout.write("\033[K\r total: {}, {}, scanned {}"
                         .format(total, parser_stats, os.path.relpath(p)))
devnull.close()

print("\n")

for parser in args.parser:
    filename = os.path.basename(parser) + '.bad'
    print("writing {} files that did not yield the correct result with {} to {}".format(len(bad[parser]), parser, filename))
    with open(filename, "w") as f:
        for p in bad[parser]:
            f.write(p)
            f.write("\n")
#-*- coding: ISO-8859-1 -*-
# pysqlite2/__init__.py: the pysqlite2 package.
#
# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
# Load the IronPython.SQLite assembly when running under IronPython
# (sys.platform == 'cli') so the dbapi2 module imported below can find
# its native implementation.  The work is wrapped in a throwaway
# function that is deleted immediately after the call to avoid leaking
# 'sys'/'clr' into the package namespace.
def _():
    import sys
    if sys.platform == 'cli':
        import clr
        clr.AddReference('IronPython.SQLite')
_()
del _

# Re-export the DB-API 2.0 interface as the package's public API.
from dbapi2 import *
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package rpcapi
import (
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/plans"
"github.com/hashicorp/terraform/internal/providers"
"github.com/hashicorp/terraform/internal/rpcapi/terraform1/stacks"
"github.com/hashicorp/terraform/internal/stacks/stackstate"
)
// listResourceIdentities flattens the resource identity data recorded in
// stackState into the wire representation used by the Stacks RPC API.
//
// identitySchemas maps each provider to the identity schemas of its resource
// types; it is used both to validate the recorded identity data and to decode
// the stored identity JSON. A nil stackState yields an empty (non-nil) slice.
func listResourceIdentities(stackState *stackstate.State, identitySchemas map[addrs.Provider]map[string]providers.IdentitySchema) ([]*stacks.ListResourceIdentities_Resource, error) {
	resourceIdentities := make([]*stacks.ListResourceIdentities_Resource, 0)
	// A non-existent stack state has no resource identities
	if stackState == nil {
		return resourceIdentities, nil
	}
	for ci := range stackState.AllComponentInstances() {
		componentIdentities := stackState.IdentitiesForComponent(ci)
		for ri, src := range componentIdentities {
			// We skip resources without identity JSON
			if len(src.IdentityJSON) == 0 {
				continue
			}
			providerAddrs := addrs.ImpliedProviderForUnqualifiedType(ri.ResourceInstance.Resource.Resource.ImpliedProvider())
			identitySchema, ok := identitySchemas[providerAddrs]
			if !ok {
				return nil, status.Errorf(codes.InvalidArgument, "provider %s could not be found in the identity schema", providerAddrs)
			}
			resourceType := ri.ResourceInstance.Resource.Resource.Type
			schema, ok := identitySchema[resourceType]
			if !ok {
				return nil, status.Errorf(codes.InvalidArgument, "resource %s could not be found in the identity schema", ri)
			}
			// A stored identity is only decodable when it was written with the
			// same schema version the provider currently reports.
			if src.IdentitySchemaVersion != uint64(schema.Version) {
				return nil, status.Errorf(codes.InvalidArgument, "resource %s has an invalid identity schema version, please update the provider or refresh the state", ri)
			}
			// Decode the stored JSON using the schema's implied type, then
			// re-encode it as the DynamicValue representation used on the wire.
			ty := schema.Body.ImpliedType()
			identity, err := ctyjson.Unmarshal(src.IdentityJSON, ty)
			if err != nil {
				return nil, status.Errorf(codes.InvalidArgument, "failed to unmarshal identity JSON for resource %s: %s", ri, err)
			}
			identityRaw, err := plans.NewDynamicValue(identity, ty)
			if err != nil {
				return nil, status.Errorf(codes.InvalidArgument, "failed to create dynamic value for identity for resource %s: %s", ri, err)
			}
			stacksIdentityRaw := stacks.NewDynamicValue(identityRaw, []cty.Path{})
			resourceIdentities = append(resourceIdentities, &stacks.ListResourceIdentities_Resource{
				ComponentAddr:         ci.Item.Component.String(),
				ComponentInstanceAddr: ci.Item.String(),
				ResourceInstanceAddr:  ri.String(),
				ResourceIdentity:      stacksIdentityRaw,
			})
		}
	}
	return resourceIdentities, nil
}
"""Test fixtures."""
from builtins import super
import pytest
from napalm_base.test import conftest as parent_conftest
from napalm_base.test.double import BaseTestDouble
from napalm_base.utils import py23_compat
from napalm_nxos_ssh import nxos_ssh
@pytest.fixture(scope='class')
def set_device_parameters(request):
    """Set up the class.

    Attaches the real driver, its patched test double and the vendor
    name to the test class, then delegates to the shared napalm helper.
    """
    def fin():
        # Close the (fake) device connection when the test class finishes.
        request.cls.device.close()
    request.addfinalizer(fin)

    request.cls.driver = nxos_ssh.NXOSSSHDriver
    request.cls.patched_driver = PatchedNXOSSSHDriver
    request.cls.vendor = 'nxos_ssh'
    parent_conftest.set_device_parameters(request)
def pytest_generate_tests(metafunc):
    """Generate test cases dynamically.

    Delegates to the shared napalm helper, which discovers the mocked
    test cases stored alongside this file.
    """
    parent_conftest.pytest_generate_tests(metafunc, __file__)
class PatchedNXOSSSHDriver(nxos_ssh.NXOSSSHDriver):
    """Patched NXOS Driver.

    Replaces the real SSH transport with FakeNXOSSSHDevice so driver
    methods can be exercised against canned command output, with no
    network access.
    """

    def __init__(self, hostname, username, password, timeout=60, optional_args=None):
        super().__init__(hostname, username, password, timeout, optional_args)
        # Names of attributes replaced by test doubles.
        self.patched_attrs = ['device']
        self.device = FakeNXOSSSHDevice()

    def disconnect(self):
        # Nothing to tear down for the fake device.
        pass

    def is_alive(self):
        return {
            'is_alive': True  # In testing everything works..
        }

    def open(self):
        # No real connection is established in tests.
        pass
class FakeNXOSSSHDevice(BaseTestDouble):
    """NXOS device test double.

    send_command() replays canned output from a text file named after
    the sanitized command, instead of talking to a real device.
    """

    def send_command(self, command, **kwargs):
        """Return the recorded output for *command* as text."""
        filename = '{}.txt'.format(self.sanitize_text(command))
        full_path = self.find_file(filename)
        result = self.read_txt_file(full_path)
        return py23_compat.text_type(result)

    def disconnect(self):
        # Nothing to disconnect.
        pass
# -*- coding: utf-8 -*-
# Copyright(C) 2008-2011 Romain Bignon, Christophe Benz
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import math
import re
import urllib
from weboob.tools.browser import BaseBrowser, BrowserIncorrectPassword, BrowserHTTPNotFound
from weboob.tools.json import json
from weboob.capabilities.base import UserError
from weboob.capabilities.messages import CantSendMessage
__all__ = ['AuMBrowser']
class AuMException(UserError):
    """User-facing error raised when the AdopteUnMec API reports an error.

    The human-readable message is looked up in :attr:`ERRORS` by the API
    error code; unknown codes fall back to the raw code string.
    """
    ERRORS = {"0.0.0": "Bad signature",
              "0.0.1": "Malformed request",
              "0.0.2": "Not logged",
              "1.1.1": "No member has this login",
              "1.1.2": "Password don't match",
              "1.1.3": "User has been banned",
              # NOTE(review): the next two entries share the key "1.12.1",
              # so "Invalid country" is silently overwritten by
              # "Invalid region".  One of the codes is probably meant to be
              # "1.12.2" -- confirm against the API error-code reference.
              "1.12.1": "Invalid country",
              "1.12.1": "Invalid region",
              "4.0.1": "Member not found",
              "4.1.1": "Thread doesn't exist",
              "4.1.2": "Cannot write to this member",
              # Fixed typo in user-visible message: "tergeted" -> "targeted".
              "5.1.1": "Member targeted doesn't exist",
              "5.1.2": "Sex member targeted is not the opposite of the member logged",
              "5.1.3": "Not possible to send a charm",
              "5.1.4": "Not possible to send a charm because the 5 charms has been already used",
              "5.1.5": "Not possible because the guy has already send a charm to this girl",
              "5.1.6": "No more money",
              "5.1.7": "Not possible to add to basket",
              "5.2.1": "Member doesn't exist",
              "5.3.1": "Member doesn't exist",
             }
    def __init__(self, code):
        # Store the raw code; the message falls back to it when unknown.
        Exception.__init__(self, self.ERRORS.get(code, code))
        self.code = code
class AuMBrowser(BaseBrowser):
    """Browser for the AdopteUnMec dating website (weboob, Python 2 code).

    Talks to the site's JSON API under ``/api/``; the legacy ``api0``
    endpoint is kept for reference but disabled (see :meth:`login`).
    """
    DOMAIN = 'www.adopteunmec.com'
    APIKEY = 'fb0123456789abcd'
    consts = None      # cached result of get_consts()
    my_sex = 0         # sex flag of the logged-in member (indexes consts)
    my_id = 0          # id of the logged-in member
    my_name = u''      # pseudo of the logged-in member
    my_coords = (0,0)  # (latitude, longitude) of the logged-in member
    def __init__(self, username, password, search_query, *args, **kwargs):
        # Skip BaseBrowser's automatic home() call; we do it ourselves after
        # registering HTTP auth for the API endpoint.
        kwargs['get_home'] = False
        BaseBrowser.__init__(self, username, password, *args, **kwargs)
        self.add_password('http://www.adopteunmec.com/api/', self.username, self.password)
        self.home()
        self.search_query = search_query
    def id2url(self, id):
        """Build the public profile URL for a member id."""
        return u'http://www.adopteunmec.com/index.php/profile/%s' % id
    def url2id(func):
        """Decorator: accept either a numeric id or a profile URL.

        If the first argument looks like an adopteunmec URL, the numeric id
        is extracted from it before calling the wrapped method.
        """
        def inner(self, id, *args, **kwargs):
            m = re.match('^http://.*adopteunmec.com.*/(\d+)$', str(id))
            if m:
                id = int(m.group(1))
            else:
                m = re.match('^http://.*adopteunmec.com/index.php/profile/(\d+).*', str(id))
                if m:
                    id = int(m.group(1))
            return func(self, id, *args, **kwargs)
        return inner
    def api0_request(self, command, action, parameter='', data=None, nologin=False):
        """Call the legacy (v0) API, retrying once after login on auth errors.

        Raises :class:`AuMException` for API-level errors and
        :class:`BrowserIncorrectPassword` when authentication still fails
        after re-login.  NOTE: the legacy API is disabled (see login()).
        """
        if data is None:
            # Always do POST requests.
            data = ''
        elif isinstance(data, (list,tuple,dict)):
            data = urllib.urlencode(data)
        elif isinstance(data, unicode):
            data = data.encode('utf-8')
        url = self.buildurl('http://api.adopteunmec.com/api.php',
                            S=self.APIKEY,
                            C=command,
                            A=action,
                            P=parameter,
                            O='json')
        buf = self.openurl(url, data).read()
        try:
            # The response may be prefixed with junk before the JSON object.
            r = json.loads(buf[buf.find('{'):])
        except ValueError:
            raise ValueError(buf)
        if 'errors' in r and r['errors'] != '0' and len(r['errors']) > 0:
            code = r['errors'][0]
            if code in (u'0.0.2', u'1.1.1', u'1.1.2'):
                # Authentication-related error: log in once and retry.
                if not nologin:
                    self.login()
                    return self.api0_request(command, action, parameter, data, nologin=True)
                else:
                    raise BrowserIncorrectPassword(AuMException.ERRORS[code])
            else:
                raise AuMException(code)
        return r
    def login(self):
        """No-op: the legacy login endpoint is disabled (kept for reference)."""
        pass
        # XXX old API is disabled
        #r = self.api0_request('me', 'login', data={'login': self.username,
        #                                           'pass': self.password,
        #                                          }, nologin=True)
        #self.my_coords = (float(r['result']['me']['lat']), float(r['result']['me']['lng']))
        #if not self.search_query:
        #    self.search_query = 'region=%s' % r['result']['me']['region']
    def api_request(self, command, **kwargs):
        """Call the current JSON API; a ``data`` keyword triggers a POST."""
        if 'data' in kwargs:
            # Presumably ``data`` is always a text string here -- a dict
            # would fail on .encode().  TODO confirm against callers.
            data = kwargs.pop('data').encode('utf-8', 'replace')
        else:
            data = None
        url = self.buildurl(self.absurl('/api/%s' % command), **kwargs)
        buf = self.openurl(url, data).read()
        try:
            r = json.loads(buf)
        except ValueError:
            raise ValueError(buf)
        return r
    def home(self):
        """Fetch the home document and cache the logged-in member's identity."""
        r = self.api_request('home/')
        self.my_sex = r['user']['sex']
        self.my_id = int(r['user']['id'])
        self.my_name = r['user']['pseudo']
        return r
    def get_consts(self):
        """Return (and cache) per-sex constant tables from the API.

        Index 0 holds the values that apply to boys, index 1 to girls;
        values tagged 'both' go into both tables.
        """
        if self.consts is not None:
            return self.consts
        self.consts = [{}, {}]
        for key, sexes in self.api_request('values').iteritems():
            for sex, values in sexes.iteritems():
                if sex in ('boy', 'both'):
                    self.consts[0][key] = values
                if sex in ('girl', 'both'):
                    self.consts[1][key] = values
        return self.consts
    def score(self):
        """Return the logged-in member's points (fresh home() request)."""
        r = self.home()
        return int(r['user']['points'])
    def get_my_name(self):
        """Return the cached pseudo of the logged-in member."""
        return self.my_name
    def get_my_id(self):
        """Return the cached id of the logged-in member."""
        return self.my_id
    def nb_new_mails(self):
        """Return the count of unread mails."""
        r = self.home()
        return r['counters']['new_mails']
    def nb_new_baskets(self):
        """Return the count of new basket additions."""
        r = self.home()
        return r['counters']['new_baskets']
    def nb_new_visites(self):
        """Return the count of new profile visits."""
        r = self.home()
        return r['counters']['new_visits']
    def nb_available_charms(self):
        """Return how many charms ("flashes") remain in stock."""
        r = self.home()
        return r['subscription']['flashes_stock']
    def get_baskets(self):
        """Return up to 30 basket entries."""
        r = self.api_request('basket', count=30, offset=0)
        return r['results']
    def get_flashs(self):
        """Return up to 30 received charms."""
        r = self.api_request('charms/', count=30, offset=0)
        return r['results']
    def get_visits(self):
        """Return up to 30 recent profile visits."""
        r = self.api_request('visits', count=30, offset=0)
        return r['results']
    def get_threads_list(self, count=30):
        """Return up to ``count`` message threads."""
        r = self.api_request('threads', count=count, offset=0)
        return r['results']
    @url2id
    def get_thread_mails(self, id, count=30):
        """Return the mails of thread ``id`` (id or profile URL accepted)."""
        r = self.api_request('threads/%s' % id, count=count, offset=0)
        return r
    @url2id
    def post_mail(self, id, content):
        """Post ``content`` to thread ``id``; raises CantSendMessage on 404."""
        content = content.replace('\n', '\r\n')
        try:
            self.api_request('threads/%s' % id, data=content)
        except BrowserHTTPNotFound:
            raise CantSendMessage('Unable to send message.')
    @url2id
    def delete_thread(self, id):
        # NOTE(review): api_request() only accepts (command, **kwargs), so
        # the positional 'delete' argument below raises TypeError; also
        # api_request() calls .encode() on ``data``, which fails for a dict.
        # This method looks like it was written against the old
        # api0_request() signature -- confirm the intended endpoint.
        r = self.api_request('message', 'delete', data={'id_user': id})
        self.logger.debug('Thread deleted: %r' % r)
    @url2id
    def send_charm(self, id):
        """Send a charm to member ``id``; return False on 404."""
        try:
            self.api_request('users/%s/charms' % id, data='')
        except BrowserHTTPNotFound:
            return False
        else:
            return True
    @url2id
    def add_basket(self, id):
        """Add member ``id`` to the basket; return False on 404."""
        try:
            self.api_request('basket/%s' % id, data='')
        except BrowserHTTPNotFound:
            return False
        else:
            return True
    def search_profiles(self, **kwargs):
        """Return the set of member ids matching the stored search query."""
        if not self.search_query:
            # retrieve query
            self.login()
        r = self.api_request('users?count=100&offset=0&%s' % self.search_query)
        ids = [s['id'] for s in r['results']]
        return set(ids)
    @url2id
    def get_profile(self, id, with_pics=True):
        """Return the profile dict of member ``id`` with a 'dist' key added.

        'dist' is the great-circle distance in km between the logged-in
        member and the profile (haversine formula); 0.0 when the profile
        has no coordinates.
        """
        # XXX OLD API IS DISABLED (fucking faggots)
        #r = self.api0_request('member', 'view', data={'id': id})
        #if not 'result' in r:
        #    print r
        #profile = r['result']['member']
        profile = {}
        profile.update(self.api_request('users/%s' % id))
        # Calculate distance in km.
        profile['dist'] = 0.0
        if 'lat' in profile and 'lng' in profile:
            coords = (float(profile['lat']), float(profile['lng']))
            # Haversine formula; R is the Earth's mean radius in km.
            R = 6371
            lat1 = math.radians(self.my_coords[0])
            lat2 = math.radians(coords[0])
            lon1 = math.radians(self.my_coords[1])
            lon2 = math.radians(coords[1])
            dLat = lat2 - lat1
            dLong = lon2 - lon1
            a= pow(math.sin(dLat/2), 2) + math.cos(lat1) * math.cos(lat2) * pow(math.sin(dLong/2), 2)
            c= 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
            profile['dist'] = R * c
        return profile
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestTopic(unittest2.TestCase):
    """Unit tests for ``gcloud.pubsub.topic.Topic``.

    All network interaction is replaced by the ``_FauxPublisherAPI`` and
    ``_FauxIAMPolicy`` doubles attached to a ``_Client`` stand-in; each test
    asserts both the returned value and the exact API call the double
    recorded.  The ``*_w_alternate_client`` variants verify that a client
    passed explicitly to a method overrides the topic's bound client.
    """
    PROJECT = 'PROJECT'
    TOPIC_NAME = 'topic_name'
    TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)

    def _getTargetClass(self):
        # Import inside the helper so an import failure surfaces per-test.
        from gcloud.pubsub.topic import Topic
        return Topic

    def _makeOne(self, *args, **kw):
        # Factory for the class under test.
        return self._getTargetClass()(*args, **kw)

    def test_ctor_w_explicit_timestamp(self):
        client = _Client(project=self.PROJECT)
        topic = self._makeOne(self.TOPIC_NAME,
                              client=client,
                              timestamp_messages=True)
        self.assertEqual(topic.name, self.TOPIC_NAME)
        self.assertEqual(topic.project, self.PROJECT)
        self.assertEqual(topic.full_name, self.TOPIC_PATH)
        self.assertTrue(topic.timestamp_messages)

    def test_from_api_repr(self):
        client = _Client(project=self.PROJECT)
        resource = {'name': self.TOPIC_PATH}
        klass = self._getTargetClass()
        topic = klass.from_api_repr(resource, client=client)
        self.assertEqual(topic.name, self.TOPIC_NAME)
        self.assertTrue(topic._client is client)
        self.assertEqual(topic.project, self.PROJECT)
        self.assertEqual(topic.full_name, self.TOPIC_PATH)

    def test_from_api_repr_with_bad_client(self):
        # The resource's project must match the client's project.
        PROJECT1 = 'PROJECT1'
        PROJECT2 = 'PROJECT2'
        client = _Client(project=PROJECT1)
        PATH = 'projects/%s/topics/%s' % (PROJECT2, self.TOPIC_NAME)
        resource = {'name': PATH}
        klass = self._getTargetClass()
        self.assertRaises(ValueError, klass.from_api_repr,
                          resource, client=client)

    def test_create_w_bound_client(self):
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_create_response = {'name': self.TOPIC_PATH}
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        topic.create()
        self.assertEqual(api._topic_created, self.TOPIC_PATH)

    def test_create_w_alternate_client(self):
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.publisher_api = _FauxPublisherAPI()
        api._topic_create_response = {'name': self.TOPIC_PATH}
        topic = self._makeOne(self.TOPIC_NAME, client=client1)
        topic.create(client=client2)
        self.assertEqual(api._topic_created, self.TOPIC_PATH)

    def test_exists_miss_w_bound_client(self):
        # No canned get-response set, so the faux API raises NotFound.
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        self.assertFalse(topic.exists())
        self.assertEqual(api._topic_got, self.TOPIC_PATH)

    def test_exists_hit_w_alternate_client(self):
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.publisher_api = _FauxPublisherAPI()
        api._topic_get_response = {'name': self.TOPIC_PATH}
        topic = self._makeOne(self.TOPIC_NAME, client=client1)
        self.assertTrue(topic.exists(client=client2))
        self.assertEqual(api._topic_got, self.TOPIC_PATH)

    def test_delete_w_bound_client(self):
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_delete_response = {}
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        topic.delete()
        self.assertEqual(api._topic_deleted, self.TOPIC_PATH)

    def test_delete_w_alternate_client(self):
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.publisher_api = _FauxPublisherAPI()
        api._topic_delete_response = {}
        topic = self._makeOne(self.TOPIC_NAME, client=client1)
        topic.delete(client=client2)
        self.assertEqual(api._topic_deleted, self.TOPIC_PATH)

    def test_publish_single_bytes_wo_attrs_w_bound_client(self):
        # The message payload is base64-encoded on the wire.
        import base64
        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        MSGID = 'DEADBEEF'
        MESSAGE = {'data': B64, 'attributes': {}}
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID]
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        msgid = topic.publish(PAYLOAD)
        self.assertEqual(msgid, MSGID)
        self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE]))

    def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self):
        import base64
        import datetime
        from gcloud.pubsub import topic as MUT
        from gcloud._helpers import _RFC3339_MICROS
        from gcloud._testing import _Monkey
        NOW = datetime.datetime.utcnow()

        def _utcnow():
            # Frozen clock so the generated timestamp is predictable.
            return NOW

        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        MSGID = 'DEADBEEF'
        MESSAGE = {
            'data': B64,
            'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)},
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID]
        topic = self._makeOne(self.TOPIC_NAME, client=client1,
                              timestamp_messages=True)
        with _Monkey(MUT, _NOW=_utcnow):
            msgid = topic.publish(PAYLOAD, client=client2)
        self.assertEqual(msgid, MSGID)
        self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE]))

    def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self):
        # An explicit 'timestamp' attribute wins over auto-timestamping.
        import base64
        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        MSGID = 'DEADBEEF'
        OVERRIDE = '2015-04-10T16:46:22.868399Z'
        MESSAGE = {'data': B64,
                   'attributes': {'timestamp': OVERRIDE}}
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID]
        topic = self._makeOne(self.TOPIC_NAME, client=client,
                              timestamp_messages=True)
        msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE)
        self.assertEqual(msgid, MSGID)
        self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE]))

    def test_publish_single_w_attrs(self):
        import base64
        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        MSGID = 'DEADBEEF'
        MESSAGE = {'data': B64,
                   'attributes': {'attr1': 'value1', 'attr2': 'value2'}}
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID]
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        msgid = topic.publish(PAYLOAD, attr1='value1', attr2='value2')
        self.assertEqual(msgid, MSGID)
        self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE]))

    def test_publish_multiple_w_bound_client(self):
        # topic.batch() buffers messages and publishes them in one call.
        import base64
        PAYLOAD1 = b'This is the first message text'
        PAYLOAD2 = b'This is the second message text'
        B64_1 = base64.b64encode(PAYLOAD1)
        B64_2 = base64.b64encode(PAYLOAD2)
        MSGID1 = 'DEADBEEF'
        MSGID2 = 'BEADCAFE'
        MESSAGE1 = {'data': B64_1.decode('ascii'),
                    'attributes': {}}
        MESSAGE2 = {'data': B64_2.decode('ascii'),
                    'attributes': {'attr1': 'value1', 'attr2': 'value2'}}
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID1, MSGID2]
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        with topic.batch() as batch:
            batch.publish(PAYLOAD1)
            batch.publish(PAYLOAD2, attr1='value1', attr2='value2')
        self.assertEqual(list(batch), [MSGID1, MSGID2])
        self.assertEqual(list(batch.messages), [])
        self.assertEqual(api._topic_published,
                         (self.TOPIC_PATH, [MESSAGE1, MESSAGE2]))

    def test_publish_w_no_messages(self):
        # An empty batch must not hit the API at all.
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = []
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        with topic.batch() as batch:
            pass
        self.assertEqual(list(batch.messages), [])
        self.assertEqual(api._api_called, 0)

    def test_publish_multiple_w_alternate_client(self):
        import base64
        PAYLOAD1 = b'This is the first message text'
        PAYLOAD2 = b'This is the second message text'
        B64_1 = base64.b64encode(PAYLOAD1)
        B64_2 = base64.b64encode(PAYLOAD2)
        MSGID1 = 'DEADBEEF'
        MSGID2 = 'BEADCAFE'
        MESSAGE1 = {'data': B64_1.decode('ascii'), 'attributes': {}}
        MESSAGE2 = {
            'data': B64_2.decode('ascii'),
            'attributes': {'attr1': 'value1', 'attr2': 'value2'},
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID1, MSGID2]
        topic = self._makeOne(self.TOPIC_NAME, client=client1)
        with topic.batch(client=client2) as batch:
            batch.publish(PAYLOAD1)
            batch.publish(PAYLOAD2, attr1='value1', attr2='value2')
        self.assertEqual(list(batch), [MSGID1, MSGID2])
        self.assertEqual(list(batch.messages), [])
        self.assertEqual(api._topic_published,
                         (self.TOPIC_PATH, [MESSAGE1, MESSAGE2]))

    def test_publish_multiple_error(self):
        # If the `with` body raises, nothing may be published.
        PAYLOAD1 = b'This is the first message text'
        PAYLOAD2 = b'This is the second message text'
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        try:
            with topic.batch() as batch:
                batch.publish(PAYLOAD1)
                batch.publish(PAYLOAD2, attr1='value1', attr2='value2')
                raise _Bugout()
        except _Bugout:
            pass
        self.assertEqual(list(batch), [])
        # getattr default proves _topic_published was never set.
        self.assertEqual(getattr(api, '_topic_published', self), self)

    def test_subscription(self):
        from gcloud.pubsub.subscription import Subscription
        client = _Client(project=self.PROJECT)
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        SUBSCRIPTION_NAME = 'subscription_name'
        subscription = topic.subscription(SUBSCRIPTION_NAME)
        self.assertIsInstance(subscription, Subscription)
        self.assertEqual(subscription.name, SUBSCRIPTION_NAME)
        self.assertTrue(subscription.topic is topic)

    def test_list_subscriptions_no_paging(self):
        from gcloud.pubsub.subscription import Subscription
        SUB_NAME_1 = 'subscription_1'
        SUB_PATH_1 = 'projects/%s/subscriptions/%s' % (
            self.PROJECT, SUB_NAME_1)
        SUB_NAME_2 = 'subscription_2'
        SUB_PATH_2 = 'projects/%s/subscriptions/%s' % (
            self.PROJECT, SUB_NAME_2)
        SUBS_LIST = [SUB_PATH_1, SUB_PATH_2]
        TOKEN = 'TOKEN'
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_list_subscriptions_response = SUBS_LIST, TOKEN
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        subscriptions, next_page_token = topic.list_subscriptions()
        self.assertEqual(len(subscriptions), 2)
        subscription = subscriptions[0]
        self.assertIsInstance(subscription, Subscription)
        self.assertEqual(subscriptions[0].name, SUB_NAME_1)
        self.assertTrue(subscription.topic is topic)
        subscription = subscriptions[1]
        self.assertIsInstance(subscription, Subscription)
        self.assertEqual(subscriptions[1].name, SUB_NAME_2)
        self.assertTrue(subscription.topic is topic)
        self.assertEqual(next_page_token, TOKEN)
        # (path, page_size, page_token) recorded by the faux API.
        self.assertEqual(api._topic_listed,
                         (self.TOPIC_PATH, None, None))

    def test_list_subscriptions_with_paging(self):
        from gcloud.pubsub.subscription import Subscription
        SUB_NAME_1 = 'subscription_1'
        SUB_PATH_1 = 'projects/%s/subscriptions/%s' % (
            self.PROJECT, SUB_NAME_1)
        SUB_NAME_2 = 'subscription_2'
        SUB_PATH_2 = 'projects/%s/subscriptions/%s' % (
            self.PROJECT, SUB_NAME_2)
        SUBS_LIST = [SUB_PATH_1, SUB_PATH_2]
        PAGE_SIZE = 10
        TOKEN = 'TOKEN'
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_list_subscriptions_response = SUBS_LIST, None
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        subscriptions, next_page_token = topic.list_subscriptions(
            page_size=PAGE_SIZE, page_token=TOKEN)
        self.assertEqual(len(subscriptions), 2)
        subscription = subscriptions[0]
        self.assertIsInstance(subscription, Subscription)
        self.assertEqual(subscriptions[0].name, SUB_NAME_1)
        self.assertTrue(subscription.topic is topic)
        subscription = subscriptions[1]
        self.assertIsInstance(subscription, Subscription)
        self.assertEqual(subscriptions[1].name, SUB_NAME_2)
        self.assertTrue(subscription.topic is topic)
        self.assertEqual(next_page_token, None)
        self.assertEqual(api._topic_listed,
                         (self.TOPIC_PATH, PAGE_SIZE, TOKEN))

    def test_list_subscriptions_missing_key(self):
        client = _Client(project=self.PROJECT)
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_list_subscriptions_response = (), None
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        subscriptions, next_page_token = topic.list_subscriptions()
        self.assertEqual(len(subscriptions), 0)
        self.assertEqual(next_page_token, None)
        self.assertEqual(api._topic_listed,
                         (self.TOPIC_PATH, None, None))

    def test_get_iam_policy_w_bound_client(self):
        from gcloud.pubsub.iam import (
            PUBSUB_ADMIN_ROLE,
            PUBSUB_EDITOR_ROLE,
            PUBSUB_VIEWER_ROLE,
            PUBSUB_PUBLISHER_ROLE,
            PUBSUB_SUBSCRIBER_ROLE,
        )
        OWNER1 = 'user:phred@example.com'
        OWNER2 = 'group:cloud-logs@google.com'
        EDITOR1 = 'domain:google.com'
        EDITOR2 = 'user:phred@example.com'
        VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com'
        VIEWER2 = 'user:phred@example.com'
        PUBLISHER = 'user:phred@example.com'
        SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com'
        POLICY = {
            'etag': 'DEADBEEF',
            'version': 17,
            'bindings': [
                {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]},
                {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]},
                {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]},
                {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]},
                {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]},
            ],
        }
        client = _Client(project=self.PROJECT)
        api = client.iam_policy_api = _FauxIAMPolicy()
        api._get_iam_policy_response = POLICY
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        policy = topic.get_iam_policy()
        self.assertEqual(policy.etag, 'DEADBEEF')
        self.assertEqual(policy.version, 17)
        self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1])
        self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2])
        self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2])
        self.assertEqual(sorted(policy.publishers), [PUBLISHER])
        self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER])
        self.assertEqual(api._got_iam_policy, self.TOPIC_PATH)

    def test_get_iam_policy_w_alternate_client(self):
        # Minimal policy: only an etag; all role sets come back empty.
        POLICY = {
            'etag': 'ACAB',
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.iam_policy_api = _FauxIAMPolicy()
        api._get_iam_policy_response = POLICY
        topic = self._makeOne(self.TOPIC_NAME, client=client1)
        policy = topic.get_iam_policy(client=client2)
        self.assertEqual(policy.etag, 'ACAB')
        self.assertEqual(policy.version, None)
        self.assertEqual(sorted(policy.owners), [])
        self.assertEqual(sorted(policy.editors), [])
        self.assertEqual(sorted(policy.viewers), [])
        self.assertEqual(api._got_iam_policy, self.TOPIC_PATH)

    def test_set_iam_policy_w_bound_client(self):
        from gcloud.pubsub.iam import Policy
        from gcloud.pubsub.iam import (
            PUBSUB_ADMIN_ROLE,
            PUBSUB_EDITOR_ROLE,
            PUBSUB_VIEWER_ROLE,
            PUBSUB_PUBLISHER_ROLE,
            PUBSUB_SUBSCRIBER_ROLE,
        )
        OWNER1 = 'group:cloud-logs@google.com'
        OWNER2 = 'user:phred@example.com'
        EDITOR1 = 'domain:google.com'
        EDITOR2 = 'user:phred@example.com'
        VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com'
        VIEWER2 = 'user:phred@example.com'
        PUBLISHER = 'user:phred@example.com'
        SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com'
        POLICY = {
            'etag': 'DEADBEEF',
            'version': 17,
            'bindings': [
                {'role': PUBSUB_ADMIN_ROLE,
                 'members': [OWNER1, OWNER2]},
                {'role': PUBSUB_EDITOR_ROLE,
                 'members': [EDITOR1, EDITOR2]},
                {'role': PUBSUB_VIEWER_ROLE,
                 'members': [VIEWER1, VIEWER2]},
                {'role': PUBSUB_PUBLISHER_ROLE,
                 'members': [PUBLISHER]},
                {'role': PUBSUB_SUBSCRIBER_ROLE,
                 'members': [SUBSCRIBER]},
            ],
        }
        # The API's response carries a fresh etag/version.
        RESPONSE = POLICY.copy()
        RESPONSE['etag'] = 'ABACABAF'
        RESPONSE['version'] = 18
        client = _Client(project=self.PROJECT)
        api = client.iam_policy_api = _FauxIAMPolicy()
        api._set_iam_policy_response = RESPONSE
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        policy = Policy('DEADBEEF', 17)
        policy.owners.add(OWNER1)
        policy.owners.add(OWNER2)
        policy.editors.add(EDITOR1)
        policy.editors.add(EDITOR2)
        policy.viewers.add(VIEWER1)
        policy.viewers.add(VIEWER2)
        policy.publishers.add(PUBLISHER)
        policy.subscribers.add(SUBSCRIBER)
        new_policy = topic.set_iam_policy(policy)
        self.assertEqual(new_policy.etag, 'ABACABAF')
        self.assertEqual(new_policy.version, 18)
        self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2])
        self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2])
        self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2])
        self.assertEqual(sorted(new_policy.publishers), [PUBLISHER])
        self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER])
        self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, POLICY))

    def test_set_iam_policy_w_alternate_client(self):
        from gcloud.pubsub.iam import Policy
        RESPONSE = {'etag': 'ACAB'}
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.iam_policy_api = _FauxIAMPolicy()
        api._set_iam_policy_response = RESPONSE
        topic = self._makeOne(self.TOPIC_NAME, client=client1)
        policy = Policy()
        new_policy = topic.set_iam_policy(policy, client=client2)
        self.assertEqual(new_policy.etag, 'ACAB')
        self.assertEqual(new_policy.version, None)
        self.assertEqual(sorted(new_policy.owners), [])
        self.assertEqual(sorted(new_policy.editors), [])
        self.assertEqual(sorted(new_policy.viewers), [])
        self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, {}))

    def test_check_iam_permissions_w_bound_client(self):
        from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE
        ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE]
        client = _Client(project=self.PROJECT)
        api = client.iam_policy_api = _FauxIAMPolicy()
        # The faux API "grants" all but the last role asked about.
        api._test_iam_permissions_response = ROLES[:-1]
        topic = self._makeOne(self.TOPIC_NAME, client=client)
        allowed = topic.check_iam_permissions(ROLES)
        self.assertEqual(allowed, ROLES[:-1])
        self.assertEqual(api._tested_iam_permissions,
                         (self.TOPIC_PATH, ROLES))

    def test_check_iam_permissions_w_alternate_client(self):
        from gcloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE
        ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE]
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.iam_policy_api = _FauxIAMPolicy()
        api._test_iam_permissions_response = []
        topic = self._makeOne(self.TOPIC_NAME, client=client1)
        allowed = topic.check_iam_permissions(ROLES, client=client2)
        self.assertEqual(len(allowed), 0)
        self.assertEqual(api._tested_iam_permissions,
                         (self.TOPIC_PATH, ROLES))
class TestBatch(unittest2.TestCase):
    """Unit tests for ``gcloud.pubsub.topic.Batch``.

    Uses the ``_Topic`` stand-in (so no real Topic is needed) plus the
    ``_FauxPublisherAPI``/``_Client`` doubles for the publish path.
    """
    PROJECT = 'PROJECT'

    def _getTargetClass(self):
        from gcloud.pubsub.topic import Batch
        return Batch

    def _makeOne(self, *args, **kwargs):
        # Factory for the class under test.
        return self._getTargetClass()(*args, **kwargs)

    def test_ctor_defaults(self):
        topic = _Topic()
        client = _Client(project=self.PROJECT)
        batch = self._makeOne(topic, client)
        self.assertTrue(batch.topic is topic)
        self.assertTrue(batch.client is client)
        self.assertEqual(len(batch.messages), 0)
        self.assertEqual(len(batch.message_ids), 0)

    def test___iter___empty(self):
        # Iterating a batch yields its message ids.
        topic = _Topic()
        client = object()
        batch = self._makeOne(topic, client)
        self.assertEqual(list(batch), [])

    def test___iter___non_empty(self):
        topic = _Topic()
        client = object()
        batch = self._makeOne(topic, client)
        batch.message_ids[:] = ['ONE', 'TWO', 'THREE']
        self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE'])

    def test_publish_bytes_wo_attrs(self):
        import base64
        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        MESSAGE = {'data': B64,
                   'attributes': {}}
        client = _Client(project=self.PROJECT)
        topic = _Topic()
        batch = self._makeOne(topic, client=client)
        batch.publish(PAYLOAD)
        self.assertEqual(batch.messages, [MESSAGE])

    def test_publish_bytes_w_add_timestamp(self):
        # _Topic._timestamp_message injects the fixed 'TIMESTAMP' sentinel.
        import base64
        PAYLOAD = b'This is the message text'
        B64 = base64.b64encode(PAYLOAD).decode('ascii')
        MESSAGE = {'data': B64,
                   'attributes': {'timestamp': 'TIMESTAMP'}}
        client = _Client(project=self.PROJECT)
        topic = _Topic(timestamp_messages=True)
        batch = self._makeOne(topic, client=client)
        batch.publish(PAYLOAD)
        self.assertEqual(batch.messages, [MESSAGE])

    def test_commit_w_bound_client(self):
        import base64
        PAYLOAD1 = b'This is the first message text'
        PAYLOAD2 = b'This is the second message text'
        B64_1 = base64.b64encode(PAYLOAD1)
        B64_2 = base64.b64encode(PAYLOAD2)
        MSGID1 = 'DEADBEEF'
        MSGID2 = 'BEADCAFE'
        MESSAGE1 = {'data': B64_1.decode('ascii'),
                    'attributes': {}}
        MESSAGE2 = {'data': B64_2.decode('ascii'),
                    'attributes': {'attr1': 'value1', 'attr2': 'value2'}}
        client = _Client(project='PROJECT')
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID1, MSGID2]
        topic = _Topic()
        batch = self._makeOne(topic, client=client)
        batch.publish(PAYLOAD1)
        batch.publish(PAYLOAD2, attr1='value1', attr2='value2')
        batch.commit()
        self.assertEqual(list(batch), [MSGID1, MSGID2])
        self.assertEqual(list(batch.messages), [])
        self.assertEqual(api._topic_published,
                         (topic.full_name, [MESSAGE1, MESSAGE2]))

    def test_commit_w_alternate_client(self):
        import base64
        PAYLOAD1 = b'This is the first message text'
        PAYLOAD2 = b'This is the second message text'
        B64_1 = base64.b64encode(PAYLOAD1)
        B64_2 = base64.b64encode(PAYLOAD2)
        MSGID1 = 'DEADBEEF'
        MSGID2 = 'BEADCAFE'
        MESSAGE1 = {'data': B64_1.decode('ascii'),
                    'attributes': {}}
        MESSAGE2 = {'data': B64_2.decode('ascii'),
                    'attributes': {'attr1': 'value1', 'attr2': 'value2'}}
        client1 = _Client(project='PROJECT')
        client2 = _Client(project='PROJECT')
        api = client2.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID1, MSGID2]
        topic = _Topic()
        batch = self._makeOne(topic, client=client1)
        batch.publish(PAYLOAD1)
        batch.publish(PAYLOAD2, attr1='value1', attr2='value2')
        batch.commit(client=client2)
        self.assertEqual(list(batch), [MSGID1, MSGID2])
        self.assertEqual(list(batch.messages), [])
        self.assertEqual(api._topic_published,
                         (topic.full_name, [MESSAGE1, MESSAGE2]))

    def test_context_mgr_success(self):
        # Leaving the `with` block cleanly commits the batch.
        import base64
        PAYLOAD1 = b'This is the first message text'
        PAYLOAD2 = b'This is the second message text'
        B64_1 = base64.b64encode(PAYLOAD1)
        B64_2 = base64.b64encode(PAYLOAD2)
        MSGID1 = 'DEADBEEF'
        MSGID2 = 'BEADCAFE'
        MESSAGE1 = {'data': B64_1.decode('ascii'),
                    'attributes': {}}
        MESSAGE2 = {'data': B64_2.decode('ascii'),
                    'attributes': {'attr1': 'value1', 'attr2': 'value2'}}
        client = _Client(project='PROJECT')
        api = client.publisher_api = _FauxPublisherAPI()
        api._topic_publish_response = [MSGID1, MSGID2]
        topic = _Topic()
        batch = self._makeOne(topic, client=client)
        with batch as other:
            batch.publish(PAYLOAD1)
            batch.publish(PAYLOAD2, attr1='value1', attr2='value2')
        self.assertTrue(other is batch)
        self.assertEqual(list(batch), [MSGID1, MSGID2])
        self.assertEqual(list(batch.messages), [])
        self.assertEqual(api._topic_published,
                         (topic.full_name, [MESSAGE1, MESSAGE2]))

    def test_context_mgr_failure(self):
        # An exception inside the `with` block leaves messages unpublished.
        import base64
        PAYLOAD1 = b'This is the first message text'
        PAYLOAD2 = b'This is the second message text'
        B64_1 = base64.b64encode(PAYLOAD1)
        B64_2 = base64.b64encode(PAYLOAD2)
        MESSAGE1 = {'data': B64_1.decode('ascii'),
                    'attributes': {}}
        MESSAGE2 = {'data': B64_2.decode('ascii'),
                    'attributes': {'attr1': 'value1', 'attr2': 'value2'}}
        client = _Client(project='PROJECT')
        api = client.publisher_api = _FauxPublisherAPI()
        topic = _Topic()
        batch = self._makeOne(topic, client=client)
        try:
            with batch as other:
                batch.publish(PAYLOAD1)
                batch.publish(PAYLOAD2, attr1='value1', attr2='value2')
                raise _Bugout()
        except _Bugout:
            pass
        self.assertTrue(other is batch)
        self.assertEqual(list(batch), [])
        self.assertEqual(list(batch.messages), [MESSAGE1, MESSAGE2])
        # getattr default proves the publish API was never invoked.
        self.assertEqual(getattr(api, '_topic_published', self), self)
class _FauxPublisherAPI(object):
_api_called = 0
def topic_create(self, topic_path):
self._topic_created = topic_path
return self._topic_create_response
def topic_get(self, topic_path):
from gcloud.exceptions import NotFound
self._topic_got = topic_path
try:
return self._topic_get_response
except AttributeError:
raise NotFound(topic_path)
def topic_delete(self, topic_path):
self._topic_deleted = topic_path
return self._topic_delete_response
def topic_publish(self, topic_path, messages):
self._topic_published = topic_path, messages
self._api_called += 1
return self._topic_publish_response
def topic_list_subscriptions(self, topic_path, page_size=None,
page_token=None):
self._topic_listed = topic_path, page_size, page_token
return self._topic_list_subscriptions_response
class _FauxIAMPolicy(object):
def get_iam_policy(self, target_path):
self._got_iam_policy = target_path
return self._get_iam_policy_response
def set_iam_policy(self, target_path, policy):
self._set_iam_policy = target_path, policy
return self._set_iam_policy_response
def test_iam_permissions(self, target_path, permissions):
self._tested_iam_permissions = target_path, permissions
return self._test_iam_permissions_response
class _Topic(object):
def __init__(self, name="NAME", project="PROJECT",
timestamp_messages=False):
self.full_name = 'projects/%s/topics/%s' % (project, name)
self.path = '/%s' % (self.full_name,)
self.timestamp_messages = timestamp_messages
def _timestamp_message(self, attrs):
if self.timestamp_messages:
attrs['timestamp'] = 'TIMESTAMP'
class _Client(object):
connection = None
def __init__(self, project):
self.project = project
class _Bugout(Exception):
    """Sentinel exception used to abort a batch context manager in tests."""
    pass
"""Tests for --subprocesses subprocess profiling support."""
import argparse
import io
import os
import signal
import subprocess
import sys
import tempfile
import threading
import time
import unittest
from unittest.mock import MagicMock, patch
from test.support import (
SHORT_TIMEOUT,
reap_children,
requires_remote_subprocess_debugging,
)
# Guard imports that require _remote_debugging module.
# This module is not available on all platforms (e.g., WASI).
try:
from profiling.sampling._child_monitor import (
get_child_pids,
ChildProcessMonitor,
is_python_process,
_MAX_CHILD_PROFILERS,
_CLEANUP_INTERVAL_CYCLES,
)
except ImportError:
# Module will be skipped via @requires_remote_subprocess_debugging decorators
get_child_pids = None
ChildProcessMonitor = None
is_python_process = None
_MAX_CHILD_PROFILERS = None
_CLEANUP_INTERVAL_CYCLES = None
try:
from profiling.sampling.cli import (
_add_sampling_options,
_validate_args,
_build_child_profiler_args,
_build_output_pattern,
_setup_child_monitor,
)
except ImportError:
# cli module imports sample module which requires _remote_debugging
_add_sampling_options = None
_validate_args = None
_build_child_profiler_args = None
_build_output_pattern = None
_setup_child_monitor = None
from .helpers import _cleanup_process
# String to check for in stderr when profiler lacks permissions (e.g., macOS)
_PERMISSION_ERROR_MSG = "Permission Error"
def _readline_with_timeout(file_obj, timeout):
# Thread-based readline with timeout - works across all platforms
# including Windows where select() doesn't work with pipes.
# Returns the line read, or None if timeout occurred.
result = [None]
exception = [None]
def reader():
try:
result[0] = file_obj.readline()
except Exception as e:
exception[0] = e
thread = threading.Thread(target=reader, daemon=True)
thread.start()
thread.join(timeout=timeout)
if thread.is_alive():
return None
if exception[0] is not None:
raise exception[0]
return result[0]
def _wait_for_process_ready(proc, timeout):
# Wait for a subprocess to be ready using polling instead of fixed sleep.
# Returns True if process is ready, False if it exited or timeout.
deadline = time.time() + timeout
poll_interval = 0.01
while time.time() < deadline:
if proc.poll() is not None:
return False
try:
if sys.platform == "linux":
if os.path.exists(f"/proc/{proc.pid}/exe"):
return True
else:
return True
except OSError:
pass
time.sleep(poll_interval)
poll_interval = min(poll_interval * 2, 0.1)
return proc.poll() is None
@requires_remote_subprocess_debugging()
class TestGetChildPids(unittest.TestCase):
    """Tests for the get_child_pids function."""

    def setUp(self):
        # Start from a clean slate: stray children left by earlier tests
        # would pollute the PID-discovery assertions below.
        reap_children()

    def tearDown(self):
        reap_children()

    def test_get_child_pids_from_remote_debugging(self):
        """Test get_child_pids from _remote_debugging module."""
        try:
            import _remote_debugging

            # Test that the function exists
            self.assertTrue(hasattr(_remote_debugging, "get_child_pids"))
            # Test with current process (should return empty or have children if any)
            result = _remote_debugging.get_child_pids(os.getpid())
            self.assertIsInstance(result, list)
        except (ImportError, AttributeError):
            self.skipTest("_remote_debugging.get_child_pids not available")

    def test_get_child_pids_fallback(self):
        """Test the fallback implementation for get_child_pids."""
        # Test with current process
        result = get_child_pids(os.getpid())
        self.assertIsInstance(result, list)

    @unittest.skipUnless(sys.platform == "linux", "Linux only")
    def test_discover_child_process_linux(self):
        """Test that we can discover child processes on Linux."""
        # Create a child process
        proc = subprocess.Popen(
            [sys.executable, "-c", "import time; time.sleep(10)"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        try:
            # Poll until child appears; discovery may lag process creation.
            deadline = time.time() + SHORT_TIMEOUT
            children = []
            while time.time() < deadline:
                children = get_child_pids(os.getpid())
                if proc.pid in children:
                    break
                time.sleep(0.05)
            self.assertIn(
                proc.pid,
                children,
                f"Child PID {proc.pid} not discovered within {SHORT_TIMEOUT}s. "
                f"Found PIDs: {children}",
            )
        finally:
            _cleanup_process(proc)

    def test_recursive_child_discovery(self):
        """Test that recursive=True finds grandchildren."""
        # Create a child that spawns a grandchild and keeps a reference to it
        # so we can clean it up via the child process
        code = """
import subprocess
import sys
import threading
grandchild = subprocess.Popen([sys.executable, '-c', 'import time; time.sleep(60)'])
print(grandchild.pid, flush=True)
# Wait for parent to send signal byte (cross-platform)
# Using threading with timeout so test doesn't hang if something goes wrong
# Timeout is 60s (2x test timeout) to ensure child outlives test in worst case
def wait_for_signal():
    try:
        sys.stdin.buffer.read(1)
    except:
        pass
t = threading.Thread(target=wait_for_signal, daemon=True)
t.start()
t.join(timeout=60)
# Clean up grandchild before exiting
if grandchild.poll() is None:
    grandchild.terminate()
    try:
        grandchild.wait(timeout=2)
    except subprocess.TimeoutExpired:
        grandchild.kill()
        try:
            grandchild.wait(timeout=2)
        except subprocess.TimeoutExpired:
            grandchild.wait()
"""
        proc = subprocess.Popen(
            [sys.executable, "-c", code],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
        )
        grandchild_pid = None
        try:
            # Read grandchild PID with thread-based timeout
            # This prevents indefinite blocking on all platforms
            grandchild_pid_line = _readline_with_timeout(
                proc.stdout, SHORT_TIMEOUT
            )
            if grandchild_pid_line is None:
                self.fail(
                    f"Timeout waiting for grandchild PID from child process "
                    f"(child PID: {proc.pid})"
                )
            if not grandchild_pid_line:
                self.fail(
                    f"Child process {proc.pid} closed stdout without printing "
                    f"grandchild PID"
                )
            grandchild_pid = int(grandchild_pid_line.strip())
            # Poll until grandchild is visible to recursive discovery.
            deadline = time.time() + SHORT_TIMEOUT
            pids_recursive = []
            while time.time() < deadline:
                pids_recursive = get_child_pids(os.getpid(), recursive=True)
                if grandchild_pid in pids_recursive:
                    break
                time.sleep(0.05)
            self.assertIn(
                proc.pid,
                pids_recursive,
                f"Child PID {proc.pid} not found in recursive discovery. "
                f"Found: {pids_recursive}",
            )
            self.assertIn(
                grandchild_pid,
                pids_recursive,
                f"Grandchild PID {grandchild_pid} not found in recursive discovery. "
                f"Found: {pids_recursive}",
            )
            # Non-recursive should find only direct child
            pids_direct = get_child_pids(os.getpid(), recursive=False)
            self.assertIn(
                proc.pid,
                pids_direct,
                f"Child PID {proc.pid} not found in non-recursive discovery. "
                f"Found: {pids_direct}",
            )
            self.assertNotIn(
                grandchild_pid,
                pids_direct,
                f"Grandchild PID {grandchild_pid} should NOT be in non-recursive "
                f"discovery. Found: {pids_direct}",
            )
        finally:
            # Send signal byte to child to trigger cleanup, then close stdin
            try:
                proc.stdin.write(b"x")
                proc.stdin.flush()
                proc.stdin.close()
            except OSError:
                pass
            proc.stdout.close()
            _cleanup_process(proc)
            # The grandchild may not have been cleaned up by the child process
            # (e.g., if the child was killed). Explicitly terminate the
            # grandchild to prevent PermissionError on Windows when removing
            # temp directories.
            if grandchild_pid is not None:
                try:
                    os.kill(grandchild_pid, signal.SIGTERM)
                except (OSError, ProcessLookupError):
                    pass  # Process already exited

    def test_nonexistent_pid_returns_empty(self):
        """Test that nonexistent PID returns empty list."""
        # Use a very high PID that's unlikely to exist
        result = get_child_pids(999999999)
        self.assertEqual(result, [])
@requires_remote_subprocess_debugging()
class TestChildProcessMonitor(unittest.TestCase):
    """Unit tests for ChildProcessMonitor construction and lifecycle."""

    def setUp(self):
        reap_children()

    def tearDown(self):
        reap_children()

    def test_monitor_creation(self):
        """A fresh monitor exposes the constructor arguments it was given."""
        monitor = ChildProcessMonitor(
            pid=os.getpid(),
            cli_args=["-r", "10khz", "-d", "5"],
            output_pattern="test_{pid}.pstats",
        )
        self.assertEqual(monitor.parent_pid, os.getpid())
        self.assertEqual(monitor.cli_args, ["-r", "10khz", "-d", "5"])
        self.assertEqual(monitor.output_pattern, "test_{pid}.pstats")

    def test_monitor_lifecycle(self):
        """The monitoring thread exists only while the context is active."""
        monitor = ChildProcessMonitor(
            pid=os.getpid(), cli_args=[], output_pattern=None
        )
        # No thread until the context manager is entered.
        self.assertIsNone(monitor._monitor_thread)
        with monitor:
            # Entering the context starts the background thread.
            self.assertIsNotNone(monitor._monitor_thread)
            self.assertTrue(monitor._monitor_thread.is_alive())
        # Leaving the context stops it again.
        self.assertFalse(monitor._monitor_thread.is_alive())

    def test_spawned_profilers_property(self):
        """spawned_profilers hands back a defensive copy, not the live list."""
        monitor = ChildProcessMonitor(
            pid=os.getpid(), cli_args=[], output_pattern=None
        )
        snapshot = monitor.spawned_profilers
        self.assertEqual(snapshot, [])
        self.assertIsNot(snapshot, monitor._spawned_profilers)

    def test_context_manager(self):
        """Using the monitor via `with` runs and then stops the thread."""
        with ChildProcessMonitor(
            pid=os.getpid(), cli_args=[], output_pattern=None
        ) as monitor:
            self.assertIsNotNone(monitor._monitor_thread)
            self.assertTrue(monitor._monitor_thread.is_alive())
        self.assertFalse(monitor._monitor_thread.is_alive())
@requires_remote_subprocess_debugging()
class TestCLIChildrenFlag(unittest.TestCase):
    """Tests for the --subprocesses CLI flag."""

    def setUp(self):
        reap_children()

    def tearDown(self):
        reap_children()

    def test_subprocesses_flag_parsed(self):
        """Test that --subprocesses flag is recognized."""
        parser = argparse.ArgumentParser()
        _add_sampling_options(parser)
        # Parse with --subprocesses
        args = parser.parse_args(["--subprocesses"])
        self.assertTrue(args.subprocesses)
        # Parse without --subprocesses
        args = parser.parse_args([])
        self.assertFalse(args.subprocesses)

    def test_subprocesses_incompatible_with_live(self):
        """Test that --subprocesses is incompatible with --live."""
        # Create mock args with both subprocesses and live
        args = argparse.Namespace(
            subprocesses=True,
            live=True,
            async_aware=False,
            format="pstats",
            mode="wall",
            sort=None,
            limit=None,
            no_summary=False,
            opcodes=False,
            blocking=False,
            interval=1000,
        )
        parser = argparse.ArgumentParser()
        # argparse reports errors via parser.error(), which raises SystemExit.
        with self.assertRaises(SystemExit):
            _validate_args(args, parser)

    def test_build_child_profiler_args(self):
        """Test building CLI args for child profilers."""
        args = argparse.Namespace(
            sample_interval_usec=200,
            duration=15,
            all_threads=True,
            realtime_stats=False,
            native=True,
            gc=True,
            opcodes=False,
            async_aware=False,
            mode="cpu",
            format="flamegraph",
        )
        child_args = _build_child_profiler_args(args)

        # Verify flag-value pairs are correctly paired (flag followed by value)
        def assert_flag_value_pair(flag, value):
            self.assertIn(
                flag,
                child_args,
                f"Flag '{flag}' not found in args: {child_args}",
            )
            flag_index = child_args.index(flag)
            self.assertGreater(
                len(child_args),
                flag_index + 1,
                f"No value after flag '{flag}' in args: {child_args}",
            )
            self.assertEqual(
                child_args[flag_index + 1],
                str(value),
                f"Flag '{flag}' should be followed by '{value}', got "
                f"'{child_args[flag_index + 1]}' in args: {child_args}",
            )

        # NOTE(review): -r appears to be a sampling rate derived from the
        # 200usec interval (1e6 / 200 == 5000) -- confirm against
        # _build_child_profiler_args.
        assert_flag_value_pair("-r", 5000)
        assert_flag_value_pair("-d", 15)
        assert_flag_value_pair("--mode", "cpu")
        # Verify standalone flags are present
        self.assertIn(
            "-a", child_args, f"Flag '-a' not found in args: {child_args}"
        )
        self.assertIn(
            "--native",
            child_args,
            f"Flag '--native' not found in args: {child_args}",
        )
        self.assertIn(
            "--flamegraph",
            child_args,
            f"Flag '--flamegraph' not found in args: {child_args}",
        )
        self.assertNotIn(
            "--browser",
            child_args,
            f"Flag '--browser' should not be in child args: {child_args}",
        )

    def test_build_child_profiler_args_no_gc(self):
        """Test building CLI args with --no-gc."""
        args = argparse.Namespace(
            sample_interval_usec=100,
            duration=5,
            all_threads=False,
            realtime_stats=False,
            native=False,
            gc=False,  # Explicitly disabled
            opcodes=False,
            async_aware=False,
            mode="wall",
            format="pstats",
        )
        child_args = _build_child_profiler_args(args)
        self.assertIn(
            "--no-gc",
            child_args,
            f"Flag '--no-gc' not found when gc=False. Args: {child_args}",
        )

    def test_build_output_pattern_with_outfile(self):
        """Test output pattern generation with user-specified output."""
        # With extension: _{pid} is inserted before the extension.
        args = argparse.Namespace(outfile="output.html", format="flamegraph")
        pattern = _build_output_pattern(args)
        self.assertEqual(pattern, "output_{pid}.html")
        # Without extension: _{pid} is appended.
        args = argparse.Namespace(outfile="output", format="pstats")
        pattern = _build_output_pattern(args)
        self.assertEqual(pattern, "output_{pid}")

    def test_build_output_pattern_default(self):
        """Test output pattern generation with default output."""
        # Flamegraph format
        args = argparse.Namespace(outfile=None, format="flamegraph")
        pattern = _build_output_pattern(args)
        self.assertIn("{pid}", pattern)
        self.assertIn("flamegraph", pattern)
        self.assertTrue(pattern.endswith(".html"))
        # Heatmap format
        args = argparse.Namespace(outfile=None, format="heatmap")
        pattern = _build_output_pattern(args)
        self.assertEqual(pattern, "heatmap_{pid}")
@requires_remote_subprocess_debugging()
class TestChildrenIntegration(unittest.TestCase):
    """Integration tests for --subprocesses functionality."""

    def setUp(self):
        reap_children()

    def tearDown(self):
        reap_children()

    def test_setup_child_monitor(self):
        """_setup_child_monitor builds a monitor bound to the given PID."""
        namespace = argparse.Namespace(
            sample_interval_usec=100,
            duration=5,
            all_threads=False,
            realtime_stats=False,
            native=False,
            gc=True,
            opcodes=False,
            async_aware=False,
            mode="wall",
            format="pstats",
            outfile=None,
        )
        monitor = _setup_child_monitor(namespace, os.getpid())
        # Register cleanup immediately so the monitor is shut down even if
        # an assertion below fails.
        self.addCleanup(monitor.__exit__, None, None, None)
        self.assertIsNotNone(monitor)
        self.assertEqual(
            monitor.parent_pid,
            os.getpid(),
            f"Monitor parent_pid should be {os.getpid()}, got {monitor.parent_pid}",
        )
@requires_remote_subprocess_debugging()
class TestIsPythonProcess(unittest.TestCase):
    """Tests for the is_python_process function."""

    def setUp(self):
        reap_children()

    def tearDown(self):
        reap_children()

    def test_is_python_process_current_process(self):
        """Test that current process is detected as Python."""
        # Current process should be Python
        result = is_python_process(os.getpid())
        self.assertTrue(
            result,
            f"Current process (PID {os.getpid()}) should be detected as Python",
        )

    def test_is_python_process_python_subprocess(self):
        """Test that a Python subprocess is detected as Python."""
        # Start a Python subprocess
        proc = subprocess.Popen(
            [sys.executable, "-c", "import time; time.sleep(10)"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        try:
            # Poll until Python runtime structures are initialized
            # (is_python_process probes for runtime structures which take
            # time to initialize after process start)
            deadline = time.time() + SHORT_TIMEOUT
            detected = False
            while time.time() < deadline:
                if proc.poll() is not None:
                    self.fail(f"Process {proc.pid} exited unexpectedly")
                if is_python_process(proc.pid):
                    detected = True
                    break
                time.sleep(0.05)
            self.assertTrue(
                detected,
                f"Python subprocess (PID {proc.pid}) should be detected as Python "
                f"within {SHORT_TIMEOUT}s",
            )
        finally:
            _cleanup_process(proc)

    @unittest.skipUnless(sys.platform == "linux", "Linux only test")
    def test_is_python_process_non_python_subprocess(self):
        """Test that a non-Python subprocess is not detected as Python."""
        # Start a non-Python subprocess (sleep command)
        proc = subprocess.Popen(
            ["sleep", "10"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        try:
            # Wait for process to be ready using polling
            self.assertTrue(
                _wait_for_process_ready(proc, SHORT_TIMEOUT),
                f"Process {proc.pid} should be ready within {SHORT_TIMEOUT}s",
            )
            self.assertFalse(
                is_python_process(proc.pid),
                f"Non-Python subprocess 'sleep' (PID {proc.pid}) should NOT be "
                f"detected as Python",
            )
        finally:
            _cleanup_process(proc)

    def test_is_python_process_nonexistent_pid(self):
        """Test that nonexistent PID returns False."""
        # Use a very high PID that's unlikely to exist
        result = is_python_process(999999999)
        self.assertFalse(
            result,
            "Nonexistent PID 999999999 should return False",
        )

    def test_is_python_process_exited_process(self):
        """Test handling of a process that exits quickly."""
        # Start a process that exits immediately
        proc = subprocess.Popen(
            [sys.executable, "-c", "pass"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        # Wait for it to exit
        proc.wait(timeout=SHORT_TIMEOUT)
        # Should return False for exited process (not raise)
        result = is_python_process(proc.pid)
        self.assertFalse(
            result, f"Exited process (PID {proc.pid}) should return False"
        )
@requires_remote_subprocess_debugging()
class TestMaxChildProfilersLimit(unittest.TestCase):
    """Tests for the _MAX_CHILD_PROFILERS limit."""

    def setUp(self):
        reap_children()

    def tearDown(self):
        reap_children()

    def test_max_profilers_constant_exists(self):
        """Test that _MAX_CHILD_PROFILERS constant is defined."""
        self.assertEqual(
            _MAX_CHILD_PROFILERS,
            100,
            f"_MAX_CHILD_PROFILERS should be 100, got {_MAX_CHILD_PROFILERS}",
        )

    def test_cleanup_interval_constant_exists(self):
        """Test that _CLEANUP_INTERVAL_CYCLES constant is defined."""
        self.assertEqual(
            _CLEANUP_INTERVAL_CYCLES,
            10,
            f"_CLEANUP_INTERVAL_CYCLES should be 10, got {_CLEANUP_INTERVAL_CYCLES}",
        )

    def test_monitor_respects_max_limit(self):
        """Test that monitor refuses to spawn more than _MAX_CHILD_PROFILERS."""
        # Create a monitor
        monitor = ChildProcessMonitor(
            pid=os.getpid(),
            cli_args=["-r", "10khz", "-d", "5"],
            output_pattern="test_{pid}.pstats",
        )
        # Manually fill up the profilers list to the limit
        mock_profilers = [MagicMock() for _ in range(_MAX_CHILD_PROFILERS)]
        for mock_proc in mock_profilers:
            mock_proc.poll.return_value = None  # Simulate running process
        monitor._spawned_profilers = mock_profilers
        # Try to spawn another profiler - should be rejected
        stderr_capture = io.StringIO()
        with patch("sys.stderr", stderr_capture):
            monitor._spawn_profiler_for_child(99999)
        # Verify warning was printed
        stderr_output = stderr_capture.getvalue()
        self.assertIn(
            "Max child profilers",
            stderr_output,
            f"Expected warning about max profilers, got: {stderr_output}",
        )
        self.assertIn(
            str(_MAX_CHILD_PROFILERS),
            stderr_output,
            f"Warning should mention limit ({_MAX_CHILD_PROFILERS}): {stderr_output}",
        )
        # Verify no new profiler was added
        self.assertEqual(
            len(monitor._spawned_profilers),
            _MAX_CHILD_PROFILERS,
            f"Should still have {_MAX_CHILD_PROFILERS} profilers, got "
            f"{len(monitor._spawned_profilers)}",
        )
@requires_remote_subprocess_debugging()
class TestWaitForProfilers(unittest.TestCase):
    """Tests for the wait_for_profilers method.

    Uses the ``patch`` name imported at module top (consistent with the
    rest of this file) and measures elapsed time with time.monotonic(),
    which is immune to wall-clock adjustments that would otherwise skew
    the timing assertions.
    """

    def setUp(self):
        reap_children()

    def tearDown(self):
        reap_children()

    def test_wait_for_profilers_empty_list(self):
        """Test that wait_for_profilers returns immediately with no profilers."""
        monitor = ChildProcessMonitor(
            pid=os.getpid(), cli_args=[], output_pattern=None
        )
        # Should return immediately without printing anything
        stderr_capture = io.StringIO()
        with patch("sys.stderr", stderr_capture):
            start = time.monotonic()
            monitor.wait_for_profilers(timeout=10.0)
            elapsed = time.monotonic() - start
        # Should complete very quickly (less than 1 second)
        self.assertLess(
            elapsed,
            1.0,
            f"wait_for_profilers with empty list took {elapsed:.2f}s, expected < 1s",
        )
        # No "Waiting for..." message should be printed
        self.assertNotIn(
            "Waiting for",
            stderr_capture.getvalue(),
            "Should not print waiting message when no profilers",
        )

    def test_wait_for_profilers_with_completed_process(self):
        """Test waiting for profilers that complete quickly."""
        monitor = ChildProcessMonitor(
            pid=os.getpid(), cli_args=[], output_pattern=None
        )
        # Start a process that exits quickly
        proc = subprocess.Popen(
            [sys.executable, "-c", "pass"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        # Add to spawned profilers
        monitor._spawned_profilers.append(proc)
        try:
            stderr_capture = io.StringIO()
            with patch("sys.stderr", stderr_capture):
                start = time.monotonic()
                monitor.wait_for_profilers(timeout=SHORT_TIMEOUT)
                elapsed = time.monotonic() - start
            # Should complete quickly since process exits fast
            self.assertLess(
                elapsed,
                5.0,
                f"wait_for_profilers took {elapsed:.2f}s for quick process",
            )
            # Should print waiting message
            self.assertIn(
                "Waiting for 1 child profiler",
                stderr_capture.getvalue(),
                "Should print waiting message",
            )
        finally:
            _cleanup_process(proc)

    def test_wait_for_profilers_timeout(self):
        """Test that wait_for_profilers respects timeout."""
        monitor = ChildProcessMonitor(
            pid=os.getpid(), cli_args=[], output_pattern=None
        )
        # Start a process that runs for a long time
        proc = subprocess.Popen(
            [sys.executable, "-c", "import time; time.sleep(60)"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        # Add to spawned profilers
        monitor._spawned_profilers.append(proc)
        try:
            stderr_capture = io.StringIO()
            with patch("sys.stderr", stderr_capture):
                start = time.monotonic()
                # Use short timeout
                monitor.wait_for_profilers(timeout=0.5)
                elapsed = time.monotonic() - start
            # Should timeout after approximately 0.5 seconds
            self.assertGreater(
                elapsed,
                0.4,
                f"wait_for_profilers returned too quickly ({elapsed:.2f}s)",
            )
            self.assertLess(
                elapsed,
                2.0,
                f"wait_for_profilers took too long ({elapsed:.2f}s), timeout not respected",
            )
        finally:
            _cleanup_process(proc)

    def test_wait_for_profilers_multiple(self):
        """Test waiting for multiple profilers."""
        monitor = ChildProcessMonitor(
            pid=os.getpid(), cli_args=[], output_pattern=None
        )
        # Start multiple processes
        procs = []
        for _ in range(3):
            proc = subprocess.Popen(
                [sys.executable, "-c", "import time; time.sleep(0.1)"],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )
            procs.append(proc)
            monitor._spawned_profilers.append(proc)
        try:
            stderr_capture = io.StringIO()
            with patch("sys.stderr", stderr_capture):
                monitor.wait_for_profilers(timeout=SHORT_TIMEOUT)
            # Should report correct count
            self.assertIn(
                "Waiting for 3 child profiler",
                stderr_capture.getvalue(),
                "Should report correct profiler count",
            )
        finally:
            for proc in procs:
                _cleanup_process(proc)
@requires_remote_subprocess_debugging()
class TestEndToEndChildrenCLI(unittest.TestCase):
    """End-to-end tests for --subprocesses CLI flag.

    Each test writes a small parent script to a temp directory, profiles it
    via ``python -m profiling.sampling run --subprocesses``, and checks the
    outputs.  The embedded scripts are plain string literals (nothing is
    interpolated into them).
    """

    def setUp(self):
        reap_children()

    def tearDown(self):
        reap_children()

    def test_subprocesses_flag_spawns_child_and_creates_output(self):
        """Test that --subprocesses flag works end-to-end with actual subprocesses."""
        # Create a temporary directory for output files
        with tempfile.TemporaryDirectory() as tmpdir:
            # Create a script that spawns a child Python process
            parent_script = """
import subprocess
import sys
import time
# Spawn a child that does some work
child = subprocess.Popen([
    sys.executable, '-c',
    'import time; [i**2 for i in range(1000)]; time.sleep(2)'
])
# Do some work in parent
for i in range(1000):
    _ = i ** 2
time.sleep(2)
child.wait()
"""
            script_file = os.path.join(tmpdir, "parent_script.py")
            with open(script_file, "w") as f:
                f.write(parent_script)
            output_file = os.path.join(tmpdir, "profile.pstats")
            # Run the profiler with --subprocesses flag
            result = subprocess.run(
                [
                    sys.executable,
                    "-m",
                    "profiling.sampling",
                    "run",
                    "--subprocesses",
                    "-d",
                    "3",
                    "-r",
                    "100",
                    "-o",
                    output_file,
                    script_file,
                ],
                capture_output=True,
                text=True,
                timeout=SHORT_TIMEOUT,
            )
            # Check that parent output file was created
            self.assertTrue(
                os.path.exists(output_file),
                f"Parent profile output not created. "
                f"stdout: {result.stdout}, stderr: {result.stderr}",
            )
            # Note: Child profiling is best-effort; the child may exit before
            # the profiler attaches, or the process may not be detected as
            # Python, so we deliberately do NOT assert that per-child
            # profile_{pid}.pstats files exist.  We just verify the
            # mechanism doesn't crash.
            if result.returncode != 0:
                self.fail(
                    f"Profiler exited with code {result.returncode}. "
                    f"stdout: {result.stdout}, stderr: {result.stderr}"
                )

    def test_subprocesses_flag_with_flamegraph_output(self):
        """Test --subprocesses with flamegraph output format."""
        with tempfile.TemporaryDirectory() as tmpdir:
            # Simple parent that spawns a child
            parent_script = """
import subprocess
import sys
import time
child = subprocess.Popen([sys.executable, '-c', 'import time; time.sleep(1)'])
time.sleep(1)
child.wait()
"""
            script_file = os.path.join(tmpdir, "parent.py")
            with open(script_file, "w") as f:
                f.write(parent_script)
            output_file = os.path.join(tmpdir, "flame.html")
            result = subprocess.run(
                [
                    sys.executable,
                    "-m",
                    "profiling.sampling",
                    "run",
                    "--subprocesses",
                    "-d",
                    "2",
                    "-r",
                    "100",
                    "--flamegraph",
                    "-o",
                    output_file,
                    script_file,
                ],
                capture_output=True,
                text=True,
                timeout=SHORT_TIMEOUT,
            )
            self.assertTrue(
                os.path.exists(output_file),
                f"Flamegraph output not created. stderr: {result.stderr}",
            )
            # Verify it's valid HTML
            with open(output_file, "r") as f:
                content = f.read()
            self.assertIn(
                "<html",
                content.lower(),
                "Flamegraph output should be HTML",
            )

    def test_subprocesses_flag_no_crash_on_quick_child(self):
        """Test that --subprocesses doesn't crash when child exits quickly."""
        with tempfile.TemporaryDirectory() as tmpdir:
            # Parent spawns a child that exits immediately
            parent_script = """
import subprocess
import sys
import time
# Child exits immediately
child = subprocess.Popen([sys.executable, '-c', 'pass'])
child.wait()
time.sleep(1)
"""
            script_file = os.path.join(tmpdir, "parent.py")
            with open(script_file, "w") as f:
                f.write(parent_script)
            output_file = os.path.join(tmpdir, "profile.pstats")
            result = subprocess.run(
                [
                    sys.executable,
                    "-m",
                    "profiling.sampling",
                    "run",
                    "--subprocesses",
                    "-d",
                    "2",
                    "-r",
                    "100",
                    "-o",
                    output_file,
                    script_file,
                ],
                capture_output=True,
                text=True,
                timeout=SHORT_TIMEOUT,
            )
            # Should not crash - exit code 0
            self.assertEqual(
                result.returncode,
                0,
                f"Profiler crashed with quick-exit child. "
                f"stderr: {result.stderr}",
            )
# Allow running this test module directly: python test_children.py
if __name__ == "__main__":
    unittest.main()
# Generated by the protocol buffer compiler. DO NOT EDIT!
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf import descriptor_pb2
from zippylog.envelope import register_message
# @@protoc_insertion_point(imports)
# File-level descriptor for zippylog/device/streamer.proto.  serialized_pb
# is the wire-format FileDescriptorProto emitted by protoc -- do not edit.
DESCRIPTOR = descriptor.FileDescriptor(
  name='zippylog/device/streamer.proto',
  package='zippylog.device.streamer',
  serialized_pb='\n\x1ezippylog/device/streamer.proto\x12\x18zippylog.device.streamer\"\x14\n\x06\x43reate\x12\n\n\x02id\x18\x01 \x01(\x0c\"\x15\n\x07\x44\x65stroy\x12\n\n\x02id\x18\x01 \x01(\x0c\"7\n\x13SubscriptionExpired\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x14\n\x0csubscription\x18\x02 \x01(\x0c\"4\n\x10ReceiveKeepalive\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x14\n\x0csubscription\x18\x02 \x01(\x0c\"D\n SubscriptionRenewedFromKeepalive\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x14\n\x0csubscription\x18\x02 \x01(\x0c\"F\n\"RejectKeepaliveUnknownSubscription\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x14\n\x0csubscription\x18\x02 \x01(\x0c\"=\n\x19\x45rrorRenewingSubscription\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x14\n\x0csubscription\x18\x02 \x01(\x0c')
# ---------------------------------------------------------------------------
# Message descriptors, mechanically generated by protoc -- do not edit.
# Every field below is a protobuf `bytes` field (type=12 / cpp_type=9,
# label=1 meaning optional).  serialized_start/serialized_end are byte
# offsets of each message inside DESCRIPTOR's serialized_pb blob.
# ---------------------------------------------------------------------------

_CREATE = descriptor.Descriptor(
  name='Create',
  full_name='zippylog.device.streamer.Create',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='id', full_name='zippylog.device.streamer.Create.id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=60,
  serialized_end=80,
)

_DESTROY = descriptor.Descriptor(
  name='Destroy',
  full_name='zippylog.device.streamer.Destroy',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='id', full_name='zippylog.device.streamer.Destroy.id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=82,
  serialized_end=103,
)

_SUBSCRIPTIONEXPIRED = descriptor.Descriptor(
  name='SubscriptionExpired',
  full_name='zippylog.device.streamer.SubscriptionExpired',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='id', full_name='zippylog.device.streamer.SubscriptionExpired.id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='subscription', full_name='zippylog.device.streamer.SubscriptionExpired.subscription', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=105,
  serialized_end=160,
)

_RECEIVEKEEPALIVE = descriptor.Descriptor(
  name='ReceiveKeepalive',
  full_name='zippylog.device.streamer.ReceiveKeepalive',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='id', full_name='zippylog.device.streamer.ReceiveKeepalive.id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='subscription', full_name='zippylog.device.streamer.ReceiveKeepalive.subscription', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=162,
  serialized_end=214,
)

_SUBSCRIPTIONRENEWEDFROMKEEPALIVE = descriptor.Descriptor(
  name='SubscriptionRenewedFromKeepalive',
  full_name='zippylog.device.streamer.SubscriptionRenewedFromKeepalive',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='id', full_name='zippylog.device.streamer.SubscriptionRenewedFromKeepalive.id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='subscription', full_name='zippylog.device.streamer.SubscriptionRenewedFromKeepalive.subscription', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=216,
  serialized_end=284,
)

_REJECTKEEPALIVEUNKNOWNSUBSCRIPTION = descriptor.Descriptor(
  name='RejectKeepaliveUnknownSubscription',
  full_name='zippylog.device.streamer.RejectKeepaliveUnknownSubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='id', full_name='zippylog.device.streamer.RejectKeepaliveUnknownSubscription.id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='subscription', full_name='zippylog.device.streamer.RejectKeepaliveUnknownSubscription.subscription', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=286,
  serialized_end=356,
)

_ERRORRENEWINGSUBSCRIPTION = descriptor.Descriptor(
  name='ErrorRenewingSubscription',
  full_name='zippylog.device.streamer.ErrorRenewingSubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    descriptor.FieldDescriptor(
      name='id', full_name='zippylog.device.streamer.ErrorRenewingSubscription.id', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    descriptor.FieldDescriptor(
      name='subscription', full_name='zippylog.device.streamer.ErrorRenewingSubscription.subscription', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value="",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=358,
  serialized_end=419,
)

# Wire each message descriptor into the file descriptor's lookup table.
DESCRIPTOR.message_types_by_name['Create'] = _CREATE
DESCRIPTOR.message_types_by_name['Destroy'] = _DESTROY
DESCRIPTOR.message_types_by_name['SubscriptionExpired'] = _SUBSCRIPTIONEXPIRED
DESCRIPTOR.message_types_by_name['ReceiveKeepalive'] = _RECEIVEKEEPALIVE
DESCRIPTOR.message_types_by_name['SubscriptionRenewedFromKeepalive'] = _SUBSCRIPTIONRENEWEDFROMKEEPALIVE
DESCRIPTOR.message_types_by_name['RejectKeepaliveUnknownSubscription'] = _REJECTKEEPALIVEUNKNOWNSUBSCRIPTION
DESCRIPTOR.message_types_by_name['ErrorRenewingSubscription'] = _ERRORRENEWINGSUBSCRIPTION
class Create(message.Message):
  # Generated protobuf message; the Python 2 __metaclass__ hook lets
  # reflection build the full implementation from DESCRIPTOR.
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CREATE
  # zippylog envelope identifiers (paired with the register_message calls).
  ZIPPYLOG_NAMESPACE = 1
  ZIPPYLOG_ENUMERATION = 84
  # @@protoc_insertion_point(class_scope:zippylog.device.streamer.Create)
class Destroy(message.Message):
  # Generated protobuf message; implementation comes from DESCRIPTOR via
  # the reflection metaclass.
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _DESTROY
  # zippylog envelope identifiers.
  ZIPPYLOG_NAMESPACE = 1
  ZIPPYLOG_ENUMERATION = 85
  # @@protoc_insertion_point(class_scope:zippylog.device.streamer.Destroy)
class SubscriptionExpired(message.Message):
  # Generated protobuf message; implementation comes from DESCRIPTOR via
  # the reflection metaclass.
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _SUBSCRIPTIONEXPIRED
  # zippylog envelope identifiers.
  ZIPPYLOG_NAMESPACE = 1
  ZIPPYLOG_ENUMERATION = 86
  # @@protoc_insertion_point(class_scope:zippylog.device.streamer.SubscriptionExpired)
class ReceiveKeepalive(message.Message):
  # Generated protobuf message; implementation comes from DESCRIPTOR via
  # the reflection metaclass.
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _RECEIVEKEEPALIVE
  # zippylog envelope identifiers.
  ZIPPYLOG_NAMESPACE = 1
  ZIPPYLOG_ENUMERATION = 87
  # @@protoc_insertion_point(class_scope:zippylog.device.streamer.ReceiveKeepalive)
class SubscriptionRenewedFromKeepalive(message.Message):
  # Generated protobuf message; implementation comes from DESCRIPTOR via
  # the reflection metaclass.
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _SUBSCRIPTIONRENEWEDFROMKEEPALIVE
  # zippylog envelope identifiers.
  ZIPPYLOG_NAMESPACE = 1
  ZIPPYLOG_ENUMERATION = 88
  # @@protoc_insertion_point(class_scope:zippylog.device.streamer.SubscriptionRenewedFromKeepalive)
class RejectKeepaliveUnknownSubscription(message.Message):
  # Generated protobuf message; implementation comes from DESCRIPTOR via
  # the reflection metaclass.
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _REJECTKEEPALIVEUNKNOWNSUBSCRIPTION
  # zippylog envelope identifiers.
  ZIPPYLOG_NAMESPACE = 1
  ZIPPYLOG_ENUMERATION = 89
  # @@protoc_insertion_point(class_scope:zippylog.device.streamer.RejectKeepaliveUnknownSubscription)
class ErrorRenewingSubscription(message.Message):
  # Generated protobuf message; implementation comes from DESCRIPTOR via
  # the reflection metaclass.
  __metaclass__ = reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _ERRORRENEWINGSUBSCRIPTION
  # zippylog envelope identifiers.
  ZIPPYLOG_NAMESPACE = 1
  ZIPPYLOG_ENUMERATION = 90
  # @@protoc_insertion_point(class_scope:zippylog.device.streamer.ErrorRenewingSubscription)
# Register every message type with the zippylog envelope registry; the
# (namespace, enumeration) keys match each class's ZIPPYLOG_* attributes.
register_message(1, 84, 'zippylog.device.streamer_pb2', 'Create')
register_message(1, 85, 'zippylog.device.streamer_pb2', 'Destroy')
register_message(1, 86, 'zippylog.device.streamer_pb2', 'SubscriptionExpired')
register_message(1, 87, 'zippylog.device.streamer_pb2', 'ReceiveKeepalive')
register_message(1, 88, 'zippylog.device.streamer_pb2', 'SubscriptionRenewedFromKeepalive')
register_message(1, 89, 'zippylog.device.streamer_pb2', 'RejectKeepaliveUnknownSubscription')
register_message(1, 90, 'zippylog.device.streamer_pb2', 'ErrorRenewingSubscription')
# @@protoc_insertion_point(module_scope)
""" net_diff.py
2015-09-15
Compares the network list in Nets.out and WLnets.out.
Lists all the nets that are in Nets.out but not in WLnets.out.
"""
INPUT_FILE_DIR = "../input_files/"
FORBIDDEN_LINES = ["-", "#", "NAME", "NET", "Steiner"] # Strings beginning unwanted lines
nets = list()
WLnets = list()
missingNets = list()
# Get the nets
with open(INPUT_FILE_DIR+'Nets.out', 'r') as netsInput:
nets = netsInput.read().splitlines()
# Get the WLnets...
with open(INPUT_FILE_DIR+'WLnets.out', 'r') as WLnetsInput:
for line in WLnetsInput:
#... but only if they do not begin with a forbidden char.
forbidden = False
for forbiddenChar in FORBIDDEN_LINES:
if line.startswith(forbiddenChar):
forbidden = True
if not forbidden:
line = line.strip(' \n')
lineSplitted = line.split(' ')
# Keep only the first column
WLnets.append(lineSplitted[0])
# If the net from the list nets is not found in the list WLnets,
# add it to the list missingNets.
for net in nets:
try:
WLnets.index(net)
except:
missingNets.append(net)
print(missingNets)
print len(missingNets),"missing nets in WLnets.out out of", len(nets) | unknown | codeparrot/codeparrot-clean | ||
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class to transform an subgraph into another.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from copy import deepcopy
from functools import partial
from six import iteritems
from six import iterkeys
from six import string_types
from six import StringIO
from tensorflow.contrib.graph_editor import reroute
from tensorflow.contrib.graph_editor import select
from tensorflow.contrib.graph_editor import subgraph
from tensorflow.contrib.graph_editor import util
from tensorflow.python.framework import ops as tf_ops
from tensorflow.python.platform import tf_logging as logging
# Public API of this module.
__all__ = [
    "replace_t_with_placeholder_handler",
    "keep_t_if_possible_handler",
    "assign_renamed_collections_handler",
    "transform_op_if_inside_handler",
    "copy_op_handler",
    "Transformer",
    "TransformerInfo",
    "copy",
    "copy_with_input_replacements",
    "graph_replace",
]
def replace_t_with_placeholder_handler(info, t):
  """Transform a tensor into a placeholder tensor.

  This handler is typically used to transform a subgraph input tensor into a
  placeholder.

  Args:
    info: Transform._TmpInfo instance.
    t: tensor whose input must be transformed into a place holder.
  Returns:
    The tensor generated by the newly created place holder.
  """
  # Build the placeholder inside the destination graph, under the
  # destination scope.
  with info.graph_.as_default():
    return util.make_placeholder_from_tensor(t, scope=info.scope_)
def keep_t_if_possible_handler(info, t):
  """Transform a tensor into itself (identity) if possible.

  This handler transform a tensor into itself if the source and destination
  graph are the same. Otherwise it will create a placeholder.
  This handler is typically used to transform a hidden input tensors.

  Args:
    info: Transform._TmpInfo instance.
    t: tensor whose input must be transformed into a place holder.
  Returns:
    The tensor generated by the newly created place holder.
  """
  # Different graphs: fall back to a placeholder in the destination graph.
  if info.graph is not info.graph_:
    return replace_t_with_placeholder_handler(info, t)
  # Same graph: the original tensor can be reused directly.
  return t
def assign_renamed_collections_handler(info, elem, elem_):
  """Add the transformed elem to the (renamed) collections of elem.

  A collection is renamed only if is not a known key, as described in
  `tf.GraphKeys`.

  Args:
    info: Transform._TmpInfo instance.
    elem: the original element (`tf.Tensor` or `tf.Operation`)
    elem_: the transformed element
  """
  predefined = util.get_predefined_collection_names()
  for name, collection in iteritems(info.collections):
    if elem not in collection:
      continue
    # Standard collection keys keep their name; custom ones get re-scoped.
    target_name = name if name in predefined else info.new_name(name)
    info.graph_.add_to_collection(target_name, elem_)
def transform_op_if_inside_handler(info, op, keep_if_possible=True):
  """Transform an optional op only if it is inside the subgraph.

  This handler is typically use to handle original op: it is fine to keep them
  if they are inside the subgraph, otherwise they are just ignored.

  Args:
    info: Transform._TmpInfo instance.
    op: the optional op to transform (or ignore).
    keep_if_possible: re-attach to the original op if possible, that is,
      if the source graph and the destination graph are the same.
  Returns:
    The transformed op or None.
  """
  # Inside the subgraph: return its already-transformed counterpart.
  if op in info.sgv.ops:
    return info.transformed_ops[op]
  # Outside the subgraph: optionally keep the original when both graphs
  # are the same object; otherwise drop it.
  if keep_if_possible and info.graph is info.graph_:
    return op
  return None
def copy_op_handler(info, op, copy_shape=True):
  """Copy a `tf.Operation`.

  Args:
    info: Transform._TmpInfo instance.
    op: the `tf.Operation` to be copied.
    copy_shape: also copy the shape of the tensor
  Returns:
    A `(op, op_outputs)` tuple containing the transformed op and its outputs.
  """
  # This handler reaches into tf.Operation internals (_node_def, _op_def,
  # _add_op, ...), hence the lint suppression below.
  # pylint: disable=protected-access

  # Clone the node def:
  node_def_ = deepcopy(op._node_def)

  # Transform name: re-scope it, then let the destination graph uniquify it.
  name_ = info.new_name(op.name)
  name_ = info.graph_.unique_name(name_)
  node_def_.name = name_

  # Copy the other inputs needed for initialization
  output_types_ = op._output_types[:]
  input_types_ = op._input_types[:]

  # Make a copy of the op_def too.
  # It's unique to every _type_ of Operation.
  op_def_ = deepcopy(op._op_def)

  # Initialize a new Operation instance. Inputs and control inputs are left
  # empty here; they are wired up later (see Transformer._connect_ops).
  op_ = tf_ops.Operation(node_def_, info.graph_, [], output_types_,
                         [], input_types_, None, op_def_)

  # copy the shape over
  if copy_shape:
    for t, t_ in zip(op.outputs, op_.outputs):
      t_.set_shape(t.get_shape())

  # Finalize original op: point the copy's _original_op at the transformed
  # original, if one exists inside the subgraph.
  if op._original_op:
    original_op = info.transform_original_op_handler(info, op._original_op)
    if original_op is None:
      logging.debug("Could not find original op of: %s", op_.name)
    else:
      op_._original_op = original_op

  # Add op to the graph
  info.graph_._add_op(op_)

  return op_, op_.outputs
class TransformerInfo(object):
  """Contains information about the result of a transform operation."""

  def __init__(self, info):
    """Constructor.

    Args:
      info: an instance of Transformer._TmpInfo containing various internal
        information about the transform operation.
    """
    # Source/destination graphs and scopes.
    self._graph = info.graph
    self._scope = info.scope
    self._graph_ = info.graph_
    self._scope_ = info.scope_
    # Mappings original -> transformed, for ops and for tensors.
    self._transformed_ops = info.transformed_ops
    self._transformed_ts = info.transformed_ts

  def _get_transformed_map(self, top):
    """Return the correct container depending on the type of `top`."""
    if isinstance(top, tf_ops.Operation):
      return self._transformed_ops
    elif isinstance(top, tf_ops.Tensor):
      return self._transformed_ts
    else:
      raise TypeError(
          "Expected a tf.Tensor or a tf.Operation, got a {}".format(
              type(top)))

  def _transformed_elem(self, original_top, missing_fn=None):
    """Return the transformed op/tensor corresponding to the original one.

    Args:
      original_top: the original tensor/operation.
      missing_fn: function handling the case where the counterpart
        cannot be found. By default, None is returned.
    Returns:
      the transformed tensor/operation (or None if no match is found).
    """
    transformed_map = self._get_transformed_map(original_top)
    if isinstance(original_top, string_types):
      # String lookup: linear scan matching on the original's name.
      for original, transformed in iteritems(transformed_map):
        if original.name == original_top:
          return transformed
      return None if missing_fn is None else missing_fn(original_top)
    else:
      if original_top not in transformed_map:
        return None if missing_fn is None else missing_fn(original_top)
      return transformed_map[original_top]

  def _original_elem(self, transformed_top, missing_fn=None):
    """Return the original op/tensor corresponding to the transformed one.

    Args:
      transformed_top: the transformed tensor/operation.
      missing_fn: function handling the case where the counterpart
        cannot be found. By default, None is returned.
    Returns:
      the original tensor/operation (or None if no match is found).
    """
    transformed_map = self._get_transformed_map(transformed_top)
    # Reverse lookup: scan values for a name match (string) or equality.
    if isinstance(transformed_top, string_types):
      finder = lambda transformed: transformed.name == transformed_top
    else:
      finder = lambda transformed: transformed == transformed_top
    for original, transformed in iteritems(transformed_map):
      if finder(transformed):
        return original
    return None if missing_fn is None else missing_fn(transformed_top)

  def transformed(self, original, missing_fn=None):
    """Return the transformed op/tensor corresponding to the original one.

    Note that the output of this function mimics the hierarchy
    of its input argument `original`.
    Given an iterable, it returns a list. Given an operation or a tensor,
    it will return an operation or a tensor.

    Args:
      original: the original tensor/operation.
      missing_fn: function handling the case where the counterpart
        cannot be found. By default, None is returned.
    Returns:
      the transformed tensor/operation (or None if no match is found).
    """
    transformed_elem = partial(self._transformed_elem, missing_fn=missing_fn)
    return util.transform_tree(original, transformed_elem)

  def original(self, transformed, missing_fn=None):
    """Return the original op/tensor corresponding to the transformed one.

    Note that the output of this function mimics the hierarchy
    of its input argument `transformed`.
    Given an iterable, it returns a list. Given an operation or a tensor,
    it will return an operation or a tensor.

    Args:
      transformed: the transformed tensor/operation.
      missing_fn: function handling the case where the counterpart
        cannot be found. By default, None is returned.
    Returns:
      the original tensor/operation (or None if no match is found).
    """
    original_elem = partial(self._original_elem, missing_fn=missing_fn)
    return util.transform_tree(transformed, original_elem)

  def __str__(self):
    # Human-readable summary of the transform, built into a StringIO buffer.
    res = StringIO()
    print("Transform result info:", file=res)
    if self._graph == self._graph_:
      in_place_str = "" if self._scope_ else " IN-PLACE"
      print(" Within graph[{}]{}".format(
          id(self._graph), in_place_str), file=res)
    else:
      print(" graph[{}] => graph[{}]".format(
          id(self._graph), id(self._graph_)), file=res)
    if self._scope:
      print(" Relative to source scope: {}".format(self._scope), file=res)
    if self._scope_:
      print(" Scope destination: {}".format(self._scope_), file=res)
    print("Operations mapping:", file=res)
    for op, op_ in iteritems(self._transformed_ops):
      print(" {} => {}".format(op.name, op_.name), file=res)
    return res.getvalue()
class _TmpInfo(object):
  """Transformer temporary data.

  An instance of this class holds all the information relevant to a call
  to a transformer instance (that is, a call to __call__). An instance
  is created for the life-time of the __call__ function and is passed as
  argument to the handlers.
  """

  def __init__(self, sgv, dst_graph, dst_scope, src_scope):
    # Source subgraph view plus fast-lookup sets derived from it.
    self.sgv = sgv
    self.sgv_inputs_set = frozenset(sgv.inputs)
    self.ops = frozenset(sgv.ops)
    self.control_outputs = util.ControlOutputs(sgv.graph)
    # Source graph/scope and destination graph/scope (trailing underscore
    # marks "destination" throughout this module).
    self.graph = sgv.graph
    self.scope = src_scope
    self.graph_ = dst_graph
    self.scope_ = dst_scope
    # Mappings original -> transformed, filled in during the transform.
    self.transformed_ops = {}
    self.transformed_ts = {}
    # Snapshot of every source-graph collection, keyed by collection name.
    self.collections = dict((key, self.graph.get_collection(key))
                            for key in self.graph.get_all_collection_keys())
    self.cyclic_ops = []
    # Default handler; Transformer.__call__ overrides this with its own.
    self.transform_original_op_handler = transform_op_if_inside_handler

  def new_name(self, name):
    """Compute a destination name from a source name.

    Args:
      name: the name to be "transformed".
    Returns:
      The transformed name.
    Raises:
      ValueError: if the source scope is used (that is, not an empty string)
        and the source name does not belong to the source scope.
    """
    scope = self.scope
    if not name.startswith(scope):
      raise ValueError("{} does not belong to source scope: {}.".format(
          name, scope))
    # Re-root the scope-relative part of the name under the destination scope.
    rel_name = name[len(scope):]
    name_ = self.scope_ + rel_name
    return name_
class Transformer(object):
  """Transform a subgraph into another one.

  By default, the constructor creates a transform which copies a subgraph and
  replaces inputs with placeholders. This behavior can be modified by changing
  the handlers.
  """

  def __init__(self):
    """Transformer constructor.

    The following members can be modified:
    transform_op_handler: handle the transformation of a `tf.Operation`.
      This handler defaults to a simple copy.
    assign_collections_handler: handle the assignment of collections.
      This handler defaults to assigning new collections created under the
      given name-scope.
    transform_external_input_handler: handle the transform of the inputs to
      the given subgraph. This handler defaults to creating placeholders
      instead of the ops just before the input tensors of the subgraph.
    transform_external_hidden_input_handler: handle the transform of the
      hidden inputs of the subgraph, that is, the inputs which are not listed
      in sgv.inputs. This handler defaults to a transform which keep the same
      input if the source and destination graphs are the same, otherwise
      use placeholders.
    transform_original_op_handler: handle the transform of original_op. This
      handler defaults to transforming original_op only if they are in the
      subgraph, otherwise they are ignored.
    """
    # handlers
    self.transform_op_handler = copy_op_handler
    self.transform_control_input_handler = transform_op_if_inside_handler
    self.assign_collections_handler = assign_renamed_collections_handler
    self.transform_external_input_handler = replace_t_with_placeholder_handler
    self.transform_external_hidden_input_handler = keep_t_if_possible_handler
    self.transform_original_op_handler = transform_op_if_inside_handler

  def __call__(self,
               sgv,
               dst_graph,
               dst_scope,
               src_scope="",
               reuse_dst_scope=False):
    """Execute the transformation.

    Args:
      sgv: the source subgraph-view.
      dst_graph: the destination graph.
      dst_scope: the destination scope.
      src_scope: the source scope, which specify the path from which the
        relative path of the transformed nodes are computed. For instance, if
        src_scope is a/ and dst_scope is b/, then the node a/x/y will have a
        relative path of x/y and will be transformed into b/x/y.
      reuse_dst_scope: if True the dst_scope is re-used if it already exists.
        Otherwise, the scope is given a unique name based on the one given
        by appending an underscore followed by a digit (default).
    Returns:
      A tuple `(sgv, info)` where:
        `sgv` is the transformed subgraph view;
        `info` is an instance of TransformerInfo containing
        information about the transform, including mapping between
        original and transformed tensors and operations.
    Raises:
      ValueError: if the arguments are invalid.
    """
    sgv = subgraph.make_view(sgv)
    if not isinstance(dst_graph, tf_ops.Graph):
      raise TypeError("Expected a tf.Graph, got: {}".format(type(dst_graph)))

    src_scope = util.scope_finalize(src_scope)
    dst_scope = util.scope_finalize(dst_scope)

    # Potentially create new scope if reuse_dst_scope is False
    if dst_scope and not reuse_dst_scope:
      dst_scope = util.scope_finalize(dst_graph.unique_name(dst_scope[:-1]))

    # Create temporary info used during this transform call
    info = _TmpInfo(sgv, dst_graph, dst_scope, src_scope)
    info.transform_original_op_handler = self.transform_original_op_handler

    # First copy every op in isolation, then wire the copies together.
    self._copy_ops(info)
    self._connect_ops(info)

    # Compute information about the transformation
    res_info = TransformerInfo(info)
    sgv_ = self._transform_sgv(info, sgv)
    return sgv_, res_info

  def _copy_ops(self, info):
    """Copy ops without connecting them."""
    for op in info.sgv.ops:
      logging.debug("Copying op: %s", op.name)
      # TODO(fkp): return a subgraph?
      op_, op_outputs_ = self.transform_op_handler(info, op)
      if op is op_:
        # Typo fix: error message previously read "tranformation".
        raise ValueError("In-place transformation not allowed.")

      # Process op.
      info.transformed_ops[op] = op_
      self.assign_collections_handler(info, op, op_)

      # Process output tensors.
      for op_output, op_output_ in zip(op.outputs, op_outputs_):
        info.transformed_ts[op_output] = op_output_
        self.assign_collections_handler(info, op_output, op_output_)

  def _connect_ops(self, info):
    """Connect the previously copied ops."""
    for op in info.sgv.ops:
      logging.debug("Finalizing op: %s", op.name)
      op_ = info.transformed_ops[op]

      # pylint: disable=protected-access
      if op_.inputs:
        raise ValueError("The newly transformed op should not have "
                         "any inputs yet: {}".format(op_.name))
      inputs_ = [self._transformed_t(info, t) for t in op.inputs]
      for t in inputs_:
        op_._add_input(t)

      # Finalize control inputs:
      control_inputs_ = [self.transform_control_input_handler(info, ci)
                         for ci in op.control_inputs]
      control_inputs_ = [ci for ci in control_inputs_ if ci is not None]
      reroute.add_control_inputs(op_, control_inputs_)

  def _transform_sgv(self, info, sgv):
    """Transform a subgraph view.

    For convenience, a transform operation returns a subgraph view of the
    transformed graph.

    Args:
      info: Temporary information for this transform call.
      sgv: the subgraph to be transformed.
    Returns:
      The transformed subgraph.
    """
    ops_ = [op_ for _, op_ in iteritems(info.transformed_ops)]
    sgv_ = subgraph.SubGraphView(ops_)
    sgv_inputs_ = sgv_.inputs
    sgv_outputs_ = sgv_.outputs

    # re-order inputs
    input_map_ = []
    for input_t in sgv.inputs:
      if input_t not in info.transformed_ts:
        continue
      input_t_ = info.transformed_ts[input_t]
      if input_t_ not in sgv_inputs_:
        continue
      input_t_index_ = sgv_.input_index(input_t_)
      input_map_.append(input_t_index_)

    # re-order outputs
    output_map_ = []
    for output_t in sgv.outputs:
      if output_t not in info.transformed_ts:
        continue
      output_t_ = info.transformed_ts[output_t]
      if output_t_ not in sgv_outputs_:
        continue
      output_t_index_ = sgv_.output_index(output_t_)
      output_map_.append(output_t_index_)

    return sgv_.remap(input_map_, output_map_)

  def _transformed_t(self, info, t):
    """Return the transformed tensor of `t`."""
    if t not in info.transformed_ts:
      # If op is not in the subgraph.
      if t in info.sgv_inputs_set:
        # t is an input of the subgraph.
        return self.transform_external_input_handler(info, t)
      else:
        # t is a hidden input of the subgraph.
        return self.transform_external_hidden_input_handler(info, t)
    else:
      # If op is in the subgraph, just return its transformed.
      return info.transformed_ts[t]
def copy(sgv, dst_graph=None, dst_scope="", src_scope="",
         reuse_dst_scope=False):
  """Copy a subgraph.

  Args:
    sgv: the source subgraph-view. This argument is converted to a subgraph
      using the same rules than the function subgraph.make_view.
    dst_graph: the destination graph.
    dst_scope: the destination scope.
    src_scope: the source scope.
    reuse_dst_scope: if True the dst_scope is re-used if it already exists.
      Otherwise, the scope is given a unique name based on the one given
      by appending an underscore followed by a digit (default).
  Returns:
    A tuple `(sgv, info)` where:
      `sgv` is the transformed subgraph view;
      `info` is an instance of TransformerInfo containing
      information about the transform, including mapping between
      original and transformed tensors and operations.
  Raises:
    TypeError: if `dst_graph` is not a `tf.Graph`.
    StandardError: if sgv cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv = subgraph.make_view(sgv)
  # Default destination: copy within the source graph itself.
  dst_graph = sgv.graph if dst_graph is None else dst_graph
  if not isinstance(dst_graph, tf_ops.Graph):
    raise TypeError("Expected a tf.Graph, got: {}".format(type(dst_graph)))

  # Delegate the actual work to a default-configured Transformer.
  return Transformer()(
      sgv, dst_graph, dst_scope, src_scope, reuse_dst_scope=reuse_dst_scope)
def copy_with_input_replacements(sgv, replacement_ts,
                                 dst_graph=None, dst_scope="", src_scope="",
                                 reuse_dst_scope=False):
  """Copy a subgraph, replacing some of its inputs.

  Note a replacement only happens if the tensor to be replaced
  is an input of the given subgraph. The inputs of a subgraph can
  be queried using sgv.inputs.

  Args:
    sgv: the source subgraph-view. This argument is converted to a subgraph
      using the same rules as the function subgraph.make_view.
    replacement_ts: dictionary mapping from original tensors to the
      replaced one.
    dst_graph: the destination graph.
    dst_scope: the destination scope.
    src_scope: the source scope.
    reuse_dst_scope: if True the dst_scope is re-used if it already exists.
      Otherwise, the scope is given a unique name based on the one given
      by appending an underscore followed by a digit (default).
  Returns:
    A tuple `(sgv, info)` where:
      `sgv` is the transformed subgraph view;
      `info` is an instance of TransformerInfo containing
      information about the transform, including mapping between
      original and transformed tensors and operations.
  Raises:
    TypeError: if dst_graph is not a tf.Graph.
    StandardError: if sgv cannot be converted to a SubGraphView using
      the same rules as the function subgraph.make_view.
  """
  sgv = subgraph.make_view(sgv)
  dst_graph = sgv.graph if dst_graph is None else dst_graph
  if not isinstance(dst_graph, tf_ops.Graph):
    raise TypeError("Expected a tf.Graph, got: {}".format(type(dst_graph)))

  def _replace_or_keep(info, t):
    # Prefer the caller-supplied replacement; otherwise keep the tensor
    # (or fall back to a placeholder if the graphs differ).
    if t in replacement_ts:
      return replacement_ts[t]
    return keep_t_if_possible_handler(info, t)

  copier = Transformer()
  copier.transform_external_input_handler = _replace_or_keep
  return copier(
      sgv, dst_graph, dst_scope, src_scope, reuse_dst_scope=reuse_dst_scope)
def graph_replace(target_ts, replacement_ts, dst_scope="",
                  src_scope="", reuse_dst_scope=False):
  """Create a new graph which compute the targets from the replaced Tensors.

  Args:
    target_ts: a single tf.Tensor or an iterable of tf.Tensor.
    replacement_ts: dictionary mapping from original tensors to replaced tensors
    dst_scope: the destination scope.
    src_scope: the source scope.
    reuse_dst_scope: if True the dst_scope is re-used if it already exists.
      Otherwise, the scope is given a unique name based on the one given
      by appending an underscore followed by a digit (default).
  Returns:
    A single tf.Tensor or a list of target tf.Tensor, depending on
    the type of the input argument `target_ts`.
    The returned tensors are recomputed using the tensors from replacement_ts.
  Raises:
    ValueError: if the targets are not connected to replacement_ts.
  """
  # Identify operations in the graph that will change.
  # Start forward walk at Tensors that will be replaced, and
  # backward walk at the target output Tensors.
  flatten_target_ts = util.flatten_tree(target_ts)
  # Construct the forward control dependencies edges so that
  # the get_walks_intersection_ops can also traverse the
  # control dependencies.
  # NOTE(review): (tf_ops.Tensor) is NOT a tuple -- the parentheses are
  # redundant and the value is just the class. If a 1-tuple was intended
  # it should be (tf_ops.Tensor,); confirm get_unique_graph accepts a
  # bare type before changing.
  graph = util.get_unique_graph(flatten_target_ts, check_types=(tf_ops.Tensor))
  control_ios = util.ControlOutputs(graph)
  ops = select.get_walks_intersection_ops(list(iterkeys(replacement_ts)),
                                          flatten_target_ts,
                                          control_ios=control_ios)
  if not ops:
    raise ValueError("Targets and replacements are not connected!")
  # Create a copy of the relevant subgraph
  _, info = copy_with_input_replacements(
      ops, replacement_ts, None, dst_scope, src_scope, reuse_dst_scope)
  # Return the transformed targets but keep the original if the transformed
  # counterpart cannot be found
  missing_fn = lambda original_t: original_t
  return info.transformed(target_ts, missing_fn)
"""Tests for legendre module.
"""
from __future__ import division
import numpy as np
import numpy.polynomial.legendre as leg
import numpy.polynomial.polynomial as poly
from numpy.testing import *
# First ten Legendre polynomials P0..P9 written in the ordinary power basis
# (lowest degree first). They serve as ground truth for legval below.
P0 = np.array([ 1])
P1 = np.array([ 0, 1])
P2 = np.array([-1, 0, 3])/2
P3 = np.array([ 0, -3, 0, 5])/2
P4 = np.array([ 3, 0, -30, 0, 35])/8
P5 = np.array([ 0, 15, 0, -70, 0, 63])/8
P6 = np.array([-5, 0, 105, 0,-315, 0, 231])/16
P7 = np.array([ 0,-35, 0, 315, 0, -693, 0, 429])/16
P8 = np.array([35, 0,-1260, 0,6930, 0,-12012, 0,6435])/128
P9 = np.array([ 0,315, 0,-4620, 0,18018, 0,-25740, 0,12155])/128
Plist = [P0, P1, P2, P3, P4, P5, P6, P7, P8, P9]
def trim(x):
    """Trim trailing Legendre coefficients with magnitude <= 1e-6."""
    return leg.legtrim(x, tol=1e-6)
class TestConstants(TestCase):
    """Check the module-level Legendre constants."""

    def test_legdomain(self):
        assert_equal(leg.legdomain, [-1, 1])

    def test_legzero(self):
        assert_equal(leg.legzero, [0])

    def test_legone(self):
        assert_equal(leg.legone, [1])

    def test_legx(self):
        assert_equal(leg.legx, [0, 1])
class TestArithmetic(TestCase):
    """Arithmetic on Legendre series, checked against power-basis values."""

    # Sample grid and ground-truth values of P0..P9 on it.
    x = np.linspace(-1, 1, 100)
    y0 = poly.polyval(x, P0)
    y1 = poly.polyval(x, P1)
    y2 = poly.polyval(x, P2)
    y3 = poly.polyval(x, P3)
    y4 = poly.polyval(x, P4)
    y5 = poly.polyval(x, P5)
    y6 = poly.polyval(x, P6)
    y7 = poly.polyval(x, P7)
    y8 = poly.polyval(x, P8)
    y9 = poly.polyval(x, P9)
    y = [y0, y1, y2, y3, y4, y5, y6, y7, y8, y9]

    def test_legval(self):
        """legval of the i-th unit series matches P_i in the power basis."""
        # check empty input
        assert_equal(leg.legval([], [1]).size, 0)
        # check normal input
        # (removed dead code: an unused `ser = np.zeros` assignment and an
        # unused inner function `f`.)
        for i in range(10):
            msg = "At i=%d" % i
            tgt = self.y[i]
            res = leg.legval(self.x, [0]*i + [1])
            assert_almost_equal(res, tgt, err_msg=msg)
        # check that shape is preserved
        for i in range(3):
            dims = [2]*i
            x = np.zeros(dims)
            assert_equal(leg.legval(x, [1]).shape, dims)
            assert_equal(leg.legval(x, [1, 0]).shape, dims)
            assert_equal(leg.legval(x, [1, 0, 0]).shape, dims)

    def test_legadd(self):
        for i in range(5):
            for j in range(5):
                msg = "At i=%d, j=%d" % (i, j)
                tgt = np.zeros(max(i, j) + 1)
                tgt[i] += 1
                tgt[j] += 1
                res = leg.legadd([0]*i + [1], [0]*j + [1])
                assert_equal(trim(res), trim(tgt), err_msg=msg)

    def test_legsub(self):
        for i in range(5):
            for j in range(5):
                msg = "At i=%d, j=%d" % (i, j)
                tgt = np.zeros(max(i, j) + 1)
                tgt[i] += 1
                tgt[j] -= 1
                res = leg.legsub([0]*i + [1], [0]*j + [1])
                assert_equal(trim(res), trim(tgt), err_msg=msg)

    def test_legmulx(self):
        """x*L_i follows the three-term recurrence coefficients."""
        assert_equal(leg.legmulx([0]), [0])
        assert_equal(leg.legmulx([1]), [0, 1])
        for i in range(1, 5):
            tmp = 2*i + 1
            ser = [0]*i + [1]
            tgt = [0]*(i - 1) + [i/tmp, 0, (i + 1)/tmp]
            assert_equal(leg.legmulx(ser), tgt)

    def test_legmul(self):
        # check values of result
        for i in range(5):
            pol1 = [0]*i + [1]
            val1 = leg.legval(self.x, pol1)
            for j in range(5):
                msg = "At i=%d, j=%d" % (i, j)
                pol2 = [0]*j + [1]
                val2 = leg.legval(self.x, pol2)
                pol3 = leg.legmul(pol1, pol2)
                val3 = leg.legval(self.x, pol3)
                assert_(len(pol3) == i + j + 1, msg)
                assert_almost_equal(val3, val1*val2, err_msg=msg)

    def test_legdiv(self):
        # quotient*divisor + remainder must reconstruct the dividend
        for i in range(5):
            for j in range(5):
                msg = "At i=%d, j=%d" % (i, j)
                ci = [0]*i + [1]
                cj = [0]*j + [1]
                tgt = leg.legadd(ci, cj)
                quo, rem = leg.legdiv(tgt, ci)
                res = leg.legadd(leg.legmul(quo, ci), rem)
                assert_equal(trim(res), trim(tgt), err_msg=msg)
class TestCalculus(TestCase) :

    def test_legint(self) :
        """legint: argument validation plus integration identities."""
        # check exceptions
        assert_raises(ValueError, leg.legint, [0], .5)
        assert_raises(ValueError, leg.legint, [0], -1)
        assert_raises(ValueError, leg.legint, [0], 1, [0,0])

        # test integration of zero polynomial
        for i in range(2, 5):
            k = [0]*(i - 2) + [1]
            res = leg.legint([0], m=i, k=k)
            assert_almost_equal(res, [0, 1])

        # check single integration with integration constant
        for i in range(5) :
            scl = i + 1
            pol = [0]*i + [1]
            tgt = [i] + [0]*i + [1/scl]
            legpol = leg.poly2leg(pol)
            legint = leg.legint(legpol, m=1, k=[i])
            res = leg.leg2poly(legint)
            assert_almost_equal(trim(res), trim(tgt))

        # check single integration with integration constant and lbnd
        for i in range(5) :
            scl = i + 1
            pol = [0]*i + [1]
            legpol = leg.poly2leg(pol)
            legint = leg.legint(legpol, m=1, k=[i], lbnd=-1)
            # the antiderivative must vanish (equal k) at the lower bound
            assert_almost_equal(leg.legval(-1, legint), i)

        # check single integration with integration constant and scaling
        for i in range(5) :
            scl = i + 1
            pol = [0]*i + [1]
            tgt = [i] + [0]*i + [2/scl]
            legpol = leg.poly2leg(pol)
            legint = leg.legint(legpol, m=1, k=[i], scl=2)
            res = leg.leg2poly(legint)
            assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with default k
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = leg.legint(tgt, m=1)
                res = leg.legint(pol, m=j)
                assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with defined k
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = leg.legint(tgt, m=1, k=[k])
                res = leg.legint(pol, m=j, k=range(j))
                assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with lbnd
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = leg.legint(tgt, m=1, k=[k], lbnd=-1)
                res = leg.legint(pol, m=j, k=range(j), lbnd=-1)
                assert_almost_equal(trim(res), trim(tgt))

        # check multiple integrations with scaling
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = leg.legint(tgt, m=1, k=[k], scl=2)
                res = leg.legint(pol, m=j, k=range(j), scl=2)
                assert_almost_equal(trim(res), trim(tgt))

    def test_legder(self) :
        """legder: argument validation; derivative inverts integration."""
        # check exceptions
        assert_raises(ValueError, leg.legder, [0], .5)
        assert_raises(ValueError, leg.legder, [0], -1)

        # check that zeroth deriviative does nothing
        for i in range(5) :
            tgt = [1] + [0]*i
            res = leg.legder(tgt, m=0)
            assert_equal(trim(res), trim(tgt))

        # check that derivation is the inverse of integration
        for i in range(5) :
            for j in range(2,5) :
                tgt = [1] + [0]*i
                res = leg.legder(leg.legint(tgt, m=j), m=j)
                assert_almost_equal(trim(res), trim(tgt))

        # check derivation with scaling
        for i in range(5) :
            for j in range(2,5) :
                tgt = [1] + [0]*i
                res = leg.legder(leg.legint(tgt, m=j, scl=2), m=j, scl=.5)
                assert_almost_equal(trim(res), trim(tgt))
class TestMisc(TestCase) :
    """Tests for the Legendre module-level helper functions."""

    def test_legfromroots(self) :
        # An empty root list yields the constant polynomial 1.
        res = leg.legfromroots([])
        assert_almost_equal(trim(res), [1])
        for i in range(1,5) :
            roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
            pol = leg.legfromroots(roots)
            res = leg.legval(roots, pol)
            tgt = 0
            # Degree-i result: i + 1 coefficients and, converted to the power
            # basis, a leading coefficient of exactly 1 (monic).
            assert_(len(pol) == i + 1)
            assert_almost_equal(leg.leg2poly(pol)[-1], 1)
            # Evaluating at the roots must give (numerically) zero.
            assert_almost_equal(res, tgt)

    def test_legroots(self) :
        """legroots recovers the roots used by legfromroots."""
        assert_almost_equal(leg.legroots([1]), [])
        assert_almost_equal(leg.legroots([1, 2]), [-.5])
        for i in range(2,5) :
            tgt = np.linspace(-1, 1, i)
            res = leg.legroots(leg.legfromroots(tgt))
            assert_almost_equal(trim(res), trim(tgt))

    def test_legvander(self) :
        """Column i of the Vandermonde matrix holds the values of L_i(x)."""
        # check for 1d x
        x = np.arange(3)
        v = leg.legvander(x, 3)
        assert_(v.shape == (3,4))
        for i in range(4) :
            coef = [0]*i + [1]
            assert_almost_equal(v[...,i], leg.legval(x, coef))
        # check for 2d x
        x = np.array([[1,2],[3,4],[5,6]])
        v = leg.legvander(x, 3)
        assert_(v.shape == (3,2,4))
        for i in range(4) :
            coef = [0]*i + [1]
            assert_almost_equal(v[...,i], leg.legval(x, coef))

    def test_legfit(self) :
        """legfit: argument validation, exact fits, 2d data, and weights."""
        def f(x) :
            return x*(x - 1)*(x - 2)
        # Test exceptions
        assert_raises(ValueError, leg.legfit, [1], [1], -1)
        assert_raises(TypeError, leg.legfit, [[1]], [1], 0)
        assert_raises(TypeError, leg.legfit, [], [1], 0)
        assert_raises(TypeError, leg.legfit, [1], [[[1]]], 0)
        assert_raises(TypeError, leg.legfit, [1, 2], [1], 0)
        assert_raises(TypeError, leg.legfit, [1], [1, 2], 0)
        assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[[1]])
        assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[1,1])
        # Test fit
        x = np.linspace(0,2)
        y = f(x)
        # A cubic fits the cubic f exactly.
        coef3 = leg.legfit(x, y, 3)
        assert_equal(len(coef3), 4)
        assert_almost_equal(leg.legval(x, coef3), y)
        # So does any higher degree.
        coef4 = leg.legfit(x, y, 4)
        assert_equal(len(coef4), 5)
        assert_almost_equal(leg.legval(x, coef4), y)
        # Column-stacked data is fit column by column.
        coef2d = leg.legfit(x, np.array([y,y]).T, 3)
        assert_almost_equal(coef2d, np.array([coef3,coef3]).T)
        # test weighting: corrupt the zero-weight samples of yw and check
        # that they do not influence the fit.
        w = np.zeros_like(x)
        yw = y.copy()
        w[1::2] = 1
        # BUG FIX: this previously read ``y[0::2] = 0``, which mutated the
        # clean data instead of the weighted copy; the zero-weight entries
        # of yw were never corrupted, so the check below was vacuous.
        yw[0::2] = 0
        wcoef3 = leg.legfit(x, yw, 3, w=w)
        assert_almost_equal(wcoef3, coef3)
        #
        wcoef2d = leg.legfit(x, np.array([yw,yw]).T, 3, w=w)
        assert_almost_equal(wcoef2d, np.array([coef3,coef3]).T)

    def test_legtrim(self) :
        coef = [2, -1, 1, 0]
        # Test exceptions
        assert_raises(ValueError, leg.legtrim, coef, -1)
        # Test results
        assert_equal(leg.legtrim(coef), coef[:-1])
        assert_equal(leg.legtrim(coef, 1), coef[:-3])
        assert_equal(leg.legtrim(coef, 2), [0])

    def test_legline(self) :
        assert_equal(leg.legline(3,4), [3, 4])

    def test_leg2poly(self) :
        # Basis element L_i converts to the i-th reference power series.
        for i in range(10) :
            assert_almost_equal(leg.leg2poly([0]*i + [1]), Plist[i])

    def test_poly2leg(self) :
        # Inverse of test_leg2poly.
        for i in range(10) :
            assert_almost_equal(leg.poly2leg(Plist[i]), [0]*i + [1])
def assert_poly_almost_equal(p1, p2):
    """Check two polynomial objects agree: coefficients to within
    floating-point tolerance, domains exactly."""
    coef_pair = (p1.coef, p2.coef)
    domain_pair = (p1.domain, p2.domain)
    assert_almost_equal(*coef_pair)
    assert_equal(*domain_pair)
class TestLegendreClass(TestCase) :
    """Tests for the `leg.Legendre` convenience class."""
    # Shared fixtures: p2 has the same coefficients as p1 but domain [0, 1];
    # p4 and p5 differ from p1 only in the constant term, which makes the
    # subtraction / division targets easy to state.
    p1 = leg.Legendre([1,2,3])
    p2 = leg.Legendre([1,2,3], [0,1])
    p3 = leg.Legendre([1,2])
    p4 = leg.Legendre([2,2,3])
    p5 = leg.Legendre([3,2,3])
    def test_equal(self) :
        # Equality requires both coefficients and domain to match; comparison
        # against a plain list is never equal.
        assert_(self.p1 == self.p1)
        assert_(self.p2 == self.p2)
        assert_(not self.p1 == self.p2)
        assert_(not self.p1 == self.p3)
        assert_(not self.p1 == [1,2,3])
    def test_not_equal(self) :
        assert_(not self.p1 != self.p1)
        assert_(not self.p2 != self.p2)
        assert_(self.p1 != self.p2)
        assert_(self.p1 != self.p3)
        assert_(self.p1 != [1,2,3])
    def test_add(self) :
        # Plain lists are promoted to Legendre on either side of the operator.
        tgt = leg.Legendre([2,4,6])
        assert_(self.p1 + self.p1 == tgt)
        assert_(self.p1 + [1,2,3] == tgt)
        assert_([1,2,3] + self.p1 == tgt)
    def test_sub(self) :
        tgt = leg.Legendre([1])
        assert_(self.p4 - self.p1 == tgt)
        assert_(self.p4 - [1,2,3] == tgt)
        assert_([2,2,3] - self.p1 == tgt)
    def test_mul(self) :
        tgt = leg.Legendre([4.13333333, 8.8, 11.23809524, 7.2, 4.62857143])
        assert_poly_almost_equal(self.p1 * self.p1, tgt)
        assert_poly_almost_equal(self.p1 * [1,2,3], tgt)
        assert_poly_almost_equal([1,2,3] * self.p1, tgt)
    def test_floordiv(self) :
        tgt = leg.Legendre([1])
        assert_(self.p4 // self.p1 == tgt)
        assert_(self.p4 // [1,2,3] == tgt)
        assert_([2,2,3] // self.p1 == tgt)
    def test_mod(self) :
        tgt = leg.Legendre([1])
        assert_((self.p4 % self.p1) == tgt)
        assert_((self.p4 % [1,2,3]) == tgt)
        assert_(([2,2,3] % self.p1) == tgt)
    def test_divmod(self) :
        tquo = leg.Legendre([1])
        trem = leg.Legendre([2])
        quo, rem = divmod(self.p5, self.p1)
        assert_(quo == tquo and rem == trem)
        quo, rem = divmod(self.p5, [1,2,3])
        assert_(quo == tquo and rem == trem)
        quo, rem = divmod([3,2,3], self.p1)
        assert_(quo == tquo and rem == trem)
    def test_pow(self) :
        tgt = leg.Legendre([1])
        # tgt tracks p1**i by repeated multiplication.
        for i in range(5) :
            res = self.p1**i
            assert_(res == tgt)
            tgt = tgt*self.p1
    def test_call(self) :
        # domain = [-1, 1]
        x = np.linspace(-1, 1)
        # p1 = L0 + 2*L1 + 3*L2, expanded here in powers of x.
        tgt = 3*(1.5*x**2 - .5) + 2*x + 1
        assert_almost_equal(self.p1(x), tgt)
        # domain = [0, 1]
        x = np.linspace(0, 1)
        xx = 2*x - 1
        assert_almost_equal(self.p2(x), self.p1(xx))
    def test_degree(self) :
        assert_equal(self.p1.degree(), 2)
    def test_trimdeg(self) :
        assert_raises(ValueError, self.p1.cutdeg, .5)
        assert_raises(ValueError, self.p1.cutdeg, -1)
        # cutdeg never lengthens the series; here len == min(deg, 2) + 1.
        assert_equal(len(self.p1.cutdeg(3)), 3)
        assert_equal(len(self.p1.cutdeg(2)), 3)
        assert_equal(len(self.p1.cutdeg(1)), 2)
        assert_equal(len(self.p1.cutdeg(0)), 1)
    def test_convert(self) :
        # Converting the domain must not change the evaluated values.
        x = np.linspace(-1,1)
        p = self.p1.convert(domain=[0,1])
        assert_almost_equal(p(x), self.p1(x))
    def test_mapparms(self) :
        # Domain [0, 1] maps onto [-1, 1] via x -> -1 + 2*x.
        parms = self.p2.mapparms()
        assert_almost_equal(parms, [-1, 2])
    def test_trim(self) :
        coef = [1, 1e-6, 1e-12, 0]
        p = leg.Legendre(coef)
        assert_equal(p.trim().coef, coef[:3])
        assert_equal(p.trim(1e-10).coef, coef[:2])
        assert_equal(p.trim(1e-5).coef, coef[:1])
    def test_truncate(self) :
        assert_raises(ValueError, self.p1.truncate, .5)
        assert_raises(ValueError, self.p1.truncate, 0)
        assert_equal(len(self.p1.truncate(4)), 3)
        assert_equal(len(self.p1.truncate(3)), 3)
        assert_equal(len(self.p1.truncate(2)), 2)
        assert_equal(len(self.p1.truncate(1)), 1)
    def test_copy(self) :
        p = self.p1.copy()
        assert_(self.p1 == p)
    def test_integ(self) :
        # scl=.5 in the targets compensates for p2's [0, 1] domain mapping.
        p = self.p2.integ()
        assert_almost_equal(p.coef, leg.legint([1,2,3], 1, 0, scl=.5))
        p = self.p2.integ(lbnd=0)
        assert_almost_equal(p(0), 0)
        p = self.p2.integ(1, 1)
        assert_almost_equal(p.coef, leg.legint([1,2,3], 1, 1, scl=.5))
        p = self.p2.integ(2, [1, 2])
        assert_almost_equal(p.coef, leg.legint([1,2,3], 2, [1,2], scl=.5))
    def test_deriv(self) :
        # Derivatives undo the integrations from test_integ.
        p = self.p2.integ(2, [1, 2])
        assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
        assert_almost_equal(p.deriv(2).coef, self.p2.coef)
    def test_roots(self) :
        p = leg.Legendre(leg.poly2leg([0, -1, 0, 1]), [0, 1])
        res = p.roots()
        # Roots -1, 0, 1 of x**3 - x, mapped onto the domain [0, 1].
        tgt = [0, .5, 1]
        assert_almost_equal(res, tgt)
    def test_linspace(self):
        xdes = np.linspace(0, 1, 20)
        ydes = self.p2(xdes)
        xres, yres = self.p2.linspace(20)
        assert_almost_equal(xres, xdes)
        assert_almost_equal(yres, ydes)
    def test_fromroots(self) :
        roots = [0, .5, 1]
        p = leg.Legendre.fromroots(roots, domain=[0, 1])
        res = p.coef
        tgt = leg.poly2leg([0, -1, 0, 1])
        assert_almost_equal(res, tgt)
    def test_fit(self) :
        def f(x) :
            return x*(x - 1)*(x - 2)
        x = np.linspace(0,3)
        y = f(x)
        # test default value of domain
        p = leg.Legendre.fit(x, y, 3)
        assert_almost_equal(p.domain, [0,3])
        # test that fit works in given domains
        p = leg.Legendre.fit(x, y, 3, None)
        assert_almost_equal(p(x), y)
        assert_almost_equal(p.domain, [0,3])
        p = leg.Legendre.fit(x, y, 3, [])
        assert_almost_equal(p(x), y)
        assert_almost_equal(p.domain, [-1, 1])
        # test that fit accepts weights.
        w = np.zeros_like(x)
        yw = y.copy()
        w[1::2] = 1
        yw[0::2] = 0
        p = leg.Legendre.fit(x, yw, 3, w=w)
        assert_almost_equal(p(x), y)
    def test_identity(self) :
        x = np.linspace(0,3)
        p = leg.Legendre.identity()
        assert_almost_equal(p(x), x)
        p = leg.Legendre.identity([1,3])
        assert_almost_equal(p(x), x)
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Core Keras layers.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import types as python_types
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import activations
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine import InputSpec
from tensorflow.python.keras.engine import Layer
from tensorflow.python.keras.utils import conv_utils
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export('keras.layers.Masking')
class Masking(Layer):
  """Masks timesteps whose features all equal `mask_value`.

  A timestep (dimension #1 of the input) is skipped by all downstream
  masking-aware layers when every feature at that step equals `mask_value`.
  A downstream layer that does not support masking will raise an exception
  if it receives the resulting mask.

  Example:

  Consider a Numpy data array `x` of shape `(samples, timesteps, features)`
  to be fed to an LSTM layer, where timesteps #3 and #5 lack data. You can:

  - set `x[:, 3, :] = 0.` and `x[:, 5, :] = 0.`
  - insert a `Masking` layer with `mask_value=0.` before the LSTM layer:

  ```python
  model = Sequential()
  model.add(Masking(mask_value=0., input_shape=(timesteps, features)))
  model.add(LSTM(32))
  ```
  """

  def __init__(self, mask_value=0., **kwargs):
    super(Masking, self).__init__(**kwargs)
    self.supports_masking = True
    self.mask_value = mask_value

  def compute_mask(self, inputs, mask=None):
    # True wherever at least one feature differs from the mask value.
    return K.any(math_ops.not_equal(inputs, self.mask_value), axis=-1)

  def call(self, inputs):
    keep = K.any(
        math_ops.not_equal(inputs, self.mask_value), axis=-1, keepdims=True)
    # Zero out fully-masked timesteps so their values cannot leak through.
    return inputs * math_ops.cast(keep, inputs.dtype)

  def compute_output_shape(self, input_shape):
    return input_shape

  def get_config(self):
    base = super(Masking, self).get_config()
    base.update({'mask_value': self.mask_value})
    return base
@tf_export('keras.layers.Dropout')
class Dropout(Layer):
  """Applies Dropout to the input.

  During training, randomly sets a fraction `rate` of the input units to 0
  at each update, which helps prevent overfitting.

  Arguments:
    rate: float between 0 and 1. Fraction of the input units to drop.
    noise_shape: 1D integer tensor representing the shape of the binary
      dropout mask that will be multiplied with the input. For instance, if
      your inputs have shape `(batch_size, timesteps, features)` and you
      want the dropout mask to be the same for all timesteps, you can use
      `noise_shape=(batch_size, 1, features)`.
    seed: A Python integer to use as random seed.
  """

  def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
    super(Dropout, self).__init__(**kwargs)
    self.rate = rate
    self.noise_shape = noise_shape
    self.seed = seed
    self.supports_masking = True

  def _get_noise_shape(self, inputs):
    # Subclasses may override this method to compute custom noise shapes
    # from the dynamic input shape; the base class only resolves the
    # user-supplied `self.noise_shape`, returning None when unset.
    if self.noise_shape is None:
      return None
    return nn_ops._get_noise_shape(inputs, self.noise_shape)  # pylint: disable=protected-access

  def call(self, inputs, training=None):
    explicit_training = training is not None
    if not explicit_training:
      training = K.learning_phase()

    def apply_dropout():
      return nn.dropout(inputs, 1 - self.rate,
                        noise_shape=self._get_noise_shape(inputs),
                        seed=self.seed)

    output = tf_utils.smart_cond(
        training, apply_dropout, lambda: array_ops.identity(inputs))
    # Graph mode only: flag that the output depends on the learning phase
    # when the caller did not pass `training` explicitly. (EagerTensor has
    # no `_uses_learning_phase` attribute.)
    if not context.executing_eagerly() and not explicit_training:
      output._uses_learning_phase = True  # pylint: disable=protected-access
    return output

  def compute_output_shape(self, input_shape):
    return input_shape

  def get_config(self):
    base = super(Dropout, self).get_config()
    base.update({
        'rate': self.rate,
        'noise_shape': self.noise_shape,
        'seed': self.seed
    })
    return base
@tf_export('keras.layers.SpatialDropout1D')
class SpatialDropout1D(Dropout):
  """Dropout variant that drops whole 1D feature maps.

  Instead of zeroing individual activations, entire channels are dropped
  for all timesteps at once. When adjacent timesteps are strongly
  correlated (as is typical after early convolution layers), element-wise
  dropout fails to regularize and mostly lowers the effective learning
  rate; dropping full feature maps promotes independence between them.

  Arguments:
    rate: float between 0 and 1. Fraction of the input units to drop.

  Input shape:
    3D tensor with shape:
    `(samples, timesteps, channels)`

  Output shape:
    Same as input

  References:
    - [Efficient Object Localization Using Convolutional
      Networks](https://arxiv.org/abs/1411.4280)
  """

  def __init__(self, rate, **kwargs):
    super(SpatialDropout1D, self).__init__(rate, **kwargs)
    self.input_spec = InputSpec(ndim=3)

  def _get_noise_shape(self, inputs):
    # One Bernoulli draw per (sample, channel); broadcast over the time axis.
    shape = array_ops.shape(inputs)
    return (shape[0], 1, shape[2])
@tf_export('keras.layers.SpatialDropout2D')
class SpatialDropout2D(Dropout):
  """Dropout variant that drops whole 2D feature maps.

  Instead of zeroing individual activations, entire channels are dropped
  across all rows and columns at once. When adjacent pixels are strongly
  correlated (as is typical after early convolution layers), element-wise
  dropout fails to regularize and mostly lowers the effective learning
  rate; dropping full feature maps promotes independence between them.

  Arguments:
    rate: float between 0 and 1. Fraction of the input units to drop.
    data_format: 'channels_first' or 'channels_last'. In 'channels_first'
      mode the channels dimension (the depth) is at index 1, in
      'channels_last' mode it is at index 3. Defaults to the
      `image_data_format` value found in your Keras config file at
      `~/.keras/keras.json`; if never set, it is "channels_last".

  Input shape:
    4D tensor with shape:
    `(samples, channels, rows, cols)` if data_format='channels_first'
    or 4D tensor with shape:
    `(samples, rows, cols, channels)` if data_format='channels_last'.

  Output shape:
    Same as input

  References:
    - [Efficient Object Localization Using Convolutional
      Networks](https://arxiv.org/abs/1411.4280)
  """

  def __init__(self, rate, data_format=None, **kwargs):
    super(SpatialDropout2D, self).__init__(rate, **kwargs)
    if data_format is None:
      data_format = K.image_data_format()
    if data_format not in ('channels_last', 'channels_first'):
      raise ValueError('data_format must be in '
                       '{"channels_last", "channels_first"}')
    self.data_format = data_format
    self.input_spec = InputSpec(ndim=4)

  def _get_noise_shape(self, inputs):
    # One Bernoulli draw per (sample, channel); broadcast over rows/cols.
    shape = array_ops.shape(inputs)
    if self.data_format == 'channels_first':
      return (shape[0], shape[1], 1, 1)
    elif self.data_format == 'channels_last':
      return (shape[0], 1, 1, shape[3])
@tf_export('keras.layers.SpatialDropout3D')
class SpatialDropout3D(Dropout):
  """Dropout variant that drops whole 3D feature maps.

  Instead of zeroing individual activations, entire channels are dropped
  across all three spatial dimensions at once. When adjacent voxels are
  strongly correlated (as is typical after early convolution layers),
  element-wise dropout fails to regularize and mostly lowers the effective
  learning rate; dropping full feature maps promotes independence between
  them.

  Arguments:
    rate: float between 0 and 1. Fraction of the input units to drop.
    data_format: 'channels_first' or 'channels_last'. In 'channels_first'
      mode the channels dimension (the depth) is at index 1, in
      'channels_last' mode it is at index 4. Defaults to the
      `image_data_format` value found in your Keras config file at
      `~/.keras/keras.json`; if never set, it is "channels_last".

  Input shape:
    5D tensor with shape:
    `(samples, channels, dim1, dim2, dim3)` if data_format='channels_first'
    or 5D tensor with shape:
    `(samples, dim1, dim2, dim3, channels)` if data_format='channels_last'.

  Output shape:
    Same as input

  References:
    - [Efficient Object Localization Using Convolutional
      Networks](https://arxiv.org/abs/1411.4280)
  """

  def __init__(self, rate, data_format=None, **kwargs):
    super(SpatialDropout3D, self).__init__(rate, **kwargs)
    if data_format is None:
      data_format = K.image_data_format()
    if data_format not in ('channels_last', 'channels_first'):
      raise ValueError('data_format must be in '
                       '{"channels_last", "channels_first"}')
    self.data_format = data_format
    self.input_spec = InputSpec(ndim=5)

  def _get_noise_shape(self, inputs):
    # One Bernoulli draw per (sample, channel); broadcast over dim1-dim3.
    shape = array_ops.shape(inputs)
    if self.data_format == 'channels_first':
      return (shape[0], shape[1], 1, 1, 1)
    elif self.data_format == 'channels_last':
      return (shape[0], 1, 1, 1, shape[4])
@tf_export('keras.layers.Activation')
class Activation(Layer):
  """Applies an activation function to an output.

  Arguments:
    activation: name of the activation function to use, or alternatively a
      Theano or TensorFlow operation.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as input.
  """

  def __init__(self, activation, **kwargs):
    super(Activation, self).__init__(**kwargs)
    self.supports_masking = True
    # Resolve string names (e.g. 'relu') to the callable.
    self.activation = activations.get(activation)

  def call(self, inputs):
    return self.activation(inputs)

  def compute_output_shape(self, input_shape):
    return input_shape

  def get_config(self):
    base = super(Activation, self).get_config()
    base.update({'activation': activations.serialize(self.activation)})
    return base
@tf_export('keras.layers.Reshape')
class Reshape(Layer):
  """Reshapes an output to a certain shape.

  Arguments:
    target_shape: Target shape as a tuple of integers, not including the
      samples dimension (batch size). A single `-1` entry is allowed and
      is inferred from the total size of the input.

  Input shape:
    Arbitrary, although all dimensions in the input shape must be fixed.
    Use the keyword argument `input_shape` (tuple of integers, does not
    include the samples axis) when using this layer as the first layer
    in a model.

  Output shape:
    `(batch_size,) + target_shape`

  Example:

  ```python
  # as first layer in a Sequential model
  model = Sequential()
  model.add(Reshape((3, 4), input_shape=(12,)))
  # now: model.output_shape == (None, 3, 4)
  # note: `None` is the batch dimension

  # as intermediate layer in a Sequential model
  model.add(Reshape((6, 2)))
  # now: model.output_shape == (None, 6, 2)

  # also supports shape inference using `-1` as dimension
  model.add(Reshape((-1, 2, 2)))
  # now: model.output_shape == (None, 3, 2, 2)
  ```
  """

  def __init__(self, target_shape, **kwargs):
    super(Reshape, self).__init__(**kwargs)
    self.target_shape = tuple(target_shape)

  def _fix_unknown_dimension(self, input_shape, output_shape):
    """Resolve a single `-1` entry of `output_shape` against `input_shape`.

    Near direct port of Numpy's internal `_fix_unknown_dimension`
    (`numpy/core/src/multiarray/shape.c`).

    Arguments:
      input_shape: shape of the array being reshaped.
      output_shape: desired shape, containing at most one `-1` entry to be
        derived from the input shape.

    Returns:
      `output_shape` with any `-1` replaced by its computed value.

    Raises:
      ValueError: if more than one unknown dimension is specified, or if
        the total sizes of `input_shape` and `output_shape` differ.
    """
    output_shape = list(output_shape)
    msg = 'total size of new array must be unchanged'
    known_product = 1
    unknown_index = None
    for axis, dim in enumerate(output_shape):
      if dim >= 0:
        known_product *= dim
      elif unknown_index is None:
        unknown_index = axis
      else:
        raise ValueError('Can only specify one unknown dimension.')
    total = np.prod(input_shape, dtype=int)
    if unknown_index is None:
      if total != known_product:
        raise ValueError(msg)
    else:
      # The unknown axis must divide the remaining size evenly.
      if known_product == 0 or total % known_product != 0:
        raise ValueError(msg)
      output_shape[unknown_index] = total // known_product
    return output_shape

  def compute_output_shape(self, input_shape):
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    if None in input_shape[1:]:
      # Input shape (partially) unknown: -1 placeholders cannot be
      # resolved, so report them as None.
      trailing = [dim if dim != -1 else None for dim in self.target_shape]
    else:
      trailing = self._fix_unknown_dimension(input_shape[1:],
                                             self.target_shape)
    return tensor_shape.TensorShape([input_shape[0]] + list(trailing))

  def call(self, inputs):
    batch = array_ops.shape(inputs)[0]
    return array_ops.reshape(inputs, (batch,) + self.target_shape)

  def get_config(self):
    base = super(Reshape, self).get_config()
    base.update({'target_shape': self.target_shape})
    return base
@tf_export('keras.layers.Permute')
class Permute(Layer):
  """Permutes the dimensions of the input according to a given pattern.

  Useful for e.g. connecting RNNs and convnets together.

  Example:

  ```python
  model = Sequential()
  model.add(Permute((2, 1), input_shape=(10, 64)))
  # now: model.output_shape == (None, 64, 10)
  # note: `None` is the batch dimension
  ```

  Arguments:
    dims: Tuple of integers. Permutation pattern, does not include the
      samples dimension. Indexing starts at 1. For instance, `(2, 1)`
      permutes the first and second dimension of the input.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same as the input shape, but with the dimensions re-ordered according
    to the specified pattern.
  """

  def __init__(self, dims, **kwargs):
    super(Permute, self).__init__(**kwargs)
    self.dims = tuple(dims)
    self.input_spec = InputSpec(ndim=len(self.dims) + 1)

  def compute_output_shape(self, input_shape):
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    # Rearrange the non-batch axes; axis 0 (batch) stays in place.
    permuted = list(input_shape)
    for out_axis, in_axis in enumerate(self.dims):
      permuted[out_axis + 1] = input_shape[in_axis]
    return tensor_shape.TensorShape(permuted)

  def call(self, inputs):
    return array_ops.transpose(inputs, perm=(0,) + self.dims)

  def get_config(self):
    base = super(Permute, self).get_config()
    base.update({'dims': self.dims})
    return base
@tf_export('keras.layers.Flatten')
class Flatten(Layer):
  """Flattens the input. Does not affect the batch size.

  Arguments:
    data_format: A string, one of `channels_last` (default) or
      `channels_first`. The ordering of the dimensions in the inputs.
      `channels_last` corresponds to inputs with shape
      `(batch, ..., channels)` while `channels_first` corresponds to
      inputs with shape `(batch, channels, ...)`. Defaults to the
      `image_data_format` value found in your Keras config file at
      `~/.keras/keras.json`; if never set, it is "channels_last".

  Example:

  ```python
  model = Sequential()
  model.add(Convolution2D(64, 3, 3,
                          border_mode='same',
                          input_shape=(3, 32, 32)))
  # now: model.output_shape == (None, 64, 32, 32)

  model.add(Flatten())
  # now: model.output_shape == (None, 65536)
  ```
  """

  def __init__(self, data_format=None, **kwargs):
    super(Flatten, self).__init__(**kwargs)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.input_spec = InputSpec(min_ndim=2)

  def call(self, inputs):
    if self.data_format == 'channels_first':
      # Move channels to the end so flattening matches channels_last order.
      perm = [0] + list(range(2, K.ndim(inputs))) + [1]
      inputs = array_ops.transpose(inputs, perm=perm)
    outputs = array_ops.reshape(inputs, (array_ops.shape(inputs)[0], -1))
    if not context.executing_eagerly():
      outputs.set_shape(self.compute_output_shape(inputs.get_shape()))
    return outputs

  def compute_output_shape(self, input_shape):
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    # Flattened size is known only when every non-batch dim is known/nonzero.
    flat = np.prod(input_shape[1:]) if all(input_shape[1:]) else None
    return tensor_shape.TensorShape([input_shape[0], flat])

  def get_config(self):
    base = super(Flatten, self).get_config()
    base.update({'data_format': self.data_format})
    return base
@tf_export('keras.layers.RepeatVector')
class RepeatVector(Layer):
  """Repeats the input n times along a new (time-like) axis.

  Example:

  ```python
  model = Sequential()
  model.add(Dense(32, input_dim=32))
  # now: model.output_shape == (None, 32)
  # note: `None` is the batch dimension
  model.add(RepeatVector(3))
  # now: model.output_shape == (None, 3, 32)
  ```

  Arguments:
    n: integer, repetition factor.

  Input shape:
    2D tensor of shape `(num_samples, features)`.

  Output shape:
    3D tensor of shape `(num_samples, n, features)`.
  """

  def __init__(self, n, **kwargs):
    super(RepeatVector, self).__init__(**kwargs)
    self.n = n
    self.input_spec = InputSpec(ndim=2)

  def compute_output_shape(self, input_shape):
    shape = tensor_shape.TensorShape(input_shape).as_list()
    return tensor_shape.TensorShape([shape[0], self.n, shape[1]])

  def call(self, inputs):
    return K.repeat(inputs, self.n)

  def get_config(self):
    base = super(RepeatVector, self).get_config()
    base.update({'n': self.n})
    return base
@tf_export('keras.layers.Lambda')
class Lambda(Layer):
  """Wraps arbitrary expression as a `Layer` object.

  Examples:

  ```python
  # add a x -> x^2 layer
  model.add(Lambda(lambda x: x ** 2))
  ```
  ```python
  # add a layer that returns the concatenation
  # of the positive part of the input and
  # the opposite of the negative part
  def antirectifier(x):
      x -= K.mean(x, axis=1, keepdims=True)
      x = K.l2_normalize(x, axis=1)
      pos = K.relu(x)
      neg = K.relu(-x)
      return K.concatenate([pos, neg], axis=1)

  model.add(Lambda(antirectifier))
  ```

  Arguments:
    function: The function to be evaluated. Takes input tensor as first
      argument.
    output_shape: Expected output shape from function. This argument can
      be inferred if not explicitly provided. Can be a tuple or function.
      If a tuple, it only specifies the first dimension onward; the sample
      dimension is assumed either the same as the input:
      `output_shape = (input_shape[0], ) + output_shape`
      or, the input is `None` and the sample dimension is also `None`:
      `output_shape = (None, ) + output_shape`.
      If a function, it specifies the entire shape as a function of the
      input shape: `output_shape = f(input_shape)`.
    mask: either None (no masking), a callable with the same signature as
      `compute_mask`, or a static mask value returned as-is.
    arguments: optional dictionary of keyword arguments to be passed
      to the function.

  Input shape:
    Arbitrary. Use the keyword argument input_shape
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Specified by `output_shape` argument
  """
  def __init__(self, function, output_shape=None, mask=None, arguments=None,
               **kwargs):
    super(Lambda, self).__init__(**kwargs)
    self.function = function
    # Substitute an empty dict for None (or other falsy) `arguments`.
    self.arguments = arguments if arguments else {}
    if mask is not None:
      # Providing a mask (callable or static) opts this layer into masking.
      self.supports_masking = True
    self.mask = mask
    if output_shape is None:
      self._output_shape = None
    elif isinstance(output_shape, (tuple, list)):
      self._output_shape = tuple(output_shape)
    else:
      if not callable(output_shape):
        raise TypeError('In Lambda, `output_shape` '
                        'must be a list, a tuple, or a function.')
      self._output_shape = output_shape
  def compute_output_shape(self, input_shape):
    input_shape = tuple(tensor_shape.TensorShape(input_shape).as_list())
    if self._output_shape is None:
      # No static shape given: infer it by tracing the wrapped function on
      # a placeholder. Only possible in graph mode.
      if context.executing_eagerly():
        raise NotImplementedError
      x = K.placeholder(shape=input_shape)
      x = self.call(x)
      if isinstance(x, list):
        return [tensor_shape.TensorShape(K.int_shape(x_elem)) for x_elem in x]
      else:
        return tensor_shape.TensorShape(K.int_shape(x))
    elif isinstance(self._output_shape, (tuple, list)):
      # Static tuple/list: prepend the (possibly unknown) batch dimension.
      if isinstance(input_shape, list):
        num_samples = input_shape[0][0]
      else:
        num_samples = input_shape[0] if input_shape else None
      return tensor_shape.TensorShape((num_samples,) +
                                      tuple(self._output_shape))
    else:
      # `_output_shape` is a callable mapping input shape -> full shape.
      shape = self._output_shape(input_shape)
      if not isinstance(shape, (list, tuple)):
        raise ValueError(
            '`output_shape` function must return a tuple or a list of tuples.')
      if isinstance(shape, list):
        if isinstance(shape[0], int) or shape[0] is None:
          shape = tuple(shape)
      return tensor_shape.TensorShape(shape)
  def call(self, inputs, mask=None):
    arguments = self.arguments
    # Forward the mask only if the wrapped function accepts a `mask` arg.
    if generic_utils.has_arg(self.function, 'mask'):
      arguments['mask'] = mask
    return self.function(inputs, **arguments)
  def compute_mask(self, inputs, mask=None):
    # `self.mask` may be a callable (computed mask) or a static value.
    if callable(self.mask):
      return self.mask(inputs, mask)
    return self.mask
  def get_config(self):
    # Lambdas are serialized as marshalled bytecode; named functions by name.
    if isinstance(self.function, python_types.LambdaType):
      function = generic_utils.func_dump(self.function)
      function_type = 'lambda'
    else:
      function = self.function.__name__
      function_type = 'function'
    if isinstance(self._output_shape, python_types.LambdaType):
      output_shape = generic_utils.func_dump(self._output_shape)
      output_shape_type = 'lambda'
    elif callable(self._output_shape):
      output_shape = self._output_shape.__name__
      output_shape_type = 'function'
    else:
      output_shape = self._output_shape
      output_shape_type = 'raw'
    config = {
        'function': function,
        'function_type': function_type,
        'output_shape': output_shape,
        'output_shape_type': output_shape_type,
        'arguments': self.arguments
    }
    base_config = super(Lambda, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
  @classmethod
  def from_config(cls, config, custom_objects=None):
    config = config.copy()
    globs = globals()
    if custom_objects:
      globs = dict(list(globs.items()) + list(custom_objects.items()))
    function_type = config.pop('function_type')
    if function_type == 'function':
      # Simple lookup in custom objects
      function = generic_utils.deserialize_keras_object(
          config['function'],
          custom_objects=custom_objects,
          printable_module_name='function in Lambda layer')
    elif function_type == 'lambda':
      # Unsafe deserialization from bytecode
      function = generic_utils.func_load(config['function'], globs=globs)
    else:
      raise TypeError('Unknown function type:', function_type)
    output_shape_type = config.pop('output_shape_type')
    if output_shape_type == 'function':
      # Simple lookup in custom objects
      output_shape = generic_utils.deserialize_keras_object(
          config['output_shape'],
          custom_objects=custom_objects,
          printable_module_name='output_shape function in Lambda layer')
    elif output_shape_type == 'lambda':
      # Unsafe deserialization from bytecode
      output_shape = generic_utils.func_load(config['output_shape'],
                                             globs=globs)
    else:
      output_shape = config['output_shape']
    # If arguments were numpy array, they have been saved as
    # list. We need to recover the ndarray
    if 'arguments' in config:
      for key in config['arguments']:
        if isinstance(config['arguments'][key], dict):
          arg_dict = config['arguments'][key]
          if 'type' in arg_dict and arg_dict['type'] == 'ndarray':
            # Overwrite the argument with its numpy translation
            config['arguments'][key] = np.array(arg_dict['value'])
    config['function'] = function
    config['output_shape'] = output_shape
    return cls(**config)
@tf_export('keras.layers.Dense')
class Dense(Layer):
    """Just your regular densely-connected NN layer.

    `Dense` implements the operation:
    `output = activation(dot(input, kernel) + bias)`
    where `activation` is the element-wise activation function
    passed as the `activation` argument, `kernel` is a weights matrix
    created by the layer, and `bias` is a bias vector created by the layer
    (only applicable if `use_bias` is `True`).

    Note: if the input to the layer has a rank greater than 2, then
    it is flattened prior to the initial dot product with `kernel`.

    Example:

    ```python
    # as first layer in a sequential model:
    model = Sequential()
    model.add(Dense(32, input_shape=(16,)))
    # now the model will take as input arrays of shape (*, 16)
    # and output arrays of shape (*, 32)

    # after the first layer, you don't need to specify
    # the size of the input anymore:
    model.add(Dense(32))
    ```

    Arguments:
        units: Positive integer, dimensionality of the output space.
        activation: Activation function to use.
            If you don't specify anything, no activation is applied
            (ie. "linear" activation: `a(x) = x`).
        use_bias: Boolean, whether the layer uses a bias vector.
        kernel_initializer: Initializer for the `kernel` weights matrix.
        bias_initializer: Initializer for the bias vector.
        kernel_regularizer: Regularizer function applied to
            the `kernel` weights matrix.
        bias_regularizer: Regularizer function applied to the bias vector.
        activity_regularizer: Regularizer function applied to
            the output of the layer (its "activation").
        kernel_constraint: Constraint function applied to
            the `kernel` weights matrix.
        bias_constraint: Constraint function applied to the bias vector.

    Input shape:
        nD tensor with shape: `(batch_size, ..., input_dim)`.
        The most common situation would be
        a 2D input with shape `(batch_size, input_dim)`.

    Output shape:
        nD tensor with shape: `(batch_size, ..., units)`.
        For instance, for a 2D input with shape `(batch_size, input_dim)`,
        the output would have shape `(batch_size, units)`.
    """

    def __init__(self,
                 units,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        """See the class docstring for argument semantics."""
        # Legacy Keras compatibility: translate `input_dim=n` into
        # `input_shape=(n,)` before handing kwargs to the base Layer.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)

        super(Dense, self).__init__(
            activity_regularizer=regularizers.get(activity_regularizer), **kwargs)
        self.units = int(units)
        # `get()` resolves string identifiers (e.g. 'relu', 'zeros') to objects.
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.supports_masking = True
        # Inputs must be at least rank 2: (batch, ..., features).
        self.input_spec = InputSpec(min_ndim=2)

    def build(self, input_shape):
        """Creates the `kernel` (and optional `bias`) variables.

        Raises:
            ValueError: If the last dimension of `input_shape` is unknown,
                since the kernel's shape depends on it.
        """
        input_shape = tensor_shape.TensorShape(input_shape)
        # `.value` unwraps a Dimension to a concrete int, or None if unknown.
        if input_shape[-1].value is None:
            raise ValueError('The last dimension of the inputs to `Dense` '
                             'should be defined. Found `None`.')
        # Pin the input feature size so later calls are shape-checked.
        self.input_spec = InputSpec(min_ndim=2,
                                    axes={-1: input_shape[-1].value})
        self.kernel = self.add_variable('kernel',
                                        shape=[input_shape[-1].value, self.units],
                                        initializer=self.kernel_initializer,
                                        regularizer=self.kernel_regularizer,
                                        constraint=self.kernel_constraint,
                                        dtype=self.dtype,
                                        trainable=True)
        if self.use_bias:
            self.bias = self.add_variable('bias',
                                          shape=[self.units,],
                                          initializer=self.bias_initializer,
                                          regularizer=self.bias_regularizer,
                                          constraint=self.bias_constraint,
                                          dtype=self.dtype,
                                          trainable=True)
        else:
            self.bias = None
        self.built = True

    def call(self, inputs):
        """Computes `activation(inputs . kernel + bias)` for rank >= 2 inputs."""
        inputs = ops.convert_to_tensor(inputs, dtype=self.dtype)
        shape = inputs.get_shape().as_list()
        if len(shape) > 2:
            # Broadcasting is required for the inputs: contract the last
            # input axis against the kernel's first axis.
            outputs = standard_ops.tensordot(inputs, self.kernel, [[len(shape) - 1],
                                                                   [0]])
            # Reshape the output back to the original ndim of the input.
            if not context.executing_eagerly():
                output_shape = shape[:-1] + [self.units]
                outputs.set_shape(output_shape)
        else:
            outputs = gen_math_ops.mat_mul(inputs, self.kernel)
        if self.use_bias:
            outputs = nn.bias_add(outputs, self.bias)
        if self.activation is not None:
            return self.activation(outputs)  # pylint: disable=not-callable
        return outputs

    def compute_output_shape(self, input_shape):
        """Returns `input_shape` with the last dimension replaced by `units`."""
        input_shape = tensor_shape.TensorShape(input_shape)
        input_shape = input_shape.with_rank_at_least(2)
        if input_shape[-1].value is None:
            raise ValueError(
                'The innermost dimension of input_shape must be defined, but saw: %s'
                % input_shape)
        return input_shape[:-1].concatenate(self.units)

    def get_config(self):
        """Returns the serializable config; inverse of `from_config`."""
        config = {
            'units': self.units,
            'activation': activations.serialize(self.activation),
            'use_bias': self.use_bias,
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'bias_regularizer': regularizers.serialize(self.bias_regularizer),
            'activity_regularizer':
                regularizers.serialize(self.activity_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
            'bias_constraint': constraints.serialize(self.bias_constraint)
        }
        base_config = super(Dense, self).get_config()
        # Later entries win, so layer-specific keys override base keys.
        return dict(list(base_config.items()) + list(config.items()))
@tf_export('keras.layers.ActivityRegularization')
class ActivityRegularization(Layer):
    """Layer that applies an update to the cost function based input activity.

    Arguments:
        l1: L1 regularization factor (positive float).
        l2: L2 regularization factor (positive float).

    Input shape:
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the samples axis)
        when using this layer as the first layer in a model.

    Output shape:
        Same shape as input.
    """

    def __init__(self, l1=0., l2=0., **kwargs):
        # All of the behavior lives in the activity regularizer installed on
        # the base Layer; the layer itself passes its input through unchanged.
        super(ActivityRegularization, self).__init__(
            activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
        self.l1 = l1
        self.l2 = l2
        self.supports_masking = True

    def compute_output_shape(self, input_shape):
        # Identity layer: the shape is unchanged.
        return input_shape

    def get_config(self):
        # Start from the base config and let this layer's keys override it.
        merged = dict(super(ActivityRegularization, self).get_config())
        merged.update({'l1': self.l1, 'l2': self.l2})
        return merged
import pg8000
from pg8000.tests.connection_settings import db_connect
import time
import warnings
from contextlib import closing
from decimal import Decimal
# Wall-clock timer for the complete benchmark run.
whole_begin_time = time.time()

# (SQL expression, label) pairs -- one SELECT benchmark per data type.
# NOTE(review): '%%' presumably relies on the driver's paramstyle escaping
# to become a literal '%' -- confirm against pg8000's query handling.
tests = (
    ("cast(id / 100 as int2)", 'int2'),
    ("cast(id as int4)", 'int4'),
    ("cast(id * 100 as int8)", 'int8'),
    ("(id %% 2) = 0", 'bool'),
    ("N'Static text string'", 'txt'),
    ("cast(id / 100 as float4)", 'float4'),
    ("cast(id / 100 as float8)", 'float8'),
    ("cast(id / 100 as numeric)", 'numeric'),
    ("timestamp '2001-09-28' + id * interval '1 second'", 'timestamp'),
)

with warnings.catch_warnings(), closing(pg8000.connect(**db_connect)) as db:
    for txt, name in tests:
        # Repeat the expression across 7 columns x 10000 generated rows so
        # the per-type decode cost dominates the timing.
        query = """SELECT {0} AS column1, {0} AS column2, {0} AS column3,
            {0} AS column4, {0} AS column5, {0} AS column6, {0} AS column7
            FROM (SELECT generate_series(1, 10000) AS id) AS tbl""".format(txt)
        cursor = db.cursor()
        print("Beginning %s test..." % name)
        # Four timed repetitions; the first typically includes warm-up cost.
        for i in range(1, 5):
            begin_time = time.time()
            cursor.execute(query)
            for row in cursor:
                pass
            end_time = time.time()
            print("Attempt %s - %s seconds." % (i, end_time - begin_time))
            db.commit()

    # Batched-INSERT benchmark: 1000 identical rows per executemany call
    # into a temporary table.
    cursor = db.cursor()
    cursor.execute(
        "CREATE TEMPORARY TABLE t1 (f1 serial primary key, "
        "f2 bigint not null, f3 varchar(50) null, f4 bool)")
    db.commit()
    params = [(Decimal('7.4009'), 'season of mists...', True)] * 1000
    print("Beginning executemany test...")
    for i in range(1, 5):
        begin_time = time.time()
        cursor.executemany(
            "insert into t1 (f2, f3, f4) values (%s, %s, %s)", params)
        db.commit()
        end_time = time.time()
        print("Attempt {0} took {1} seconds.".format(i, end_time - begin_time))

    # Statement-reuse benchmark: 2000 executions of the identical statement.
    begin_time = time.time()
    for i in range(2000):
        cursor.execute("select count(*) from t1")
        cursor.fetchall()
    print("Took {0} seconds.".format(time.time() - begin_time))

print("Whole time - %s seconds." % (time.time() - whole_begin_time))
import urlparse
import pprint
import MySQLdb
import hashlib
#exception + zone : select * from exception LEFT JOIN (peer as srcpeer, peer as dstpeer, connections, match_zone) on (connections.src_peer_id = srcpeer.peer_id and connections.dst_peer_id = dstpeer.peer_id and connections.exception_id = exception.exception_id and match_zone.exception_id = exception.exception_id);
#select de gros bourrin : select exc.url, exc.count, exc.md5, cap.http_request, srcpeer.peer_ip, dstpeer.peer_host, mz.zone, mz.arg_name, mz.rule_id from exception as exc, capture as cap, peer as srcpeer, peer as dstpeer, match_zone as mz, connections as conn WHERE (mz.exception_id = exc.exception_id and cap.exception_id = exc.exception_id and srcpeer.peer_id = conn.src_peer_id and conn.dst_peer_id = dstpeer.peer_id);
# select all exception with associated peers.
##select * from exception LEFT JOIN (peer as srcpeer, peer as dstpeer, connections) on (connections.src_peer_id = srcpeer.peer_id and connections.dst_peer_id = dstpeer.peer_id and exception.connection_id = connections.connection_id) LIMIT 10;
# select all exceptions with associated zone_match and peers.
# select * from exception LEFT JOIN (peer as srcpeer, peer as dstpeer, connections, match_zone) on (connections.src_peer_id = srcpeer.peer_id and connections.dst_peer_id = dstpeer.peer_id and exception.connection_id = connections.connection_id and match_zone.exception_id = exception.exception_id) where srcpeer.peer_ip != '88.191.133.106' and srcpeer.peer_ip != '82.234.123.117' and srcpeer.peer_ip != '82.247.12.197';
class signature_parser:
def __init__(self, host, user, password, dbname):
# print "[+] Connecting to database"
self.db = MySQLdb.connect(host, user, password, dbname)
if self.db is None:
print "ERROR!"
return
self.cursor = self.db.cursor()
if self.cursor is None:
print "ERROR!"
return
# Checking wether the base already exists
try:
self.cursor.execute("SELECT COUNT(*) FROM exception")
except:
self.dbcreate()
def dbcreate(self):
print ("[+] drop'ing and creating new tables")
self.cursor.execute("DROP TABLES IF EXISTS rules")
self.cursor.execute("CREATE TABLE rules (rule_id integer "
"auto_increment primary key "
", action TEXT, msg TEXT, rx TEXT, "
"rx_type INT, url TEXT, "
"zone TEXT, arg_name TEXT);")
self.cursor.execute("DROP TABLES IF EXISTS connections")
self.cursor.execute("CREATE TABLE connections (connection_id INTEGER "
"auto_increment primary key, "
"src_peer_id INT, dst_peer_id INT, exception_id INT, capture_id INT);")
self.cursor.execute("DROP TABLES IF EXISTS peer")
self.cursor.execute("CREATE TABLE peer (peer_id INTEGER "
"auto_increment primary key, "
"peer_ip TEXT, peer_host TEXT, peer_tags TEXT);")
self.cursor.execute("DROP TABLES IF EXISTS exception")
self.cursor.execute("CREATE TABLE exception (exception_id integer "
"auto_increment primary key "
",url TEXT, md5 TEXT, count INT default 1);")
self.cursor.execute("DROP TABLES IF EXISTS match_zone")
self.cursor.execute("CREATE TABLE match_zone (match_id INTEGER "
"auto_increment primary key, exception_id INTEGER, "
"zone TEXT, arg_name TEXT, rule_id INTEGER);")
self.cursor.execute("DROP TABLES IF EXISTS capture")
self.cursor.execute("CREATE TABLE capture (capture_id INTEGER "
"auto_increment primary key, http_request TEXT, exception_id INTEGER);")
# self.cursor.execute("DROP TABLES IF EXISTS router")
# self.cursor.execute("CREATE TABLE router(route_id INTEGER "
# "auto_increment primary key,"
# "exception_id INTEGER, rule_id INTEGER, "
# "conn_id INTEGER, capture_id INTEGER);")
def extract_sig(self, raw_rule, is_from_http=False, is_from_log=False):
start = raw_rule.find(": ")
if (start != -1):
if (is_from_log == True):
end = raw_rule[start:].find(", client: ")
if (end):
return (raw_rule[raw_rule.find(": ") + 2:
raw_rule.find(", client: ")])
elif (is_from_http == True):
return (raw_rule[raw_rule.find(": ") + 2:])
return ("")
def last_id(self):
self.cursor.execute("SELECT last_insert_id()")
data = self.cursor.fetchone()
return data[0]
def insert(self, fmt, *args):
self.cursor.execute(fmt, [args])
def add_capture(self, exception_id, raw_request):
#capture information
self.cursor.execute("SELECT COUNT(*) FROM capture where exception_id = %s", (str(exception_id)))
x = self.cursor.fetchone()
if (x is None or x[0] < 10):
# print "less than 10 "+str(x[0])
self.cursor.execute("INSERT INTO capture (http_request, exception_id)"
"VALUES (%s, %s)", (str(raw_request), str(exception_id)))
capture_id = self.last_id()
else:
capture_id = 0
return capture_id
def sig_to_db(self, raw_request, d, force_insert=False):
if (force_insert == False):
sig_hash = d["server"][0]+"#"+d["uri"][0]+"#"
for i in range(0, 50):
if "zone"+str(i) in d:
sig_hash = sig_hash + d["zone"+str(i)][0] + "#"
else:
break
if "var_name"+str(i) in d:
sig_hash = sig_hash + d["var_name"+str(i)][0] + "#"
sig_hash = sig_hash + d["id"+str(i)][0] + "#"
sig_md5 = hashlib.md5(sig_hash).hexdigest()
self.cursor.execute("SELECT exception_id FROM exception where md5 = %s LIMIT 1", (sig_md5))
exception_id = self.cursor.fetchone()
if (exception_id is not None):
self.add_capture(exception_id[0], raw_request)
self.cursor.execute("UPDATE exception SET count=count+1 where md5 = %s", (sig_md5))
return
#peer information
sig_hash = d["server"][0]+"#"+d["uri"][0]+"#"
self.cursor.execute("INSERT INTO peer (peer_ip) "
"VALUES (%s)", (d["ip"][0]))
ip_id = self.last_id()
self.cursor.execute("INSERT INTO peer (peer_host) "
"VALUES (%s)", (d["server"][0]))
host_id = self.last_id()
#exception
self.cursor.execute("INSERT INTO exception (url) VALUES "
"(%s)", (d["uri"][0]))
exception_id = self.last_id()
#capture information
capture_id = self.add_capture(exception_id, raw_request)
# print "cap id : "+str(capture_id)
#connection information
self.cursor.execute("INSERT INTO connections (src_peer_id, dst_peer_id, exception_id, capture_id)"
"VALUES (%s, %s, %s, %s)", (str(ip_id), str(host_id), str(exception_id), str(capture_id)))
connection_id = self.last_id()
#match_zones
for i in range(0, 50):
zn = ""
vn = ""
if "zone"+str(i) in d:
zn = d["zone"+str(i)][0]
sig_hash = sig_hash + d["zone"+str(i)][0] + "#"
else:
break
if "var_name"+str(i) in d:
vn = d["var_name"+str(i)][0]
sig_hash = sig_hash + d["var_name"+str(i)][0] + "#"
sig_hash = sig_hash + d["id"+str(i)][0] + "#"
self.cursor.execute("INSERT INTO match_zone (exception_id, zone, arg_name, rule_id) "
"VALUES (%s, %s, %s, %s)", (str(exception_id), zn, vn, d["id"+str(i)][0]))
self.cursor.execute("UPDATE exception SET md5=%s WHERE exception_id=%s", (hashlib.md5(sig_hash).hexdigest(), str(exception_id)))
return (connection_id)
def raw_parser(self, raw_request, raw_rule, is_from_http=True, is_from_log=False):
sig = self.extract_sig(raw_rule, is_from_http, is_from_log)
tmpdict = urlparse.parse_qs(sig)
connection_id = self.sig_to_db(raw_request, tmpdict, force_insert=False)
self.db.close() | unknown | codeparrot/codeparrot-clean | ||
<?php

namespace Illuminate\Contracts\Support;

interface HasOnceHash
{
    /**
     * Compute the hash that should be used to represent the object when given to a function using "once".
     *
     * NOTE(review): implementations presumably need to return a value that
     * is stable across repeated calls so "once" resolves the same object
     * to the same memoized entry — confirm against the once() helper.
     *
     * @return string
     */
    public function onceHash();
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.runtime;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.runtime.errors.ProcessingContext;
import org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator;
import org.apache.kafka.connect.runtime.errors.Stage;
import org.apache.kafka.connect.transforms.Transformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Objects;
import java.util.StringJoiner;
import java.util.stream.Collectors;
/**
* Represents a chain of {@link Transformation}s to be applied to a {@link ConnectRecord} serially.
* @param <T> The type of record included in the {@link ProcessingContext} associated with each record
* @param <R> The type of record (must be an implementation of {@link ConnectRecord})
*/
public class TransformationChain<T, R extends ConnectRecord<R>> implements AutoCloseable {
private static final Logger log = LoggerFactory.getLogger(TransformationChain.class);
private final List<TransformationStage<R>> transformationStages;
private final RetryWithToleranceOperator<T> retryWithToleranceOperator;
public TransformationChain(List<TransformationStage<R>> transformationStages, RetryWithToleranceOperator<T> retryWithToleranceOperator) {
this.transformationStages = transformationStages;
this.retryWithToleranceOperator = retryWithToleranceOperator;
}
public R apply(ProcessingContext<T> context, R record) {
if (transformationStages.isEmpty()) return record;
for (final TransformationStage<R> transformationStage : transformationStages) {
final R current = record;
log.trace("Applying transformation {} to {}",
transformationStage.transformClass().getName(), record);
// execute the operation
record = retryWithToleranceOperator.execute(context, () -> transformationStage.apply(current), Stage.TRANSFORMATION, transformationStage.transformClass());
if (record == null) break;
}
return record;
}
@Override
public void close() {
for (TransformationStage<R> transformationStage : transformationStages) {
transformationStage.close();
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TransformationChain<?, ?> that = (TransformationChain<?, ?>) o;
return Objects.equals(transformationStages, that.transformationStages);
}
@Override
public int hashCode() {
return Objects.hash(transformationStages);
}
public String toString() {
StringJoiner chain = new StringJoiner(", ", getClass().getName() + "{", "}");
for (TransformationStage<R> transformationStage : transformationStages) {
chain.add(transformationStage.transformClass().getName());
}
return chain.toString();
}
public List<TransformationStage.StageInfo> transformationChainInfo() {
return transformationStages.stream().map(TransformationStage::transformationStageInfo).collect(Collectors.toList());
}
} | java | github | https://github.com/apache/kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/TransformationChain.java |
from pyspark.sql import SQLContext
import h5py
from pyspark import SparkConf, SparkContext
from datetime import datetime, date, timedelta
import sys
from pyspark.sql.types import Row, StructField, StructType, StringType, IntegerType, LongType
import os
from os.path import dirname, basename
from tempfile import NamedTemporaryFile
from utils.helper import saveDataset # If you added a file in sc in above step then import it for usage
import json
import argparse
import subprocess
import socket
def add_all_dates(config, originalpath):
    """Write every top-level HDF5 group key of `originalpath` to a local
    temp file and copy that file into HDFS.

    Returns the NamedTemporaryFile (already closed and deleted locally);
    callers use only its basename to locate the HDFS copy.
    """
    sparkles_tmp_dir_local = config['SPARKLES_TMP_DIR_LOCAL']
    sparkles_tmp_dir = config['SPARKLES_TMP_DIR']
    # delete=False: the file must survive close() until the HDFS copy is done.
    hfile = NamedTemporaryFile(delete=False, dir=sparkles_tmp_dir_local)
    with h5py.File(originalpath) as curr_file:
        filekeys = curr_file.keys()
        for k in filekeys:
            print>>hfile, k
    hfile.close()
    subprocess.check_call(['hdfs', 'dfs', '-copyFromLocal', '-f', hfile.name, sparkles_tmp_dir])  # Copy the temporary file to HDFS
    os.unlink(hfile.name)
    return hfile
def orders_sql(configstr, orders, sqlContext, userdatadir, originalpath, description, details):
    """Apply the ORDERS schema to an RDD of rows and persist it as a dataset.

    'created'/'destroyed' are LongType (presumably epoch-based timestamps --
    confirm against the HDF5 source); all other columns are IntegerType.
    """
    schemaString_orders = "id ref ob_id created destroyed side price quantity is_round past_id new_id p_id"
    fields_orders = []
    for field_name in schemaString_orders.split():
        if(field_name not in ['created', 'destroyed']):
            fields_orders.append(StructField(field_name, IntegerType(), True))
        else:
            fields_orders.append(StructField(field_name, LongType(), True))
    schema_orders = StructType(fields_orders)
    # Apply the schema to the RDD.
    schemaOrders = sqlContext.createDataFrame(orders, schema_orders)
    # schemaOrders.show()
    # schemaOrders.registerTempTable("orders")
    # results = sqlContext.sql("SELECT id, price, side FROM orders")
    # for (id, price, side) in results.collect():
    #     print(price)
    saveDataset(configstr, schemaOrders, userdatadir, "orders", originalpath, description, details)
def cancels_sql(configstr, cancels, sqlContext, userdatadir, originalpath, description, details):
    """Apply the CANCELS schema to an RDD of rows and persist it as a dataset.

    'timestamp' is LongType (presumably epoch-based -- confirm against the
    HDF5 source); all other columns are IntegerType.
    """
    schemaString_cancels = "id past_id new_id ob_id timestamp side price quantity"
    fields_cancels = []
    for field_name in schemaString_cancels.split():
        if(field_name not in ['timestamp']):
            fields_cancels.append(StructField(field_name, IntegerType(), True))
        else:
            fields_cancels.append(StructField(field_name, LongType(), True))
    schema_cancels = StructType(fields_cancels)
    schemaCancels = sqlContext.createDataFrame(cancels, schema_cancels)
    # schemaCancels.saveAsParquetFile("files/cancels_name.parquet")
    saveDataset(configstr, schemaCancels, userdatadir, "cancels", originalpath, description, details)
def trades_sql(configstr, trades, sqlContext, userdatadir, originalpath, description, details):
    """Apply the TRADES schema to an RDD of rows and persist it as a dataset.

    'timestamp' is LongType (presumably epoch-based -- confirm against the
    HDF5 source); all other columns are IntegerType.
    """
    schemaString_trades = "id ref o_id ob_id timestamp side quantity price p_id cp_id"
    fields_trades = []
    for field_name in schemaString_trades.split():
        if(field_name not in ['timestamp']):
            fields_trades.append(StructField(field_name, IntegerType(), True))
        else:
            fields_trades.append(StructField(field_name, LongType(), True))
    schema_trades = StructType(fields_trades)
    schemaTrades = sqlContext.createDataFrame(trades, schema_trades)
    # schemaTrades.saveAsParquetFile("files/trades_name.parquet")
    saveDataset(configstr, schemaTrades, userdatadir, "trades", originalpath, description, details)
def import_hdf5(x, originalpath, table):
    """Read dataset `table` under group `x` of the HDF5 file at
    `originalpath` and return its rows as a list (fully materialized)."""
    with h5py.File(originalpath) as f:
        data = f[str(x)].get(table)
        # data[:] reads the whole dataset into memory before listing rows.
        return list(data[:])
def numpy_to_native(x):
    """Convert a numpy row/array to nested native Python types via tolist(),
    so Spark can build DataFrame rows from it."""
    return x.tolist()
def main():
    """Driver: read HDF5 market-data files and persist their ORDERS,
    CANCELS and TRADES tables as datasets via Spark."""
    conf = SparkConf()
    conf.setAppName("Data Import")
    # conf.set("spark.jars", "file:/shared_data/spark_jars/hadoop-openstack-3.0.0-SNAPSHOT.jar")
    sc = SparkContext(conf=conf)

    parser = argparse.ArgumentParser()
    parser.add_argument("backend", type=str)
    parser.add_argument("originalpaths", type=str)  # JSON-encoded list of HDF5 paths
    parser.add_argument("description", type=str)
    parser.add_argument("details", type=str)
    parser.add_argument("userdatadir", type=str)
    parser.add_argument("configstr", type=str)  # JSON-encoded config dict
    parser.add_argument("partitions", type=int)
    args = parser.parse_args()

    # Swift Connection -- credentials come from the standard OpenStack
    # environment variables.
    if(args.backend == 'swift'):
        hadoopConf = sc._jsc.hadoopConfiguration()
        hadoopConf.set("fs.swift.impl", "org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem")
        hadoopConf.set("fs.swift.service.SparkTest.auth.url", os.environ['OS_AUTH_URL'] + "/tokens")
        hadoopConf.set("fs.swift.service.SparkTest.http.port", "8443")
        hadoopConf.set("fs.swift.service.SparkTest.auth.endpoint.prefix", "/")
        hadoopConf.set("fs.swift.service.SparkTest.region", os.environ['OS_REGION_NAME'])
        hadoopConf.set("fs.swift.service.SparkTest.public", "false")
        hadoopConf.set("fs.swift.service.SparkTest.tenant", os.environ['OS_TENANT_ID'])
        hadoopConf.set("fs.swift.service.SparkTest.username", os.environ['OS_USERNAME'])
        hadoopConf.set("fs.swift.service.SparkTest.password", os.environ['OS_PASSWORD'])

    partitions = args.partitions  # Default number of jobs
    helperpath = dirname(os.path.abspath(__file__))
    sc.addFile(helperpath + "/utils/helper.py")  # To import custom modules

    originalpaths = json.loads(args.originalpaths)
    description = args.description
    details = args.details
    userdatadir = args.userdatadir
    configstr = args.configstr
    config = json.loads(configstr)

    for originalpath in originalpaths:
        # Publish the file's HDF5 group keys to HDFS, then parallelize over
        # them so each partition reads its groups from the source file.
        hfile = add_all_dates(config, originalpath)
        hfile_name = basename(hfile.name)  # hfile.name always returns the full path of the temp file
        raw_file = sc.textFile('hdfs://' + socket.gethostname() + ':' + str(config['HADOOP_RPC_PORT']) + config['SPARKLES_TMP_DIR'] + hfile_name, partitions)

        rdd1 = raw_file.flatMap(lambda x: import_hdf5(x, originalpath, "ORDERS"))
        rdd1 = rdd1.map(numpy_to_native)
        rdd2 = raw_file.flatMap(lambda x: import_hdf5(x, originalpath, "CANCELS"))
        rdd2 = rdd2.map(numpy_to_native)
        rdd3 = raw_file.flatMap(lambda x: import_hdf5(x, originalpath, "TRADES"))
        rdd3 = rdd3.map(numpy_to_native)
        # print(rdd1.count())

        sqlContext = SQLContext(sc)
        orders_sql(configstr, rdd1, sqlContext, userdatadir, originalpath, description, details)
        cancels_sql(configstr, rdd2, sqlContext, userdatadir, originalpath, description, details)
        trades_sql(configstr, rdd3, sqlContext, userdatadir, originalpath, description, details)
        # os.unlink(hfile.name)
---
applies_to:
stack:
serverless:
navigation_title: "Text type family"
mapped_pages:
- https://www.elastic.co/guide/en/elasticsearch/reference/current/text-type-family.html
---
# Text type family [text]
The text family includes the following field types:
* [`text`](/reference/elasticsearch/mapping-reference/text.md), the traditional field type for full-text content such as the body of an email or the description of a product.
* [`match_only_text`](/reference/elasticsearch/mapping-reference/match-only-text.md), a variant of `text` field type with limited functionality. Scoring is always disabled and the `standard` analyzer is always used. It is suited for match-only free-text use cases, meaning that the fact that there is a match is important, but scoring and where the match happens is not relevant. Note that positional queries are possible, but are slow.
* [`pattern_text`](/reference/elasticsearch/mapping-reference/pattern-text.md), a variant of `text` which is optimized for space efficient storage of log messages. Pattern text reduces space usage for messages that contain many repeated sequences, like the explanatory text of a log message. Pattern text also disables scoring, but unlike `match_only_text`, positional data can be stored for fast phrase queries. | unknown | github | https://github.com/elastic/elasticsearch | docs/reference/elasticsearch/mapping-reference/text-type-family.md |
// NOTE(review): TypeScript compiler-test baseline (input .ts plus emitted
// .js); regenerate via the test harness rather than editing by hand.
//// [tests/cases/conformance/statements/VariableStatements/usingDeclarations/awaitUsingDeclarationsInForOf.5.ts] ////

//// [awaitUsingDeclarationsInForOf.5.ts]
// https://github.com/microsoft/TypeScript/pull/55558#issuecomment-1817595357
declare const x: any[]
for (await using of x);

export async function test() {
    for (await using of x);
}

//// [awaitUsingDeclarationsInForOf.5.js]
// https://github.com/microsoft/TypeScript/pull/55558#issuecomment-1817595357
for (await using of x)
    ;
export async function test() {
    for (await using of x)
        ;
}
// NOTE(review): TypeScript compiler-test baseline (input .ts plus emitted
// .js); regenerate via the test harness rather than editing by hand.
//// [tests/cases/compiler/breakInIterationOrSwitchStatement2.ts] ////

//// [breakInIterationOrSwitchStatement2.ts]
do {
    break;
}
while (true);

//// [breakInIterationOrSwitchStatement2.js]
"use strict";
do {
    break;
} while (true);
"""
KBase Job Manager
The main class here defines a manager for running jobs (as Job objects).
This class knows how to fetch job status, kill jobs, etc.
It also communicates with the front end over the KBaseJobs channel.
It is intended for use as a singleton - use the get_manager() function
to fetch it.
"""
__author__ = "Bill Riehl <wjriehl@lbl.gov>"
__version__ = "0.0.1"
import biokbase.narrative.clients as clients
from .job import Job
from ipykernel.comm import Comm
import threading
import json
import logging
from biokbase.narrative.common import kblogging
from biokbase.narrative.common.log_common import EVENT_MSG_SEP
from IPython.display import HTML
from jinja2 import Template
import dateutil.parser
import datetime
from biokbase.narrative.app_util import system_variable
from biokbase.narrative.exception_util import (
NarrativeException,
transform_job_exception
)
import traceback
import sys
class JobManager(object):
"""
The KBase Job Manager clsas. This handles all jobs and makes their status available.
On status lookups, it feeds the results to the KBaseJobs channel that the front end
listens to.
"""
__instance = None
# keys = job_id, values = { refresh = T/F, job = Job object }
_running_jobs = dict()
_lookup_timer = None
_comm = None
_log = kblogging.get_logger(__name__)
# TODO: should this not be done globally?
_running_lookup_loop = False
def __new__(cls):
    """Enforce the singleton: every construction returns the one shared
    JobManager instance, creating it on first use."""
    existing = JobManager.__instance
    if existing is not None:
        return existing
    instance = object.__new__(cls)
    JobManager.__instance = instance
    return instance
def initialize_jobs(self):
    """
    Initializes this JobManager.
    This is expected to be run by a running Narrative, and naturally linked to a workspace.
    So it does the following steps.
    1. app_util.system_variable('workspace_id')
    2. get list of jobs with that ws id from UJS (also gets tag, cell_id, run_id)
    3. initialize the Job objects by running NJS.get_job_params on each of those (also gets app_id)
    4. start the status lookup loop.

    Raises the transformed exception if the initial UJS listing or any
    per-job parameter lookup fails; an error payload is sent over the
    comm channel first.
    """
    ws_id = system_variable('workspace_id')
    try:
        nar_jobs = clients.get('user_and_job_state').list_jobs2({
            'authstrat': 'kbaseworkspace',
            'authparams': [str(ws_id)]
        })
    except Exception as e:
        kblogging.log_event(self._log, 'init_error', {'err': str(e)})
        new_e = transform_job_exception(e)
        # Tell the front end why startup failed before re-raising.
        error = {
            'error': 'Unable to get initial jobs list',
            'message': getattr(new_e, 'message', 'Unknown reason'),
            'code': getattr(new_e, 'code', -1),
            'source': getattr(new_e, 'source', 'jobmanager'),
            'name': getattr(new_e, 'name', type(e).__name__),
            'service': 'user_and_job_state'
        }
        self._send_comm_message('job_init_err', error)
        raise new_e
    for info in nar_jobs:
        # list_jobs2 rows are positional: [0]=job id, [1]=user info,
        # [10]=job metadata dict (tag/cell_id/run_id).
        job_id = info[0]
        user_info = info[1]
        job_meta = info[10]
        try:
            job_info = clients.get('job_service').get_job_params(job_id)[0]
            # Track every known job; 'refresh' marks it for status polling.
            self._running_jobs[job_id] = {
                'refresh': True,
                'job': Job.from_state(job_id,
                                      job_info,
                                      user_info[0],
                                      app_id=job_info.get('app_id'),
                                      tag=job_meta.get('tag', 'release'),
                                      cell_id=job_meta.get('cell_id', None),
                                      run_id=job_meta.get('run_id', None))
            }
        except Exception as e:
            kblogging.log_event(self._log, 'init_error', {'err': str(e)})
            new_e = transform_job_exception(e)
            error = {
                'error': 'Unable to get job info on initial lookup',
                'job_id': job_id,
                'message': getattr(new_e, 'message', 'Unknown reason'),
                'code': getattr(new_e, 'code', -1),
                'source': getattr(new_e, 'source', 'jobmanager'),
                'name': getattr(new_e, 'name', type(e).__name__),
                'service': 'job_service'
            }
            self._send_comm_message('job_init_lookup_err', error)
            raise new_e  # should crash and burn on any of these.
    if not self._running_lookup_loop:
        # only keep one loop at a time in cause this gets called again!
        if self._lookup_timer is not None:
            self._lookup_timer.cancel()
        self._running_lookup_loop = True
        self._lookup_job_status_loop()
    else:
        self._lookup_all_job_status()
    def list_jobs(self):
        """
        Render all locally tracked jobs as a quick HTML table.

        Returns an IPython HTML object with one row per job (id, app name,
        submission time, owner, state, run time, completion time), sorted by
        creation time, or the plain string "No running jobs!" when no jobs
        are tracked. Any failure is logged and re-raised.
        """
        try:
            status_set = list()
            for job_id in self._running_jobs:
                job = self._running_jobs[job_id]['job']
                job_state = job.state()
                job_params = job.parameters()
                # parameters() returns a list; the app id lives on its first entry.
                job_state['app_id'] = job_params[0].get('app_id', 'Unknown App')
                job_state['owner'] = job.owner
                status_set.append(job_state)
            if not len(status_set):
                return "No running jobs!"
            status_set = sorted(status_set, key=lambda s: s['creation_time'])
            for i in range(len(status_set)):
                # Timestamps from the service are epoch milliseconds, hence /1000.
                status_set[i]['creation_time'] = datetime.datetime.strftime(datetime.datetime.fromtimestamp(status_set[i]['creation_time']/1000), "%Y-%m-%d %H:%M:%S")
                exec_start = status_set[i].get('exec_start_time', None)
                if 'finish_time' in status_set[i]:
                    finished = status_set[i].get('finish_time', None)
                    if finished is not None and exec_start:
                        delta = datetime.datetime.fromtimestamp(finished/1000.0) - datetime.datetime.fromtimestamp(exec_start/1000.0)
                        # Strip sub-second noise for display.
                        delta = delta - datetime.timedelta(microseconds=delta.microseconds)
                        status_set[i]['run_time'] = str(delta)
                        status_set[i]['finish_time'] = datetime.datetime.strftime(datetime.datetime.fromtimestamp(status_set[i]['finish_time']/1000), "%Y-%m-%d %H:%M:%S")
                elif exec_start:
                    # Job still running: show elapsed time so far.
                    delta = datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(exec_start/1000.0)
                    delta = delta - datetime.timedelta(microseconds=delta.microseconds)
                    status_set[i]['run_time'] = str(delta)
                else:
                    status_set[i]['run_time'] = 'Not started'
            tmpl = """
            <table class="table table-bordered table-striped table-condensed">
                <tr>
                    <th>Id</th>
                    <th>Name</th>
                    <th>Submitted</th>
                    <th>Submitted By</th>
                    <th>Status</th>
                    <th>Run Time</th>
                    <th>Complete Time</th>
                </tr>
                {% for j in jobs %}
                <tr>
                    <td>{{ j.job_id|e }}</td>
                    <td>{{ j.app_id|e }}</td>
                    <td>{{ j.creation_time|e }}</td>
                    <td>{{ j.owner|e }}</td>
                    <td>{{ j.job_state|e }}</td>
                    <td>{{ j.run_time|e }}</td>
                    <td>{% if j.finish_time %}{{ j.finish_time|e }}{% else %}Incomplete{% endif %}</td>
                </tr>
                {% endfor %}
            </table>
            """
            return HTML(Template(tmpl).render(jobs=status_set))
        except Exception as e:
            kblogging.log_event(self._log, "list_jobs.error", {'err': str(e)})
            raise
def get_jobs_list(self):
"""
A convenience method for fetching an unordered list of all running Jobs.
"""
return [j['job'] for j in self._running_jobs.values()]
# def _get_existing_job(self, job_tuple):
# """
# creates a Job object from a job_id that already exists.
# If no job exists, raises an Exception.
# Parameters:
# -----------
# job_tuple : The expected 5-tuple representing a Job. The format is:
# (job_id, set of job inputs (as JSON), version tag, cell id that started the job, run id of the job)
# """
# # remove the prefix (if present) and take the last element in the split
# job_id = job_tuple[0].split(':')[-1]
# try:
# job_info = clients.get('job_service').get_job_params(job_id)[0]
# return Job.from_state(job_id, job_info, app_id=job_tuple[1], tag=job_tuple[2], cell_id=job_tuple[3], run_id=job_tuple[4])
# except Exception as e:
# kblogging.log_event(self._log, "get_existing_job.error", {'job_id': job_id, 'err': str(e)})
# raise
def _construct_job_status(self, job_id):
"""
Always creates a Job Status.
It'll embed error messages into the status if there are problems.
"""
state = {}
widget_info = None
app_spec = {}
job = self.get_job(job_id)
if job is None:
state = {
'job_state': 'error',
'error': {
'error': 'Job does not seem to exist, or it is otherwise unavailable.',
'message': 'Job does not exist',
'name': 'Job Error',
'code': -1,
'exception': {
'error_message': 'job not found in JobManager',
'error_type': 'ValueError',
'error_stacktrace': ''
}
},
'cell_id': None,
'run_id': None
}
return {
'state': state,
'app_spec': app_spec,
'widget_info': widget_info,
'owner': None
}
try:
app_spec = job.app_spec()
except Exception as e:
kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)})
try:
state = job.state()
except Exception as e:
kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)})
new_e = transform_job_exception(e)
e_type = type(e).__name__
e_message = str(new_e).replace('<', '<').replace('>', '>')
e_trace = traceback.format_exc().replace('<', '<').replace('>', '>')
e_code = getattr(new_e, "code", -2)
e_source = getattr(new_e, "source", "JobManager")
state = {
'job_state': 'error',
'error': {
'error': 'Unable to find current job state. Please try again later, or contact KBase.',
'message': 'Unable to return job state',
'name': 'Job Error',
'code': e_code,
'source': e_source,
'exception': {
'error_message': e_message,
'error_type': e_type,
'error_stacktrace': e_trace,
}
},
'creation_time': 0,
'cell_id': job.cell_id,
'run_id': job.run_id,
'job_id': job_id
}
if state.get('finished', 0) == 1:
try:
widget_info = job.get_viewer_params(state)
except Exception as e:
# Can't get viewer params
new_e = transform_job_exception(e)
kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)})
state['job_state'] = 'error'
state['error'] = {
'error': 'Unable to generate App output viewer!\nThe App appears to have completed successfully,\nbut we cannot construct its output viewer.\nPlease contact the developer of this App for assistance.',
'message': 'Unable to build output viewer parameters!',
'name': 'App Error',
'code': getattr(new_e, "code", -1),
'source': getattr(new_e, "source", "JobManager")
}
if 'canceling' in self._running_jobs[job_id]:
state['job_state'] = 'canceling'
return {'state': state,
'spec': app_spec,
'widget_info': widget_info,
'owner': job.owner}
def _lookup_job_status(self, job_id):
"""
Will raise a ValueError if job_id doesn't exist.
Sends the status over the comm channel as the usual job_status message.
"""
status = self._construct_job_status(job_id)
self._send_comm_message('job_status', status)
def _lookup_all_job_status(self, ignore_refresh_flag=False):
"""
Looks up status for all jobs.
Once job info is acquired, it gets pushed to the front end over the
'KBaseJobs' channel.
"""
status_set = dict()
# grab the list of running job ids, so we don't run into update-while-iterating problems.
for job_id in self._running_jobs.keys():
if self._running_jobs[job_id]['refresh'] or ignore_refresh_flag:
status_set[job_id] = self._construct_job_status(job_id)
self._send_comm_message('job_status_all', status_set)
def _lookup_job_status_loop(self):
"""
Initialize a loop that will look up job info. This uses a Timer thread on a 10
second loop to update things.
"""
self._lookup_all_job_status()
self._lookup_timer = threading.Timer(10, self._lookup_job_status_loop)
self._lookup_timer.start()
def cancel_job_lookup_loop(self):
"""
Cancels a running timer if one's still alive.
"""
if self._lookup_timer:
self._lookup_timer.cancel()
self._lookup_timer = None
self._running_lookup_loop = False
def register_new_job(self, job):
"""
Registers a new Job with the manager - should only be invoked when a new Job gets
started. This stores the Job locally and pushes it over the comm channel to the
Narrative where it gets serialized.
Parameters:
-----------
job : biokbase.narrative.jobs.job.Job object
The new Job that was started.
"""
self._running_jobs[job.job_id] = {'job': job, 'refresh': True}
# push it forward! create a new_job message.
self._lookup_job_status(job.job_id)
self._send_comm_message('new_job', {})
def get_job(self, job_id):
"""
Returns a Job with the given job_id.
Raises a ValueError if not found.
"""
if job_id in self._running_jobs:
return self._running_jobs[job_id]['job']
else:
raise ValueError('No job present with id {}'.format(job_id))
def _handle_comm_message(self, msg):
"""
Handles comm messages that come in from the other end of the KBaseJobs channel.
All messages (of any use) should have a 'request_type' property.
Possible types:
* all_status
refresh all jobs that are flagged to be looked up. Will send a
message back with all lookup status.
* job_status
refresh the single job given in the 'job_id' field. Sends a message
back with that single job's status, or an error message.
* stop_update_loop
stop the running refresh loop, if there's one going (might be
one more pass, depending on the thread state)
* start_update_loop
reinitialize the refresh loop.
* stop_job_update
flag the given job id (should be an accompanying 'job_id' field) that the front
end knows it's in a terminal state and should no longer have its status looked
up in the refresh cycle.
* start_job_update
remove the flag that gets set by stop_job_update (needs an accompanying 'job_id'
field)
"""
if 'request_type' in msg['content']['data']:
r_type = msg['content']['data']['request_type']
job_id = msg['content']['data'].get('job_id', None)
if job_id is not None and job_id not in self._running_jobs:
# If it's not a real job, just silently ignore the request.
# Maybe return an error? Yeah. Let's do that.
# self._send_comm_message('job_comm_error', {'job_id': job_id, 'message': 'Unknown job id', 'request_type': r_type})
# TODO: perhaps we should implement request/response here. All we really need is to thread a message
# id through
self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'request_type': r_type})
return
if r_type == 'all_status':
self._lookup_all_job_status(ignore_refresh_flag=True)
elif r_type == 'job_status':
if job_id is not None:
self._lookup_job_status(job_id)
elif r_type == 'stop_update_loop':
if self._lookup_timer is not None:
self._lookup_timer.cancel()
elif r_type == 'start_update_loop':
self._lookup_job_status_loop()
elif r_type == 'stop_job_update':
if job_id is not None:
self._running_jobs[job_id]['refresh'] = False
elif r_type == 'start_job_update':
if job_id is not None:
self._running_jobs[job_id]['refresh'] = True
elif r_type == 'delete_job':
if job_id is not None:
try:
self.delete_job(job_id)
except Exception as e:
self._send_comm_message('job_comm_error', {'message': str(e), 'request_type': r_type, 'job_id': job_id})
elif r_type == 'cancel_job':
if job_id is not None:
try:
self.cancel_job(job_id)
except Exception as e:
self._send_comm_message('job_comm_error', {'message': str(e), 'request_type': r_type, 'job_id': job_id})
elif r_type == 'job_logs':
if job_id is not None:
first_line = msg['content']['data'].get('first_line', 0)
num_lines = msg['content']['data'].get('num_lines', None)
self._get_job_logs(job_id, first_line=first_line, num_lines=num_lines)
else:
raise ValueError('Need a job id to fetch jobs!')
elif r_type == 'job_logs_latest':
if job_id is not None:
num_lines = msg['content']['data'].get('num_lines', None)
self._get_latest_job_logs(job_id, num_lines=num_lines)
else:
self._send_comm_message('job_comm_error', {'message': 'Unknown message', 'request_type': r_type})
raise ValueError('Unknown KBaseJobs message "{}"'.format(r_type))
def _get_latest_job_logs(self, job_id, num_lines=None):
job = self.get_job(job_id)
if job is None:
raise ValueError('job "{}" not found while fetching logs!'.format(job_id))
(max_lines, logs) = job.log()
first_line = 0
if num_lines is not None and max_lines > num_lines:
first_line = max_lines - num_lines
logs = logs[first_line:]
self._send_comm_message('job_logs', {'job_id': job_id, 'first': first_line, 'max_lines': max_lines, 'lines': logs, 'latest': True})
def _get_job_logs(self, job_id, first_line=0, num_lines=None):
job = self.get_job(job_id)
if job is None:
raise ValueError('job "{}" not found!'.format(job_id))
(max_lines, log_slice) = job.log(first_line=first_line, num_lines=num_lines)
self._send_comm_message('job_logs', {'job_id': job_id, 'first': first_line, 'max_lines': max_lines, 'lines': log_slice, 'latest': False})
def delete_job(self, job_id):
"""
If the job_id doesn't exist, raises a ValueError.
Attempts to delete a job, and cancels it first. If the job cannot be canceled,
raises an exception. If it can be canceled but not deleted, it gets canceled, then raises
an exception.
"""
if job_id is None:
raise ValueError('Job id required for deletion!')
if job_id not in self._running_jobs:
self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'source': 'delete_job'})
return
# raise ValueError('Attempting to cancel a Job that does not exist!')
try:
self.cancel_job(job_id)
except Exception as e:
raise
try:
clients.get('user_and_job_state').delete_job(job_id)
except Exception as e:
raise
del self._running_jobs[job_id]
self._send_comm_message('job_deleted', {'job_id': job_id})
def cancel_job(self, job_id):
"""
Cancels a running job, placing it in a canceled state.
Does NOT delete the job.
Raises an exception if the current user doesn't have permission to cancel the job.
"""
if job_id is None:
raise ValueError('Job id required for cancellation!')
if job_id not in self._running_jobs:
self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'source': 'cancel_job'})
return
try:
job = self.get_job(job_id)
state = job.state()
if state.get('canceled', 0) == 1 or state.get('finished', 0) == 1:
# It's already finished, don't try to cancel it again.
return
except Exception as e:
raise ValueError('Unable to get Job state')
# Stop updating the job status while we try to cancel.
# Also, set it to have a special state of 'canceling' while we're doing the cancel
is_refreshing = self._running_jobs[job_id].get('refresh', False)
self._running_jobs[job_id]['refresh'] = False
self._running_jobs[job_id]['canceling'] = True
try:
clients.get('job_service').cancel_job({'job_id': job_id})
except Exception as e:
new_e = transform_job_exception(e)
error = {
'error': 'Unable to get cancel job',
'message': getattr(new_e, 'message', 'Unknown reason'),
'code': getattr(new_e, 'code', -1),
'source': getattr(new_e, 'source', 'jobmanager'),
'name': getattr(new_e, 'name', type(e).__name__),
'request_type': 'cancel_job',
'job_id': job_id
}
self._send_comm_message('job_comm_error', error)
raise(e)
finally:
self._running_jobs[job_id]['refresh'] = is_refreshing
del self._running_jobs[job_id]['canceling']
#
# self._send_comm_message('job_canceled', {'job_id': job_id})
# Rather than a separate message, how about triggering a job-status message:
self._lookup_job_status(job_id)
def _send_comm_message(self, msg_type, content):
"""
Sends a ipykernel.Comm message to the KBaseJobs channel with the given msg_type
and content. These just get encoded into the message itself.
"""
msg = {
'msg_type': msg_type,
'content': content
}
if self._comm is None:
self._comm = Comm(target_name='KBaseJobs', data={})
self._comm.on_msg(self._handle_comm_message)
self._comm.send(msg) | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
from compiled_file_system import CompiledFileSystem
from manifest_data_source import ManifestDataSource
from object_store_creator import ObjectStoreCreator
from test_file_system import TestFileSystem
# In-memory fixture standing in for the docs server's file layout.
# _manifest_features.json declares which extension types may use each
# manifest property; manifest.json groups those properties into the
# required / only_one / recommended / optional categories that
# ManifestDataSource merges and filters per platform.
file_system = TestFileSystem({
  "_manifest_features.json": json.dumps({
    'req0': {
      'extension_types': ['platform_app', 'extension']
    },
    'req1': {
      # 'all' means every extension type may use this property.
      'extension_types': 'all'
    },
    'opt0': {
      'extension_types': ['extension']
    },
    'opt1': {
      'extension_types': ['hosted_app']
    },
    'free0': {
      'extension_types': ['platform_app']
    },
    'free1': {
      'extension_types': ['platform_app', 'hosted_app', 'extension']
    },
    'only0': {
      'extension_types': 'all'
    },
    'only1': {
      'extension_types': ['platform_app']
    },
    'rec0': {
      'extension_types': ['extension']
    },
    'rec1': {
      'extension_types': ['platform_app', 'extension']
    }
  }),
  "manifest.json": json.dumps({
    'required': [
      {
        'name': 'req0',
        'example': 'Extension'
      },
      {'name': 'req1'}
    ],
    'only_one': [
      {'name': 'only0'},
      {'name': 'only1'}
    ],
    'recommended': [
      {'name': 'rec0'},
      {'name': 'rec1'}
    ],
    'optional': [
      {'name': 'opt0'},
      {'name': 'opt1'}
    ]
  })
})
class ManifestDataSourceTest(unittest.TestCase):
  def testCreateManifestData(self):
    """Checks that the manifest data is filtered per platform.

    For each of 'extensions' and 'apps', properties from
    _manifest_features.json whose extension_types allow that platform should
    appear in the proper category, with 'is_last' set on the final optional
    entry. Properties named in manifest.json keep their metadata (e.g. the
    'example' field); features not listed there land in 'optional'.
    """
    expected_extensions = {
      'required': [
        {
          'name': 'req0',
          'example': 'Extension'
        },
        {'name': 'req1'}
      ],
      'recommended': [
        {'name': 'rec0'},
        {'name': 'rec1'}
      ],
      'only_one': [
        {'name': 'only0'}
      ],
      'optional': [
        {'name': 'free1'},
        {
          'name': 'opt0',
          'is_last': True
        }
      ]
    }
    expected_apps = {
      'required': [
        {
          'name': 'req0',
          # The example is rewritten for the apps platform.
          'example': 'Application'
        },
        {'name': 'req1'}
      ],
      'recommended': [
        {'name': 'rec1'}
      ],
      'only_one': [
        {'name': 'only0'},
        {'name': 'only1'}
      ],
      'optional': [
        {'name': 'free0'},
        {
          'name': 'free1',
          'is_last': True
        }
      ]
    }
    mds = ManifestDataSource(
        CompiledFileSystem.Factory(file_system, ObjectStoreCreator.ForTest()),
        file_system, 'manifest.json', '_manifest_features.json')
    self.assertEqual(expected_extensions, mds.get('extensions'))
    self.assertEqual(expected_apps, mds.get('apps'))
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
package libnetwork_test
import (
"context"
"encoding/json"
"fmt"
"net"
"net/http"
"net/http/httptest"
"net/netip"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
"sync"
"testing"
cerrdefs "github.com/containerd/errdefs"
"github.com/containerd/log"
"github.com/moby/moby/v2/daemon/libnetwork"
"github.com/moby/moby/v2/daemon/libnetwork/config"
"github.com/moby/moby/v2/daemon/libnetwork/driverapi"
"github.com/moby/moby/v2/daemon/libnetwork/drivers/bridge"
"github.com/moby/moby/v2/daemon/libnetwork/ipams/defaultipam"
"github.com/moby/moby/v2/daemon/libnetwork/ipams/null"
"github.com/moby/moby/v2/daemon/libnetwork/ipamutils"
"github.com/moby/moby/v2/daemon/libnetwork/netlabel"
"github.com/moby/moby/v2/daemon/libnetwork/nlwrap"
"github.com/moby/moby/v2/daemon/libnetwork/options"
"github.com/moby/moby/v2/daemon/libnetwork/osl"
"github.com/moby/moby/v2/daemon/libnetwork/types"
"github.com/moby/moby/v2/internal/testutil/netnsutils"
"github.com/moby/moby/v2/pkg/plugins"
"github.com/moby/sys/reexec"
"github.com/pkg/errors"
"github.com/vishvananda/netns"
"golang.org/x/sync/errgroup"
"gotest.tools/v3/assert"
is "gotest.tools/v3/assert/cmp"
)
// bridgeNetType is the driver name used throughout these tests when
// creating bridge networks.
const (
	bridgeNetType = "bridge"
)
// newController builds a libnetwork Controller backed by a per-test temp
// data dir, with IP forwarding enabled and the local-scope default address
// pools. The controller is stopped automatically via t.Cleanup.
func newController(t *testing.T) *libnetwork.Controller {
	t.Helper()
	c, err := libnetwork.New(
		context.Background(),
		config.OptionDataDir(t.TempDir()),
		config.OptionBridgeConfig(bridge.Configuration{
			EnableIPForwarding: true,
		}),
		config.OptionDefaultAddressPoolConfig(ipamutils.GetLocalScopeDefaultNetworks()),
	)
	assert.NilError(t, err)
	t.Cleanup(c.Stop)
	return c
}
// createTestNetwork creates a network with the given driver, name, and
// generic options, using the default IPAM driver with the supplied v4/v6
// address configurations.
func createTestNetwork(c *libnetwork.Controller, networkType, networkName string, netOption options.Generic, ipamV4Configs, ipamV6Configs []*libnetwork.IpamConf) (*libnetwork.Network, error) {
	opts := []libnetwork.NetworkOption{
		libnetwork.NetworkOptionGeneric(netOption),
		libnetwork.NetworkOptionIpam(defaultipam.DriverName, "", ipamV4Configs, ipamV6Configs, nil),
	}
	return c.NewNetwork(context.Background(), networkType, networkName, "", opts...)
}
// getEmptyGenericOption returns a generic-options map whose driver-specific
// data table is present but empty.
func getEmptyGenericOption() map[string]any {
	genericData := map[string]string{}
	return map[string]any{netlabel.GenericData: genericData}
}
// getPortMapping returns a fixed set of TCP/UDP port bindings used by the
// tests, including two bindings with host-port ranges.
func getPortMapping() []types.PortBinding {
	bindings := make([]types.PortBinding, 0, 5)
	bindings = append(bindings,
		types.PortBinding{Proto: types.TCP, Port: 230, HostPort: 23000},
		types.PortBinding{Proto: types.UDP, Port: 200, HostPort: 22000},
		types.PortBinding{Proto: types.TCP, Port: 120, HostPort: 12000},
		types.PortBinding{Proto: types.TCP, Port: 320, HostPort: 32000, HostPortEnd: 32999},
		types.PortBinding{Proto: types.UDP, Port: 420, HostPort: 42000, HostPortEnd: 42001},
	)
	return bindings
}
// TestNull exercises the full endpoint lifecycle (create, join, leave,
// delete) on a "null" network, and verifies the network itself cannot be
// deleted because it is a built-in special network.
func TestNull(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	cnt, err := controller.NewSandbox(context.Background(), "null_container",
		libnetwork.OptionHostname("test"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")))
	assert.NilError(t, err)
	network, err := createTestNetwork(controller, "null", "testnull", options.Generic{}, nil, nil)
	assert.NilError(t, err)
	ep, err := network.CreateEndpoint(context.Background(), "testep")
	assert.NilError(t, err)
	err = ep.Join(context.Background(), cnt)
	assert.NilError(t, err)
	err = ep.Leave(context.Background(), cnt)
	assert.NilError(t, err)
	err = ep.Delete(context.Background(), false)
	assert.NilError(t, err)
	err = cnt.Delete(context.Background())
	assert.NilError(t, err)
	// host type is special network. Cannot be removed.
	err = network.Delete()
	// TODO(thaJeztah): should this be an [errdefs.ErrInvalidParameter] ?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
	assert.Check(t, is.Error(err, `network of type "null" cannot be deleted`))
}
// TestUnknownDriver verifies that creating a network with a driver name that
// is neither built in nor registered as a plugin fails with a not-found error.
func TestUnknownDriver(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	_, err := createTestNetwork(controller, "unknowndriver", "testnetwork", options.Generic{}, nil, nil)
	// TODO(thaJeztah): should attempting to use a non-existing plugin/driver return an [errdefs.ErrInvalidParameter] ?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsNotFound))
	assert.Check(t, is.Error(err, "could not find plugin unknowndriver in v1 plugin registry: plugin not found"))
}
// TestNilRemoteDriver verifies that a network creation naming an unregistered
// remote driver surfaces the plugin-registry not-found error.
func TestNilRemoteDriver(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	_, err := controller.NewNetwork(context.Background(), "framerelay", "dummy", "",
		libnetwork.NetworkOptionGeneric(getEmptyGenericOption()))
	// TODO(thaJeztah): should attempting to use a non-existing plugin/driver return an [errdefs.InvalidParameter] ?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsNotFound))
	assert.Check(t, is.Error(err, "could not find plugin framerelay in v1 plugin registry: plugin not found"))
}
// TestNetworkName checks that an empty network name is rejected as invalid,
// and that a valid name is preserved on the created network.
func TestNetworkName(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}
	_, err := createTestNetwork(controller, bridgeNetType, "", netOption, nil, nil)
	assert.Check(t, is.ErrorType(err, cerrdefs.IsInvalidArgument), "Expected to fail with ErrInvalidName error")
	const networkName = "testnetwork"
	n, err := createTestNetwork(controller, bridgeNetType, networkName, netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	assert.Check(t, is.Equal(n.Name(), networkName))
}
// TestNetworkType checks that a created network reports the driver type it
// was created with.
func TestNetworkType(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	assert.Check(t, is.Equal(n.Type(), bridgeNetType))
}
// TestNetworkID checks that a created network is assigned a non-empty id.
func TestNetworkID(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	assert.Check(t, n.ID() != "", "Expected non-empty network id")
}
// TestDeleteNetworkWithActiveEndpoints verifies that deleting a network with
// a live endpoint fails with an ActiveEndpointsError, and succeeds once the
// endpoint has been removed.
func TestDeleteNetworkWithActiveEndpoints(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	option := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}
	network, err := createTestNetwork(controller, bridgeNetType, "testnetwork", option, nil, nil)
	assert.NilError(t, err)
	ep, err := network.CreateEndpoint(context.Background(), "testep")
	assert.NilError(t, err)
	err = network.Delete()
	var activeEndpointsError *libnetwork.ActiveEndpointsError
	assert.Check(t, errors.As(err, &activeEndpointsError))
	assert.Check(t, is.ErrorContains(err, "has active endpoints"))
	// TODO(thaJeztah): should this be [errdefs.ErrConflict] or [errdefs.ErrInvalidParameter]?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
	// Done testing. Now cleanup.
	err = ep.Delete(context.Background(), false)
	assert.NilError(t, err)
	err = network.Delete()
	assert.NilError(t, err)
}
// TestNetworkConfig covers config-only networks: a config network cannot
// inherit from another config network, cannot carry operator options
// (internal/attachable/ingress), cannot be combined with network-specific
// settings on a dependent network, and cannot be deleted while in use.
func TestNetworkConfig(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	// Verify config network cannot inherit another config network
	_, err := controller.NewNetwork(context.Background(), "bridge", "config_network0", "",
		libnetwork.NetworkOptionConfigOnly(),
		libnetwork.NetworkOptionConfigFrom("anotherConfigNw"),
	)
	// TODO(thaJeztah): should this be [errdefs.ErrInvalidParameter]?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
	assert.Check(t, is.Error(err, "a configuration network cannot depend on another configuration network"))
	// Create supported config network
	option := options.Generic{
		netlabel.GenericData: map[string]string{
			bridge.EnableICC: "false",
		},
	}
	ipamV4ConfList := []*libnetwork.IpamConf{{PreferredPool: "192.168.100.0/24", SubPool: "192.168.100.128/25", Gateway: "192.168.100.1"}}
	ipamV6ConfList := []*libnetwork.IpamConf{{PreferredPool: "2001:db8:abcd::/64", SubPool: "2001:db8:abcd::ef99/80", Gateway: "2001:db8:abcd::22"}}
	netOptions := []libnetwork.NetworkOption{
		libnetwork.NetworkOptionConfigOnly(),
		libnetwork.NetworkOptionEnableIPv4(true),
		libnetwork.NetworkOptionEnableIPv6(true),
		libnetwork.NetworkOptionGeneric(option),
		libnetwork.NetworkOptionIpam("default", "", ipamV4ConfList, ipamV6ConfList, nil),
	}
	configNetwork, err := controller.NewNetwork(context.Background(), bridgeNetType, "config_network0", "", netOptions...)
	assert.NilError(t, err)
	// Verify a config-only network cannot be created with network operator configurations
	for i, opt := range []libnetwork.NetworkOption{
		libnetwork.NetworkOptionInternalNetwork(),
		libnetwork.NetworkOptionAttachable(true),
		libnetwork.NetworkOptionIngress(true),
	} {
		t.Run(fmt.Sprintf("config-only-%d", i), func(t *testing.T) {
			_, err = controller.NewNetwork(context.Background(), bridgeNetType, "testBR", "",
				libnetwork.NetworkOptionConfigOnly(), opt)
			// TODO(thaJeztah): should this be [errdefs.ErrInvalidParameter]?
			assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
			assert.Check(t, is.Error(err, "configuration network can only contain network specific fields. Network operator fields like [ ingress | internal | attachable | scope ] are not supported."))
		})
	}
	// Verify a network cannot be created with both config-from and network specific configurations
	for i, opt := range []libnetwork.NetworkOption{
		libnetwork.NetworkOptionEnableIPv4(false),
		libnetwork.NetworkOptionEnableIPv6(true),
		libnetwork.NetworkOptionIpam("my-ipam", "", nil, nil, nil),
		libnetwork.NetworkOptionIpam("", "", ipamV4ConfList, nil, nil),
		libnetwork.NetworkOptionIpam("", "", nil, ipamV6ConfList, nil),
		libnetwork.NetworkOptionLabels(map[string]string{"number": "two"}),
		libnetwork.NetworkOptionDriverOpts(map[string]string{"com.docker.network.driver.mtu": "1600"}),
	} {
		t.Run(fmt.Sprintf("config-from-%d", i), func(t *testing.T) {
			_, err = controller.NewNetwork(context.Background(), bridgeNetType, "testBR", "",
				libnetwork.NetworkOptionConfigFrom("config_network0"), opt)
			// TODO(thaJeztah): should this be [errdefs.ErrInvalidParameter]?
			assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
			//nolint:dupword // ignore "Duplicate words (network) found (dupword)"
			// Doing a partial match here omn the error-string here, as this produces either;
			//
			// - user-specified configurations are not supported if the network depends on a configuration network.
			// - network driver options are not supported if the network depends on a configuration network.
			//
			// We can consider changing this to a proper test-table.
			assert.Check(t, is.ErrorContains(err, `not supported if the network depends on a configuration network`))
		})
	}
	// Create a valid network
	network, err := controller.NewNetwork(context.Background(), bridgeNetType, "testBR", "",
		libnetwork.NetworkOptionConfigFrom("config_network0"))
	assert.NilError(t, err)
	// Verify the config network cannot be removed
	err = configNetwork.Delete()
	// TODO(thaJeztah): should this be [errdefs.ErrConflict] or [errdefs.ErrInvalidParameter]?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
	assert.Check(t, is.Error(err, `configuration network "config_network0" is in use`))
	// Delete network
	err = network.Delete()
	assert.NilError(t, err)
	// Verify the config network can now be removed
	err = configNetwork.Delete()
	assert.NilError(t, err)
}
// TestUnknownNetwork verifies that deleting an already-deleted network fails
// with a not-found error.
func TestUnknownNetwork(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	option := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}
	network, err := createTestNetwork(controller, bridgeNetType, "testnetwork", option, nil, nil)
	assert.NilError(t, err)
	err = network.Delete()
	assert.NilError(t, err)
	// Second delete must fail: the network is gone.
	err = network.Delete()
	assert.Check(t, is.ErrorType(err, cerrdefs.IsNotFound))
	assert.Check(t, is.ErrorContains(err, "unknown network testnetwork id"))
}
// TestUnknownEndpoint verifies that creating an endpoint with an empty name
// is rejected as invalid, while a named endpoint can be created and deleted.
func TestUnknownEndpoint(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	option := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}
	ipamV4ConfList := []*libnetwork.IpamConf{{PreferredPool: "192.168.100.0/24"}}
	network, err := createTestNetwork(controller, bridgeNetType, "testnetwork", option, ipamV4ConfList, nil)
	assert.NilError(t, err)
	_, err = network.CreateEndpoint(context.Background(), "")
	assert.Check(t, is.ErrorType(err, cerrdefs.IsInvalidArgument), "Expected to fail with ErrInvalidName error")
	assert.Check(t, is.ErrorContains(err, "invalid name:"))
	ep, err := network.CreateEndpoint(context.Background(), "testep")
	assert.NilError(t, err)
	err = ep.Delete(context.Background(), false)
	assert.NilError(t, err)
	// Done testing. Now cleanup
	err = network.Delete()
	assert.NilError(t, err)
}
// TestNetworkEndpointsWalkers exercises the enumeration APIs: a network's
// Endpoints() listing and WalkEndpoints callback, and the controller's
// Networks() listing and WalkNetworks callback.
func TestNetworkEndpointsWalkers(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	// Create network 1 and add 2 endpoint: ep11, ep12
	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "network1",
		},
	}
	net1, err := createTestNetwork(controller, bridgeNetType, "network1", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, net1.Delete())
	}()
	ep11, err := net1.CreateEndpoint(context.Background(), "ep11")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep11.Delete(context.Background(), false))
	}()
	ep12, err := net1.CreateEndpoint(context.Background(), "ep12")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep12.Delete(context.Background(), false))
	}()
	// Test list methods on net1
	epList1 := net1.Endpoints()
	assert.Check(t, is.Len(epList1, 2), "Endpoints() returned wrong number of elements")
	// endpoint order is not guaranteed
	assert.Check(t, is.Contains(epList1, ep11), "Endpoints() did not return all the expected elements")
	assert.Check(t, is.Contains(epList1, ep12), "Endpoints() did not return all the expected elements")
	// Test Endpoint Walk method
	var epName string
	var epWanted *libnetwork.Endpoint
	// Walker stops (returns true) when it finds the endpoint named epName.
	wlk := func(ep *libnetwork.Endpoint) bool {
		if ep.Name() == epName {
			epWanted = ep
			return true
		}
		return false
	}
	// Look for ep1 on network1
	epName = "ep11"
	net1.WalkEndpoints(wlk)
	assert.Assert(t, epWanted != nil)
	assert.Assert(t, is.Equal(epWanted, ep11))
	ctx := t.Context()
	current := len(controller.Networks(ctx))
	// Create network 2
	netOption = options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "network2",
		},
	}
	net2, err := createTestNetwork(controller, bridgeNetType, "network2", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, net2.Delete())
	}()
	// Test Networks method
	assert.Assert(t, is.Len(controller.Networks(ctx), current+1))
	// Test Network Walk method
	var netName string
	var netWanted *libnetwork.Network
	// Walker stops (returns true) when it finds the network named netName.
	nwWlk := func(nw *libnetwork.Network) bool {
		if nw.Name() == netName {
			netWanted = nw
			return true
		}
		return false
	}
	// Look for network named "network1" and "network2"
	netName = "network1"
	controller.WalkNetworks(nwWlk)
	assert.Assert(t, netWanted != nil)
	assert.Check(t, is.Equal(net1.ID(), netWanted.ID()))
	netName = "network2"
	controller.WalkNetworks(nwWlk)
	assert.Assert(t, netWanted != nil)
	assert.Check(t, is.Equal(net2.ID(), netWanted.ID()))
}
// TestDuplicateEndpoint verifies that creating a second endpoint with the
// same name on the same network fails with a "permission denied" error and
// the expected message.
func TestDuplicateEndpoint(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	ep, err := n.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep.Delete(context.Background(), false))
	}()
	// Second CreateEndpoint with the duplicate name "ep1" — expected to fail.
	ep2, err := n.CreateEndpoint(context.Background(), "ep1")
	defer func() {
		// Cleanup ep2 as well, else network cleanup might fail for failure cases
		if ep2 != nil {
			assert.NilError(t, ep2.Delete(context.Background(), false))
		}
	}()
	// TODO(thaJeztah): should this be [errdefs.ErrConflict] or [errdefs.ErrInvalidParameter]?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
	assert.Check(t, is.Error(err, "endpoint with name ep1 already exists in network testnetwork"))
}
// TestControllerQuery exercises Controller.NetworkByName and
// Controller.NetworkByID: empty-argument errors, name-vs-ID lookup
// separation, and successful lookups for two bridge networks.
func TestControllerQuery(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	// Create network 1
	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "network1",
		},
	}
	net1, err := createTestNetwork(controller, bridgeNetType, "network1", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, net1.Delete())
	}()
	// Create network 2
	netOption = options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "network2",
		},
	}
	net2, err := createTestNetwork(controller, bridgeNetType, "network2", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, net2.Delete())
	}()
	// Empty name / empty ID must be rejected as invalid arguments.
	_, err = controller.NetworkByName("")
	assert.Check(t, is.ErrorType(err, cerrdefs.IsInvalidArgument))
	assert.Check(t, is.ErrorContains(err, "invalid name:"))
	_, err = controller.NetworkByID("")
	assert.Check(t, is.ErrorType(err, cerrdefs.IsInvalidArgument))
	assert.Check(t, is.Error(err, "invalid id: id is empty"))
	// Looking up by ID using a network *name* must not find anything.
	g, err := controller.NetworkByID("network1")
	assert.Check(t, is.ErrorType(err, cerrdefs.IsNotFound))
	assert.Check(t, is.Error(err, "network network1 not found"))
	assert.Check(t, is.Nil(g), "search network using name as ID should not yield a result")
	g, err = controller.NetworkByName("network1")
	assert.NilError(t, err)
	assert.Assert(t, g != nil, "NetworkByName() did not find the network")
	assert.Assert(t, is.Equal(g, net1), "NetworkByName() returned the wrong network")
	g, err = controller.NetworkByID(net1.ID())
	assert.NilError(t, err)
	assert.Assert(t, is.Equal(net1.ID(), g.ID()), "NetworkByID() returned unexpected element: %v", g)
	g, err = controller.NetworkByName("network2")
	assert.NilError(t, err)
	assert.Check(t, g != nil, "NetworkByName() did not find the network")
	assert.Check(t, is.Equal(g, net2), "NetworkByName() returned the wrong network")
	g, err = controller.NetworkByID(net2.ID())
	assert.NilError(t, err)
	assert.Check(t, is.Equal(g.ID(), net2.ID()), "NetworkByID() returned unexpected element: %v", g)
}
// TestNetworkQuery exercises Network.EndpointByName: successful lookup,
// empty-name rejection, and not-found behavior.
func TestNetworkQuery(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	// Create network 1 and add 2 endpoint: ep11, ep12
	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "network1",
		},
	}
	net1, err := createTestNetwork(controller, bridgeNetType, "network1", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, net1.Delete())
	}()
	ep11, err := net1.CreateEndpoint(context.Background(), "ep11")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep11.Delete(context.Background(), false))
	}()
	ep12, err := net1.CreateEndpoint(context.Background(), "ep12")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep12.Delete(context.Background(), false))
	}()
	e, err := net1.EndpointByName("ep11")
	assert.NilError(t, err)
	assert.Check(t, is.Equal(e, ep11), "EndpointByName() returned the wrong endpoint")
	// Empty name must be rejected as an invalid argument.
	_, err = net1.EndpointByName("")
	assert.Check(t, is.ErrorType(err, cerrdefs.IsInvalidArgument))
	assert.Check(t, is.ErrorContains(err, "invalid name:"))
	// Unknown endpoint name must return a not-found error and a nil endpoint.
	e, err = net1.EndpointByName("IamNotAnEndpoint")
	assert.Check(t, is.ErrorType(err, cerrdefs.IsNotFound))
	assert.Check(t, is.Error(err, "endpoint IamNotAnEndpoint not found"))
	assert.Check(t, is.Nil(e), "EndpointByName() returned endpoint on error")
}
// containerID is the sandbox (container) ID shared by many tests in this file.
const containerID = "valid_c"
// TestEndpointDeleteWithActiveContainer verifies that an endpoint joined to a
// sandbox cannot be deleted (non-forced) while the container is still active:
// Delete must return an ActiveContainerError.
func TestEndpointDeleteWithActiveContainer(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	n2, err := createTestNetwork(controller, bridgeNetType, "testnetwork2", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork2",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n2.Delete())
	}()
	ep, err := n.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	defer func() {
		// This deferred Delete runs after the deferred Leave below, so by
		// then the endpoint no longer has an active container.
		assert.Check(t, ep.Delete(context.Background(), false))
	}()
	cnt, err := controller.NewSandbox(context.Background(), containerID,
		libnetwork.OptionHostname("test"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")))
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, cnt.Delete(context.Background()))
	}()
	err = ep.Join(context.Background(), cnt)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep.Leave(context.Background(), cnt))
	}()
	// While joined, a non-forced Delete must fail with ActiveContainerError.
	err = ep.Delete(context.Background(), false)
	var activeContainerError *libnetwork.ActiveContainerError
	assert.Check(t, errors.As(err, &activeContainerError))
	assert.Check(t, is.ErrorContains(err, "has active containers"))
	// TODO(thaJeztah): should this be [errdefs.ErrConflict] or [errdefs.ErrInvalidParameter]?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
}
// TestEndpointMultipleJoins verifies that an endpoint already joined to one
// sandbox rejects a Join from a second sandbox.
func TestEndpointMultipleJoins(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	n, err := createTestNetwork(controller, bridgeNetType, "testmultiple", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testmultiple",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	ep, err := n.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep.Delete(context.Background(), false))
	}()
	sbx1, err := controller.NewSandbox(context.Background(), containerID,
		libnetwork.OptionHostname("test"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")),
	)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sbx1.Delete(context.Background()))
	}()
	sbx2, err := controller.NewSandbox(context.Background(), "c2")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sbx2.Delete(context.Background()))
	}()
	err = ep.Join(context.Background(), sbx1)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep.Leave(context.Background(), sbx1))
	}()
	// Second Join from a different sandbox must fail.
	err = ep.Join(context.Background(), sbx2)
	// TODO(thaJeztah): should this be [errdefs.ErrConflict] or [errdefs.ErrInvalidParameter]?
	assert.Check(t, is.ErrorType(err, cerrdefs.IsPermissionDenied))
	assert.Check(t, is.Error(err, "another container is attached to the same network endpoint"))
}
// TestLeaveAll verifies that deleting a sandbox detaches it from all joined
// endpoints: after cnt.Delete() both networks can be deleted cleanly even
// though ep1/ep2 were never explicitly left.
func TestLeaveAll(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		// If this goes through, it means cnt.Delete() effectively detached from all the endpoints
		assert.Check(t, n.Delete())
	}()
	n2, err := createTestNetwork(controller, bridgeNetType, "testnetwork2", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork2",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n2.Delete())
	}()
	// No deferred endpoint cleanup on purpose: sandbox deletion must do it.
	ep1, err := n.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	ep2, err := n2.CreateEndpoint(context.Background(), "ep2")
	assert.NilError(t, err)
	cnt, err := controller.NewSandbox(context.Background(), "leaveall")
	assert.NilError(t, err)
	err = ep1.Join(context.Background(), cnt)
	assert.NilError(t, err, "Failed to join ep1")
	err = ep2.Join(context.Background(), cnt)
	assert.NilError(t, err, "Failed to join ep2")
	err = cnt.Delete(context.Background())
	assert.NilError(t, err)
}
// TestContainerInvalidLeave verifies the error cases of Endpoint.Leave:
// leaving an endpoint that was never joined, leaving with a nil sandbox,
// and leaving with a zero-value (invalid) sandbox.
func TestContainerInvalidLeave(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	ep, err := n.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep.Delete(context.Background(), false))
	}()
	cnt, err := controller.NewSandbox(context.Background(), containerID,
		libnetwork.OptionHostname("test"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")))
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, cnt.Delete(context.Background()))
	}()
	// Leave without a prior Join must fail.
	err = ep.Leave(context.Background(), cnt)
	assert.Assert(t, is.ErrorType(err, cerrdefs.IsPermissionDenied), "Expected to fail leave from an endpoint which has no active join")
	assert.Check(t, is.Error(err, "cannot leave endpoint with no attached sandbox"))
	err = ep.Leave(context.Background(), nil)
	assert.Assert(t, is.ErrorType(err, cerrdefs.IsInvalidArgument), "Expected to fail leave with a nil Sandbox")
	// FIXME(thaJeztah): this error includes the raw data of the sandbox (as `<nil>`), which is not very informative
	assert.Check(t, is.Error(err, "invalid Sandbox passed to endpoint leave: <nil>"))
	// A zero-value Sandbox (not created through the controller) is invalid.
	fsbx := &libnetwork.Sandbox{}
	err = ep.Leave(context.Background(), fsbx)
	assert.Assert(t, is.ErrorType(err, cerrdefs.IsInvalidArgument), "Expected to fail leave with invalid Sandbox")
	//nolint:dupword // Ignore "Duplicate words (map[]) found (dupword)"
	// FIXME(thaJeztah): this error includes the raw data of the sandbox, which is not very human-readable or informative;
	// invalid Sandbox passed to endpoint leave: &{ {{ []} { [] [] []} map[] false false []} [] <nil> <nil> <nil> {{{} 0} {0 0}} [] map[] map[] <nil> 0 false false false false false [] {0 0} {0 0}}
	assert.Check(t, is.ErrorContains(err, "invalid Sandbox passed to endpoint leave"))
}
// TestEndpointUpdateParent joins two endpoints on the same network to two
// different sandboxes (with different hostname/hosts-path/extra-host
// settings) and checks that both joins succeed.
func TestEndpointUpdateParent(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	ep1, err := n.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	ep2, err := n.CreateEndpoint(context.Background(), "ep2")
	assert.NilError(t, err)
	sbx1, err := controller.NewSandbox(context.Background(), containerID,
		libnetwork.OptionHostname("test"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")))
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sbx1.Delete(context.Background()))
	}()
	sbx2, err := controller.NewSandbox(context.Background(), "c2",
		libnetwork.OptionHostname("test2"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionHostsPath("/var/lib/docker/test_network/container2/hosts"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.2")))
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sbx2.Delete(context.Background()))
	}()
	err = ep1.Join(context.Background(), sbx1)
	assert.NilError(t, err)
	err = ep2.Join(context.Background(), sbx2)
	assert.NilError(t, err)
}
// TestInvalidRemoteDriver starts a fake plugin HTTP server that advertises an
// unknown ("InvalidDriver") implementation, registers it via a spec file, and
// verifies that creating a network with that driver fails with
// plugins.ErrNotImplements.
func TestInvalidRemoteDriver(t *testing.T) {
	mux := http.NewServeMux()
	server := httptest.NewServer(mux)
	defer server.Close()
	mux.HandleFunc("/Plugin.Activate", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", plugins.VersionMimetype)
		_, _ = fmt.Fprintln(w, `{"Implements": ["InvalidDriver"]}`)
	})
	// Register the fake plugin by dropping a .spec file pointing at the server.
	err := os.MkdirAll(specPath, 0o755)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, os.RemoveAll(specPath))
	}()
	err = os.WriteFile(filepath.Join(specPath, "invalid-network-driver.spec"), []byte(server.URL), 0o644)
	assert.NilError(t, err)
	ctrlr, err := libnetwork.New(context.Background(), config.OptionDataDir(t.TempDir()))
	assert.NilError(t, err)
	defer ctrlr.Stop()
	_, err = ctrlr.NewNetwork(context.Background(), "invalid-network-driver", "dummy", "",
		libnetwork.NetworkOptionGeneric(getEmptyGenericOption()))
	assert.Check(t, is.ErrorIs(err, plugins.ErrNotImplements))
}
// TestValidRemoteDriver starts a fake plugin HTTP server implementing the
// network-driver endpoint (activate, capabilities, create/delete network),
// registers it via a spec file, and verifies a network can be created with
// the remote driver.
func TestValidRemoteDriver(t *testing.T) {
	mux := http.NewServeMux()
	server := httptest.NewServer(mux)
	defer server.Close()
	mux.HandleFunc("/Plugin.Activate", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", plugins.VersionMimetype)
		_, _ = fmt.Fprintf(w, `{"Implements": ["%s"]}`, driverapi.NetworkPluginEndpointType)
	})
	mux.HandleFunc(fmt.Sprintf("/%s.GetCapabilities", driverapi.NetworkPluginEndpointType), func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", plugins.VersionMimetype)
		_, _ = fmt.Fprintf(w, `{"Scope":"local"}`)
	})
	mux.HandleFunc(fmt.Sprintf("/%s.CreateNetwork", driverapi.NetworkPluginEndpointType), func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", plugins.VersionMimetype)
		_, _ = fmt.Fprintf(w, "null")
	})
	mux.HandleFunc(fmt.Sprintf("/%s.DeleteNetwork", driverapi.NetworkPluginEndpointType), func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", plugins.VersionMimetype)
		_, _ = fmt.Fprintf(w, "null")
	})
	// Register the fake plugin by dropping a .spec file pointing at the server.
	err := os.MkdirAll(specPath, 0o755)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, os.RemoveAll(specPath))
	}()
	err = os.WriteFile(filepath.Join(specPath, "valid-network-driver.spec"), []byte(server.URL), 0o644)
	assert.NilError(t, err)
	controller := newController(t)
	n, err := controller.NewNetwork(context.Background(), "valid-network-driver", "dummy", "",
		libnetwork.NetworkOptionGeneric(getEmptyGenericOption()))
	if err != nil {
		// Only fail if we could not find the plugin driver
		if cerrdefs.IsNotFound(err) {
			t.Fatal(err)
		}
		return
	}
	defer func() {
		assert.Check(t, n.Delete())
	}()
}
// makeTesthostNetwork creates a network named "testhost" using the "host"
// driver with no extra options, failing the test immediately on error.
func makeTesthostNetwork(t *testing.T, c *libnetwork.Controller) *libnetwork.Network {
	t.Helper()
	nw, err := createTestNetwork(c, "host", "testhost", options.Generic{}, nil, nil)
	assert.NilError(t, err)
	return nw
}
// makeTestIPv6Network creates a dual-stack (IPv4+IPv6) bridge network named
// "testnetwork" with a fixed IPv6 pool and gateway, failing the test on error.
func makeTestIPv6Network(t *testing.T, c *libnetwork.Controller) *libnetwork.Network {
	t.Helper()
	v6Conf := []*libnetwork.IpamConf{{
		PreferredPool: "fd81:fb6e:38ba:abcd::/64",
		Gateway:       "fd81:fb6e:38ba:abcd::9",
	}}
	nw, err := createTestNetwork(c, "bridge", "testnetwork", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.EnableIPv6: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}, nil, v6Conf)
	assert.NilError(t, err)
	return nw
}
// TestHost exercises the "host" network driver: multiple sandboxes using the
// default (host) sandbox can create, join, leave, and delete endpoints on the
// same host network.
func TestHost(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	sbx1, err := controller.NewSandbox(context.Background(), "host_c1",
		libnetwork.OptionHostname("test1"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")),
		libnetwork.OptionUseDefaultSandbox())
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sbx1.Delete(context.Background()))
	}()
	sbx2, err := controller.NewSandbox(context.Background(), "host_c2",
		libnetwork.OptionHostname("test2"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")),
		libnetwork.OptionUseDefaultSandbox())
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sbx2.Delete(context.Background()))
	}()
	network := makeTesthostNetwork(t, controller)
	ep1, err := network.CreateEndpoint(context.Background(), "testep1")
	assert.NilError(t, err)
	err = ep1.Join(context.Background(), sbx1)
	assert.NilError(t, err)
	ep2, err := network.CreateEndpoint(context.Background(), "testep2")
	assert.NilError(t, err)
	err = ep2.Join(context.Background(), sbx2)
	assert.NilError(t, err)
	err = ep1.Leave(context.Background(), sbx1)
	assert.NilError(t, err)
	err = ep2.Leave(context.Background(), sbx2)
	assert.NilError(t, err)
	err = ep1.Delete(context.Background(), false)
	assert.NilError(t, err)
	err = ep2.Delete(context.Background(), false)
	assert.NilError(t, err)
	// Try to create another host endpoint and join/leave that.
	cnt3, err := controller.NewSandbox(context.Background(), "host_c3",
		libnetwork.OptionHostname("test3"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")),
		libnetwork.OptionUseDefaultSandbox())
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, cnt3.Delete(context.Background()))
	}()
	ep3, err := network.CreateEndpoint(context.Background(), "testep3")
	assert.NilError(t, err)
	// NOTE(review): ep3 joins sbx2, not the freshly-created cnt3 — since all
	// three sandboxes share the default (host) sandbox this may be deliberate,
	// but confirm it is not a typo for cnt3.
	err = ep3.Join(context.Background(), sbx2)
	assert.NilError(t, err)
	err = ep3.Leave(context.Background(), sbx2)
	assert.NilError(t, err)
	err = ep3.Delete(context.Background(), false)
	assert.NilError(t, err)
}
// checkSandbox opens the network namespace of the sandbox attached to info
// and verifies that both eth0 and eth1 interfaces exist inside it.
func checkSandbox(t *testing.T, info libnetwork.EndpointInfo) {
	nsPath := info.Sandbox().Key()
	nsHandle, err := netns.GetFromPath(nsPath)
	assert.NilError(t, err, "Failed to get network namespace path %q", nsPath)
	defer func() {
		assert.Check(t, nsHandle.Close())
	}()

	// Netlink handle scoped to the sandbox's namespace.
	nlh, err := nlwrap.NewHandleAt(nsHandle)
	assert.NilError(t, err)

	_, err = nlh.LinkByName("eth0")
	assert.NilError(t, err, "Could not find the interface eth0 inside the sandbox")
	_, err = nlh.LinkByName("eth1")
	assert.NilError(t, err, "Could not find the interface eth1 inside the sandbox")
}
// TestEndpointJoin covers the endpoint Join lifecycle on a dual-stack bridge
// network: endpoint info before/after joining, invalid-sandbox errors,
// sandbox statistics, and joining a second network from the same sandbox.
func TestEndpointJoin(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	// Create network 1 and add 2 endpoint: ep11, ep12
	netOption := options.Generic{
		netlabel.GenericData: map[string]string{
			bridge.BridgeName:         "testnetwork1",
			bridge.EnableICC:          "true",
			bridge.EnableIPMasquerade: "true",
		},
	}
	ipamV6ConfList := []*libnetwork.IpamConf{{PreferredPool: "fe90::/64", Gateway: "fe90::22"}}
	n1, err := controller.NewNetwork(context.Background(), bridgeNetType, "testnetwork1", "",
		libnetwork.NetworkOptionGeneric(netOption),
		libnetwork.NetworkOptionEnableIPv4(true),
		libnetwork.NetworkOptionEnableIPv6(true),
		libnetwork.NetworkOptionIpam(defaultipam.DriverName, "", nil, ipamV6ConfList, nil),
	)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n1.Delete())
	}()
	ep1, err := n1.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep1.Delete(context.Background(), false))
	}()
	// Validate if ep.Info() only gives me IP address info and not names and gateway during CreateEndpoint()
	info := ep1.Info()
	iface := info.Iface()
	if iface.Address() != nil {
		assert.Check(t, iface.Address().IP.To4() != nil, "Invalid IP address returned: %v", iface.Address())
	}
	if iface.AddressIPv6() != nil {
		// Should be nil if it's an IPv6 address;https://github.com/moby/moby/pull/49329#discussion_r1925981233
		assert.Check(t, iface.AddressIPv6().IP.To4() == nil, "Invalid IPv6 address returned: %v", iface.AddressIPv6())
	}
	assert.Check(t, is.Len(info.Gateway(), 0), "Expected empty gateway for an empty endpoint. Instead found a gateway: %v", info.Gateway())
	assert.Check(t, is.Len(info.GatewayIPv6(), 0), "Expected empty gateway for an empty ipv6 endpoint. Instead found a gateway: %v", info.GatewayIPv6())
	assert.Check(t, is.Nil(info.Sandbox()), "Expected an empty sandbox key for an empty endpoint")
	// test invalid joins
	err = ep1.Join(context.Background(), nil)
	assert.Assert(t, is.ErrorType(err, cerrdefs.IsInvalidArgument), "Expected to fail join with nil Sandbox")
	// FIXME(thaJeztah): this error includes the raw data of the sandbox (as `<nil>`), which is not very informative
	assert.Check(t, is.Error(err, "invalid Sandbox passed to endpoint join: <nil>"))
	// A zero-value Sandbox (not created through the controller) is invalid.
	fsbx := &libnetwork.Sandbox{}
	err = ep1.Join(context.Background(), fsbx)
	assert.Assert(t, is.ErrorType(err, cerrdefs.IsInvalidArgument), "Expected to fail join with invalid Sandbox")
	//nolint:dupword // ignore "Duplicate words (map[]) found (dupword)"
	// FIXME(thaJeztah): this error includes the raw data of the sandbox, which is not very human-readable or informative;
	// invalid Sandbox passed to endpoint join: &{ {{ []} { [] [] []} map[] false false []} [] <nil> <nil> <nil> {{{} 0} {0 0}} [] map[] map[] <nil> 0 false false false false false [] {0 0} {0 0}}
	assert.Check(t, is.ErrorContains(err, "invalid Sandbox passed to endpoint join"))
	sb, err := controller.NewSandbox(context.Background(), containerID,
		libnetwork.OptionHostname("test"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")))
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sb.Delete(context.Background()))
	}()
	err = ep1.Join(context.Background(), sb)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep1.Leave(context.Background(), sb))
	}()
	// Validate if ep.Info() only gives valid gateway and sandbox key after has container has joined.
	info = ep1.Info()
	assert.Check(t, len(info.Gateway()) > 0, "Expected a valid gateway for a joined endpoint")
	assert.Check(t, len(info.GatewayIPv6()) > 0, "Expected a valid ipv6 gateway for a joined endpoint")
	assert.Check(t, info.Sandbox() != nil, "Expected an non-empty sandbox key for a joined endpoint")
	// Check endpoint provided container information
	assert.Check(t, is.Equal(sb.Key(), ep1.Info().Sandbox().Key()), "Endpoint Info returned unexpected sandbox key: %s", sb.Key())
	// Attempt retrieval of endpoint interfaces statistics
	stats, err := sb.Statistics()
	assert.NilError(t, err)
	_, ok := stats["eth0"]
	assert.Assert(t, ok, "Did not find eth0 statistics")
	// Now test the container joining another network
	n2, err := createTestNetwork(controller, bridgeNetType, "testnetwork2",
		options.Generic{
			netlabel.EnableIPv4: true,
			netlabel.GenericData: map[string]string{
				bridge.BridgeName: "testnetwork2",
			},
		}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n2.Delete())
	}()
	ep2, err := n2.CreateEndpoint(context.Background(), "ep2")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep2.Delete(context.Background(), false))
	}()
	err = ep2.Join(context.Background(), sb)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep2.Leave(context.Background(), sb))
	}()
	assert.Check(t, is.Equal(ep1.Info().Sandbox().Key(), ep2.Info().Sandbox().Key()), "ep1 and ep2 returned different container sandbox key")
	// Both joins landed in the same sandbox; eth0 and eth1 must exist there.
	checkSandbox(t, info)
}
// TestExternalKey runs the external sandbox-key flow in-process
// (reexec=false; the reexec variant drives the "libnetwork-setkey" binary).
func TestExternalKey(t *testing.T) {
	externalKeyTest(t, false)
}
// externalKeyTest exercises sandboxes created with OptionUseExternalKey:
// setting a non-existent key must fail, setting a valid osl sandbox key must
// succeed, and endpoints can be joined both before and after SetKey. When
// reexec is true the key is set via the "libnetwork-setkey" reexec binary
// instead of calling Sandbox.SetKey directly.
func externalKeyTest(t *testing.T, reexec bool) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)
	n, err := createTestNetwork(controller, bridgeNetType, "testnetwork", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n.Delete())
	}()
	n2, err := createTestNetwork(controller, bridgeNetType, "testnetwork2", options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "testnetwork2",
		},
	}, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, n2.Delete())
	}()
	ep, err := n.CreateEndpoint(context.Background(), "ep1")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep.Delete(context.Background(), false))
	}()
	ep2, err := n2.CreateEndpoint(context.Background(), "ep2")
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep2.Delete(context.Background(), false))
	}()
	cnt, err := controller.NewSandbox(context.Background(), containerID,
		libnetwork.OptionHostname("test"),
		libnetwork.OptionDomainname("example.com"),
		libnetwork.OptionUseExternalKey(),
		libnetwork.OptionExtraHost("web", netip.MustParseAddr("192.168.0.1")))
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, cnt.Delete(context.Background()))
	}()
	// Join endpoint to sandbox before SetKey
	err = ep.Join(context.Background(), cnt)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep.Leave(context.Background(), cnt))
	}()
	sbox := ep.Info().Sandbox()
	assert.Assert(t, sbox != nil, "Expected to have a valid Sandbox")
	if reexec {
		err := reexecSetKey("this-must-fail", containerID, controller.ID())
		if err == nil {
			t.Fatalf("libnetwork-setkey must fail if the corresponding namespace is not created")
		}
	} else {
		// Setting an non-existing key (namespace) must fail
		if err := sbox.SetKey(context.Background(), "this-must-fail"); err == nil {
			t.Fatalf("Setkey must fail if the corresponding namespace is not created")
		}
	}
	// Create a new OS sandbox using the osl API before using it in SetKey
	extOsBox, err := osl.NewSandbox("ValidKey", true, false)
	assert.NilError(t, err, "Failed to create new osl sandbox")
	defer func() {
		if err := extOsBox.Destroy(); err != nil {
			log.G(t.Context()).Warnf("Failed to remove os sandbox: %v", err)
		}
	}()
	if reexec {
		err = reexecSetKey("ValidKey", containerID, controller.ID())
		assert.NilError(t, err, "libnetwork-setkey failed")
	} else {
		err = sbox.SetKey(context.Background(), "ValidKey")
		assert.NilError(t, err, "setkey failed")
	}
	// Join endpoint to sandbox after SetKey
	err = ep2.Join(context.Background(), sbox)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, ep2.Leave(context.Background(), sbox))
	}()
	assert.Assert(t, is.Equal(ep.Info().Sandbox().Key(), ep2.Info().Sandbox().Key()), "ep1 and ep2 returned different container sandbox key")
	checkSandbox(t, ep.Info())
}
// reexecSetKey invokes the "libnetwork-setkey" reexec entrypoint for the
// given container and controller, feeding it a libcontainer-style state blob
// on stdin whose NEWNET namespace path is set to key.
func reexecSetKey(key string, containerID string, controllerID string) error {
	// Minimal libcontainer state: only the namespace-paths map is consumed.
	state := struct {
		NamespacePaths map[string]string
	}{
		NamespacePaths: map[string]string{"NEWNET": key},
	}
	payload, err := json.Marshal(state)
	if err != nil {
		return err
	}
	cmd := &exec.Cmd{
		Path:   reexec.Self(),
		Args:   []string{"libnetwork-setkey", containerID, controllerID},
		Stdin:  strings.NewReader(string(payload)),
		Stdout: os.Stdout,
		Stderr: os.Stderr,
	}
	return cmd.Run()
}
// TestResolvConf verifies the resolv.conf generated for a sandbox: for an
// IPv6-enabled bridge network the embedded resolver (127.0.0.11) is used,
// while for the host network the original nameservers pass through.
func TestResolvConf(t *testing.T) {
	tmpDir := t.TempDir()
	originResolvConfPath := filepath.Join(tmpDir, "origin_resolv.conf")
	resolvConfPath := filepath.Join(tmpDir, "resolv.conf")
	// Strip comments that end in a newline (a comment with no newline at the end
	// of the file will not be stripped).
	stripCommentsRE := regexp.MustCompile(`(?m)^#.*\n`)
	testcases := []struct {
		name             string
		makeNet          func(t *testing.T, c *libnetwork.Controller) *libnetwork.Network
		delNet           bool
		epOpts           []libnetwork.EndpointOption
		sbOpts           []libnetwork.SandboxOption
		originResolvConf string // content written to the "origin" resolv.conf the sandbox reads
		expResolvConf    string // expected generated content, after comment stripping/trimming
	}{
		{
			name:             "IPv6 network",
			makeNet:          makeTestIPv6Network,
			delNet:           true,
			originResolvConf: "search pommesfrites.fr\nnameserver 12.34.56.78\nnameserver 2001:4860:4860::8888\n",
			expResolvConf:    "nameserver 127.0.0.11\nsearch pommesfrites.fr\noptions ndots:0",
		},
		{
			name:             "host network",
			makeNet:          makeTesthostNetwork,
			epOpts:           []libnetwork.EndpointOption{libnetwork.CreateOptionDisableResolution()},
			sbOpts:           []libnetwork.SandboxOption{libnetwork.OptionUseDefaultSandbox()},
			originResolvConf: "search localhost.net\nnameserver 127.0.0.1\nnameserver 2001:4860:4860::8888\n",
			expResolvConf:    "nameserver 127.0.0.1\nnameserver 2001:4860:4860::8888\nsearch localhost.net",
		},
	}
	for _, tc := range testcases {
		t.Run(tc.name, func(t *testing.T) {
			defer netnsutils.SetupTestOSContext(t)()
			c := newController(t)
			err := os.WriteFile(originResolvConfPath, []byte(tc.originResolvConf), 0o644)
			assert.NilError(t, err)
			n := tc.makeNet(t, c)
			if tc.delNet {
				defer func() {
					assert.Check(t, n.Delete())
				}()
			}
			sbOpts := append(tc.sbOpts,
				libnetwork.OptionResolvConfPath(resolvConfPath),
				libnetwork.OptionOriginResolvConfPath(originResolvConfPath),
			)
			sb, err := c.NewSandbox(context.Background(), containerID, sbOpts...)
			assert.NilError(t, err)
			defer func() {
				assert.Check(t, sb.Delete(context.Background()))
			}()
			ep, err := n.CreateEndpoint(context.Background(), "ep", tc.epOpts...)
			assert.NilError(t, err)
			defer func() {
				assert.Check(t, ep.Delete(context.Background(), false))
			}()
			err = ep.Join(context.Background(), sb)
			assert.NilError(t, err)
			defer func() {
				assert.Check(t, ep.Leave(context.Background(), sb))
			}()
			// The generated file must be world-readable (0644).
			finfo, err := os.Stat(resolvConfPath)
			assert.NilError(t, err)
			expFMode := (os.FileMode)(0o644)
			assert.Check(t, is.Equal(finfo.Mode().String(), expFMode.String()))
			content, err := os.ReadFile(resolvConfPath)
			assert.NilError(t, err)
			// Compare content ignoring generated comment lines and surrounding whitespace.
			actual := stripCommentsRE.ReplaceAllString(string(content), "")
			actual = strings.TrimSpace(actual)
			assert.Check(t, is.Equal(actual, tc.expResolvConf))
		})
	}
}
// parallelTester holds the shared state for the concurrent join/leave
// goroutines spawned by TestParallel.
type parallelTester struct {
	osctx      *netnsutils.OSContext  // test OS/netns context each goroutine must enter
	controller *libnetwork.Controller // controller used to look up sandboxes
	net1, net2 *libnetwork.Network    // thread 1 uses net1; all other threads use net2
	iterCnt    int                    // join/leave iterations per goroutine
}
// Do runs one goroutine's worth of work for TestParallel: it enters the test
// OS context, looks up its endpoint ("pep<thrNumber>") and sandbox
// ("<thrNumber>race"), performs iterCnt join/leave cycles (tolerating
// permission-denied races with other goroutines), then deletes the sandbox
// and the endpoint.
func (pt parallelTester) Do(t *testing.T, thrNumber int) error {
	teardown, err := pt.osctx.Set()
	if err != nil {
		return err
	}
	defer teardown(t)

	var ep *libnetwork.Endpoint
	if thrNumber == 1 {
		ep, err = pt.net1.EndpointByName(fmt.Sprintf("pep%d", thrNumber))
	} else {
		ep, err = pt.net2.EndpointByName(fmt.Sprintf("pep%d", thrNumber))
	}

	if err != nil {
		return errors.WithStack(err)
	}
	if ep == nil {
		return errors.New("got nil ep with no error")
	}

	cid := fmt.Sprintf("%drace", thrNumber)
	sb, err := pt.controller.GetSandbox(cid)
	if err != nil {
		return err
	}

	for i := 0; i < pt.iterCnt; i++ {
		// Permission-denied errors are expected when another goroutine holds
		// the endpoint; everything else is a real failure.
		if err := ep.Join(context.Background(), sb); err != nil {
			if !cerrdefs.IsPermissionDenied(err) {
				return errors.Wrapf(err, "thread %d", thrNumber)
			}
		}
		if err := ep.Leave(context.Background(), sb); err != nil {
			if !cerrdefs.IsPermissionDenied(err) {
				return errors.Wrapf(err, "thread %d", thrNumber)
			}
		}
	}

	if err := errors.WithStack(sb.Delete(context.Background())); err != nil {
		return err
	}
	return errors.WithStack(ep.Delete(context.Background(), false))
}
// TestParallel runs several goroutines that concurrently Join/Leave endpoints
// on shared networks to flush out races in the join/leave code paths.
func TestParallel(t *testing.T) {
	const (
		first      = 1
		last       = 3
		numThreads = last - first + 1
		iterCnt    = 25
	)

	osctx := netnsutils.SetupTestOSContextEx(t)
	defer osctx.Cleanup(t)

	controller := newController(t)

	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName: "network",
		},
	}

	net1 := makeTesthostNetwork(t, controller)
	defer func() {
		// Check the cleanup error instead of a bare `defer net1.Delete()`,
		// consistent with the other tests in this file; a failed network
		// delete here would otherwise be silently ignored.
		assert.Check(t, net1.Delete())
	}()
	net2, err := createTestNetwork(controller, "bridge", "network2", netOption, nil, nil)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, net2.Delete())
	}()

	// One endpoint per goroutine: thread 1 on net1, threads 2-3 on net2.
	_, err = net1.CreateEndpoint(context.Background(), "pep1")
	assert.NilError(t, err)
	_, err = net2.CreateEndpoint(context.Background(), "pep2")
	assert.NilError(t, err)
	_, err = net2.CreateEndpoint(context.Background(), "pep3")
	assert.NilError(t, err)

	// Thread 1 uses the default sandbox; the rest get regular sandboxes.
	sboxes := make([]*libnetwork.Sandbox, numThreads)
	sboxes[first-1], err = controller.NewSandbox(context.Background(), fmt.Sprintf("%drace", first), libnetwork.OptionUseDefaultSandbox())
	assert.NilError(t, err)
	for thd := first + 1; thd <= last; thd++ {
		sboxes[thd-1], err = controller.NewSandbox(context.Background(), fmt.Sprintf("%drace", thd))
		assert.NilError(t, err)
	}

	pt := parallelTester{
		osctx:      osctx,
		controller: controller,
		net1:       net1,
		net2:       net2,
		iterCnt:    iterCnt,
	}

	var eg errgroup.Group
	for i := first; i <= last; i++ {
		// i is per-iteration as of Go 1.22, so each goroutine captures its own value.
		eg.Go(func() error { return pt.Do(t, i) })
	}
	err = eg.Wait()
	assert.NilError(t, err)
}
// TestBridge creates a dual-stack bridge network, joins an endpoint with
// port mappings, and checks the port-binding data exposed by the driver.
func TestBridge(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)

	netOption := options.Generic{
		netlabel.EnableIPv4: true,
		netlabel.EnableIPv6: true,
		netlabel.GenericData: map[string]string{
			bridge.BridgeName:         "testnetwork",
			bridge.EnableICC:          "true",
			bridge.EnableIPMasquerade: "true",
		},
	}
	ipamV4ConfList := []*libnetwork.IpamConf{{PreferredPool: "192.168.100.0/24", Gateway: "192.168.100.1"}}
	ipamV6ConfList := []*libnetwork.IpamConf{{PreferredPool: "fe90::/64", Gateway: "fe90::22"}}

	network, err := createTestNetwork(controller, bridgeNetType, "testnetwork", netOption, ipamV4ConfList, ipamV6ConfList)
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, network.Delete())
	}()

	ep, err := network.CreateEndpoint(context.Background(), "testep")
	assert.NilError(t, err)

	sb, err := controller.NewSandbox(context.Background(), containerID, libnetwork.OptionPortMapping(getPortMapping()))
	assert.NilError(t, err)
	defer func() {
		assert.Check(t, sb.Delete(context.Background()))
	}()

	err = ep.Join(context.Background(), sb)
	assert.NilError(t, err)

	epInfo, err := ep.DriverInfo()
	assert.NilError(t, err)
	pmd, ok := epInfo[netlabel.PortMap]
	assert.Assert(t, ok, "Could not find expected info in endpoint data")
	pm, ok := pmd.([]types.PortBinding)
	assert.Assert(t, ok, "Unexpected format for port mapping in endpoint operational data")

	// With IPv6 listenable, each mapping produces a v4 and a v6 binding;
	// otherwise only the 5 v4 bindings are expected.
	expectedLen := 10
	if !isV6Listenable() {
		expectedLen = 5
	}
	assert.Check(t, is.Len(pm, expectedLen), "Incomplete data for port mapping in endpoint operational data")
}
var (
	v6ListenableCached bool
	v6ListenableOnce   sync.Once
)

// This is copied from the bridge driver package b/c the bridge driver is not platform agnostic.
// isV6Listenable reports whether a TCP socket can be bound on the IPv6
// loopback; the probe runs once and the result is cached for the process.
func isV6Listenable() bool {
	v6ListenableOnce.Do(func() {
		if l, err := net.Listen("tcp6", "[::1]:0"); err == nil {
			_ = l.Close()
			v6ListenableCached = true
		} else {
			// When the kernel was booted with `ipv6.disable=1`,
			// we get err "listen tcp6 [::1]:0: socket: address family not supported by protocol"
			// https://github.com/moby/moby/issues/42288
			log.G(context.TODO()).Debugf("port_mapping: v6Listenable=false (%v)", err)
		}
	})
	return v6ListenableCached
}
// TestBridgeRequiresIPAM verifies that creating a bridge network with the
// null IPAM driver and neither IP family enabled is rejected.
func TestBridgeRequiresIPAM(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)

	_, err := controller.NewNetwork(context.Background(), bridgeNetType, "testnetwork", "",
		libnetwork.NetworkOptionIpam(null.DriverName, "", nil, nil, nil),
	)
	assert.Check(t, is.ErrorContains(err, "IPv4 or IPv6 must be enabled"))
}
// TestNullIpam checks that, for each driver type, enabling an IP family
// while using the null IPAM driver fails with an empty-pool error.
func TestNullIpam(t *testing.T) {
	defer netnsutils.SetupTestOSContext(t)()
	controller := newController(t)

	tests := []struct {
		networkType string
	}{
		{networkType: bridgeNetType},
		{networkType: "macvlan"},
		{networkType: "ipvlan"},
	}

	for _, tc := range tests {
		t.Run(tc.networkType, func(t *testing.T) {
			// IPv4 enabled but null IPAM provides no v4 pool.
			_, err := controller.NewNetwork(context.Background(), tc.networkType, "tnet1-"+tc.networkType, "",
				libnetwork.NetworkOptionEnableIPv4(true),
				libnetwork.NetworkOptionIpam(null.DriverName, "", nil, nil, nil),
			)
			assert.Check(t, is.ErrorContains(err, "ipv4 pool is empty"))

			// Same for IPv6.
			_, err = controller.NewNetwork(context.Background(), tc.networkType, "tnet2-"+tc.networkType, "",
				libnetwork.NetworkOptionEnableIPv6(true),
				libnetwork.NetworkOptionIpam(null.DriverName, "", nil, nil, nil),
			)
			assert.Check(t, is.ErrorContains(err, "ipv6 pool is empty"))
		})
	}
}
import pytest
import mock
from api.base.settings.defaults import API_BASE
from framework.auth.core import Auth
from osf_tests.factories import (
NodeFactory,
ProjectFactory,
RegistrationFactory,
AuthUserFactory,
ForkFactory
)
from rest_framework import exceptions
from website import mails
from osf.utils import permissions
from api.nodes.serializers import NodeForksSerializer
@pytest.fixture()
def user():
    # A fresh authenticated user, shared by the test classes below.
    return AuthUserFactory()
@pytest.mark.django_db
class TestNodeForksList:
    """Permission and payload tests for GET /nodes/{id}/forks/."""

    @pytest.fixture()
    def pointer(self, user):
        # Project that gets linked into private_project via a node pointer.
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def private_project(self, user, pointer):
        # Private project where `user` is a read/write (non-admin) contributor.
        private_project = ProjectFactory()
        private_project.add_contributor(
            user, permissions=[permissions.READ, permissions.WRITE])
        private_project.add_pointer(pointer, auth=Auth(user), save=True)
        private_project.save()
        return private_project

    @pytest.fixture()
    def public_project(self, user):
        return ProjectFactory(is_public=True, creator=user)

    @pytest.fixture()
    def private_component(self, user, private_project):
        return NodeFactory(parent=private_project, creator=user)

    @pytest.fixture()
    def public_component(self, user, public_project):
        return NodeFactory(parent=public_project, creator=user, is_public=True)

    @pytest.fixture()
    def private_fork(self, user, private_project):
        return ForkFactory(project=private_project, user=user)

    @pytest.fixture()
    def public_fork(self, user, public_project):
        return ForkFactory(project=public_project, user=user)

    @pytest.fixture()
    def private_project_url(self, private_project):
        return '/{}nodes/{}/forks/'.format(API_BASE, private_project._id)

    @pytest.fixture()
    def public_project_url(self, public_project):
        return '/{}nodes/{}/forks/'.format(API_BASE, public_project._id)

    def test_can_access_public_node_forks_list_when_unauthenticated(
            self, app, public_project, public_fork, public_project_url):
        # A fork of a public project is still private until made public,
        # so anonymous users see an empty list first.
        res = app.get(public_project_url)
        assert res.status_code == 200
        assert len(res.json['data']) == 0
        # Fork defaults to private
        assert not public_fork.is_public

        public_fork.is_public = True
        public_fork.save()

        res = app.get(public_project_url)
        assert len(res.json['data']) == 1
        assert public_fork.is_public
        data = res.json['data'][0]
        assert data['attributes']['title'] == 'Fork of ' + public_project.title
        assert data['id'] == public_fork._id
        assert not data['attributes']['registration']
        assert data['attributes']['fork']

    def test_can_access_public_node_forks_list_authenticated_contributor(
            self, app, user, public_project, public_fork, public_project_url):
        # The fork's creator can see it even while it is still private.
        res = app.get(public_project_url, auth=user.auth)
        assert res.status_code == 200
        assert not public_fork.is_public
        assert len(res.json['data']) == 1
        data = res.json['data'][0]
        assert data['attributes']['title'] == 'Fork of ' + public_project.title
        assert data['id'] == public_fork._id
        assert not data['attributes']['registration']
        assert data['attributes']['fork']

    def test_can_access_public_node_forks_list_authenticated_non_contributor(
            self, app, public_project, public_fork, public_project_url):
        # Non-contributors only see the fork once it has been made public.
        non_contrib = AuthUserFactory()
        res = app.get(public_project_url, auth=non_contrib.auth)
        assert res.status_code == 200
        assert len(res.json['data']) == 0
        # Fork defaults to private
        assert not public_fork.is_public

        public_fork.is_public = True
        public_fork.save()

        res = app.get(public_project_url)
        assert len(res.json['data']) == 1
        assert public_fork.is_public
        data = res.json['data'][0]
        assert data['attributes']['title'] == 'Fork of ' + public_project.title
        assert data['id'] == public_fork._id
        assert not data['attributes']['registration']
        assert data['attributes']['fork']

    def test_authenticated_contributor_can_access_private_node_forks_list(
            self, app, user, private_project, private_component,
            private_fork, pointer, private_project_url):
        # Exercises all the embeds a fork list entry supports and checks
        # that children, node links, logs and provenance are forked over.
        res = app.get(
            private_project_url +
            '?embed=children&embed=node_links&embed=logs&embed=contributors&embed=forked_from',
            auth=user.auth)
        assert res.status_code == 200
        assert len(res.json['data']) == 1
        data = res.json['data'][0]
        assert data['attributes']['title'] == 'Fork of ' + \
            private_project.title
        assert data['id'] == private_fork._id

        fork_contributors = data['embeds']['contributors']['data'][0]['embeds']['users']['data']
        assert fork_contributors['attributes']['family_name'] == user.family_name
        assert fork_contributors['id'] == user._id

        forked_children = data['embeds']['children']['data'][0]
        assert forked_children['id'] == private_component.forks.first()._id
        assert forked_children['attributes']['title'] == private_component.title

        forked_node_links = data['embeds']['node_links']['data'][0]['embeds']['target_node']['data']
        assert forked_node_links['id'] == pointer._id
        assert forked_node_links['attributes']['title'] == pointer.title

        auth = Auth(user)
        expected_logs = list(
            private_project.get_aggregate_logs_queryset(
                auth
            ).values_list('action', flat=True)
        )
        # Forking adds exactly one 'node_forked' log on top of the originals.
        expected_logs.append('node_forked')

        forked_logs = data['embeds']['logs']['data']
        forked_log_actions = [
            log['attributes']['action']for log in forked_logs
        ]
        assert set(expected_logs) == set(forked_log_actions)
        assert len(set(forked_log_actions)) == len(set(expected_logs))

        forked_from = data['embeds']['forked_from']['data']
        assert forked_from['id'] == private_project._id

    def test_node_forks_list_errors(self, app, private_project_url):

        # test_cannot_access_private_node_forks_list_unauthenticated
        res = app.get(private_project_url, expect_errors=True)

        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail

        # test_authenticated_non_contributor_cannot_access_private_node_forks_list
        non_contrib = AuthUserFactory()
        res = app.get(
            private_project_url,
            auth=non_contrib.auth,
            expect_errors=True)

        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail

    def test_forks_list_does_not_show_registrations_of_forks(
            self, app, public_project, public_fork, public_project_url):
        reg = RegistrationFactory(project=public_fork, is_public=True)

        # confirm registration shows up in node forks
        assert reg in public_project.forks.all()
        res = app.get(public_project_url)

        # confirm registration of fork does not show up in public data
        assert len(res.json['data']) == 0
@pytest.mark.django_db
class TestNodeForkCreate:
    """Tests for POST /nodes/{id}/forks/ (fork creation, permissions, emails)."""

    @pytest.fixture()
    def user_two(self):
        return AuthUserFactory()

    @pytest.fixture()
    def private_project(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def public_project(self, user):
        return ProjectFactory(is_public=True, creator=user)

    @pytest.fixture()
    def private_project_url(self, private_project):
        return '/{}nodes/{}/forks/'.format(API_BASE, private_project._id)

    @pytest.fixture()
    def public_project_url(self, public_project):
        return '/{}nodes/{}/forks/'.format(API_BASE, public_project._id)

    @pytest.fixture()
    def fork_data(self):
        # Minimal valid payload; title defaults to "Fork of <original>".
        return {
            'data': {
                'type': 'nodes'
            }
        }

    @pytest.fixture()
    def fork_data_with_title(self):
        # Payload that overrides the default fork title.
        return {
            'data': {
                'type': 'nodes',
                'attributes':
                    {'title': 'My Forked Project'}
            }
        }

    def test_create_fork_from_public_project_with_new_title(
            self, app, user, public_project, fork_data_with_title, public_project_url):
        res = app.post_json_api(
            public_project_url,
            fork_data_with_title,
            auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == public_project.forks.first()._id
        assert res.json['data']['attributes']['title'] == fork_data_with_title['data']['attributes']['title']

    def test_create_fork_from_private_project_with_new_title(
            self, app, user, private_project, fork_data_with_title, private_project_url):
        res = app.post_json_api(
            private_project_url,
            fork_data_with_title,
            auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == private_project.forks.first()._id
        assert res.json['data']['attributes']['title'] == fork_data_with_title['data']['attributes']['title']

    def test_can_fork_public_node_logged_in(
            self, app, public_project, fork_data, public_project_url):
        # Any authenticated user may fork a public node.
        non_contrib = AuthUserFactory()
        res = app.post_json_api(
            public_project_url,
            fork_data,
            auth=non_contrib.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == public_project.forks.first()._id
        assert res.json['data']['attributes']['title'] == 'Fork of ' + \
            public_project.title

    def test_cannot_fork_errors(
            self, app, fork_data, public_project_url,
            private_project_url):

        # test_cannot_fork_public_node_logged_out
        res = app.post_json_api(
            public_project_url, fork_data,
            expect_errors=True)
        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail

        # test_cannot_fork_private_node_logged_out
        res = app.post_json_api(
            private_project_url, fork_data,
            expect_errors=True)
        assert res.status_code == 401
        assert res.json['errors'][0]['detail'] == exceptions.NotAuthenticated.default_detail

        # test_cannot_fork_private_node_logged_in_non_contributor
        non_contrib = AuthUserFactory()
        res = app.post_json_api(
            private_project_url, fork_data,
            auth=non_contrib.auth,
            expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail

    def test_can_fork_public_node_logged_in_contributor(
            self, app, user, public_project, fork_data, public_project_url):
        res = app.post_json_api(public_project_url, fork_data, auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == public_project.forks.first()._id
        assert res.json['data']['attributes']['title'] == 'Fork of ' + \
            public_project.title

    def test_can_fork_private_node_logged_in_contributor(
            self, app, user, private_project, fork_data, private_project_url):
        res = app.post_json_api(
            private_project_url +
            '?embed=children&embed=node_links&embed=logs&embed=contributors&embed=forked_from',
            fork_data, auth=user.auth)
        assert res.status_code == 201

        data = res.json['data']
        assert data['attributes']['title'] == 'Fork of ' + \
            private_project.title

        fork_contributors = data['embeds']['contributors']['data'][0]['embeds']['users']['data']
        assert fork_contributors['attributes']['family_name'] == user.family_name
        assert fork_contributors['id'] == user._id

        forked_from = data['embeds']['forked_from']['data']
        assert forked_from['id'] == private_project._id

    def test_fork_private_components_no_access(
            self, app, user_two, public_project,
            fork_data, public_project_url):
        # A private child belonging to someone else must not be forked.
        user_three = AuthUserFactory()
        url = public_project_url + '?embed=children'
        NodeFactory(
            parent=public_project,
            creator=user_two,
            is_public=False
        )
        res = app.post_json_api(url, fork_data, auth=user_three.auth)
        assert res.status_code == 201
        # Private components that you do not have access to are not forked
        assert res.json['data']['embeds']['children']['links']['meta']['total'] == 0

    def test_fork_components_you_can_access(
            self, app, user, private_project,
            fork_data, private_project_url):
        url = private_project_url + '?embed=children'
        new_component = NodeFactory(parent=private_project, creator=user)
        res = app.post_json_api(url, fork_data, auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['embeds']['children']['links']['meta']['total'] == 1
        assert res.json['data']['embeds']['children']['data'][0]['id'] == new_component.forks.first(
        )._id
        assert res.json['data']['embeds']['children']['data'][0]['attributes']['title'] == new_component.title

    def test_fork_private_node_links(
            self, app, user, user_two, private_project,
            fork_data, private_project_url):
        private_pointer = ProjectFactory(creator=user_two)
        actual_pointer = private_project.add_pointer(
            private_pointer, auth=Auth(user_two), save=True)

        url = private_project_url + '?embed=node_links'

        # Node link is forked, but shows up as a private node link
        res = app.post_json_api(url, fork_data, auth=user.auth)
        assert res.status_code == 201
        assert (res.json['data']['embeds']['node_links']['data'][0]['embeds']['target_node']
                ['errors'][0]['detail'] == exceptions.PermissionDenied.default_detail)
        assert res.json['data']['embeds']['node_links']['links']['meta']['total'] == 1

        private_project.rm_pointer(actual_pointer, auth=Auth(user_two))

    def test_fork_node_links_you_can_access(
            self, app, user, user_two, private_project,
            fork_data, private_project_url):
        pointer = ProjectFactory(creator=user)
        private_project.add_pointer(pointer, auth=Auth(user_two), save=True)

        url = private_project_url + '?embed=node_links'

        res = app.post_json_api(url, fork_data, auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['embeds']['node_links']['data'][0]['embeds']['target_node']['data']['id'] == pointer._id
        assert res.json['data']['embeds']['node_links']['links']['meta']['total'] == 1

    def test_can_fork_registration(
            self, app, user, private_project, fork_data):
        registration = RegistrationFactory(project=private_project, user=user)

        url = '/{}registrations/{}/forks/'.format(API_BASE, registration._id)
        res = app.post_json_api(url, fork_data, auth=user.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == registration.forks.first()._id
        assert res.json['data']['attributes']['title'] == 'Fork of ' + \
            registration.title

    def test_read_only_contributor_can_fork_private_registration(
            self, app, private_project, fork_data, private_project_url):
        # Read permission is sufficient to fork.
        read_contrib = AuthUserFactory()

        private_project.add_contributor(
            read_contrib,
            permissions=[permissions.READ], save=True)
        res = app.post_json_api(
            private_project_url, fork_data,
            auth=read_contrib.auth)
        assert res.status_code == 201
        assert res.json['data']['id'] == private_project.forks.first()._id
        assert res.json['data']['attributes']['title'] == 'Fork of ' + \
            private_project.title

    def test_send_email_success(
            self, app, user, public_project_url,
            fork_data_with_title, public_project):
        # A successful fork triggers the FORK_COMPLETED email.
        with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail:
            res = app.post_json_api(
                public_project_url,
                fork_data_with_title,
                auth=user.auth)
            assert res.status_code == 201
            assert res.json['data']['id'] == public_project.forks.first()._id
            mock_send_mail.assert_called_with(
                user.email,
                mails.FORK_COMPLETED,
                title=public_project.title,
                guid=res.json['data']['id'],
                mimetype='html',
                can_change_preferences=False)

    def test_send_email_failed(
            self, app, user, public_project_url,
            fork_data_with_title, public_project):
        # If serialization blows up, the FORK_FAILED email is sent instead.
        with mock.patch.object(NodeForksSerializer, 'save', side_effect=Exception()):
            with mock.patch.object(mails, 'send_mail', return_value=None) as mock_send_mail:
                with pytest.raises(Exception):
                    app.post_json_api(
                        public_project_url,
                        fork_data_with_title,
                        auth=user.auth)
                    mock_send_mail.assert_called_with(
                        user.email,
                        mails.FORK_FAILED,
                        title=public_project.title,
                        guid=public_project._id,
                        mimetype='html',
                        can_change_preferences=False)
/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package install installs the experimental API group, making it available as
// an option to all of the API encoding/decoding machinery.
package install
import (
"k8s.io/apimachinery/pkg/runtime"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/kubernetes/pkg/api/legacyscheme"
"k8s.io/kubernetes/pkg/apis/apiserverinternal"
"k8s.io/kubernetes/pkg/apis/apiserverinternal/v1alpha1"
)
func init() {
	// Register the apiserverinternal group with the legacy scheme at
	// package import time, so importing this package is enough to install it.
	Install(legacyscheme.Scheme)
}
// Install registers the API group and adds types to a scheme
func Install(scheme *runtime.Scheme) {
	// Internal (hub) types first, then the versioned v1alpha1 types.
	utilruntime.Must(apiserverinternal.AddToScheme(scheme))
	utilruntime.Must(v1alpha1.AddToScheme(scheme))
	// v1alpha1 is the only external version, so it is the preferred one.
	utilruntime.Must(scheme.SetVersionPriority(v1alpha1.SchemeGroupVersion))
}
import webapp2
import ferris.core.routing as routing
# Sentinel for the uri methods: distinguishes "argument omitted" from
# explicitly-passed values such as None or False.
route_sentinel = object()
class Uri(object):
    """
    URI Utility class to help controllers and anything else
    that deals with URIs.

    Mixed-in classes must provide ``self.route`` (with ``prefix``,
    ``controller`` and ``action`` attributes) and ``self.request``.
    """

    def get_route_name(self,
        prefix=route_sentinel, controller=route_sentinel, action=route_sentinel):
        """
        Build the route name for a given prefix, controller, and action.
        For example, get_route_name('admin', 'pages', 'view') gives you
        "admin:pages:view". Omitted parts default to the current route's
        values; set prefix to False to exclude the current prefix from
        the route name.
        """
        # The sentinel (not None) marks "omitted", so callers may pass
        # falsy values like False explicitly.
        prefix = prefix if prefix != route_sentinel else self.route.prefix
        controller = controller if controller != route_sentinel else self.route.controller
        action = action if action != route_sentinel else self.route.action

        return routing.name_from_canonical_parts(prefix, controller, action)

    def uri(self, route_name=None,
        prefix=route_sentinel, controller=route_sentinel, action=route_sentinel,
        _pass_all=False, *args, **kwargs):
        """
        Generate in-application URIs (or URLs).

        :param route_name: The route name for which to generate a URI for, if not provided then prefix, controller, and action will be used to determine the route name
        :param prefix: The prefix of the desired URI, if omitted then the current prefix is used.
        :param controller: The controller name of the desired URI, if omitted then the current controller is used.
        :param action: The action name of the desired URI, if omitted then the current action is used.
        :param _pass_all: will pass all current URI parameters to the generated URI (useful for pagination, etc.)
        :param _full: generate a full URI, including the hostname.
        :param kwargs: arguments passed at URL or GET parameters.

        Examples::

            uri('foxes:run') # -> /foxes/run
            uri(prefix=False, controller='foxes', action='run')  # -> /foxes/run

            # when currently at /foxes/run
            uri(action='hide') # -> /foxes/hide
        """
        if not route_name:
            route_name = self.get_route_name(prefix, controller, action)

        if _pass_all:
            tkwargs = dict(self.request.route_kwargs)
            # Current positional route args follow any explicitly-passed args.
            targs = args + tuple(self.request.route_args)
            gargs = dict(self.request.GET)
            tkwargs.update(gargs)
            tkwargs.update(kwargs)
        else:
            targs = args
            tkwargs = kwargs

        # Drop parameters explicitly set to None so they don't leak into the URI.
        tkwargs = {key: value for key, value in tkwargs.items()
                   if value is not None}

        for key, value in tkwargs.items():
            if isinstance(value, unicode):
                tkwargs[key] = value.encode("utf-8")

        # Bug fix: the combined positional args (targs) were previously
        # computed under _pass_all but never forwarded to uri_for.
        return webapp2.uri_for(route_name, *targs, **tkwargs)

    def uri_exists(self, route_name=None,
        prefix=route_sentinel, controller=route_sentinel, action=route_sentinel,
        *args, **kwargs):
        """
        Check if a route exists.
        """
        if not route_name:
            route_name = self.get_route_name(prefix, controller, action)

        return routing.route_name_exists(route_name)

    def on_uri(self, route_name=None,
        prefix=route_sentinel, controller=route_sentinel, action=route_sentinel,
        **kwargs):
        """
        Checks to see if we're currently on the specified route; any kwargs
        must also match the current request's params or route kwargs.
        """
        if not route_name:
            route_name = self.get_route_name(prefix, controller, action)

        route_matches = route_name == routing.current_route_name()

        if not kwargs or not route_matches:
            return route_matches

        for name, value in kwargs.items():
            if not self.request.params.get(name, None) == value and not self.request.route_kwargs.get(name, None) == value:
                return False

        return True
import A from "./a";
import "./b";
import "./c";
import D from "./d";
import "./e";
import "./f";
import a from "./dep?a";
import b from "./dep?b";
import c from "./dep?c";
import d from "./dep?d";
import e from "./dep?e";
import f from "./dep?f";
// Sanity check that the bundled exports still work after tree shaking.
it("should generate valid code", () => {
	expect(A()).toBe("x");
	expect(new D().method()).toBe("x");
});

// Flags a..f record whether the corresponding dep was kept; in production
// mode unused exports must be dropped by the inner-graph analysis.
it("a should be used", () => {
	expect(a).toBe(true);
});

if (process.env.NODE_ENV === "production") {
	it("b should be unused", () => {
		expect(b).toBe(false);
	});
}

it("c should be used", () => {
	expect(c).toBe(true);
});

if (process.env.NODE_ENV === "production") {
	it("d should be used", () => {
		expect(d).toBe(true);
	});

	it("e should be unused", () => {
		expect(e).toBe(false);
	});
}

it("f should be used", () => {
	expect(f).toBe(true);
});
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.dev/license
*/
import {Component, Injectable} from '@angular/core';
import {ActivatedRoute} from '@angular/router';
/**
 * Demo home page for the routing examples: renders navigation links and
 * dumps the current route's data, params and query params as JSON.
 */
@Component({
  selector: 'app-routes-home',
  standalone: false,
  template: `
    <h1>home works!</h1>
    <div style="display:flex; flex-direction: column;">
      <a routerLink="/demo-app/todos/routes/home">Home</a>
      <a routerLink="/demo-app/todos/routes/route-one">Route One</a>
      <a routerLink="/demo-app/todos/routes/route-two">Route Two</a>
      <a routerLink="/demo-app/todos/routes/route-params/hello">Route Params</a>
      <a routerLink="/demo-app/todos/routes/route-query-params" [queryParams]="routeQueryParmas"
        >Route Query Params</a
      >
      <a routerLink="/demo-app/todos/routes/route-data">Route Data</a>
    </div>
    <hr />
    <div class="flex items-center border-2 border-dashed border-gray-600 p-2">
      <h1>Route Data: </h1>
      <pre class="p-2 bg-gray-100">{{ routeData | json }}</pre>
    </div>
    <br />
    <div class="flex items-center border-2 border-dashed border-gray-600 p-2">
      <h1>Route Params: </h1>
      <pre class="p-2 bg-gray-100">{{ routeParams | json }}</pre>
    </div>
    <br />
    <div class="flex items-center border-2 border-dashed border-gray-600 p-2">
      <!-- Fixed typo: was "Qquery Params" -->
      <h1>Query Params: </h1>
      <pre class="p-2 bg-gray-100">{{ queryParams | json }}</pre>
    </div>
  `,
})
export class RoutesHomeComponent {
  // Snapshots of the activated route, captured once on init.
  routeData: any;
  routeParams: any;
  queryParams: any;
  // NOTE(review): property name has a typo ("Parmas"); kept as-is to avoid
  // changing the component's public surface.
  routeQueryParmas = {'message': 'Hello from route param!!'};

  constructor(private activatedRoute: ActivatedRoute) {}

  ngOnInit(): void {
    this.routeData = this.activatedRoute.snapshot.data;
    this.routeParams = this.activatedRoute.snapshot.params;
    this.queryParams = this.activatedRoute.snapshot.queryParams;
  }
}
/** Minimal auxiliary-outlet demo component. */
// NOTE(review): unlike RoutesHomeComponent this declares no `standalone`
// flag — confirm the intended standalone-ness against the declaring module.
@Component({
  selector: 'app-routes-aux',
  template: 'Component Aux',
})
export class RoutesAuxComponent {}
/** Placeholder page for the "route-one" demo route. */
@Component({
  selector: 'app-routes-one',
  template: `<h1>Route 1 works</h1>`,
})
export class RoutesOneComponent {}
/** Placeholder page for the "route-two" demo route. */
@Component({
  selector: 'app-routes-two',
  template: `<h1>Route 2 works</h1>`,
})
export class RoutesTwoComponent {}
/** Placeholder page demonstrating a standalone-routed component. */
@Component({
  selector: 'app-routes-standalone',
  template: '<h1>Standalone Route</h1>',
})
export class RoutesStandaloneComponent {}
@Injectable()
export class Service1 {
  /** Random per-instance id, used to demo injector instance sharing. */
  value = 'Service One Id: ' + Math.floor(500 * Math.random());
}
@Injectable()
export class Service2 {
  /** Random per-instance id, used to demo injector instance sharing. */
  value = 'Service Two Id: ' + Math.floor(500 * Math.random());
}
@Injectable()
export class Service3 {
  /** Random per-instance id, used to demo injector instance sharing. */
  value = 'Service Three Id: ' + Math.floor(500 * Math.random());
}
@Injectable()
export class Service4 {
value = `Service Four Id: ${Math.floor(Math.random() * 500)}`;
} | typescript | github | https://github.com/angular/angular | devtools/src/app/demo-app/todo/routes/routes.component.ts |
from itertools import product
import numpy as np
from numpy.linalg import norm
from numpy.testing import (assert_, assert_allclose,
assert_equal, suppress_warnings)
from pytest import raises as assert_raises
from scipy.sparse import issparse, lil_matrix
from scipy.sparse.linalg import aslinearoperator
from scipy.optimize import least_squares
from scipy.optimize._lsq.least_squares import IMPLEMENTED_LOSSES
from scipy.optimize._lsq.common import EPS, make_strictly_feasible
def fun_trivial(x, a=0):
    """Scalar residual (x - a)**2 + 5, minimized at x = a."""
    shifted = x - a
    return shifted * shifted + 5.0
def jac_trivial(x, a=0.0):
    """Derivative of fun_trivial with respect to x."""
    return (x - a) * 2
def fun_2d_trivial(x):
    """Identity residual on the first two components of x."""
    first, second = x[0], x[1]
    return np.array([first, second])
def jac_2d_trivial(x):
    """Constant 2x2 identity Jacobian of fun_2d_trivial."""
    return np.eye(2)
def fun_rosenbrock(x):
    """Residual vector of the classic 2-D Rosenbrock problem."""
    curvature = 10 * (x[1] - x[0]**2)
    linear = 1 - x[0]
    return np.array([curvature, linear])
def jac_rosenbrock(x):
    """2x2 Jacobian of fun_rosenbrock."""
    top_row = [-20 * x[0], 10]
    bottom_row = [-1, 0]
    return np.array([top_row, bottom_row])
def jac_rosenbrock_bad_dim(x):
    """Deliberately malformed Rosenbrock Jacobian: 3 rows for 2 residuals."""
    good_part = np.array([
        [-20 * x[0], 10],
        [-1, 0]
    ])
    # Extra zero row gives the wrong shape on purpose.
    return np.vstack([good_part, [[0.0, 0.0]]])
def fun_rosenbrock_cropped(x):
    """First Rosenbrock residual only (underdetermined: 1 residual, 2 vars)."""
    # Inlined first component of fun_rosenbrock.
    return 10 * (x[1] - x[0]**2)
def jac_rosenbrock_cropped(x):
    """Jacobian row matching fun_rosenbrock_cropped."""
    # Inlined first row of jac_rosenbrock.
    return np.array([-20 * x[0], 10])
# When x is 1-D array, return is 2-D array.
def fun_wrong_dimensions(x):
    """Invalid residual: maps 1-D x to a 2-D array of its first three powers."""
    powers = [x, x**2, x**3]
    return np.array(powers)
def jac_wrong_dimensions(x, a=0.0):
    """Invalid Jacobian: always at least 3-D."""
    # Inlined jac_trivial(x, a) == 2 * (x - a).
    return np.atleast_3d(2 * (x - a))
def fun_bvp(x):
    """Residuals of a discretized 2-D boundary value problem.

    x holds an n*n grid flattened to 1-D; the grid is zero-padded (Dirichlet
    boundary) and the residual is the 5-point neighbor sum minus 4*x plus x**3.
    """
    n = int(np.sqrt(x.shape[0]))
    grid = x.reshape((n, n))
    padded = np.zeros((n + 2, n + 2))
    padded[1:-1, 1:-1] = grid
    neighbor_sum = (padded[:-2, 1:-1] + padded[2:, 1:-1] +
                    padded[1:-1, :-2] + padded[1:-1, 2:])
    return (neighbor_sum - 4 * grid + grid**3).ravel()
class BroydenTridiagonal(object):
    """Broyden tridiagonal test problem with bounds and a tridiagonal Jacobian.

    ``mode`` selects how the Jacobian is exposed to the solver:
    'sparse' (lil_matrix plus an explicit sparsity pattern),
    'operator' (wrapped in a LinearOperator), or 'dense' (plain ndarray).
    """

    def __init__(self, n=100, mode='sparse'):
        np.random.seed(0)  # deterministic setup; RNG call order matters below

        self.n = n

        self.x0 = -np.ones(n)
        self.lb = np.linspace(-2, -1.5, n)
        self.ub = np.linspace(-0.8, 0.0, n)

        # Jitter the bounds and the starting point, then push x0 strictly
        # inside the (possibly jittered) box.
        self.lb += 0.1 * np.random.randn(n)
        self.ub += 0.1 * np.random.randn(n)

        self.x0 += 0.1 * np.random.randn(n)
        self.x0 = make_strictly_feasible(self.x0, self.lb, self.ub)

        if mode == 'sparse':
            # Tridiagonal sparsity pattern: main, sub- and super-diagonals.
            self.sparsity = lil_matrix((n, n), dtype=int)
            i = np.arange(n)
            self.sparsity[i, i] = 1
            i = np.arange(1, n)
            self.sparsity[i, i - 1] = 1
            i = np.arange(n - 1)
            self.sparsity[i, i + 1] = 1
            self.jac = self._jac
        elif mode == 'operator':
            self.jac = lambda x: aslinearoperator(self._jac(x))
        elif mode == 'dense':
            self.sparsity = None
            self.jac = lambda x: self._jac(x).toarray()
        else:
            assert_(False)

    def fun(self, x):
        # Residuals: (3 - x_i) x_i + 1 - x_{i-1} - 2 x_{i+1}.
        f = (3 - x) * x + 1
        f[1:] -= x[:-1]
        f[:-1] -= 2 * x[1:]
        return f

    def _jac(self, x):
        # Tridiagonal Jacobian of fun as a lil_matrix.
        J = lil_matrix((self.n, self.n))
        i = np.arange(self.n)
        J[i, i] = 3 - 2 * x
        i = np.arange(1, self.n)
        J[i, i - 1] = -1
        i = np.arange(self.n - 1)
        J[i, i + 1] = -2
        return J
class ExponentialFittingProblem(object):
    """Provide data and function for exponential fitting in the form
    y = a + exp(b * x) + noise."""

    def __init__(self, a, b, noise, n_outliers=1, x_range=(-1, 1),
                 n_points=11, random_seed=None):
        np.random.seed(random_seed)
        self.m = n_points
        self.n = 2  # two fit parameters: (a, b)

        self.p0 = np.zeros(2)
        self.x = np.linspace(x_range[0], x_range[1], n_points)

        self.y = a + np.exp(b * self.x)
        self.y += noise * np.random.randn(self.m)

        # Contaminate a few points with large positive errors, so robust
        # losses can be distinguished from plain least squares.
        outliers = np.random.randint(0, self.m, n_outliers)
        self.y[outliers] += 50 * noise * np.random.rand(n_outliers)

        self.p_opt = np.array([a, b])

    def fun(self, p):
        # Residuals of the model against the noisy data.
        return p[0] + np.exp(p[1] * self.x) - self.y

    def jac(self, p):
        J = np.empty((self.m, self.n))
        J[:, 0] = 1
        J[:, 1] = self.x * np.exp(p[1] * self.x)
        return J
def cubic_soft_l1(z):
    """Custom loss rho(z) = 3((1 + z)**(1/3) - 1) with its first two
    derivatives, stacked as the (3, z.size) array least_squares expects."""
    t = 1 + z
    value = 3 * (t**(1/3) - 1)
    first_deriv = t ** (-2/3)
    second_deriv = -2/3 * t**(-5/3)
    return np.vstack((value, first_deriv, second_deriv))
LOSSES = list(IMPLEMENTED_LOSSES.keys()) + [cubic_soft_l1]
class BaseMixin(object):
def test_basic(self):
# Test that the basic calling sequence works.
res = least_squares(fun_trivial, 2., method=self.method)
assert_allclose(res.x, 0, atol=1e-4)
assert_allclose(res.fun, fun_trivial(res.x))
def test_args_kwargs(self):
# Test that args and kwargs are passed correctly to the functions.
a = 3.0
for jac in ['2-point', '3-point', 'cs', jac_trivial]:
with suppress_warnings() as sup:
sup.filter(UserWarning,
"jac='(3-point|cs)' works equivalently to '2-point' for method='lm'")
res = least_squares(fun_trivial, 2.0, jac, args=(a,),
method=self.method)
res1 = least_squares(fun_trivial, 2.0, jac, kwargs={'a': a},
method=self.method)
assert_allclose(res.x, a, rtol=1e-4)
assert_allclose(res1.x, a, rtol=1e-4)
assert_raises(TypeError, least_squares, fun_trivial, 2.0,
args=(3, 4,), method=self.method)
assert_raises(TypeError, least_squares, fun_trivial, 2.0,
kwargs={'kaboom': 3}, method=self.method)
def test_jac_options(self):
for jac in ['2-point', '3-point', 'cs', jac_trivial]:
with suppress_warnings() as sup:
sup.filter(UserWarning,
"jac='(3-point|cs)' works equivalently to '2-point' for method='lm'")
res = least_squares(fun_trivial, 2.0, jac, method=self.method)
assert_allclose(res.x, 0, atol=1e-4)
assert_raises(ValueError, least_squares, fun_trivial, 2.0, jac='oops',
method=self.method)
def test_nfev_options(self):
for max_nfev in [None, 20]:
res = least_squares(fun_trivial, 2.0, max_nfev=max_nfev,
method=self.method)
assert_allclose(res.x, 0, atol=1e-4)
def test_x_scale_options(self):
for x_scale in [1.0, np.array([0.5]), 'jac']:
res = least_squares(fun_trivial, 2.0, x_scale=x_scale)
assert_allclose(res.x, 0)
assert_raises(ValueError, least_squares, fun_trivial,
2.0, x_scale='auto', method=self.method)
assert_raises(ValueError, least_squares, fun_trivial,
2.0, x_scale=-1.0, method=self.method)
assert_raises(ValueError, least_squares, fun_trivial,
2.0, x_scale=None, method=self.method)
assert_raises(ValueError, least_squares, fun_trivial,
2.0, x_scale=1.0+2.0j, method=self.method)
    def test_diff_step(self):
        # res1 and res2 should be equivalent.
        # res2 and res3 should be different.
        # (Apparently only the magnitude of diff_step matters: the negative
        # step reproduces the positive-step run exactly, while the default
        # step differs and changes the evaluation count -- the asserts
        # below pin exactly that.)
        res1 = least_squares(fun_trivial, 2.0, diff_step=1e-1,
                             method=self.method)
        res2 = least_squares(fun_trivial, 2.0, diff_step=-1e-1,
                             method=self.method)
        res3 = least_squares(fun_trivial, 2.0,
                             diff_step=None, method=self.method)
        assert_allclose(res1.x, 0, atol=1e-4)
        assert_allclose(res2.x, 0, atol=1e-4)
        assert_allclose(res3.x, 0, atol=1e-4)
        assert_equal(res1.x, res2.x)
        assert_equal(res1.nfev, res2.nfev)
        assert_(res2.nfev != res3.nfev)
def test_incorrect_options_usage(self):
assert_raises(TypeError, least_squares, fun_trivial, 2.0,
method=self.method, options={'no_such_option': 100})
assert_raises(TypeError, least_squares, fun_trivial, 2.0,
method=self.method, options={'max_nfev': 100})
    def test_full_result(self):
        # MINPACK doesn't work very well with factor=100 on this problem,
        # thus using low 'atol'.
        res = least_squares(fun_trivial, 2.0, method=self.method)
        assert_allclose(res.x, 0, atol=1e-4)
        # Residual at the minimum is 5, so cost = 0.5 * 5**2 = 12.5.
        assert_allclose(res.cost, 12.5)
        assert_allclose(res.fun, 5)
        assert_allclose(res.jac, 0, atol=1e-4)
        assert_allclose(res.grad, 0, atol=1e-2)
        assert_allclose(res.optimality, 0, atol=1e-2)
        # No bounds given, so no constraint can be active.
        assert_equal(res.active_mask, 0)
        if self.method == 'lm':
            # 'lm' does not report the Jacobian evaluation count.
            assert_(res.nfev < 30)
            assert_(res.njev is None)
        else:
            assert_(res.nfev < 10)
            assert_(res.njev < 10)
        assert_(res.status > 0)
        assert_(res.success)
    def test_full_result_single_fev(self):
        # MINPACK checks the number of nfev after the iteration,
        # so it's hard to tell what it is going to compute. Hence skipped
        # for 'lm'.
        if self.method == 'lm':
            return
        # With max_nfev=1 the solver stops at x0 = 2:
        # f = 2**2 + 5 = 9, cost = 0.5 * 9**2 = 40.5, jac = 2*x = 4 and
        # grad = jac * f = 36 (see test_grad below for the model).
        res = least_squares(fun_trivial, 2.0, method=self.method,
                            max_nfev=1)
        assert_equal(res.x, np.array([2]))
        assert_equal(res.cost, 40.5)
        assert_equal(res.fun, np.array([9]))
        assert_equal(res.jac, np.array([[4]]))
        assert_equal(res.grad, np.array([36]))
        assert_equal(res.optimality, 36)
        assert_equal(res.active_mask, np.array([0]))
        assert_equal(res.nfev, 1)
        assert_equal(res.njev, 1)
        # status 0 means the evaluation budget was exhausted -> no success.
        assert_equal(res.status, 0)
        assert_equal(res.success, 0)
    def test_rosenbrock(self):
        # The full Cartesian product of Jacobian schemes, x_scale choices
        # and trust-region solvers must all reach the Rosenbrock minimum.
        x0 = [-2, 1]
        x_opt = [1, 1]
        for jac, x_scale, tr_solver in product(
                ['2-point', '3-point', 'cs', jac_rosenbrock],
                [1.0, np.array([1.0, 0.2]), 'jac'],
                ['exact', 'lsmr']):
            with suppress_warnings() as sup:
                sup.filter(UserWarning,
                           "jac='(3-point|cs)' works equivalently to '2-point' for method='lm'")
                res = least_squares(fun_rosenbrock, x0, jac, x_scale=x_scale,
                                    tr_solver=tr_solver, method=self.method)
                assert_allclose(res.x, x_opt)
    def test_rosenbrock_cropped(self):
        # The cropped problem has fewer residuals than variables (m < n),
        # which 'lm' rejects (see TestLM.test_m_less_n_not_supported).
        x0 = [-2, 1]
        if self.method == 'lm':
            assert_raises(ValueError, least_squares, fun_rosenbrock_cropped,
                          x0, method='lm')
        else:
            for jac, x_scale, tr_solver in product(
                    ['2-point', '3-point', 'cs', jac_rosenbrock_cropped],
                    [1.0, np.array([1.0, 0.2]), 'jac'],
                    ['exact', 'lsmr']):
                res = least_squares(
                    fun_rosenbrock_cropped, x0, jac, x_scale=x_scale,
                    tr_solver=tr_solver, method=self.method)
                assert_allclose(res.cost, 0, atol=1e-14)
def test_fun_wrong_dimensions(self):
assert_raises(ValueError, least_squares, fun_wrong_dimensions,
2.0, method=self.method)
def test_jac_wrong_dimensions(self):
assert_raises(ValueError, least_squares, fun_trivial,
2.0, jac_wrong_dimensions, method=self.method)
def test_fun_and_jac_inconsistent_dimensions(self):
x0 = [1, 2]
assert_raises(ValueError, least_squares, fun_rosenbrock, x0,
jac_rosenbrock_bad_dim, method=self.method)
def test_x0_multidimensional(self):
x0 = np.ones(4).reshape(2, 2)
assert_raises(ValueError, least_squares, fun_trivial, x0,
method=self.method)
def test_x0_complex_scalar(self):
x0 = 2.0 + 0.0*1j
assert_raises(ValueError, least_squares, fun_trivial, x0,
method=self.method)
def test_x0_complex_array(self):
x0 = [1.0, 2.0 + 0.0*1j]
assert_raises(ValueError, least_squares, fun_trivial, x0,
method=self.method)
    def test_bvp(self):
        # This test was introduced with fix #5556. It turned out that
        # dogbox solver had a bug with trust-region radius update, which
        # could block its progress and create an infinite loop. And this
        # discrete boundary value problem is the one which triggers it.
        n = 10
        x0 = np.ones(n**2)
        if self.method == 'lm':
            max_nfev = 5000 # To account for Jacobian estimation.
        else:
            max_nfev = 100
        # Converging strictly within the budget guards against the stalling
        # behavior described above.
        res = least_squares(fun_bvp, x0, ftol=1e-2, method=self.method,
                            max_nfev=max_nfev)
        assert_(res.nfev < max_nfev)
        assert_(res.cost < 0.5)
def test_error_raised_when_all_tolerances_below_eps(self):
# Test that all 0 tolerances are not allowed.
assert_raises(ValueError, least_squares, fun_trivial, 2.0,
method=self.method, ftol=None, xtol=None, gtol=None)
    def test_convergence_with_only_one_tolerance_enabled(self):
        # A single enabled tolerance (the other two set to None) must be
        # enough to converge. 'lm' rejects None tolerances (see
        # test_small_tolerances_for_lm), hence the skip.
        if self.method == 'lm':
            return # should not do test
        x0 = [-2, 1]
        x_opt = [1, 1]
        for ftol, xtol, gtol in [(1e-8, None, None),
                                 (None, 1e-8, None),
                                 (None, None, 1e-8)]:
            res = least_squares(fun_rosenbrock, x0, jac=jac_rosenbrock,
                                ftol=ftol, gtol=gtol, xtol=xtol,
                                method=self.method)
            assert_allclose(res.x, x_opt)
class BoundsMixin(object):
    """Shared tests of bound-constrained behavior (requires self.method)."""
    def test_inconsistent(self):
        # Lower bound above upper bound.
        assert_raises(ValueError, least_squares, fun_trivial, 2.0,
                      bounds=(10.0, 0.0), method=self.method)
    def test_infeasible(self):
        # x0 outside the feasible region.
        assert_raises(ValueError, least_squares, fun_trivial, 2.0,
                      bounds=(3., 4), method=self.method)
    def test_wrong_number(self):
        # bounds must be a 2-tuple.
        assert_raises(ValueError, least_squares, fun_trivial, 2.,
                      bounds=(1., 2, 3), method=self.method)
    def test_inconsistent_shape(self):
        assert_raises(ValueError, least_squares, fun_trivial, 2.0,
                      bounds=(1.0, [2.0, 3.0]), method=self.method)
        # 1-D array won't be broadcast
        assert_raises(ValueError, least_squares, fun_rosenbrock, [1.0, 2.0],
                      bounds=([0.0], [3.0, 4.0]), method=self.method)
    def test_in_bounds(self):
        # Inactive bounds: unconstrained minimum reached, mask all zeros.
        for jac in ['2-point', '3-point', 'cs', jac_trivial]:
            res = least_squares(fun_trivial, 2.0, jac=jac,
                                bounds=(-1.0, 3.0), method=self.method)
            assert_allclose(res.x, 0.0, atol=1e-4)
            assert_equal(res.active_mask, [0])
            assert_(-1 <= res.x <= 3)
            # Active lower bound: solution pinned at 0.5, mask is -1.
            res = least_squares(fun_trivial, 2.0, jac=jac,
                                bounds=(0.5, 3.0), method=self.method)
            assert_allclose(res.x, 0.5, atol=1e-4)
            assert_equal(res.active_mask, [-1])
            assert_(0.5 <= res.x <= 3)
    def test_bounds_shape(self):
        # Scalar bounds must broadcast against array bounds and vice versa.
        for jac in ['2-point', '3-point', 'cs', jac_2d_trivial]:
            x0 = [1.0, 1.0]
            res = least_squares(fun_2d_trivial, x0, jac=jac)
            assert_allclose(res.x, [0.0, 0.0])
            res = least_squares(fun_2d_trivial, x0, jac=jac,
                                bounds=(0.5, [2.0, 2.0]), method=self.method)
            assert_allclose(res.x, [0.5, 0.5])
            res = least_squares(fun_2d_trivial, x0, jac=jac,
                                bounds=([0.3, 0.2], 3.0), method=self.method)
            assert_allclose(res.x, [0.3, 0.2])
            res = least_squares(
                fun_2d_trivial, x0, jac=jac, bounds=([-1, 0.5], [1.0, 3.0]),
                method=self.method)
            assert_allclose(res.x, [0.0, 0.5], atol=1e-5)
    def test_rosenbrock_bounds(self):
        # Various start points and bound configurations; first-order
        # optimality must reach zero in all combinations.
        x0_1 = np.array([-2.0, 1.0])
        x0_2 = np.array([2.0, 2.0])
        x0_3 = np.array([-2.0, 2.0])
        x0_4 = np.array([0.0, 2.0])
        x0_5 = np.array([-1.2, 1.0])
        problems = [
            (x0_1, ([-np.inf, -1.5], np.inf)),
            (x0_2, ([-np.inf, 1.5], np.inf)),
            (x0_3, ([-np.inf, 1.5], np.inf)),
            (x0_4, ([-np.inf, 1.5], [1.0, np.inf])),
            (x0_2, ([1.0, 1.5], [3.0, 3.0])),
            (x0_5, ([-50.0, 0.0], [0.5, 100]))
        ]
        for x0, bounds in problems:
            for jac, x_scale, tr_solver in product(
                    ['2-point', '3-point', 'cs', jac_rosenbrock],
                    [1.0, [1.0, 0.5], 'jac'],
                    ['exact', 'lsmr']):
                res = least_squares(fun_rosenbrock, x0, jac, bounds,
                                    x_scale=x_scale, tr_solver=tr_solver,
                                    method=self.method)
                assert_allclose(res.optimality, 0.0, atol=1e-5)
class SparseMixin(object):
    """Shared tests of sparse-Jacobian handling (requires self.method)."""
    def test_exact_tr_solver(self):
        # tr_solver='exact' is incompatible with sparse Jacobians and with
        # jac_sparsity.
        p = BroydenTridiagonal()
        assert_raises(ValueError, least_squares, p.fun, p.x0, p.jac,
                      tr_solver='exact', method=self.method)
        assert_raises(ValueError, least_squares, p.fun, p.x0,
                      tr_solver='exact', jac_sparsity=p.sparsity,
                      method=self.method)
    def test_equivalence(self):
        # Sparse and dense formulations must give identical iterations.
        sparse = BroydenTridiagonal(mode='sparse')
        dense = BroydenTridiagonal(mode='dense')
        res_sparse = least_squares(
            sparse.fun, sparse.x0, jac=sparse.jac,
            method=self.method)
        # NOTE(review): the dense run also uses sparse.jac; dense.jac was
        # probably intended (compare test_solver_selection) -- confirm
        # before changing.
        res_dense = least_squares(
            dense.fun, dense.x0, jac=sparse.jac,
            method=self.method)
        assert_equal(res_sparse.nfev, res_dense.nfev)
        assert_allclose(res_sparse.x, res_dense.x, atol=1e-20)
        assert_allclose(res_sparse.cost, 0, atol=1e-20)
        assert_allclose(res_dense.cost, 0, atol=1e-20)
    def test_tr_options(self):
        # Options must be forwarded to the lsmr trust-region solver.
        p = BroydenTridiagonal()
        res = least_squares(p.fun, p.x0, p.jac, method=self.method,
                            tr_options={'btol': 1e-10})
        assert_allclose(res.cost, 0, atol=1e-20)
    def test_wrong_parameters(self):
        p = BroydenTridiagonal()
        # Unknown tr_solver name.
        assert_raises(ValueError, least_squares, p.fun, p.x0, p.jac,
                      tr_solver='best', method=self.method)
        # Unknown lsmr option. method=self.method is passed so the
        # subclass' solver is exercised (it was previously omitted here,
        # silently testing only the default method).
        assert_raises(TypeError, least_squares, p.fun, p.x0, p.jac,
                      tr_solver='lsmr', tr_options={'tol': 1e-10},
                      method=self.method)
    def test_solver_selection(self):
        # A sparse Jacobian input must produce a sparse res.jac, a dense
        # input an ndarray.
        sparse = BroydenTridiagonal(mode='sparse')
        dense = BroydenTridiagonal(mode='dense')
        res_sparse = least_squares(sparse.fun, sparse.x0, jac=sparse.jac,
                                   method=self.method)
        res_dense = least_squares(dense.fun, dense.x0, jac=dense.jac,
                                  method=self.method)
        assert_allclose(res_sparse.cost, 0, atol=1e-20)
        assert_allclose(res_dense.cost, 0, atol=1e-20)
        assert_(issparse(res_sparse.jac))
        assert_(isinstance(res_dense.jac, np.ndarray))
    def test_numerical_jac(self):
        # Finite-difference estimation with and without a sparsity pattern
        # must behave identically on this problem.
        p = BroydenTridiagonal()
        for jac in ['2-point', '3-point', 'cs']:
            res_dense = least_squares(p.fun, p.x0, jac, method=self.method)
            res_sparse = least_squares(
                p.fun, p.x0, jac, method=self.method,
                jac_sparsity=p.sparsity)
            assert_equal(res_dense.nfev, res_sparse.nfev)
            assert_allclose(res_dense.x, res_sparse.x, atol=1e-20)
            assert_allclose(res_dense.cost, 0, atol=1e-20)
            assert_allclose(res_sparse.cost, 0, atol=1e-20)
    def test_with_bounds(self):
        # All Jacobian/sparsity combinations must converge under one-sided
        # and two-sided bounds.
        p = BroydenTridiagonal()
        for jac, jac_sparsity in product(
                [p.jac, '2-point', '3-point', 'cs'], [None, p.sparsity]):
            res_1 = least_squares(
                p.fun, p.x0, jac, bounds=(p.lb, np.inf),
                method=self.method, jac_sparsity=jac_sparsity)
            res_2 = least_squares(
                p.fun, p.x0, jac, bounds=(-np.inf, p.ub),
                method=self.method, jac_sparsity=jac_sparsity)
            res_3 = least_squares(
                p.fun, p.x0, jac, bounds=(p.lb, p.ub),
                method=self.method, jac_sparsity=jac_sparsity)
            assert_allclose(res_1.optimality, 0, atol=1e-10)
            assert_allclose(res_2.optimality, 0, atol=1e-10)
            assert_allclose(res_3.optimality, 0, atol=1e-10)
    def test_wrong_jac_sparsity(self):
        # A sparsity structure of the wrong shape must be rejected.
        p = BroydenTridiagonal()
        sparsity = p.sparsity[:-1]
        assert_raises(ValueError, least_squares, p.fun, p.x0,
                      jac_sparsity=sparsity, method=self.method)
    def test_linear_operator(self):
        # A LinearOperator Jacobian works with lsmr but not with 'exact'.
        p = BroydenTridiagonal(mode='operator')
        res = least_squares(p.fun, p.x0, p.jac, method=self.method)
        assert_allclose(res.cost, 0.0, atol=1e-20)
        assert_raises(ValueError, least_squares, p.fun, p.x0, p.jac,
                      method=self.method, tr_solver='exact')
    def test_x_scale_jac_scale(self):
        # x_scale='jac' works with explicit Jacobians but is rejected for
        # LinearOperator Jacobians (no access to matrix entries).
        p = BroydenTridiagonal()
        res = least_squares(p.fun, p.x0, p.jac, method=self.method,
                            x_scale='jac')
        assert_allclose(res.cost, 0.0, atol=1e-20)
        p = BroydenTridiagonal(mode='operator')
        assert_raises(ValueError, least_squares, p.fun, p.x0, p.jac,
                      method=self.method, x_scale='jac')
class LossFunctionMixin(object):
    """Shared tests of robust loss functions (requires self.method)."""
    def test_options(self):
        # Every predefined loss must converge; an unknown name must raise.
        for loss in LOSSES:
            res = least_squares(fun_trivial, 2.0, loss=loss,
                                method=self.method)
            assert_allclose(res.x, 0, atol=1e-15)
        assert_raises(ValueError, least_squares, fun_trivial, 2.0,
                      loss='hinge', method=self.method)
    def test_fun(self):
        # Test that res.fun is actual residuals, and not modified by loss
        # function stuff.
        for loss in LOSSES:
            res = least_squares(fun_trivial, 2.0, loss=loss,
                                method=self.method)
            assert_equal(res.fun, fun_trivial(res.x))
    def test_grad(self):
        # Test that res.grad is true gradient of loss function at the
        # solution. Use max_nfev = 1, to avoid reaching minimum.
        x = np.array([2.0]) # res.x will be this.
        res = least_squares(fun_trivial, x, jac_trivial, loss='linear',
                            max_nfev=1, method=self.method)
        assert_equal(res.grad, 2 * x * (x**2 + 5))
        res = least_squares(fun_trivial, x, jac_trivial, loss='huber',
                            max_nfev=1, method=self.method)
        assert_equal(res.grad, 2 * x)
        res = least_squares(fun_trivial, x, jac_trivial, loss='soft_l1',
                            max_nfev=1, method=self.method)
        assert_allclose(res.grad,
                        2 * x * (x**2 + 5) / (1 + (x**2 + 5)**2)**0.5)
        res = least_squares(fun_trivial, x, jac_trivial, loss='cauchy',
                            max_nfev=1, method=self.method)
        assert_allclose(res.grad, 2 * x * (x**2 + 5) / (1 + (x**2 + 5)**2))
        res = least_squares(fun_trivial, x, jac_trivial, loss='arctan',
                            max_nfev=1, method=self.method)
        assert_allclose(res.grad, 2 * x * (x**2 + 5) / (1 + (x**2 + 5)**4))
        res = least_squares(fun_trivial, x, jac_trivial, loss=cubic_soft_l1,
                            max_nfev=1, method=self.method)
        assert_allclose(res.grad,
                        2 * x * (x**2 + 5) / (1 + (x**2 + 5)**2)**(2/3))
    def test_jac(self):
        # Test that res.jac.T.dot(res.jac) gives Gauss-Newton approximation
        # of Hessian. This approximation is computed by doubly differentiating
        # the cost function and dropping the part containing second derivative
        # of f. For a scalar function it is computed as
        # H = (rho' + 2 * rho'' * f**2) * f'**2, if the expression inside the
        # brackets is less than EPS it is replaced by EPS. Here, we check
        # against the root of H.
        #
        # Every call here passes method=self.method; three of them used to
        # omit it and therefore always exercised the default method only.
        x = 2.0 # res.x will be this.
        f = x**2 + 5 # res.fun will be this.
        res = least_squares(fun_trivial, x, jac_trivial, loss='linear',
                            max_nfev=1, method=self.method)
        assert_equal(res.jac, 2 * x)
        # For `huber` loss the Jacobian correction is identically zero
        # in outlier region, in such cases it is modified to be equal EPS**0.5.
        res = least_squares(fun_trivial, x, jac_trivial, loss='huber',
                            max_nfev=1, method=self.method)
        assert_equal(res.jac, 2 * x * EPS**0.5)
        # Now, let's apply `f_scale` to turn the residual into an inlier.
        # The loss function becomes linear.
        res = least_squares(fun_trivial, x, jac_trivial, loss='huber',
                            f_scale=10, max_nfev=1, method=self.method)
        assert_equal(res.jac, 2 * x)
        # 'soft_l1' always gives a positive scaling.
        res = least_squares(fun_trivial, x, jac_trivial, loss='soft_l1',
                            max_nfev=1, method=self.method)
        assert_allclose(res.jac, 2 * x * (1 + f**2)**-0.75)
        # For 'cauchy' the correction term turns out to be negative, and it
        # replaced by EPS**0.5.
        res = least_squares(fun_trivial, x, jac_trivial, loss='cauchy',
                            max_nfev=1, method=self.method)
        assert_allclose(res.jac, 2 * x * EPS**0.5)
        # Now use scaling to turn the residual to inlier.
        res = least_squares(fun_trivial, x, jac_trivial, loss='cauchy',
                            f_scale=10, max_nfev=1, method=self.method)
        fs = f / 10
        assert_allclose(res.jac, 2 * x * (1 - fs**2)**0.5 / (1 + fs**2))
        # 'arctan' gives an outlier.
        res = least_squares(fun_trivial, x, jac_trivial, loss='arctan',
                            max_nfev=1, method=self.method)
        assert_allclose(res.jac, 2 * x * EPS**0.5)
        # Turn to inlier.
        res = least_squares(fun_trivial, x, jac_trivial, loss='arctan',
                            f_scale=20.0, max_nfev=1, method=self.method)
        fs = f / 20
        assert_allclose(res.jac, 2 * x * (1 - 3 * fs**4)**0.5 / (1 + fs**4))
        # cubic_soft_l1 will give an outlier.
        res = least_squares(fun_trivial, x, jac_trivial, loss=cubic_soft_l1,
                            max_nfev=1, method=self.method)
        assert_allclose(res.jac, 2 * x * EPS**0.5)
        # Turn to inlier.
        res = least_squares(fun_trivial, x, jac_trivial,
                            loss=cubic_soft_l1, f_scale=6, max_nfev=1,
                            method=self.method)
        fs = f / 6
        assert_allclose(res.jac,
                        2 * x * (1 - fs**2 / 3)**0.5 * (1 + fs**2)**(-5/6))
    def test_robustness(self):
        # A robust loss must beat plain least squares on a noisy
        # exponential fitting problem.
        for noise in [0.1, 1.0]:
            p = ExponentialFittingProblem(1, 0.1, noise, random_seed=0)
            for jac in ['2-point', '3-point', 'cs', p.jac]:
                res_lsq = least_squares(p.fun, p.p0, jac=jac,
                                        method=self.method)
                assert_allclose(res_lsq.optimality, 0, atol=1e-2)
                for loss in LOSSES:
                    if loss == 'linear':
                        continue
                    res_robust = least_squares(
                        p.fun, p.p0, jac=jac, loss=loss, f_scale=noise,
                        method=self.method)
                    assert_allclose(res_robust.optimality, 0, atol=1e-2)
                    assert_(norm(res_robust.x - p.p_opt) <
                            norm(res_lsq.x - p.p_opt))
class TestDogbox(BaseMixin, BoundsMixin, SparseMixin, LossFunctionMixin):
    # Run all shared test suites against the 'dogbox' solver.
    method = 'dogbox'
class TestTRF(BaseMixin, BoundsMixin, SparseMixin, LossFunctionMixin):
    # Run all shared test suites against the 'trf' solver.
    method = 'trf'
    def test_lsmr_regularization(self):
        # The 'regularize' lsmr option must be accepted in both states
        # without affecting convergence.
        p = BroydenTridiagonal()
        for regularize in [True, False]:
            res = least_squares(p.fun, p.x0, p.jac, method='trf',
                                tr_options={'regularize': regularize})
            assert_allclose(res.cost, 0, atol=1e-20)
class TestLM(BaseMixin):
    # 'lm' wraps MINPACK and supports only a subset of the interface; the
    # tests below check that unsupported features are rejected cleanly.
    method = 'lm'
    def test_bounds_not_supported(self):
        assert_raises(ValueError, least_squares, fun_trivial,
                      2.0, bounds=(-3.0, 3.0), method='lm')
    def test_m_less_n_not_supported(self):
        # Fewer residuals than variables is not allowed by MINPACK.
        x0 = [-2, 1]
        assert_raises(ValueError, least_squares, fun_rosenbrock_cropped, x0,
                      method='lm')
    def test_sparse_not_supported(self):
        p = BroydenTridiagonal()
        assert_raises(ValueError, least_squares, p.fun, p.x0, p.jac,
                      method='lm')
    def test_jac_sparsity_not_supported(self):
        assert_raises(ValueError, least_squares, fun_trivial, 2.0,
                      jac_sparsity=[1], method='lm')
    def test_LinearOperator_not_supported(self):
        p = BroydenTridiagonal(mode="operator")
        assert_raises(ValueError, least_squares, p.fun, p.x0, p.jac,
                      method='lm')
    def test_loss(self):
        # Only the default 'linear' loss is allowed with 'lm'.
        res = least_squares(fun_trivial, 2.0, loss='linear', method='lm')
        assert_allclose(res.x, 0.0, atol=1e-4)
        assert_raises(ValueError, least_squares, fun_trivial, 2.0,
                      method='lm', loss='huber')
def test_basic():
    """The `method` argument must be optional; the default should solve
    the trivial problem to high accuracy."""
    result = least_squares(fun_trivial, 2.0)
    assert_allclose(result.x, 0, atol=1e-10)
def test_small_tolerances_for_lm():
    """'lm' must reject a disabled (None) tolerance in any position."""
    base = {'ftol': 1e-13, 'xtol': 1e-13, 'gtol': 1e-13}
    for disabled in ('ftol', 'xtol', 'gtol'):
        tols = dict(base, **{disabled: None})
        assert_raises(ValueError, least_squares, fun_trivial, 2.0,
                      method='lm', **tols)
import os
import re
import shutil
import time
import warnings
from io import StringIO
from unittest import mock, skipUnless
from admin_scripts.tests import AdminScriptTestCase
from django.core import management
from django.core.management import execute_from_command_line
from django.core.management.base import CommandError
from django.core.management.commands.makemessages import \
Command as MakeMessagesCommand
from django.core.management.utils import find_command
from django.test import SimpleTestCase, override_settings
from django.test.utils import captured_stderr, captured_stdout
from django.utils._os import symlinks_supported
from django.utils.translation import TranslatorCommentWarning
from .utils import POFileAssertionMixin, RunInTmpDirMixin, copytree
# Target locale used by all extraction tests below.
LOCALE = 'de'
# Truthy when the xgettext binary is available; used by @skipUnless to
# skip extraction tests otherwise.
has_xgettext = find_command('xgettext')
@skipUnless(has_xgettext, 'xgettext is mandatory for extraction tests')
class ExtractorTests(POFileAssertionMixin, RunInTmpDirMixin, SimpleTestCase):
    """Base class with assertion helpers for makemessages extraction tests."""
    work_subdir = 'commands'
    PO_FILE = 'locale/%s/LC_MESSAGES/django.po' % LOCALE
    def _run_makemessages(self, **options):
        """Run makemessages for LOCALE; return (command output, .po contents)."""
        os.chdir(self.test_dir)
        out = StringIO()
        management.call_command('makemessages', locale=[LOCALE], verbosity=2, stdout=out, **options)
        output = out.getvalue()
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
        return output, po_contents
    def assertMsgIdPlural(self, msgid, haystack, use_quotes=True):
        return self._assertPoKeyword('msgid_plural', msgid, haystack, use_quotes=use_quotes)
    def assertMsgStr(self, msgstr, haystack, use_quotes=True):
        return self._assertPoKeyword('msgstr', msgstr, haystack, use_quotes=use_quotes)
    def assertNotMsgId(self, msgid, s, use_quotes=True):
        if use_quotes:
            msgid = '"%s"' % msgid
        msgid = re.escape(msgid)
        # assertNotRegex gives an informative failure message, unlike the
        # previous assertTrue(not re.search(...)).
        return self.assertNotRegex(s, re.compile('^msgid %s' % msgid, re.MULTILINE))
    def _assertPoLocComment(self, assert_presence, po_filename, line_number, *comment_parts):
        """Assert presence/absence of a '#: path:line' location comment."""
        with open(po_filename, 'r') as fp:
            po_contents = fp.read()
        if os.name == 'nt':
            # #: .\path\to\file.html:123
            cwd_prefix = '%s%s' % (os.curdir, os.sep)
        else:
            # #: path/to/file.html:123
            cwd_prefix = ''
        path = os.path.join(cwd_prefix, *comment_parts)
        parts = [path]
        if isinstance(line_number, str):
            # A string line_number is a token to search for in the file.
            line_number = self._get_token_line_number(path, line_number)
        if line_number is not None:
            parts.append(':%d' % line_number)
        needle = ''.join(parts)
        pattern = re.compile(r'^\#\:.*' + re.escape(needle), re.MULTILINE)
        if assert_presence:
            return self.assertRegex(po_contents, pattern, '"%s" not found in final .po file.' % needle)
        else:
            return self.assertNotRegex(po_contents, pattern, '"%s" shouldn\'t be in final .po file.' % needle)
    def _get_token_line_number(self, path, token):
        """Return the 1-based line number of the first line containing token."""
        with open(path) as f:
            for line, content in enumerate(f, 1):
                if token in content:
                    return line
        self.fail("The token '%s' could not be found in %s, please check the test config" % (token, path))
    def assertLocationCommentPresent(self, po_filename, line_number, *comment_parts):
        r"""
        self.assertLocationCommentPresent('django.po', 42, 'dirA', 'dirB', 'foo.py')
        verifies that the django.po file has a gettext-style location comment of the form
        `#: dirA/dirB/foo.py:42`
        (or `#: .\dirA\dirB\foo.py:42` on Windows)
        None can be passed for the line_number argument to skip checking of
        the :42 suffix part.
        A string token can also be passed as line_number, in which case it
        will be searched in the template, and its line number will be used.
        A msgid is a suitable candidate.
        """
        return self._assertPoLocComment(True, po_filename, line_number, *comment_parts)
    def assertLocationCommentNotPresent(self, po_filename, line_number, *comment_parts):
        """Check the opposite of assertLocationComment()"""
        return self._assertPoLocComment(False, po_filename, line_number, *comment_parts)
    def assertRecentlyModified(self, path):
        """
        Assert that file was recently modified (modification time was less than 10 seconds ago).
        """
        delta = time.time() - os.stat(path).st_mtime
        # The message is shown when the assertion FAILS, i.e. when the
        # file was not recently modified (previously it stated the
        # opposite).
        self.assertLess(delta, 10, "%s wasn't recently modified" % path)
    def assertNotRecentlyModified(self, path):
        """
        Assert that file was not recently modified (modification time was more than 10 seconds ago).
        """
        delta = time.time() - os.stat(path).st_mtime
        # The message is shown when the assertion FAILS, i.e. when the
        # file WAS recently modified (previously it stated the opposite).
        self.assertGreater(delta, 10, "%s was recently modified" % path)
class BasicExtractorTests(ExtractorTests):
@override_settings(USE_I18N=False)
def test_use_i18n_false(self):
"""
makemessages also runs successfully when USE_I18N is False.
"""
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r', encoding='utf-8') as fp:
po_contents = fp.read()
# Check two random strings
self.assertIn('#. Translators: One-line translator comment #1', po_contents)
self.assertIn('msgctxt "Special trans context #1"', po_contents)
def test_comments_extractor(self):
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r', encoding='utf-8') as fp:
po_contents = fp.read()
self.assertNotIn('This comment should not be extracted', po_contents)
# Comments in templates
self.assertIn('#. Translators: This comment should be extracted', po_contents)
self.assertIn(
"#. Translators: Django comment block for translators\n#. "
"string's meaning unveiled",
po_contents
)
self.assertIn('#. Translators: One-line translator comment #1', po_contents)
self.assertIn('#. Translators: Two-line translator comment #1\n#. continued here.', po_contents)
self.assertIn('#. Translators: One-line translator comment #2', po_contents)
self.assertIn('#. Translators: Two-line translator comment #2\n#. continued here.', po_contents)
self.assertIn('#. Translators: One-line translator comment #3', po_contents)
self.assertIn('#. Translators: Two-line translator comment #3\n#. continued here.', po_contents)
self.assertIn('#. Translators: One-line translator comment #4', po_contents)
self.assertIn('#. Translators: Two-line translator comment #4\n#. continued here.', po_contents)
self.assertIn(
'#. Translators: One-line translator comment #5 -- with '
'non ASCII characters: áéíóúö',
po_contents
)
self.assertIn(
'#. Translators: Two-line translator comment #5 -- with '
'non ASCII characters: áéíóúö\n#. continued here.',
po_contents
)
def test_special_char_extracted(self):
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r', encoding='utf-8') as fp:
po_contents = fp.read()
self.assertMsgId("Non-breaking space\u00a0:", po_contents)
def test_blocktrans_trimmed(self):
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = fp.read()
# should not be trimmed
self.assertNotMsgId('Text with a few line breaks.', po_contents)
# should be trimmed
self.assertMsgId("Again some text with a few line breaks, this time should be trimmed.", po_contents)
# #21406 -- Should adjust for eaten line numbers
self.assertMsgId("Get my line number", po_contents)
self.assertLocationCommentPresent(self.PO_FILE, 'Get my line number', 'templates', 'test.html')
def test_force_en_us_locale(self):
"""Value of locale-munging option used by the command is the right one"""
self.assertTrue(MakeMessagesCommand.leave_locale_alone)
def test_extraction_error(self):
msg = (
'Translation blocks must not include other block tags: blocktrans '
'(file %s, line 3)' % os.path.join('templates', 'template_with_error.tpl')
)
with self.assertRaisesMessage(SyntaxError, msg):
management.call_command('makemessages', locale=[LOCALE], extensions=['tpl'], verbosity=0)
# The temporary file was cleaned up
self.assertFalse(os.path.exists('./templates/template_with_error.tpl.py'))
def test_unicode_decode_error(self):
shutil.copyfile('./not_utf8.sample', './not_utf8.txt')
out = StringIO()
management.call_command('makemessages', locale=[LOCALE], stdout=out)
self.assertIn("UnicodeDecodeError: skipped file not_utf8.txt in .", out.getvalue())
def test_unicode_file_name(self):
open(os.path.join(self.test_dir, 'vidéo.txt'), 'a').close()
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
def test_extraction_warning(self):
"""test xgettext warning about multiple bare interpolation placeholders"""
shutil.copyfile('./code.sample', './code_sample.py')
out = StringIO()
management.call_command('makemessages', locale=[LOCALE], stdout=out)
self.assertIn("code_sample.py:4", out.getvalue())
def test_template_message_context_extractor(self):
"""
Message contexts are correctly extracted for the {% trans %} and
{% blocktrans %} template tags (#14806).
"""
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = fp.read()
# {% trans %}
self.assertIn('msgctxt "Special trans context #1"', po_contents)
self.assertMsgId("Translatable literal #7a", po_contents)
self.assertIn('msgctxt "Special trans context #2"', po_contents)
self.assertMsgId("Translatable literal #7b", po_contents)
self.assertIn('msgctxt "Special trans context #3"', po_contents)
self.assertMsgId("Translatable literal #7c", po_contents)
# {% trans %} with a filter
for minor_part in 'abcdefgh': # Iterate from #7.1a to #7.1h template markers
self.assertIn('msgctxt "context #7.1{}"'.format(minor_part), po_contents)
self.assertMsgId('Translatable literal #7.1{}'.format(minor_part), po_contents)
# {% blocktrans %}
self.assertIn('msgctxt "Special blocktrans context #1"', po_contents)
self.assertMsgId("Translatable literal #8a", po_contents)
self.assertIn('msgctxt "Special blocktrans context #2"', po_contents)
self.assertMsgId("Translatable literal #8b-singular", po_contents)
self.assertIn("Translatable literal #8b-plural", po_contents)
self.assertIn('msgctxt "Special blocktrans context #3"', po_contents)
self.assertMsgId("Translatable literal #8c-singular", po_contents)
self.assertIn("Translatable literal #8c-plural", po_contents)
self.assertIn('msgctxt "Special blocktrans context #4"', po_contents)
self.assertMsgId("Translatable literal #8d %(a)s", po_contents)
def test_context_in_single_quotes(self):
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = fp.read()
# {% trans %}
self.assertIn('msgctxt "Context wrapped in double quotes"', po_contents)
self.assertIn('msgctxt "Context wrapped in single quotes"', po_contents)
# {% blocktrans %}
self.assertIn('msgctxt "Special blocktrans context wrapped in double quotes"', po_contents)
self.assertIn('msgctxt "Special blocktrans context wrapped in single quotes"', po_contents)
def test_template_comments(self):
"""Template comment tags on the same line of other constructs (#19552)"""
# Test detection/end user reporting of old, incorrect templates
# translator comments syntax
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter('always')
management.call_command('makemessages', locale=[LOCALE], extensions=['thtml'], verbosity=0)
self.assertEqual(len(ws), 3)
for w in ws:
self.assertTrue(issubclass(w.category, TranslatorCommentWarning))
self.assertRegex(
str(ws[0].message),
r"The translator-targeted comment 'Translators: ignored i18n "
r"comment #1' \(file templates[/\\]comments.thtml, line 4\) "
r"was ignored, because it wasn't the last item on the line\."
)
self.assertRegex(
str(ws[1].message),
r"The translator-targeted comment 'Translators: ignored i18n "
r"comment #3' \(file templates[/\\]comments.thtml, line 6\) "
r"was ignored, because it wasn't the last item on the line\."
)
self.assertRegex(
str(ws[2].message),
r"The translator-targeted comment 'Translators: ignored i18n "
r"comment #4' \(file templates[/\\]comments.thtml, line 8\) "
r"was ignored, because it wasn't the last item on the line\."
)
# Now test .po file contents
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = fp.read()
self.assertMsgId('Translatable literal #9a', po_contents)
self.assertNotIn('ignored comment #1', po_contents)
self.assertNotIn('Translators: ignored i18n comment #1', po_contents)
self.assertMsgId("Translatable literal #9b", po_contents)
self.assertNotIn('ignored i18n comment #2', po_contents)
self.assertNotIn('ignored comment #2', po_contents)
self.assertMsgId('Translatable literal #9c', po_contents)
self.assertNotIn('ignored comment #3', po_contents)
self.assertNotIn('ignored i18n comment #3', po_contents)
self.assertMsgId('Translatable literal #9d', po_contents)
self.assertNotIn('ignored comment #4', po_contents)
self.assertMsgId('Translatable literal #9e', po_contents)
self.assertNotIn('ignored comment #5', po_contents)
self.assertNotIn('ignored i18n comment #4', po_contents)
self.assertMsgId('Translatable literal #9f', po_contents)
self.assertIn('#. Translators: valid i18n comment #5', po_contents)
self.assertMsgId('Translatable literal #9g', po_contents)
self.assertIn('#. Translators: valid i18n comment #6', po_contents)
self.assertMsgId('Translatable literal #9h', po_contents)
self.assertIn('#. Translators: valid i18n comment #7', po_contents)
self.assertMsgId('Translatable literal #9i', po_contents)
self.assertRegex(po_contents, r'#\..+Translators: valid i18n comment #8')
self.assertRegex(po_contents, r'#\..+Translators: valid i18n comment #9')
self.assertMsgId("Translatable literal #9j", po_contents)
def test_makemessages_find_files(self):
"""
find_files only discover files having the proper extensions.
"""
cmd = MakeMessagesCommand()
cmd.ignore_patterns = ['CVS', '.*', '*~', '*.pyc']
cmd.symlinks = False
cmd.domain = 'django'
cmd.extensions = ['html', 'txt', 'py']
cmd.verbosity = 0
cmd.locale_paths = []
cmd.default_locale_path = os.path.join(self.test_dir, 'locale')
found_files = cmd.find_files(self.test_dir)
found_exts = set([os.path.splitext(tfile.file)[1] for tfile in found_files])
self.assertEqual(found_exts.difference({'.py', '.html', '.txt'}), set())
cmd.extensions = ['js']
cmd.domain = 'djangojs'
found_files = cmd.find_files(self.test_dir)
found_exts = set([os.path.splitext(tfile.file)[1] for tfile in found_files])
self.assertEqual(found_exts.difference({'.js'}), set())
    @mock.patch('django.core.management.commands.makemessages.popen_wrapper')
    def test_makemessages_gettext_version(self, mocked_popen_wrapper):
        """gettext_version parses the version tuple out of `xgettext --version` output."""
        # "Normal" output:
        mocked_popen_wrapper.return_value = (
            "xgettext (GNU gettext-tools) 0.18.1\n"
            "Copyright (C) 1995-1998, 2000-2010 Free Software Foundation, Inc.\n"
            "License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>\n"
            "This is free software: you are free to change and redistribute it.\n"
            "There is NO WARRANTY, to the extent permitted by law.\n"
            "Written by Ulrich Drepper.\n", '', 0)
        cmd = MakeMessagesCommand()
        self.assertEqual(cmd.gettext_version, (0, 18, 1))
        # Version number with only 2 parts (#23788)
        mocked_popen_wrapper.return_value = (
            "xgettext (GNU gettext-tools) 0.17\n", '', 0)
        # NOTE(review): a fresh command instance is created for each version
        # string — presumably gettext_version is cached per instance; confirm
        # against makemessages.Command.
        cmd = MakeMessagesCommand()
        self.assertEqual(cmd.gettext_version, (0, 17))
        # Bad version output
        mocked_popen_wrapper.return_value = (
            "any other return value\n", '', 0)
        cmd = MakeMessagesCommand()
        with self.assertRaisesMessage(CommandError, "Unable to get gettext version. Is it installed?"):
            cmd.gettext_version
    def test_po_file_encoding_when_updating(self):
        """
        Update of PO file doesn't corrupt it with non-UTF-8 encoding on Windows
        (#23271).
        """
        BR_PO_BASE = 'locale/pt_BR/LC_MESSAGES/django'
        # Start from a pristine catalog so makemessages runs in "update" mode
        # rather than creating the file from scratch.
        shutil.copyfile(BR_PO_BASE + '.pristine', BR_PO_BASE + '.po')
        management.call_command('makemessages', locale=['pt_BR'], verbosity=0)
        self.assertTrue(os.path.exists(BR_PO_BASE + '.po'))
        with open(BR_PO_BASE + '.po', 'r', encoding='utf-8') as fp:
            po_contents = fp.read()
            # The non-ASCII translated string must survive the update intact.
            self.assertMsgStr("Größe", po_contents)
class JavascriptExtractorTests(ExtractorTests):
    """Message extraction from JavaScript sources (the djangojs domain)."""

    PO_FILE = 'locale/%s/LC_MESSAGES/djangojs.po' % LOCALE

    def test_javascript_literals(self):
        # Each assertion corresponds to a literal in the sample JS sources;
        # together they cover quoting, comments, and URL-in-string edge cases.
        _, po_contents = self._run_makemessages(domain='djangojs')
        self.assertMsgId('This literal should be included.', po_contents)
        self.assertMsgId('gettext_noop should, too.', po_contents)
        self.assertMsgId('This one as well.', po_contents)
        self.assertMsgId(r'He said, \"hello\".', po_contents)
        self.assertMsgId("okkkk", po_contents)
        self.assertMsgId("TEXT", po_contents)
        self.assertMsgId("It's at http://example.com", po_contents)
        self.assertMsgId("String", po_contents)
        self.assertMsgId("/* but this one will be too */ 'cause there is no way of telling...", po_contents)
        self.assertMsgId("foo", po_contents)
        self.assertMsgId("bar", po_contents)
        self.assertMsgId("baz", po_contents)
        self.assertMsgId("quz", po_contents)
        self.assertMsgId("foobar", po_contents)

    def test_media_static_dirs_ignored(self):
        """
        Regression test for #23583.
        """
        # Content under STATIC_ROOT/MEDIA_ROOT must be skipped, but static
        # files that live inside an app are still extracted.
        with override_settings(STATIC_ROOT=os.path.join(self.test_dir, 'static/'),
                               MEDIA_ROOT=os.path.join(self.test_dir, 'media_root/')):
            _, po_contents = self._run_makemessages(domain='djangojs')
            self.assertMsgId("Static content inside app should be included.", po_contents)
            self.assertNotMsgId("Content from STATIC_ROOT should not be included", po_contents)

    @override_settings(STATIC_ROOT=None, MEDIA_ROOT='')
    def test_default_root_settings(self):
        """
        Regression test for #23717.
        """
        # Extraction must not crash when STATIC_ROOT/MEDIA_ROOT are unset.
        _, po_contents = self._run_makemessages(domain='djangojs')
        self.assertMsgId("Static content inside app should be included.", po_contents)
class IgnoredExtractorTests(ExtractorTests):
    """--ignore patterns skip matching files and directories during extraction."""

    def test_ignore_directory(self):
        out, po_contents = self._run_makemessages(ignore_patterns=[
            os.path.join('ignore_dir', '*'),
        ])
        self.assertIn("ignoring directory ignore_dir", out)
        self.assertMsgId('This literal should be included.', po_contents)
        self.assertNotMsgId('This should be ignored.', po_contents)

    def test_ignore_subdirectory(self):
        out, po_contents = self._run_makemessages(ignore_patterns=[
            'templates/*/ignore.html',
            'templates/subdir/*',
        ])
        self.assertIn("ignoring directory subdir", out)
        self.assertNotMsgId('This subdir should be ignored too.', po_contents)

    def test_ignore_file_patterns(self):
        out, po_contents = self._run_makemessages(ignore_patterns=[
            'xxx_*',
        ])
        self.assertIn("ignoring file xxx_ignored.html", out)
        self.assertNotMsgId('This should be ignored too.', po_contents)

    def test_media_static_dirs_ignored(self):
        # STATIC_ROOT and MEDIA_ROOT are skipped even without an explicit
        # --ignore pattern.
        with override_settings(STATIC_ROOT=os.path.join(self.test_dir, 'static/'),
                               MEDIA_ROOT=os.path.join(self.test_dir, 'media_root/')):
            out, _ = self._run_makemessages()
        self.assertIn("ignoring directory static", out)
        self.assertIn("ignoring directory media_root", out)
class SymlinkExtractorTests(ExtractorTests):
    """Extraction follows symlinked directories when --symlinks is given."""

    def setUp(self):
        super().setUp()
        self.symlinked_dir = os.path.join(self.test_dir, 'templates_symlinked')

    def test_symlink(self):
        # Reuse a pre-existing symlink if the work dir already has one;
        # otherwise create it, skipping when the OS can't make symlinks.
        if os.path.exists(self.symlinked_dir):
            self.assertTrue(os.path.islink(self.symlinked_dir))
        else:
            if symlinks_supported():
                os.symlink(os.path.join(self.test_dir, 'templates'), self.symlinked_dir)
            else:
                self.skipTest("os.symlink() not available on this OS + Python version combination.")
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, symlinks=True)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
            self.assertMsgId('This literal should be included.', po_contents)
        # The location comment must reference the symlinked path, not the target.
        self.assertLocationCommentPresent(self.PO_FILE, None, 'templates_symlinked', 'test.html')
class CopyPluralFormsExtractorTests(ExtractorTests):
    """Plural-Forms header handling when creating/merging catalogs."""

    PO_FILE_ES = 'locale/es/LC_MESSAGES/django.po'

    def test_copy_plural_forms(self):
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
            self.assertIn('Plural-Forms: nplurals=2; plural=(n != 1)', po_contents)

    def test_override_plural_forms(self):
        """Ticket #20311."""
        management.call_command('makemessages', locale=['es'], extensions=['djtpl'], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE_ES))
        with open(self.PO_FILE_ES, 'r', encoding='utf-8') as fp:
            po_contents = fp.read()
            # Exactly one Plural-Forms header must remain in the catalog.
            found = re.findall(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', po_contents, re.MULTILINE | re.DOTALL)
            self.assertEqual(1, len(found))

    def test_trans_and_plural_blocktrans_collision(self):
        """
        Ensures a correct workaround for the gettext bug when handling a literal
        found inside a {% trans %} tag and also in another file inside a
        {% blocktrans %} with a plural (#17375).
        """
        management.call_command('makemessages', locale=[LOCALE], extensions=['html', 'djtpl'], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
            # The msgcat conflict marker must not leak into the merged catalog.
            self.assertNotIn("#-#-#-#-# django.pot (PACKAGE VERSION) #-#-#-#-#\\n", po_contents)
            self.assertMsgId('First `trans`, then `blocktrans` with a plural', po_contents)
            self.assertMsgIdPlural('Plural for a `trans` and `blocktrans` collision case', po_contents)
class NoWrapExtractorTests(ExtractorTests):
    """Tests for the --no-wrap option controlling gettext's line wrapping."""

    def test_no_wrap_enabled(self):
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=True)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
            # With --no-wrap the whole literal stays on a single msgid line.
            self.assertMsgId(
                'This literal should also be included wrapped or not wrapped '
                'depending on the use of the --no-wrap option.',
                po_contents
            )

    def test_no_wrap_disabled(self):
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=False)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
            # Without --no-wrap, gettext wraps the msgid over several quoted
            # lines, so the expected value includes the literal quoting and
            # use_quotes=False disables the helper's own quoting.
            self.assertMsgId(
                '""\n"This literal should also be included wrapped or not '
                'wrapped depending on the "\n"use of the --no-wrap option."',
                po_contents,
                use_quotes=False
            )
class LocationCommentsTests(ExtractorTests):
    """Handling of "#: file:line" location comments (--no-location)."""

    def test_no_location_enabled(self):
        """Behavior is correct if --no-location switch is specified. See #16903."""
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=True)
        self.assertTrue(os.path.exists(self.PO_FILE))
        self.assertLocationCommentNotPresent(self.PO_FILE, None, 'test.html')

    def test_no_location_disabled(self):
        """Behavior is correct if --no-location switch isn't specified."""
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=False)
        self.assertTrue(os.path.exists(self.PO_FILE))
        # #16903 -- Standard comment with source file relative path should be present
        self.assertLocationCommentPresent(self.PO_FILE, 'Translatable literal #6b', 'templates', 'test.html')

    def test_location_comments_for_templatized_files(self):
        """
        Ensure no leaky paths in comments, e.g. #: path\to\file.html.py:123
        Refs #21209/#26341.
        """
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
        # NOTE(review): this msgid is a literal that *looks like* a location
        # comment — presumably planted in the sample templates to catch leaky
        # intermediate ".py" paths; confirm against the fixtures.
        self.assertMsgId('#: templates/test.html.py', po_contents)
        self.assertLocationCommentNotPresent(self.PO_FILE, None, '.html.py')
        self.assertLocationCommentPresent(self.PO_FILE, 5, 'templates', 'test.html')
class KeepPotFileExtractorTests(ExtractorTests):
    """The .pot scratch file is deleted unless --keep-pot is given."""

    POT_FILE = 'locale/django.pot'

    def _pot_exists_after(self, **options):
        # Run makemessages with the given options and report whether the
        # intermediate .pot file survived the run.
        management.call_command('makemessages', locale=[LOCALE], verbosity=0,
                                **options)
        return os.path.exists(self.POT_FILE)

    def test_keep_pot_disabled_by_default(self):
        self.assertFalse(self._pot_exists_after())

    def test_keep_pot_explicitly_disabled(self):
        self.assertFalse(self._pot_exists_after(keep_pot=False))

    def test_keep_pot_enabled(self):
        self.assertTrue(self._pot_exists_after(keep_pot=True))
class MultipleLocaleExtractionTests(ExtractorTests):
    """makemessages can process several locales in a single invocation."""

    PO_FILE_PT = 'locale/pt/LC_MESSAGES/django.po'
    PO_FILE_DE = 'locale/de/LC_MESSAGES/django.po'
    LOCALES = ['pt', 'de', 'ch']

    def test_multiple_locales(self):
        # One call naming two locales must create a catalog for each of them.
        management.call_command('makemessages', locale=['pt', 'de'], verbosity=0)
        for po_file in (self.PO_FILE_PT, self.PO_FILE_DE):
            self.assertTrue(os.path.exists(po_file))
class ExcludedLocaleExtractionTests(ExtractorTests):
    """Tests for the --exclude option of makemessages."""

    work_subdir = 'exclude'

    LOCALES = ['en', 'fr', 'it']
    PO_FILE = 'locale/%s/LC_MESSAGES/django.po'

    def _set_times_for_all_po_files(self):
        """
        Set access and modification times to the Unix epoch time for all the .po files.
        """
        # With every catalog timestamped at the epoch, assertRecentlyModified /
        # assertNotRecentlyModified can tell which files a run rewrote.
        for locale in self.LOCALES:
            os.utime(self.PO_FILE % locale, (0, 0))

    def setUp(self):
        super().setUp()
        copytree('canned_locale', 'locale')
        self._set_times_for_all_po_files()

    def test_command_help(self):
        with captured_stdout(), captured_stderr():
            # `call_command` bypasses the parser; by calling
            # `execute_from_command_line` with the help subcommand we
            # ensure that there are no issues with the parser itself.
            execute_from_command_line(['django-admin', 'help', 'makemessages'])

    def test_one_locale_excluded(self):
        management.call_command('makemessages', exclude=['it'], stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')

    def test_multiple_locales_excluded(self):
        management.call_command('makemessages', exclude=['it', 'fr'], stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertNotRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')

    def test_one_locale_excluded_with_locale(self):
        # --exclude wins over an explicit --locale listing the same locale.
        management.call_command('makemessages', locale=['en', 'fr'], exclude=['fr'], stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertNotRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')

    def test_multiple_locales_excluded_with_locale(self):
        management.call_command('makemessages', locale=['en', 'fr', 'it'], exclude=['fr', 'it'],
                                stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertNotRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')
class CustomLayoutExtractionTests(ExtractorTests):
    """Catalog placement when the project uses a non-default directory layout."""

    work_subdir = 'project_dir'

    def test_no_locale_raises(self):
        # NOTE(review): `locale` is passed as a plain string here while every
        # other call in this file uses a list — presumably irrelevant for this
        # failure path; confirm it is intentional.
        msg = "Unable to find a locale path to store translations for file"
        with self.assertRaisesMessage(management.CommandError, msg):
            management.call_command('makemessages', locale=LOCALE, verbosity=0)

    def test_project_locale_paths(self):
        """
        * translations for an app containing a locale folder are stored in that folder
        * translations outside of that app are in LOCALE_PATHS[0]
        """
        with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'project_locale')]):
            management.call_command('makemessages', locale=[LOCALE], verbosity=0)
            project_de_locale = os.path.join(
                self.test_dir, 'project_locale', 'de', 'LC_MESSAGES', 'django.po')
            app_de_locale = os.path.join(
                self.test_dir, 'app_with_locale', 'locale', 'de', 'LC_MESSAGES', 'django.po')
            self.assertTrue(os.path.exists(project_de_locale))
            self.assertTrue(os.path.exists(app_de_locale))
            with open(project_de_locale, 'r') as fp:
                po_contents = fp.read()
                self.assertMsgId('This app has no locale directory', po_contents)
                self.assertMsgId('This is a project-level string', po_contents)
            with open(app_de_locale, 'r') as fp:
                po_contents = fp.read()
                self.assertMsgId('This app has a locale directory', po_contents)
@skipUnless(has_xgettext, 'xgettext is mandatory for extraction tests')
class NoSettingsExtractionTests(AdminScriptTestCase):
    """makemessages must work without a configured settings module."""

    def test_makemessages_no_settings(self):
        out, err = self.run_django_admin(['makemessages', '-l', 'en', '-v', '0'])
        # At verbosity 0 a successful run produces no output on either stream.
        self.assertNoOutput(err)
        self.assertNoOutput(out)
const path = require('path')
const { spawn } = require('child_process')
const fs = require('fs/promises')
const cwd = process.cwd()
/**
 * Move the Next.js tarball out of ./public, unpack it, rewrite the
 * DEPLOY_URL placeholders in its optionalDependencies to point at the
 * current Vercel deployment (VERCEL_URL), re-pack it, and move it back.
 */

// Run `tar` with the given arguments in the working directory. Resolves on a
// clean exit; rejects with an Error carrying the exit code otherwise.
function runTar(args) {
  return new Promise((resolve, reject) => {
    const child = spawn('tar', args, {
      stdio: 'inherit',
      shell: true,
      cwd,
    })
    child.on('exit', (code) => {
      if (code) {
        // Reject with a real Error (not a bare string) so callers get a
        // stack trace and `instanceof Error` checks work.
        return reject(new Error(`Failed with code ${code}`))
      }
      resolve()
    })
  })
}

async function main() {
  // ./public holds the main package tarball plus per-platform "-swc"
  // tarballs; only the main (non-swc) one needs its URLs rewritten.
  const tarballs = await fs.readdir(path.join(cwd, 'public'))
  const nextTarball = tarballs.find((item) => !item.includes('-swc'))
  if (!nextTarball) {
    // Fail fast with a clear message instead of crashing on `undefined`.
    throw new Error('No non-swc tarball found in ./public')
  }

  // Work on the tarball from the repository root.
  await fs.rename(
    path.join(cwd, 'public', nextTarball),
    path.join(cwd, nextTarball)
  )
  await runTar(['-xf', nextTarball])

  // Point every optional dependency at this deployment's URL.
  const unpackedPackageJson = path.join(cwd, 'package/package.json')
  const parsedPackageJson = JSON.parse(
    await fs.readFile(unpackedPackageJson, 'utf8')
  )
  const { optionalDependencies } = parsedPackageJson
  for (const key of Object.keys(optionalDependencies)) {
    optionalDependencies[key] = optionalDependencies[key].replace(
      'DEPLOY_URL',
      process.env.VERCEL_URL
    )
  }
  await fs.writeFile(
    unpackedPackageJson,
    JSON.stringify(parsedPackageJson, null, 2)
  )

  // Re-create the tarball from the patched contents and restore it.
  await fs.unlink(nextTarball)
  await runTar(['-czf', nextTarball, 'package'])
  await fs.rename(
    path.join(cwd, nextTarball),
    path.join(cwd, 'public', nextTarball)
  )
}

main().catch((err) => {
  console.error(err)
  process.exit(1)
})
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from concurrent import futures
import logging
import re
import socket
import unittest
from tornado.concurrent import (
Future,
run_on_executor,
future_set_result_unless_cancelled,
)
from tornado.escape import utf8, to_unicode
from tornado import gen
from tornado.iostream import IOStream
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncTestCase, bind_unused_port, gen_test
class MiscFutureTest(AsyncTestCase):
    def test_future_set_result_unless_cancelled(self):
        """future_set_result_unless_cancelled sets a result only on live futures."""
        # A future that was never cancelled receives the result normally.
        live = Future()  # type: Future[int]
        future_set_result_unless_cancelled(live, 42)
        self.assertFalse(live.cancelled())
        self.assertEqual(live.result(), 42)

        # A cancelled future is left untouched instead of raising.
        doomed = Future()
        doomed.cancel()
        was_cancelled = doomed.cancelled()
        future_set_result_unless_cancelled(doomed, 42)
        self.assertEqual(doomed.cancelled(), was_cancelled)
        if not was_cancelled:
            self.assertEqual(doomed.result(), 42)
# The following series of classes demonstrate and test various styles
# of use, with and without generators and futures.
class CapServer(TCPServer):
    """One-shot TCP server: reads a line and replies with it capitalized.

    Replies "ok" + TAB + upper-cased line on success, or an error line if
    the input was already entirely upper case; closes the stream afterwards.
    """

    @gen.coroutine
    def handle_stream(self, stream, address):
        data = yield stream.read_until(b"\n")
        data = to_unicode(data)
        if data == data.upper():
            stream.write(b"error\talready capitalized\n")
        else:
            # data already has \n
            stream.write(utf8("ok\t%s" % data.upper()))
        stream.close()
class CapError(Exception):
    """Raised by the clients when the cap server returns an error response."""
    pass
class BaseCapClient(object):
    """Shared plumbing for the capitalization clients.

    Stores the server port and decodes tab-separated
    "status<TAB>message" response lines.
    """

    def __init__(self, port):
        self.port = port

    def process_response(self, data):
        # Responses look like b"ok\tMESSAGE\n" or b"error\tMESSAGE\n".
        match = re.match("(.*)\t(.*)\n", to_unicode(data))
        if match is None:
            raise Exception("did not match")
        status, message = match.groups()
        if status != "ok":
            raise CapError(message)
        return message
class GeneratorCapClient(BaseCapClient):
    """Client implemented as a generator-based gen.coroutine."""

    @gen.coroutine
    def capitalize(self, request_data):
        logging.debug("capitalize")
        stream = IOStream(socket.socket())
        logging.debug("connecting")
        yield stream.connect(("127.0.0.1", self.port))
        stream.write(utf8(request_data + "\n"))
        logging.debug("reading")
        data = yield stream.read_until(b"\n")
        logging.debug("returning")
        stream.close()
        # gen.Return is the legacy way to return a value from a
        # generator-based coroutine (predates `return value` in generators).
        raise gen.Return(self.process_response(data))
class ClientTestMixin(object):
    """Common tests run against each client flavor.

    Subclasses set ``client_class``; the mixin starts a CapServer on an
    unused port and exercises the client both through the returned Future
    and through generator coroutines.
    """

    def setUp(self):
        super(ClientTestMixin, self).setUp()  # type: ignore
        self.server = CapServer()
        sock, port = bind_unused_port()
        self.server.add_sockets([sock])
        self.client = self.client_class(port=port)

    def tearDown(self):
        self.server.stop()
        super(ClientTestMixin, self).tearDown()  # type: ignore

    def test_future(self):
        future = self.client.capitalize("hello")
        self.io_loop.add_future(future, self.stop)
        self.wait()
        self.assertEqual(future.result(), "HELLO")

    def test_future_error(self):
        future = self.client.capitalize("HELLO")
        self.io_loop.add_future(future, self.stop)
        self.wait()
        # assertRaisesRegexp was a deprecated alias removed in Python 3.12;
        # use the modern assertRaisesRegex spelling.
        self.assertRaisesRegex(CapError, "already capitalized", future.result)

    def test_generator(self):
        @gen.coroutine
        def f():
            result = yield self.client.capitalize("hello")
            self.assertEqual(result, "HELLO")

        self.io_loop.run_sync(f)

    def test_generator_error(self):
        @gen.coroutine
        def f():
            with self.assertRaisesRegex(CapError, "already capitalized"):
                yield self.client.capitalize("HELLO")

        self.io_loop.run_sync(f)
# Concrete test case binding the mixin's tests to the generator-based client.
class GeneratorClientTest(ClientTestMixin, AsyncTestCase):
    client_class = GeneratorCapClient
class RunOnExecutorTest(AsyncTestCase):
    """Tests for @run_on_executor in its various decoration forms.

    ``futures.ThreadPoolExecutor`` (the documented public name) is used
    instead of reaching into the private ``futures.thread`` submodule.
    """

    @gen_test
    def test_no_calling(self):
        # Decorator used bare, without parentheses.
        class Object(object):
            def __init__(self):
                self.executor = futures.ThreadPoolExecutor(1)

            @run_on_executor
            def f(self):
                return 42

        o = Object()
        answer = yield o.f()
        self.assertEqual(answer, 42)

    @gen_test
    def test_call_with_no_args(self):
        # Decorator called with empty parentheses.
        class Object(object):
            def __init__(self):
                self.executor = futures.ThreadPoolExecutor(1)

            @run_on_executor()
            def f(self):
                return 42

        o = Object()
        answer = yield o.f()
        self.assertEqual(answer, 42)

    @gen_test
    def test_call_with_executor(self):
        # The executor can live on a custom (here name-mangled) attribute.
        class Object(object):
            def __init__(self):
                self.__executor = futures.ThreadPoolExecutor(1)

            @run_on_executor(executor="_Object__executor")
            def f(self):
                return 42

        o = Object()
        answer = yield o.f()
        self.assertEqual(answer, 42)

    @gen_test
    def test_async_await(self):
        # The decorated method's Future is also awaitable from native
        # async/await coroutines.
        class Object(object):
            def __init__(self):
                self.executor = futures.ThreadPoolExecutor(1)

            @run_on_executor()
            def f(self):
                return 42

        o = Object()

        async def f():
            answer = await o.f()
            return answer

        result = yield f()
        self.assertEqual(result, 42)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
''' This agent is based on the Bellman equation, used to create a matrix which will help the agent to find the
suitable move, given a certain position. The learning function constructs the matrix from random games. This matrix has
an element for each reward position, agent position and move. There is an option at the end of the program to let the initial
position of the agent not be only the center, to make things trickier! '''
import numpy as np
import random
Q = np.zeros(shape = (2,3,2)) # Q matrix of the Bellman equation: (reward position, agent position, action)
Agent_state = [[1,0,0],[0,1,0],[0,0,1]] # possible agent positions (one-hot); NOTE(review): not referenced below
# An easy way to save the different states of the game is as follows: the reward can only have two
# positions, so we assign 0 for left and 1 for right. For the agent, we can have three different
# positions so 0 = left, 1 = center, 2 = right. Putting the reward + agent together, we can have 6
# possible states, that we can number from 0 to 5 using S=position_agent+3*position_reward. Then
# we can build a reward matrix, where for the states where there is a coincidence (S=0+0=0 and
# S=2+3*1=5) we assign value one, and for the non coinciding states we assign 0:
Reward=[1, 0, 0, 0, 0, 1] # Reward matrix from Bellman equation, indexed by S = agent + 3*reward.
state = [0,0,0]  # board: one-hot marker of the reward cell, all zeros = empty

# Movement helper (originally copied from rl_toy_example.py) ------------------
def moveto(position, agent_position, status):
    """Apply a move, clamp the agent to the board, and detect a win.

    position: -1 (left) or +1 (right); agent_position: current cell.
    Returns a (reward, status, new_position) triple.
    """
    # Shift the agent and keep it inside the three valid cells [0, 2]:
    # moves past either edge leave the agent on that edge.
    new_position = agent_position + position
    if new_position < 0:
        new_position = 0
    elif new_position > 2:
        new_position = 2
    # Winning means landing on the cell where the reward is placed.
    if status == "running" and state[new_position] == 1:
        return 1, "win", new_position
    return 0, status, new_position
def next_move():
    """Pick a random step for the agent: -1 (left) or +1 (right)."""
    # randint(0, 1) yields 0 or 1; scaling maps that onto {-1, +1}.
    return 2 * random.randint(0, 1) - 1
# ----------------------------------------------------------------------------
# Learning function ----------------------------------------------------------
def learning(idx_reward,agent_position,agent_move,gamma): # This function constructs the matrix Q by means of the Bellman equation
    """Perform one Q-matrix update for a single (state, action) pair.

    idx_reward: board index of the reward cell (0 = left, 2 = right).
    agent_position: current agent cell in [0, 2].
    agent_move: -1 (left) or +1 (right).
    gamma: discount factor weighting the value of future moves.
    """
    position_agent_0 = agent_position # Position of the Agent at time t
    # NOTE(review): relies on the module-level `status` global being
    # "running" here; moveto() is used only for its returned position.
    position_agent_1 = moveto(agent_move, agent_position, status)[2] # Position of the Agent at time t+1
    if agent_move == -1: # we assign the value 0 for a left move (agent_move=-1) and 1 for a right move
        a_move = 0
    else:
        a_move = 1
    if idx_reward == 2: # we assign 1 to the right position for the reward (idx_reward=2) and 0 for left position
        n_idx_reward = 1
    else:
        n_idx_reward = 0
    # Bellman equation:
    Q[n_idx_reward, position_agent_0, a_move] = Reward[position_agent_1 + 3*n_idx_reward] + gamma*np.max([Q[n_idx_reward, position_agent_1, i] for i in [0,1]])
# -----------------------------------------------------------------------------
# Teached agent ---------------------------------------------------------------
def cleverplayer(idx_reward, agent_position):
    """Greedy policy derived from the learned Q matrix.

    idx_reward: board index of the reward cell (0 = left, 2 = right).
    agent_position: current agent cell in [0, 2].
    Returns -1 to move left or +1 to move right.
    """
    # Map the reward index (0 = left, 2 = right) onto the first Q axis.
    if idx_reward == 2:
        n_idx_reward = 1
    else:
        n_idx_reward = 0
    # Action axis encoding matches learning(): index 0 = left move (-1),
    # index 1 = right move (+1).
    left_value = Q[n_idx_reward, agent_position, 0]
    right_value = Q[n_idx_reward, agent_position, 1]
    # BUG FIX: the original returned +1 when the LEFT action had the higher
    # value (and -1 for the right action), i.e. the policy was inverted and
    # drove the agent away from the reward. Return the move that matches the
    # higher-valued action index.
    if left_value > right_value:
        return -1
    elif right_value > left_value:
        return +1
    else:
        # Tie: choose randomly, as during learning.
        return random.randint(0, 1)*2-1
# -----------------------------------------------------------------------------
# Learning phase: the goal is to construct the matrix Q by playing random games
iterations = 100  # Number of iterations of the learning process
gamma = 0.9  # Discount factor of the Bellman equation: how much future moves count
for ii in range(0, iterations):
    # BUG FIX: clear the board before each game. Without this, reward cells
    # from previous games stay set, so moveto() could declare a win on a
    # stale cell and episodes terminated incorrectly.
    state[0] = state[1] = state[2] = 0
    idx_reward = random.randint(0, 1)*2  # reward goes to cell 0 or cell 2
    state[idx_reward] = 1  # place the reward on the board
    # Random initial position for the agent (replace with agent_position = 1
    # to always start in the center, which is easier for the agent).
    agent_position = random.randint(0, 2)
    status = "running"
    while status == "running":
        agent_move = next_move()
        learning(idx_reward, agent_position, agent_move, gamma)
        reward, status, agent_position = moveto(agent_move, agent_position, status)
# -----------------------------------------------------------------------------
# Play! -----------------------------------------------------------------------
iterations_play = 100  # number of evaluation games
number_steps = 0  # total steps over all games, for the mean below
for jj in range(0, iterations_play):
    # BUG FIX: clear stale reward cells left over from previous games
    # (same issue as in the learning phase).
    state[0] = state[1] = state[2] = 0
    idx_reward = random.randint(0, 1)*2
    state[idx_reward] = 1
    # Random initial position for the agent (replace with agent_position = 1
    # to always start in the center, which is easier for the agent).
    agent_position = random.randint(0, 2)
    status = "running"
    while status == "running":
        number_steps += 1  # saving number of steps
        agent_move = cleverplayer(idx_reward, agent_position)
        reward, status, agent_position = moveto(agent_move, agent_position, status)
print('\n The mean number of steps is ' + repr(number_steps / iterations_play) + '\n')
/*
* jclossls.c
*
* This file was part of the Independent JPEG Group's software:
* Copyright (C) 1998, Thomas G. Lane.
* Lossless JPEG Modifications:
* Copyright (C) 1999, Ken Murchison.
* libjpeg-turbo Modifications:
* Copyright (C) 2022, 2024, D. R. Commander.
* For conditions of distribution and use, see the accompanying README.ijg
* file.
*
* This file contains prediction, sample differencing, and point transform
* routines for the lossless JPEG compressor.
*/
#define JPEG_INTERNALS
#include "jinclude.h"
#include "jpeglib.h"
#include "jlossls.h"
#ifdef C_LOSSLESS_SUPPORTED
/************************** Sample differencing **************************/
/*
* In order to avoid a performance penalty for checking which predictor is
* being used and which row is being processed for each call of the
* undifferencer, and to promote optimization, we have separate differencing
* functions for each predictor selection value.
*
* We are able to avoid duplicating source code by implementing the predictors
* and differencers as macros. Each of the differencing functions is simply a
* wrapper around a DIFFERENCE macro with the appropriate PREDICTOR macro
* passed as an argument.
*/
/* Forward declarations */
LOCAL(void) reset_predictor(j_compress_ptr cinfo, int ci);

/* Predictor for the first column of the first row: 2^(P-Pt-1)
 * (P = cinfo->data_precision, Pt = cinfo->Al, the point transform)
 */
#define INITIAL_PREDICTORx (1 << (cinfo->data_precision - cinfo->Al - 1))

/* Predictor for the first column of the remaining rows: Rb
 * (the sample directly above, i.e. the first sample of the previous row)
 */
#define INITIAL_PREDICTOR2 prev_row[0]
/*
 * 1-Dimensional differencer routine.
 *
 * This macro implements the 1-D horizontal predictor (1).  INITIAL_PREDICTOR
 * is used as the special case predictor for the first column, which must be
 * either INITIAL_PREDICTOR2 or INITIAL_PREDICTORx.  The remaining samples
 * use PREDICTOR1.
 *
 * The macro expands to local declarations plus statements, so it must be the
 * first thing in the calling function's body.  It declares `restart`, which
 * the first-row differencer inspects after expansion.
 */
#define DIFFERENCE_1D(INITIAL_PREDICTOR) \
  lossless_comp_ptr losslessc = (lossless_comp_ptr)cinfo->fdct; \
  boolean restart = FALSE; \
  int samp, Ra; \
  \
  samp = *input_buf++; \
  *diff_buf++ = samp - INITIAL_PREDICTOR; \
  \
  while (--width) { \
    Ra = samp; \
    samp = *input_buf++; \
    *diff_buf++ = samp - PREDICTOR1; \
  } \
  \
  /* Account for restart interval (no-op if not using restarts) */ \
  if (cinfo->restart_interval) { \
    if (--(losslessc->restart_rows_to_go[ci]) == 0) { \
      reset_predictor(cinfo, ci); \
      restart = TRUE; \
    } \
  }
/*
 * 2-Dimensional differencer routine.
 *
 * This macro implements the 2-D horizontal predictors (#2-7).  PREDICTOR2 is
 * used as the special case predictor for the first column.  The remaining
 * samples use PREDICTOR, which is a function of Ra, Rb, and Rc.
 *
 * Because prev_row and output_buf may point to the same storage area (in an
 * interleaved image with Vi=1, for example), we must take care to buffer Rb/Rc
 * before writing the current reconstructed sample value into output_buf.
 *
 * Like DIFFERENCE_1D, this expands to declarations plus statements and must
 * be the first thing in the calling function's body.
 */
#define DIFFERENCE_2D(PREDICTOR) \
  lossless_comp_ptr losslessc = (lossless_comp_ptr)cinfo->fdct; \
  int samp, Ra, Rb, Rc; \
  \
  Rb = *prev_row++; \
  samp = *input_buf++; \
  *diff_buf++ = samp - PREDICTOR2; \
  \
  while (--width) { \
    Rc = Rb; \
    Rb = *prev_row++; \
    Ra = samp; \
    samp = *input_buf++; \
    *diff_buf++ = samp - PREDICTOR; \
  } \
  \
  /* Account for restart interval (no-op if not using restarts) */ \
  if (cinfo->restart_interval) { \
    if (--losslessc->restart_rows_to_go[ci] == 0) \
      reset_predictor(cinfo, ci); \
  }
/*
 * Differencers for the second and subsequent rows in a scan or restart
 * interval.  The first sample in the row is differenced using the vertical
 * predictor (2).  The rest of the samples are differenced using the predictor
 * specified in the scan header.
 *
 * Each function is a thin wrapper around the appropriate DIFFERENCE macro;
 * the (void) casts silence unused-variable warnings for macro locals
 * (Ra, Rc, restart) that the selected predictor does not reference.
 */

METHODDEF(void)
jpeg_difference1(j_compress_ptr cinfo, int ci,
                 _JSAMPROW input_buf, _JSAMPROW prev_row,
                 JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_1D(INITIAL_PREDICTOR2);
  (void)(restart);
}

METHODDEF(void)
jpeg_difference2(j_compress_ptr cinfo, int ci,
                 _JSAMPROW input_buf, _JSAMPROW prev_row,
                 JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_2D(PREDICTOR2);
  (void)(Ra);
  (void)(Rc);
}

METHODDEF(void)
jpeg_difference3(j_compress_ptr cinfo, int ci,
                 _JSAMPROW input_buf, _JSAMPROW prev_row,
                 JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_2D(PREDICTOR3);
  (void)(Ra);
}

METHODDEF(void)
jpeg_difference4(j_compress_ptr cinfo, int ci,
                 _JSAMPROW input_buf, _JSAMPROW prev_row,
                 JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_2D(PREDICTOR4);
}

METHODDEF(void)
jpeg_difference5(j_compress_ptr cinfo, int ci,
                 _JSAMPROW input_buf, _JSAMPROW prev_row,
                 JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_2D(PREDICTOR5);
}

METHODDEF(void)
jpeg_difference6(j_compress_ptr cinfo, int ci,
                 _JSAMPROW input_buf, _JSAMPROW prev_row,
                 JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_2D(PREDICTOR6);
}

METHODDEF(void)
jpeg_difference7(j_compress_ptr cinfo, int ci,
                 _JSAMPROW input_buf, _JSAMPROW prev_row,
                 JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_2D(PREDICTOR7);
  (void)(Rc);
}
/*
 * Differencer for the first row in a scan or restart interval.  The first
 * sample in the row is differenced using the special predictor constant
 * x = 2 ^ (P-Pt-1).  The rest of the samples are differenced using the
 * 1-D horizontal predictor (1).
 */

METHODDEF(void)
jpeg_difference_first_row(j_compress_ptr cinfo, int ci,
                          _JSAMPROW input_buf, _JSAMPROW prev_row,
                          JDIFFROW diff_buf, JDIMENSION width)
{
  DIFFERENCE_1D(INITIAL_PREDICTORx);

  /*
   * Now that we have differenced the first row, we want to use the
   * differencer that corresponds to the predictor specified in the
   * scan header.
   *
   * Note that we don't do this if we have just reset the predictor
   * for a new restart interval.  (`restart` is set by DIFFERENCE_1D
   * when the restart-row counter hits zero; reset_predictor() has then
   * already re-installed this first-row differencer.)
   */
  if (!restart) {
    /* cinfo->Ss holds the predictor selection value (1-7) from the scan
     * header; install the matching per-row differencer for ci.
     */
    switch (cinfo->Ss) {
    case 1:
      losslessc->predict_difference[ci] = jpeg_difference1;
      break;
    case 2:
      losslessc->predict_difference[ci] = jpeg_difference2;
      break;
    case 3:
      losslessc->predict_difference[ci] = jpeg_difference3;
      break;
    case 4:
      losslessc->predict_difference[ci] = jpeg_difference4;
      break;
    case 5:
      losslessc->predict_difference[ci] = jpeg_difference5;
      break;
    case 6:
      losslessc->predict_difference[ci] = jpeg_difference6;
      break;
    case 7:
      losslessc->predict_difference[ci] = jpeg_difference7;
      break;
    }
  }
}
/*
 * Reset predictor at the start of a pass or restart interval.
 */

LOCAL(void)
reset_predictor(j_compress_ptr cinfo, int ci)
{
  lossless_comp_ptr losslessc = (lossless_comp_ptr)cinfo->fdct;

  /* Initialize restart counter.  (start_pass_lossless() has verified that
   * restart_interval is an exact multiple of MCUs_per_row, so this division
   * is exact; the counter is decremented once per row by the differencers.)
   */
  losslessc->restart_rows_to_go[ci] =
    cinfo->restart_interval / cinfo->MCUs_per_row;

  /* Set difference function to first row function */
  losslessc->predict_difference[ci] = jpeg_difference_first_row;
}
/********************** Sample downscaling by 2^Pt ***********************/

/* Downscale samples by the point transform (Pt = cinfo->Al) with an
 * arithmetic right shift.  NOTE(review): the do/while body executes at
 * least once, so width is assumed to be > 0 — confirm at the call sites.
 */
METHODDEF(void)
simple_downscale(j_compress_ptr cinfo,
                 _JSAMPROW input_buf, _JSAMPROW output_buf, JDIMENSION width)
{
  do {
    *output_buf++ = (_JSAMPLE)RIGHT_SHIFT(*input_buf++, cinfo->Al);
  } while (--width);
}

/* Identity scaler, selected when no point transform is requested (Al == 0). */
METHODDEF(void)
noscale(j_compress_ptr cinfo,
        _JSAMPROW input_buf, _JSAMPROW output_buf, JDIMENSION width)
{
  memcpy(output_buf, input_buf, width * sizeof(_JSAMPLE));
}
/*
 * Initialize for a processing pass.
 */

METHODDEF(void)
start_pass_lossless(j_compress_ptr cinfo)
{
  lossless_comp_ptr losslessc = (lossless_comp_ptr)cinfo->fdct;
  int ci;

  /* Set scaler function based on Pt (cinfo->Al): downscale when a point
   * transform is in effect, plain copy otherwise.
   */
  if (cinfo->Al)
    losslessc->scaler_scale = simple_downscale;
  else
    losslessc->scaler_scale = noscale;

  /* Check that the restart interval is an integer multiple of the number
   * of MCUs in an MCU row.  (reset_predictor() relies on this to convert
   * the interval into a whole number of rows.)
   */
  if (cinfo->restart_interval % cinfo->MCUs_per_row != 0)
    ERREXIT2(cinfo, JERR_BAD_RESTART,
             cinfo->restart_interval, cinfo->MCUs_per_row);

  /* Set predictors for start of pass */
  for (ci = 0; ci < cinfo->num_components; ci++)
    reset_predictor(cinfo, ci);
}
/*
 * Initialize the lossless compressor.
 */
GLOBAL(void)
_jinit_lossless_compressor(j_compress_ptr cinfo)
{
  lossless_comp_ptr lcp;

  /* Reject sample precisions this build's sample data type cannot hold. */
#if BITS_IN_JSAMPLE == 8
  if (cinfo->data_precision < 2 || cinfo->data_precision > BITS_IN_JSAMPLE)
#else
  if (cinfo->data_precision < BITS_IN_JSAMPLE - 3 ||
      cinfo->data_precision > BITS_IN_JSAMPLE)
#endif
    ERREXIT1(cinfo, JERR_BAD_PRECISION, cinfo->data_precision);

  /* Allocate the compressor state in the permanent pool and install it
   * where the library expects the forward-DCT object pointer.
   */
  lcp = (lossless_comp_ptr)
    (*cinfo->mem->alloc_small) ((j_common_ptr)cinfo, JPOOL_PERMANENT,
                                sizeof(jpeg_lossless_compressor));
  cinfo->fdct = (struct jpeg_forward_dct *)lcp;

  lcp->pub.start_pass = start_pass_lossless;
}
#endif /* C_LOSSLESS_SUPPORTED */ | c | github | https://github.com/opencv/opencv | 3rdparty/libjpeg-turbo/src/jclossls.c |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This file is part of the prometeo project.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2011 Emanuele Bertoldi'
__version__ = '0.0.5'
from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from prometeo.core.auth.forms import UserEditForm
class UserRegistrationForm(UserEditForm):
    """Form for user registration.

    Extends the standard user edit form with mandatory Terms-of-Service
    and Privacy-Policy consent checkboxes, and strips out the admin-only
    permission fields that self-registering users must not touch.
    """
    def __init__(self, *args, **kwargs):
        super(UserRegistrationForm, self).__init__(*args, **kwargs)
        # Registration requires explicit consent to the ToS and the
        # privacy policy; both render as checkboxes with linked labels.
        self.fields['tos'] = forms.BooleanField(
            widget=forms.CheckboxInput(),
            label=mark_safe(_(u'I have read and agree to the <a href="/terms-service" target="_blank">Terms of Service</a>')),
            error_messages={'required': u"You must agree to the terms to register."},
        )
        self.fields['pp'] = forms.BooleanField(
            widget=forms.CheckboxInput(),
            label=mark_safe(_(u'I have read and agree to the <a href="/privacy" target="_blank">Privacy Policy</a>')),
            error_messages={'required': u"You must agree to the privacy policy to register."},
        )
        # Hide the staff/permission machinery from public registration.
        for admin_only in ('is_staff', 'is_active', 'is_superuser',
                           'groups', 'user_permissions'):
            del self.fields[admin_only]
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordChangeForm, SetPasswordForm, UserChangeForm, PasswordResetForm
from django.test import TestCase
class UserCreationFormTest(TestCase):
    """Tests for django.contrib.auth.forms.UserCreationForm."""
    # Provides the pre-existing 'testclient' user referenced below.
    fixtures = ['authtestdata.json']
    def test_user_already_exists(self):
        # Duplicate usernames are rejected with an error on 'username'.
        data = {
            'username': 'testclient',
            'password1': 'test123',
            'password2': 'test123',
        }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["username"].errors,
            [u'A user with that username already exists.'])
    def test_invalid_data(self):
        # '!' is outside the allowed username alphabet.
        data = {
            'username': 'jsmith!',
            'password1': 'test123',
            'password2': 'test123',
        }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["username"].errors,
            [u'This value may contain only letters, numbers and @/./+/-/_ characters.'])
    def test_password_verification(self):
        # The verification password is incorrect.
        data = {
            'username': 'jsmith',
            'password1': 'test123',
            'password2': 'test',
        }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["password2"].errors,
            [u"The two password fields didn't match."])
    def test_both_passwords(self):
        # One (or both) passwords weren't given
        data = {'username': 'jsmith'}
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form['password1'].errors,
            [u'This field is required.'])
        self.assertEqual(form['password2'].errors,
            [u'This field is required.'])
        data['password2'] = 'test123'
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form['password1'].errors,
            [u'This field is required.'])
    def test_success(self):
        # The success case: a valid form saves and returns the new user.
        data = {
            'username': 'jsmith@example.com',
            'password1': 'test123',
            'password2': 'test123',
        }
        form = UserCreationForm(data)
        self.assertTrue(form.is_valid())
        u = form.save()
        self.assertEqual(repr(u), '<User: jsmith@example.com>')
class AuthenticationFormTest(TestCase):
    """Tests for django.contrib.auth.forms.AuthenticationForm."""

    fixtures = ['authtestdata.json']

    def test_invalid_username(self):
        # A username that matches no user produces the generic non-field
        # error (no hint about which of the two fields was wrong).
        form = AuthenticationForm(None, {
            'username': 'jsmith_does_not_exist',
            'password': 'test123',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(),
            [u'Please enter a correct username and password. Note that both fields are case-sensitive.'])

    def test_inactive_user(self):
        # Correct credentials for a deactivated account are rejected.
        form = AuthenticationForm(None, {
            'username': 'inactive',
            'password': 'password',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(),
            [u'This account is inactive.'])

    def test_success(self):
        # Valid credentials for an active user validate cleanly.
        form = AuthenticationForm(None, {
            'username': 'testclient',
            'password': 'password',
        })
        self.assertTrue(form.is_valid())
        self.assertEqual(form.non_field_errors(), [])
class SetPasswordFormTest(TestCase):
    """Tests for django.contrib.auth.forms.SetPasswordForm."""

    fixtures = ['authtestdata.json']

    def test_password_verification(self):
        # A mismatched confirmation is reported on the second field.
        user = User.objects.get(username='testclient')
        form = SetPasswordForm(user, {
            'new_password1': 'abc123',
            'new_password2': 'abc',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form["new_password2"].errors,
            [u"The two password fields didn't match."])

    def test_success(self):
        # Matching new passwords validate.
        user = User.objects.get(username='testclient')
        form = SetPasswordForm(user, {
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        })
        self.assertTrue(form.is_valid())
class PasswordChangeFormTest(TestCase):
    """Tests for django.contrib.auth.forms.PasswordChangeForm."""

    fixtures = ['authtestdata.json']

    def test_incorrect_password(self):
        # A wrong old password is reported on the 'old_password' field.
        user = User.objects.get(username='testclient')
        form = PasswordChangeForm(user, {
            'old_password': 'test',
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form["old_password"].errors,
            [u'Your old password was entered incorrectly. Please enter it again.'])

    def test_password_verification(self):
        # Mismatched new passwords are reported on the confirmation field.
        user = User.objects.get(username='testclient')
        form = PasswordChangeForm(user, {
            'old_password': 'password',
            'new_password1': 'abc123',
            'new_password2': 'abc',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(form["new_password2"].errors,
            [u"The two password fields didn't match."])

    def test_success(self):
        # Correct old password plus matching new passwords validate.
        user = User.objects.get(username='testclient')
        form = PasswordChangeForm(user, {
            'old_password': 'password',
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        })
        self.assertTrue(form.is_valid())

    def test_field_order(self):
        # Regression test - check the order of fields:
        user = User.objects.get(username='testclient')
        self.assertEqual(PasswordChangeForm(user, {}).fields.keys(),
            ['old_password', 'new_password1', 'new_password2'])
class UserChangeFormTest(TestCase):
    """Tests for django.contrib.auth.forms.UserChangeForm."""
    fixtures = ['authtestdata.json']
    def test_username_validity(self):
        # Spaces are not in the allowed username alphabet.
        user = User.objects.get(username='testclient')
        data = {'username': 'not valid'}
        form = UserChangeForm(data, instance=user)
        self.assertFalse(form.is_valid())
        self.assertEqual(form['username'].errors,
            [u'This value may contain only letters, numbers and @/./+/-/_ characters.'])
    def test_bug_14242(self):
        # A regression test, introduced by adding an optimization for the
        # UserChangeForm: a subclass that restricts Meta.fields must still
        # be able to tweak the remaining fields in __init__.
        class MyUserForm(UserChangeForm):
            def __init__(self, *args, **kwargs):
                super(MyUserForm, self).__init__(*args, **kwargs)
                self.fields['groups'].help_text = 'These groups give users different permissions'
            class Meta(UserChangeForm.Meta):
                fields = ('groups',)
        # Just check we can create it
        form = MyUserForm({})
class PasswordResetFormTest(TestCase):
    """Tests for django.contrib.auth.forms.PasswordResetForm."""

    fixtures = ['authtestdata.json']

    def test_invalid_email(self):
        # A malformed address fails basic e-mail validation.
        form = PasswordResetForm({'email': 'not valid'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form['email'].errors,
            [u'Enter a valid e-mail address.'])

    def test_nonexistant_email(self):
        # A well-formed address with no matching account is rejected.
        form = PasswordResetForm({'email': 'foo@bar.com'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors,
            {'email': [u"That e-mail address doesn't have an associated user account. Are you sure you've registered?"]})

    def test_cleaned_data(self):
        # Regression test: cleaned_data keeps the submitted address.
        user = User.objects.create_user("jsmith3", "jsmith3@example.com", "test123")
        form = PasswordResetForm({'email': 'jsmith3@example.com'})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['email'], u'jsmith3@example.com')

    def test_bug_5605(self):
        # bug #5605, preserve the case of the user name (before the @ in the
        # email address) when creating a user.
        user = User.objects.create_user('forms_test2', 'tesT@EXAMple.com', 'test')
        self.assertEqual(user.email, 'tesT@example.com')
        user = User.objects.create_user('forms_test3', 'tesT', 'test')
        self.assertEqual(user.email, 'tesT')
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from django.contrib.auth.models import User
class TestLogin(StaticLiveServerTestCase):
    """End-to-end tests of the login flow, driven through a real browser.

    Both test methods previously duplicated the whole five-step login
    sequence; it is factored into `_attempt_login` so the tests state
    only what differs (the password) and what they assert.
    """

    def setUp(self):
        # Create a known user so the success case has valid credentials.
        self.username = 'alice'
        self.email = 'alice@example.org'
        self.password = 'test'
        User.objects.create_user(self.username, self.email, self.password)
        self.browser = webdriver.Firefox()

    def tearDown(self):
        # Always release the browser, even if a test failed.
        self.browser.quit()

    def _attempt_login(self, username, password):
        """Open the site, follow the Login link, and submit credentials."""
        self.browser.get(self.live_server_url)
        self.browser.find_element_by_link_text('Login').click()
        self.browser.find_element_by_id('id_username').send_keys(username)
        self.browser.find_element_by_id('id_password').send_keys(password)
        self.browser.find_element_by_css_selector('[type=submit]').click()

    def test_successful_login(self):
        # Valid credentials land on a page exposing a logout control.
        self._attempt_login(self.username, self.password)
        self.assertIsNotNone(self.browser.find_element_by_id('logout'))

    def test_failing_login(self):
        # A wrong password shows the (German) generic login error.
        self._attempt_login(self.username, 'foobar')
        alert = self.browser.find_element_by_class_name('alert-danger')
        self.assertEqual(
            alert.text,
            'Bitte einen gültigen Benutzername und ein Passwort eingeben. Beide Felder berücksichtigen die Groß-/Kleinschreibung.'
        )
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.annotation;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.core.type.filter.AspectJTypeFilter;
import org.springframework.core.type.filter.AssignableTypeFilter;
import org.springframework.core.type.filter.RegexPatternTypeFilter;
import org.springframework.core.type.filter.TypeFilter;
import org.springframework.util.Assert;
/**
* Collection of utilities for working with {@link ComponentScan @ComponentScan}
* {@linkplain ComponentScan.Filter type filters}.
*
* @author Chris Beams
* @author Juergen Hoeller
* @author Sam Brannen
* @since 5.3.13
* @see ComponentScan.Filter
* @see org.springframework.core.type.filter.TypeFilter
*/
public abstract class TypeFilterUtils {
/**
* Create {@linkplain TypeFilter type filters} from the supplied
* {@link AnnotationAttributes}, such as those sourced from
* {@link ComponentScan#includeFilters()} or {@link ComponentScan#excludeFilters()}.
* <p>Each {@link TypeFilter} will be instantiated using an appropriate
* constructor, with {@code BeanClassLoaderAware}, {@code BeanFactoryAware},
* {@code EnvironmentAware}, and {@code ResourceLoaderAware} contracts
* invoked if they are implemented by the type filter.
* @param filterAttributes {@code AnnotationAttributes} for a
* {@link ComponentScan.Filter @Filter} declaration
* @param environment the {@code Environment} to make available to filters
* @param resourceLoader the {@code ResourceLoader} to make available to filters
* @param registry the {@code BeanDefinitionRegistry} to make available to filters
* as a {@link org.springframework.beans.factory.BeanFactory} if applicable
* @return a list of instantiated and configured type filters
* @see TypeFilter
* @see AnnotationTypeFilter
* @see AssignableTypeFilter
* @see AspectJTypeFilter
* @see RegexPatternTypeFilter
* @see org.springframework.beans.factory.BeanClassLoaderAware
* @see org.springframework.beans.factory.BeanFactoryAware
* @see org.springframework.context.EnvironmentAware
* @see org.springframework.context.ResourceLoaderAware
*/
public static List<TypeFilter> createTypeFiltersFor(AnnotationAttributes filterAttributes, Environment environment,
ResourceLoader resourceLoader, BeanDefinitionRegistry registry) {
List<TypeFilter> typeFilters = new ArrayList<>();
FilterType filterType = filterAttributes.getEnum("type");
for (Class<?> filterClass : filterAttributes.getClassArray("classes")) {
switch (filterType) {
case ANNOTATION -> {
Assert.isAssignable(Annotation.class, filterClass,
"@ComponentScan ANNOTATION type filter requires an annotation type");
@SuppressWarnings("unchecked")
Class<Annotation> annotationType = (Class<Annotation>) filterClass;
typeFilters.add(new AnnotationTypeFilter(annotationType));
}
case ASSIGNABLE_TYPE -> typeFilters.add(new AssignableTypeFilter(filterClass));
case CUSTOM -> {
Assert.isAssignable(TypeFilter.class, filterClass,
"@ComponentScan CUSTOM type filter requires a TypeFilter implementation");
TypeFilter filter = ParserStrategyUtils.instantiateClass(filterClass, TypeFilter.class,
environment, resourceLoader, registry);
typeFilters.add(filter);
}
default ->
throw new IllegalArgumentException("Filter type not supported with Class value: " + filterType);
}
}
for (String expression : filterAttributes.getStringArray("pattern")) {
switch (filterType) {
case ASPECTJ -> typeFilters.add(new AspectJTypeFilter(expression, resourceLoader.getClassLoader()));
case REGEX -> typeFilters.add(new RegexPatternTypeFilter(Pattern.compile(expression)));
default ->
throw new IllegalArgumentException("Filter type not supported with String pattern: " + filterType);
}
}
return typeFilters;
}
} | java | github | https://github.com/spring-projects/spring-framework | spring-context/src/main/java/org/springframework/context/annotation/TypeFilterUtils.java |
# coding: utf-8
from __future__ import absolute_import, unicode_literals
import os
import re
import datetime
from django.conf import settings, global_settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.core.files import temp as tempfile
from django.core.urlresolvers import get_script_prefix, reverse, set_script_prefix
# Register auth models with the admin.
from django.contrib import admin
from django.contrib.auth import get_permission_codename
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.views.main import TO_FIELD_VAR
from django.contrib.admin.models import LogEntry, DELETION
from django.contrib.admin.sites import LOGIN_FORM_KEY
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.admin.util import quote
from django.contrib.admin.validation import ModelAdminValidator
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.models import Group, User, Permission
from django.contrib.contenttypes.models import ContentType
from django.db import connection
from django.forms.util import ErrorList
from django.template.response import TemplateResponse
from django.test import TestCase
from django.test.utils import patch_logger
from django.test.utils import override_settings
from django.utils import formats
from django.utils import translation
from django.utils import unittest
from django.utils.cache import get_max_age
from django.utils.encoding import iri_to_uri, force_bytes, force_text
from django.utils.html import escape
from django.utils.http import urlencode, urlquote
from django.utils.six.moves.urllib.parse import parse_qsl, urljoin, urlparse
from django.utils._os import upath
from django.utils import six
# local test models
from .models import (Article, BarAccount, CustomArticle, EmptyModel, FooAccount,
Gallery, ModelWithStringPrimaryKey, Person, Persona, Picture, Podcast,
Section, Subscriber, Vodcast, Language, Collector, Widget, Grommet,
DooHickey, FancyDoodad, Whatsit, Category, Post, Plot, FunkyTag, Chapter,
Book, Promo, WorkHour, Employee, Question, Answer, Inquisition, Actor,
FoodDelivery, RowLevelChangePermissionModel, Paper, CoverLetter, Story,
OtherStory, ComplexSortedPerson, PluggableSearchPerson, Parent, Child, AdminOrderedField,
AdminOrderedModelMethod, AdminOrderedAdminMethod, AdminOrderedCallable,
Report, MainPrepopulated, RelatedPrepopulated, UnorderedObject,
Simple, UndeletableObject, Choice, ShortMessage, Telegram, Pizza, Topping)
from .admin import site, site2
# Login-failure message the admin login form shows for staff-only
# authentication errors; asserted against in the tests below.
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
# SHA1 is weak but fast; it keeps the many fixture logins in this suite quick.
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewBasicTestCase(TestCase):
    """Shared setup for the admin view smoke tests: logs in a superuser,
    forces USE_I18N on, and restores the touched settings afterwards.
    """
    fixtures = ['admin-views-users.xml', 'admin-views-colors.xml',
        'admin-views-fabrics.xml', 'admin-views-books.xml']
    # Store the bit of the URL where the admin is registered as a class
    # variable. That way we can test a second AdminSite just by subclassing
    # this test case and changing urlbit.
    urlbit = 'admin'
    urls = "admin_views.urls"
    def setUp(self):
        # Save the i18n settings so tearDown can restore them.
        self.old_USE_I18N = settings.USE_I18N
        self.old_USE_L10N = settings.USE_L10N
        self.old_LANGUAGE_CODE = settings.LANGUAGE_CODE
        self.client.login(username='super', password='secret')
        settings.USE_I18N = True
    def tearDown(self):
        settings.USE_I18N = self.old_USE_I18N
        settings.USE_L10N = self.old_USE_L10N
        settings.LANGUAGE_CODE = self.old_LANGUAGE_CODE
        self.client.logout()
        # Format strings are cached per locale; reset so later tests see
        # the defaults again.
        formats.reset_format_cache()
    def assertContentBefore(self, response, text1, text2, failing_msg=None):
        """
        Testing utility asserting that text1 appears before text2 in response
        content.
        """
        self.assertEqual(response.status_code, 200)
        self.assertTrue(response.content.index(force_bytes(text1)) < response.content.index(force_bytes(text2)),
            failing_msg
        )
class AdminViewBasicTest(AdminViewBasicTestCase):
    def testTrailingSlashRequired(self):
        """
        If you leave off the trailing slash, app should redirect and add it.
        """
        # Expect a permanent (301) redirect to the slash-terminated URL.
        response = self.client.get('/test_admin/%s/admin_views/article/add' % self.urlbit)
        self.assertRedirects(response,
            '/test_admin/%s/admin_views/article/add/' % self.urlbit, status_code=301
        )
    def testBasicAddGet(self):
        """
        A smoke test to ensure GET on the add_view works.
        """
        # The add form should render directly (no redirect, no error).
        response = self.client.get('/test_admin/%s/admin_views/section/add/' % self.urlbit)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def testAddWithGETArgs(self):
        # GET parameters matching form field names pre-populate the add form.
        response = self.client.get('/test_admin/%s/admin_views/section/add/' % self.urlbit, {'name': 'My Section'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'value="My Section"',
            msg_prefix="Couldn't find an input with the right value in the response"
        )
    def testBasicEditGet(self):
        """
        A smoke test to ensure GET on the change_view works.
        """
        # Object pk 1 exists via fixtures; the change form should render.
        response = self.client.get('/test_admin/%s/admin_views/section/1/' % self.urlbit)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def testBasicEditGetStringPK(self):
        """
        Ensure GET on the change_view works (returns an HTTP 404 error, see
        #11191) when passing a string as the PK argument for a model with an
        integer PK field.
        """
        # 'abc' cannot be coerced to an integer pk -> 404, not a 500.
        response = self.client.get('/test_admin/%s/admin_views/section/abc/' % self.urlbit)
        self.assertEqual(response.status_code, 404)
    def testBasicInheritanceGetStringPK(self):
        """
        Ensure GET on the change_view works on inherited models (returns an
        HTTP 404 error, see #19951) when passing a string as the PK argument
        for a model with an integer PK field.
        """
        # Same as above, but through a model using multi-table inheritance.
        response = self.client.get('/test_admin/%s/admin_views/supervillain/abc/' % self.urlbit)
        self.assertEqual(response.status_code, 404)
    def testBasicAddPost(self):
        """
        A smoke test to ensure POST on add_view works.
        """
        post_data = {
            "name": "Another Section",
            # inline data: management-form fields for the (empty) article
            # inline formset are required even when no inline rows are sent.
            "article_set-TOTAL_FORMS": "3",
            "article_set-INITIAL_FORMS": "0",
            "article_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post('/test_admin/%s/admin_views/section/add/' % self.urlbit, post_data)
        self.assertEqual(response.status_code, 302) # redirect somewhere
    def testPopupAddPost(self):
        """
        Ensure http response from a popup is properly escaped.
        """
        post_data = {
            '_popup': '1',
            'title': 'title with a new\nline',
            'content': 'some content',
            'date_0': '2010-09-10',
            'date_1': '14:55:39',
        }
        response = self.client.post('/test_admin/%s/admin_views/article/add/' % self.urlbit, post_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddAnotherPopup')
        # The newline must be JS-escaped (\u000A) in the popup response.
        self.assertContains(response, 'title with a new\\u000Aline')
# Post data for edit inline
inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": "1",
# there is no title in database, give one here or formset will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": "1",
"article_set-1-id": "2",
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": "3",
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
    def testBasicEditPost(self):
        """
        A smoke test to ensure POST on edit_view works.
        """
        # Uses the shared inline_post_data payload defined on the class.
        response = self.client.post('/test_admin/%s/admin_views/section/1/' % self.urlbit, self.inline_post_data)
        self.assertEqual(response.status_code, 302) # redirect somewhere
    def testEditSaveAs(self):
        """
        Test "save as".
        """
        post_data = self.inline_post_data.copy()
        post_data.update({
            '_saveasnew': 'Save+as+new',
            # "Save as new" copies the inlines too, so every extra form
            # needs its FK to the section filled in.
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            "article_set-3-section": "1",
            "article_set-4-section": "1",
            "article_set-5-section": "1",
        })
        response = self.client.post('/test_admin/%s/admin_views/section/1/' % self.urlbit, post_data)
        self.assertEqual(response.status_code, 302) # redirect somewhere
    def testChangeListSortingCallable(self):
        """
        Ensure we can sort on a list_display field that is a callable
        (column 2 is callable_year in ArticleAdmin)
        """
        # 'o': 2 sorts ascending on the third list_display column.
        response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': 2})
        self.assertContentBefore(response, 'Oldest content', 'Middle content',
            "Results of sorting on callable are out of order.")
        self.assertContentBefore(response, 'Middle content', 'Newest content',
            "Results of sorting on callable are out of order.")
    def testChangeListSortingModel(self):
        """
        Ensure we can sort on a list_display field that is a Model method
        (column 3 is 'model_year' in ArticleAdmin)
        """
        # '-3' sorts descending on the fourth list_display column.
        response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '-3'})
        self.assertContentBefore(response, 'Newest content', 'Middle content',
            "Results of sorting on Model method are out of order.")
        self.assertContentBefore(response, 'Middle content', 'Oldest content',
            "Results of sorting on Model method are out of order.")
    def testChangeListSortingModelAdmin(self):
        """
        Ensure we can sort on a list_display field that is a ModelAdmin method
        (column 4 is 'modeladmin_year' in ArticleAdmin)
        """
        response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '4'})
        self.assertContentBefore(response, 'Oldest content', 'Middle content',
            "Results of sorting on ModelAdmin method are out of order.")
        self.assertContentBefore(response, 'Middle content', 'Newest content',
            "Results of sorting on ModelAdmin method are out of order.")
    def testChangeListSortingMultiple(self):
        """Sorting on several columns at once ('o' is a dot-separated list)."""
        p1 = Person.objects.create(name="Chris", gender=1, alive=True)
        p2 = Person.objects.create(name="Chris", gender=2, alive=True)
        p3 = Person.objects.create(name="Bob", gender=1, alive=True)
        link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
        link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
        # Sort by name, gender
        # This hard-codes the URL because it'll fail if it runs against the
        # 'admin2' custom admin (which doesn't have the Person model).
        response = self.client.get('/test_admin/admin/admin_views/person/', {'o': '1.2'})
        self.assertContentBefore(response, link3, link1)
        self.assertContentBefore(response, link1, link2)
        # Sort by gender descending, name
        response = self.client.get('/test_admin/admin/admin_views/person/', {'o': '-2.1'})
        self.assertContentBefore(response, link2, link3)
        self.assertContentBefore(response, link3, link1)
    def testChangeListSortingPreserveQuerySetOrdering(self):
        """
        If no ordering is defined in `ModelAdmin.ordering` or in the query
        string, then the underlying order of the queryset should not be
        changed, even if it is defined in `Modeladmin.get_queryset()`.
        Refs #11868, #7309.
        """
        # The custom get_queryset() orders by descending age (see admin.py),
        # so Chris (60) should come before Bob (70) before Amy (80).
        p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
        p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
        p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
        link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
        link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
        # This hard-codes the URL because it'll fail if it runs against the
        # 'admin2' custom admin (which doesn't have the Person model).
        response = self.client.get('/test_admin/admin/admin_views/person/', {})
        self.assertContentBefore(response, link3, link2)
        self.assertContentBefore(response, link2, link1)
    def testChangeListSortingModelMeta(self):
        # Test ordering on Model Meta is respected
        l1 = Language.objects.create(iso='ur', name='Urdu')
        l2 = Language.objects.create(iso='ar', name='Arabic')
        link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
        link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
        response = self.client.get('/test_admin/admin/admin_views/language/', {})
        self.assertContentBefore(response, link2, link1)
        # Test we can override with query string
        response = self.client.get('/test_admin/admin/admin_views/language/', {'o': '-1'})
        self.assertContentBefore(response, link1, link2)
    def testChangeListSortingOverrideModelAdmin(self):
        # Test ordering on Model Admin is respected, and overrides Model Meta
        dt = datetime.datetime.now()
        p1 = Podcast.objects.create(name="A", release_date=dt)
        p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
        link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
        response = self.client.get('/test_admin/admin/admin_views/podcast/', {})
        # Newest release first per the ModelAdmin's ordering.
        self.assertContentBefore(response, link1, link2)
    def testMultipleSortSameField(self):
        # Check that we get the columns we expect if we have two columns
        # that correspond to the same ordering field
        dt = datetime.datetime.now()
        p1 = Podcast.objects.create(name="A", release_date=dt)
        p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
        link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
        link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
        response = self.client.get('/test_admin/admin/admin_views/podcast/', {})
        self.assertContentBefore(response, link1, link2)
        p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
        p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
        link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
        response = self.client.get('/test_admin/admin/admin_views/complexsortedperson/', {})
        # Should have 5 columns (including action checkbox col)
        self.assertContains(response, '<th scope="col"', count=5)
        self.assertContains(response, 'Name')
        self.assertContains(response, 'Colored name')
        # Check order
        self.assertContentBefore(response, 'Name', 'Colored name')
        # Check sorting - should be by name
        self.assertContentBefore(response, link2, link1)
    def testSortIndicatorsAdminOrder(self):
        """
        Ensures that the admin shows default sort indicators for all
        kinds of 'ordering' fields: field names, method on the model
        admin and model itself, and other callables. See #17252.
        """
        models = [(AdminOrderedField, 'adminorderedfield'),
                  (AdminOrderedModelMethod, 'adminorderedmodelmethod'),
                  (AdminOrderedAdminMethod, 'adminorderedadminmethod'),
                  (AdminOrderedCallable, 'adminorderedcallable')]
        for model, url in models:
            # Created out of order on purpose so the display order proves
            # the admin is really sorting.
            a1 = model.objects.create(stuff='The Last Item', order=3)
            a2 = model.objects.create(stuff='The First Item', order=1)
            a3 = model.objects.create(stuff='The Middle Item', order=2)
            response = self.client.get('/test_admin/admin/admin_views/%s/' % url, {})
            self.assertEqual(response.status_code, 200)
            # Should have 3 columns including action checkbox col.
            self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
            # Check if the correct column was selected. 2 is the index of the
            # 'order' column in the model admin's 'list_display' with 0 being
            # the implicit 'action_checkbox' and 1 being the column 'stuff'.
            self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
            # Check order of records.
            self.assertContentBefore(response, 'The First Item', 'The Middle Item')
            self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def testLimitedFilter(self):
    """Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
    This also tests relation-spanning filters (e.g. 'color__value').
    """
    response = self.client.get('/test_admin/%s/admin_views/thing/' % self.urlbit)
    self.assertEqual(response.status_code, 200)
    # The filter sidebar itself must render...
    self.assertContains(
        response, '<div id="changelist-filter">',
        msg_prefix="Expected filter not found in changelist view")
    # ...but the excluded choice must not be offered.
    self.assertNotContains(
        response, '<a href="?color__id__exact=3">Blue</a>',
        msg_prefix="Changelist filter not correctly limited by limit_choices_to")
def testRelationSpanningFilters(self):
    """Relation-spanning list_filter lookups (e.g. 'chap__book__name')
    render working filter links that correctly restrict the changelist."""
    response = self.client.get('/test_admin/%s/admin_views/chapterxtra1/' %
        self.urlbit)
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, '<div id="changelist-filter">')
    # Map each filter lookup to the candidate values it can take and a
    # predicate verifying that a result row really matches that value.
    filters = {
        'chap__id__exact': dict(
            values=[c.id for c in Chapter.objects.all()],
            test=lambda obj, value: obj.chap.id == value),
        'chap__title': dict(
            values=[c.title for c in Chapter.objects.all()],
            test=lambda obj, value: obj.chap.title == value),
        'chap__book__id__exact': dict(
            values=[b.id for b in Book.objects.all()],
            test=lambda obj, value: obj.chap.book.id == value),
        'chap__book__name': dict(
            values=[b.name for b in Book.objects.all()],
            test=lambda obj, value: obj.chap.book.name == value),
        # Promo is reached through a reverse relation, so the predicate
        # checks membership in promo_set rather than direct equality.
        'chap__book__promo__id__exact': dict(
            values=[p.id for p in Promo.objects.all()],
            test=lambda obj, value:
            obj.chap.book.promo_set.filter(id=value).exists()),
        'chap__book__promo__name': dict(
            values=[p.name for p in Promo.objects.all()],
            test=lambda obj, value:
            obj.chap.book.promo_set.filter(name=value).exists()),
    }
    for filter_path, params in filters.items():
        for value in params['values']:
            query_string = urlencode({filter_path: value})
            # ensure filter link exists
            self.assertContains(response, '<a href="?%s">' % query_string)
            # ensure link works
            filtered_response = self.client.get(
                '/test_admin/%s/admin_views/chapterxtra1/?%s' % (
                    self.urlbit, query_string))
            self.assertEqual(filtered_response.status_code, 200)
            # ensure changelist contains only valid objects
            for obj in filtered_response.context['cl'].queryset.all():
                self.assertTrue(params['test'](obj, value))
def testIncorrectLookupParameters(self):
    """Ensure incorrect lookup parameters are handled gracefully."""
    changelist_url = '/test_admin/%s/admin_views/thing/' % self.urlbit
    error_redirect = '/test_admin/%s/admin_views/thing/?e=1' % self.urlbit
    bad_lookups = [
        {'notarealfield': '5'},
        # Spanning relationships through an inexistant related object (Refs #16716)
        {'notarealfield__whatever': '5'},
        {'color__id__exact': 'StringNotInteger!'},
        # Regression test for #18530
        {'pub_date__gte': 'foo'},
    ]
    # Every bad lookup redirects to the changelist with the error flag set.
    for params in bad_lookups:
        response = self.client.get(changelist_url, params)
        self.assertRedirects(response, error_redirect)
def testIsNullLookups(self):
    """Ensure is_null is handled correctly."""
    Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
    changelist_url = '/test_admin/%s/admin_views/article/' % self.urlbit
    # Unfiltered, all four articles are listed.
    response = self.client.get(changelist_url)
    self.assertContains(response, '4 articles')
    # Both spellings of "false" are accepted for isnull.
    for raw_value in ('false', '0'):
        response = self.client.get(changelist_url, {'section__isnull': raw_value})
        self.assertContains(response, '3 articles')
    # Both spellings of "true" are accepted as well.
    for raw_value in ('true', '1'):
        response = self.client.get(changelist_url, {'section__isnull': raw_value})
        self.assertContains(response, '1 article')
def testLogoutAndPasswordChangeURLs(self):
    """The article changelist page exposes logout and password-change links."""
    changelist = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit)
    logout_link = '<a href="/test_admin/%s/logout/">' % self.urlbit
    password_link = '<a href="/test_admin/%s/password_change/">' % self.urlbit
    self.assertContains(changelist, logout_link)
    self.assertContains(changelist, password_link)
def testNamedGroupFieldChoicesChangeList(self):
    """
    Ensures the admin changelist shows correct values in the relevant column
    for rows corresponding to instances of a model in which a named group
    has been used in the choices option of a field.
    """
    fail_msg = "Changelist table isn't showing the right human-readable values set by a model field 'choices' option named group."
    response = self.client.get('/test_admin/%s/admin_views/fabric/' % self.urlbit)
    # Each fabric row must link to its change page and display the
    # human-readable label from the named choices group.
    expected = [
        (reverse('admin:admin_views_fabric_change', args=(1,), current_app=self.urlbit), 'Horizontal'),
        (reverse('admin:admin_views_fabric_change', args=(2,), current_app=self.urlbit), 'Vertical'),
    ]
    for link, label in expected:
        self.assertContains(response, '<a href="%s">%s</a>' % (link, label),
            msg_prefix=fail_msg, html=True)
def testNamedGroupFieldChoicesFilter(self):
    """
    Ensures the filter UI shows correctly when at least one named group has
    been used in the choices option of a model field.
    """
    fail_msg = "Changelist filter isn't showing options contained inside a model field 'choices' option named group."
    response = self.client.get('/test_admin/%s/admin_views/fabric/' % self.urlbit)
    self.assertContains(response, '<div id="changelist-filter">')
    # Both grouped choices must appear as filter options.
    for option in ('<a href="?surface__exact=x">Horizontal</a>',
                   '<a href="?surface__exact=y">Vertical</a>'):
        self.assertContains(response, option, msg_prefix=fail_msg, html=True)
def testChangeListNullBooleanDisplay(self):
    """A NULL boolean value renders the "unknown" icon on the changelist."""
    Post.objects.create(public=None)
    # This hard-codes the URL because the test would fail if it ran
    # against the 'admin2' custom admin, which doesn't register the
    # Post model.
    response = self.client.get("/test_admin/admin/admin_views/post/")
    self.assertContains(response, 'icon-unknown.gif')
def testI18NLanguageNonEnglishDefault(self):
    """
    Check if the JavaScript i18n view returns an empty language catalog
    if the default language is non-English but the selected language
    is English. See #13388 and #3594 for more details.
    """
    with self.settings(LANGUAGE_CODE='fr'):
        with translation.override('en-us'):
            catalog = self.client.get('/test_admin/admin/jsi18n/')
            # No French string may leak into the English catalog.
            self.assertNotContains(catalog, 'Choisir une heure')
def testI18NLanguageNonEnglishFallback(self):
    """
    Makes sure that the fallback language is still working properly
    in cases where the selected language cannot be found.
    """
    with self.settings(LANGUAGE_CODE='fr'):
        with translation.override('none'):
            catalog = self.client.get('/test_admin/admin/jsi18n/')
            # The unknown language falls back to French (LANGUAGE_CODE).
            self.assertContains(catalog, 'Choisir une heure')
def testL10NDeactivated(self):
    """
    Check if L10N is deactivated, the JavaScript i18n view doesn't
    return localized date/time formats. Refs #14824.
    """
    with self.settings(LANGUAGE_CODE='ru', USE_L10N=False):
        with translation.override('none'):
            catalog = self.client.get('/test_admin/admin/jsi18n/')
            # The Russian localized format must not be served...
            self.assertNotContains(catalog, '%d.%m.%Y %H:%M:%S')
            # ...only the non-localized default.
            self.assertContains(catalog, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
    """Lookups not allowed by the ModelAdmin must be rejected with a 400
    and logged; whitelisted and local-field lookups must succeed."""
    with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
        response = self.client.get("/test_admin/admin/admin_views/album/?owner__email__startswith=fuzzy")
        self.assertEqual(response.status_code, 400)
        # The rejection is logged exactly once.
        self.assertEqual(len(calls), 1)
    # Filters are allowed if explicitly included in list_filter
    response = self.client.get("/test_admin/admin/admin_views/thing/?color__value__startswith=red")
    self.assertEqual(response.status_code, 200)
    response = self.client.get("/test_admin/admin/admin_views/thing/?color__value=red")
    self.assertEqual(response.status_code, 200)
    # Filters should be allowed if they involve a local field without the
    # need to whitelist them in list_filter or date_hierarchy.
    response = self.client.get("/test_admin/admin/admin_views/person/?age__gt=30")
    self.assertEqual(response.status_code, 200)
    e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
    e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
    response = self.client.get("/test_admin/admin/admin_views/workhour/")
    self.assertEqual(response.status_code, 200)
    # The changelist itself renders a filter using the parent-link
    # lookup, so filtering on it must also be accepted below.
    self.assertContains(response, 'employee__person_ptr__exact')
    response = self.client.get("/test_admin/admin/admin_views/workhour/?employee__person_ptr__exact=%d" % e1.pk)
    self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
    """TO_FIELD_VAR values must be validated: only the primary key or
    fields actually referenced by other registered models (directly,
    via m2m, inheritance or inlines) are accepted; anything else is
    rejected with a 400 and logged."""
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get("/test_admin/admin/admin_views/section/", {TO_FIELD_VAR: 'missing_field'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # Specifying a field that is not referred to by any other model registered
    # to this admin site should raise an exception.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get("/test_admin/admin/admin_views/section/", {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
    response = self.client.get("/test_admin/admin/admin_views/notreferenced/", {TO_FIELD_VAR: 'id'})
    self.assertEqual(response.status_code, 200)
    # Specifying a field referenced by another model through a m2m should be allowed.
    # XXX: We're not testing against a non-primary key field since the admin doesn't
    # support it yet, ref #23862
    response = self.client.get("/test_admin/admin/admin_views/recipe/", {TO_FIELD_VAR: 'id'})
    self.assertEqual(response.status_code, 200)
    # #23604 - Specifying a field referenced through a reverse m2m relationship should be allowed.
    # XXX: We're not testing against a non-primary key field since the admin doesn't
    # support it yet, ref #23862
    response = self.client.get("/test_admin/admin/admin_views/ingredient/", {TO_FIELD_VAR: 'id'})
    self.assertEqual(response.status_code, 200)
    # #23329 - Specifying a field that is not referred to by any other model directly registered
    # to this admin site but registered through inheritance should be allowed.
    response = self.client.get("/test_admin/admin/admin_views/referencedbyparent/", {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
    # #23431 - Specifying a field that is only referred to by an inline of a registered
    # model should be allowed.
    response = self.client.get("/test_admin/admin/admin_views/referencedbyinline/", {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
def test_allowed_filtering_15103(self):
    """
    Regressions test for ticket 15103 - filtering on fields defined in a
    ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
    can break.
    """
    # These lookups must be accepted (200) rather than rejected as
    # disallowed filtering (400).
    url = "/test_admin/admin/admin_views/inquisition/?leader__name=Palin&leader__age=27"
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
    """
    Regression test for ticket 20664 - ensure the pk is properly quoted.
    """
    actor = Actor.objects.create(name="Palin", age=27)
    popup_url = "/test_admin/admin/admin_views/actor/?%s" % IS_POPUP_VAR
    response = self.client.get(popup_url)
    # The pk must appear quoted in the dismiss call.
    expected_call = "opener.dismissRelatedLookupPopup(window, '%s')" % actor.pk
    self.assertContains(response, expected_call)
def test_hide_change_password(self):
    """
    Tests if the "change password" link in the admin is hidden if the User
    does not have a usable password set.
    (against 9bea85795705d015cdadc82c68b99196a8554f5c)
    """
    user = User.objects.get(username='super')
    # Mark the logged-in user's password unusable; the admin index must
    # then stop offering the password-change link.
    # (The previous revision also saved the old password hash into an
    # unused local variable; that dead assignment has been removed.)
    user.set_unusable_password()
    user.save()
    response = self.client.get('/test_admin/admin/')
    self.assertNotContains(response, reverse('admin:password_change'),
        msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.')
def test_change_view_with_show_delete_extra_context(self):
    """
    Ensured that the 'show_delete' context variable in the admin's change
    view actually controls the display of the delete button.
    Refs #10057.
    """
    obj = UndeletableObject.objects.create(name='foo')
    change_url = '/test_admin/%s/admin_views/undeletableobject/%d/' % (self.urlbit, obj.pk)
    response = self.client.get(change_url)
    # The delete button must be suppressed for this object's change view.
    self.assertNotContains(response, 'deletelink')
def test_allows_attributeerror_to_bubble_up(self):
    """
    Ensure that AttributeErrors are allowed to bubble when raised inside
    a change list view.
    Requires a model to be created so there's something to be displayed
    Refs: #16655, #18593, and #18747
    """
    # At least one row must exist, otherwise the changelist never touches
    # the offending attribute.
    Simple.objects.create()
    changelist_url = '/test_admin/%s/admin_views/simple/' % self.urlbit
    with self.assertRaises(AttributeError):
        self.client.get(changelist_url)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewFormUrlTest(TestCase):
    """Tests around form_url propagation and custom filter templates."""
    urls = "admin_views.urls"
    fixtures = ["admin-views-users.xml"]
    urlbit = "admin3"

    def setUp(self):
        # Every test runs as a logged-in superuser.
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def testChangeFormUrlHasCorrectValue(self):
        """
        Tests whether change_view has form_url in response.context
        """
        page = self.client.get('/test_admin/%s/admin_views/section/1/' % self.urlbit)
        self.assertTrue('form_url' in page.context, msg='form_url not present in response.context')
        self.assertEqual(page.context['form_url'], 'pony')

    def test_filter_with_custom_template(self):
        """
        Ensure that one can use a custom template to render an admin filter.
        Refs #17515.
        """
        extra_dir = os.path.join(os.path.dirname(upath(__file__)), 'templates')
        template_dirs = settings.TEMPLATE_DIRS + (extra_dir,)
        with self.settings(TEMPLATE_DIRS=template_dirs):
            page = self.client.get("/test_admin/admin/admin_views/color2/")
            self.assertTemplateUsed(page, 'custom_filter_template.html')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminJavaScriptTest(TestCase):
    """Tests for the JavaScript the admin embeds in its pages."""
    fixtures = ['admin-views-users.xml']
    urls = "admin_views.urls"

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def testSingleWidgetFirsFieldFocus(self):
        """
        JavaScript-assisted auto-focus on first field.
        """
        page = self.client.get('/test_admin/%s/admin_views/picture/add/' % 'admin')
        self.assertContains(
            page,
            '<script type="text/javascript">document.getElementById("id_name").focus();</script>'
        )

    def testMultiWidgetFirsFieldFocus(self):
        """
        JavaScript-assisted auto-focus should work if a model/ModelAdmin setup
        is such that the first form field has a MultiWidget.
        """
        page = self.client.get('/test_admin/%s/admin_views/reservation/add/' % 'admin')
        self.assertContains(
            page,
            '<script type="text/javascript">document.getElementById("id_start_date_0").focus();</script>'
        )

    def test_js_minified_only_if_debug_is_false(self):
        """
        Ensure that the minified versions of the JS files are only used when
        DEBUG is False.
        Refs #17521.
        """
        scripts = ('jquery', 'prepopulate', 'actions', 'collapse', 'inlines')
        add_url = '/test_admin/%s/admin_views/section/add/' % 'admin'
        with override_settings(DEBUG=False):
            response = self.client.get(add_url)
            for name in scripts:
                # With DEBUG off, only the minified bundles are served.
                self.assertNotContains(response, '%s.js' % name)
                self.assertContains(response, '%s.min.js' % name)
        with override_settings(DEBUG=True):
            response = self.client.get(add_url)
            for name in scripts:
                # With DEBUG on, the readable sources are served instead.
                self.assertContains(response, '%s.js' % name)
                self.assertNotContains(response, '%s.min.js' % name)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class SaveAsTests(TestCase):
    """Tests for the admin's "save as new" feature."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml', 'admin-views-person.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_save_as_duplication(self):
        """Ensure save as actually creates a new person"""
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
        self.client.post('/test_admin/admin/admin_views/person/1/', post_data)
        # A new row exists and the original (pk=1) is left in place.
        # count() is used instead of len() so the check runs as a single
        # COUNT query rather than fetching every row.
        self.assertEqual(Person.objects.filter(name='John M').count(), 1)
        self.assertEqual(Person.objects.filter(id=1).count(), 1)

    def test_save_as_display(self):
        """
        Ensure that 'save as' is displayed when activated and after submitting
        invalid data aside save_as_new will not show us a form to overwrite the
        initial model.
        """
        response = self.client.get('/test_admin/admin/admin_views/person/1/')
        self.assertTrue(response.context['save_as'])
        # gender=3 is invalid, so the re-rendered form must point at the
        # add view, not back at the original object.
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 3, 'alive': 'checked'}
        response = self.client.post('/test_admin/admin/admin_views/person/1/', post_data)
        self.assertEqual(response.context['form_url'], '/test_admin/admin/admin_views/person/add/')
class CustomModelAdminTest(AdminViewBasicTestCase):
    """Runs the basic admin tests against the custom 'admin2' site and
    additionally checks its overridden views and templates."""
    urls = "admin_views.urls"
    urlbit = "admin2"

    def testCustomAdminSiteLoginForm(self):
        """The custom site's login form validates and reports its own errors."""
        self.client.logout()
        response = self.client.get('/test_admin/admin2/')
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
        login = self.client.post('/test_admin/admin2/', {
            REDIRECT_FIELD_NAME: '/test_admin/admin2/',
            LOGIN_FORM_KEY: 1,
            'username': 'customform',
            'password': 'secret',
        })
        self.assertIsInstance(login, TemplateResponse)
        self.assertEqual(login.status_code, 200)
        # 'custom form error' is presumably raised by the site's custom
        # login form — see the admin2 site definition.
        self.assertContains(login, 'custom form error')

    def testCustomAdminSiteLoginTemplate(self):
        """The custom login template is used for the login page."""
        self.client.logout()
        response = self.client.get('/test_admin/admin2/')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/login.html')
        self.assertContains(response, 'Hello from a custom login template')

    def testCustomAdminSiteLogoutTemplate(self):
        """The custom logout template is used for the logout page."""
        response = self.client.get('/test_admin/admin2/logout/')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/logout.html')
        self.assertContains(response, 'Hello from a custom logout template')

    def testCustomAdminSiteIndexViewAndTemplate(self):
        """index_template may be a list of template paths."""
        try:
            response = self.client.get('/test_admin/admin2/')
        except TypeError:
            self.fail('AdminSite.index_template should accept a list of template paths')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/index.html')
        self.assertContains(response, 'Hello from a custom index template *bar*')

    def testCustomAdminSitePasswordChangeTemplate(self):
        """The custom password-change form template is used."""
        response = self.client.get('/test_admin/admin2/password_change/')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'Hello from a custom password change form template')

    def testCustomAdminSitePasswordChangeDoneTemplate(self):
        """The custom password-change-done template is used."""
        response = self.client.get('/test_admin/admin2/password_change/done/')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
        self.assertContains(response, 'Hello from a custom password change done template')

    def testCustomAdminSiteView(self):
        """A view added directly to the custom site is reachable."""
        self.client.login(username='super', password='secret')
        response = self.client.get('/test_admin/%s/my_view/' % self.urlbit)
        self.assertEqual(response.content, b"Django is a magical pony!")

    def test_pwd_change_custom_template(self):
        """change_user_password_template may be a list of template paths."""
        self.client.login(username='super', password='secret')
        su = User.objects.get(username='super')
        try:
            response = self.client.get('/test_admin/admin4/auth/user/%s/password/' % su.pk)
        except TypeError:
            self.fail('ModelAdmin.change_user_password_template should accept a list of template paths')
        self.assertEqual(response.status_code, 200)
def get_perm(Model, perm):
    """Return the Permission with codename ``perm`` for the given model class."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(content_type=content_type, codename=perm)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewPermissionsTest(TestCase):
    """Tests for Admin Views Permissions."""
    urls = "admin_views.urls"
    # Provides the super/adduser/changeuser/deleteuser/joepublic accounts
    # used by setUp() and the login POST dicts below.
    fixtures = ['admin-views-users.xml']
def setUp(self):
    """Test setup."""
    # Setup permissions, for our users who can add, change, and delete.
    # We can't put this into the fixture, because the content type id
    # and the permission id could be different on each run of the test.
    opts = Article._meta
    # User who can add Articles
    add_user = User.objects.get(username='adduser')
    add_user.user_permissions.add(get_perm(Article,
        get_permission_codename('add', opts)))
    # User who can change Articles
    change_user = User.objects.get(username='changeuser')
    change_user.user_permissions.add(get_perm(Article,
        get_permission_codename('change', opts)))
    # User who can delete Articles (and is also granted delete on Section)
    delete_user = User.objects.get(username='deleteuser')
    delete_user.user_permissions.add(get_perm(Article,
        get_permission_codename('delete', opts)))
    delete_user.user_permissions.add(get_perm(Section,
        get_permission_codename('delete', Section._meta)))
    # login POST dicts, one per account exercised by the tests below.
    self.super_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'username': 'super',
        'password': 'secret',
    }
    # Same superuser, identified by email address instead of username.
    self.super_email_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'username': 'super@example.com',
        'password': 'secret',
    }
    # Superuser email with a wrong password.
    self.super_email_bad_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'username': 'super@example.com',
        'password': 'notsecret',
    }
    self.adduser_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'username': 'adduser',
        'password': 'secret',
    }
    self.changeuser_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'username': 'changeuser',
        'password': 'secret',
    }
    self.deleteuser_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'username': 'deleteuser',
        'password': 'secret',
    }
    # A non-staff account that must be rejected by the admin login.
    self.joepublic_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'username': 'joepublic',
        'password': 'secret',
    }
    # A POST with no username at all.
    self.no_username_login = {
        REDIRECT_FIELD_NAME: '/test_admin/admin/',
        LOGIN_FORM_KEY: 1,
        'password': 'secret',
    }
def testLogin(self):
    """
    Make sure only staff members can log in.
    Successful posts to the login page will redirect to the original url.
    Unsuccessful attempts will continue to render the login page with
    a 200 status code.
    """
    # Super User
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    login = self.client.post('/test_admin/admin/', self.super_login)
    self.assertRedirects(login, '/test_admin/admin/')
    # A redirect response carries no template context.
    self.assertFalse(login.context)
    self.client.get('/test_admin/admin/logout/')
    # Test if user enters email address
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    login = self.client.post('/test_admin/admin/', self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)
    # only correct passwords get a username hint
    login = self.client.post('/test_admin/admin/', self.super_email_bad_login)
    self.assertContains(login, ERROR_MESSAGE)
    new_user = User(username='jondoe', password='secret', email='super@example.com')
    new_user.save()
    # check to ensure if there are multiple email addresses a user doesn't get a 500
    login = self.client.post('/test_admin/admin/', self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)
    # Add User
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    login = self.client.post('/test_admin/admin/', self.adduser_login)
    self.assertRedirects(login, '/test_admin/admin/')
    self.assertFalse(login.context)
    self.client.get('/test_admin/admin/logout/')
    # Change User
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    login = self.client.post('/test_admin/admin/', self.changeuser_login)
    self.assertRedirects(login, '/test_admin/admin/')
    self.assertFalse(login.context)
    self.client.get('/test_admin/admin/logout/')
    # Delete User
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    login = self.client.post('/test_admin/admin/', self.deleteuser_login)
    self.assertRedirects(login, '/test_admin/admin/')
    self.assertFalse(login.context)
    self.client.get('/test_admin/admin/logout/')
    # Regular User should not be able to login.
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    login = self.client.post('/test_admin/admin/', self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, ERROR_MESSAGE)
    # Requests without username should not return 500 errors.
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    login = self.client.post('/test_admin/admin/', self.no_username_login)
    self.assertEqual(login.status_code, 200)
    # The login form re-renders with a field error, not a server error.
    form = login.context[0].get('form')
    self.assertEqual(form.errors['username'][0], 'This field is required.')
def testLoginSuccessfullyRedirectsToOriginalUrl(self):
    """After a successful login, the user is sent back to the URL they
    originally requested, query string included."""
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    query_string = 'the-answer=42'
    redirect_url = '/test_admin/admin/?%s' % query_string
    post_data = dict(self.super_login)
    post_data[REDIRECT_FIELD_NAME] = redirect_url
    login = self.client.post('/test_admin/admin/', post_data, QUERY_STRING=query_string)
    self.assertRedirects(login, redirect_url)
def testDoubleLoginIsNotAllowed(self):
    """Regression test for #19327"""
    response = self.client.get('/test_admin/admin/')
    self.assertEqual(response.status_code, 200)
    # Establish a valid admin session
    login = self.client.post('/test_admin/admin/', self.super_login)
    self.assertRedirects(login, '/test_admin/admin/')
    self.assertFalse(login.context)
    # Logging in with non-admin user fails
    login = self.client.post('/test_admin/admin/', self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, ERROR_MESSAGE)
    # Establish a valid admin session
    login = self.client.post('/test_admin/admin/', self.super_login)
    self.assertRedirects(login, '/test_admin/admin/')
    self.assertFalse(login.context)
    # Logging in with admin user while already logged in
    login = self.client.post('/test_admin/admin/', self.super_login)
    self.assertRedirects(login, '/test_admin/admin/')
    self.assertFalse(login.context)
    self.client.get('/test_admin/admin/logout/')
def testAddView(self):
    """Test add view restricts access and actually adds items."""
    add_dict = {'title': 'Døm ikke',
        'content': '<p>great article</p>',
        'date_0': '2008-03-18', 'date_1': '10:54:39',
        'section': 1}
    # Change User should not have access to add articles
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.changeuser_login)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
    response = self.client.get('/test_admin/admin/admin_views/article/add/')
    self.assertEqual(response.status_code, 403)
    # Try POST just to make sure
    post = self.client.post('/test_admin/admin/admin_views/article/add/', add_dict)
    self.assertEqual(post.status_code, 403)
    self.assertEqual(Article.objects.all().count(), 3)
    self.client.get('/test_admin/admin/logout/')
    # Add user may login and POST to add view, then redirect to admin root
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.adduser_login)
    addpage = self.client.get('/test_admin/admin/admin_views/article/add/')
    change_list_link = '› <a href="/test_admin/admin/admin_views/article/">Articles</a>'
    self.assertNotContains(addpage, change_list_link,
        msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.')
    post = self.client.post('/test_admin/admin/admin_views/article/add/', add_dict)
    self.assertRedirects(post, '/test_admin/admin/')
    self.assertEqual(Article.objects.all().count(), 4)
    # Adding the article sent exactly one notification email
    # (presumably from a save hook — see the Article ModelAdmin).
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
    self.client.get('/test_admin/admin/logout/')
    # Super can add too, but is redirected to the change list view
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.super_login)
    addpage = self.client.get('/test_admin/admin/admin_views/article/add/')
    self.assertContains(addpage, change_list_link,
        msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.')
    post = self.client.post('/test_admin/admin/admin_views/article/add/', add_dict)
    self.assertRedirects(post, '/test_admin/admin/admin_views/article/')
    self.assertEqual(Article.objects.all().count(), 5)
    self.client.get('/test_admin/admin/logout/')
    # 8509 - if a normal user is already logged in, it is possible
    # to change user into the superuser without error
    login = self.client.login(username='joepublic', password='secret')  # NOTE(review): return value unused
    # Check and make sure that if user expires, data still persists
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.super_login)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
def testChangeView(self):
    """Change view should restrict access and allow users to edit items."""
    change_dict = {'title': 'Ikke fordømt',
        'content': '<p>edited article</p>',
        'date_0': '2008-03-18', 'date_1': '10:54:39',
        'section': 1}
    # add user should not be able to view the list of article or change any of them
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.adduser_login)
    response = self.client.get('/test_admin/admin/admin_views/article/')
    self.assertEqual(response.status_code, 403)
    response = self.client.get('/test_admin/admin/admin_views/article/1/')
    self.assertEqual(response.status_code, 403)
    post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
    self.assertEqual(post.status_code, 403)
    self.client.get('/test_admin/admin/logout/')
    # change user can view all items and edit them
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.changeuser_login)
    response = self.client.get('/test_admin/admin/admin_views/article/')
    self.assertEqual(response.status_code, 200)
    response = self.client.get('/test_admin/admin/admin_views/article/1/')
    self.assertEqual(response.status_code, 200)
    post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
    self.assertRedirects(post, '/test_admin/admin/admin_views/article/')
    self.assertEqual(Article.objects.get(pk=1).content, '<p>edited article</p>')
    # one error in form should produce singular error message, multiple errors plural
    change_dict['title'] = ''
    post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
    self.assertContains(post, 'Please correct the error below.',
        msg_prefix='Singular error message not found in response to post with one error')
    change_dict['content'] = ''
    post = self.client.post('/test_admin/admin/admin_views/article/1/', change_dict)
    self.assertContains(post, 'Please correct the errors below.',
        msg_prefix='Plural error message not found in response to post with multiple errors')
    self.client.get('/test_admin/admin/logout/')
    # Test redirection when using row-level change permissions. Refs #11513.
    # Object with id=1 is denied, id=2 is allowed (see the model admin).
    RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
    RowLevelChangePermissionModel.objects.create(id=2, name="even id")
    for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
        self.client.post('/test_admin/admin/', login_dict)
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/')
        self.assertEqual(response.status_code, 403)
        response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/', {'name': 'changed'})
        # The denied POST must not have modified the object.
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertEqual(response.status_code, 403)
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/', {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertRedirects(response, '/test_admin/admin/')
        self.client.get('/test_admin/admin/logout/')
    # Anonymous / invalid logins get the login form back instead of a 403,
    # and no object is ever modified.
    for login_dict in [self.joepublic_login, self.no_username_login]:
        self.client.post('/test_admin/admin/', login_dict)
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/', {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.post('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/', {'name': 'changed again'})
        # Still 'changed' from the earlier authorized edit — this POST
        # must have been rejected.
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
    self.client.get('/test_admin/admin/logout/')
def testHistoryView(self):
    """History view should restrict access."""
    # add user should not be able to view the list of article or change any of them
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.adduser_login)
    response = self.client.get('/test_admin/admin/admin_views/article/1/history/')
    self.assertEqual(response.status_code, 403)
    self.client.get('/test_admin/admin/logout/')
    # change user can view all items and edit them
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.changeuser_login)
    response = self.client.get('/test_admin/admin/admin_views/article/1/history/')
    self.assertEqual(response.status_code, 200)
    # Test redirection when using row-level change permissions. Refs #11513.
    RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
    RowLevelChangePermissionModel.objects.create(id=2, name="even id")
    # Users holding the model-level change permission: the ModelAdmin's
    # row-level check (defined elsewhere in this test app) is expected to
    # deny the odd-id object (403) and allow the even-id one (200).
    for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
        self.client.post('/test_admin/admin/', login_dict)
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/history/')
        self.assertEqual(response.status_code, 403)
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/history/')
        self.assertEqual(response.status_code, 200)
        self.client.get('/test_admin/admin/logout/')
    # Failed logins (non-staff user, missing username) should get the login
    # form re-rendered with a 200 for both objects rather than a 403.
    for login_dict in [self.joepublic_login, self.no_username_login]:
        self.client.post('/test_admin/admin/', login_dict)
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/history/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/history/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        self.client.get('/test_admin/admin/logout/')
def testConditionallyShowAddSectionLink(self):
    """
    The foreign key widget should only show the "add related" button if the
    user has permission to add that related item.
    """
    article_add_url = '/test_admin/admin/admin_views/article/add/'
    add_link_marker = ' class="add-another"'
    # Log in as the add-only user.
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.adduser_login)
    # Without the "add section" permission there must be no add-another
    # widget next to the Section foreign key.
    response = self.client.get(article_add_url)
    self.assertNotContains(response, add_link_marker)
    # Grant the permission; the link should now be rendered.
    section_add_perm = get_perm(Section, get_permission_codename('add', Section._meta))
    User.objects.get(username='adduser').user_permissions.add(section_add_perm)
    response = self.client.get(article_add_url)
    self.assertContains(response, add_link_marker)
def testCustomModelAdminTemplates(self):
    """A ModelAdmin with custom templates should use them for every view:
    change list, add, change, delete, delete-selected and history."""
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.super_login)
    # Test custom change list template with custom extra context
    response = self.client.get('/test_admin/admin/admin_views/customarticle/')
    self.assertContains(response, "var hello = 'Hello!';")
    self.assertTemplateUsed(response, 'custom_admin/change_list.html')
    # Test custom add form template
    response = self.client.get('/test_admin/admin/admin_views/customarticle/add/')
    self.assertTemplateUsed(response, 'custom_admin/add_form.html')
    # Add an article so we can test delete, change, and history views
    post = self.client.post('/test_admin/admin/admin_views/customarticle/add/', {
        'content': '<p>great article</p>',
        'date_0': '2008-03-18',
        'date_1': '10:54:39'
    })
    self.assertRedirects(post, '/test_admin/admin/admin_views/customarticle/')
    self.assertEqual(CustomArticle.objects.all().count(), 1)
    article_pk = CustomArticle.objects.all()[0].pk
    # Test custom delete, change, and object history templates
    # Test custom change form template
    response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/' % article_pk)
    self.assertTemplateUsed(response, 'custom_admin/change_form.html')
    response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/delete/' % article_pk)
    self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
    # NOTE(review): '_selected_action' hard-codes '1' instead of using
    # article_pk -- presumably safe because this is the only CustomArticle,
    # but article_pk would be more robust. TODO confirm before changing.
    response = self.client.post('/test_admin/admin/admin_views/customarticle/', data={
        'index': 0,
        'action': ['delete_selected'],
        '_selected_action': ['1'],
    })
    self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
    response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/history/' % article_pk)
    self.assertTemplateUsed(response, 'custom_admin/object_history.html')
    self.client.get('/test_admin/admin/logout/')
def testDeleteView(self):
    """Delete view should restrict access and actually delete items."""
    delete_dict = {'post': 'yes'}
    # add user should not be able to delete articles
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.adduser_login)
    response = self.client.get('/test_admin/admin/admin_views/article/1/delete/')
    self.assertEqual(response.status_code, 403)
    post = self.client.post('/test_admin/admin/admin_views/article/1/delete/', delete_dict)
    self.assertEqual(post.status_code, 403)
    # Nothing was deleted by the forbidden POST.
    self.assertEqual(Article.objects.all().count(), 3)
    self.client.get('/test_admin/admin/logout/')
    # Delete user can delete
    self.client.get('/test_admin/admin/')
    self.client.post('/test_admin/admin/', self.deleteuser_login)
    response = self.client.get('/test_admin/admin/admin_views/section/1/delete/')
    # test response contains link to related Article
    self.assertContains(response, "admin_views/article/1/")
    response = self.client.get('/test_admin/admin/admin_views/article/1/delete/')
    self.assertEqual(response.status_code, 200)
    post = self.client.post('/test_admin/admin/admin_views/article/1/delete/', delete_dict)
    self.assertRedirects(post, '/test_admin/admin/')
    self.assertEqual(Article.objects.all().count(), 2)
    # Deleting the article sends a notification e-mail (hook presumably
    # defined on the model/admin elsewhere in this test app).
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
    # The deletion must also be recorded in the admin log.
    article_ct = ContentType.objects.get_for_model(Article)
    logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
    self.assertEqual(logged.object_id, '1')
    self.client.get('/test_admin/admin/logout/')
def testDisabledPermissionsWhenLoggedIn(self):
    """A user whose account is deactivated mid-session is treated as logged
    out: the index and the secure view both render the login form."""
    self.client.login(username='super', password='secret')
    # Deactivate the account *after* a successful login.
    user = User.objects.get(username='super')
    user.is_active = False
    user.save()
    index_page = self.client.get('/test_admin/admin/')
    self.assertContains(index_page, 'id="login-form"')
    # There must be no logout link for a deactivated user.
    self.assertNotContains(index_page, 'Log out')
    secure_page = self.client.get('/test_admin/admin/secure-view/')
    self.assertContains(secure_page, 'id="login-form"')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewsNoUrlTest(TestCase):
    """Regression test for #17333"""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']
    def setUp(self):
        opts = Report._meta
        # User who can change Reports
        change_user = User.objects.get(username='changeuser')
        change_user.user_permissions.add(get_perm(Report,
            get_permission_codename('change', opts)))
        # login POST dict
        self.changeuser_login = {
            REDIRECT_FIELD_NAME: '/test_admin/admin/',
            LOGIN_FORM_KEY: 1,
            'username': 'changeuser',
            'password': 'secret',
        }
    def test_no_standard_modeladmin_urls(self):
        """Admin index views don't break when user's ModelAdmin removes standard urls"""
        self.client.get('/test_admin/admin/')
        self.client.post('/test_admin/admin/', self.changeuser_login)
        r = self.client.get('/test_admin/admin/')
        # we shouldn't get a 500 error caused by a NoReverseMatch
        self.assertEqual(r.status_code, 200)
        self.client.get('/test_admin/admin/logout/')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewDeletedObjectsTest(TestCase):
    """Tests for the "objects to be deleted" summary rendered by the admin
    delete-confirmation page: nesting, cycles, missing permissions,
    protected relations, unregistered models, inheritance and generic
    relations."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml', 'deleted-objects.xml']
    def setUp(self):
        # Run as the fixture superuser unless a test logs in differently.
        self.client.login(username='super', password='secret')
    def tearDown(self):
        self.client.logout()
    def test_nesting(self):
        """
        Objects should be nested to display the relationships that
        cause them to be scheduled for deletion.
        """
        pattern = re.compile(br"""<li>Plot: <a href=".+/admin_views/plot/1/">World Domination</a>\s*<ul>\s*<li>Plot details: <a href=".+/admin_views/plotdetails/1/">almost finished</a>""")
        response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(1))
        six.assertRegex(self, response.content, pattern)
    def test_cyclic(self):
        """
        Cyclic relationships should still cause each object to only be
        listed once.
        """
        one = """<li>Cyclic one: <a href="/test_admin/admin/admin_views/cyclicone/1/">I am recursive</a>"""
        two = """<li>Cyclic two: <a href="/test_admin/admin/admin_views/cyclictwo/1/">I am recursive too</a>"""
        response = self.client.get('/test_admin/admin/admin_views/cyclicone/%s/delete/' % quote(1))
        # count=1: despite the cycle, each object appears exactly once.
        self.assertContains(response, one, 1)
        self.assertContains(response, two, 1)
    def test_perms_needed(self):
        """A user missing delete permission on a related model should be
        told which object types block the deletion."""
        self.client.logout()
        delete_user = User.objects.get(username='deleteuser')
        # Grant delete on Plot only; plot details remain forbidden.
        delete_user.user_permissions.add(get_perm(Plot,
            get_permission_codename('delete', Plot._meta)))
        self.assertTrue(self.client.login(username='deleteuser',
                                          password='secret'))
        response = self.client.get('/test_admin/admin/admin_views/plot/%s/delete/' % quote(1))
        self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
        self.assertContains(response, "<li>plot details</li>")
    def test_protected(self):
        """A deletion blocked by protected related objects should list
        those objects on the confirmation page."""
        q = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q, answer="Because.")
        a2 = Answer.objects.create(question=q, answer="Yes.")
        response = self.client.get("/test_admin/admin/admin_views/question/%s/delete/" % quote(q.pk))
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Because.</a></li>' % a1.pk)
        self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Yes.</a></li>' % a2.pk)
    def test_not_registered(self):
        # A related model with no registered ModelAdmin is listed as plain
        # text (no change-form link).
        should_contain = """<li>Secret hideout: underground bunker"""
        response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(1))
        self.assertContains(response, should_contain, 1)
    def test_multiple_fkeys_to_same_model(self):
        """
        If a deleted object has two relationships from another model,
        both of those should be followed in looking for related
        objects to delete.
        """
        should_contain = """<li>Plot: <a href="/test_admin/admin/admin_views/plot/1/">World Domination</a>"""
        response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(1))
        self.assertContains(response, should_contain)
        response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(2))
        self.assertContains(response, should_contain)
    def test_multiple_fkeys_to_same_instance(self):
        """
        If a deleted object has two relationships pointing to it from
        another object, the other object should still only be listed
        once.
        """
        should_contain = """<li>Plot: <a href="/test_admin/admin/admin_views/plot/2/">World Peace</a></li>"""
        response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(2))
        self.assertContains(response, should_contain, 1)
    def test_inheritance(self):
        """
        In the case of an inherited model, if either the child or
        parent-model instance is deleted, both instances are listed
        for deletion, as well as any relationships they have.
        """
        should_contain = [
            """<li>Villain: <a href="/test_admin/admin/admin_views/villain/3/">Bob</a>""",
            """<li>Super villain: <a href="/test_admin/admin/admin_views/supervillain/3/">Bob</a>""",
            """<li>Secret hideout: floating castle""",
            """<li>Super secret hideout: super floating castle!"""
        ]
        # Deleting from either end of the inheritance chain must produce
        # the same complete summary.
        response = self.client.get('/test_admin/admin/admin_views/villain/%s/delete/' % quote(3))
        for should in should_contain:
            self.assertContains(response, should, 1)
        response = self.client.get('/test_admin/admin/admin_views/supervillain/%s/delete/' % quote(3))
        for should in should_contain:
            self.assertContains(response, should, 1)
    def test_generic_relations(self):
        """
        If a deleted object has GenericForeignKeys pointing to it,
        those objects should be listed for deletion.
        """
        plot = Plot.objects.get(pk=3)
        tag = FunkyTag.objects.create(content_object=plot, name='hott')
        should_contain = """<li>Funky tag: hott"""
        response = self.client.get('/test_admin/admin/admin_views/plot/%s/delete/' % quote(3))
        self.assertContains(response, should_contain)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewStringPrimaryKeyTest(TestCase):
    """Tests that admin URLs and links work for a model whose primary key is
    a string containing characters that need quoting/escaping."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml', 'string-primary-key.xml']
    def __init__(self, *args):
        super(AdminViewStringPrimaryKeyTest, self).__init__(*args)
        # A deliberately hostile pk: letters, digits, and punctuation that
        # requires URL quoting and HTML escaping. Must match the fixture.
        self.pk = """abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 -_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
    def setUp(self):
        self.client.login(username='super', password='secret')
        # Record a change-action log entry for the hostile pk so the
        # history/recent-actions tests have something to render.
        content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
        LogEntry.objects.log_action(100, content_type_pk, self.pk, self.pk, 2, change_message='Changed something')
    def tearDown(self):
        self.client.logout()
    def test_get_history_view(self):
        """
        Retrieving the history for an object using urlencoded form of primary
        key should work.
        Refs #12349, #18550.
        """
        response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/history/' % quote(self.pk))
        self.assertContains(response, escape(self.pk))
        self.assertContains(response, 'Changed something')
        self.assertEqual(response.status_code, 200)
    def test_get_change_view(self):
        "Retrieving the object using urlencoded form of primary key should work"
        response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(self.pk))
        self.assertContains(response, escape(self.pk))
        self.assertEqual(response.status_code, 200)
    def test_changelist_to_changeform_link(self):
        "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
        prefix = '/test_admin/admin/admin_views/modelwithstringprimarykey/'
        response = self.client.get(prefix)
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        pk_final_url = escape(iri_to_uri(urlquote(quote(self.pk))))
        should_contain = """<th><a href="%s%s/">%s</a></th>""" % (prefix, pk_final_url, escape(self.pk))
        self.assertContains(response, should_contain)
    def test_recentactions_link(self):
        "The link from the recent actions list referring to the changeform of the object should be quoted"
        response = self.client.get('/test_admin/admin/')
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (link, escape(self.pk))
        self.assertContains(response, should_contain)
    def test_recentactions_without_content_type(self):
        "If a LogEntry is missing content_type it will not display it in span tag under the hyperlink."
        response = self.client.get('/test_admin/admin/')
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (link, escape(self.pk))
        self.assertContains(response, should_contain)
        should_contain = "Model with string primary key" # capitalized in Recent Actions
        self.assertContains(response, should_contain)
        logentry = LogEntry.objects.get(content_type__name__iexact=should_contain)
        # http://code.djangoproject.com/ticket/10275
        # if the log entry doesn't have a content type it should still be
        # possible to view the Recent Actions part
        logentry.content_type = None
        logentry.save()
        # Exactly one occurrence (the content-type span) should disappear
        # once the content type is removed.
        counted_presence_before = response.content.count(force_bytes(should_contain))
        response = self.client.get('/test_admin/admin/')
        counted_presence_after = response.content.count(force_bytes(should_contain))
        self.assertEqual(counted_presence_before - 1,
                         counted_presence_after)
    def test_logentry_get_admin_url(self):
        "LogEntry.get_admin_url returns a URL to edit the entry's object or None for non-existent (possibly deleted) models"
        log_entry_name = "Model with string primary key" # capitalized in Recent Actions
        logentry = LogEntry.objects.get(content_type__name__iexact=log_entry_name)
        model = "modelwithstringprimarykey"
        desired_admin_url = "/test_admin/admin/admin_views/%s/%s/" % (model, escape(iri_to_uri(urlquote(quote(self.pk)))))
        self.assertEqual(logentry.get_admin_url(), desired_admin_url)
        # Pointing the content type at a model that doesn't exist should
        # make get_admin_url() fall back to None instead of raising.
        logentry.content_type.model = "non-existent"
        self.assertEqual(logentry.get_admin_url(), None)
    def test_deleteconfirmation_link(self):
        "The link from the delete confirmation page referring back to the changeform of the object should be quoted"
        response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/delete/' % quote(self.pk))
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        should_contain = """/%s/">%s</a>""" % (escape(iri_to_uri(urlquote(quote(self.pk)))), escape(self.pk))
        self.assertContains(response, should_contain)
    def test_url_conflicts_with_add(self):
        "A model with a primary key that ends with add should be visible"
        add_model = ModelWithStringPrimaryKey(pk="i have something to add")
        add_model.save()
        response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(add_model.pk))
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
    def test_url_conflicts_with_delete(self):
        "A model with a primary key that ends with delete should be visible"
        delete_model = ModelWithStringPrimaryKey(pk="delete")
        delete_model.save()
        response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(delete_model.pk))
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
    def test_url_conflicts_with_history(self):
        "A model with a primary key that ends with history should be visible"
        history_model = ModelWithStringPrimaryKey(pk="history")
        history_model.save()
        response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(history_model.pk))
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
    def test_shortcut_view_with_escaping(self):
        "'View on site should' work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get('/test_admin/admin/admin_views/modelwithstringprimarykey/%s/' % quote(model.pk))
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)
    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse('admin:%s_modelwithstringprimarykey_change' %
                      ModelWithStringPrimaryKey._meta.app_label,
                      args=(quote(self.pk),))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse('admin:%s_modelwithstringprimarykey_history' %
                                ModelWithStringPrimaryKey._meta.app_label,
                                args=(quote(self.pk),))
        self.assertContains(response, '<a href="%s" class="historylink"' % expected_link)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class SecureViewTests(TestCase):
    """Tests for a staff_member_required-protected view: login form display,
    redirect handling, and which users may authenticate."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']
    def setUp(self):
        # login POST dicts -- each targets the secure view as the
        # post-login redirect destination.
        self.super_login = {
            LOGIN_FORM_KEY: 1,
            REDIRECT_FIELD_NAME: '/test_admin/admin/secure-view/',
            'username': 'super',
            'password': 'secret',
        }
        self.super_email_login = {
            LOGIN_FORM_KEY: 1,
            REDIRECT_FIELD_NAME: '/test_admin/admin/secure-view/',
            'username': 'super@example.com',
            'password': 'secret',
        }
        self.super_email_bad_login = {
            LOGIN_FORM_KEY: 1,
            REDIRECT_FIELD_NAME: '/test_admin/admin/secure-view/',
            'username': 'super@example.com',
            'password': 'notsecret',
        }
        self.adduser_login = {
            LOGIN_FORM_KEY: 1,
            REDIRECT_FIELD_NAME: '/test_admin/admin/secure-view/',
            'username': 'adduser',
            'password': 'secret',
        }
        self.changeuser_login = {
            LOGIN_FORM_KEY: 1,
            REDIRECT_FIELD_NAME: '/test_admin/admin/secure-view/',
            'username': 'changeuser',
            'password': 'secret',
        }
        self.deleteuser_login = {
            LOGIN_FORM_KEY: 1,
            REDIRECT_FIELD_NAME: '/test_admin/admin/secure-view/',
            'username': 'deleteuser',
            'password': 'secret',
        }
        self.joepublic_login = {
            LOGIN_FORM_KEY: 1,
            REDIRECT_FIELD_NAME: '/test_admin/admin/secure-view/',
            'username': 'joepublic',
            'password': 'secret',
        }
    def tearDown(self):
        self.client.logout()
    def test_secure_view_shows_login_if_not_logged_in(self):
        "Ensure that we see the login form"
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertTemplateUsed(response, 'admin/login.html')
    def test_secure_view_login_successfully_redirects_to_original_url(self):
        # The query string on the original request must survive the login
        # round-trip and appear in the redirect target.
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertEqual(response.status_code, 200)
        query_string = 'the-answer=42'
        redirect_url = '/test_admin/admin/secure-view/?%s' % query_string
        new_next = {REDIRECT_FIELD_NAME: redirect_url}
        login = self.client.post('/test_admin/admin/secure-view/', dict(self.super_login, **new_next), QUERY_STRING=query_string)
        self.assertRedirects(login, redirect_url)
    def test_staff_member_required_decorator_works_as_per_admin_login(self):
        """
        Make sure only staff members can log in.
        Successful posts to the login page will redirect to the original url.
        Unsuccessful attempts will continue to render the login page with
        a 200 status code.
        """
        # Super User
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertEqual(response.status_code, 200)
        login = self.client.post('/test_admin/admin/secure-view/', self.super_login)
        self.assertRedirects(login, '/test_admin/admin/secure-view/')
        # A redirect response carries no template context.
        self.assertFalse(login.context)
        self.client.get('/test_admin/admin/logout/')
        # make sure the view removes test cookie
        self.assertEqual(self.client.session.test_cookie_worked(), False)
        # Test if user enters email address
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertEqual(response.status_code, 200)
        login = self.client.post('/test_admin/admin/secure-view/', self.super_email_login)
        self.assertContains(login, ERROR_MESSAGE)
        # only correct passwords get a username hint
        login = self.client.post('/test_admin/admin/secure-view/', self.super_email_bad_login)
        self.assertContains(login, ERROR_MESSAGE)
        new_user = User(username='jondoe', password='secret', email='super@example.com')
        new_user.save()
        # check to ensure if there are multiple email addresses a user doesn't get a 500
        login = self.client.post('/test_admin/admin/secure-view/', self.super_email_login)
        self.assertContains(login, ERROR_MESSAGE)
        # Add User
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertEqual(response.status_code, 200)
        login = self.client.post('/test_admin/admin/secure-view/', self.adduser_login)
        self.assertRedirects(login, '/test_admin/admin/secure-view/')
        self.assertFalse(login.context)
        self.client.get('/test_admin/admin/logout/')
        # Change User
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertEqual(response.status_code, 200)
        login = self.client.post('/test_admin/admin/secure-view/', self.changeuser_login)
        self.assertRedirects(login, '/test_admin/admin/secure-view/')
        self.assertFalse(login.context)
        self.client.get('/test_admin/admin/logout/')
        # Delete User
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertEqual(response.status_code, 200)
        login = self.client.post('/test_admin/admin/secure-view/', self.deleteuser_login)
        self.assertRedirects(login, '/test_admin/admin/secure-view/')
        self.assertFalse(login.context)
        self.client.get('/test_admin/admin/logout/')
        # Regular User should not be able to login.
        response = self.client.get('/test_admin/admin/secure-view/')
        self.assertEqual(response.status_code, 200)
        login = self.client.post('/test_admin/admin/secure-view/', self.joepublic_login)
        self.assertEqual(login.status_code, 200)
        # Login.context is a list of context dicts we just need to check the first one.
        self.assertContains(login, ERROR_MESSAGE)
        # 8509 - if a normal user is already logged in, it is possible
        # to change user into the superuser without error
        login = self.client.login(username='joepublic', password='secret')
        # Check and make sure that if user expires, data still persists
        self.client.get('/test_admin/admin/secure-view/')
        self.client.post('/test_admin/admin/secure-view/', self.super_login)
        # make sure the view removes test cookie
        self.assertEqual(self.client.session.test_cookie_worked(), False)
    def test_shortcut_view_only_available_to_staff(self):
        """
        Only admin users should be able to use the admin shortcut view.
        """
        user_ctype = ContentType.objects.get_for_model(User)
        user = User.objects.get(username='super')
        shortcut_url = "/test_admin/admin/r/%s/%s/" % (user_ctype.pk, user.pk)
        # Not logged in: we should see the login page.
        response = self.client.get(shortcut_url, follow=False)
        self.assertTemplateUsed(response, 'admin/login.html')
        # Logged in? Redirect.
        self.client.login(username='super', password='secret')
        response = self.client.get(shortcut_url, follow=False)
        # Can't use self.assertRedirects() because User.get_absolute_url() is silly.
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, 'http://example.com/users/super/')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewUnicodeTest(TestCase):
    """Tests that the admin change and delete views handle non-ASCII data."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-unicode.xml']
    def setUp(self):
        self.client.login(username='super', password='secret')
    def tearDown(self):
        self.client.logout()
    def testUnicodeEdit(self):
        """
        A test to ensure that POST on edit_view handles non-ascii characters.
        """
        post_data = {
            "name": "Test lærdommer",
            # inline data
            "chapter_set-TOTAL_FORMS": "6",
            "chapter_set-INITIAL_FORMS": "3",
            "chapter_set-MAX_NUM_FORMS": "0",
            "chapter_set-0-id": "1",
            "chapter_set-0-title": "Norske bostaver æøå skaper problemer",
            "chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
            "chapter_set-1-id": "2",
            "chapter_set-1-title": "Kjærlighet.",
            "chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
            "chapter_set-2-id": "3",
            "chapter_set-2-title": "Need a title.",
            "chapter_set-2-content": "<p>Newest content</p>",
            # Three blank extra inline forms must be submitted untouched.
            "chapter_set-3-id": "",
            "chapter_set-3-title": "",
            "chapter_set-3-content": "",
            "chapter_set-4-id": "",
            "chapter_set-4-title": "",
            "chapter_set-4-content": "",
            "chapter_set-5-id": "",
            "chapter_set-5-title": "",
            "chapter_set-5-content": "",
        }
        response = self.client.post('/test_admin/admin/admin_views/book/1/', post_data)
        self.assertEqual(response.status_code, 302) # redirect somewhere
    def testUnicodeDelete(self):
        """
        Ensure that the delete_view handles non-ascii characters
        """
        delete_dict = {'post': 'yes'}
        response = self.client.get('/test_admin/admin/admin_views/book/1/delete/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/test_admin/admin/admin_views/book/1/delete/', delete_dict)
        self.assertRedirects(response, '/test_admin/admin/admin_views/book/')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewListEditable(TestCase):
urls = "admin_views.urls"
fixtures = ['admin-views-users.xml', 'admin-views-person.xml']
def setUp(self):
    # Every list-editable test runs as the fixture superuser.
    self.client.login(username='super', password='secret')
def tearDown(self):
    # End the session so tests remain independent of each other.
    self.client.logout()
def test_inheritance(self):
    """The Podcast (inherited model) changelist should render without errors."""
    Podcast.objects.create(name="This Week in Django",
                           release_date=datetime.date.today())
    changelist = self.client.get('/test_admin/admin/admin_views/podcast/')
    self.assertEqual(changelist.status_code, 200)
def test_inheritance_2(self):
    """The Vodcast changelist should render without errors."""
    Vodcast.objects.create(name="This Week in Django", released=True)
    changelist = self.client.get('/test_admin/admin/admin_views/vodcast/')
    self.assertEqual(changelist.status_code, 200)
def test_custom_pk(self):
    """The changelist should render for a model with a custom primary key."""
    Language.objects.create(iso='en', name='English', english_name='English')
    changelist = self.client.get('/test_admin/admin/admin_views/language/')
    self.assertEqual(changelist.status_code, 200)
def test_changelist_input_html(self):
    """The changelist should render exactly the expected number of form
    inputs and selects for the three editable Person fixture rows."""
    response = self.client.get('/test_admin/admin/admin_views/person/')
    # 2 inputs per object(the field and the hidden id field) = 6
    # 3 management hidden fields = 3
    # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
    # main form submit button = 1
    # search field and search submit button = 2
    # CSRF field = 1
    # field to track 'select all' across paginated views = 1
    # 6 + 3 + 4 + 1 + 2 + 1 + 1 = 18 inputs
    self.assertContains(response, "<input", count=18)
    # 1 select per object = 3 selects, plus one more -- presumably the
    # action chooser dropdown -- for a total of 4.
    self.assertContains(response, "<select", count=4)
def test_post_messages(self):
    # Ticket 12707: Saving inline editable should not show admin
    # action warnings
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "1",
        "form-1-gender": "2",
        "form-1-id": "2",
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "3",
        "_save": "Save",
    }
    response = self.client.post('/test_admin/admin/admin_views/person/',
                                data, follow=True)
    # Exactly one message (the save confirmation) -- no spurious
    # "no action selected" warning.
    self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
    """Saving the list-editable formset should update objects, including on
    filtered and searched changelist pages."""
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "1",
        "form-1-gender": "2",
        "form-1-id": "2",
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "3",
        "_save": "Save",
    }
    self.client.post('/test_admin/admin/admin_views/person/', data)
    # form-0 omits 'alive', so the checkbox is unchecked -> False.
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
    # test a filtered page
    data = {
        "form-TOTAL_FORMS": "2",
        "form-INITIAL_FORMS": "2",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": "1",
        "form-0-gender": "1",
        "form-0-alive": "checked",
        "form-1-id": "3",
        "form-1-gender": "1",
        "form-1-alive": "checked",
        "_save": "Save",
    }
    self.client.post('/test_admin/admin/admin_views/person/?gender__exact=1', data)
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
    # test a searched page
    data = {
        "form-TOTAL_FORMS": "1",
        "form-INITIAL_FORMS": "1",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": "1",
        "form-0-gender": "1",
        "_save": "Save",
    }
    self.client.post('/test_admin/admin/admin_views/person/?q=john', data)
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
    ''' Ensure that non field errors are displayed for each of the
    forms in the changelist's formset. Refs #13126.
    '''
    fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
    fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
    fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": str(fd1.id),
        "form-0-reference": "123",
        "form-0-driver": "bill",
        "form-0-restaurant": "thai",
        # Same data as above: Forbidden because of unique_together!
        "form-1-id": str(fd2.id),
        "form-1-reference": "456",
        "form-1-driver": "bill",
        "form-1-restaurant": "thai",
        "form-2-id": str(fd3.id),
        "form-2-reference": "789",
        "form-2-driver": "bill",
        "form-2-restaurant": "pizza",
        "_save": "Save",
    }
    response = self.client.post('/test_admin/admin/admin_views/fooddelivery/', data)
    # One duplicate row -> exactly one non-field error rendered.
    self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist"><li>Food delivery with this Driver and Restaurant already exists.</li></ul></td></tr>', 1, html=True)
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": str(fd1.id),
        "form-0-reference": "123",
        "form-0-driver": "bill",
        "form-0-restaurant": "thai",
        # Same data as above: Forbidden because of unique_together!
        "form-1-id": str(fd2.id),
        "form-1-reference": "456",
        "form-1-driver": "bill",
        "form-1-restaurant": "thai",
        # Same data also.
        "form-2-id": str(fd3.id),
        "form-2-reference": "789",
        "form-2-driver": "bill",
        "form-2-restaurant": "thai",
        "_save": "Save",
    }
    response = self.client.post('/test_admin/admin/admin_views/fooddelivery/', data)
    # Two duplicate rows -> the error must be rendered once per offending form.
    self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist"><li>Food delivery with this Driver and Restaurant already exists.</li></ul></td></tr>', 2, html=True)
def test_non_form_errors(self):
    """Non-form (formset-level) errors are handled; ticket #12716."""
    post_data = {
        "form-TOTAL_FORMS": "1",
        "form-INITIAL_FORMS": "1",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": "2",
        "form-0-alive": "1",
        "form-0-gender": "2",
        # "_save" makes the form processing treat this POST as a
        # list_editable "Save" rather than an action "Go".
        "_save": "Save",
    }
    response = self.client.post('/test_admin/admin/admin_views/person/', post_data)
    self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
    """Non-form errors are exposed as an ErrorList instance; ticket #12878."""
    post_data = {
        "form-TOTAL_FORMS": "1",
        "form-INITIAL_FORMS": "1",
        "form-MAX_NUM_FORMS": "0",
        "form-0-id": "2",
        "form-0-alive": "1",
        "form-0-gender": "2",
        "_save": "Save",
    }
    response = self.client.post('/test_admin/admin/admin_views/person/', post_data)
    errors = response.context['cl'].formset.non_form_errors()
    self.assertIsInstance(errors, ErrorList)
    self.assertEqual(str(errors), str(ErrorList(["Grace is not a Zombie"])))
def test_list_editable_ordering(self):
    """Saving list_editable order values applies each one to the right row."""
    collector = Collector.objects.create(id=1, name="Frederick Clegg")
    for pk, initial_order in ((1, 1), (2, 2), (3, 0), (4, 0)):
        Category.objects.create(id=pk, order=initial_order, collector=collector)
    # NB: The order values must be changed so that the items are reordered.
    post_data = {
        "form-TOTAL_FORMS": "4",
        "form-INITIAL_FORMS": "4",
        "form-MAX_NUM_FORMS": "0",
        "form-0-order": "14",
        "form-0-id": "1",
        "form-0-collector": "1",
        "form-1-order": "13",
        "form-1-id": "2",
        "form-1-collector": "1",
        "form-2-order": "1",
        "form-2-id": "3",
        "form-2-collector": "1",
        "form-3-order": "0",
        "form-3-id": "4",
        "form-3-collector": "1",
        # "_save" makes the form processing treat this POST as a
        # list_editable "Save" rather than an action "Go".
        "_save": "Save",
    }
    response = self.client.post('/test_admin/admin/admin_views/category/', post_data)
    # A successful post redirects.
    self.assertEqual(response.status_code, 302)
    # Each new order value landed on the matching object.
    for pk, expected_order in ((1, 14), (2, 13), (3, 1), (4, 0)):
        self.assertEqual(Category.objects.get(id=pk).order, expected_order)
def test_list_editable_pagination(self):
    """
    Ensure that pagination works for list_editable items.
    Refs #16819.
    """
    for pk in (1, 2, 3):
        UnorderedObject.objects.create(id=pk, name='Unordered object #%d' % pk)
    # First page: objects #3 and #2 are listed, #1 is not.
    response = self.client.get('/test_admin/admin/admin_views/unorderedobject/')
    self.assertContains(response, 'Unordered object #3')
    self.assertContains(response, 'Unordered object #2')
    self.assertNotContains(response, 'Unordered object #1')
    # Second page (?p=1): only object #1 is listed.
    response = self.client.get('/test_admin/admin/admin_views/unorderedobject/?p=1')
    self.assertNotContains(response, 'Unordered object #3')
    self.assertNotContains(response, 'Unordered object #2')
    self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
    # List editable changes should not be executed if the action "Go" button is
    # used to submit the form.
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "1",
        "form-1-gender": "2",
        "form-1-id": "2",
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "3",
        # "index" (and no "_save") marks this POST as an action "Go"
        # submission, so the list_editable values above must be discarded.
        "index": "0",
        "_selected_action": ['3'],
        "action": ['', 'delete_selected'],
    }
    self.client.post('/test_admin/admin/admin_views/person/', data)
    # The list_editable edits were not applied.
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
    # List editable changes should be executed if the "Save" button is
    # used to submit the form - any action choices should be ignored.
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "1",
        "form-1-gender": "2",
        "form-1-id": "2",
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "3",
        # "_save" marks this POST as a list_editable save; the selected
        # action below must therefore be ignored.
        "_save": "Save",
        "_selected_action": ['1'],
        "action": ['', 'delete_selected'],
    }
    self.client.post('/test_admin/admin/admin_views/person/', data)
    # The list_editable edits were applied despite the action being set.
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
    """
    Fields should not be list-editable in popups.
    """
    # Regular changelist: list_editable is active.
    regular_response = self.client.get('/test_admin/admin/admin_views/person/')
    self.assertNotEqual(regular_response.context['cl'].list_editable, ())
    # Same changelist rendered as a popup: list_editable is disabled.
    popup_response = self.client.get('/test_admin/admin/admin_views/person/?%s' % IS_POPUP_VAR)
    self.assertEqual(popup_response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
    """ Ensure that hidden pk fields aren't displayed in the table body and
    that their corresponding human-readable value is displayed instead.
    Note that the hidden pk fields are in fact displayed, but
    separately (not in the table), and only once.
    Refs #12475.
    """
    story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
    story2 = Story.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
    response = self.client.get('/test_admin/admin/admin_views/story/')
    self.assertContains(response, 'id="id_form-0-id"', 1)  # Only one hidden field, in a separate place than the table.
    self.assertContains(response, 'id="id_form-1-id"', 1)
    # NOTE(review): the hidden inputs are expected in story2, story1 order —
    # presumably the changelist lists newest first; confirm against the
    # Story ModelAdmin's ordering if this assertion ever flakes.
    self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
    # The pk appears as plain text in the table body, exactly once per row.
    self.assertContains(response, '<td>%d</td>' % story1.id, 1)
    self.assertContains(response, '<td>%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
    """ Similarly as test_pk_hidden_fields, but when the hidden pk fields are
    referenced in list_display_links.
    Refs #12475.
    """
    story1 = OtherStory.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
    story2 = OtherStory.objects.create(title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...')
    link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
    link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
    response = self.client.get('/test_admin/admin/admin_views/otherstory/')
    self.assertContains(response, 'id="id_form-0-id"', 1)  # Only one hidden field, in a separate place than the table.
    self.assertContains(response, 'id="id_form-1-id"', 1)
    # Hidden pk inputs live in the .hiddenfields div, outside the table.
    self.assertContains(response, '<div class="hiddenfields">\n<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>' % (story2.id, story1.id), html=True)
    # Because the pk is in list_display_links, it renders as a change link.
    self.assertContains(response, '<th><a href="%s">%d</a></th>' % (link1, story1.id), 1)
    self.assertContains(response, '<th><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminSearchTest(TestCase):
    """Tests of the changelist search box (the ?q= parameter)."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users', 'multiple-child-classes',
                'admin-views-person']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_search_on_sibling_models(self):
        "A search whose search_fields mention sibling models works."
        response = self.client.get('/test_admin/admin/admin_views/recommendation/?q=bar')
        # confirm the search returned 1 object
        self.assertContains(response, "\n1 recommendation\n")

    def test_with_fk_to_field(self):
        """Ensure that the to_field GET parameter is preserved when a search
        is performed. Refs #10918.
        """
        response = self.client.get('/test_admin/admin/auth/user/?q=joe&%s=id' % TO_FIELD_VAR)
        self.assertContains(response, "\n1 user\n")
        self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)

    def test_exact_matches(self):
        response = self.client.get('/test_admin/admin/admin_views/recommendation/?q=bar')
        # confirm the search returned one object
        self.assertContains(response, "\n1 recommendation\n")
        response = self.client.get('/test_admin/admin/admin_views/recommendation/?q=ba')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 recommendations\n")

    def test_beginning_matches(self):
        response = self.client.get('/test_admin/admin/admin_views/person/?q=Gui')
        # confirm the search returned one object
        self.assertContains(response, "\n1 person\n")
        self.assertContains(response, "Guido")
        response = self.client.get('/test_admin/admin/admin_views/person/?q=uido')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 persons\n")
        self.assertNotContains(response, "Guido")

    def test_pluggable_search(self):
        # The created objects are only reached through the admin search below,
        # so the instances themselves don't need to be bound to names.
        PluggableSearchPerson.objects.create(name="Bob", age=10)
        PluggableSearchPerson.objects.create(name="Amy", age=20)
        response = self.client.get('/test_admin/admin/admin_views/pluggablesearchperson/?q=Bob')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Bob")
        response = self.client.get('/test_admin/admin/admin_views/pluggablesearchperson/?q=20')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Amy")

    def test_reset_link(self):
        """
        Test presence of reset link in search bar ("1 result (_x total_)").
        """
        response = self.client.get('/test_admin/admin/admin_views/person/?q=Gui')
        self.assertContains(response,
            """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
            html=True)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminInheritedInlinesTest(TestCase):
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def testInline(self):
        "Ensure that inline models which inherit from a common parent are correctly handled by admin."
        foo_user = "foo username"
        bar_user = "bar username"
        # Collect every name="..." attribute from the rendered page so we can
        # check for duplicate HTML field names between the two inlines.
        name_re = re.compile(b'name="(.*?)"')

        # test the add case
        response = self.client.get('/test_admin/admin/admin_views/persona/add/')
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        # test the add case
        post_data = {
            "name": "Test Name",
            # inline data
            "accounts-TOTAL_FORMS": "1",
            "accounts-INITIAL_FORMS": "0",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": foo_user,
            # NOTE(review): "accounts-2" is presumably the prefix of the
            # second (BarAccount) inline formset — confirm against the
            # Persona ModelAdmin's inlines.
            "accounts-2-TOTAL_FORMS": "1",
            "accounts-2-INITIAL_FORMS": "0",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": bar_user,
        }
        response = self.client.post('/test_admin/admin/admin_views/persona/add/', post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        # One persona with one account of each subclass was created.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)

        persona_id = Persona.objects.all()[0].id
        foo_id = FooAccount.objects.all()[0].id
        bar_id = BarAccount.objects.all()[0].id

        # test the edit case
        response = self.client.get('/test_admin/admin/admin_views/persona/%d/' % persona_id)
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        post_data = {
            "name": "Test Name",
            "accounts-TOTAL_FORMS": "2",
            "accounts-INITIAL_FORMS": "1",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": "%s-1" % foo_user,
            "accounts-0-account_ptr": str(foo_id),
            "accounts-0-persona": str(persona_id),
            "accounts-2-TOTAL_FORMS": "2",
            "accounts-2-INITIAL_FORMS": "1",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": "%s-1" % bar_user,
            "accounts-2-0-account_ptr": str(bar_id),
            "accounts-2-0-persona": str(persona_id),
        }
        response = self.client.post('/test_admin/admin/admin_views/persona/%d/' % persona_id, post_data)
        self.assertEqual(response.status_code, 302)
        # Editing kept exactly one account of each subclass, now renamed.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminActionsTest(TestCase):
    """Tests of admin changelist actions (the action dropdown + "Go" flow)."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml', 'admin-views-actions.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_model_admin_custom_action(self):
        "Tests a custom action defined in a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'mail_admin',
            'index': 0,
        }
        self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action')

    def test_model_admin_default_delete_action(self):
        "Tests the default delete action defined as a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        # 'post': 'yes' confirms the deletion on the confirmation page.
        delete_confirmation_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'post': 'yes',
        }
        confirmation = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
        self.assertIsInstance(confirmation, TemplateResponse)
        self.assertContains(confirmation, "Are you sure you want to delete the selected subscribers?")
        self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2)
        self.client.post('/test_admin/admin/admin_views/subscriber/', delete_confirmation_data)
        self.assertEqual(Subscriber.objects.count(), 0)

    def test_non_localized_pk(self):
        """If USE_THOUSAND_SEPARATOR is set, make sure that the ids for
        the objects selected for deletion are rendered without separators.
        Refs #14895.
        """
        # Use the settings() context manager rather than manual
        # save/restore: the original values are restored (and the
        # setting_changed signal fired) even if an assertion below fails.
        with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True):
            subscriber = Subscriber.objects.get(id=1)
            subscriber.id = 9999
            subscriber.save()
            action_data = {
                ACTION_CHECKBOX_NAME: [9999, 2],
                'action': 'delete_selected',
                'index': 0,
            }
            response = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
            self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html')
            self.assertContains(response, 'value="9999"')  # Instead of 9,999
            self.assertContains(response, 'value="2"')

    def test_model_admin_default_delete_action_protected(self):
        """
        Tests the default delete action defined as a ModelAdmin method in the
        case where some related objects are protected from deletion.
        """
        q1 = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q1, answer="Because.")
        a2 = Answer.objects.create(question=q1, answer="Yes.")
        q2 = Question.objects.create(question="Wherefore?")
        action_data = {
            ACTION_CHECKBOX_NAME: [q1.pk, q2.pk],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post("/test_admin/admin/admin_views/question/", action_data)
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Because.</a></li>' % a1.pk, html=True)
        self.assertContains(response, '<li>Answer: <a href="/test_admin/admin/admin_views/answer/%s/">Yes.</a></li>' % a2.pk, html=True)

    def test_custom_function_mail_action(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_custom_function_action_with_redirect(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'redirect_to',
            'index': 0,
        }
        response = self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
        self.assertEqual(response.status_code, 302)

    def test_default_redirect(self):
        """
        Test that actions which don't return an HttpResponse are redirected to
        the same page, retaining the querystring (which may contain changelist
        information).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        url = '/test_admin/admin/admin_views/externalsubscriber/?o=1'
        response = self.client.post(url, action_data)
        self.assertRedirects(response, url)

    def test_custom_function_action_streaming_response(self):
        """Tests a custom action that returns a StreamingHttpResponse."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'download',
            'index': 0,
        }
        response = self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
        content = b''.join(response.streaming_content)
        self.assertEqual(content, b'This is the content of the file')
        self.assertEqual(response.status_code, 200)

    def test_custom_function_action_no_perm_response(self):
        """Tests a custom action that returns an HttpResponse with 403 code."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'no_perm',
            'index': 0,
        }
        response = self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.content, b'No permission to perform this action')

    def test_actions_ordering(self):
        """
        Ensure that actions are ordered as expected.
        Refs #15964.
        """
        response = self.client.get('/test_admin/admin/admin_views/externalsubscriber/')
        self.assertContains(response, '''<label>Action: <select name="action">
<option value="" selected="selected">---------</option>
<option value="delete_selected">Delete selected external
subscribers</option>
<option value="redirect_to">Redirect to (Awesome action)</option>
<option value="external_mail">External mail (Another awesome
action)</option>
<option value="download">Download subscription</option>
<option value="no_perm">No permission to run</option>
</select>''', html=True)

    def test_model_without_action(self):
        "Tests a ModelAdmin without any action"
        response = self.client.get('/test_admin/admin/admin_views/oldsubscriber/')
        self.assertEqual(response.context["action_form"], None)
        self.assertNotContains(response, '<input type="checkbox" class="action-select"',
            msg_prefix="Found an unexpected action toggle checkboxbox in response")

    def test_model_without_action_still_has_jquery(self):
        "Tests that a ModelAdmin without any actions still gets jQuery included in page"
        response = self.client.get('/test_admin/admin/admin_views/oldsubscriber/')
        self.assertEqual(response.context["action_form"], None)
        self.assertContains(response, 'jquery.min.js',
            msg_prefix="jQuery missing from admin pages for model with no admin actions"
        )

    def test_action_column_class(self):
        "Tests that the checkbox column class is present in the response"
        response = self.client.get('/test_admin/admin/admin_views/subscriber/')
        self.assertNotEqual(response.context["action_form"], None)
        self.assertContains(response, 'action-checkbox-column')

    def test_multiple_actions_form(self):
        """
        Test that actions come from the form whose submit button was pressed (#10618).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            # Two different actions selected on the two forms...
            'action': ['external_mail', 'delete_selected'],
            # ...but we clicked "go" on the top form.
            'index': 0
        }
        self.client.post('/test_admin/admin/admin_views/externalsubscriber/', action_data)
        # Send mail, don't delete.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_user_message_on_none_selected(self):
        """
        User should see a warning when 'Go' is pressed and no items are selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
        msg = """Items must be selected in order to perform actions on them. No items have been changed."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_user_message_on_no_action(self):
        """
        User should see a warning when 'Go' is pressed and no action is selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': '',
            'index': 0,
        }
        response = self.client.post('/test_admin/admin/admin_views/subscriber/', action_data)
        msg = """No action selected."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_selection_counter(self):
        """
        Check if the selection counter is there.
        """
        response = self.client.get('/test_admin/admin/admin_views/subscriber/')
        self.assertContains(response, '0 of 2 selected')

    def test_popup_actions(self):
        """ Actions should not be shown in popups. """
        response = self.client.get('/test_admin/admin/admin_views/subscriber/')
        self.assertNotEqual(response.context["action_form"], None)
        response = self.client.get(
            '/test_admin/admin/admin_views/subscriber/?%s' % IS_POPUP_VAR)
        self.assertEqual(response.context["action_form"], None)

    def test_popup_template_response(self):
        """
        Success on popups shall be rendered from template in order to allow
        easy customization.
        """
        response = self.client.post(
            '/test_admin/admin/admin_views/actor/add/?%s=1' % IS_POPUP_VAR,
            {'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name, 'admin/popup_response.html')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class TestCustomChangeList(TestCase):
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']
    # URL namespace segment used to build admin URLs below.
    urlbit = 'admin'

    def setUp(self):
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)

    def tearDown(self):
        self.client.logout()

    def test_custom_changelist(self):
        """
        Validate that a custom ChangeList class can be used (#9749)
        """
        # Insert some data
        post_data = {"name": "First Gadget"}
        response = self.client.post('/test_admin/%s/admin_views/gadget/add/' % self.urlbit, post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        # Hit the page once to get messages out of the queue message list
        response = self.client.get('/test_admin/%s/admin_views/gadget/' % self.urlbit)
        # Ensure that data is still not visible on the page (the custom
        # ChangeList is expected to filter it out).
        response = self.client.get('/test_admin/%s/admin_views/gadget/' % self.urlbit)
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'First Gadget')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class TestInlineNotEditable(TestCase):
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        logged_in = self.client.login(username='super', password='secret')
        self.assertEqual(logged_in, True)

    def tearDown(self):
        self.client.logout()

    def test(self):
        """
        InlineModelAdmin broken?
        """
        # The add view for a model with inlines renders without errors.
        response = self.client.get('/test_admin/admin/admin_views/parent/add/')
        self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminCustomQuerysetTest(TestCase):
    """Tests of ModelAdmins whose get_queryset() uses defer()/only() or
    otherwise customizes the changelist queryset."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')
        self.pks = [EmptyModel.objects.create().id for i in range(3)]

    def tearDown(self):
        # Log out for symmetry with setUp (every sibling test class does).
        self.client.logout()

    def test_changelist_view(self):
        response = self.client.get('/test_admin/admin/admin_views/emptymodel/')
        # NOTE(review): objects with pk <= 1 are expected to be filtered out —
        # presumably by the EmptyModel admin's custom queryset; confirm there.
        for i in self.pks:
            if i > 1:
                self.assertContains(response, 'Primary key = %s' % i)
            else:
                self.assertNotContains(response, 'Primary key = %s' % i)

    def test_changelist_view_count_queries(self):
        # create 2 Person objects
        Person.objects.create(name='person1', gender=1)
        Person.objects.create(name='person2', gender=2)
        # 4 queries are expected: 1 for the session, 1 for the user,
        # 1 for the count and 1 for the objects on the page
        with self.assertNumQueries(4):
            resp = self.client.get('/test_admin/admin/admin_views/person/')
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        # here one more count(*) query will run, because filters were applied
        with self.assertNumQueries(5):
            extra = {'q': 'not_in_name'}
            resp = self.client.get('/test_admin/admin/admin_views/person/', extra)
            self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
        with self.assertNumQueries(5):
            extra = {'q': 'person'}
            resp = self.client.get('/test_admin/admin/admin_views/person/', extra)
            self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
            self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
        with self.assertNumQueries(5):
            extra = {'gender__exact': '1'}
            resp = self.client.get('/test_admin/admin/admin_views/person/', extra)
            self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
            self.assertEqual(resp.context['selection_note_all'], '1 selected')

    def test_change_view(self):
        # The change view must 404 for objects the custom queryset excludes.
        for i in self.pks:
            response = self.client.get('/test_admin/admin/admin_views/emptymodel/%s/' % i)
            if i > 1:
                self.assertEqual(response.status_code, 200)
            else:
                self.assertEqual(response.status_code, 404)

    def test_add_model_modeladmin_defer_qs(self):
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()
        # model has __unicode__ method
        self.assertEqual(CoverLetter.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "author": "Candidate, Best",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/coverletter/add/',
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The cover letter &quot;Candidate, Best&quot; was added successfully.</li>' if False else '<li class="success">The cover letter "Candidate, Best" was added successfully.</li>',
            html=True
        )
        # model has no __unicode__ method
        self.assertEqual(ShortMessage.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "content": "What's this SMS thing?",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/shortmessage/add/',
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The short message "ShortMessage object" was added successfully.</li>',
            html=True
        )

    def test_add_model_modeladmin_only_qs(self):
        # Test for #14529. only() is used in ModelAdmin.get_queryset()
        # model has __unicode__ method
        self.assertEqual(Telegram.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "Urgent telegram",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/telegram/add/',
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Telegram.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The telegram "Urgent telegram" was added successfully.</li>',
            html=True
        )
        # model has no __unicode__ method
        self.assertEqual(Paper.objects.count(), 0)
        # Emulate model instance creation via the admin
        post_data = {
            "title": "My Modified Paper Title",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/paper/add/',
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Paper.objects.count(), 1)
        # Message should contain non-ugly model verbose name
        self.assertContains(
            response,
            '<li class="success">The paper "Paper object" was added successfully.</li>',
            html=True
        )

    def test_edit_model_modeladmin_defer_qs(self):
        # Test for #14529. defer() is used in ModelAdmin.get_queryset()
        # model has __unicode__ method
        cl = CoverLetter.objects.create(author="John Doe")
        self.assertEqual(CoverLetter.objects.count(), 1)
        response = self.client.get('/test_admin/admin/admin_views/coverletter/%s/' % cl.pk)
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "author": "John Doe II",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/coverletter/%s/' % cl.pk,
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(CoverLetter.objects.count(), 1)
        # Message should contain non-ugly model verbose name. Instance
        # representation is set by model's __unicode__()
        self.assertContains(
            response,
            '<li class="success">The cover letter "John Doe II" was changed successfully.</li>',
            html=True
        )
        # model has no __unicode__ method
        sm = ShortMessage.objects.create(content="This is expensive")
        self.assertEqual(ShortMessage.objects.count(), 1)
        response = self.client.get('/test_admin/admin/admin_views/shortmessage/%s/' % sm.pk)
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "content": "Too expensive",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/shortmessage/%s/' % sm.pk,
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(ShortMessage.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The ugly(!)
        # instance representation is set by six.text_type()
        self.assertContains(
            response,
            '<li class="success">The short message "ShortMessage_Deferred_timestamp object" was changed successfully.</li>',
            html=True
        )

    def test_edit_model_modeladmin_only_qs(self):
        # Test for #14529. only() is used in ModelAdmin.get_queryset()
        # model has __unicode__ method
        t = Telegram.objects.create(title="Frist Telegram")
        self.assertEqual(Telegram.objects.count(), 1)
        response = self.client.get('/test_admin/admin/admin_views/telegram/%s/' % t.pk)
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "title": "Telegram without typo",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/telegram/%s/' % t.pk,
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Telegram.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The instance
        # representation is set by model's __unicode__()
        self.assertContains(
            response,
            '<li class="success">The telegram "Telegram without typo" was changed successfully.</li>',
            html=True
        )
        # model has no __unicode__ method
        p = Paper.objects.create(title="My Paper Title")
        self.assertEqual(Paper.objects.count(), 1)
        response = self.client.get('/test_admin/admin/admin_views/paper/%s/' % p.pk)
        self.assertEqual(response.status_code, 200)
        # Emulate model instance edit via the admin
        post_data = {
            "title": "My Modified Paper Title",
            "_save": "Save",
        }
        response = self.client.post('/test_admin/admin/admin_views/paper/%s/' % p.pk,
                post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(Paper.objects.count(), 1)
        # Message should contain non-ugly model verbose name. The ugly(!)
        # instance representation is set by six.text_type()
        self.assertContains(
            response,
            '<li class="success">The paper "Paper_Deferred_author object" was changed successfully.</li>',
            html=True
        )
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminInlineFileUploadTest(TestCase):
    """File-upload fields inside admin inlines keep prior data (#10002)."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml', 'admin-views-actions.xml']
    urlbit = 'admin'

    def setUp(self):
        self.client.login(username='super', password='secret')
        # Set up test Picture and Gallery.
        # These must be set up here instead of in fixtures in order to allow
        # Picture to use a NamedTemporaryFile.
        # NOTE(review): the temp file is deleted on close(); only its *name*
        # is stored on the Picture, which is all this test needs.
        tdir = tempfile.gettempdir()
        file1 = tempfile.NamedTemporaryFile(suffix=".file1", dir=tdir)
        file1.write(b'a' * (2 ** 21))
        filename = file1.name
        file1.close()
        self.gallery = Gallery(name="Test Gallery")
        self.gallery.save()
        self.picture = Picture(name="Test Picture", image=filename, gallery=self.gallery)
        self.picture.save()

    def tearDown(self):
        self.client.logout()

    def test_inline_file_upload_edit_validation_error_post(self):
        """
        Test that inline file uploads correctly display prior data (#10002).
        """
        post_data = {
            "name": "Test Gallery",
            "pictures-TOTAL_FORMS": "2",
            "pictures-INITIAL_FORMS": "1",
            "pictures-MAX_NUM_FORMS": "0",
            # Use six.text_type consistently for every PK/FK value (the
            # original mixed six.text_type and str for no reason).
            "pictures-0-id": six.text_type(self.picture.id),
            "pictures-0-gallery": six.text_type(self.gallery.id),
            "pictures-0-name": "Test Picture",
            "pictures-0-image": "",
            "pictures-1-id": "",
            "pictures-1-gallery": six.text_type(self.gallery.id),
            "pictures-1-name": "Test Picture 2",
            "pictures-1-image": "",
        }
        response = self.client.post('/test_admin/%s/admin_views/gallery/%d/' % (self.urlbit, self.gallery.id), post_data)
        # The re-rendered (invalid) form must still show the existing file
        # ("Currently: ..."). Use assertContains rather than reaching into
        # the private response._container attribute.
        self.assertContains(response, "Currently")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminInlineTests(TestCase):
    """
    Adding, re-saving and modifying inlines of various PK flavours — auto
    (Widget), explicit auto (Grommet), character (DooHickey), plain integer
    (Whatsit), inherited (FancyDoodad) — plus ordered inlines (Category),
    all through the Collector change form.
    """
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # Baseline management-form data for every inline formset on the
        # Collector change form. Individual tests fill in only the fields
        # they exercise.
        self.post_data = {
            "name": "Test Name",

            "widget_set-TOTAL_FORMS": "3",
            "widget_set-INITIAL_FORMS": "0",
            "widget_set-MAX_NUM_FORMS": "0",
            "widget_set-0-id": "",
            "widget_set-0-owner": "1",
            "widget_set-0-name": "",
            "widget_set-1-id": "",
            "widget_set-1-owner": "1",
            "widget_set-1-name": "",
            "widget_set-2-id": "",
            "widget_set-2-owner": "1",
            "widget_set-2-name": "",

            "doohickey_set-TOTAL_FORMS": "3",
            "doohickey_set-INITIAL_FORMS": "0",
            "doohickey_set-MAX_NUM_FORMS": "0",
            "doohickey_set-0-owner": "1",
            "doohickey_set-0-code": "",
            "doohickey_set-0-name": "",
            "doohickey_set-1-owner": "1",
            "doohickey_set-1-code": "",
            "doohickey_set-1-name": "",
            "doohickey_set-2-owner": "1",
            "doohickey_set-2-code": "",
            "doohickey_set-2-name": "",

            "grommet_set-TOTAL_FORMS": "3",
            "grommet_set-INITIAL_FORMS": "0",
            "grommet_set-MAX_NUM_FORMS": "0",
            "grommet_set-0-code": "",
            "grommet_set-0-owner": "1",
            "grommet_set-0-name": "",
            "grommet_set-1-code": "",
            "grommet_set-1-owner": "1",
            "grommet_set-1-name": "",
            "grommet_set-2-code": "",
            "grommet_set-2-owner": "1",
            "grommet_set-2-name": "",

            "whatsit_set-TOTAL_FORMS": "3",
            "whatsit_set-INITIAL_FORMS": "0",
            "whatsit_set-MAX_NUM_FORMS": "0",
            "whatsit_set-0-owner": "1",
            "whatsit_set-0-index": "",
            "whatsit_set-0-name": "",
            "whatsit_set-1-owner": "1",
            "whatsit_set-1-index": "",
            "whatsit_set-1-name": "",
            "whatsit_set-2-owner": "1",
            "whatsit_set-2-index": "",
            "whatsit_set-2-name": "",

            "fancydoodad_set-TOTAL_FORMS": "3",
            "fancydoodad_set-INITIAL_FORMS": "0",
            "fancydoodad_set-MAX_NUM_FORMS": "0",
            "fancydoodad_set-0-doodad_ptr": "",
            "fancydoodad_set-0-owner": "1",
            "fancydoodad_set-0-name": "",
            "fancydoodad_set-0-expensive": "on",
            "fancydoodad_set-1-doodad_ptr": "",
            "fancydoodad_set-1-owner": "1",
            "fancydoodad_set-1-name": "",
            "fancydoodad_set-1-expensive": "on",
            "fancydoodad_set-2-doodad_ptr": "",
            "fancydoodad_set-2-owner": "1",
            "fancydoodad_set-2-name": "",
            "fancydoodad_set-2-expensive": "on",

            "category_set-TOTAL_FORMS": "3",
            "category_set-INITIAL_FORMS": "0",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "",
            "category_set-0-id": "",
            "category_set-0-collector": "1",
            "category_set-1-order": "",
            "category_set-1-id": "",
            "category_set-1-collector": "1",
            "category_set-2-order": "",
            "category_set-2-id": "",
            "category_set-2-collector": "1",
        }
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)
        self.collector = Collector(pk=1, name='John Fowles')
        self.collector.save()

    def tearDown(self):
        self.client.logout()

    def test_simple_inline(self):
        "A simple model can be saved as inlines"
        # First add a new inline
        self.post_data['widget_set-0-name'] = "Widget 1"
        collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        widget_id = Widget.objects.all()[0].id

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="widget_set-0-id"')

        # Now resave that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")

        # Now modify that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")

    def test_explicit_autofield_inline(self):
        "A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
        # First add a new inline
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="grommet_set-0-code"')

        # Now resave that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")

        # Now modify that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")

    def test_char_pk_inline(self):
        "A model with a character PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="doohickey_set-0-code"')

        # Now resave that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")

        # Now modify that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")

    def test_integer_pk_inline(self):
        "A model with an integer PK can be saved as inlines. Regression for #10992"
        # Derive the URL from the collector's pk instead of hard-coding "1",
        # for consistency with the other tests in this class.
        collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk

        # First add a new inline
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="whatsit_set-0-index"')

        # Now resave that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")

        # Now modify that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")

    def test_inherited_inline(self):
        "An inherited model can be saved as inlines. Regression for #11042"
        # First add a new inline
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        doodad_pk = FancyDoodad.objects.all()[0].pk

        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')

        # Now resave that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")

        # Now modify that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")

    def test_ordered_inline(self):
        """Check that an inline with an editable ordering fields is
        updated correctly. Regression for #10922"""
        # Create some objects with an initial ordering
        Category.objects.create(id=1, order=1, collector=self.collector)
        Category.objects.create(id=2, order=2, collector=self.collector)
        Category.objects.create(id=3, order=0, collector=self.collector)
        Category.objects.create(id=4, order=0, collector=self.collector)

        # NB: The order values must be changed so that the items are reordered.
        self.post_data.update({
            "name": "Frederick Clegg",

            "category_set-TOTAL_FORMS": "7",
            "category_set-INITIAL_FORMS": "4",
            "category_set-MAX_NUM_FORMS": "0",

            "category_set-0-order": "14",
            "category_set-0-id": "1",
            "category_set-0-collector": "1",

            "category_set-1-order": "13",
            "category_set-1-id": "2",
            "category_set-1-collector": "1",

            "category_set-2-order": "1",
            "category_set-2-id": "3",
            "category_set-2-collector": "1",

            "category_set-3-order": "0",
            "category_set-3-id": "4",
            "category_set-3-collector": "1",

            "category_set-4-order": "",
            "category_set-4-id": "",
            "category_set-4-collector": "1",

            "category_set-5-order": "",
            "category_set-5-id": "",
            "category_set-5-collector": "1",

            "category_set-6-order": "",
            "category_set-6-id": "",
            "category_set-6-collector": "1",
        })
        # Use the collector's pk rather than a hard-coded "1", consistent
        # with the other tests in this class.
        collector_url = '/test_admin/admin/admin_views/collector/%d/' % self.collector.pk
        response = self.client.post(collector_url, self.post_data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)

        # Check that the order values have been applied to the right objects
        self.assertEqual(self.collector.category_set.count(), 4)
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class NeverCacheTests(TestCase):
    """
    Admin views must be served with Cache-Control max-age=0 (never cached);
    views outside the admin's never_cache wrapping carry no max-age at all.
    """
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml', 'admin-views-colors.xml', 'admin-views-fabrics.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def _assertMaxAge(self, url, expected):
        # GET *url* and compare the parsed Cache-Control max-age to *expected*
        # (0 for never-cached admin views, None when no max-age is set).
        self.assertEqual(get_max_age(self.client.get(url)), expected)

    def testAdminIndex(self):
        "Check the never-cache status of the main index"
        self._assertMaxAge('/test_admin/admin/', 0)

    def testAppIndex(self):
        "Check the never-cache status of an application index"
        self._assertMaxAge('/test_admin/admin/admin_views/', 0)

    def testModelIndex(self):
        "Check the never-cache status of a model index"
        self._assertMaxAge('/test_admin/admin/admin_views/fabric/', 0)

    def testModelAdd(self):
        "Check the never-cache status of a model add page"
        self._assertMaxAge('/test_admin/admin/admin_views/fabric/add/', 0)

    def testModelView(self):
        "Check the never-cache status of a model edit page"
        self._assertMaxAge('/test_admin/admin/admin_views/section/1/', 0)

    def testModelHistory(self):
        "Check the never-cache status of a model history page"
        self._assertMaxAge('/test_admin/admin/admin_views/section/1/history/', 0)

    def testModelDelete(self):
        "Check the never-cache status of a model delete page"
        self._assertMaxAge('/test_admin/admin/admin_views/section/1/delete/', 0)

    def testLogin(self):
        "Check the never-cache status of login views"
        self.client.logout()
        self._assertMaxAge('/test_admin/admin/', 0)

    def testLogout(self):
        "Check the never-cache status of logout view"
        self._assertMaxAge('/test_admin/admin/logout/', 0)

    def testPasswordChange(self):
        "Check the never-cache status of the password change view"
        self.client.logout()
        self._assertMaxAge('/test_admin/password_change/', None)

    def testPasswordChangeDone(self):
        "Check the never-cache status of the password change done view"
        self._assertMaxAge('/test_admin/admin/password_change/done/', None)

    def testJsi18n(self):
        "Check the never-cache status of the JavaScript i18n view"
        self._assertMaxAge('/test_admin/admin/jsi18n/', None)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class PrePopulatedTest(TestCase):
    """Emission of prepopulated-fields JavaScript on admin add/change forms."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_prepopulated_on(self):
        """The add form includes the prepopulation JavaScript hooks."""
        response = self.client.get('/test_admin/admin/admin_views/prepopulatedpost/add/')
        self.assertEqual(response.status_code, 200)
        expected_fragments = (
            "id: '#id_slug',",
            "field['dependency_ids'].push('#id_title');",
            "id: '#id_prepopulatedsubpost_set-0-subslug',",
        )
        for fragment in expected_fragments:
            self.assertContains(response, fragment)

    def test_prepopulated_off(self):
        """The change form of an existing object emits no prepopulation hooks."""
        response = self.client.get('/test_admin/admin/admin_views/prepopulatedpost/1/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "A Long Title")
        unexpected_fragments = (
            "id: '#id_slug'",
            "field['dependency_ids'].push('#id_title');",
            "id: '#id_prepopulatedsubpost_set-0-subslug',",
        )
        for fragment in unexpected_fragments:
            self.assertNotContains(response, fragment)

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_prepopulated_maxlength_localized(self):
        """
        Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
        that maxLength (in the JavaScript) is rendered without separators.
        """
        response = self.client.get('/test_admin/admin/admin_views/prepopulatedpostlargeslug/add/')
        self.assertContains(response, "maxLength: 1000")  # instead of 1,000
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class SeleniumAdminViewsFirefoxTests(AdminSeleniumWebDriverTestCase):
    """Browser-driven tests of admin JavaScript behavior (Firefox driver)."""
    available_apps = ['admin_views'] + AdminSeleniumWebDriverTestCase.available_apps
    fixtures = ['admin-views-users.xml']
    urls = "admin_views.urls"
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def test_prepopulated_fields(self):
        """
        Ensure that the JavaScript-automated prepopulated fields work with the
        main form and with stacked and tabular inlines.
        Refs #13068, #9264, #9983, #9784.
        """
        # (An unused local import of TimeoutException was removed here.)
        self.admin_login(username='super', password='secret', login_url='/test_admin/admin/')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/test_admin/admin/admin_views/mainprepopulated/add/'))

        # Main form ----------------------------------------------------------
        self.selenium.find_element_by_css_selector('#id_pubdate').send_keys('2012-02-18')
        self.get_select_option('#id_status', 'option two').click()
        self.selenium.find_element_by_css_selector('#id_name').send_keys(' this is the mAin nÀMë and it\'s awεšome')
        slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'main-name-and-its-awesome-2012-02-18')
        self.assertEqual(slug2, 'option-two-main-name-and-its-awesome')

        # Stacked inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
        self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-name').send_keys(' here is a sŤāÇkeð inline ! ')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
        self.assertEqual(slug2, 'option-one-here-stacked-inline')

        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related Prepopulated')[0].click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
        self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-name').send_keys(' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-slug2').get_attribute('value')
        self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo')  # 50 characters maximum for slug1 field
        self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo')  # 60 characters maximum for slug2 field

        # Tabular inlines ----------------------------------------------------
        # Initial inline
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
        self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-name').send_keys('And now, with a tÃbűlaŘ inline !!!')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
        self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
        self.assertEqual(slug2, 'option-two-and-now-tabular-inline')

        # Add an inline
        self.selenium.find_elements_by_link_text('Add another Related Prepopulated')[1].click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
        self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
        self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-name').send_keys('a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
        slug1 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
        slug2 = self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
        self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
        self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')

        # Save and check that everything is properly stored in the database
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.assertEqual(MainPrepopulated.objects.all().count(), 1)
        MainPrepopulated.objects.get(
            name=' this is the mAin nÀMë and it\'s awεšome',
            pubdate='2012-02-18',
            status='option two',
            slug1='main-name-and-its-awesome-2012-02-18',
            slug2='option-two-main-name-and-its-awesome',
        )
        self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
        RelatedPrepopulated.objects.get(
            name=' here is a sŤāÇkeð inline ! ',
            pubdate='2011-12-17',
            status='option one',
            slug1='here-stacked-inline-2011-12-17',
            slug2='option-one-here-stacked-inline',
        )
        RelatedPrepopulated.objects.get(
            name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo',  # 75 characters in name field
            pubdate='1999-01-25',
            status='option two',
            slug1='now-you-have-another-stacked-inline-very-loooooooo',
            slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
        )
        RelatedPrepopulated.objects.get(
            name='And now, with a tÃbűlaŘ inline !!!',
            pubdate='1234-12-07',
            status='option two',
            slug1='and-now-tabular-inline-1234-12-07',
            slug2='option-two-and-now-tabular-inline',
        )
        RelatedPrepopulated.objects.get(
            name='a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
            pubdate='1981-08-22',
            status='option one',
            slug1='tabular-inline-ignored-characters-1981-08-22',
            slug2='option-one-tabular-inline-ignored-characters',
        )

    def test_collapsible_fieldset(self):
        """
        Test that the 'collapse' class in fieldsets definition allows to
        show/hide the appropriate field section.
        """
        self.admin_login(username='super', password='secret', login_url='/test_admin/admin/')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/test_admin/admin/admin_views/article/add/'))
        # Collapsed fieldset: the field is hidden until "Show" is clicked.
        self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
        self.selenium.find_elements_by_link_text('Show')[0].click()
        self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
        self.assertEqual(
            self.selenium.find_element_by_id('fieldsetcollapser0').text,
            "Hide"
        )
class SeleniumAdminViewsChromeTests(SeleniumAdminViewsFirefoxTests):
    """Re-run the Firefox Selenium suite against the Chrome webdriver."""
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumAdminViewsIETests(SeleniumAdminViewsFirefoxTests):
    """Re-run the Firefox Selenium suite against the Internet Explorer webdriver."""
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class ReadonlyTest(TestCase):
    """Rendering and handling of ModelAdmin.readonly_fields."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_readonly_get(self):
        # Read-only fields render as text, not as inputs.
        response = self.client.get('/test_admin/admin/admin_views/post/add/')
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 4 inline management form fields, + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        self.assertContains(response, "<input", count=14)
        self.assertContains(response, formats.localize(datetime.date.today()))
        self.assertContains(response,
            "<label>Awesomeness level:</label>")
        self.assertContains(response, "Very awesome.")
        # NOTE(review): "Unkown" matches the (misspelled) output produced by
        # the model under test; do not "fix" the spelling here.
        self.assertContains(response, "Unkown coolness.")
        self.assertContains(response, "foo")

        # Checks that multiline text in a readonly field gets <br /> tags
        self.assertContains(response, "Multiline<br />test<br />string")
        self.assertContains(response, "InlineMultiline<br />test<br />string")

        self.assertContains(response,
            formats.localize(datetime.date.today() - datetime.timedelta(days=7))
        )

        # Each (read-only) field gets its own styled form row.
        self.assertContains(response, '<div class="form-row field-coolness">')
        self.assertContains(response, '<div class="form-row field-awesomeness_level">')
        self.assertContains(response, '<div class="form-row field-posted">')
        self.assertContains(response, '<div class="form-row field-value">')
        self.assertContains(response, '<div class="form-row">')
        # help_text is rendered for read-only fields too (3 occurrences).
        self.assertContains(response, '<p class="help">', 3)
        self.assertContains(response, '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>', html=True)
        self.assertContains(response, '<p class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</p>', html=True)
        self.assertContains(response, '<p class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</p>', html=True)

        # A computed read-only value appears on the change page.
        p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
        response = self.client.get('/test_admin/admin/admin_views/post/%d/' % p.pk)
        self.assertContains(response, "%d amount of cool" % p.pk)

    def test_readonly_post(self):
        # POSTed values for read-only fields are ignored by the admin.
        data = {
            "title": "Django Got Readonly Fields",
            "content": "This is an incredible development.",
            "link_set-TOTAL_FORMS": "1",
            "link_set-INITIAL_FORMS": "0",
            "link_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post('/test_admin/admin/admin_views/post/add/', data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 1)
        p = Post.objects.get()
        self.assertEqual(p.posted, datetime.date.today())

        data["posted"] = "10-8-1990"  # some date that's not today
        response = self.client.post('/test_admin/admin/admin_views/post/add/', data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 2)
        p = Post.objects.order_by('-id')[0]
        # The submitted "posted" value was ignored; the field keeps its
        # model-side value.
        self.assertEqual(p.posted, datetime.date.today())

    def test_readonly_manytomany(self):
        "Regression test for #13004"
        response = self.client.get('/test_admin/admin/admin_views/pizza/add/')
        self.assertEqual(response.status_code, 200)

    def test_user_password_change_limited_queryset(self):
        # The password-change view 404s for users outside the restricted
        # queryset of the second admin site.
        su = User.objects.filter(is_superuser=True)[0]
        response = self.client.get('/test_admin/admin2/auth/user/%s/password/' % su.pk)
        self.assertEqual(response.status_code, 404)

    def test_change_form_renders_correct_null_choice_value(self):
        """
        Regression test for #17911.
        """
        choice = Choice.objects.create(choice=None)
        response = self.client.get('/test_admin/admin/admin_views/choice/%s/' % choice.pk)
        self.assertContains(response, '<p>No opinion</p>', html=True)
        self.assertNotContains(response, '<p>(None)</p>')

    def test_readonly_backwards_ref(self):
        """
        Regression test for #16433 - backwards references for related objects
        broke if the related field is read-only due to the help_text attribute
        """
        topping = Topping.objects.create(name='Salami')
        pizza = Pizza.objects.create(name='Americano')
        pizza.toppings.add(topping)
        response = self.client.get('/test_admin/admin/admin_views/topping/add/')
        self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class RawIdFieldsTest(TestCase):
    """limit_choices_to filtering on raw-id lookup popups (refs #14880, #20182)."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def _popup_url_for(self, response, lookup_id):
        """
        Extract the raw-id lookup popup URL for *lookup_id* from the rendered
        add form, unescape it, and resolve it against the request path.
        """
        m = re.search(br'<a href="([^"]*)"[^>]* id="%s"' % lookup_id.encode('ascii'), response.content)
        self.assertTrue(m)  # Got a match
        # The href is taken from HTML, so "&" appears as "&amp;"; unescape it
        # before requesting. (The previous replace("&", "&") was a no-op.)
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        return urljoin(response.request['PATH_INFO'], popup_url)

    def test_limit_choices_to(self):
        """Regression test for 14880"""
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True,
                                   leader=actor,
                                   country="England")
        Inquisition.objects.create(expected=False,
                                   leader=actor,
                                   country="Spain")
        response = self.client.get('/test_admin/admin/admin_views/sketch/add/')
        popup_url = self._popup_url_for(response, "lookup_id_inquisition")
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step also tests integers, strings and booleans in the
        # lookup query string; in model we define inquisition field to have a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get('/test_admin/admin/admin_views/sketch/add/')
        popup_url = self._popup_url_for(response, "lookup_id_defendant0")
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=0 gets parsed correctly from the
        # lookup query string; in model we define defendant0 field to have a
        # limit_choices_to option that includes "actor__title__isnull=False".
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get('/test_admin/admin/admin_views/sketch/add/')
        popup_url = self._popup_url_for(response, "lookup_id_defendant1")
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=1 gets parsed correctly from the
        # lookup query string; in model we define defendant1 field to have a
        # limit_choices_to option that includes "actor__title__isnull=True".
        response2 = self.client.get(popup_url)
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
    """
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # All tests run as the fixture-provided superuser.
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_save_button(self):
        # "Save" on the user add form creates the user with a usable
        # password and redirects to the new user's change page.
        user_count = User.objects.count()
        response = self.client.post('/test_admin/admin/auth/user/add/', {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, '/test_admin/admin/auth/user/%s/' % new_user.pk)
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_save_continue_editing_button(self):
        # "_continue" also lands on the new user's change page.
        user_count = User.objects.count()
        response = self.client.post('/test_admin/admin/auth/user/add/', {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_continue': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, '/test_admin/admin/auth/user/%s/' % new_user.pk)
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_password_mismatch(self):
        # Mismatched passwords re-render the form with the error attached
        # to password2 (not to a nonexistent "password" field).
        response = self.client.post('/test_admin/admin/auth/user/add/', {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'mismatch',
        })
        self.assertEqual(response.status_code, 200)
        adminform = response.context['adminform']
        self.assertTrue('password' not in adminform.form.errors)
        self.assertEqual(adminform.form.errors['password2'],
            ["The two password fields didn't match."])

    def test_user_fk_popup(self):
        """Quick user addition in a FK popup shouldn't invoke view for further user customization"""
        response = self.client.get('/test_admin/admin/admin_views/album/add/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '/test_admin/admin/auth/user/add')
        self.assertContains(response, 'class="add-another" id="add_id_owner" onclick="return showAddAnotherPopup(this);"')
        # In popup mode the "_continue"/"_addanother" buttons are hidden.
        response = self.client.get('/test_admin/admin/auth/user/add/?_popup=1')
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_popup': '1',
            '_save': '1',
        }
        # Saving from the popup should return the popup-dismissal response
        # rather than the usual "customize the new user" view.
        response = self.client.post('/test_admin/admin/auth/user/add/?_popup=1', data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddAnotherPopup')

    def test_save_add_another_button(self):
        # "_addanother" creates the user and redirects back to the add form.
        user_count = User.objects.count()
        response = self.client.post('/test_admin/admin/auth/user/add/', {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_addanother': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, '/test_admin/admin/auth/user/add/')
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_user_permission_performance(self):
        # Guard against query-count regressions on the user change view.
        u = User.objects.all()[0]
        # Don't depend on a warm cache, see #17377.
        ContentType.objects.clear_cache()
        expected_queries = 10
        # Oracle doesn't implement "RELEASE SAVEPOINT", see #20387.
        if connection.vendor == 'oracle':
            expected_queries -= 1
        with self.assertNumQueries(expected_queries):
            response = self.client.get('/test_admin/admin/auth/user/%s/' % u.pk)
            self.assertEqual(response.status_code, 200)

    def test_form_url_present_in_context(self):
        # The admin3 site configures a custom form_url ('pony') for the
        # password-change view; it must be passed through to the context.
        u = User.objects.all()[0]
        response = self.client.get('/test_admin/admin3/auth/user/%s/password/' % u.pk)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['form_url'], 'pony')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class GroupAdminTest(TestCase):
    """
    Tests group CRUD functionality.
    """
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_save_button(self):
        # "Save" on the group add form creates exactly one new Group and
        # redirects back to the group changelist.
        initial_count = Group.objects.count()
        response = self.client.post('/test_admin/admin/auth/group/add/', {
            'name': 'newgroup',
        })
        new_group = Group.objects.order_by('-id')[0]
        self.assertRedirects(response, '/test_admin/admin/auth/group/')
        self.assertEqual(Group.objects.count(), initial_count + 1)

    def test_group_permission_performance(self):
        # Guard against query-count regressions on the group change view.
        group = Group.objects.create(name="test_group")
        expected_queries = 8
        # Oracle doesn't implement "RELEASE SAVEPOINT", see #20387.
        if connection.vendor == 'oracle':
            expected_queries -= 1
        with self.assertNumQueries(expected_queries):
            response = self.client.get('/test_admin/admin/auth/group/%s/' % group.pk)
            self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class CSSTest(TestCase):
    """Checks the CSS hooks the admin templates emit for styling."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_field_prefix_css_classes(self):
        """
        Ensure that fields have a CSS class name with a 'field-' prefix.
        Refs #16371.
        """
        response = self.client.get('/test_admin/admin/admin_views/post/add/')
        # The main form
        self.assertContains(response, 'class="form-row field-title"')
        self.assertContains(response, 'class="form-row field-content"')
        self.assertContains(response, 'class="form-row field-public"')
        self.assertContains(response, 'class="form-row field-awesomeness_level"')
        self.assertContains(response, 'class="form-row field-coolness"')
        self.assertContains(response, 'class="form-row field-value"')
        self.assertContains(response, 'class="form-row"') # The lambda function
        # The tabular inline
        self.assertContains(response, '<td class="field-url">')
        self.assertContains(response, '<td class="field-posted">')

    def test_index_css_classes(self):
        """
        Ensure that CSS class names are used for each app and model on the
        admin index pages.
        Refs #17050.
        """
        # General index page
        response = self.client.get("/test_admin/admin/")
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
        # App index page
        response = self.client.get("/test_admin/admin/admin_views/")
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
# docutils is an optional dependency: when it is missing, the flag stays
# None and AdminDocsTest below is skipped via @unittest.skipUnless.
try:
    import docutils
except ImportError:
    docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminDocsTest(TestCase):
    """Smoke tests for the admindocs tag/filter reference pages."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_tags(self):
        response = self.client.get('/test_admin/admin/doc/tags/')
        # The builtin tag group exists (count=2: once in the index of
        # groups, once as the detail section heading).
        self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
        # A builtin tag exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
        # An app tag exists in both the index and detail
        self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
        self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
        # The admin list tag group exists
        self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
        # An admin list tag exists in both the index and detail
        self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
        self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)

    def test_filters(self):
        response = self.client.get('/test_admin/admin/doc/filters/')
        # The builtin filter group exists
        self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
        # A builtin filter exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class ValidXHTMLTests(TestCase):
    """Ensure the admin emits valid XHTML (no empty lang attributes)."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']
    urlbit = 'admin'

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    @override_settings(
        # Use a list, not filter(...): on Python 3 filter() returns a
        # one-shot iterator, so the overridden setting would be exhausted
        # after its first use and silently behave as an empty sequence.
        TEMPLATE_CONTEXT_PROCESSORS=[
            t for t in global_settings.TEMPLATE_CONTEXT_PROCESSORS
            if t != 'django.core.context_processors.i18n'
        ],
        USE_I18N=False,
    )
    def testLangNamePresent(self):
        # With i18n disabled there is no language code, so the templates
        # must omit the lang attributes entirely rather than render them
        # empty (empty lang="" is invalid XHTML).
        response = self.client.get('/test_admin/%s/admin_views/' % self.urlbit)
        self.assertNotContains(response, ' lang=""')
        self.assertNotContains(response, ' xml:lang=""')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class DateHierarchyTests(TestCase):
    """Tests for the date_hierarchy links on the Podcast changelist."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')
        # Turn on localized number formatting so a regression of #15234
        # (years rendered as "2,000") would be caught.
        self.old_USE_THOUSAND_SEPARATOR = settings.USE_THOUSAND_SEPARATOR
        self.old_USE_L10N = settings.USE_L10N
        settings.USE_THOUSAND_SEPARATOR = True
        settings.USE_L10N = True

    def tearDown(self):
        settings.USE_THOUSAND_SEPARATOR = self.old_USE_THOUSAND_SEPARATOR
        settings.USE_L10N = self.old_USE_L10N
        formats.reset_format_cache()

    def assert_non_localized_year(self, response, year):
        """Ensure that the year is not localized with
        USE_THOUSAND_SEPARATOR. Refs #15234.
        """
        self.assertNotContains(response, formats.number_format(year))

    def assert_contains_year_link(self, response, date):
        self.assertContains(response, '?release_date__year=%d"' % (date.year,))

    def assert_contains_month_link(self, response, date):
        # The hrefs are rendered through the auto-escaping template engine,
        # so the "&" separating querystring parameters appears as "&amp;"
        # in the response body.
        self.assertContains(
            response, '?release_date__month=%d&amp;release_date__year=%d"' % (
                date.month, date.year))

    def assert_contains_day_link(self, response, date):
        # Same HTML-escaping note as assert_contains_month_link.
        self.assertContains(
            response, '?release_date__day=%d&amp;'
            'release_date__month=%d&amp;release_date__year=%d"' % (
                date.day, date.month, date.year))

    def test_empty(self):
        """
        Ensure that no date hierarchy links display with empty changelist.
        """
        response = self.client.get(
            reverse('admin:admin_views_podcast_changelist'))
        self.assertNotContains(response, 'release_date__year=')
        self.assertNotContains(response, 'release_date__month=')
        self.assertNotContains(response, 'release_date__day=')

    def test_single(self):
        """
        Ensure that single day-level date hierarchy appears for single object.
        """
        DATE = datetime.date(2000, 6, 30)
        Podcast.objects.create(release_date=DATE)
        url = reverse('admin:admin_views_podcast_changelist')
        response = self.client.get(url)
        self.assert_contains_day_link(response, DATE)
        self.assert_non_localized_year(response, 2000)

    def test_within_month(self):
        """
        Ensure that day-level links appear for changelist within single month.
        """
        DATES = (datetime.date(2000, 6, 30),
                 datetime.date(2000, 6, 15),
                 datetime.date(2000, 6, 3))
        for date in DATES:
            Podcast.objects.create(release_date=date)
        url = reverse('admin:admin_views_podcast_changelist')
        response = self.client.get(url)
        for date in DATES:
            self.assert_contains_day_link(response, date)
        self.assert_non_localized_year(response, 2000)

    def test_within_year(self):
        """
        Ensure that month-level links appear for changelist within single year.
        """
        DATES = (datetime.date(2000, 1, 30),
                 datetime.date(2000, 3, 15),
                 datetime.date(2000, 5, 3))
        for date in DATES:
            Podcast.objects.create(release_date=date)
        url = reverse('admin:admin_views_podcast_changelist')
        response = self.client.get(url)
        # no day-level links
        self.assertNotContains(response, 'release_date__day=')
        for date in DATES:
            self.assert_contains_month_link(response, date)
        self.assert_non_localized_year(response, 2000)

    def test_multiple_years(self):
        """
        Ensure that year-level links appear for year-spanning changelist.
        """
        DATES = (datetime.date(2001, 1, 30),
                 datetime.date(2003, 3, 15),
                 datetime.date(2005, 5, 3))
        for date in DATES:
            Podcast.objects.create(release_date=date)
        response = self.client.get(
            reverse('admin:admin_views_podcast_changelist'))
        # no day/month-level links
        self.assertNotContains(response, 'release_date__day=')
        self.assertNotContains(response, 'release_date__month=')
        for date in DATES:
            self.assert_contains_year_link(response, date)
        # and make sure GET parameters still behave correctly
        # (these are raw request URLs, not HTML, so a plain "&" is correct)
        for date in DATES:
            url = '%s?release_date__year=%d' % (
                reverse('admin:admin_views_podcast_changelist'),
                date.year)
            response = self.client.get(url)
            self.assert_contains_month_link(response, date)
            self.assert_non_localized_year(response, 2000)
            self.assert_non_localized_year(response, 2003)
            self.assert_non_localized_year(response, 2005)
            url = '%s?release_date__year=%d&release_date__month=%d' % (
                reverse('admin:admin_views_podcast_changelist'),
                date.year, date.month)
            response = self.client.get(url)
            self.assert_contains_day_link(response, date)
            self.assert_non_localized_year(response, 2000)
            self.assert_non_localized_year(response, 2003)
            self.assert_non_localized_year(response, 2005)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminCustomSaveRelatedTests(TestCase):
    """
    Ensure that one can easily customize the way related objects are saved.
    Refs #16115.
    """
    # The Parent ModelAdmin under test customizes how inline Child objects
    # are saved; the assertions below show children end up renamed with the
    # parent's surname appended (e.g. 'Paul' -> 'Paul Stone').
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_should_be_able_to_edit_related_objects_on_add_view(self):
        post = {
            'child_set-TOTAL_FORMS': '3',
            'child_set-INITIAL_FORMS': '0',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-1-name': 'Catherine',
        }
        response = self.client.post('/test_admin/admin/admin_views/parent/add/', post)
        self.assertEqual(1, Parent.objects.count())
        self.assertEqual(2, Child.objects.count())
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_change_view(self):
        parent = Parent.objects.create(name='Josh Stone')
        paul = Child.objects.create(parent=parent, name='Paul')
        catherine = Child.objects.create(parent=parent, name='Catherine')
        post = {
            'child_set-TOTAL_FORMS': '5',
            'child_set-INITIAL_FORMS': '2',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-0-id': paul.id,
            'child_set-1-name': 'Catherine',
            'child_set-1-id': catherine.id,
        }
        response = self.client.post('/test_admin/admin/admin_views/parent/%s/' % parent.id, post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
        # Editing the parent via the editable changelist must also trigger
        # the customized related-object save for its children.
        parent = Parent.objects.create(name='Josh Rock')
        paul = Child.objects.create(parent=parent, name='Paul')
        catherine = Child.objects.create(parent=parent, name='Catherine')
        post = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': parent.id,
            'form-0-name': 'Josh Stone',
            '_save': 'Save'
        }
        response = self.client.post('/test_admin/admin/admin_views/parent/', post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminViewLogoutTest(TestCase):
    """Behavior of the admin logout URL for logged-in and logged-out users."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def test_client_logout_url_can_be_used_to_login(self):
        # While logged in, GET on the logout URL logs the user out and
        # renders the logged-out template.
        logout_response = self.client.get('/test_admin/admin/logout/')
        self.assertEqual(logout_response.status_code, 200)
        self.assertTemplateUsed(logout_response, 'registration/logged_out.html')
        self.assertEqual(logout_response.request['PATH_INFO'], '/test_admin/admin/logout/')
        # We are now logged out, so the same URL redirects to the login page.
        second_response = self.client.get('/test_admin/admin/logout/')
        self.assertEqual(second_response.status_code, 302)  # we should be redirected to the login page.
        # Following the redirect lands on the admin login page with a
        # hidden "next" input pointing back at the admin index.
        followed_response = self.client.get('/test_admin/admin/logout/', follow=True)
        self.assertEqual(followed_response.status_code, 200)
        self.assertTemplateUsed(followed_response, 'admin/login.html')
        self.assertEqual(followed_response.request['PATH_INFO'], '/test_admin/admin/')
        self.assertContains(followed_response, '<input type="hidden" name="next" value="/test_admin/admin/" />')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminUserMessageTest(TestCase):
    """Tests that messages emitted by admin actions reach the response."""
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def send_message(self, level):
        """
        Helper that sends a post to the dummy test methods and asserts that a
        message with the level has appeared in the response.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_%s' % level,
            'index': 0,
        }
        response = self.client.post('/test_admin/admin/admin_views/usermessenger/',
                                    action_data, follow=True)
        self.assertContains(response,
                            '<li class="%s">Test %s</li>' % (level, level),
                            html=True)

    @override_settings(MESSAGE_LEVEL=10)  # Set to DEBUG for this request
    def test_message_debug(self):
        self.send_message('debug')

    def test_message_info(self):
        self.send_message('info')

    def test_message_success(self):
        self.send_message('success')

    def test_message_warning(self):
        self.send_message('warning')

    def test_message_error(self):
        self.send_message('error')

    def test_message_extra_tags(self):
        # Extra tags supplied by the action must end up as additional CSS
        # classes on the rendered message.
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_extra_tags',
            'index': 0,
        }
        response = self.client.post('/test_admin/admin/admin_views/usermessenger/',
                                    action_data, follow=True)
        self.assertContains(response,
                            '<li class="extra_tag info">Test tags</li>',
                            html=True)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminKeepChangeListFiltersTests(TestCase):
    """
    Ensure the changelist filters (carried in the `_changelist_filters`
    querystring parameter) are preserved while navigating the admin's
    change/add/history/delete views and through their redirects.
    """
    urls = "admin_views.urls"
    fixtures = ['admin-views-users.xml']
    # Subclasses override this to exercise a different AdminSite
    # (see NamespacedAdminKeepChangeListFiltersTests).
    admin_site = site

    def setUp(self):
        self.client.login(username='super', password='secret')

    def tearDown(self):
        self.client.logout()

    def assertURLEqual(self, url1, url2):
        """
        Assert that two URLs are equal despite the ordering
        of their querystring. Refs #22360.
        """
        # Note: only path and query are compared, so scheme and host are
        # deliberately ignored.
        parsed_url1 = urlparse(url1)
        path1 = parsed_url1.path
        parsed_qs1 = dict(parse_qsl(parsed_url1.query))
        parsed_url2 = urlparse(url2)
        path2 = parsed_url2.path
        parsed_qs2 = dict(parse_qsl(parsed_url2.query))
        # `_changelist_filters` is itself an encoded querystring; parse it
        # as well so its internal parameter ordering is also ignored.
        for parsed_qs in [parsed_qs1, parsed_qs2]:
            if '_changelist_filters' in parsed_qs:
                changelist_filters = parsed_qs['_changelist_filters']
                parsed_filters = dict(parse_qsl(changelist_filters))
                parsed_qs['_changelist_filters'] = parsed_filters
        self.assertEqual(path1, path2)
        self.assertEqual(parsed_qs1, parsed_qs2)

    def test_assert_url_equal(self):
        # Self-test for the assertURLEqual helper above.
        # Test equality.
        self.assertURLEqual(
            'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
            'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'
        )
        # Test inequality.
        with self.assertRaises(AssertionError):
            self.assertURLEqual(
                'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
                'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'
            )
        # Ignore scheme and host.
        self.assertURLEqual(
            'http://testserver/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
            '/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'
        )
        # Ignore ordering of querystring.
        self.assertURLEqual(
            '/test_admin/admin/auth/user/?is_staff__exact=0&is_superuser__exact=0',
            '/test_admin/admin/auth/user/?is_superuser__exact=0&is_staff__exact=0'
        )
        # Ignore ordering of _changelist_filters.
        self.assertURLEqual(
            '/test_admin/admin/auth/user/105/?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0',
            '/test_admin/admin/auth/user/105/?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'
        )

    def get_changelist_filters(self):
        # The filters used throughout this test case.
        return {
            'is_superuser__exact': 0,
            'is_staff__exact': 0,
        }

    def get_changelist_filters_querystring(self):
        return urlencode(self.get_changelist_filters())

    def get_preserved_filters_querystring(self):
        # The changelist filters, wrapped as the single
        # `_changelist_filters` parameter used on detail views.
        return urlencode({
            '_changelist_filters': self.get_changelist_filters_querystring()
        })

    def get_sample_user_id(self):
        # Fixture-provided user ("joepublic") used for the detail views.
        return 104

    def get_changelist_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_changelist',
                    current_app=self.admin_site.name),
            self.get_changelist_filters_querystring(),
        )

    def get_add_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_add',
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_change_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_change', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_history_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_history', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_delete_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_delete', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def test_changelist_view(self):
        response = self.client.get(self.get_changelist_url())
        self.assertEqual(response.status_code, 200)
        # Check the `change_view` link has the correct querystring.
        detail_link = re.search(
            '<a href="(.*?)">joepublic</a>',
            force_text(response.content)
        )
        self.assertURLEqual(detail_link.group(1), self.get_change_url())

    def test_change_view(self):
        # Get the `change_view`.
        response = self.client.get(self.get_change_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        # Check the history link.
        history_link = re.search(
            '<a href="(.*?)" class="historylink">History</a>',
            force_text(response.content)
        )
        self.assertURLEqual(history_link.group(1), self.get_history_url())
        # Check the delete link.
        delete_link = re.search(
            '<a href="(.*?)" class="deletelink">Delete</a>',
            force_text(response.content)
        )
        self.assertURLEqual(delete_link.group(1), self.get_delete_url())
        # Test redirect on "Save": back to the filtered changelist.
        post_data = {
            'username': 'joepublic',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
        }
        post_data['_save'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue": stays on the change view,
        # filters still attached.
        post_data['_continue'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url()
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new": to the add view, filters
        # still attached.
        post_data['_addanother'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_add_view(self):
        # Get the `add_view`.
        response = self.client.get(self.get_add_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        post_data = {
            'username': 'dummy',
            'password1': 'test',
            'password2': 'test',
        }
        # Test redirect on "Save": to the new user's change view,
        # filters preserved.
        post_data['_save'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.latest('pk').pk)
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['username'] = 'dummy2'
        post_data['_continue'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.latest('pk').pk)
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['username'] = 'dummy3'
        post_data['_addanother'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_delete_view(self):
        # Test redirect on "Delete": back to the filtered changelist.
        response = self.client.post(self.get_delete_url(), {'post': 'yes'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )

    def test_url_prefix(self):
        # add_preserved_filters must produce the same URL regardless of
        # the script prefix in effect.
        context = {
            'preserved_filters': self.get_preserved_filters_querystring(),
            'opts': User._meta,
        }
        url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
        self.assertURLEqual(
            self.get_changelist_url(),
            add_preserved_filters(context, url),
        )
        original_prefix = get_script_prefix()
        try:
            set_script_prefix('/prefix/')
            url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
            self.assertURLEqual(
                self.get_changelist_url(),
                add_preserved_filters(context, url),
            )
        finally:
            # Always restore the prefix so other tests are unaffected.
            set_script_prefix(original_prefix)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
    # Re-run the whole preserved-filters suite against the second admin
    # site, to make sure current_app-based URL reversing works there too.
    admin_site = site2
class AdminGenericRelationTests(TestCase):
    def test_generic_relation_fk_list_filter(self):
        """
        Validates a model with a generic relation to a model with
        a foreign key can specify the generic+fk relationship
        path as a list_filter. See trac #21428.
        """
        class GenericFKAdmin(admin.ModelAdmin):
            list_filter = ('tags__content_type',)

        validator = ModelAdminValidator()
        try:
            validator.validate_list_filter(GenericFKAdmin, Plot)
        except ImproperlyConfigured:
            # Validation raising here means the generic+fk path was
            # (wrongly) rejected. NOTE: stray extraction artifacts
            # ("| unknown | codeparrot/codeparrot-clean |") fused onto the
            # original last line have been removed.
            self.fail("Couldn't validate a GenericRelation -> FK path in ModelAdmin.list_filter")