code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
<?php
namespace Illuminate\Contracts\Auth;
interface SupportsBasicAuth
{
/**
* Attempt to authenticate using HTTP Basic Auth.
*
* @param string $field
* @param array $extraConditions
* @return \Symfony\Component\HttpFoundation\Response|null
*/
public function basic($field = 'email', $extraConditions = []);
/**
* Perform a stateless HTTP Basic login attempt.
*
* @param string $field
* @param array $extraConditions
* @return \Symfony\Component\HttpFoundation\Response|null
*/
public function onceBasic($field = 'email', $extraConditions = []);
} | php | github | https://github.com/laravel/framework | src/Illuminate/Contracts/Auth/SupportsBasicAuth.php |
/*!
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const getMetaKey = () => (navigator.appVersion.includes("Mac") ? "⌘" : "Ctrl"); | typescript | github | https://github.com/apache/airflow | airflow-core/src/airflow/ui/src/utils/getMetaKey.ts |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import gc
import os
import sys
from tempfile import NamedTemporaryFile
import threading
from pyspark.java_gateway import local_connect_and_auth
from pyspark.serializers import ChunkedStream, pickle_protocol
from pyspark.util import _exception_message, print_exec
if sys.version < '3':
import cPickle as pickle
else:
import pickle
unicode = str
__all__ = ['Broadcast']
# Holds broadcasted data received from Java, keyed by its id.
_broadcastRegistry = {}
def _from_id(bid):
    """Return the worker-side registered Broadcast for `bid`.

    Raises an Exception when the broadcast was never loaded into the
    registry on this worker.
    """
    from pyspark.broadcast import _broadcastRegistry
    if bid in _broadcastRegistry:
        return _broadcastRegistry[bid]
    raise Exception("Broadcast variable '%s' not loaded!" % bid)
class Broadcast(object):
    """
    A broadcast variable created with :meth:`SparkContext.broadcast`.
    Access its value through :attr:`value`.

    Examples:
    >>> from pyspark.context import SparkContext
    >>> sc = SparkContext('local', 'test')
    >>> b = sc.broadcast([1, 2, 3, 4, 5])
    >>> b.value
    [1, 2, 3, 4, 5]
    >>> sc.parallelize([0, 0]).flatMap(lambda x: b.value).collect()
    [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]
    >>> b.unpersist()
    >>> large_broadcast = sc.broadcast(range(10000))
    """

    def __init__(self, sc=None, value=None, pickle_registry=None, path=None,
                 sock_file=None):
        """
        Should not be called directly by users -- use
        :meth:`SparkContext.broadcast` instead.
        """
        if sc is not None:
            # Driver side: the pickled value ends up in a file (maybe encrypted).
            f = NamedTemporaryFile(delete=False, dir=sc._temp_dir)
            self._path = f.name
            self._sc = sc
            self._python_broadcast = sc._jvm.PythonRDD.setupBroadcast(self._path)
            if sc._encryption_enabled:
                # With encryption the JVM does the encrypting for us; we
                # stream the pickled bytes to it over a local socket.
                port, auth_secret = self._python_broadcast.setupEncryptionServer()
                (encryption_sock_file, _) = local_connect_and_auth(port, auth_secret)
                broadcast_out = ChunkedStream(encryption_sock_file, 8192)
            else:
                # No encryption: write the pickled data straight to the file.
                broadcast_out = f
            self.dump(value, broadcast_out)
            if sc._encryption_enabled:
                self._python_broadcast.waitTillDataReceived()
            self._jbroadcast = sc._jsc.broadcast(self._python_broadcast)
            self._pickle_registry = pickle_registry
        else:
            # Executor side.
            self._jbroadcast = None
            self._sc = None
            self._python_broadcast = None
            if sock_file is not None:
                # The JVM is decrypting for us: read the value immediately
                # from the socket.
                self._value = self.load(sock_file)
            else:
                # The JVM dumped the pickled data at `path`; unpickle lazily
                # when the value is first requested.
                assert path is not None
                self._path = path

    def dump(self, value, f):
        """Pickle `value` into the writable object `f`, closing it on success."""
        try:
            pickle.dump(value, f, pickle_protocol)
        except pickle.PickleError:
            raise
        except Exception as e:
            msg = "Could not serialize broadcast: %s: %s" \
                % (e.__class__.__name__, _exception_message(e))
            print_exec(sys.stderr)
            raise pickle.PicklingError(msg)
        f.close()

    def load_from_path(self, path):
        """Unpickle and return the broadcast value stored at `path`."""
        with open(path, 'rb', 1 << 20) as f:
            return self.load(f)

    def load(self, file):
        """Unpickle and return the broadcast value from `file`.

        `file` may also be a socket file object.
        """
        # Disabling the cyclic GC while unpickling large structures avoids
        # repeated collection passes.
        gc.disable()
        try:
            return pickle.load(file)
        finally:
            gc.enable()

    @property
    def value(self):
        """ Return the broadcasted value
        """
        if not hasattr(self, "_value") and self._path is not None:
            # Decryption is only needed here on the driver; executors handle
            # decryption in __init__ when a socket file is supplied.
            if self._sc is not None and self._sc._encryption_enabled:
                port, auth_secret = self._python_broadcast.setupDecryptionServer()
                (decrypted_sock_file, _) = local_connect_and_auth(port, auth_secret)
                self._python_broadcast.waitTillBroadcastDataSent()
                return self.load(decrypted_sock_file)
            else:
                self._value = self.load_from_path(self._path)
        return self._value

    def unpersist(self, blocking=False):
        """
        Delete cached copies of this broadcast on the executors. If the
        broadcast is used after this is called, it will need to be
        re-sent to each executor.

        :param blocking: Whether to block until unpersisting has completed
        """
        if self._jbroadcast is None:
            raise Exception("Broadcast can only be unpersisted in driver")
        self._jbroadcast.unpersist(blocking)

    def destroy(self, blocking=False):
        """
        Destroy all data and metadata related to this broadcast variable.
        Use this with caution; once a broadcast variable has been destroyed,
        it cannot be used again.

        .. versionchanged:: 3.0.0
           Added optional argument `blocking` to specify whether to block
           until all blocks are deleted.
        """
        if self._jbroadcast is None:
            raise Exception("Broadcast can only be destroyed in driver")
        self._jbroadcast.destroy(blocking)
        os.unlink(self._path)

    def __reduce__(self):
        """Serialize as a lookup by broadcast id; only valid on the driver."""
        if self._jbroadcast is None:
            raise Exception("Broadcast can only be serialized in driver")
        self._pickle_registry.add(self)
        return _from_id, (self._jbroadcast.id(),)
class BroadcastPickleRegistry(threading.local):
    """Thread-local registry of broadcast variables that have been pickled."""

    def __init__(self):
        # threading.local may run __init__ once per thread; setdefault keeps
        # any registry already created for the current thread.
        self.__dict__.setdefault("_registry", set())

    def __iter__(self):
        return iter(self._registry)

    def add(self, bcast):
        self._registry.add(bcast)

    def clear(self):
        self._registry.clear()
if __name__ == "__main__":
import doctest
(failure_count, test_count) = doctest.testmod()
if failure_count:
sys.exit(-1) | unknown | codeparrot/codeparrot-clean | ||
from typing import TYPE_CHECKING, Any
from langchain_classic._api import create_importer
if TYPE_CHECKING:
from langchain_community.chat_models.tongyi import ChatTongyi
# Create a way to dynamically look up deprecated imports.
# Used to consolidate logic for raising deprecation warnings and
# handling optional imports.
DEPRECATED_LOOKUP = {"ChatTongyi": "langchain_community.chat_models.tongyi"}
_import_attribute = create_importer(__package__, deprecated_lookups=DEPRECATED_LOOKUP)
def __getattr__(name: str) -> Any:
"""Look up attributes dynamically."""
# Delegates to the importer built from DEPRECATED_LOOKUP above, which
# resolves the name from langchain_community (raising a deprecation
# warning per the module-level comment).
return _import_attribute(name)
__all__ = [
"ChatTongyi",
] | python | github | https://github.com/langchain-ai/langchain | libs/langchain/langchain_classic/chat_models/tongyi.py |
<?php
namespace Illuminate\Redis;
use Closure;
use Illuminate\Contracts\Redis\Factory;
use Illuminate\Redis\Connections\Connection;
use Illuminate\Redis\Connectors\PhpRedisConnector;
use Illuminate\Redis\Connectors\PredisConnector;
use Illuminate\Support\Arr;
use Illuminate\Support\ConfigurationUrlParser;
use InvalidArgumentException;
use function Illuminate\Support\enum_value;
/**
* @mixin \Illuminate\Redis\Connections\Connection
*/
class RedisManager implements Factory
{
/**
* The application instance.
*
* @var \Illuminate\Contracts\Foundation\Application
*/
protected $app;
/**
* The name of the default driver.
*
* @var string
*/
protected $driver;
/**
* The registered custom driver creators.
*
* @var array
*/
protected $customCreators = [];
/**
* The Redis server configurations.
*
* @var array
*/
protected $config;
/**
* The Redis connections.
*
* @var mixed
*/
protected $connections;
/**
* Indicates whether event dispatcher is set on connections.
*
* @var bool
*/
protected $events = false;
/**
* Create a new Redis manager instance.
*
* @param \Illuminate\Contracts\Foundation\Application $app
* @param string $driver
* @param array $config
*/
public function __construct($app, $driver, array $config)
{
$this->app = $app;
$this->driver = $driver;
$this->config = $config;
}
/**
* Get a Redis connection by name.
*
* @param \UnitEnum|string|null $name
* @return \Illuminate\Redis\Connections\Connection
*/
public function connection($name = null)
{
    // Resolve enum-backed names to their value and fall back to "default".
    $name = enum_value($name) ?: 'default';

    // Reuse a cached connection when present; otherwise resolve, configure
    // and memoize it. `??=` treats a null entry like a missing one, which
    // matches the original isset() check.
    return $this->connections[$name] ??= $this->configure(
        $this->resolve($name), $name
    );
}
/**
* Resolve the given connection by name.
*
* @param string|null $name
* @return \Illuminate\Redis\Connections\Connection
*
* @throws \InvalidArgumentException
*/
public function resolve($name = null)
{
$name = $name ?: 'default';
$options = $this->config['options'] ?? [];
// Single-server connection: merge the global options with a per-connection
// "parameters.<name>" override (falling back to the shared "parameters").
if (isset($this->config[$name])) {
return $this->connector()->connect(
$this->parseConnectionConfiguration($this->config[$name]),
array_merge(Arr::except($options, 'parameters'), ['parameters' => Arr::get($options, 'parameters.'.$name, Arr::get($options, 'parameters', []))])
);
}
// Otherwise the name may refer to a cluster definition.
if (isset($this->config['clusters'][$name])) {
return $this->resolveCluster($name);
}
throw new InvalidArgumentException("Redis connection [{$name}] not configured.");
}
/**
* Resolve the given cluster connection by name.
*
* @param string $name
* @return \Illuminate\Redis\Connections\Connection
*/
protected function resolveCluster($name)
{
    // Normalize every node's configuration before handing the set to the
    // connector, together with cluster-level and global options.
    $parsedNodes = array_map(
        fn ($config) => $this->parseConnectionConfiguration($config),
        $this->config['clusters'][$name]
    );

    return $this->connector()->connectToCluster(
        $parsedNodes,
        $this->config['clusters']['options'] ?? [],
        $this->config['options'] ?? []
    );
}
/**
* Configure the given connection to prepare it for commands.
*
* @param \Illuminate\Redis\Connections\Connection $connection
* @param string $name
* @return \Illuminate\Redis\Connections\Connection
*/
protected function configure(Connection $connection, $name)
{
$connection->setName($name);
// Attach the application's event dispatcher only after enableEvents()
// has been called and the container actually has an 'events' binding.
if ($this->events && $this->app->bound('events')) {
$connection->setEventDispatcher($this->app->make('events'));
}
return $connection;
}
/**
* Get the connector instance for the current driver.
*
* @return \Illuminate\Contracts\Redis\Connector|null
*/
protected function connector()
{
    // A custom creator registered via extend() takes precedence over the
    // built-in drivers.
    if (isset($this->customCreators[$this->driver])) {
        return ($this->customCreators[$this->driver])();
    }

    return match ($this->driver) {
        'predis' => new PredisConnector,
        'phpredis' => new PhpRedisConnector,
        default => null,
    };
}
/**
* Parse the Redis connection configuration.
*
* @param mixed $config
* @return array
*/
protected function parseConnectionConfiguration($config)
{
    $parsed = (new ConfigurationUrlParser)->parseConfiguration($config);

    $scheme = strtolower($parsed['driver'] ?? '');

    // URL-style configs may carry the scheme in the "driver" slot.
    if ($scheme === 'tcp' || $scheme === 'tls') {
        $parsed['scheme'] = $scheme;
    }

    // Drop the pseudo "driver" key; everything else goes to the connector.
    return Arr::except($parsed, 'driver');
}
/**
* Return all of the created connections.
*
* @return array
*/
public function connections()
{
// NOTE(review): $connections is declared `@var mixed` and is never
// initialized, so this may be null until the first connection is
// resolved — confirm callers handle a non-array return.
return $this->connections;
}
/**
* Enable the firing of Redis command events.
*
* @return void
*/
public function enableEvents()
{
$this->events = true;
}
/**
* Disable the firing of Redis command events.
*
* @return void
*/
public function disableEvents()
{
$this->events = false;
}
/**
* Set the default driver.
*
* @param string $driver
* @return void
*/
public function setDriver($driver)
{
$this->driver = $driver;
}
/**
* Disconnect the given connection and remove from local cache.
*
* @param string|null $name
* @return void
*/
public function purge($name = null)
{
    // Fall back to the "default" connection when no name was supplied.
    unset($this->connections[$name ?: 'default']);
}
/**
* Register a custom driver creator Closure.
*
* @param string $driver
* @param \Closure $callback
*
* @param-closure-this $this $callback
*
* @return $this
*/
public function extend($driver, Closure $callback)
{
$this->customCreators[$driver] = $callback->bindTo($this, $this);
return $this;
}
/**
* Pass methods onto the default Redis connection.
*
* @param string $method
* @param array $parameters
* @return mixed
*/
public function __call($method, $parameters)
{
return $this->connection()->{$method}(...$parameters);
}
} | php | github | https://github.com/laravel/framework | src/Illuminate/Redis/RedisManager.php |
__author__ = 'saeedamen' # Saeed Amen / saeed@thalesians.com
#
# Copyright 2015 Thalesians Ltd. - http//www.thalesians.com / @thalesians
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and limitations under the License.
#
"""
plotfactory_examples
Examples to show how to use PlotFactory to make charts (at present only line charts have a lot of support).
"""
import datetime
from pythalesians.market.loaders.lighttimeseriesfactory import LightTimeSeriesFactory
from pythalesians.market.requests.timeseriesrequest import TimeSeriesRequest
from pythalesians.graphics.graphs.plotfactory import PlotFactory
from pythalesians.graphics.graphs.graphproperties import GraphProperties
if True:
pf = PlotFactory()
# test simple PyThalesians/Bokeh time series line charts
if False:
ltsf = LightTimeSeriesFactory()
start = '01 Jan 2000'
end = datetime.datetime.utcnow()
tickers = ['AUDJPY', 'USDJPY']
vendor_tickers = ['AUDJPY BGN Curncy', 'USDJPY BGN Curncy']
time_series_request = TimeSeriesRequest(
start_date = start, # start date
finish_date = datetime.date.today(), # finish date
freq = 'daily', # daily data
data_source = 'bloomberg', # use Bloomberg as data source
tickers = tickers, # ticker (Thalesians)
fields = ['close'], # which fields to download
vendor_tickers = vendor_tickers, # ticker (Bloomberg)
vendor_fields = ['PX_LAST'], # which Bloomberg fields to download
cache_algo = 'internet_load_return') # how to return data
daily_vals = ltsf.harvest_time_series(time_series_request)
pf = PlotFactory()
gp = GraphProperties()
gp.title = 'Spot values'
gp.file_output = 'output_data/demo.png'
gp.html_file_output = 'output_data/demo.htm'
gp.source = 'Thalesians/BBG'
# plot using PyThalesians
pf.plot_line_graph(daily_vals, adapter = 'pythalesians', gp = gp)
# plot using Bokeh (still needs a lot of work!)
pf.plot_line_graph(daily_vals, adapter = 'bokeh', gp = gp)
# do more complicated charts using several different Matplotib stylesheets (which have been customised)
if False:
ltsf = LightTimeSeriesFactory()
# load market data
start = '01 Jan 1970'
end = datetime.datetime.utcnow()
tickers = ['AUDJPY', 'USDJPY']
vendor_tickers = ['AUDJPY BGN Curncy', 'USDJPY BGN Curncy']
time_series_request = TimeSeriesRequest(
start_date = start, # start date
finish_date = datetime.date.today(), # finish date
freq = 'daily', # daily data
data_source = 'bloomberg', # use Bloomberg as data source
tickers = tickers, # ticker (Thalesians)
fields = ['close'], # which fields to download
vendor_tickers = vendor_tickers, # ticker (Bloomberg)
vendor_fields = ['PX_LAST'], # which Bloomberg fields to download
cache_algo = 'internet_load_return') # how to return data
daily_vals = ltsf.harvest_time_series(time_series_request)
# plot data
gp = GraphProperties()
pf = PlotFactory()
gp.title = 'Spot values'
gp.file_output = 'output_data/demo.png'
gp.scale_factor = 2
gp.style_sheet = 'pythalesians'
# first use PyThalesians matplotlib wrapper
pf.plot_line_graph(daily_vals, adapter = 'pythalesians', gp = gp)
pf.plot_generic_graph(daily_vals, gp = gp, type = 'line')
# use modified 538 Matplotlib stylesheet
gp.style_sheet = '538-pythalesians'
pf.plot_line_graph(daily_vals, adapter = 'pythalesians', gp = gp)
# use miletus matplotlib stylesheet
gp.style_sheet = 'miletus-pythalesians'
pf.plot_line_graph(daily_vals, adapter = 'pythalesians', gp = gp)
# use ggplot matplotlib styleheet
gp.scale_factor = 1
gp.display_brand_label = False
gp.display_source = False
gp.style_sheet = 'ggplot-pythalesians'
gp.display_mpld3 = True
gp.html_file_output = 'output_data/demo.htm'
pf.plot_line_graph(daily_vals, adapter = 'pythalesians', gp = gp)
# now use PyThalesians bokeh wrapper (still needs a lot of work!)
gp.scale_factor = 2
gp.html_file_output = 'output_data/demo_bokeh.htm'
pf.plot_line_graph(daily_vals, adapter = 'bokeh', gp = gp)
# test simple PyThalesians bar charts - calculate yearly returns for various assets
if False:
ltsf = LightTimeSeriesFactory()
start = '01 Jan 2000'
end = datetime.datetime.utcnow()
tickers = ['AUDJPY', 'USDJPY', 'EURUSD', 'S&P500']
vendor_tickers = ['AUDJPY BGN Curncy', 'USDJPY BGN Curncy', 'EURUSD BGN Curncy', 'SPX Index']
time_series_request = TimeSeriesRequest(
start_date = start, # start date
finish_date = datetime.date.today(), # finish date
freq = 'daily', # daily data
data_source = 'bloomberg', # use Bloomberg as data source
tickers = tickers, # ticker (Thalesians)
fields = ['close'], # which fields to download
vendor_tickers = vendor_tickers, # ticker (Bloomberg)
vendor_fields = ['PX_LAST'], # which Bloomberg fields to download
cache_algo = 'internet_load_return') # how to return data
daily_vals = ltsf.harvest_time_series(time_series_request)
# resample for year end
daily_vals = daily_vals.resample('A')
daily_vals = daily_vals / daily_vals.shift(1) - 1
daily_vals.index = daily_vals.index.year
daily_vals = daily_vals.drop(daily_vals.head(1).index)
pf = PlotFactory()
gp = GraphProperties()
gp.source = 'Thalesians/BBG'
gp.title = 'Yearly changes in spot'
gp.scale_factor = 3
gp.y_title = "Percent Change"
daily_vals = daily_vals * 100
# plot using PyThalesians (stacked & then bar graph)
pf.plot_stacked_graph(daily_vals, adapter = 'pythalesians', gp = gp)
pf.plot_bar_graph(daily_vals, adapter = 'pythalesians', gp = gp) | unknown | codeparrot/codeparrot-clean | ||
import xml.dom.minidom
document = """\
<slideshow>
<title>Demo slideshow</title>
<slide><title>Slide title</title>
<point>This is a demo</point>
<point>Of a program for processing slides</point>
</slide>
<slide><title>Another demo slide</title>
<point>It is important</point>
<point>To have more than</point>
<point>one slide</point>
</slide>
</slideshow>
"""
dom = xml.dom.minidom.parseString(document)
def getText(nodelist):
    """Concatenate the data of every TEXT_NODE in `nodelist`."""
    return ''.join(
        node.data for node in nodelist if node.nodeType == node.TEXT_NODE
    )
def handleSlideshow(slideshow):
    """Emit the full HTML document for one <slideshow> element."""
    print("<html>")
    # Document title first, then a table of contents, then the slides.
    handleSlideshowTitle(slideshow.getElementsByTagName("title")[0])
    slides = slideshow.getElementsByTagName("slide")
    handleToc(slides)
    handleSlides(slides)
    print("</html>")
# Render each <slide> element in document order.
def handleSlides(slides):
for slide in slides:
handleSlide(slide)
# One slide = its title as an <h2> followed by its bullet points.
def handleSlide(slide):
handleSlideTitle(slide.getElementsByTagName("title")[0])
handlePoints(slide.getElementsByTagName("point"))
# The slideshow-level title becomes the HTML <title>.
def handleSlideshowTitle(title):
print("<title>%s</title>" % getText(title.childNodes))
def handleSlideTitle(title):
print("<h2>%s</h2>" % getText(title.childNodes))
# Bullet points are rendered as an unordered list.
def handlePoints(points):
print("<ul>")
for point in points:
handlePoint(point)
print("</ul>")
def handlePoint(point):
print("<li>%s</li>" % getText(point.childNodes))
# Table of contents: one <p> per slide title.
def handleToc(slides):
for slide in slides:
title = slide.getElementsByTagName("title")[0]
print("<p>%s</p>" % getText(title.childNodes))
handleSlideshow(dom) | unknown | codeparrot/codeparrot-clean | ||
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.exception
import dns.rdata
import dns.tokenizer
def _validate_float_string(what):
    """Validate that `what` is an optionally signed decimal number string.

    Accepts forms such as "12", "-12", "+3.5", "3." and ".5".  Raises
    dns.exception.FormError for anything else.

    Fixes two crashes in the previous version: an empty string raised
    IndexError (from `what[0]`), and inputs with more than one '.'
    (or a bare sign like "+") raised ValueError from tuple unpacking
    instead of reporting a malformed record.
    """
    if what and what[0] in ('-', '+'):
        what = what[1:]
    if what.isdigit():
        return
    parts = what.split('.')
    # Exactly one decimal point is allowed; "" and "1.2.3" are malformed.
    if len(parts) != 2:
        raise dns.exception.FormError
    (left, right) = parts
    if left == '' and right == '':
        raise dns.exception.FormError
    if not left == '' and not left.isdigit():
        raise dns.exception.FormError
    if not right == '' and not right.isdigit():
        raise dns.exception.FormError
class GPOS(dns.rdata.Rdata):
"""GPOS record
@ivar latitude: latitude
@type latitude: string
@ivar longitude: longitude
@type longitude: string
@ivar altitude: altitude
@type altitude: string
@see: RFC 1712"""
__slots__ = ['latitude', 'longitude', 'altitude']
def __init__(self, rdclass, rdtype, latitude, longitude, altitude):
# Coordinates may be supplied as numbers and are normalised to their
# string form before validation.
# NOTE(review): `long` exists only in Python 2 — this module predates
# Python 3 and will raise NameError there.
super(GPOS, self).__init__(rdclass, rdtype)
if isinstance(latitude, float) or \
isinstance(latitude, int) or \
isinstance(latitude, long):
latitude = str(latitude)
if isinstance(longitude, float) or \
isinstance(longitude, int) or \
isinstance(longitude, long):
longitude = str(longitude)
if isinstance(altitude, float) or \
isinstance(altitude, int) or \
isinstance(altitude, long):
altitude = str(altitude)
# Each value must look like an optionally signed decimal number;
# _validate_float_string raises dns.exception.FormError otherwise.
_validate_float_string(latitude)
_validate_float_string(longitude)
_validate_float_string(altitude)
self.latitude = latitude
self.longitude = longitude
self.altitude = altitude
def to_text(self, origin=None, relativize=True, **kw):
# Presentation format: "latitude longitude altitude", space-separated.
return '%s %s %s' % (self.latitude, self.longitude, self.altitude)
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
# Read three whitespace-delimited strings — latitude, longitude,
# altitude — then require end of line.
latitude = tok.get_string()
longitude = tok.get_string()
altitude = tok.get_string()
tok.get_eol()
return cls(rdclass, rdtype, latitude, longitude, altitude)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
    """Write latitude, longitude and altitude as length-prefixed strings.

    Each field is emitted as a single length byte followed by its text
    (Python 2 str semantics: chr() yields a one-byte string).
    """
    for what in (self.latitude, self.longitude, self.altitude):
        l = len(what)
        assert l < 256
        file.write(chr(l))
        file.write(what)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
# Each field is a one-byte length followed by that many bytes; the
# running rdlen check rejects lengths that overrun the rdata.
l = ord(wire[current])
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
latitude = wire[current : current + l]
current += l
rdlen -= l
l = ord(wire[current])
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
longitude = wire[current : current + l]
current += l
rdlen -= l
l = ord(wire[current])
current += 1
rdlen -= 1
# The final field must consume exactly the remaining rdata, hence
# `!=` rather than `>`.
if l != rdlen:
raise dns.exception.FormError
altitude = wire[current : current + l]
return cls(rdclass, rdtype, latitude, longitude, altitude)
from_wire = classmethod(from_wire)
def _cmp(self, other):
# Python 2 three-way comparison (`cmp` is a builtin only in Python 2):
# order by latitude, then longitude, then altitude, comparing the
# stored *strings*, not their numeric values.
v = cmp(self.latitude, other.latitude)
if v == 0:
v = cmp(self.longitude, other.longitude)
if v == 0:
v = cmp(self.altitude, other.altitude)
return v
def _get_float_latitude(self):
return float(self.latitude)
def _set_float_latitude(self, value):
self.latitude = str(value)
# Expose latitude as a float while the canonical storage stays a string.
float_latitude = property(_get_float_latitude, _set_float_latitude,
doc="latitude as a floating point value")
def _get_float_longitude(self):
return float(self.longitude)
def _set_float_longitude(self, value):
self.longitude = str(value)
# Expose longitude as a float while the canonical storage stays a string.
float_longitude = property(_get_float_longitude, _set_float_longitude,
doc="longitude as a floating point value")
def _get_float_altitude(self):
return float(self.altitude)
def _set_float_altitude(self, value):
self.altitude = str(value)
float_altitude = property(_get_float_altitude, _set_float_altitude,
doc="altitude as a floating point value") | unknown | codeparrot/codeparrot-clean | ||
from __future__ import absolute_import, unicode_literals
from django.http import HttpResponseBadRequest
from django.shortcuts import render_to_response
from django.template import TemplateDoesNotExist
from django.template.engine import Engine
from django.utils.safestring import mark_safe
try:
from django.template import Origin
except ImportError:
Origin = None
def template_source(request):
"""
Return the source of a template, syntax-highlighted by Pygments if
it's available.
"""
template_origin_name = request.GET.get('template_origin', None)
if template_origin_name is None:
return HttpResponseBadRequest('"template_origin" key is required')
template_name = request.GET.get('template', template_origin_name)
final_loaders = []
loaders = Engine.get_default().template_loaders
for loader in loaders:
if loader is not None:
# When the loader has loaders associated with it,
# append those loaders to the list. This occurs with
# django.template.loaders.cached.Loader
if hasattr(loader, 'loaders'):
final_loaders += loader.loaders
else:
final_loaders.append(loader)
for loader in final_loaders:
if Origin: # django>=1.9
origin = Origin(template_origin_name)
try:
source = loader.get_contents(origin)
break
except TemplateDoesNotExist:
pass
else: # django<1.9
try:
source, _ = loader.load_template_source(template_name)
break
except TemplateDoesNotExist:
pass
else:
source = "Template Does Not Exist: %s" % (template_origin_name,)
try:
from pygments import highlight
from pygments.lexers import HtmlDjangoLexer
from pygments.formatters import HtmlFormatter
source = highlight(source, HtmlDjangoLexer(), HtmlFormatter())
source = mark_safe(source)
source.pygmentized = True
except ImportError:
pass
# Using render_to_response avoids running global context processors.
return render_to_response('debug_toolbar/panels/template_source.html', {
'source': source,
'template_name': template_name
}) | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
import time
import re
import urlparse
from ..internal.misc import json
from ..internal.XFSAccount import XFSAccount
class UptoboxCom(XFSAccount):
__name__ = "UptoboxCom"
__type__ = "account"
__version__ = "0.23"
__status__ = "testing"
__description__ = """Uptobox.com account plugin"""
__license__ = "GPLv3"
__authors__ = [("benbox69", "dev@tollet.me"),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
PLUGIN_DOMAIN = "uptobox.com"
PLUGIN_URL = "https://uptobox.com/"
PREMIUM_PATTERN = r'Premium member'
VALID_UNTIL_PATTERN = r"class='expiration-date .+?'>(\d{1,2} [\w^_]+ \d{4})"
def signin(self, user, password, data):
html = self.load(self.LOGIN_URL, cookies=self.COOKIES)
if re.search(self.LOGIN_SKIP_PATTERN, html):
self.skip_login()
html = self.load(self.PLUGIN_URL,
get={'op': "login",
'referer': "homepage"},
post={'login': user,
'password': password},
cookies=self.COOKIES)
if re.search(self.LOGIN_SKIP_PATTERN, html) is None:
self.fail_login() | unknown | codeparrot/codeparrot-clean | ||
---
---
I'm a file with dots. | unknown | github | https://github.com/jekyll/jekyll | test/source/_with.dots/file.with.dots.md |
#!/usr/bin/env python3
# GPL License, Version 3.0 or later
import os
import subprocess
import textwrap
import re
BASE_DIR = os.path.join(os.path.dirname(__file__), "..")
COMMAND_NAME = 'nirw-search'
def patch_help_test(help_output):
    """Rewrite argparse ``--help`` output into reStructuredText-friendly text.

    The "usage:" prefix becomes a ``usage::`` literal block, and the
    curly-brace choice list is swapped for angle brackets so it is not
    mistaken for markup.
    """
    # COMMAND_NAME is fixed to 'nirw-search' at the top of this script;
    # it is inlined here.
    replacements = (
        ('usage: nirw-search', 'usage::\n\n    nirw-search'),
        ('{auto,always,never}', '<auto,always,never>'),
    )
    for old, new in replacements:
        help_output = help_output.replace(old, new)
    return help_output
def main():
    """Regenerate the help-text section of readme.rst from `--help` output."""
    # Capture the command's --help output.
    proc = subprocess.run(
        [
            'python3',
            os.path.join(BASE_DIR, COMMAND_NAME),
            '--help',
        ],
        stdout=subprocess.PIPE,
    )
    help_output = proc.stdout.decode('utf-8').rstrip() + '\n\n'
    # Strip trailing space on every line (and at end of string).
    help_output = re.sub(r'[ \t]+(\n|\Z)', r'\1', help_output)
    help_output = patch_help_test(help_output)
    # Try write reStructuredText directly!
    # help_output = textwrap.indent(help_output, '    ')
    help_output = (
        '\nOutput of ``' + COMMAND_NAME + ' --help``\n\n' +
        help_output
    )

    with open('readme.rst', 'r', encoding='utf-8') as fh:
        data = fh.read()

    # Splice the regenerated text between the BEGIN/END markers.
    help_begin_text = '.. BEGIN HELP TEXT'
    help_end_text = '.. END HELP TEXT'
    help_begin_index = data.find(help_begin_text)
    help_end_index = data.find(help_end_text, help_begin_index)
    if help_begin_index == -1:
        print('Error: {!r} not found'.format(help_begin_text))
        return
    if help_end_index == -1:
        print('Error: {!r} not found'.format(help_end_text))
        return
    help_begin_index += len(help_begin_text) + 1

    data_update = data[:help_begin_index] + help_output + data[help_end_index:]
    with open('readme.rst', 'w', encoding='utf-8') as fh:
        fh.write(data_update)
if __name__ == "__main__":
main() | unknown | codeparrot/codeparrot-clean | ||
'use strict'
exports.index = function(req, res){
res.render('index', { title: 'Route Separation Example' });
}; | javascript | github | https://github.com/expressjs/express | examples/route-separation/site.js |
//// [tests/cases/conformance/async/es6/awaitClassExpression_es6.ts] ////
//// [awaitClassExpression_es6.ts]
declare class C { }
declare var p: Promise<typeof C>;
async function func(): Promise<void> {
class D extends (await p) {
}
}
//// [awaitClassExpression_es6.js]
"use strict";
function func() {
return __awaiter(this, void 0, void 0, function* () {
class D extends (yield p) {
}
});
} | javascript | github | https://github.com/microsoft/TypeScript | tests/baselines/reference/awaitClassExpression_es6.js |
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import Dict, Union, Iterable, Optional
from typing_extensions import Literal, Required, TypeAlias, TypedDict
from ..shared_params.metadata import Metadata
from ..shared.reasoning_effort import ReasoningEffort
from ..graders.grader_inputs_param import GraderInputsParam
from ..responses.easy_input_message_param import EasyInputMessageParam
from ..shared_params.response_format_text import ResponseFormatText
from ..responses.response_input_text_param import ResponseInputTextParam
from ..responses.response_input_audio_param import ResponseInputAudioParam
from ..chat.chat_completion_function_tool_param import ChatCompletionFunctionToolParam
from ..shared_params.response_format_json_object import ResponseFormatJSONObject
from ..shared_params.response_format_json_schema import ResponseFormatJSONSchema
# Public names re-exported by this generated module.
__all__ = [
    "CreateEvalCompletionsRunDataSourceParam",
    "Source",
    "SourceFileContent",
    "SourceFileContentContent",
    "SourceFileID",
    "SourceStoredCompletions",
    "InputMessages",
    "InputMessagesTemplate",
    "InputMessagesTemplateTemplate",
    "InputMessagesTemplateTemplateEvalItem",
    "InputMessagesTemplateTemplateEvalItemContent",
    "InputMessagesTemplateTemplateEvalItemContentOutputText",
    "InputMessagesTemplateTemplateEvalItemContentInputImage",
    "InputMessagesItemReference",
    "SamplingParams",
    "SamplingParamsResponseFormat",
]
class SourceFileContentContent(TypedDict, total=False):
    # One jsonl record: the datasource item plus an optional model sample.
    item: Required[Dict[str, object]]

    sample: Dict[str, object]


class SourceFileContent(TypedDict, total=False):
    content: Required[Iterable[SourceFileContentContent]]
    """The content of the jsonl file."""

    type: Required[Literal["file_content"]]
    """The type of jsonl source. Always `file_content`."""


class SourceFileID(TypedDict, total=False):
    id: Required[str]
    """The identifier of the file."""

    type: Required[Literal["file_id"]]
    """The type of jsonl source. Always `file_id`."""


class SourceStoredCompletions(TypedDict, total=False):
    """A StoredCompletionsRunDataSource configuration describing a set of filters"""

    type: Required[Literal["stored_completions"]]
    """The type of source. Always `stored_completions`."""

    created_after: Optional[int]
    """An optional Unix timestamp to filter items created after this time."""

    created_before: Optional[int]
    """An optional Unix timestamp to filter items created before this time."""

    limit: Optional[int]
    """An optional maximum number of items to return."""

    metadata: Optional[Metadata]
    """Set of 16 key-value pairs that can be attached to an object.

    This can be useful for storing additional information about the object in a
    structured format, and querying for objects via API or the dashboard.

    Keys are strings with a maximum length of 64 characters. Values are strings with
    a maximum length of 512 characters.
    """

    model: Optional[str]
    """An optional model to filter by (e.g., 'gpt-4o')."""


# Discriminated union over the `type` field of each source variant above.
Source: TypeAlias = Union[SourceFileContent, SourceFileID, SourceStoredCompletions]
class InputMessagesTemplateTemplateEvalItemContentOutputText(TypedDict, total=False):
    """A text output from the model."""

    text: Required[str]
    """The text output from the model."""

    type: Required[Literal["output_text"]]
    """The type of the output text. Always `output_text`."""


class InputMessagesTemplateTemplateEvalItemContentInputImage(TypedDict, total=False):
    """An image input block used within EvalItem content arrays."""

    image_url: Required[str]
    """The URL of the image input."""

    type: Required[Literal["input_image"]]
    """The type of the image input. Always `input_image`."""

    detail: str
    """The detail level of the image to be sent to the model.

    One of `high`, `low`, or `auto`. Defaults to `auto`.
    """


# Content may be a bare string or any of the typed content blocks.
InputMessagesTemplateTemplateEvalItemContent: TypeAlias = Union[
    str,
    ResponseInputTextParam,
    InputMessagesTemplateTemplateEvalItemContentOutputText,
    InputMessagesTemplateTemplateEvalItemContentInputImage,
    ResponseInputAudioParam,
    GraderInputsParam,
]


class InputMessagesTemplateTemplateEvalItem(TypedDict, total=False):
    """
    A message input to the model with a role indicating instruction following
    hierarchy. Instructions given with the `developer` or `system` role take
    precedence over instructions given with the `user` role. Messages with the
    `assistant` role are presumed to have been generated by the model in previous
    interactions.
    """

    content: Required[InputMessagesTemplateTemplateEvalItemContent]
    """Inputs to the model - can contain template strings.

    Supports text, output text, input images, and input audio, either as a single
    item or an array of items.
    """

    role: Required[Literal["user", "assistant", "system", "developer"]]
    """The role of the message input.

    One of `user`, `assistant`, `system`, or `developer`.
    """

    type: Literal["message"]
    """The type of the message input. Always `message`."""


InputMessagesTemplateTemplate: TypeAlias = Union[EasyInputMessageParam, InputMessagesTemplateTemplateEvalItem]


class InputMessagesTemplate(TypedDict, total=False):
    template: Required[Iterable[InputMessagesTemplateTemplate]]
    """A list of chat messages forming the prompt or context.

    May include variable references to the `item` namespace, ie {{item.name}}.
    """

    type: Required[Literal["template"]]
    """The type of input messages. Always `template`."""


class InputMessagesItemReference(TypedDict, total=False):
    item_reference: Required[str]
    """A reference to a variable in the `item` namespace. Ie, "item.input_trajectory" """

    type: Required[Literal["item_reference"]]
    """The type of input messages. Always `item_reference`."""


InputMessages: TypeAlias = Union[InputMessagesTemplate, InputMessagesItemReference]
# Response-format union mirroring the chat completions API.
SamplingParamsResponseFormat: TypeAlias = Union[ResponseFormatText, ResponseFormatJSONSchema, ResponseFormatJSONObject]


class SamplingParams(TypedDict, total=False):
    max_completion_tokens: int
    """The maximum number of tokens in the generated output."""

    reasoning_effort: Optional[ReasoningEffort]
    """
    Constrains effort on reasoning for
    [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
    supported values are `none`, `minimal`, `low`, `medium`, `high`, and `xhigh`.
    Reducing reasoning effort can result in faster responses and fewer tokens used
    on reasoning in a response.

    - `gpt-5.1` defaults to `none`, which does not perform reasoning. The supported
      reasoning values for `gpt-5.1` are `none`, `low`, `medium`, and `high`. Tool
      calls are supported for all reasoning values in gpt-5.1.
    - All models before `gpt-5.1` default to `medium` reasoning effort, and do not
      support `none`.
    - The `gpt-5-pro` model defaults to (and only supports) `high` reasoning effort.
    - `xhigh` is supported for all models after `gpt-5.1-codex-max`.
    """

    response_format: SamplingParamsResponseFormat
    """An object specifying the format that the model must output.

    Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
    Outputs which ensures the model will match your supplied JSON schema. Learn more
    in the
    [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).

    Setting to `{ "type": "json_object" }` enables the older JSON mode, which
    ensures the message the model generates is valid JSON. Using `json_schema` is
    preferred for models that support it.
    """

    seed: int
    """A seed value to initialize the randomness, during sampling."""

    temperature: float
    """A higher temperature increases randomness in the outputs."""

    tools: Iterable[ChatCompletionFunctionToolParam]
    """A list of tools the model may call.

    Currently, only functions are supported as a tool. Use this to provide a list of
    functions the model may generate JSON inputs for. A max of 128 functions are
    supported.
    """

    top_p: float
    """An alternative to temperature for nucleus sampling; 1.0 includes all tokens."""


class CreateEvalCompletionsRunDataSourceParam(TypedDict, total=False):
    """A CompletionsRunDataSource object describing a model sampling configuration."""

    source: Required[Source]
    """Determines what populates the `item` namespace in this run's data source."""

    type: Required[Literal["completions"]]
    """The type of run data source. Always `completions`."""

    input_messages: InputMessages
    """Used when sampling from a model.

    Dictates the structure of the messages passed into the model. Can either be a
    reference to a prebuilt trajectory (ie, `item.input_trajectory`), or a template
    with variable references to the `item` namespace.
    """

    model: str
    """The name of the model to use for generating completions (e.g. "o3-mini")."""

    sampling_params: SamplingParams
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Franck Cuny <franck@lumberjaph.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
description:
- Manage Perl library dependencies.
version_added: "1.6"
options:
name:
description:
- The name of the Perl library to install. You may use the "full distribution path", e.g. MIYAGAWA/Plack-0.99_05.tar.gz
required: false
default: null
aliases: ["pkg"]
from_path:
description:
- The local directory from where to install
required: false
default: null
notest:
description:
- Do not run unit tests
required: false
default: false
locallib:
description:
- Specify the install base to install modules
required: false
default: false
mirror:
description:
- Specifies the base URL for the CPAN mirror to use
required: false
default: false
mirror_only:
description:
- Use the mirror's index file instead of the CPAN Meta DB
required: false
default: false
installdeps:
description:
- Only install dependencies
required: false
default: false
version_added: "2.0"
version:
description:
- minimum version of perl module to consider acceptable
required: false
default: false
version_added: "2.1"
system_lib:
description:
- Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work.
- This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation.
required: false
default: false
version_added: "2.0"
aliases: ['use_sudo']
executable:
description:
- Override the path to the cpanm executable
required: false
default: null
version_added: "2.1"
notes:
- Please note that U(http://search.cpan.org/dist/App-cpanminus/bin/cpanm, cpanm) must be installed on the remote host.
author: "Franck Cuny (@franckcuny)"
'''
EXAMPLES = '''
# install Dancer perl package
- cpanm:
name: Dancer
# install version 0.99_05 of the Plack perl package
- cpanm:
name: MIYAGAWA/Plack-0.99_05.tar.gz
# install Dancer into the specified locallib
- cpanm:
name: Dancer
locallib: /srv/webapps/my_app/extlib
# install perl dependencies from local directory
- cpanm:
from_path: /srv/webapps/my_app/src/
# install Dancer perl package without running the unit tests in indicated locallib
- cpanm:
name: Dancer
notest: True
locallib: /srv/webapps/my_app/extlib
# install Dancer perl package from a specific mirror
- cpanm:
name: Dancer
mirror: 'http://cpan.cpantesters.org/'
# install Dancer perl package into the system root path
- cpanm:
name: Dancer
system_lib: yes
# install Dancer if it's not already installed
# OR the installed version is older than version 1.0
- cpanm:
name: Dancer
version: '1.0'
'''
def _is_package_installed(module, name, locallib, cpanm, version):
cmd = ""
if locallib:
os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib
cmd = "%s perl -e ' use %s" % (cmd, name)
if version:
cmd = "%s %s;'" % (cmd, version)
else:
cmd = "%s;'" % cmd
res, stdout, stderr = module.run_command(cmd, check_rc=False)
if res == 0:
return True
else:
return False
def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo):
# this code should use "%s" like everything else and just return early but not fixing all of it now.
# don't copy stuff like this
if from_path:
cmd = cpanm + " " + from_path
else:
cmd = cpanm + " " + name
if notest is True:
cmd = cmd + " -n"
if locallib is not None:
cmd = cmd + " -l " + locallib
if mirror is not None:
cmd = cmd + " --mirror " + mirror
if mirror_only is True:
cmd = cmd + " --mirror-only"
if installdeps is True:
cmd = cmd + " --installdeps"
if use_sudo is True:
cmd = cmd + " --sudo"
return cmd
def _get_cpanm_path(module):
if module.params['executable']:
return module.params['executable']
else:
return module.get_bin_path('cpanm', True)
def main():
    """Ansible entry point: ensure a Perl module is installed via cpanm."""
    arg_spec = dict(
        name=dict(default=None, required=False, aliases=['pkg']),
        from_path=dict(default=None, required=False, type='path'),
        notest=dict(default=False, type='bool'),
        locallib=dict(default=None, required=False, type='path'),
        mirror=dict(default=None, required=False),
        mirror_only=dict(default=False, type='bool'),
        installdeps=dict(default=False, type='bool'),
        system_lib=dict(default=False, type='bool', aliases=['use_sudo']),
        version=dict(default=None, required=False),
        executable=dict(required=False, type='path'),
    )
    module = AnsibleModule(
        argument_spec=arg_spec,
        required_one_of=[['name', 'from_path']],
    )

    cpanm = _get_cpanm_path(module)
    name = module.params['name']
    from_path = module.params['from_path']
    notest = module.boolean(module.params.get('notest', False))
    locallib = module.params['locallib']
    mirror = module.params['mirror']
    mirror_only = module.params['mirror_only']
    installdeps = module.params['installdeps']
    use_sudo = module.params['system_lib']
    version = module.params['version']

    changed = False

    # NOTE(review): when only from_path is supplied, name is None here, so the
    # probe command becomes "use None" and always fails -- confirm intended.
    installed = _is_package_installed(module, name, locallib, cpanm, version)

    if not installed:
        cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo)

        rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False)

        if rc_cpanm != 0:
            module.fail_json(msg=err_cpanm, cmd=cmd)

        # cpanm reports "... is up to date" when nothing actually changed.
        if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to date') == -1):
            changed = True

    module.exit_json(changed=changed, binary=cpanm, name=name)
# import module snippets
# Star import supplies AnsibleModule (and os) used above.
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """
    Used for gis.specific migration tests.
    """

    # NOTE(review): ForeignKey fields carry no on_delete, so this appears to
    # target a pre-2.0 Django API -- keep unchanged for the historical tests.
    operations = [
        # Neighborhood: named multipolygon region.
        migrations.CreateModel(
            name='Neighborhood',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100, unique=True)),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Household: point geometry (geography=True) tied to a Neighborhood.
        migrations.CreateModel(
            name='Household',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('neighborhood', models.ForeignKey(to='gis_migrations.Neighborhood', to_field='id', null=True)),
                ('address', models.CharField(max_length=100)),
                ('zip_code', models.IntegerField(null=True, blank=True)),
                ('geom', django.contrib.gis.db.models.fields.PointField(srid=4326, geography=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Family: plain (non-geometric) model.
        migrations.CreateModel(
            name='Family',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=100, unique=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Link each household to an optional family.
        migrations.AddField(
            model_name='household',
            name='family',
            field=models.ForeignKey(blank=True, to='gis_migrations.Family', null=True),
            preserve_default=True,
        ),
    ]
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import socket
import ssl
from thrift.transport import TSocket
from thrift.transport.TTransport import TTransportException
class TSSLSocket(TSocket.TSocket):
  """
  SSL implementation of client-side TSocket

  This class creates outbound sockets wrapped using the
  python standard ssl module for encrypted connections.

  The protocol used is set using the class variable
  SSL_VERSION, which must be one of ssl.PROTOCOL_* and
  defaults to ssl.PROTOCOL_TLSv1 for greatest security.
  """
  # NOTE(review): this module uses Python 2 syntax (`except ..., e`) and
  # TLSv1, which is considered weak by current standards -- any modernization
  # would need a py3/TLS1.2+ pass across the whole file.
  SSL_VERSION = ssl.PROTOCOL_TLSv1

  def __init__(self, host='localhost', port=9090, validate=True, ca_certs=None, unix_socket=None):
    """
    @param validate: Set to False to disable SSL certificate validation entirely.
    @type validate: bool
    @param ca_certs: Filename to the Certificate Authority pem file, possibly a
    file downloaded from: http://curl.haxx.se/ca/cacert.pem This is passed to
    the ssl_wrap function as the 'ca_certs' parameter.
    @type ca_certs: str

    Raises an IOError exception if validate is True and the ca_certs file is
    None, not present or unreadable.
    """
    self.validate = validate
    self.is_valid = False
    self.peercert = None
    if not validate:
      self.cert_reqs = ssl.CERT_NONE
    else:
      self.cert_reqs = ssl.CERT_REQUIRED
    self.ca_certs = ca_certs
    # Fail fast: a validating socket without a readable CA bundle is useless.
    if validate:
      if ca_certs is None or not os.access(ca_certs, os.R_OK):
        raise IOError('Certificate Authority ca_certs file "%s" is not readable, cannot validate SSL certificates.' % (ca_certs))
    TSocket.TSocket.__init__(self, host, port, unix_socket)

  def open(self):
    # Try every resolved address; only re-raise once the last candidate fails.
    try:
      res0 = self._resolveAddr()
      for res in res0:
        sock_family, sock_type = res[0:2]
        ip_port = res[4]
        plain_sock = socket.socket(sock_family, sock_type)
        # Wrap before connect so the TLS handshake runs during connect.
        self.handle = ssl.wrap_socket(plain_sock, ssl_version=self.SSL_VERSION,
                                      do_handshake_on_connect=True, ca_certs=self.ca_certs, cert_reqs=self.cert_reqs)
        self.handle.settimeout(self._timeout)
        try:
          self.handle.connect(ip_port)
        except socket.error, e:
          if res is not res0[-1]:
            continue
          else:
            raise e
        break
    except socket.error, e:
      if self._unix_socket:
        message = 'Could not connect to secure socket %s' % self._unix_socket
      else:
        message = 'Could not connect to %s:%d' % (self.host, self.port)
      raise TTransportException(type=TTransportException.NOT_OPEN, message=message)
    if self.validate:
      self._validate_cert()

  def _validate_cert(self):
    """internal method to validate the peer's SSL certificate, and to check the
    commonName of the certificate to ensure it matches the hostname we
    used to make this connection. Does not support subjectAltName records
    in certificates.

    raises TTransportException if the certificate fails validation."""
    cert = self.handle.getpeercert()
    self.peercert = cert
    if 'subject' not in cert:
      raise TTransportException(type=TTransportException.NOT_OPEN,
                                message='No SSL certificate found from %s:%s' % (self.host, self.port))
    fields = cert['subject']
    for field in fields:
      # ensure structure we get back is what we expect
      if not isinstance(field, tuple):
        continue
      cert_pair = field[0]
      if len(cert_pair) < 2:
        continue
      cert_key, cert_value = cert_pair[0:2]
      if cert_key != 'commonName':
        continue
      certhost = cert_value
      if certhost == self.host:
        # success, cert commonName matches desired hostname
        self.is_valid = True
        return
      else:
        raise TTransportException(type=TTransportException.UNKNOWN,
                                  message='Host name we connected to "%s" doesn\'t match certificate provided commonName "%s"' % (self.host, certhost))
    # No commonName entry matched at all.
    raise TTransportException(type=TTransportException.UNKNOWN,
                              message='Could not validate SSL certificate from host "%s". Cert=%s' % (self.host, cert))
class TSSLServerSocket(TSocket.TServerSocket):
  """
  SSL implementation of TServerSocket

  This uses the ssl module's wrap_socket() method to provide SSL
  negotiated encryption.
  """
  SSL_VERSION = ssl.PROTOCOL_TLSv1

  def __init__(self, host=None, port=9090, certfile='cert.pem', unix_socket=None):
    """Initialize a TSSLServerSocket

    @param certfile: The filename of the server certificate file, defaults to cert.pem
    @type certfile: str
    @param host: The hostname or IP to bind the listen socket to, i.e. 'localhost' for only allowing
    local network connections. Pass None to bind to all interfaces.
    @type host: str
    @param port: The port to listen on for inbound connections.
    @type port: int
    """
    # NOTE(review): unix_socket is accepted but never forwarded to
    # TServerSocket.__init__ -- confirm whether that is intentional.
    self.setCertfile(certfile)
    TSocket.TServerSocket.__init__(self, host, port)

  def setCertfile(self, certfile):
    """Set or change the server certificate file used to wrap new connections.

    @param certfile: The filename of the server certificate, i.e. '/etc/certs/server.pem'
    @type certfile: str

    Raises an IOError exception if the certfile is not present or unreadable.
    """
    if not os.access(certfile, os.R_OK):
      raise IOError('No such certfile found: %s' % (certfile))
    self.certfile = certfile

  def accept(self):
    # Accept a plain TCP connection, then negotiate SSL on top of it.
    plain_client, addr = self.handle.accept()
    try:
      client = ssl.wrap_socket(plain_client, certfile=self.certfile,
                               server_side=True, ssl_version=self.SSL_VERSION)
    except ssl.SSLError, ssl_exc:
      # failed handshake/ssl wrap, close socket to client
      plain_client.close()
      # raise ssl_exc
      # We can't raise the exception, because it kills most TServer derived serve()
      # methods.
      # Instead, return None, and let the TServer instance deal with it in
      # other exception handling. (but TSimpleServer dies anyway)
      return None
    result = TSocket.TSocket()
    result.setHandle(client)
    return result
Hi!
Thanks very much for your interest in Ansible. It means a lot to us.
This appears to be something that should be filed against another project or bug tracker. Here's why:
* FILL IN
<< CHOOSE AS APPROPRIATE >>
* <https://github.com/ansible/ansible-lint>
* <https://github.com/ansible/ansible-runner>
* <https://github.com/ansible/ansible-navigator>
* <https://github.com/ansible-community/antsibull>
* <https://github.com/ansible-community/ara>
* <https://github.com/ansible/awx>
* <https://github.com/ansible-collections/community.general>
* <https://github.com/ansible-community/molecule>
* For AAP Customer issues please see <https://docs.ansible.com/ansible/latest/community/communication.html#ansible-automation-platform-support-questions>
If you can stop by the tracker or forum for one of those projects, we'd appreciate it.
Because this project is very active, we're unlikely to see comments made on closed tickets and we lock them after some time.
Should you still wish to discuss things further, or if you disagree with our thought process, please stop by one of our community channels:
* [ansible-core on the Ansible Forum](https://forum.ansible.com/tag/ansible-core)
* Matrix: [#devel:ansible.im](https://matrix.to/#/#devel:ansible.im)
We'd be happy to discuss things.
See this page for a complete list of communication channels and their purposes:
* <https://docs.ansible.com/ansible/latest/community/communication.html>
Thank you once again! | unknown | github | https://github.com/ansible/ansible | hacking/ticket_stubs/bug_wrong_repo.md |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.requests;
import org.apache.kafka.common.Uuid;
import org.apache.kafka.common.message.ShareGroupHeartbeatResponseData;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.protocol.Readable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Possible error codes.
*
* - {@link Errors#GROUP_AUTHORIZATION_FAILED}
* - {@link Errors#NOT_COORDINATOR}
* - {@link Errors#COORDINATOR_NOT_AVAILABLE}
* - {@link Errors#COORDINATOR_LOAD_IN_PROGRESS}
* - {@link Errors#INVALID_REQUEST}
* - {@link Errors#UNKNOWN_MEMBER_ID}
* - {@link Errors#GROUP_MAX_SIZE_REACHED}
* - {@link Errors#TOPIC_AUTHORIZATION_FAILED}
*/
public class ShareGroupHeartbeatResponse extends AbstractResponse {
private final ShareGroupHeartbeatResponseData data;
public ShareGroupHeartbeatResponse(ShareGroupHeartbeatResponseData data) {
super(ApiKeys.SHARE_GROUP_HEARTBEAT);
this.data = data;
}
@Override
public ShareGroupHeartbeatResponseData data() {
return data;
}
@Override
public Map<Errors, Integer> errorCounts() {
return Collections.singletonMap(Errors.forCode(data.errorCode()), 1);
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
public static ShareGroupHeartbeatResponse parse(Readable readable, short version) {
return new ShareGroupHeartbeatResponse(new ShareGroupHeartbeatResponseData(
readable, version));
}
public static ShareGroupHeartbeatResponseData.Assignment createAssignment(
Map<Uuid, Set<Integer>> assignment
) {
List<ShareGroupHeartbeatResponseData.TopicPartitions> topicPartitions = assignment.entrySet().stream()
.map(keyValue -> new ShareGroupHeartbeatResponseData.TopicPartitions()
.setTopicId(keyValue.getKey())
.setPartitions(new ArrayList<>(keyValue.getValue())))
.collect(Collectors.toList());
return new ShareGroupHeartbeatResponseData.Assignment()
.setTopicPartitions(topicPartitions);
}
} | java | github | https://github.com/apache/kafka | clients/src/main/java/org/apache/kafka/common/requests/ShareGroupHeartbeatResponse.java |
"""
TV module
Name: TV.py
Description: responds to common TV commands (turn up/down volume,
change input, turn on/off, etc)
Dependencies: python-cec library
CEC compatible TV and controller (Raspberry Pi)
Author: Brad Ahlers (github - brad999)
"""
import cec
import re
WORDS = ["VOLUME", "UP", "DOWN", "TV", "TELEVISION", "POWER", "ON", "OFF"]
def handle(text, mic, profile):
    """
    Responds to user-input, typically speech text, by switching a
    CEC-compatible TV on or off.

    Arguments:
    text -- user-input, typically transcribed speech
    mic -- used to interact with the user (for both input and output)
    profile -- contains information related to the user

    Bug fixes over the previous version:
    - `'on' in text.lower()` matched substrings, so "turn off the televisiON"
      powered the TV on; whole-word matching avoids that.
    - The old elif condition was a copy-paste duplicate
      (`'off' in ... or 'off' in ...`).
    """
    cec.init()
    tv = cec.Device(0)

    # !! add volume and mute functions
    if re.search(r'\bon\b', text, re.IGNORECASE):
        tv.power_on()
        mic.say('I', "TV powered on.")
    elif re.search(r'\boff\b', text, re.IGNORECASE):
        tv.standby()
        mic.say('I', "TV powered off.")
    else:
        mic.say('A', "I'm sorry that command is not currently supported")
def isValid(text):
    """
    Returns True if the input is related to TV.

    Arguments:
    text -- user-input, typically transcribed speech
    """
    # Whole-word, case-insensitive match on "tv" or "television".
    pattern = re.compile(r'\b(tv|television)\b', re.IGNORECASE)
    return pattern.search(text) is not None
#!/usr/bin/env python
# pep8.py - Check Python source code formatting, according to PEP 8
# Copyright (C) 2006 Johann C. Rocholl <johann@rocholl.net>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
r"""
Check Python source code formatting, according to PEP 8:
http://www.python.org/dev/peps/pep-0008/
For usage and a list of options, try this:
$ python pep8.py -h
This program and its regression test suite live here:
http://github.com/jcrocholl/pep8
Groups of errors and warnings:
E errors
W warnings
100 indentation
200 whitespace
300 blank lines
400 imports
500 line length
600 deprecation
700 statements
900 syntax error
You can add checks to this program by writing plugins. Each plugin is
a simple function that is called for each line of source code, either
physical or logical.
Physical line:
- Raw line of text from the input file.
Logical line:
- Multi-line statements converted to a single line.
- Stripped left and right.
- Contents of strings replaced with 'xxx' of same length.
- Comments removed.
The check function requests physical or logical lines by the name of
the first argument:
def maximum_line_length(physical_line)
def extraneous_whitespace(logical_line)
def blank_lines(logical_line, blank_lines, indent_level, line_number)
The last example above demonstrates how check plugins can request
additional information with extra arguments. All attributes of the
Checker object are available. Some examples:
lines: a list of the raw lines from the input file
tokens: the tokens that contribute to this logical line
line_number: line number in the input file
blank_lines: blank lines before this one
indent_char: first indentation character in this file (' ' or '\t')
indent_level: indentation (with tabs expanded to multiples of 8)
previous_indent_level: indentation on previous line
previous_logical: previous logical line
The docstring of each check function shall be the relevant part of
text from PEP 8. It is printed if the user enables --show-pep8.
Several docstrings contain examples directly from the PEP 8 document.
Okay: spam(ham[1], {eggs: 2})
E201: spam( ham[1], {eggs: 2})
These examples are verified automatically when pep8.py is run with the
--doctest option. You can add examples for your own check functions.
The format is simple: "Okay" or error/warning code followed by colon
and space, the rest of the line is example source code. If you put 'r'
before the docstring, you can use \n for newline, \t for tab and \s
for space.
"""
__version__ = '1.3.3'
import os
import sys
import re
import time
import inspect
import keyword
import tokenize
from optparse import OptionParser
from fnmatch import fnmatch
try:
from ConfigParser import RawConfigParser
except ImportError:
from configparser import RawConfigParser
from io import TextIOWrapper
# Default configuration: directory names skipped during recursive checking,
# and error codes ignored unless overridden on the command line.
DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git'
DEFAULT_IGNORE = 'E24'
# Per-user configuration file location (home dir on Windows, XDG config
# directory elsewhere).
if sys.platform == 'win32':
    DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8')
else:
    DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or
                                  os.path.expanduser('~/.config'), 'pep8')

MAX_LINE_LENGTH = 80
# Report templates keyed by the --format option value.
REPORT_FORMAT = {
    'default': '%(path)s:%(row)d:%(col)d: %(code)s %(text)s',
    'pylint': '%(path)s:%(row)d: [%(code)s] %(text)s',
}

SINGLETONS = frozenset(['False', 'None', 'True'])
# Keywords subject to whitespace checks; 'print' is a keyword only on
# Python 2, so it is added explicitly.
KEYWORDS = frozenset(keyword.kwlist + ['print']) - SINGLETONS
BINARY_OPERATORS = frozenset([
    '**=', '*=', '+=', '-=', '!=', '<>',
    '%=', '^=', '&=', '|=', '==', '/=', '//=', '<=', '>=', '<<=', '>>=',
    '%', '^', '&', '|', '=', '/', '//', '<', '>', '<<'])
# Operators that may also appear in unary position (e.g. -x, *args, **kw).
UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-'])
OPERATORS = BINARY_OPERATORS | UNARY_OPERATORS
WHITESPACE = frozenset(' \t')
# Token types that never contribute text to a logical line.
SKIP_TOKENS = frozenset([tokenize.COMMENT, tokenize.NL, tokenize.NEWLINE,
                         tokenize.INDENT, tokenize.DEDENT])
BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines']

# Precompiled regular expressions used by the individual checks below.
INDENT_REGEX = re.compile(r'([ \t]*)')
RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*(,)')
RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,\s*\w+\s*,\s*\w+')
SELFTEST_REGEX = re.compile(r'(Okay|[EW]\d{3}):\s(.*)')
ERRORCODE_REGEX = re.compile(r'[EW]\d{3}')
DOCSTRING_REGEX = re.compile(r'u?r?["\']')
EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]')
# NOTE(review): upstream pep8 uses two spaces in the alternation here,
# r'[,;:]\s*(?:  |\t)'; this copy may have lost a space -- verify, otherwise
# E241 would flag a single space after a comma.
WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)')
COMPARE_SINGLETON_REGEX = re.compile(r'([=!]=)\s*(None|False|True)')
COMPARE_TYPE_REGEX = re.compile(r'([=!]=|is|is\s+not)\s*type(?:s\.(\w+)Type'
                                r'|\(\s*(\(\s*\)|[^)]*[^ )])\s*\))')
KEYWORD_REGEX = re.compile(r'(?:[^\s])(\s*)\b(?:%s)\b(\s*)' %
                           r'|'.join(KEYWORDS))
OPERATOR_REGEX = re.compile(r'(?:[^\s])(\s*)(?:[-+*/|!<=>%&^]+)(\s*)')
LAMBDA_REGEX = re.compile(r'\blambda\b')
HUNK_REGEX = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@.*$')

# Work around Python < 2.6 behaviour, which does not generate NL after
# a comment which is on a line by itself.
COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
##############################################################################
# Plugins (check functions) for physical lines
##############################################################################
def tabs_or_spaces(physical_line, indent_char):
    r"""
    Never mix tabs and spaces.

    Indent with the file's dominant indentation character only; a line
    whose leading whitespace mixes the two is reported as E101 at the
    first offending column.

    Okay: if a == 0:\n        a = 1\n        b = 1
    E101: if a == 0:\n        a = 1\n\tb = 1
    """
    leading = INDENT_REGEX.match(physical_line).group(1)
    mismatches = [pos for pos, ch in enumerate(leading) if ch != indent_char]
    if mismatches:
        return mismatches[0], "E101 indentation contains mixed spaces and tabs"
def tabs_obsolete(physical_line):
    r"""
    For new projects, spaces-only are strongly recommended over tabs.

    Any tab character found in the leading indentation is reported as
    W191 at its column.

    Okay: if True:\n    return
    W191: if True:\n\treturn
    """
    leading = INDENT_REGEX.match(physical_line).group(1)
    tab_pos = leading.find('\t')
    if tab_pos != -1:
        return tab_pos, "W191 indentation contains tabs"
def trailing_whitespace(physical_line):
    r"""
    JCR: Trailing whitespace is superfluous.

    The line terminator characters (newline, carriage return, form feed)
    are stripped first; any remaining trailing blanks are then reported:
    W291 on a line with content, W293 on a line that is whitespace only.
    Matching a blank line to its indentation level avoids mistakenly
    terminating a multi-line statement when pasting code into the
    standard Python interpreter.

    Okay: spam(1)
    W291: spam(1)\s
    W293: class Foo(object):\n    \n    bang = 12
    """
    # Remove the physical line terminators before inspecting the tail.
    for terminator in ('\n', '\r', '\x0c'):
        physical_line = physical_line.rstrip(terminator)
    stripped = physical_line.rstrip(' \t\v')
    if stripped == physical_line:
        return None
    if stripped:
        return len(stripped), "W291 trailing whitespace"
    return 0, "W293 blank line contains whitespace"
#def trailing_blank_lines(physical_line, lines, line_number):
# r"""
# JCR: Trailing blank lines are superfluous.
#
# Okay: spam(1)
# W391: spam(1)\n
# """
# if not physical_line.rstrip() and line_number == len(lines):
# return 0, "W391 blank line at end of file"
def missing_newline(physical_line):
    """
    JCR: The last line should have a newline.

    Reports W292 when stripping trailing whitespace changes nothing,
    i.e. the physical line carries no line terminator at all.
    """
    if physical_line == physical_line.rstrip():
        return len(physical_line), "W292 no newline at end of file"
def maximum_line_length(physical_line, max_line_length):
    """
    Limit all lines to a maximum of 79 characters.

    There are still many devices around that are limited to 80 character
    lines, and limiting windows to 80 characters makes it possible to
    have several side-by-side.  Long lines are reported as E501.  On
    Python 2 the line is decoded as UTF-8 first so that multi-byte
    characters are counted once; if decoding fails the byte length is
    kept.
    """
    stripped = physical_line.rstrip()
    length = len(stripped)
    if length <= max_line_length:
        return None
    if hasattr(stripped, 'decode'):  # Python 2 byte string
        # The line could contain multi-byte characters.
        try:
            length = len(stripped.decode('utf-8'))
        except UnicodeError:
            pass
    if length > max_line_length:
        return (max_line_length, "E501 line too long "
                "(%d > %d characters)" % (length, max_line_length))
##############################################################################
# Plugins (check functions) for logical lines
##############################################################################
def blank_lines(logical_line, blank_lines, indent_level, line_number,
                previous_logical, previous_indent_level):
    r"""
    Separate top-level function and class definitions with two blank lines.
    Method definitions inside a class are separated by a single blank line.
    Extra blank lines may be used (sparingly) to separate groups of related
    functions.  Blank lines may be omitted between a bunch of related
    one-liners (e.g. a set of dummy implementations).
    Use blank lines in functions, sparingly, to indicate logical sections.

    Okay: def a():\n    pass\n\n\ndef b():\n    pass
    Okay: def a():\n    pass\n\n\n# Foo\n# Bar\n\ndef b():\n    pass
    E301: class Foo:\n    b = 0\n    def bar():\n        pass
    E302: def a():\n    pass\n\ndef b(n):\n    pass
    E303: def a():\n    pass\n\n\n\ndef b(n):\n    pass
    E303: def a():\n\n\n\n    pass
    E304: @decorator\n\ndef a():\n    pass
    """
    if line_number == 1:
        return  # Don't expect blank lines before the first line
    if previous_logical.startswith('@'):
        # A decorator must be immediately followed by the decorated def.
        if blank_lines:
            yield 0, "E304 blank lines found after function decorator"
    elif blank_lines > 2 or (indent_level and blank_lines == 2):
        # More than two blank lines anywhere, or two inside a nested scope.
        yield 0, "E303 too many blank lines (%d)" % blank_lines
    elif logical_line.startswith(('def ', 'class ', '@')):
        if indent_level:
            # Nested definition (e.g. a method): one separating blank line
            # expected, unless the scope was just opened or the previous
            # logical line is a docstring.
            if not (blank_lines or previous_indent_level < indent_level or
                    DOCSTRING_REGEX.match(previous_logical)):
                yield 0, "E301 expected 1 blank line, found 0"
        elif blank_lines != 2:
            # Top-level definition: exactly two blank lines expected.
            yield 0, "E302 expected 2 blank lines, found %d" % blank_lines
def extraneous_whitespace(logical_line):
    """
    Avoid extraneous whitespace in the following situations:

    - Immediately inside parentheses, brackets or braces (E201/E202).
    - Immediately before a comma, semicolon, or colon (E203).

    Okay: spam(ham[1], {eggs: 2})
    E201: spam( ham[1], {eggs: 2})
    E202: spam(ham[1], {eggs: 2} )
    E203: if x == 4 : print x, y; x, y = y, x
    """
    for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(logical_line):
        text = match.group()
        offset = match.start()
        symbol = text.strip()
        if text.endswith(' '):
            # An opening bracket followed by a space.
            yield offset + 1, "E201 whitespace after '%s'" % symbol
        elif logical_line[offset - 1] != ',':
            # A space before a closing bracket or before ',', ';' or ':'
            # (a preceding comma is tolerated, e.g. "(3, )" handled there).
            code = 'E202' if symbol in '}])' else 'E203'
            yield offset, "%s whitespace before '%s'" % (code, symbol)
def whitespace_around_keywords(logical_line):
    r"""
    Avoid extraneous whitespace around keywords.

    Okay: True and False
    E271: multiple spaces after a keyword
    E272: multiple spaces before a keyword
    E273: True and\tFalse
    E274: True\tand False
    """
    for match in KEYWORD_REGEX.finditer(logical_line):
        gaps = (
            (match.group(1), match.start(1),
             "E274 tab before keyword",
             "E272 multiple spaces before keyword"),
            (match.group(2), match.start(2),
             "E273 tab after keyword",
             "E271 multiple spaces after keyword"),
        )
        for gap, position, tab_message, spaces_message in gaps:
            if '\t' in gap:
                yield position, tab_message
            elif len(gap) > 1:
                yield position, spaces_message
def missing_whitespace(logical_line):
    """
    JCR: Each comma, semicolon or colon should be followed by whitespace.

    Okay: [a, b]
    Okay: (3,)
    Okay: a[1:4]
    Okay: a[:4]
    Okay: a[1:]
    Okay: a[1:4:2]
    E231: ['a','b']
    E231: foo(bar,baz)
    """
    for index, char in enumerate(logical_line[:-1]):
        if char not in ',;:':
            continue
        following = logical_line[index + 1]
        if following in WHITESPACE:
            continue
        if (char == ':' and
                logical_line.count('[', 0, index) >
                logical_line.count(']', 0, index)):
            continue  # Slice syntax, no space required
        if char == ',' and following == ')':
            continue  # Allow tuple with only one element: (3,)
        yield index, "E231 missing whitespace after '%s'" % char
def indentation(logical_line, previous_logical, indent_char,
                indent_level, previous_indent_level):
    r"""
    Use 4 spaces per indentation level.

    E111: space-indented line whose indent is not a multiple of four
    E112: no indented block after a line ending in ':'
    E113: indentation increased although none was expected

    Okay: a = 1
    Okay: if a == 0:\n    a = 1
    E111: a = 1
    """
    if indent_char == ' ' and indent_level % 4 != 0:
        yield 0, "E111 indentation is not a multiple of four"
    expects_indent = previous_logical.endswith(':')
    deeper = indent_level > previous_indent_level
    # The two conditions below are mutually exclusive (they disagree on
    # expects_indent), so an elif chain is equivalent to two ifs.
    if expects_indent and not deeper:
        yield 0, "E112 expected an indented block"
    elif deeper and not expects_indent:
        yield 0, "E113 unexpected indentation"
def continuation_line_indentation(logical_line, tokens, indent_level, verbose):
    r"""
    Continuation lines should align wrapped elements either vertically using
    Python's implicit line joining inside parentheses, brackets and braces, or
    using a hanging indent.

    When using a hanging indent the following considerations should be applied:
    - there should be no arguments on the first line, and
    - further indentation should be used to clearly distinguish itself as a
      continuation line.

    Okay: a = (\n)
    E123: a = (\n    )
    Okay: a = (\n    42)
    E121: a = (\n   42)
    E122: a = (\n42)
    E123: a = (\n    42\n    )
    E124: a = (24,\n     42\n)
    E125: if (a or\n    b):\n    pass
    E126: a = (\n        42)
    E127: a = (24,\n      42)
    E128: a = (24,\n    42)
    """
    first_row = tokens[0][2][0]
    nrows = 1 + tokens[-1][2][0] - first_row
    if nrows == 1:
        # Single physical line: nothing to check.
        return

    # indent_next tells us whether the next block is indented; assuming
    # that it is indented by 4 spaces, then we should not allow 4-space
    # indents on the final continuation line; in turn, some other
    # indents are allowed to have an extra 4 spaces.
    indent_next = logical_line.endswith(':')

    row = depth = 0
    # remember how many brackets were opened on each line
    parens = [0] * nrows
    # relative indents of physical lines
    rel_indent = [0] * nrows
    # visual indents: per bracket depth, the column tokens should align to
    # (0 means "no visual indent established yet")
    indent = [indent_level]
    # candidate columns for alignment: True = verified visual indent,
    # str = string-concatenation alignment, other = token text to line up with
    indent_chances = {}
    last_indent = (0, 0)
    if verbose >= 3:
        print((">>> " + tokens[0][4].rstrip()))

    for token_type, text, start, end, line in tokens:
        newline = row < start[0] - first_row
        if newline:
            row = start[0] - first_row
            # NL/NEWLINE tokens and continuations of a multi-line token do
            # not start a new continuation line.  (last_token_multiline is
            # assigned at the end of every iteration, so it is always set
            # before this point can be reached.)
            newline = (not last_token_multiline and
                       token_type not in (tokenize.NL, tokenize.NEWLINE))

        if newline:
            # this is the beginning of a continuation line.
            last_indent = start
            if verbose >= 3:
                print(("... " + line.rstrip()))

            # record the initial indent.
            rel_indent[row] = start[1] - indent_level

            if depth:
                # a bracket expression in a continuation line.
                # find the line that it was opened on
                for open_row in range(row - 1, -1, -1):
                    if parens[open_row]:
                        break
            else:
                # an unbracketed continuation line (ie, backslash)
                open_row = 0
            hang = rel_indent[row] - rel_indent[open_row]
            visual_indent = indent_chances.get(start[1])

            if token_type == tokenize.OP and text in ']})':
                # this line starts with a closing bracket
                if indent[depth]:
                    if start[1] != indent[depth]:
                        yield (start, 'E124 closing bracket does not match '
                               'visual indentation')
                elif hang:
                    yield (start, 'E123 closing bracket does not match '
                           'indentation of opening bracket\'s line')
            elif visual_indent is True:
                # visual indent is verified
                if not indent[depth]:
                    indent[depth] = start[1]
            elif visual_indent in (text, str):
                # ignore token lined up with matching one from a previous line
                pass
            elif indent[depth] and start[1] < indent[depth]:
                # visual indent is broken
                yield (start, 'E128 continuation line '
                       'under-indented for visual indent')
            elif hang == 4 or (indent_next and rel_indent[row] == 8):
                # hanging indent is verified
                pass
            else:
                # indent is broken
                if hang <= 0:
                    error = 'E122', 'missing indentation or outdented'
                elif indent[depth]:
                    error = 'E127', 'over-indented for visual indent'
                elif hang % 4:
                    error = 'E121', 'indentation is not a multiple of four'
                else:
                    error = 'E126', 'over-indented for hanging indent'
                yield start, "%s continuation line %s" % error

        # look for visual indenting
        if parens[row] and token_type != tokenize.NL and not indent[depth]:
            indent[depth] = start[1]
            indent_chances[start[1]] = True
            if verbose >= 4:
                print(("bracket depth %s indent to %s" % (depth, start[1])))
        # deal with implicit string concatenation
        elif token_type == tokenize.STRING or text in ('u', 'ur', 'b', 'br'):
            indent_chances[start[1]] = str

        # keep track of bracket depth
        if token_type == tokenize.OP:
            if text in '([{':
                depth += 1
                indent.append(0)
                parens[row] += 1
                if verbose >= 4:
                    print(("bracket depth %s seen, col %s, visual min = %s" %
                           (depth, start[1], indent[depth])))
            elif text in ')]}' and depth > 0:
                # parent indents should not be more than this one
                prev_indent = indent.pop() or last_indent[1]
                for d in range(depth):
                    if indent[d] > prev_indent:
                        indent[d] = 0
                for ind in list(indent_chances):
                    if ind >= prev_indent:
                        del indent_chances[ind]
                depth -= 1
                if depth:
                    indent_chances[indent[depth]] = True
                # decrement the open-bracket count of the most recent row
                # that still has one
                for idx in range(row, -1, -1):
                    if parens[idx]:
                        parens[idx] -= 1
                        break
            assert len(indent) == depth + 1
            if start[1] not in indent_chances:
                # allow to line up tokens
                indent_chances[start[1]] = text

        last_token_multiline = (start[0] != end[0])

    if indent_next and rel_indent[-1] == 4:
        yield (last_indent, "E125 continuation line does not distinguish "
               "itself from next logical line")
def whitespace_before_parameters(logical_line, tokens):
"""
Avoid extraneous whitespace in the following situations:
- Immediately before the open parenthesis that starts the argument
list of a function call.
- Immediately before the open parenthesis that starts an indexing or
slicing.
Okay: spam(1)
E211: spam (1)
Okay: dict['key'] = list[index]
E211: dict ['key'] = list[index]
E211: dict['key'] = list [index]
"""
prev_type = tokens[0][0]
prev_text = tokens[0][1]
prev_end = tokens[0][3]
for index in range(1, len(tokens)):
token_type, text, start, end, line = tokens[index]
if (token_type == tokenize.OP and
text in '([' and
start != prev_end and
(prev_type == tokenize.NAME or prev_text in '}])') and
# Syntax "class A (B):" is allowed, but avoid it
(index < 2 or tokens[index - 2][1] != 'class') and
# Allow "return (a.foo for a in range(5))"
not keyword.iskeyword(prev_text)):
yield prev_end, "E211 whitespace before '%s'" % text
prev_type = token_type
prev_text = text
prev_end = end
def whitespace_around_operator(logical_line):
    r"""
    Avoid more than one space (or any tab) around an operator, e.g. when
    aligning assignments with their neighbours.

    Okay: a = 12 + 3
    E221: multiple spaces before an operator
    E222: multiple spaces after an operator
    E223: a = 4\t+ 5
    E224: a = 4 +\t5
    """
    for match in OPERATOR_REGEX.finditer(logical_line):
        gaps = (
            (1, "E223 tab before operator",
             "E221 multiple spaces before operator"),
            (2, "E224 tab after operator",
             "E222 multiple spaces after operator"),
        )
        for group, tab_message, spaces_message in gaps:
            gap = match.group(group)
            if '\t' in gap:
                yield match.start(group), tab_message
            elif len(gap) > 1:
                yield match.start(group), spaces_message
def missing_whitespace_around_operator(logical_line, tokens):
    r"""
    - Always surround these binary operators with a single space on
      either side: assignment (=), augmented assignment (+=, -= etc.),
      comparisons (==, <, >, !=, <>, <=, >=, in, not in, is, is not),
      Booleans (and, or, not).
    - Use spaces around arithmetic operators.

    Okay: i = i + 1
    Okay: submitted += 1
    Okay: x = x * 2 - 1
    Okay: c = (a + b) * (a - b)
    Okay: foo(bar, key='word', *args, **kwargs)
    Okay: baz(**kwargs)
    Okay: negative = -1
    Okay: alpha[:-i]
    Okay: lambda *args, **kw: (args, kw)
    E225: i=i+1
    E225: submitted +=1
    E225: x = x*2 - 1
    E225: c = (a+b) * (a-b)
    E225: z = x **y
    """
    # State machine: `need_space` is set when an operator has been seen
    # glued to its left operand; the next token decides whether E225 fires.
    parens = 0
    need_space = False
    prev_type = tokenize.OP
    prev_text = prev_end = None
    for token_type, text, start, end, line in tokens:
        if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN):
            # ERRORTOKEN is triggered by backticks in Python 3000
            continue
        # Track parenthesis depth so '=' inside a call (keyword argument /
        # default value) can be exempted; 'lambda' opens a defaults scope.
        if text in ('(', 'lambda'):
            parens += 1
        elif text == ')':
            parens -= 1
        if need_space:
            if start != prev_end:
                # Whitespace found after the operator: requirement met.
                need_space = False
            elif text == '>' and prev_text in ('<', '-'):
                # Tolerate the "<>" operator, even if running Python 3
                # Deal with Python 3's annotated return value "->"
                pass
            else:
                yield prev_end, "E225 missing whitespace around operator"
                need_space = False
        elif token_type == tokenize.OP and prev_end is not None:
            if text == '=' and parens:
                # Allow keyword args or defaults: foo(bar=None).
                pass
            elif text in BINARY_OPERATORS:
                need_space = True
            elif text in UNARY_OPERATORS:
                # Allow unary operators: -123, -x, +1.
                # Allow argument unpacking: foo(*args, **kwargs).
                # The operator is treated as binary (space required) only
                # when the previous token looks like an operand.
                if prev_type == tokenize.OP:
                    if prev_text in '}])':
                        need_space = True
                elif prev_type == tokenize.NAME:
                    if prev_text not in KEYWORDS:
                        need_space = True
                elif prev_type not in SKIP_TOKENS:
                    need_space = True
            if need_space and start == prev_end:
                # The operator is glued to its left operand.
                yield prev_end, "E225 missing whitespace around operator"
                need_space = False
        prev_type = token_type
        prev_text = text
        prev_end = end
def whitespace_around_comma(logical_line):
    r"""
    Avoid more than one space (E241) or a tab (E242) after a comma,
    semicolon or colon.

    Note: these checks are disabled by default

    Okay: a = (1, 2)
    E242: a = (1,\t2)
    """
    for match in WHITESPACE_AFTER_COMMA_REGEX.finditer(logical_line):
        separator = match.group()[0]
        position = match.start() + 1
        if '\t' in match.group():
            yield position, "E242 tab after '%s'" % separator
        else:
            yield position, "E241 multiple spaces after '%s'" % separator
def whitespace_around_named_parameter_equals(logical_line, tokens):
    """
    Don't use spaces around the '=' sign when used to indicate a
    keyword argument or a default parameter value (E251).

    Okay: def complex(real, imag=0.0):
    Okay: return magic(r=real, i=imag)
    Okay: boolean(a == b)
    E251: def complex(real, imag = 0.0):
    E251: return magic(r = real, i = imag)
    """
    message = "E251 no spaces around keyword / parameter equals"
    paren_depth = 0
    expect_adjacent = False  # set right after a parenthesised '='
    prev_end = None
    for token_type, text, start, end, line in tokens:
        if expect_adjacent:
            expect_adjacent = False
            # Whitespace between '=' and the following token.
            if start != prev_end:
                yield prev_end, message
        elif token_type == tokenize.OP:
            if text == '(':
                paren_depth += 1
            elif text == ')':
                paren_depth -= 1
            elif text == '=' and paren_depth:
                expect_adjacent = True
                # Whitespace between the preceding token and '='.
                if start != prev_end:
                    yield prev_end, message
        prev_end = end
def whitespace_before_inline_comment(logical_line, tokens):
    """
    Separate inline comments by at least two spaces.

    An inline comment is a comment on the same line as a statement.  Inline
    comments should be separated by at least two spaces from the statement
    (E261) and should start with a '#' and exactly one space (E262).

    Okay: x = x + 1  # Increment x
    E261: x = x + 1 # Increment x
    E262: x = x + 1  #Increment x
    E262: x = x + 1  #  Increment x
    """
    prev_end = (0, 0)
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            if not line[:start[1]].strip():
                # Comment on its own line: not an inline comment.
                continue
            if prev_end[0] == start[0] and start[1] < prev_end[1] + 2:
                yield (prev_end,
                       "E261 at least two spaces before inline comment")
            # BUG FIX: the condition was the tautology
            # "text.startswith('# ') or not text.startswith('# ')", which
            # flagged EVERY inline comment as E262.  The check applies only
            # when the comment does not start with exactly '# ' (no space,
            # or more than one space, after the hash).
            if text.startswith('#  ') or not text.startswith('# '):
                yield start, "E262 inline comment should start with '# '"
        elif token_type != tokenize.NL:
            prev_end = end
def imports_on_separate_lines(logical_line):
    r"""
    Imports should usually be on separate lines (E401).

    Okay: import os\nimport sys
    E401: import sys, os
    Okay: from subprocess import Popen, PIPE
    Okay: from myclas import MyClass
    Okay: import foo.bar.yourclass
    """
    if logical_line.startswith('import '):
        comma = logical_line.find(',')
        if comma >= 0:
            yield comma, "E401 multiple imports on one line"
def compound_statements(logical_line):
    r"""
    Compound statements (multiple statements on the same line) are
    generally discouraged.

    E701: a colon introducing an inline suite ("if x: do()").
    E702: a semicolon separating statements ("do_one(); do_two()").

    Okay: if foo == 'blah':\n    do_blah_thing()
    Okay: do_one()
    E701: if foo == 'blah': do_blah_thing()
    E701: for x in lst: total += x
    E701: try: something()
    E702: do_one(); do_two(); do_three()
    """
    colon = logical_line.find(':')
    if -1 < colon < len(logical_line) - 1:
        head = logical_line[:colon]
        # Ignore colons that belong to dict literals, slices, Python 3
        # annotations, or lambda expressions.
        brackets_closed = all(
            head.count(open_ch) <= head.count(close_ch)
            for open_ch, close_ch in (('{', '}'), ('[', ']'), ('(', ')')))
        if brackets_closed and not LAMBDA_REGEX.search(head):
            yield colon, "E701 multiple statements on one line (colon)"
    semicolon = logical_line.find(';')
    if semicolon > -1:
        yield semicolon, "E702 multiple statements on one line (semicolon)"
def explicit_line_join(logical_line, tokens):
    r"""
    Avoid explicit line join between brackets.

    The preferred way of wrapping long lines is by using Python's implied line
    continuation inside parentheses, brackets and braces.  Long lines can be
    broken over multiple lines by wrapping expressions in parentheses.  These
    should be used in preference to using a backslash for line continuation.

    E502: aaa = [123, \\n       123]
    E502: aaa = ("bbb " \\n       "ccc")
    Okay: aaa = [123,\n       123]
    Okay: aaa = ("bbb "\n       "ccc")
    Okay: aaa = "bbb " \\n    "ccc"
    """
    prev_start = prev_end = parens = 0
    for token_type, text, start, end, line in tokens:
        # At the first token of a new physical row, a backslash remembered
        # from the previous row is redundant when we are inside brackets.
        # (`backslash` is only read when parens > 0, which cannot happen
        # before the first '(' token has also executed the assignment in
        # the `end[0] != prev_end` branch below.)
        if start[0] != prev_start and parens and backslash:
            yield backslash, "E502 the backslash is redundant between brackets"
        if end[0] != prev_end:
            # Token ends on a new row: record whether this physical line
            # ends with a backslash continuation, and where.
            if line.rstrip('\r\n').endswith('\\'):
                backslash = (end[0], len(line.splitlines()[-1]) - 1)
            else:
                backslash = None
            prev_start = prev_end = end[0]
        else:
            prev_start = start[0]
        # Track bracket depth so only backslashes inside brackets are
        # reported.
        if token_type == tokenize.OP:
            if text in '([{':
                parens += 1
            elif text in ')]}':
                parens -= 1
def comparison_to_singleton(logical_line):
    """
    Comparisons to singletons like None should always be done
    with "is" or "is not", never the equality operators (E711/E712).

    Okay: if arg is not None:
    E711: if arg != None:
    E712: if arg == True:

    Also, beware of writing "if x" when you really mean "if x is not None":
    the other value might have a type (such as a container) that could be
    false in a boolean context!
    """
    match = COMPARE_SINGLETON_REGEX.search(logical_line)
    if not match:
        return
    is_equality = match.group(1) == '=='
    singleton = match.group(2)
    suggestion = "'if cond is %s:'" % (
        ('' if is_equality else 'not ') + singleton)
    if singleton == 'None':
        code = 'E711'
    else:
        code = 'E712'
        # "== True" / "!= False" both mean a truthy condition.
        truthy = (singleton == 'True') == is_equality
        suggestion += " or 'if %scond:'" % ('' if truthy else 'not ')
    yield match.start(1), ("%s comparison to %s should be %s" %
                           (code, singleton, suggestion))
def comparison_type(logical_line):
    """
    Object type comparisons should always use isinstance() instead of
    comparing types directly (E721).

    Okay: if isinstance(obj, int):
    E721: if type(obj) is type(1):

    Comparisons against type(<identifier>) are tolerated, since the target
    type is not obvious and isinstance() may not be applicable:

    Okay: if type(a1) is type(b1):
    """
    match = COMPARE_TYPE_REGEX.search(logical_line)
    if match:
        inst = match.group(3)
        obvious = not (inst and isidentifier(inst)
                       and inst not in SINGLETONS)
        if obvious:
            yield (match.start(1),
                   "E721 do not compare types, use 'isinstance()'")
def python_3000_has_key(logical_line):
    r"""
    The {}.has_key() method is removed in Python 3: use the 'in'
    operator instead (W601).

    Okay: if "alph" in d:\n    print d["alph"]
    W601: assert d.has_key('alph')
    """
    position = logical_line.find('.has_key(')
    if position != -1:
        yield position, "W601 .has_key() is deprecated, use 'in'"
def python_3000_raise_comma(logical_line):
    """
    When raising an exception, use "raise ValueError('message')" instead
    of the older form "raise ValueError, 'message'" (W602).

    The paren-using form is preferred: when the exception arguments are
    long or include string formatting, no line continuation characters are
    needed thanks to the containing parentheses.  The three-argument
    re-raise form ("raise E, v, tb") is deliberately not flagged.

    Okay: raise DummyError("Message")
    W602: raise DummyError, "Message"
    """
    match = RAISE_COMMA_REGEX.match(logical_line)
    if match is None:
        return
    if RERAISE_COMMA_REGEX.match(logical_line):
        return
    yield match.start(1), "W602 deprecated form of raising exception"
def python_3000_not_equal(logical_line):
    """
    The '<>' spelling of the inequality operator is an obsolete usage kept
    for backwards compatibility only and is removed in Python 3; always
    use '!=' (W603).

    Okay: if a != 'no':
    W603: if a <> 'no':
    """
    position = logical_line.find('<>')
    if position != -1:
        yield position, "W603 '<>' is deprecated, use '!='"
def python_3000_backticks(logical_line):
    """
    Backticks (`x`) are removed in Python 3; use repr() instead (W604).

    Okay: val = repr(1 + 2)
    W604: val = `1 + 2`
    """
    position = logical_line.find('`')
    if position != -1:
        yield position, "W604 backticks are deprecated, use 'repr()'"
##############################################################################
# Helper functions
##############################################################################
# Python 2/3 compatibility shims: select implementations of readlines(),
# isidentifier() and stdin_get_value().  '' == ''.encode() is True only on
# Python 2, where str and bytes are the same type.
if '' == ''.encode():
    # Python 2: implicit encoding.
    def readlines(filename):
        f = open(filename)
        try:
            return f.readlines()
        finally:
            f.close()
    # Python 2 str has no isidentifier(); approximate it with a regex.
    isidentifier = re.compile(r'[a-zA-Z_]\w*').match
    stdin_get_value = sys.stdin.read
else:
    # Python 3
    def readlines(filename):
        f = open(filename, 'rb')
        try:
            # Honour a PEP 263 coding declaration when decoding the file.
            coding, lines = tokenize.detect_encoding(f.readline)
            f = TextIOWrapper(f, coding, line_buffering=True)
            return [l.decode(coding) for l in lines] + f.readlines()
        except (LookupError, SyntaxError, UnicodeError):
            f.close()
            # Fall back if files are improperly declared
            f = open(filename, encoding='latin-1')
            return f.readlines()
        finally:
            f.close()
    isidentifier = str.isidentifier
    stdin_get_value = TextIOWrapper(sys.stdin.buffer, errors='ignore').read
readlines.__doc__ = " Read the source code."
def expand_indent(line):
    r"""
    Return the width of the line's leading indentation, with tabs
    expanded to the next multiple of 8 columns.

    >>> expand_indent('    ')
    4
    >>> expand_indent('\t')
    8
    >>> expand_indent('    \t')
    8
    >>> expand_indent('        \t')
    16
    """
    # Fast path: without tabs the width is just the whitespace prefix length.
    if '\t' not in line:
        return len(line) - len(line.lstrip())
    width = 0
    for char in line:
        if char == '\t':
            # Advance to the next tab stop (multiples of 8).
            width = width // 8 * 8 + 8
        elif char == ' ':
            width += 1
        else:
            break
    return width
def mute_string(text):
    """
    Replace the contents of a string token with 'x' characters, so that
    later regex-based checks cannot match inside string literals.

    >>> mute_string('"abc"')
    '"xxx"'
    >>> mute_string("'''abc'''")
    "'''xxx'''"
    >>> mute_string("r'abc'")
    "r'xxx'"
    """
    quote = text[-1]
    # Skip any string-prefix letters (e.g. r, u, b) before the opening quote.
    body_start = text.index(quote) + 1
    body_end = len(text) - 1
    if text.endswith(quote * 3):
        # Triple-quoted string.
        body_start += 2
        body_end -= 2
    return text[:body_start] + 'x' * (body_end - body_start) + text[body_end:]
def parse_udiff(diff, patterns=None, parent='.'):
    """Parse a unified diff and return {path: set(target-side line numbers)}.

    Only files matching `patterns` (see filename_match) and containing at
    least one touched line are kept; returned paths are joined onto
    `parent`.
    """
    rv = {}
    path = nrows = None
    for line in diff.splitlines():
        if nrows:
            # Still consuming the body of the current hunk: every line
            # that is not a removal ('-') counts against the hunk length.
            if line[:1] != '-':
                nrows -= 1
            continue
        if line[:3] == '@@ ':
            # Hunk header: record the target-side row range.
            row, nrows = [int(g) for g in HUNK_REGEX.match(line).groups()]
            rv[path].update(list(range(row, row + nrows)))
        elif line[:3] == '+++':
            # Target-file header; strip the conventional 'b/' prefix.
            path = line[4:].split('\t', 1)[0]
            if path[:2] == 'b/':
                path = path[2:]
            rv[path] = set()
    return dict([(os.path.join(parent, path), rows)
                 for (path, rows) in list(rv.items())
                 if rows and filename_match(path, patterns)])
def filename_match(filename, patterns, default=True):
    """
    Return True if `filename` matches any of the fnmatch-style patterns.

    An empty or unspecified `patterns` yields `default` (True unless
    overridden).
    """
    if not patterns:
        return default
    for pattern in patterns:
        if fnmatch(filename, pattern):
            return True
    return False
##############################################################################
# Framework to run all checks
##############################################################################
def find_checks(argument_name):
    """
    Find all globally visible functions where the first argument name
    starts with argument_name.

    Yields (name, error_codes, function, argument_names); the error codes
    are harvested from the E/W codes in the function's docstring.
    """
    # inspect.getargspec() was removed in Python 3.11; prefer
    # getfullargspec() (available since 3.0, same [0] slot for positional
    # argument names) and fall back on Python 2.
    try:
        get_arg_spec = inspect.getfullargspec
    except AttributeError:  # Python 2
        get_arg_spec = inspect.getargspec
    for name, function in list(globals().items()):
        if not inspect.isfunction(function):
            continue
        args = get_arg_spec(function)[0]
        if args and args[0].startswith(argument_name):
            codes = ERRORCODE_REGEX.findall(function.__doc__ or '')
            yield name, codes, function, args
class Checker(object):
"""
Load a Python source file, tokenize it, check coding style.
"""
    def __init__(self, filename, lines=None,
                 options=None, report=None, **kwargs):
        """Prepare a checker for one source file.

        filename may be None (input is labelled 'stdin'); lines may be
        supplied directly to avoid re-reading the file.  When no options
        object is given, one is built from **kwargs via StyleGuide.
        """
        if options is None:
            options = StyleGuide(kwargs).options
        else:
            # An explicit options object and ad-hoc kwargs are exclusive.
            assert not kwargs
        self._io_error = None
        self._physical_checks = options.physical_checks
        self._logical_checks = options.logical_checks
        self.max_line_length = options.max_line_length
        self.verbose = options.verbose
        self.filename = filename
        if filename is None:
            self.filename = 'stdin'
            self.lines = lines or []
        elif lines is None:
            # Read the file ourselves; remember I/O failures so they can be
            # reported later (as E902) instead of raising here.
            try:
                self.lines = readlines(filename)
            except IOError:
                exc_type, exc = sys.exc_info()[:2]
                self._io_error = '%s: %s' % (exc_type.__name__, exc)
                self.lines = []
        else:
            self.lines = lines
        self.report = report or options.report
        self.report_error = self.report.error
def readline(self):
"""
Get the next line from the input buffer.
"""
self.line_number += 1
if self.line_number > len(self.lines):
return ''
return self.lines[self.line_number - 1]
def readline_check_physical(self):
"""
Check and return the next physical line. This method can be
used to feed tokenize.generate_tokens.
"""
line = self.readline()
if line:
self.check_physical(line)
return line
def run_check(self, check, argument_names):
"""
Run a check plugin.
"""
arguments = []
for name in argument_names:
arguments.append(getattr(self, name))
return check(*arguments)
def check_physical(self, line):
"""
Run all physical checks on a raw input line.
"""
self.physical_line = line
if self.indent_char is None and line[:1] in WHITESPACE:
self.indent_char = line[0]
for name, check, argument_names in self._physical_checks:
result = self.run_check(check, argument_names)
if result is not None:
offset, text = result
self.report_error(self.line_number, offset, text, check)
def build_tokens_line(self):
"""
Build a logical line from tokens.
"""
self.mapping = []
logical = []
length = 0
previous = None
for token in self.tokens:
token_type, text = token[0:2]
if token_type in SKIP_TOKENS:
continue
if token_type == tokenize.STRING:
text = mute_string(text)
if previous:
end_row, end = previous[3]
start_row, start = token[2]
if end_row != start_row: # different row
prev_text = self.lines[end_row - 1][end - 1]
if prev_text == ',' or (prev_text not in '{[('
and text not in '}])'):
logical.append(' ')
length += 1
elif end != start: # different column
fill = self.lines[end_row - 1][end:start]
logical.append(fill)
length += len(fill)
self.mapping.append((length, token))
logical.append(text)
length += len(text)
previous = token
self.logical_line = ''.join(logical)
assert self.logical_line.strip() == self.logical_line
def check_logical(self):
"""
Build a line from tokens and run all logical checks on it.
"""
self.build_tokens_line()
self.report.increment_logical_line()
first_line = self.lines[self.mapping[0][1][2][0] - 1]
indent = first_line[:self.mapping[0][1][2][1]]
self.previous_indent_level = self.indent_level
self.indent_level = expand_indent(indent)
if self.verbose >= 2:
print((self.logical_line[:80].rstrip()))
for name, check, argument_names in self._logical_checks:
if self.verbose >= 4:
print((' ' + name))
for result in self.run_check(check, argument_names):
offset, text = result
if isinstance(offset, tuple):
orig_number, orig_offset = offset
else:
for token_offset, token in self.mapping:
if offset >= token_offset:
orig_number = token[2][0]
orig_offset = (token[2][1] + offset - token_offset)
self.report_error(orig_number, orig_offset, text, check)
self.previous_logical = self.logical_line
def generate_tokens(self):
if self._io_error:
self.report_error(1, 0, 'E902 %s' % self._io_error, readlines)
tokengen = tokenize.generate_tokens(self.readline_check_physical)
try:
for token in tokengen:
yield token
except (SyntaxError, tokenize.TokenError):
exc_type, exc = sys.exc_info()[:2]
offset = exc.args[1]
if len(offset) > 2:
offset = offset[1:3]
self.report_error(offset[0], offset[1],
'E901 %s: %s' % (exc_type.__name__, exc.args[0]),
self.generate_tokens)
generate_tokens.__doc__ = " Check if the syntax is valid."
def check_all(self, expected=None, line_offset=0):
"""
Run all checks on the input file.
"""
self.report.init_file(self.filename, self.lines, expected, line_offset)
self.line_number = 0
self.indent_char = None
self.indent_level = 0
self.previous_logical = ''
self.tokens = []
self.blank_lines = blank_lines_before_comment = 0
parens = 0
for token in self.generate_tokens():
self.tokens.append(token)
token_type, text = token[0:2]
if self.verbose >= 3:
if token[2][0] == token[3][0]:
pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
else:
pos = 'l.%s' % token[3][0]
print(('l.%s\t%s\t%s\t%r' %
(token[2][0], pos, tokenize.tok_name[token[0]], text)))
if token_type == tokenize.COMMENT or token_type == tokenize.STRING:
for sre in re.finditer(r"[:.;,] ?[A-Za-z]", text):
pos = sre.span()[0]
part = text[:pos]
line = token[2][0] + part.count('\n')
offset = 0 if part.count('\n') > 0 else token[2][1]
col = offset + pos - part.rfind('\n') + 1
if sre.group(0)[0] == '.':
self.report_error(line, col,
'E289 Too many spaces after period. Use only one.',
check=None)
elif sre.group(0)[0] == ',':
self.report_error(line, col,
'E288 Too many spaces after comma. Use only one.',
check=None)
else:
self.report_error(line, col,
'E287 Too many spaces after punctuation. '
'Use only one.',
check=None)
if token_type == tokenize.OP:
if text in '([{':
parens += 1
elif text in '}])':
parens -= 1
elif not parens:
if token_type == tokenize.NEWLINE:
if self.blank_lines < blank_lines_before_comment:
self.blank_lines = blank_lines_before_comment
self.check_logical()
self.tokens = []
self.blank_lines = blank_lines_before_comment = 0
elif token_type == tokenize.NL:
if len(self.tokens) == 1:
# The physical line contains only this token.
self.blank_lines += 1
self.tokens = []
elif token_type == tokenize.COMMENT and len(self.tokens) == 1:
if blank_lines_before_comment < self.blank_lines:
blank_lines_before_comment = self.blank_lines
self.blank_lines = 0
if COMMENT_WITH_NL:
# The comment also ends a physical line
self.tokens = []
if self.blank_lines > 1:
self.report_error(token[2][0],0,
'E389 File ends in multiple blank lines',
check=None)
return self.report.get_file_results()
class BaseReport(object):
    """Accumulate the results of the checks (counters, messages, timing)."""
    print_filename = False

    def __init__(self, options):
        self._benchmark_keys = options.benchmark_keys
        self._ignore_code = options.ignore_code
        # Aggregated results across all checked files.
        self.elapsed = 0
        self.total_errors = 0
        self.counters = dict.fromkeys(self._benchmark_keys, 0)
        self.messages = {}

    def start(self):
        """Record the starting time."""
        self._start_time = time.time()

    def stop(self):
        """Record the time elapsed since start()."""
        self.elapsed = time.time() - self._start_time

    def init_file(self, filename, lines, expected, line_offset):
        """Signal that a new file is being checked."""
        self.filename = filename
        self.lines = lines
        self.expected = expected if expected else ()
        self.line_offset = line_offset
        self.file_errors = 0
        self.counters['files'] += 1
        self.counters['physical lines'] += len(lines)

    def increment_logical_line(self):
        """Signal a new logical line."""
        self.counters['logical lines'] += 1

    def error(self, line_number, offset, text, check):
        """Report one error, honouring the ignore/select options."""
        code = text[:4]
        if self._ignore_code(code):
            return
        seen_before = code in self.counters
        self.counters[code] = self.counters.get(code, 0) + 1
        if not seen_before:
            # Remember the message text the first time a code is seen.
            self.messages[code] = text[5:]
        # Expected errors/warnings are counted but not reported.
        if code in self.expected:
            return
        if self.print_filename and not self.file_errors:
            print(self.filename)
        self.file_errors += 1
        self.total_errors += 1
        return code

    def get_file_results(self):
        """Return the count of errors and warnings for this file."""
        return self.file_errors

    def get_count(self, prefix=''):
        """Return the total count of errors and warnings."""
        return sum(self.counters[key]
                   for key in self.messages if key.startswith(prefix))

    def get_statistics(self, prefix=''):
        """
        Get statistics for message codes that start with the prefix.

        prefix='' matches all errors and warnings
        prefix='E' matches all errors
        prefix='W' matches all warnings
        prefix='E4' matches all errors that have to do with imports
        """
        stats = []
        for key in sorted(self.messages):
            if key.startswith(prefix):
                stats.append('%-7s %s %s' %
                             (self.counters[key], key, self.messages[key]))
        return stats

    def print_statistics(self, prefix=''):
        """Print overall statistics (number of errors and warnings)."""
        for line in self.get_statistics(prefix):
            print(line)

    def print_benchmark(self):
        """Print benchmark numbers."""
        print('%-7.2f %s' % (self.elapsed, 'seconds elapsed'))
        if not self.elapsed:
            return
        for key in self._benchmark_keys:
            print('%-7d %s per second (%d total)' %
                  (self.counters[key] / self.elapsed, key,
                   self.counters[key]))
class FileReport(BaseReport):
    """Report that only prints each offending filename (used for -q)."""
    print_filename = True
class StandardReport(BaseReport):
    """Collect and print the results of the checks."""

    def __init__(self, options):
        super(StandardReport, self).__init__(options)
        # Resolve a symbolic format name (e.g. 'pylint') to its template,
        # falling back to the literal format string itself.
        self._fmt = REPORT_FORMAT.get(options.format.lower(), options.format)
        self._repeat = options.repeat
        self._show_source = options.show_source
        self._show_pep8 = options.show_pep8

    def error(self, line_number, offset, text, check):
        """
        Report an error, according to options.
        """
        code = super(StandardReport, self).error(line_number, offset,
                                                 text, check)
        if not code:
            return code
        # Without --repeat, only the first occurrence of a code is shown.
        if self._repeat or self.counters[code] == 1:
            print(self._fmt % {
                'path': self.filename,
                'row': self.line_offset + line_number, 'col': offset + 1,
                'code': code, 'text': text[5:],
            })
            if self._show_source:
                line = (self.lines[line_number - 1]
                        if line_number <= len(self.lines) else '')
                print(line.rstrip())
                print(' ' * offset + '^')
            if self._show_pep8 and check is not None:
                print(check.__doc__.lstrip('\n').rstrip())
        return code
class DiffReport(StandardReport):
    """Collect and print the results for the changed lines only."""

    def __init__(self, options):
        super(DiffReport, self).__init__(options)
        self._selected = options.selected_lines

    def error(self, line_number, offset, text, check):
        # Only report errors falling on lines touched by the diff.
        if line_number in self._selected[self.filename]:
            return super(DiffReport, self).error(line_number, offset,
                                                 text, check)
        return None
class TestReport(StandardReport):
    """Collect the results for the tests."""

    def __init__(self, options):
        options.benchmark_keys += ['test cases', 'failed tests']
        super(TestReport, self).__init__(options)
        self._verbose = options.verbose

    def get_file_results(self):
        # Check if the expected errors were found
        label = '%s:%s:1' % (self.filename, self.line_offset)
        codes = sorted(self.expected)
        for code in codes:
            if not self.counters.get(code):
                self.file_errors += 1
                self.total_errors += 1
                print(('%s: error %s not found' % (label, code)))
        if self._verbose and not self.file_errors:
            print(('%s: passed (%s)' %
                   (label, ' '.join(codes) or 'Okay')))
        self.counters['test cases'] += 1
        if self.file_errors:
            self.counters['failed tests'] += 1
        # Reset the per-test counters, keeping only the benchmark keys.
        for key in set(self.counters) - set(self._benchmark_keys):
            del self.counters[key]
        self.messages = {}
        return self.file_errors

    def print_results(self):
        # '%%s' survives the first %-substitution as a literal '%s', which
        # the chained '%' below then fills with the failure suffix.
        results = ("%(physical lines)d lines tested: %(files)d files, "
                   "%(test cases)d test cases%%s." % self.counters)
        if self.total_errors:
            print((results % ", %s failures" % self.total_errors))
        else:
            print((results % ""))
        print(("Test failed." if self.total_errors else "Test passed."))
class StyleGuide(object):
    """Initialize a PEP-8 instance with few options."""

    def __init__(self, *args, **kwargs):
        # build options from the command line
        parse_argv = kwargs.pop('parse_argv', False)
        config_file = kwargs.pop('config_file', None)
        options, self.paths = process_options(parse_argv=parse_argv,
                                              config_file=config_file)
        if args or kwargs:
            # build options from dict
            options_dict = dict(*args, **kwargs)
            options.__dict__.update(options_dict)
            if 'paths' in options_dict:
                self.paths = options_dict['paths']
        self.runner = self.input_file
        self.options = options
        if not options.reporter:
            options.reporter = BaseReport if options.quiet else StandardReport
        # Normalize exclude patterns (strip trailing slashes).
        for index, value in enumerate(options.exclude):
            options.exclude[index] = value.rstrip('/')
        # Ignore all checks which are not explicitly selected
        options.select = tuple(options.select or ())
        options.ignore = tuple(options.ignore or options.select and ('',))
        options.benchmark_keys = BENCHMARK_KEYS[:]
        options.ignore_code = self.ignore_code
        options.physical_checks = self.get_checks('physical_line')
        options.logical_checks = self.get_checks('logical_line')
        self.init_report()

    def init_report(self, reporter=None):
        """Initialize the report instance."""
        self.options.report = (reporter or self.options.reporter)(self.options)
        return self.options.report

    def check_files(self, paths=None):
        """Run all checks on the paths."""
        if paths is None:
            paths = self.paths
        report = self.options.report
        runner = self.runner
        report.start()
        for path in paths:
            if os.path.isdir(path):
                self.input_dir(path)
            elif not self.excluded(path):
                runner(path)
        report.stop()
        return report

    def input_file(self, filename, lines=None, expected=None, line_offset=0):
        """Run all checks on a Python source file."""
        if self.options.verbose:
            print(('checking %s' % filename))
        fchecker = Checker(filename, lines=lines, options=self.options)
        return fchecker.check_all(expected=expected, line_offset=line_offset)

    def input_dir(self, dirname):
        """Check all files in this directory and all subdirectories."""
        dirname = dirname.rstrip('/')
        if self.excluded(dirname):
            return 0
        counters = self.options.report.counters
        verbose = self.options.verbose
        filepatterns = self.options.filename
        runner = self.runner
        for root, dirs, files in os.walk(dirname):
            if verbose:
                print(('directory ' + root))
            counters['directories'] += 1
            # Removing entries from 'dirs' prunes os.walk's recursion;
            # iterating over sorted(dirs) (a copy) makes removal safe.
            for subdir in sorted(dirs):
                if self.excluded(subdir):
                    dirs.remove(subdir)
            for filename in sorted(files):
                # contain a pattern that matches?
                if ((filename_match(filename, filepatterns) and
                     not self.excluded(filename))):
                    runner(os.path.join(root, filename))

    def excluded(self, filename):
        """
        Check if options.exclude contains a pattern that matches filename.
        """
        basename = os.path.basename(filename)
        return filename_match(basename, self.options.exclude, default=False)

    def ignore_code(self, code):
        """
        Check if the error code should be ignored.

        If 'options.select' contains a prefix of the error code,
        return False.  Else, if 'options.ignore' contains a prefix of
        the error code, return True.
        """
        return (code.startswith(self.options.ignore) and
                not code.startswith(self.options.select))

    def get_checks(self, argument_name):
        """
        Find all globally visible functions where the first argument name
        starts with argument_name and which contain selected tests.
        """
        checks = []
        for name, codes, function, args in find_checks(argument_name):
            if any(not (code and self.ignore_code(code)) for code in codes):
                checks.append((name, function, args))
        return sorted(checks)
def init_tests(pep8style):
    """
    Initialize testing framework.

    A test file can provide many tests.  Each test starts with a
    declaration.  This declaration is a single line starting with '#:'.
    It declares codes of expected failures, separated by spaces or 'Okay'
    if no failure is expected.

    If the file does not contain such declaration, it should pass all
    tests.  If the declaration is empty, following lines are not checked,
    until next declaration.

    Examples:

     * Only E224 and W701 are expected:   #: E224 W701
     * Following example is conform:      #: Okay
     * Don't check these lines:           #:
    """
    report = pep8style.init_report(TestReport)
    runner = pep8style.input_file

    def run_tests(filename):
        """Run all the tests from a file."""
        # Append a sentinel declaration so the last test case is flushed.
        lines = readlines(filename) + ['#:\n']
        line_offset = 0
        codes = ['Okay']
        testcase = []
        count_files = report.counters['files']
        for index, line in enumerate(lines):
            if not line.startswith('#:'):
                if codes:
                    # Collect the lines of the test case
                    testcase.append(line)
                continue
            if codes and index:
                codes = [c for c in codes if c != 'Okay']
                # Run the checker
                runner(filename, testcase, expected=codes,
                       line_offset=line_offset)
            # output the real line numbers
            line_offset = index + 1
            # configure the expected errors
            codes = line.split()[1:]
            # empty the test case buffer
            del testcase[:]
        # Count the whole file as one entry, not one per test case.
        report.counters['files'] = count_files + 1
        return report.counters['failed tests']

    # Replace the default runner so check_files() runs tests instead.
    pep8style.runner = run_tests
def selftest(options):
    """
    Test all check functions with test cases in docstrings.

    Returns a (count_failed, count_all) tuple.
    """
    count_failed = count_all = 0
    report = BaseReport(options)
    counters = report.counters
    checks = options.physical_checks + options.logical_checks
    for name, check, argument_names in checks:
        for line in check.__doc__.splitlines():
            line = line.lstrip()
            match = SELFTEST_REGEX.match(line)
            if match is None:
                continue
            code, source = match.groups()
            checker = Checker(None, options=options, report=report)
            # Expand the escaped \n/\t/\s markers used in docstring
            # examples into real source lines.
            for part in source.split(r'\n'):
                part = part.replace(r'\t', '\t')
                part = part.replace(r'\s', ' ')
                checker.lines.append(part + '\n')
            checker.check_all()
            error = None
            if code == 'Okay':
                # Any non-benchmark counter means a check fired when the
                # example was expected to be clean.
                if len(counters) > len(options.benchmark_keys):
                    codes = [key for key in counters
                             if key not in options.benchmark_keys]
                    error = "incorrectly found %s" % ', '.join(codes)
            elif not counters.get(code):
                error = "failed to find %s" % code
            # Keep showing errors for multiple tests
            for key in set(counters) - set(options.benchmark_keys):
                del counters[key]
            report.messages = {}
            count_all += 1
            if not error:
                if options.verbose:
                    print(("%s: %s" % (code, source)))
            else:
                count_failed += 1
                print(("%s: %s:" % (__file__, error)))
                for line in checker.lines:
                    print((line.rstrip()))
    return count_failed, count_all
def read_config(options, args, arglist, parser):
    """Read both user configuration and local configuration."""
    config = RawConfigParser()

    user_conf = options.config
    if user_conf and os.path.isfile(user_conf):
        if options.verbose:
            print(('user configuration: %s' % user_conf))
        config.read(user_conf)

    # Walk up from the common prefix of the input paths looking for the
    # nearest '.pep8' project configuration file.
    parent = tail = args and os.path.abspath(os.path.commonprefix(args))
    while tail:
        local_conf = os.path.join(parent, '.pep8')
        if os.path.isfile(local_conf):
            if options.verbose:
                print(('local configuration: %s' % local_conf))
            config.read(local_conf)
            break
        parent, tail = os.path.split(parent)

    if config.has_section('pep8'):
        # Map each option's dest name to its type (or action, for flags)
        # so config values can be coerced correctly below.
        option_list = dict([(o.dest, o.type or o.action)
                            for o in parser.option_list])

        # First, read the default values
        new_options, _ = parser.parse_args([])

        # Second, parse the configuration
        for opt in config.options('pep8'):
            if options.verbose > 1:
                print((' %s = %s' % (opt, config.get('pep8', opt))))
            if opt.replace('_', '-') not in parser.config_options:
                print(('Unknown option: \'%s\'\n not in [%s]' %
                       (opt, ' '.join(parser.config_options))))
                sys.exit(1)
            normalized_opt = opt.replace('-', '_')
            opt_type = option_list[normalized_opt]
            if opt_type in ('int', 'count'):
                value = config.getint('pep8', opt)
            elif opt_type == 'string':
                value = config.get('pep8', opt)
            else:
                assert opt_type in ('store_true', 'store_false')
                value = config.getboolean('pep8', opt)
            setattr(new_options, normalized_opt, value)

        # Third, overwrite with the command-line options
        options, _ = parser.parse_args(arglist, values=new_options)

    return options
def process_options(arglist=None, parse_argv=False, config_file=None):
    """Process options passed either via arglist or via command line args.

    Returns an (options, args) tuple as produced by optparse.
    """
    if not arglist and not parse_argv:
        # Don't read the command line if the module is used as a library.
        arglist = []
    if config_file is True:
        config_file = DEFAULT_CONFIG
    parser = OptionParser(version=__version__,
                          usage="%prog [options] input ...")
    # Options that may legitimately appear in a [pep8] config section.
    parser.config_options = [
        'exclude', 'filename', 'select', 'ignore', 'max-line-length', 'count',
        'format', 'quiet', 'show-pep8', 'show-source', 'statistics', 'verbose']
    parser.add_option('-v', '--verbose', default=0, action='count',
                      help="print status messages, or debug with -vv")
    parser.add_option('-q', '--quiet', default=0, action='count',
                      help="report only file names, or nothing with -qq")
    parser.add_option('-r', '--repeat', default=True, action='store_true',
                      help="(obsolete) show all occurrences of the same error")
    parser.add_option('--first', action='store_false', dest='repeat',
                      help="show first occurrence of each error")
    parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE,
                      help="exclude files or directories which match these "
                           "comma separated patterns (default: %default)")
    parser.add_option('--filename', metavar='patterns', default='*.py',
                      help="when parsing directories, only check filenames "
                           "matching these comma separated patterns "
                           "(default: %default)")
    parser.add_option('--select', metavar='errors', default='',
                      help="select errors and warnings (e.g. E,W6)")
    parser.add_option('--ignore', metavar='errors', default='',
                      help="skip errors and warnings (e.g. E4,W)")
    parser.add_option('--show-source', action='store_true',
                      help="show source code for each error")
    parser.add_option('--show-pep8', action='store_true',
                      help="show text of PEP 8 for each error "
                           "(implies --first)")
    parser.add_option('--statistics', action='store_true',
                      help="count errors and warnings")
    parser.add_option('--count', action='store_true',
                      help="print total number of errors and warnings "
                           "to standard error and set exit code to 1 if "
                           "total is not null")
    parser.add_option('--max-line-length', type='int', metavar='n',
                      default=MAX_LINE_LENGTH,
                      help="set maximum allowed line length "
                           "(default: %default)")
    parser.add_option('--format', metavar='format', default='default',
                      help="set the error format [default|pylint|<custom>]")
    parser.add_option('--diff', action='store_true',
                      help="report only lines changed according to the "
                           "unified diff received on STDIN")
    group = parser.add_option_group("Testing Options")
    group.add_option('--testsuite', metavar='dir',
                     help="run regression tests from dir")
    group.add_option('--doctest', action='store_true',
                     help="run doctest on myself")
    group.add_option('--benchmark', action='store_true',
                     help="measure processing speed")
    group = parser.add_option_group("Configuration", description=(
        "The project options are read from the [pep8] section of the .pep8 "
        "file located in any parent folder of the path(s) being processed. "
        "Allowed options are: %s." % ', '.join(parser.config_options)))
    group.add_option('--config', metavar='path', default=config_file,
                     help="config file location (default: %default)")
    options, args = parser.parse_args(arglist)
    options.reporter = None
    if options.testsuite:
        args.append(options.testsuite)
    elif not options.doctest:
        if parse_argv and not args:
            if os.path.exists('.pep8') or options.diff:
                args = ['.']
            else:
                parser.error('input not specified')
        # Merge config-file values underneath the command-line values.
        options = read_config(options, args, arglist, parser)
        options.reporter = parse_argv and options.quiet == 1 and FileReport
    # Split the comma-separated pattern/code lists into real lists.
    if options.filename:
        options.filename = options.filename.split(',')
    options.exclude = options.exclude.split(',')
    if options.select:
        options.select = options.select.split(',')
    if options.ignore:
        options.ignore = options.ignore.split(',')
    elif not (options.select or
              options.testsuite or options.doctest) and DEFAULT_IGNORE:
        # The default choice: ignore controversial checks
        # (for doctest and testsuite, all checks are required)
        options.ignore = DEFAULT_IGNORE.split(',')
    if options.diff:
        options.reporter = DiffReport
        stdin = stdin_get_value()
        options.selected_lines = parse_udiff(stdin, options.filename, args[0])
        args = sorted(options.selected_lines)
    return options, args
def _main():
    """Parse options and run checks on Python source.

    Exits with status 1 when self-tests fail or (with --count) when any
    error was reported.
    """
    pep8style = StyleGuide(parse_argv=True, config_file=True)
    options = pep8style.options
    if options.doctest:
        # Run this module's doctests plus the docstring-based selftests.
        import doctest
        fail_d, done_d = doctest.testmod(report=False, verbose=options.verbose)
        fail_s, done_s = selftest(options)
        count_failed = fail_s + fail_d
        if not options.quiet:
            count_passed = done_d + done_s - count_failed
            print(("%d passed and %d failed." % (count_passed, count_failed)))
            print(("Test failed." if count_failed else "Test passed."))
        if count_failed:
            sys.exit(1)
    if options.testsuite:
        init_tests(pep8style)
    report = pep8style.check_files()
    if options.statistics:
        report.print_statistics()
    if options.benchmark:
        report.print_benchmark()
    if options.testsuite and not options.quiet:
        report.print_results()
    if report.total_errors:
        if options.count:
            sys.stderr.write(str(report.total_errors) + '\n')
        sys.exit(1)
# Entry point when run as a script rather than imported as a library.
if __name__ == '__main__':
    _main()
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from cron_servlet import CronServlet
from instance_servlet import InstanceServlet
from patch_servlet import PatchServlet
from servlet import Servlet, Request, Response
from test_servlet import TestServlet
# Servlet used for every path without an explicit '_<name>/' prefix.
_DEFAULT_SERVLET = InstanceServlet.GetConstructor()

# Maps the '_<name>' URL prefix to the servlet that handles it.
_SERVLETS = {
  'cron': CronServlet,
  'patch': PatchServlet,
  'test': TestServlet,
}
class Handler(Servlet):
  """Top-level request dispatcher.

  Paths of the form '_<name>/<rest>' are routed to the servlet registered
  under <name> in _SERVLETS; every other path is handled by the default
  (instance) servlet.
  """

  def Get(self):
    path = self._request.path

    if path.startswith('_'):
      servlet_path = path[1:]
      # Ensure a '/' is present so the split below always yields two parts.
      if '/' not in servlet_path:
        servlet_path += '/'
      servlet_name, servlet_path = servlet_path.split('/', 1)
      servlet = _SERVLETS.get(servlet_name)
      if servlet is None:
        # Report the unknown servlet *name*: after the split above,
        # |servlet_path| no longer contains it, so interpolating the path
        # here (as before) produced a misleading message.
        return Response.NotFound('"%s" servlet not found' % servlet_name)
    else:
      servlet_path = path
      servlet = _DEFAULT_SERVLET

    return servlet(
        Request(servlet_path, self._request.host, self._request.headers)).Get()
#!/usr/bin/env python2.7
# Copyright 2013 Virantha Ekanayake All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, os, multiprocessing.forking
import logging
""" Special work-around to support multiprocessing and pyinstaller --onefile on windows systms
https://github.com/pyinstaller/pyinstaller/wiki/Recipe-Multiprocessing
"""
import multiprocessing.forking as forking
import os
import sys
class _Popen(multiprocessing.forking.Popen):
    # Patched Popen so multiprocessing children spawned from a PyInstaller
    # --onefile executable can find the unpacked bundle directory.
    def __init__(self, *args, **kw):
        if hasattr(sys, 'frozen'):
            # We have to set original _MEIPASS2 value from sys._MEIPASS
            # to get --onefile mode working.
            os.putenv('_MEIPASS2', sys._MEIPASS)
        try:
            super(_Popen, self).__init__(*args, **kw)
        finally:
            # Always restore the environment, even if spawning failed.
            if hasattr(sys, 'frozen'):
                # On some platforms (e.g. AIX) 'os.unsetenv()' is not
                # available. In those cases we cannot delete the variable
                # but only set it to the empty string. The bootloader
                # can handle this case.
                if hasattr(os, 'unsetenv'):
                    os.unsetenv('_MEIPASS2')
                else:
                    os.putenv('_MEIPASS2', '')
# Install the patched Popen so every multiprocessing start goes through it.
forking.Popen = _Popen

# Alternative (unused) approach kept from the original recipe:
#class Process(multiprocessing.Process):
#    _Popen = _Popen
#    ...
if __name__ == '__main__':
    # On Windows calling this function is necessary.
    multiprocessing.freeze_support()
#!/usr/bin/env python
import argparse as ap
#import subprocess
import shlex, subprocess
import json
import xml.etree.ElementTree as ET
# Command-line interface: a dataset description file, one or more OCR
# workflow JSON files, and the log file that collects the results.
# (Fixes user-visible typos: 'Ocr trainner' -> 'OCR trainer',
# 'Overide' -> 'Override'.)
parser = ap.ArgumentParser(description='OCR trainer')
parser.add_argument(
    '-dataset',
    #dest='dataset',
    type=str,
    help='Dataset to use'
)
parser.add_argument(
    '-jsons',
    dest='jsons',
    type=str,
    nargs='+',
    help='List of json to use'
)
parser.add_argument(
    '-outfile',
    dest='outfile',
    type=str,
    help='logfile path (!) Override existing file'
)
args = parser.parse_args()
with open(args.outfile, 'wa+') as logfile:
def get_layers(training_path):
tree = ET.parse(training_path)
root = tree.getroot()
ocr = root.find('SimpleOcr')
layers = ocr.find('layer_sizes')
data = layers.find("data")
return ET.tostring(data, method='text')
def check_value(workflow, dataset_path, training, datas):
with open(dataset_path, 'r') as r:
dataset = sorted(json.load(r).items())
total = float(0)
errors = float(0)
layers = get_layers(training).split()
# fichier -> tant de layers, tel taux de success
for key, values in dataset:
for value in values:
print value
cmd = "./ocr {:s} {:s} {:s} {:s} | grep \'value:\' | sed \'s/value: //g\'"\
.format(value, workflow, dataset_path, training)
#cmd = shlex.split(cmd)
result = subprocess.check_output(['sh', '-c', cmd],
stderr=subprocess.PIPE)[:-1]
print key + ", " + result
total += 1
if key != result:
errors += 1
success = (total - errors) / (total) * 100
logfile.write('Workflow: {:s} dataset: {:s} training: {:s} training_iteration: {:s} success {:f} layers: {:s}\n'.\
format(workflow, dataset_path, training, datas['iterations'], success, str(layers)))
print success
def parse_output(output):
l = output.split('\n')
value = {}
for i in l:
if i.startswith('iterations'):
value['iterations'] = i.split()[1]
elif i.startswith('training_file'):
value['training_path'] = i.split()[1]
#elif i.startswith('value'):
#value['value'] = i.split()[1]
return value
for i in args.jsons:
try:
cmd = './ocr {:s} {:s} {:s}'.format('./data/step1/base/a.bmp', i, args.dataset)
print cmd
cmd = shlex.split(cmd)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
print out, err
output = parse_output(out)
check_value(i, args.dataset, output['training_path'], output)
except Exception as e:
print e
#def main():
#tests = get_test_datas() | unknown | codeparrot/codeparrot-clean | ||
from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import TestObj, setup
class IfTagTests(SimpleTestCase):
    # --- Basic {% if %} / {% elif %} / {% else %} behaviour ---
    # NOTE(review): ``self.engine`` is presumably attached by the ``setup``
    # decorator imported above; it is not assigned anywhere in this class.

    @setup({'if-tag01': '{% if foo %}yes{% else %}no{% endif %}'})
    def test_if_tag01(self):
        output = self.engine.render_to_string('if-tag01', {'foo': True})
        self.assertEqual(output, 'yes')

    @setup({'if-tag02': '{% if foo %}yes{% else %}no{% endif %}'})
    def test_if_tag02(self):
        output = self.engine.render_to_string('if-tag02', {'foo': False})
        self.assertEqual(output, 'no')

    @setup({'if-tag03': '{% if foo %}yes{% else %}no{% endif %}'})
    def test_if_tag03(self):
        # Missing variable takes the else branch.
        output = self.engine.render_to_string('if-tag03')
        self.assertEqual(output, 'no')

    @setup({'if-tag04': '{% if foo %}foo{% elif bar %}bar{% endif %}'})
    def test_if_tag04(self):
        output = self.engine.render_to_string('if-tag04', {'foo': True})
        self.assertEqual(output, 'foo')

    @setup({'if-tag05': '{% if foo %}foo{% elif bar %}bar{% endif %}'})
    def test_if_tag05(self):
        output = self.engine.render_to_string('if-tag05', {'bar': True})
        self.assertEqual(output, 'bar')

    @setup({'if-tag06': '{% if foo %}foo{% elif bar %}bar{% endif %}'})
    def test_if_tag06(self):
        # No branch matches and there is no {% else %}: renders nothing.
        output = self.engine.render_to_string('if-tag06')
        self.assertEqual(output, '')

    @setup({'if-tag07': '{% if foo %}foo{% elif bar %}bar{% else %}nothing{% endif %}'})
    def test_if_tag07(self):
        output = self.engine.render_to_string('if-tag07', {'foo': True})
        self.assertEqual(output, 'foo')

    @setup({'if-tag08': '{% if foo %}foo{% elif bar %}bar{% else %}nothing{% endif %}'})
    def test_if_tag08(self):
        output = self.engine.render_to_string('if-tag08', {'bar': True})
        self.assertEqual(output, 'bar')

    @setup({'if-tag09': '{% if foo %}foo{% elif bar %}bar{% else %}nothing{% endif %}'})
    def test_if_tag09(self):
        output = self.engine.render_to_string('if-tag09')
        self.assertEqual(output, 'nothing')

    @setup({'if-tag10': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
    def test_if_tag10(self):
        output = self.engine.render_to_string('if-tag10', {'foo': True})
        self.assertEqual(output, 'foo')

    @setup({'if-tag11': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
    def test_if_tag11(self):
        output = self.engine.render_to_string('if-tag11', {'bar': True})
        self.assertEqual(output, 'bar')

    @setup({'if-tag12': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
    def test_if_tag12(self):
        output = self.engine.render_to_string('if-tag12', {'baz': True})
        self.assertEqual(output, 'baz')

    @setup({'if-tag13': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
    def test_if_tag13(self):
        output = self.engine.render_to_string('if-tag13')
        self.assertEqual(output, 'nothing')
    # Filters
    @setup({'if-tag-filter01': '{% if foo|length == 5 %}yes{% else %}no{% endif %}'})
    def test_if_tag_filter01(self):
        output = self.engine.render_to_string('if-tag-filter01', {'foo': 'abcde'})
        self.assertEqual(output, 'yes')

    @setup({'if-tag-filter02': '{% if foo|upper == \'ABC\' %}yes{% else %}no{% endif %}'})
    def test_if_tag_filter02(self):
        # Filter applied to a missing variable: the comparison is false.
        output = self.engine.render_to_string('if-tag-filter02')
        self.assertEqual(output, 'no')
    # Equality
    @setup({'if-tag-eq01': '{% if foo == bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_eq01(self):
        # Two undefined variables compare equal.
        output = self.engine.render_to_string('if-tag-eq01')
        self.assertEqual(output, 'yes')

    @setup({'if-tag-eq02': '{% if foo == bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_eq02(self):
        output = self.engine.render_to_string('if-tag-eq02', {'foo': 1})
        self.assertEqual(output, 'no')

    @setup({'if-tag-eq03': '{% if foo == bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_eq03(self):
        output = self.engine.render_to_string('if-tag-eq03', {'foo': 1, 'bar': 1})
        self.assertEqual(output, 'yes')

    @setup({'if-tag-eq04': '{% if foo == bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_eq04(self):
        output = self.engine.render_to_string('if-tag-eq04', {'foo': 1, 'bar': 2})
        self.assertEqual(output, 'no')

    @setup({'if-tag-eq05': '{% if foo == \'\' %}yes{% else %}no{% endif %}'})
    def test_if_tag_eq05(self):
        # An undefined variable is not equal to the empty string.
        output = self.engine.render_to_string('if-tag-eq05')
        self.assertEqual(output, 'no')
    # Inequality
    @setup({'if-tag-noteq01': '{% if foo != bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_noteq01(self):
        # Two undefined variables compare equal, so != is false.
        output = self.engine.render_to_string('if-tag-noteq01')
        self.assertEqual(output, 'no')

    @setup({'if-tag-noteq02': '{% if foo != bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_noteq02(self):
        output = self.engine.render_to_string('if-tag-noteq02', {'foo': 1})
        self.assertEqual(output, 'yes')

    @setup({'if-tag-noteq03': '{% if foo != bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_noteq03(self):
        output = self.engine.render_to_string('if-tag-noteq03', {'foo': 1, 'bar': 1})
        self.assertEqual(output, 'no')

    @setup({'if-tag-noteq04': '{% if foo != bar %}yes{% else %}no{% endif %}'})
    def test_if_tag_noteq04(self):
        output = self.engine.render_to_string('if-tag-noteq04', {'foo': 1, 'bar': 2})
        self.assertEqual(output, 'yes')

    @setup({'if-tag-noteq05': '{% if foo != "" %}yes{% else %}no{% endif %}'})
    def test_if_tag_noteq05(self):
        # An undefined variable is not equal to the empty string.
        output = self.engine.render_to_string('if-tag-noteq05')
        self.assertEqual(output, 'yes')
# Comparison
@setup({'if-tag-gt-01': '{% if 2 > 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_gt_01(self):
output = self.engine.render_to_string('if-tag-gt-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-gt-02': '{% if 1 > 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_gt_02(self):
output = self.engine.render_to_string('if-tag-gt-02')
self.assertEqual(output, 'no')
@setup({'if-tag-gte-01': '{% if 1 >= 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_gte_01(self):
output = self.engine.render_to_string('if-tag-gte-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-gte-02': '{% if 1 >= 2 %}yes{% else %}no{% endif %}'})
def test_if_tag_gte_02(self):
output = self.engine.render_to_string('if-tag-gte-02')
self.assertEqual(output, 'no')
@setup({'if-tag-lt-01': '{% if 1 < 2 %}yes{% else %}no{% endif %}'})
def test_if_tag_lt_01(self):
output = self.engine.render_to_string('if-tag-lt-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-lt-02': '{% if 1 < 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_lt_02(self):
output = self.engine.render_to_string('if-tag-lt-02')
self.assertEqual(output, 'no')
@setup({'if-tag-lte-01': '{% if 1 <= 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_lte_01(self):
output = self.engine.render_to_string('if-tag-lte-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-lte-02': '{% if 2 <= 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_lte_02(self):
output = self.engine.render_to_string('if-tag-lte-02')
self.assertEqual(output, 'no')
# Contains
@setup({'if-tag-in-01': '{% if 1 in x %}yes{% else %}no{% endif %}'})
def test_if_tag_in_01(self):
output = self.engine.render_to_string('if-tag-in-01', {'x': [1]})
self.assertEqual(output, 'yes')
@setup({'if-tag-in-02': '{% if 2 in x %}yes{% else %}no{% endif %}'})
def test_if_tag_in_02(self):
output = self.engine.render_to_string('if-tag-in-02', {'x': [1]})
self.assertEqual(output, 'no')
@setup({'if-tag-not-in-01': '{% if 1 not in x %}yes{% else %}no{% endif %}'})
def test_if_tag_not_in_01(self):
output = self.engine.render_to_string('if-tag-not-in-01', {'x': [1]})
self.assertEqual(output, 'no')
@setup({'if-tag-not-in-02': '{% if 2 not in x %}yes{% else %}no{% endif %}'})
def test_if_tag_not_in_02(self):
output = self.engine.render_to_string('if-tag-not-in-02', {'x': [1]})
self.assertEqual(output, 'yes')
# AND
@setup({'if-tag-and01': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and01(self):
output = self.engine.render_to_string('if-tag-and01', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-and02': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and02(self):
output = self.engine.render_to_string('if-tag-and02', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and03': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and03(self):
output = self.engine.render_to_string('if-tag-and03', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-and04': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and04(self):
output = self.engine.render_to_string('if-tag-and04', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and05': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and05(self):
output = self.engine.render_to_string('if-tag-and05', {'foo': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and06': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and06(self):
output = self.engine.render_to_string('if-tag-and06', {'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and07': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and07(self):
output = self.engine.render_to_string('if-tag-and07', {'foo': True})
self.assertEqual(output, 'no')
@setup({'if-tag-and08': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and08(self):
output = self.engine.render_to_string('if-tag-and08', {'bar': True})
self.assertEqual(output, 'no')
# OR
@setup({'if-tag-or01': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or01(self):
output = self.engine.render_to_string('if-tag-or01', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or02': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or02(self):
output = self.engine.render_to_string('if-tag-or02', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-or03': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or03(self):
output = self.engine.render_to_string('if-tag-or03', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or04': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or04(self):
output = self.engine.render_to_string('if-tag-or04', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-or05': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or05(self):
output = self.engine.render_to_string('if-tag-or05', {'foo': False})
self.assertEqual(output, 'no')
@setup({'if-tag-or06': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or06(self):
output = self.engine.render_to_string('if-tag-or06', {'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-or07': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or07(self):
output = self.engine.render_to_string('if-tag-or07', {'foo': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or08': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or08(self):
output = self.engine.render_to_string('if-tag-or08', {'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or09': '{% if foo or bar or baz %}yes{% else %}no{% endif %}'})
def test_if_tag_or09(self):
"""
multiple ORs
"""
output = self.engine.render_to_string('if-tag-or09', {'baz': True})
self.assertEqual(output, 'yes')
# NOT
@setup({'if-tag-not01': '{% if not foo %}no{% else %}yes{% endif %}'})
def test_if_tag_not01(self):
output = self.engine.render_to_string('if-tag-not01', {'foo': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not02': '{% if not not foo %}no{% else %}yes{% endif %}'})
def test_if_tag_not02(self):
output = self.engine.render_to_string('if-tag-not02', {'foo': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not06': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not06(self):
output = self.engine.render_to_string('if-tag-not06')
self.assertEqual(output, 'no')
@setup({'if-tag-not07': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not07(self):
output = self.engine.render_to_string('if-tag-not07', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not08': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not08(self):
output = self.engine.render_to_string('if-tag-not08', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not09': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not09(self):
output = self.engine.render_to_string('if-tag-not09', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not10': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not10(self):
output = self.engine.render_to_string('if-tag-not10', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not11': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not11(self):
output = self.engine.render_to_string('if-tag-not11')
self.assertEqual(output, 'no')
@setup({'if-tag-not12': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not12(self):
output = self.engine.render_to_string('if-tag-not12', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not13': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not13(self):
output = self.engine.render_to_string('if-tag-not13', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not14': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not14(self):
output = self.engine.render_to_string('if-tag-not14', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not15': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not15(self):
output = self.engine.render_to_string('if-tag-not15', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not16': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not16(self):
output = self.engine.render_to_string('if-tag-not16')
self.assertEqual(output, 'yes')
@setup({'if-tag-not17': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not17(self):
output = self.engine.render_to_string('if-tag-not17', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not18': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not18(self):
output = self.engine.render_to_string('if-tag-not18', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not19': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not19(self):
output = self.engine.render_to_string('if-tag-not19', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not20': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not20(self):
output = self.engine.render_to_string('if-tag-not20', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not21': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not21(self):
output = self.engine.render_to_string('if-tag-not21')
self.assertEqual(output, 'yes')
@setup({'if-tag-not22': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not22(self):
output = self.engine.render_to_string('if-tag-not22', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not23': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not23(self):
output = self.engine.render_to_string('if-tag-not23', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not24': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not24(self):
output = self.engine.render_to_string('if-tag-not24', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not25': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not25(self):
output = self.engine.render_to_string('if-tag-not25', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not26': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not26(self):
output = self.engine.render_to_string('if-tag-not26')
self.assertEqual(output, 'yes')
@setup({'if-tag-not27': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not27(self):
output = self.engine.render_to_string('if-tag-not27', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not28': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not28(self):
output = self.engine.render_to_string('if-tag-not28', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not29': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not29(self):
output = self.engine.render_to_string('if-tag-not29', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not30': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not30(self):
output = self.engine.render_to_string('if-tag-not30', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not31': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not31(self):
output = self.engine.render_to_string('if-tag-not31')
self.assertEqual(output, 'yes')
@setup({'if-tag-not32': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not32(self):
output = self.engine.render_to_string('if-tag-not32', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not33': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not33(self):
output = self.engine.render_to_string('if-tag-not33', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not34': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not34(self):
output = self.engine.render_to_string('if-tag-not34', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not35': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not35(self):
output = self.engine.render_to_string('if-tag-not35', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
# Various syntax errors
@setup({'if-tag-error01': '{% if %}yes{% endif %}'})
def test_if_tag_error01(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error01')
@setup({'if-tag-error02': '{% if foo and %}yes{% else %}no{% endif %}'})
def test_if_tag_error02(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error02', {'foo': True})
@setup({'if-tag-error03': '{% if foo or %}yes{% else %}no{% endif %}'})
def test_if_tag_error03(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error03', {'foo': True})
@setup({'if-tag-error04': '{% if not foo and %}yes{% else %}no{% endif %}'})
def test_if_tag_error04(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error04', {'foo': True})
@setup({'if-tag-error05': '{% if not foo or %}yes{% else %}no{% endif %}'})
def test_if_tag_error05(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error05', {'foo': True})
@setup({'if-tag-error06': '{% if abc def %}yes{% endif %}'})
def test_if_tag_error06(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error06')
@setup({'if-tag-error07': '{% if not %}yes{% endif %}'})
def test_if_tag_error07(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error07')
@setup({'if-tag-error08': '{% if and %}yes{% endif %}'})
def test_if_tag_error08(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error08')
@setup({'if-tag-error09': '{% if or %}yes{% endif %}'})
def test_if_tag_error09(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error09')
@setup({'if-tag-error10': '{% if == %}yes{% endif %}'})
def test_if_tag_error10(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error10')
@setup({'if-tag-error11': '{% if 1 == %}yes{% endif %}'})
def test_if_tag_error11(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error11')
@setup({'if-tag-error12': '{% if a not b %}yes{% endif %}'})
def test_if_tag_error12(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error12')
@setup({'if-tag-shortcircuit01': '{% if x.is_true or x.is_bad %}yes{% else %}no{% endif %}'})
def test_if_tag_shortcircuit01(self):
"""
If evaluations are shortcircuited where possible
"""
output = self.engine.render_to_string('if-tag-shortcircuit01', {'x': TestObj()})
self.assertEqual(output, 'yes')
@setup({'if-tag-shortcircuit02': '{% if x.is_false and x.is_bad %}yes{% else %}no{% endif %}'})
def test_if_tag_shortcircuit02(self):
"""
The is_bad() function should not be evaluated. If it is, an
exception is raised.
"""
output = self.engine.render_to_string('if-tag-shortcircuit02', {'x': TestObj()})
self.assertEqual(output, 'no')
@setup({'if-tag-badarg01': '{% if x|default_if_none:y %}yes{% endif %}'})
def test_if_tag_badarg01(self):
"""
Non-existent args
"""
output = self.engine.render_to_string('if-tag-badarg01')
self.assertEqual(output, '')
@setup({'if-tag-badarg02': '{% if x|default_if_none:y %}yes{% endif %}'})
def test_if_tag_badarg02(self):
output = self.engine.render_to_string('if-tag-badarg02', {'y': 0})
self.assertEqual(output, '')
@setup({'if-tag-badarg03': '{% if x|default_if_none:y %}yes{% endif %}'})
def test_if_tag_badarg03(self):
output = self.engine.render_to_string('if-tag-badarg03', {'y': 1})
self.assertEqual(output, 'yes')
@setup({'if-tag-badarg04': '{% if x|default_if_none:y %}yes{% else %}no{% endif %}'})
def test_if_tag_badarg04(self):
output = self.engine.render_to_string('if-tag-badarg04')
self.assertEqual(output, 'no')
@setup({'if-tag-single-eq': '{% if foo = bar %}yes{% else %}no{% endif %}'})
def test_if_tag_single_eq(self):
# A single equals sign is a syntax error.
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-single-eq', {'foo': 1})
@setup({'template': '{% if foo is True %}yes{% else %}no{% endif %}'})
def test_if_is_match(self):
output = self.engine.render_to_string('template', {'foo': True})
self.assertEqual(output, 'yes')
@setup({'template': '{% if foo is True %}yes{% else %}no{% endif %}'})
def test_if_is_no_match(self):
output = self.engine.render_to_string('template', {'foo': 1})
self.assertEqual(output, 'no')
@setup({'template': '{% if foo is bar %}yes{% else %}no{% endif %}'})
def test_if_is_variable_missing(self):
output = self.engine.render_to_string('template', {'foo': 1})
self.assertEqual(output, 'no')
@setup({'template': '{% if foo is bar %}yes{% else %}no{% endif %}'})
def test_if_is_both_variables_missing(self):
output = self.engine.render_to_string('template', {})
self.assertEqual(output, 'yes')
@setup({'template': '{% if foo is not None %}yes{% else %}no{% endif %}'})
def test_if_is_not_match(self):
# For this to act as a regression test, it's important not to use
# foo=True because True is (not None)
output = self.engine.render_to_string('template', {'foo': False})
self.assertEqual(output, 'yes')
@setup({'template': '{% if foo is not None %}yes{% else %}no{% endif %}'})
def test_if_is_not_no_match(self):
output = self.engine.render_to_string('template', {'foo': None})
self.assertEqual(output, 'no')
@setup({'template': '{% if foo is not bar %}yes{% else %}no{% endif %}'})
def test_if_is_not_variable_missing(self):
output = self.engine.render_to_string('template', {'foo': False})
self.assertEqual(output, 'yes')
@setup({'template': '{% if foo is not bar %}yes{% else %}no{% endif %}'})
def test_if_is_not_both_variables_missing(self):
output = self.engine.render_to_string('template', {})
self.assertEqual(output, 'no') | unknown | codeparrot/codeparrot-clean | ||
/**********************************************************************
symbol.h -
$Author$
created at: Tue Jul 8 15:49:54 JST 2014
Copyright (C) 2014 Yukihiro Matsumoto
**********************************************************************/
#include "darray.h"
#include "internal.h"
#include "internal/concurrent_set.h"
#include "internal/error.h"
#include "internal/gc.h"
#include "internal/hash.h"
#include "internal/object.h"
#include "internal/symbol.h"
#include "internal/vm.h"
#include "probes.h"
#include "ruby/encoding.h"
#include "ruby/ractor.h"
#include "ruby/st.h"
#include "symbol.h"
#include "vm_sync.h"
#include "builtin.h"
#include "ruby/internal/attr/nonstring.h"
#if defined(USE_SYMBOL_GC) && !(USE_SYMBOL_GC+0)
# undef USE_SYMBOL_GC
# define USE_SYMBOL_GC 0
#else
# undef USE_SYMBOL_GC
# define USE_SYMBOL_GC 1
#endif
#if defined(SYMBOL_DEBUG) && (SYMBOL_DEBUG+0)
# undef SYMBOL_DEBUG
# define SYMBOL_DEBUG 1
#else
# undef SYMBOL_DEBUG
# define SYMBOL_DEBUG 0
#endif
#ifndef CHECK_ID_SERIAL
# define CHECK_ID_SERIAL SYMBOL_DEBUG
#endif
#define IDSET_ATTRSET_FOR_SYNTAX ((1U<<ID_LOCAL)|(1U<<ID_CONST))
#define IDSET_ATTRSET_FOR_INTERN (~(~0U<<(1<<ID_SCOPE_SHIFT)) & ~(1U<<ID_ATTRSET))
#define SYMBOL_PINNED_P(sym) (RSYMBOL(sym)->id&~ID_SCOPE_MASK)
#define STATIC_SYM2ID(sym) RSHIFT((VALUE)(sym), RUBY_SPECIAL_SHIFT)
static ID register_static_symid(ID, const char *, long, rb_encoding *);
#define REGISTER_SYMID(id, name) register_static_symid((id), (name), strlen(name), enc)
#include "id.c"
#define is_identchar(p,e,enc) (ISALNUM((unsigned char)*(p)) || (*(p)) == '_' || !ISASCII(*(p)))
#define op_tbl_count numberof(op_tbl)
STATIC_ASSERT(op_tbl_name_size, sizeof(op_tbl[0].name) == 3);
#define op_tbl_len(i) (!op_tbl[i].name[1] ? 1 : !op_tbl[i].name[2] ? 2 : 3)
#define GLOBAL_SYMBOLS_LOCKING(symbols) \
for (rb_symbols_t *symbols = &ruby_global_symbols, **locking = &symbols; \
locking; \
locking = NULL) \
RB_VM_LOCKING()
static void
Init_op_tbl(void)
{
int i;
rb_encoding *const enc = rb_usascii_encoding();
for (i = '!'; i <= '~'; ++i) {
if (!ISALNUM(i) && i != '_') {
char c = (char)i;
register_static_symid(i, &c, 1, enc);
}
}
for (i = 0; i < op_tbl_count; ++i) {
register_static_symid(op_tbl[i].token, op_tbl[i].name, op_tbl_len(i), enc);
}
}
static const int ID_ENTRY_UNIT = 512;
typedef struct {
rb_atomic_t next_id;
VALUE sym_set;
VALUE ids;
} rb_symbols_t;
rb_symbols_t ruby_global_symbols = {
.next_id = tNEXT_ID,
};
struct sym_set_static_sym_entry {
VALUE sym;
VALUE str;
};
#define SYM_SET_SYM_STATIC_TAG 1
static bool
sym_set_sym_static_p(VALUE sym)
{
return sym & SYM_SET_SYM_STATIC_TAG;
}
static VALUE
sym_set_static_sym_tag(struct sym_set_static_sym_entry *sym)
{
VALUE value = (VALUE)sym | SYM_SET_SYM_STATIC_TAG;
RUBY_ASSERT(IMMEDIATE_P(value));
RUBY_ASSERT(sym_set_sym_static_p(value));
return value;
}
static struct sym_set_static_sym_entry *
sym_set_static_sym_untag(VALUE sym)
{
RUBY_ASSERT(sym_set_sym_static_p(sym));
return (struct sym_set_static_sym_entry *)(sym & ~((VALUE)SYM_SET_SYM_STATIC_TAG));
}
static VALUE
sym_set_sym_get_str(VALUE sym)
{
VALUE str;
if (sym_set_sym_static_p(sym)) {
str = sym_set_static_sym_untag(sym)->str;
}
else {
RUBY_ASSERT(RB_TYPE_P(sym, T_SYMBOL));
str = RSYMBOL(sym)->fstr;
}
RUBY_ASSERT(RB_TYPE_P(str, T_STRING));
return str;
}
static VALUE
sym_set_hash(VALUE sym)
{
if (sym_set_sym_static_p(sym)) {
return (VALUE)rb_str_hash(sym_set_static_sym_untag(sym)->str);
}
else {
return (VALUE)RSYMBOL(sym)->hashval;
}
}
static bool
sym_set_cmp(VALUE a, VALUE b)
{
return rb_str_hash_cmp(sym_set_sym_get_str(a), sym_set_sym_get_str(b)) == false;
}
struct sym_id_entry {
VALUE sym;
VALUE str;
};
static void
sym_id_entry_list_mark(void *ptr)
{
rb_darray(struct sym_id_entry) ary = ptr;
struct sym_id_entry *entry;
rb_darray_foreach(ary, i, entry) {
// sym must be pinned because it may be used in places that don't
// support compaction
rb_gc_mark(entry->sym);
rb_gc_mark_movable(entry->str);
}
}
static void
sym_id_entry_list_free(void *ptr)
{
rb_darray_free_sized(ptr, struct sym_id_entry);
}
static size_t
sym_id_entry_list_memsize(const void *ptr)
{
const rb_darray(struct sym_id_entry) ary = ptr;
return rb_darray_memsize(ary);
}
static void
sym_id_entry_list_compact(void *ptr)
{
rb_darray(struct sym_id_entry) ary = ptr;
struct sym_id_entry *entry;
rb_darray_foreach(ary, i, entry) {
entry->str = rb_gc_location(entry->str);
}
}
static const rb_data_type_t sym_id_entry_list_type = {
"symbol_id_entry_list",
{
sym_id_entry_list_mark,
sym_id_entry_list_free,
sym_id_entry_list_memsize,
sym_id_entry_list_compact,
},
0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
};
static int
sym_check_asciionly(VALUE str, bool fake_str)
{
if (!rb_enc_asciicompat(rb_enc_get(str))) return FALSE;
switch (rb_enc_str_coderange(str)) {
case ENC_CODERANGE_BROKEN:
if (fake_str) {
str = rb_enc_str_new(RSTRING_PTR(str), RSTRING_LEN(str), rb_enc_get(str));
}
rb_raise(rb_eEncodingError, "invalid symbol in encoding %s :%+"PRIsVALUE,
rb_enc_name(rb_enc_get(str)), str);
case ENC_CODERANGE_7BIT:
return TRUE;
}
return FALSE;
}
static VALUE
dup_string_for_create(VALUE str)
{
rb_encoding *enc = rb_enc_get(str);
str = rb_enc_str_new(RSTRING_PTR(str), RSTRING_LEN(str), enc);
rb_encoding *ascii = rb_usascii_encoding();
if (enc != ascii && sym_check_asciionly(str, false)) {
rb_enc_associate(str, ascii);
}
OBJ_FREEZE(str);
str = rb_fstring(str);
return str;
}
static int
rb_str_symname_type(VALUE name, unsigned int allowed_attrset)
{
const char *ptr = StringValuePtr(name);
long len = RSTRING_LEN(name);
int type = rb_enc_symname_type(ptr, len, rb_enc_get(name), allowed_attrset);
RB_GC_GUARD(name);
return type;
}
static ID
next_id_base(void)
{
rb_atomic_t serial = RUBY_ATOMIC_FETCH_ADD(ruby_global_symbols.next_id, 1);
return (ID)serial << ID_SCOPE_SHIFT;
}
static void
set_id_entry(rb_symbols_t *symbols, rb_id_serial_t num, VALUE str, VALUE sym)
{
ASSERT_vm_locking();
RUBY_ASSERT_BUILTIN_TYPE(str, T_STRING);
RUBY_ASSERT_BUILTIN_TYPE(sym, T_SYMBOL);
size_t idx = num / ID_ENTRY_UNIT;
VALUE id_entry_list, ids = symbols->ids;
rb_darray(struct sym_id_entry) entries;
if (idx >= (size_t)RARRAY_LEN(ids) || NIL_P(id_entry_list = rb_ary_entry(ids, (long)idx))) {
rb_darray_make(&entries, ID_ENTRY_UNIT);
id_entry_list = TypedData_Wrap_Struct(0, &sym_id_entry_list_type, entries);
rb_ary_store(ids, (long)idx, id_entry_list);
}
else {
entries = RTYPEDDATA_GET_DATA(id_entry_list);
}
idx = num % ID_ENTRY_UNIT;
struct sym_id_entry *entry = rb_darray_ref(entries, idx);
RUBY_ASSERT(entry->str == 0);
RUBY_ASSERT(entry->sym == 0);
RB_OBJ_WRITE(id_entry_list, &entry->str, str);
RB_OBJ_WRITE(id_entry_list, &entry->sym, sym);
}
static VALUE
sym_set_create(VALUE sym, void *data)
{
bool create_dynamic_symbol = (bool)data;
struct sym_set_static_sym_entry *static_sym_entry = sym_set_static_sym_untag(sym);
VALUE str = dup_string_for_create(static_sym_entry->str);
if (create_dynamic_symbol) {
NEWOBJ_OF(obj, struct RSymbol, rb_cSymbol, T_SYMBOL | FL_WB_PROTECTED, sizeof(struct RSymbol), 0);
rb_encoding *enc = rb_enc_get(str);
rb_enc_set_index((VALUE)obj, rb_enc_to_index(enc));
RB_OBJ_WRITE((VALUE)obj, &obj->fstr, str);
RB_OBJ_SET_FROZEN_SHAREABLE((VALUE)obj);
int id = rb_str_symname_type(str, IDSET_ATTRSET_FOR_INTERN);
if (id < 0) id = ID_INTERNAL;
obj->id = id;
obj->hashval = rb_str_hash(str);
RUBY_DTRACE_CREATE_HOOK(SYMBOL, RSTRING_PTR(obj->fstr));
return (VALUE)obj;
}
else {
struct sym_set_static_sym_entry *new_static_sym_entry = xmalloc(sizeof(struct sym_set_static_sym_entry));
new_static_sym_entry->str = str;
VALUE static_sym = static_sym_entry->sym;
if (static_sym == 0) {
ID id = rb_str_symname_type(str, IDSET_ATTRSET_FOR_INTERN);
if (id == (ID)-1) id = ID_INTERNAL;
id |= next_id_base();
id |= ID_STATIC_SYM;
static_sym = STATIC_ID2SYM(id);
}
new_static_sym_entry->sym = static_sym;
RB_VM_LOCKING() {
set_id_entry(&ruby_global_symbols, rb_id_to_serial(STATIC_SYM2ID(static_sym)), str, static_sym);
}
return sym_set_static_sym_tag(new_static_sym_entry);
}
}
static void
sym_set_free(VALUE sym)
{
if (sym_set_sym_static_p(sym)) {
xfree(sym_set_static_sym_untag(sym));
}
}
static const struct rb_concurrent_set_funcs sym_set_funcs = {
.hash = sym_set_hash,
.cmp = sym_set_cmp,
.create = sym_set_create,
.free = sym_set_free,
};
static VALUE
sym_set_entry_to_sym(VALUE entry)
{
if (sym_set_sym_static_p(entry)) {
RUBY_ASSERT(STATIC_SYM_P(sym_set_static_sym_untag(entry)->sym));
if (!STATIC_SYM_P(sym_set_static_sym_untag(entry)->sym)) rb_bug("not sym");
return sym_set_static_sym_untag(entry)->sym;
}
else {
RUBY_ASSERT(DYNAMIC_SYM_P(entry));
if (!DYNAMIC_SYM_P(entry)) rb_bug("not sym");
return entry;
}
}
static VALUE
sym_find_or_insert_dynamic_symbol(rb_symbols_t *symbols, const VALUE str)
{
struct sym_set_static_sym_entry static_sym = {
.str = str
};
return sym_set_entry_to_sym(
rb_concurrent_set_find_or_insert(&symbols->sym_set, sym_set_static_sym_tag(&static_sym), (void *)true)
);
}
static VALUE
sym_find_or_insert_static_symbol(rb_symbols_t *symbols, const VALUE str)
{
struct sym_set_static_sym_entry static_sym = {
.str = str
};
return sym_set_entry_to_sym(
rb_concurrent_set_find_or_insert(&symbols->sym_set, sym_set_static_sym_tag(&static_sym), (void *)false)
);
}
static VALUE
sym_find_or_insert_static_symbol_id(rb_symbols_t *symbols, const VALUE str, ID id)
{
struct sym_set_static_sym_entry static_sym = {
.sym = STATIC_ID2SYM(id),
.str = str,
};
return sym_set_entry_to_sym(
rb_concurrent_set_find_or_insert(&symbols->sym_set, sym_set_static_sym_tag(&static_sym), (void *)false)
);
}
void
Init_sym(void)
{
rb_symbols_t *symbols = &ruby_global_symbols;
symbols->sym_set = rb_concurrent_set_new(&sym_set_funcs, 1024);
symbols->ids = rb_ary_hidden_new(0);
Init_op_tbl();
Init_id();
}
void
rb_sym_global_symbols_mark_and_move(void)
{
rb_symbols_t *symbols = &ruby_global_symbols;
rb_gc_mark_and_move(&symbols->sym_set);
rb_gc_mark_and_move(&symbols->ids);
}
static int
rb_free_global_symbol_table_i(VALUE *sym_ptr, void *data)
{
sym_set_free(*sym_ptr);
return ST_DELETE;
}
void
rb_free_global_symbol_table(void)
{
rb_concurrent_set_foreach_with_replace(ruby_global_symbols.sym_set, rb_free_global_symbol_table_i, NULL);
}
WARN_UNUSED_RESULT(static ID lookup_str_id(VALUE str));
WARN_UNUSED_RESULT(static VALUE get_id_str(ID id));
ID
rb_id_attrset(ID id)
{
int scope;
if (!is_notop_id(id)) {
switch (id) {
case tAREF: case tASET:
return tASET; /* only exception */
}
rb_name_error(id, "cannot make operator ID :%"PRIsVALUE" attrset",
rb_id2str(id));
}
else {
scope = id_type(id);
switch (scope) {
case ID_LOCAL: case ID_INSTANCE: case ID_GLOBAL:
case ID_CONST: case ID_CLASS: case ID_INTERNAL:
break;
case ID_ATTRSET:
return id;
default:
{
VALUE str = get_id_str(id);
if (str != 0) {
rb_name_error(id, "cannot make unknown type ID %d:%"PRIsVALUE" attrset",
scope, str);
}
else {
rb_name_error_str(Qnil, "cannot make unknown type anonymous ID %d:%"PRIxVALUE" attrset",
scope, (VALUE)id);
}
}
}
}
bool error = false;
/* make new symbol and ID */
VALUE str = get_id_str(id);
if (str) {
str = rb_str_dup(str);
rb_str_cat(str, "=", 1);
if (sym_check_asciionly(str, false)) {
rb_enc_associate(str, rb_usascii_encoding());
}
VALUE sym = sym_find_or_insert_static_symbol(&ruby_global_symbols, str);
id = rb_sym2id(sym);
}
else {
error = true;
}
if (error) {
RBIMPL_ATTR_NONSTRING_ARRAY() static const char id_types[][8] = {
"local",
"instance",
"invalid",
"global",
"attrset",
"const",
"class",
"internal",
};
rb_name_error(id, "cannot make anonymous %.*s ID %"PRIxVALUE" attrset",
(int)sizeof(id_types[0]), id_types[scope], (VALUE)id);
}
return id;
}
static int
is_special_global_name(const char *m, const char *e, rb_encoding *enc)
{
int mb = 0;
if (m >= e) return 0;
if (is_global_name_punct(*m)) {
++m;
}
else if (*m == '-') {
if (++m >= e) return 0;
if (is_identchar(m, e, enc)) {
if (!ISASCII(*m)) mb = 1;
m += rb_enc_mbclen(m, e, enc);
}
}
else {
if (!ISDIGIT(*m)) return 0;
do {
if (!ISASCII(*m)) mb = 1;
++m;
} while (m < e && ISDIGIT(*m));
}
return m == e ? mb + 1 : 0;
}
int
rb_symname_p(const char *name)
{
return rb_enc_symname_p(name, rb_ascii8bit_encoding());
}
int
rb_enc_symname_p(const char *name, rb_encoding *enc)
{
return rb_enc_symname2_p(name, strlen(name), enc);
}
/* Does name[0..nlen) start with a character that qualifies a constant?
 * ASCII: plain ISUPPER.  Non-ASCII: uppercase code points qualify,
 * lowercase disqualify; for Unicode encodings titlecase letters also
 * qualify, and for non-Unicode encodings a character that case-folds to
 * something different from itself is treated as uppercase. */
static int
rb_sym_constant_char_p(const char *name, long nlen, rb_encoding *enc)
{
    int c, len;
    const char *end = name + nlen;
    if (nlen < 1) return FALSE;
    if (ISASCII(*name)) return ISUPPER(*name);
    c = rb_enc_precise_mbclen(name, end, enc);
    if (!MBCLEN_CHARFOUND_P(c)) return FALSE; /* broken byte sequence */
    len = MBCLEN_CHARFOUND_LEN(c);
    c = rb_enc_mbc_to_codepoint(name, end, enc);
    if (rb_enc_isupper(c, enc)) return TRUE;
    if (rb_enc_islower(c, enc)) return FALSE;
    if (ONIGENC_IS_UNICODE(enc)) {
        static int ctype_titlecase = 0; /* lazily-resolved Oniguruma ctype id */
        if (!ctype_titlecase) {
            static const UChar cname[] = "titlecaseletter";
            static const UChar *const end = cname + sizeof(cname) - 1;
            ctype_titlecase = ONIGENC_PROPERTY_NAME_TO_CTYPE(enc, cname, end);
        }
        if (rb_enc_isctype(c, ctype_titlecase, enc)) return TRUE;
    }
    else {
        /* fallback to case-folding */
        OnigUChar fold[ONIGENC_GET_CASE_FOLD_CODES_MAX_NUM];
        const OnigUChar *beg = (const OnigUChar *)name;
        int r = enc->mbc_case_fold(ONIGENC_CASE_FOLD,
                                   &beg, (const OnigUChar *)end,
                                   fold, enc);
        if (r > 0 && (r != len || memcmp(fold, name, r)))
            return TRUE;
    }
    return FALSE;
}
/* Classification of a would-be symbol name by its leading character(s):
 * kind says whether the name is already complete (stophere), needs an
 * identifier tail (needmore), or can never be valid (invalid); type is
 * the tentative ruby_id_types scope, and nread the bytes consumed.
 * NOTE(review): "synmane" appears to be a long-standing typo for
 * "symname"; the tag name is kept as-is since other code may refer to it. */
struct enc_synmane_type_leading_chars_tag {
    const enum { invalid, stophere, needmore, } kind;
    const enum ruby_id_types type;
    const long nread;
};
/* local shorthand for the compound literals below */
#define t struct enc_synmane_type_leading_chars_tag

/* Examine the first byte(s) of name[0..len) and classify the symbol
 * name.  allowed_attrset (a bitmask of 1 << ID_*) only matters in the
 * '!' case, where it decides whether more characters may follow. */
static struct enc_synmane_type_leading_chars_tag
enc_synmane_type_leading_chars(const char *name, long len, rb_encoding *enc, int allowed_attrset)
{
    const char *m = name;
    const char *e = m + len;
    if (! rb_enc_asciicompat(enc)) {
        return (t) { invalid, 0, 0, };
    }
    else if (! m) {
        return (t) { invalid, 0, 0, };
    }
    else if ( len <= 0 ) {
        return (t) { invalid, 0, 0, };
    }
    switch (*m) {
      case '\0':
        return (t) { invalid, 0, 0, };

      case '$':
        /* "$~"-style specials are complete; otherwise an identifier follows */
        if (is_special_global_name(++m, e, enc)) {
            return (t) { stophere, ID_GLOBAL, len, };
        }
        else {
            return (t) { needmore, ID_GLOBAL, 1, };
        }

      case '@':
        /* "@" instance variable, "@@" class variable */
        switch (*++m) {
          default: return (t) { needmore, ID_INSTANCE, 1, };
          case '@': return (t) { needmore, ID_CLASS, 2, };
        }

      case '<':
        /* <, <<, <=, <=> */
        switch (*++m) {
          default: return (t) { stophere, ID_INTERNAL, 1, };
          case '<': return (t) { stophere, ID_INTERNAL, 2, };
          case '=':
            switch (*++m) {
              default: return (t) { stophere, ID_INTERNAL, 2, };
              case '>': return (t) { stophere, ID_INTERNAL, 3, };
            }
        }

      case '>':
        /* >, >>, >= */
        switch (*++m) {
          default: return (t) { stophere, ID_INTERNAL, 1, };
          case '>': case '=': return (t) { stophere, ID_INTERNAL, 2, };
        }

      case '=':
        /* =~, ==, === ("=" alone is not a valid name) */
        switch (*++m) {
          default: return (t) { invalid, 0, 1, };
          case '~': return (t) { stophere, ID_INTERNAL, 2, };
          case '=':
            switch (*++m) {
              default: return (t) { stophere, ID_INTERNAL, 2, };
              case '=': return (t) { stophere, ID_INTERNAL, 3, };
            }
        }

      case '*':
        /* *, ** */
        switch (*++m) {
          default: return (t) { stophere, ID_INTERNAL, 1, };
          case '*': return (t) { stophere, ID_INTERNAL, 2, };
        }

      case '+': case '-':
        /* binary + / - or unary +@ / -@ */
        switch (*++m) {
          default: return (t) { stophere, ID_INTERNAL, 1, };
          case '@': return (t) { stophere, ID_INTERNAL, 2, };
        }

      case '|': case '^': case '&': case '/': case '%': case '~': case '`':
        return (t) { stophere, ID_INTERNAL, 1, };

      case '[':
        /* [], []= */
        switch (*++m) {
          default: return (t) { needmore, ID_INTERNAL, 0, };
          case ']':
            switch (*++m) {
              default: return (t) { stophere, ID_INTERNAL, 2, };
              case '=': return (t) { stophere, ID_INTERNAL, 3, };
            }
        }

      case '!':
        /* !, !=, !~ */
        switch (*++m) {
          case '=': case '~': return (t) { stophere, ID_INTERNAL, 2, };
          default:
            if (allowed_attrset & (1U << ID_INTERNAL)) {
                return (t) { needmore, ID_INTERNAL, 1, };
            }
            else {
                return (t) { stophere, ID_INTERNAL, 1, };
            }
        }

      default:
        /* plain identifier: a leading uppercase character means constant */
        if (rb_sym_constant_char_p(name, len, enc)) {
            return (t) { needmore, ID_CONST, 0, };
        }
        else {
            return (t) { needmore, ID_LOCAL, 0, };
        }
    }
}
#undef t
/* Classify a symbol name: returns its ruby_id_types value, ID_ATTRSET
 * for "name=" setters whose base type is present in +allowed_attrset+
 * (a bitmask of 1 << type), or -1 when the name is not a valid symbol. */
int
rb_enc_symname_type(const char *name, long len, rb_encoding *enc, unsigned int allowed_attrset)
{
    const struct enc_synmane_type_leading_chars_tag f =
        enc_synmane_type_leading_chars(name, len, enc, allowed_attrset);
    const char *m = name + f.nread;
    const char *e = name + len;
    int type = (int)f.type;
    switch (f.kind) {
      case invalid: return -1;
      case stophere: break;
      case needmore:
        if (m >= e || (*m != '_' && !ISALPHA(*m) && ISASCII(*m))) {
            /* not an identifier start; the only remaining legal form is
             * an attrset ("...=") built on an allowed base type */
            if (len > 1 && *(e-1) == '=') {
                type = rb_enc_symname_type(name, len-1, enc, allowed_attrset);
                if (allowed_attrset & (1U << type)) return ID_ATTRSET;
            }
            return -1;
        }
        /* consume the identifier tail */
        while (m < e && is_identchar(m, e, enc)) m += rb_enc_mbclen(m, e, enc);
        if (m >= e) break;
        switch (*m) {
          case '!': case '?':
            /* "foo!"/"foo?" are not allowed on globals, cvars or ivars */
            if (type == ID_GLOBAL || type == ID_CLASS || type == ID_INSTANCE) return -1;
            type = ID_INTERNAL;
            ++m;
            /* fall into the '=' case only when exactly "=" remains */
            if (m + 1 < e || *m != '=') break;
            /* fall through */
          case '=':
            if (!(allowed_attrset & (1U << type))) return -1;
            type = ID_ATTRSET;
            ++m;
            break;
        }
    }
    return m == e ? type : -1;
}

/* Validity check: any recognized type (attrsets allowed at the syntax
 * level) makes the name a valid symbol. */
int
rb_enc_symname2_p(const char *name, long len, rb_encoding *enc)
{
    return rb_enc_symname_type(name, len, enc, IDSET_ATTRSET_FOR_SYNTAX) != -1;
}
/* Fetch the sym_id_entry registered for serial +num+, or NULL when the
 * serial is 0 or not yet allocated.  symbols->ids is an array of darray
 * pages holding ID_ENTRY_UNIT entries each; the lookup is performed
 * under the global symbols lock. */
static struct sym_id_entry *
get_id_serial_entry(rb_id_serial_t num)
{
    struct sym_id_entry *entry = NULL;
    GLOBAL_SYMBOLS_LOCKING(symbols) {
        if (num && num < RUBY_ATOMIC_LOAD(symbols->next_id)) {
            size_t idx = num / ID_ENTRY_UNIT;
            VALUE ids = symbols->ids;
            VALUE id_entry_list;
            if (idx < (size_t)RARRAY_LEN(ids) && !NIL_P(id_entry_list = rb_ary_entry(ids, (long)idx))) {
                rb_darray(struct sym_id_entry) entries = RTYPEDDATA_GET_DATA(id_entry_list);
                size_t pos = (size_t)(num % ID_ENTRY_UNIT);
                RUBY_ASSERT(pos < rb_darray_size(entries));
                entry = rb_darray_ref(entries, pos);
            }
        }
    }
    return entry;
}
/* Return the Symbol registered for +id+ in the serial table, or 0 when
 * there is no entry. */
static VALUE
get_id_sym(ID id)
{
    struct sym_id_entry *e = get_id_serial_entry(rb_id_to_serial(id));
    if (e == NULL) {
        return 0;
    }
    return e->sym;
}
/* Return the name string registered for +id+ in the serial table, or 0
 * when there is no entry. */
static VALUE
get_id_str(ID id)
{
    struct sym_id_entry *e = get_id_serial_entry(rb_id_to_serial(id));
    if (e == NULL) {
        return 0;
    }
    return e->str;
}
/* A static ID is valid iff the serial table maps it back to the same
 * static-Symbol encoding. */
int
rb_static_id_valid_p(ID id)
{
    return STATIC_ID2SYM(id) == get_id_sym(id);
}
/* Map an id serial back to a full ID.  Serials in the non-operator range
 * consult the serial table (returning the registered symbol's ID when
 * present, else synthesizing an internal static-symbol ID); operator
 * serials are the ID itself. */
static inline ID
rb_id_serial_to_id(rb_id_serial_t num)
{
    if (is_notop_id((ID)num)) {
        struct sym_id_entry *entry = get_id_serial_entry(num);
        if (entry && entry->sym != 0) {
            return SYM2ID(entry->sym);
        }
        else {
            return ((ID)num << ID_SCOPE_SHIFT) | ID_INTERNAL | ID_STATIC_SYM;
        }
    }
    else {
        return (ID)num;
    }
}

/* Register a predefined static symbol under the fixed +id+: build and
 * freeze the name string, dedup it as an fstring, then insert it into
 * the global symbol set. */
static ID
register_static_symid(ID id, const char *name, long len, rb_encoding *enc)
{
    VALUE str = rb_enc_str_new(name, len, enc);
    OBJ_FREEZE(str);
    str = rb_fstring(str);
    RUBY_DTRACE_CREATE_HOOK(SYMBOL, RSTRING_PTR(str));
    sym_find_or_insert_static_symbol_id(&ruby_global_symbols, str, id);
    return id;
}
/* Look up +str+ in the global symbol set without creating anything.
 * Returns the Symbol VALUE, or 0 when no symbol with that name exists. */
static VALUE
sym_find(VALUE str)
{
    struct sym_set_static_sym_entry static_sym = {
        .str = str
    };
    VALUE found = rb_concurrent_set_find(&ruby_global_symbols.sym_set,
                                         sym_set_static_sym_tag(&static_sym));
    return found ? sym_set_entry_to_sym(found) : 0;
}
/* Resolve +str+ to a permanent ID without interning.  Returns 0 when the
 * string is not registered, or is registered only as a dynamic symbol
 * that has not been assigned a permanent id yet. */
static ID
lookup_str_id(VALUE str)
{
    VALUE sym = sym_find(str);
    if (sym == 0) {
        return (ID)0;
    }
    if (STATIC_SYM_P(sym)) {
        return STATIC_SYM2ID(sym);
    }
    else if (DYNAMIC_SYM_P(sym)) {
        ID id = RSYMBOL(sym)->id;
        /* only ids with a serial part beyond the scope bits count */
        if (id & ~ID_SCOPE_MASK) return id;
    }
    else {
        rb_bug("non-symbol object %s:%"PRIxVALUE" for %"PRIsVALUE" in symbol table",
               rb_builtin_class_name(sym), sym, str);
    }
    return (ID)0;
}
/* Intern name[0..len) in +enc+ as a static symbol and return its ID.
 * A stack-allocated fake string avoids a temporary allocation. */
ID
rb_intern3(const char *name, long len, rb_encoding *enc)
{
    struct RString fake_str = {RBASIC_INIT};
    VALUE str = rb_setup_fake_str(&fake_str, name, len, enc);
    OBJ_FREEZE(str);
    VALUE sym = sym_find_or_insert_static_symbol(&ruby_global_symbols, str);
    return rb_sym2id(sym);
}

/* Intern name[0..len) as US-ASCII. */
ID
rb_intern2(const char *name, long len)
{
    return rb_intern3(name, len, rb_usascii_encoding());
}

#undef rb_intern
/* Intern a NUL-terminated US-ASCII name. */
ID
rb_intern(const char *name)
{
    return rb_intern2(name, strlen(name));
}

/* Intern a Ruby string as a static symbol and return its ID. */
ID
rb_intern_str(VALUE str)
{
    VALUE sym = sym_find_or_insert_static_symbol(&ruby_global_symbols, str);
    return SYM2ID(sym);
}
/* Is +obj+ the global symbol table's backing concurrent set? */
bool
rb_obj_is_symbol_table(VALUE obj)
{
    return obj == ruby_global_symbols.sym_set;
}

struct global_symbol_table_foreach_weak_reference_data {
    int (*callback)(VALUE *key, void *data);
    void *data;
};

/* Adapter for rb_concurrent_set_foreach_with_replace: for static-symbol
 * entries, hand the callback the entry's string slot; otherwise hand it
 * the (dynamic symbol) key slot itself. */
static int
rb_sym_global_symbol_table_foreach_weak_reference_i(VALUE *key, void *d)
{
    struct global_symbol_table_foreach_weak_reference_data *data = d;
    VALUE sym = *key;
    if (sym_set_sym_static_p(sym)) {
        struct sym_set_static_sym_entry *static_sym = sym_set_static_sym_untag(sym);
        return data->callback(&static_sym->str, data->data);
    }
    else {
        return data->callback(key, data->data);
    }
}

/* Invoke +callback+ on every weak-reference slot held by the global
 * symbol table.  No-op before the table has been created. */
void
rb_sym_global_symbol_table_foreach_weak_reference(int (*callback)(VALUE *key, void *data), void *data)
{
    if (!ruby_global_symbols.sym_set) return;
    struct global_symbol_table_foreach_weak_reference_data foreach_data = {
        .callback = callback,
        .data = data,
    };
    rb_concurrent_set_foreach_with_replace(ruby_global_symbols.sym_set, rb_sym_global_symbol_table_foreach_weak_reference_i, &foreach_data);
}
/* GC hook: remove a dying dynamic symbol from the global table.  fstr is
 * cleared so the removal happens at most once per symbol. */
void
rb_gc_free_dsymbol(VALUE sym)
{
    VALUE str = RSYMBOL(sym)->fstr;
    if (str) {
        rb_concurrent_set_delete_by_identity(ruby_global_symbols.sym_set, sym);
        RSYMBOL(sym)->fstr = 0;
    }
}

/*
 *  call-seq:
 *    intern -> symbol
 *
 *  :include: doc/string/intern.rdoc
 *
 */
VALUE
rb_str_intern(VALUE str)
{
    return sym_find_or_insert_dynamic_symbol(&ruby_global_symbols, str);
}
/* Return the ID for +sym+.  A dynamic symbol is lazily assigned a
 * permanent serial on first use, under the global symbols lock.
 * Raises TypeError for non-symbols. */
ID
rb_sym2id(VALUE sym)
{
    ID id = 0;
    if (STATIC_SYM_P(sym)) {
        id = STATIC_SYM2ID(sym);
    }
    else if (DYNAMIC_SYM_P(sym)) {
        GLOBAL_SYMBOLS_LOCKING(symbols) {
            RUBY_ASSERT(!rb_objspace_garbage_object_p(sym));
            id = RSYMBOL(sym)->id;
            if (UNLIKELY(!(id & ~ID_SCOPE_MASK))) {
                /* no serial assigned yet: allocate one and register it */
                VALUE fstr = RSYMBOL(sym)->fstr;
                ID num = next_id_base();
                RSYMBOL(sym)->id = id |= num;
                /* make it permanent object */
                set_id_entry(symbols, rb_id_to_serial(num), fstr, sym);
            }
        }
    }
    else {
        rb_raise(rb_eTypeError, "wrong argument type %s (expected Symbol)",
                 rb_builtin_class_name(sym));
    }
    return id;
}

#undef rb_id2sym
/* Return the Symbol for +x+: static ids map directly, dynamic ids go
 * through the serial table. */
VALUE
rb_id2sym(ID x)
{
    if (!DYNAMIC_ID_P(x)) return STATIC_ID2SYM(x);
    return get_id_sym(x);
}
/*
 *  call-seq:
 *    name -> string
 *
 *  Returns a frozen string representation of +self+ (not including the leading colon):
 *
 *    :foo.name         # => "foo"
 *    :foo.name.frozen? # => true
 *
 *  Related: Symbol#to_s, Symbol#inspect.
 */
VALUE
rb_sym2str(VALUE sym)
{
    VALUE str;
    if (DYNAMIC_SYM_P(sym)) {
        str = RSYMBOL(sym)->fstr;
        RUBY_ASSERT_BUILTIN_TYPE(str, T_STRING);
    }
    else {
        str = rb_id2str(STATIC_SYM2ID(sym));
        if (str) RUBY_ASSERT_BUILTIN_TYPE(str, T_STRING);
    }
    return str;
}

/* Return the name fstring for +id+, or 0 when unknown. */
VALUE
rb_id2str(ID id)
{
    return get_id_str(id);
}

/* Return the C string for +id+, or NULL when unknown.  The pointer is
 * owned by the name string; callers must not free it. */
const char *
rb_id2name(ID id)
{
    VALUE str = rb_id2str(id);
    if (!str) return 0;
    return RSTRING_PTR(str);
}
/* Mint a fresh internal-scope static-symbol ID not tied to any name. */
ID
rb_make_internal_id(void)
{
    return next_id_base() | ID_INTERNAL | ID_STATIC_SYM;
}

/* Mint the n-th temporary ID, counting down from near the top of the
 * serial space; raises RuntimeError when it would collide with the
 * normally-allocated serials. */
ID
rb_make_temporary_id(size_t n)
{
    const ID max_id = RB_ID_SERIAL_MAX & ~0xffff;
    const ID id = max_id - (ID)n;
    if (id < RUBY_ATOMIC_LOAD(ruby_global_symbols.next_id)) {
        rb_raise(rb_eRuntimeError, "too big to make temporary ID: %" PRIdSIZE, n);
    }
    return (id << ID_SCOPE_SHIFT) | ID_STATIC_SYM | ID_INTERNAL;
}
/* Collector callback: push each live symbol onto the array passed in
 * +data+; garbage dynamic symbols are deleted from the table instead. */
static int
symbols_i(VALUE *key, void *data)
{
    VALUE ary = (VALUE)data;
    VALUE sym = (VALUE)*key;
    if (sym_set_sym_static_p(sym)) {
        rb_ary_push(ary, sym_set_static_sym_untag(sym)->sym);
    }
    else if (rb_objspace_garbage_object_p(sym)) {
        return ST_DELETE;
    }
    else {
        rb_ary_push(ary, sym);
    }
    return ST_CONTINUE;
}

/* Return an array snapshot of every symbol currently in the global
 * table, taken under the global symbols lock. */
VALUE
rb_sym_all_symbols(void)
{
    VALUE ary;
    GLOBAL_SYMBOLS_LOCKING(symbols) {
        ary = rb_ary_new2(rb_concurrent_set_size(symbols->sym_set));
        rb_concurrent_set_foreach_with_replace(symbols->sym_set, symbols_i, (void *)ary);
    }
    return ary;
}

/* Number of permanent ids handed out so far (serial counter minus one). */
size_t
rb_sym_immortal_count(void)
{
    return (size_t)(RUBY_ATOMIC_LOAD(ruby_global_symbols.next_id) - 1);
}
/* Exported predicates over an ID's scope bits; thin wrappers around the
 * internal is_*_id() / is_*_sym() tests. */
int
rb_is_const_id(ID id)
{
    return is_const_id(id);
}

int
rb_is_class_id(ID id)
{
    return is_class_id(id);
}

int
rb_is_global_id(ID id)
{
    return is_global_id(id);
}

int
rb_is_instance_id(ID id)
{
    return is_instance_id(id);
}

int
rb_is_attrset_id(ID id)
{
    return is_attrset_id(id);
}

int
rb_is_local_id(ID id)
{
    return is_local_id(id);
}

/* "junk" is the public name for internal-scope ids. */
int
rb_is_junk_id(ID id)
{
    return is_internal_id(id);
}

int
rb_is_const_sym(VALUE sym)
{
    return is_const_sym(sym);
}

int
rb_is_attrset_sym(VALUE sym)
{
    return is_attrset_sym(sym);
}
/* Look up an existing ID only (never interns).  Accepts a Symbol or
 * anything convertible with to_str; returns 0 when the name has no
 * permanent id.  For a non-pinned dynamic symbol, or after a to_str
 * conversion, the name string is written back through namep
 * (NOTE(review): presumably so callers can report the name — see call
 * sites).  Raises TypeError for other argument types. */
ID
rb_check_id(volatile VALUE *namep)
{
    VALUE tmp;
    VALUE name = *namep;
    if (STATIC_SYM_P(name)) {
        return STATIC_SYM2ID(name);
    }
    else if (DYNAMIC_SYM_P(name)) {
        if (SYMBOL_PINNED_P(name)) {
            return RSYMBOL(name)->id;
        }
        else {
            *namep = RSYMBOL(name)->fstr;
            return 0;
        }
    }
    else if (!RB_TYPE_P(name, T_STRING)) {
        tmp = rb_check_string_type(name);
        if (NIL_P(tmp)) {
            rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a symbol nor a string",
                     name);
        }
        name = tmp;
        *namep = name;
    }
    sym_check_asciionly(name, false);
    return lookup_str_id(name);
}

// Used by yjit for handling .send without throwing exceptions
/* Exception-free variant of rb_check_id: returns 0 for unregistered
 * names and unsupported argument types instead of raising. */
ID
rb_get_symbol_id(VALUE name)
{
    if (STATIC_SYM_P(name)) {
        return STATIC_SYM2ID(name);
    }
    else if (DYNAMIC_SYM_P(name)) {
        if (SYMBOL_PINNED_P(name)) {
            return RSYMBOL(name)->id;
        }
        else {
            return 0;
        }
    }
    else if (RB_TYPE_P(name, T_STRING)) {
        return lookup_str_id(name);
    }
    else {
        return 0;
    }
}
/* Like rb_check_id but returns the Symbol itself, or Qnil when the name
 * has never been interned.  Never creates a new symbol.  to_str-able
 * arguments are converted and the string written back through namep. */
VALUE
rb_check_symbol(volatile VALUE *namep)
{
    VALUE sym;
    VALUE tmp;
    VALUE name = *namep;
    if (STATIC_SYM_P(name)) {
        return name;
    }
    else if (DYNAMIC_SYM_P(name)) {
        RUBY_ASSERT(!rb_objspace_garbage_object_p(name));
        return name;
    }
    else if (!RB_TYPE_P(name, T_STRING)) {
        tmp = rb_check_string_type(name);
        if (NIL_P(tmp)) {
            rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a symbol nor a string",
                     name);
        }
        name = tmp;
        *namep = name;
    }
    sym_check_asciionly(name, false);
    if ((sym = sym_find(name)) != 0) {
        return sym;
    }
    return Qnil;
}

/* rb_check_id over a raw (ptr, len, enc) name, using a stack-allocated
 * fake string to avoid allocation; 0 when not registered. */
ID
rb_check_id_cstr(const char *ptr, long len, rb_encoding *enc)
{
    struct RString fake_str = {RBASIC_INIT};
    const VALUE name = rb_setup_fake_str(&fake_str, ptr, len, enc);
    sym_check_asciionly(name, true);
    return lookup_str_id(name);
}

/* rb_check_symbol over a raw (ptr, len, enc) name; Qnil when unknown. */
VALUE
rb_check_symbol_cstr(const char *ptr, long len, rb_encoding *enc)
{
    VALUE sym;
    struct RString fake_str = {RBASIC_INIT};
    const VALUE name = rb_setup_fake_str(&fake_str, ptr, len, enc);
    sym_check_asciionly(name, true);
    if ((sym = sym_find(name)) != 0) {
        return sym;
    }
    return Qnil;
}
#undef rb_sym_intern_ascii_cstr
#ifdef __clang__
NOINLINE(VALUE rb_sym_intern(const char *ptr, long len, rb_encoding *enc));
#else
FUNC_MINIMIZED(VALUE rb_sym_intern(const char *ptr, long len, rb_encoding *enc));
FUNC_MINIMIZED(VALUE rb_sym_intern_ascii(const char *ptr, long len));
FUNC_MINIMIZED(VALUE rb_sym_intern_ascii_cstr(const char *ptr));
#endif

/* Intern (ptr, len, enc) and return the Symbol, creating it if needed.
 * Uses a stack-allocated fake string for the lookup. */
VALUE
rb_sym_intern(const char *ptr, long len, rb_encoding *enc)
{
    struct RString fake_str = {RBASIC_INIT};
    const VALUE name = rb_setup_fake_str(&fake_str, ptr, len, enc);
    return rb_str_intern(name);
}

/* US-ASCII variant of rb_sym_intern. */
VALUE
rb_sym_intern_ascii(const char *ptr, long len)
{
    return rb_sym_intern(ptr, len, rb_usascii_encoding());
}

/* NUL-terminated US-ASCII variant of rb_sym_intern. */
VALUE
rb_sym_intern_ascii_cstr(const char *ptr)
{
    return rb_sym_intern_ascii(ptr, strlen(ptr));
}

/* Convert +obj+ to a Symbol via #to_sym, raising TypeError on failure. */
VALUE
rb_to_symbol_type(VALUE obj)
{
    return rb_convert_type_with_id(obj, T_SYMBOL, "Symbol", idTo_sym);
}
/* Predicates over a name string (not an ID): classify the string with
 * rb_str_symname_type and compare against the expected scope. */
int
rb_is_const_name(VALUE name)
{
    return rb_str_symname_type(name, 0) == ID_CONST;
}

int
rb_is_class_name(VALUE name)
{
    return rb_str_symname_type(name, 0) == ID_CLASS;
}

int
rb_is_instance_name(VALUE name)
{
    return rb_str_symname_type(name, 0) == ID_INSTANCE;
}

int
rb_is_local_name(VALUE name)
{
    return rb_str_symname_type(name, 0) == ID_LOCAL;
}
#include "id_table.c"
#include "symbol.rbinc" | c | github | https://github.com/ruby/ruby | symbol.c |
#
# (c) 2017, Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import time
import glob
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.utils.vars import merge_hash
PRIVATE_KEYS_RE = re.compile('__.+__')
class ActionModule(_ActionModule):
    """Action plugin that renders a Jinja2 template supplied via the task's
    ``src`` argument before delegating to the normal module execution."""

    def run(self, tmp=None, task_vars=None):
        """Render ``src`` (when present) and run the wrapped module.

        Returns an Ansible result dict; a template-resolution failure is
        reported as a ``failed`` result instead of raising.
        """
        if self._task.args.get('src'):
            try:
                self._handle_template()
            except ValueError as exc:
                # BUG FIX: exceptions have no ``.message`` attribute on
                # Python 3; stringify the exception instead.
                return dict(failed=True, msg=to_text(exc))
        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect
        return result

    def _get_working_path(self):
        # The role's own path takes precedence over the playbook basedir.
        cwd = self._loader.get_basedir()
        if self._task._role is not None:
            cwd = self._task._role._role_path
        return cwd

    def _handle_template(self):
        """Resolve ``src`` to a template file, render it with the task's
        templar, and replace ``self._task.args['src']`` with the result.

        Raises ValueError when the template cannot be found or read.
        """
        src = self._task.args.get('src')
        working_path = self._get_working_path()
        # BUG FIX: the original called urlsplit('src') on the literal
        # string 'src', so the "src is a URL" check could never fire;
        # inspect the actual value instead.
        if os.path.isabs(src) or urlsplit(src).scheme:
            source = src
        else:
            source = self._loader.path_dwim_relative(working_path, 'templates', src)
            if not source:
                source = self._loader.path_dwim_relative(working_path, src)
        if not os.path.exists(source):
            raise ValueError('path specified in src not found')
        try:
            with open(source, 'r') as f:
                template_data = to_text(f.read())
        except IOError:
            # BUG FIX: the original ``return dict(failed=True, ...)`` was
            # silently discarded by run(); raise so the failure is reported
            # through the existing ValueError handling.
            raise ValueError('unable to load src file')
        # Create a template search path in the following order:
        # [working_path, self_role_path, dependent_role_paths, dirname(source)]
        searchpath = [working_path]
        if self._task._role is not None:
            searchpath.append(self._task._role._role_path)
        # BUG FIX: hasattr was called with the typo "_block:" (trailing
        # colon), which is always False and made the dependent-role search
        # path unreachable.
        if hasattr(self._task, "_block"):
            dep_chain = self._task._block.get_dep_chain()
            if dep_chain is not None:
                for role in dep_chain:
                    searchpath.append(role._role_path)
        searchpath.append(os.path.dirname(source))
        self._templar.environment.loader.searchpath = searchpath
        self._task.args['src'] = self._templar.template(template_data)
# -*- test-case-name: twisted.web.test.test_xml -*-
#
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""
*S*mall, *U*ncomplicated *X*ML.
This is a very simple implementation of XML/HTML as a network
protocol. It is not at all clever. Its main features are that it
does not:
- support namespaces
- mung mnemonic entity references
- validate
- perform *any* external actions (such as fetching URLs or writing files)
under *any* circumstances
- has lots and lots of horrible hacks for supporting broken HTML (as an
option, they're not on by default).
"""
from twisted.internet.protocol import Protocol, FileWrapper
from twisted.python.reflect import prefixedMethodNames
identChars = '.-_:'
lenientIdentChars = identChars + ';+#/%~'
def nop(*args, **kw):
    """Accept any arguments and do nothing (returns None)."""
def unionlist(*args):
    """Return the union of the given sequences, without duplicates, as
    the keys of a dict built from all elements."""
    combined = []
    for seq in args:
        combined.extend(seq)
    return dict.fromkeys(combined).keys()
def zipfndict(*args, **kw):
    """Merge several name->function dicts into one name->tuple dict.

    For every key present in any input dict, the value is a tuple with
    one entry per input dict: that dict's function for the key, or the
    ``default`` keyword argument (nop) when the dict lacks the key.
    """
    default = kw.get('default', nop)
    result = {}
    for key in unionlist(*[fndict.keys() for fndict in args]):
        result[key] = tuple([fndict.get(key, default) for fndict in args])
    return result
def prefixedMethodClassDict(clazz, prefix):
    # Map {suffix: class attribute} for every method of clazz whose name
    # starts with prefix; the prefix is stripped from the key (note the
    # getattr re-adds it).
    return dict([(name, getattr(clazz, prefix + name)) for name in prefixedMethodNames(clazz, prefix)])

def prefixedMethodObjDict(obj, prefix):
    # Same as prefixedMethodClassDict, but looks attributes up on the
    # instance so the values are bound methods of obj.
    return dict([(name, getattr(obj, prefix + name)) for name in prefixedMethodNames(obj.__class__, prefix)])
class ParseError(Exception):
    """Parse failure at a known source position.

    Carries the filename, line, column and a human-readable message;
    str() renders them as "filename:line:col: message".
    """

    def __init__(self, filename, line, col, message):
        self.filename = filename
        self.line = line
        self.col = col
        self.message = message

    def __str__(self):
        location = "%s:%s:%s" % (self.filename, self.line, self.col)
        return "%s: %s" % (location, self.message)
class XMLParser(Protocol):
state = None
encodings = None
filename = "<xml />"
beExtremelyLenient = 0
_prepend = None
    def connectionMade(self):
        # Reset position tracking and the encoding stack at parse start.
        self.lineno = 1
        self.colno = 0
        self.encodings = []

    def saveMark(self):
        '''Get the line number and column of the last character parsed'''
        # This gets replaced during dataReceived, restored afterwards
        return (self.lineno, self.colno)

    def _parseError(self, message):
        # Raise a ParseError carrying the current file/line/column.
        raise ParseError(*((self.filename,)+self.saveMark()+(message,)))
    def _buildStateTable(self):
        '''Return a dictionary of begin, do, end state function tuples'''
        # _buildStateTable leaves something to be desired
        # but it does what it does.. probably slowly,
        # so I'm doing some evil caching so it doesn't get called
        # more than once per class.
        stateTable = getattr(self.__class__, '__stateTable', None)
        if stateTable is None:
            stateTable = self.__class__.__stateTable = zipfndict(*[prefixedMethodObjDict(self, prefix) for prefix in ('begin_', 'do_', 'end_')])
        return stateTable

    def _decode(self, data):
        # Decode raw input using the encodings detected so far; only
        # UTF-16 byte-order marks are detected (by dataReceived), and
        # UTF-16/UCS-2 input must arrive in even-length chunks.
        if 'UTF-16' in self.encodings or 'UCS-2' in self.encodings:
            assert not len(data) & 1, 'UTF-16 must come in pairs for now'
        if self._prepend:
            data = self._prepend + data
        for encoding in self.encodings:
            data = unicode(data, encoding)
        return data
    def maybeBodyData(self):
        # Choose the state that follows a completed start tag: normally
        # 'bodydata', but inline <script> content (without a src
        # attribute) is captured verbatim until </script>.
        if self.endtag:
            return 'bodydata'
        # Get ready for fun! We're going to allow
        # <script>if (foo < bar)</script> to work!
        # We do this by making everything between <script> and
        # </script> a Text
        # BUT <script src="foo"> will be special-cased to do regular,
        # lenient behavior, because those may not have </script>
        # -radix
        if (self.tagName == 'script'
            and not self.tagAttributes.has_key('src')):
            # we do this ourselves rather than having begin_waitforendscript
            # becuase that can get called multiple times and we don't want
            # bodydata to get reset other than the first time.
            self.begin_bodydata(None)
            return 'waitforendscript'
        return 'bodydata'
    def dataReceived(self, data):
        # Main entry point: run the byte-at-a-time state machine over
        # data.  State names index the (begin_, do_, end_) method table;
        # do_* returns the next state name (or None to stay put).
        stateTable = self._buildStateTable()
        if not self.state:
            # all UTF-16 starts with this string
            if data.startswith('\xff\xfe'):
                self._prepend = '\xff\xfe'
                self.encodings.append('UTF-16')
                data = data[2:]
            elif data.startswith('\xfe\xff'):
                self._prepend = '\xfe\xff'
                self.encodings.append('UTF-16')
                data = data[2:]
            self.state = 'begin'
        if self.encodings:
            data = self._decode(data)
        # bring state, lineno, colno into local scope
        lineno, colno = self.lineno, self.colno
        curState = self.state
        # replace saveMark with a nested scope function
        _saveMark = self.saveMark
        def saveMark():
            return (lineno, colno)
        self.saveMark = saveMark
        # fetch functions from the stateTable
        beginFn, doFn, endFn = stateTable[curState]
        try:
            for byte in data:
                # do newline stuff
                if byte == '\n':
                    lineno += 1
                    colno = 0
                else:
                    colno += 1
                newState = doFn(byte)
                if newState is not None and newState != curState:
                    # this is the endFn from the previous state
                    endFn()
                    curState = newState
                    beginFn, doFn, endFn = stateTable[curState]
                    beginFn(byte)
        finally:
            # restore instance-level bookkeeping even on error
            self.saveMark = _saveMark
            self.lineno, self.colno = lineno, colno
            # state doesn't make sense if there's an exception..
            self.state = curState
# state methods
def do_begin(self, byte):
if byte.isspace():
return
if byte != '<':
if self.beExtremelyLenient:
return 'bodydata'
self._parseError("First char of document [%r] wasn't <" % (byte,))
return 'tagstart'
def begin_comment(self, byte):
self.commentbuf = ''
def do_comment(self, byte):
self.commentbuf += byte
if self.commentbuf.endswith('-->'):
self.gotComment(self.commentbuf[:-3])
return 'bodydata'
def begin_tagstart(self, byte):
self.tagName = '' # name of the tag
self.tagAttributes = {} # attributes of the tag
self.termtag = 0 # is the tag self-terminating
self.endtag = 0
def do_tagstart(self, byte):
if byte.isalnum() or byte in identChars:
self.tagName += byte
if self.tagName == '!--':
return 'comment'
elif byte.isspace():
if self.tagName:
if self.endtag:
# properly strict thing to do here is probably to only
# accept whitespace
return 'waitforgt'
return 'attrs'
else:
self._parseError("Whitespace before tag-name")
elif byte == '>':
if self.endtag:
self.gotTagEnd(self.tagName)
return 'bodydata'
else:
self.gotTagStart(self.tagName, {})
return (not self.beExtremelyLenient) and 'bodydata' or self.maybeBodyData()
elif byte == '/':
if self.tagName:
return 'afterslash'
else:
self.endtag = 1
elif byte in '!?':
if self.tagName:
self._parseError("Invalid character in tag-name")
else:
self.tagName += byte
self.termtag = 1
elif byte == '[':
if self.tagName == '!':
return 'expectcdata'
else:
self._parseError("Invalid '[' in tag-name")
else:
if self.beExtremelyLenient:
self.bodydata = '<'
return 'unentity'
self._parseError('Invalid tag character: %r'% byte)
def begin_unentity(self, byte):
self.bodydata += byte
def do_unentity(self, byte):
self.bodydata += byte
return 'bodydata'
def end_unentity(self):
self.gotText(self.bodydata)
def begin_expectcdata(self, byte):
self.cdatabuf = byte
def do_expectcdata(self, byte):
self.cdatabuf += byte
cdb = self.cdatabuf
cd = '[CDATA['
if len(cd) > len(cdb):
if cd.startswith(cdb):
return
elif self.beExtremelyLenient:
## WHAT THE CRAP!? MSWord9 generates HTML that includes these
## bizarre <![if !foo]> <![endif]> chunks, so I've gotta ignore
## 'em as best I can. this should really be a separate parse
## state but I don't even have any idea what these _are_.
return 'waitforgt'
else:
self._parseError("Mal-formed CDATA header")
if cd == cdb:
self.cdatabuf = ''
return 'cdata'
self._parseError("Mal-formed CDATA header")
def do_cdata(self, byte):
self.cdatabuf += byte
if self.cdatabuf.endswith("]]>"):
self.cdatabuf = self.cdatabuf[:-3]
return 'bodydata'
def end_cdata(self):
self.gotCData(self.cdatabuf)
self.cdatabuf = ''
def do_attrs(self, byte):
if byte.isalnum() or byte in identChars:
# XXX FIXME really handle !DOCTYPE at some point
if self.tagName == '!DOCTYPE':
return 'doctype'
if self.tagName[0] in '!?':
return 'waitforgt'
return 'attrname'
elif byte.isspace():
return
elif byte == '>':
self.gotTagStart(self.tagName, self.tagAttributes)
return (not self.beExtremelyLenient) and 'bodydata' or self.maybeBodyData()
elif byte == '/':
return 'afterslash'
elif self.beExtremelyLenient:
# discard and move on? Only case I've seen of this so far was:
# <foo bar="baz"">
return
self._parseError("Unexpected character: %r" % byte)
def begin_doctype(self, byte):
self.doctype = byte
def do_doctype(self, byte):
if byte == '>':
return 'bodydata'
self.doctype += byte
def end_doctype(self):
self.gotDoctype(self.doctype)
self.doctype = None
def do_waitforgt(self, byte):
if byte == '>':
if self.endtag or not self.beExtremelyLenient:
return 'bodydata'
return self.maybeBodyData()
def begin_attrname(self, byte):
self.attrname = byte
self._attrname_termtag = 0
def do_attrname(self, byte):
if byte.isalnum() or byte in identChars:
self.attrname += byte
return
elif byte == '=':
return 'beforeattrval'
elif byte.isspace():
return 'beforeeq'
elif self.beExtremelyLenient:
if byte in '"\'':
return 'attrval'
if byte in lenientIdentChars or byte.isalnum():
self.attrname += byte
return
if byte == '/':
self._attrname_termtag = 1
return
if byte == '>':
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
if self._attrname_termtag:
self.gotTagEnd(self.tagName)
return 'bodydata'
return self.maybeBodyData()
self._parseError("Invalid attribute name: %r %r" % (self.attrname, byte))
def do_beforeattrval(self, byte):
if byte in '"\'':
return 'attrval'
elif byte.isspace():
return
elif self.beExtremelyLenient:
if byte in lenientIdentChars or byte.isalnum():
return 'messyattr'
if byte == '>':
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
return self.maybeBodyData()
if byte == '\\':
# I saw this in actual HTML once:
# <font size=\"3\"><sup>SM</sup></font>
return
self._parseError("Invalid initial attribute value: %r; Attribute values must be quoted." % byte)
attrname = ''
attrval = ''
def begin_beforeeq(self,byte):
self._beforeeq_termtag = 0
def do_beforeeq(self, byte):
if byte == '=':
return 'beforeattrval'
elif byte.isspace():
return
elif self.beExtremelyLenient:
if byte.isalnum() or byte in identChars:
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
return 'attrname'
elif byte == '>':
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
if self._beforeeq_termtag:
self.gotTagEnd(self.tagName)
return 'bodydata'
return self.maybeBodyData()
elif byte == '/':
self._beforeeq_termtag = 1
return
self._parseError("Invalid attribute")
def begin_attrval(self, byte):
self.quotetype = byte
self.attrval = ''
def do_attrval(self, byte):
if byte == self.quotetype:
return 'attrs'
self.attrval += byte
def end_attrval(self):
self.tagAttributes[self.attrname] = self.attrval
self.attrname = self.attrval = ''
def begin_messyattr(self, byte):
self.attrval = byte
def do_messyattr(self, byte):
if byte.isspace():
return 'attrs'
elif byte == '>':
endTag = 0
if self.attrval.endswith('/'):
endTag = 1
self.attrval = self.attrval[:-1]
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
if endTag:
self.gotTagEnd(self.tagName)
return 'bodydata'
return self.maybeBodyData()
else:
self.attrval += byte
def end_messyattr(self):
if self.attrval:
self.tagAttributes[self.attrname] = self.attrval
def begin_afterslash(self, byte):
self._after_slash_closed = 0
def do_afterslash(self, byte):
# this state is only after a self-terminating slash, e.g. <foo/>
if self._after_slash_closed:
self._parseError("Mal-formed")#XXX When does this happen??
if byte != '>':
self._parseError("No data allowed after '/'")
self._after_slash_closed = 1
self.gotTagStart(self.tagName, self.tagAttributes)
self.gotTagEnd(self.tagName)
# don't need maybeBodyData here because there better not be
# any javascript code after a <script/>... we'll see :(
return 'bodydata'
def begin_bodydata(self, byte):
self.bodydata = ''
def do_bodydata(self, byte):
if byte == '<':
return 'tagstart'
if byte == '&':
return 'entityref'
self.bodydata += byte
def end_bodydata(self):
self.gotText(self.bodydata)
self.bodydata = ''
def do_waitforendscript(self, byte):
if byte == '<':
return 'waitscriptendtag'
self.bodydata += byte
def begin_waitscriptendtag(self, byte):
self.temptagdata = ''
self.tagName = ''
self.endtag = 0
def do_waitscriptendtag(self, byte):
# 1 enforce / as first byte read
# 2 enforce following bytes to be subset of "script" until
# tagName == "script"
# 2a when that happens, gotText(self.bodydata) and gotTagEnd(self.tagName)
# 3 spaces can happen anywhere, they're ignored
# e.g. < / script >
# 4 anything else causes all data I've read to be moved to the
# bodydata, and switch back to waitforendscript state
# If it turns out this _isn't_ a </script>, we need to
# remember all the data we've been through so we can append it
# to bodydata
self.temptagdata += byte
# 1
if byte == '/':
self.endtag = True
elif not self.endtag:
self.bodydata += "<" + self.temptagdata
return 'waitforendscript'
# 2
elif byte.isalnum() or byte in identChars:
self.tagName += byte
if not 'script'.startswith(self.tagName):
self.bodydata += "<" + self.temptagdata
return 'waitforendscript'
elif self.tagName == 'script':
self.gotText(self.bodydata)
self.gotTagEnd(self.tagName)
return 'waitforgt'
# 3
elif byte.isspace():
return 'waitscriptendtag'
# 4
else:
self.bodydata += "<" + self.temptagdata
return 'waitforendscript'
def begin_entityref(self, byte):
self.erefbuf = ''
self.erefextra = '' # extra bit for lenient mode
def do_entityref(self, byte):
if byte.isspace() or byte == "<":
if self.beExtremelyLenient:
# '&foo' probably was '&foo'
if self.erefbuf and self.erefbuf != "amp":
self.erefextra = self.erefbuf
self.erefbuf = "amp"
if byte == "<":
return "tagstart"
else:
self.erefextra += byte
return 'spacebodydata'
self._parseError("Bad entity reference")
elif byte != ';':
self.erefbuf += byte
else:
return 'bodydata'
def end_entityref(self):
self.gotEntityReference(self.erefbuf)
# hacky support for space after & in entityref in beExtremelyLenient
# state should only happen in that case
def begin_spacebodydata(self, byte):
self.bodydata = self.erefextra
self.erefextra = None
do_spacebodydata = do_bodydata
end_spacebodydata = end_bodydata
# Sorta SAX-ish API
def gotTagStart(self, name, attributes):
'''Encountered an opening tag.
Default behaviour is to print.'''
print 'begin', name, attributes
def gotText(self, data):
'''Encountered text
Default behaviour is to print.'''
print 'text:', repr(data)
def gotEntityReference(self, entityRef):
'''Encountered mnemonic entity reference
Default behaviour is to print.'''
print 'entityRef: &%s;' % entityRef
def gotComment(self, comment):
'''Encountered comment.
Default behaviour is to ignore.'''
pass
def gotCData(self, cdata):
'''Encountered CDATA
Default behaviour is to call the gotText method'''
self.gotText(cdata)
def gotDoctype(self, doctype):
    """Encountered a DOCTYPE declaration.

    This is really grotty: it basically just gives you everything between
    '<!DOCTYPE' and '>' as a single unparsed string argument.
    Override in subclasses; the default implementation just prints it.
    """
    print '!DOCTYPE', repr(doctype)
def gotTagEnd(self, name):
    '''Encountered a closing tag.

    Override in subclasses; the default implementation just prints it.
    '''
    print 'end', name
if __name__ == '__main__':
    from cStringIO import StringIO
    # Deliberately malformed sample document used to exercise the lenient
    # parsing paths (bad doctype, processing instruction, entity, CDATA).
    testDocument = '''
<!DOCTYPE ignore all this shit, hah its malformed!!!!@$>
<?xml version="suck it"?>
<foo>
A
<bar />
<baz boz="buz">boz &zop;</baz>
<![CDATA[ foo bar baz ]]>
</foo>
'''
    x = XMLParser()
    # Attach a throwaway transport so the protocol machinery is happy.
    x.makeConnection(FileWrapper(StringIO()))
    # fn = "/home/glyph/Projects/Twisted/doc/howto/ipc10paper.html"
    fn = "/home/glyph/gruesome.xml"
    # testDocument = open(fn).read()
    # Feed the document through the parser; the got* callbacks print events.
    x.dataReceived(testDocument)
<!---
Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<p align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-dark.svg">
<source media="(prefers-color-scheme: light)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-light.svg">
<img alt="Bibliothèque Hugging Face Transformers" src="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/transformers-logo-light.svg" width="352" height="59" style="max-width: 100%;">
</picture>
<br/>
<br/>
</p>
<p align="center">
<a href="https://circleci.com/gh/huggingface/transformers"><img alt="Build" src="https://img.shields.io/circleci/build/github/huggingface/transformers/main"></a>
<a href="https://github.com/huggingface/transformers/blob/main/LICENSE"><img alt="GitHub" src="https://img.shields.io/github/license/huggingface/transformers.svg?color=blue"></a>
<a href="https://huggingface.co/docs/transformers/index"><img alt="Documentation" src="https://img.shields.io/website/http/huggingface.co/docs/transformers/index.svg?down_color=red&down_message=offline&up_message=online"></a>
<a href="https://github.com/huggingface/transformers/releases"><img alt="GitHub release" src="https://img.shields.io/github/release/huggingface/transformers.svg"></a>
<a href="https://github.com/huggingface/transformers/blob/main/CODE_OF_CONDUCT.md"><img alt="Contributor Covenant" src="https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg"></a>
<a href="https://zenodo.org/badge/latestdoi/155220641"><img src="https://zenodo.org/badge/155220641.svg" alt="DOI"></a>
</p>
<h4 align="center">
<p>
<a href="https://github.com/huggingface/transformers/">English</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_zh-hans.md">简体中文</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_zh-hant.md">繁體中文</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ko.md">한국어</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_es.md">Español</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ja.md">日本語</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_hd.md">हिन्दी</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ru.md">Русский</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_pt-br.md">Português</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_te.md">తెలుగు</a> |
<b>Français</b> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_de.md">Deutsch</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_it.md">Italiano</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_vi.md">Tiếng Việt</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ar.md">العربية</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_ur.md">اردو</a> |
<a href="https://github.com/huggingface/transformers/blob/main/i18n/README_bn.md">বাংলা</a> |
</p>
</h4>
<h3 align="center">
<p>Apprentissage automatique de pointe pour JAX, PyTorch et TensorFlow</p>
</h3>
<h3 align="center">
<a href="https://hf.co/course"><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/course_banner.png"></a>
</h3>
🤗 Transformers fournit des milliers de modèles pré-entraînés pour effectuer des tâches sur différentes modalités telles que le texte, la vision et l'audio.
Ces modèles peuvent être appliqués à :
* 📝 Texte, pour des tâches telles que la classification de texte, l'extraction d'informations, la réponse aux questions, le résumé, la traduction et la génération de texte, dans plus de 100 langues.
* 🖼️ Images, pour des tâches telles que la classification d'images, la détection d'objets et la segmentation.
* 🗣️ Audio, pour des tâches telles que la reconnaissance vocale et la classification audio.
Les modèles de transformer peuvent également effectuer des tâches sur **plusieurs modalités combinées**, telles que la réponse aux questions sur des tableaux, la reconnaissance optique de caractères, l'extraction d'informations à partir de documents numérisés, la classification vidéo et la réponse aux questions visuelles.
🤗 Transformers fournit des API pour télécharger et utiliser rapidement ces modèles pré-entraînés sur un texte donné, les affiner sur vos propres ensembles de données, puis les partager avec la communauté sur notre [hub de modèles](https://huggingface.co/models). En même temps, chaque module Python définissant une architecture est complètement indépendant et peut être modifié pour permettre des expériences de recherche rapides.
🤗 Transformers est soutenu par les trois bibliothèques d'apprentissage profond les plus populaires — [Jax](https://jax.readthedocs.io/en/latest/), [PyTorch](https://pytorch.org/) et [TensorFlow](https://www.tensorflow.org/) — avec une intégration transparente entre eux. Il est facile de former vos modèles avec l'un avant de les charger pour l'inférence avec l'autre.
## Démos en ligne
Vous pouvez tester la plupart de nos modèles directement sur leurs pages du [hub de modèles](https://huggingface.co/models). Nous proposons également [l'hébergement privé de modèles, le versionning et une API d'inférence](https://huggingface.co/pricing) pour des modèles publics et privés.
Voici quelques exemples :
En traitement du langage naturel :
- [Complétion de mots masqués avec BERT](https://huggingface.co/google-bert/bert-base-uncased?text=Paris+is+the+%5BMASK%5D+of+France)
- [Reconnaissance d'entités nommées avec Electra](https://huggingface.co/dbmdz/electra-large-discriminator-finetuned-conll03-english?text=My+name+is+Sarah+and+I+live+in+London+city)
- [Génération de texte avec GPT-2](https://huggingface.co/openai-community/gpt2?text=A+long+time+ago%2C+)
- [Inférence de langage naturel avec RoBERTa](https://huggingface.co/FacebookAI/roberta-large-mnli?text=The+dog+was+lost.+Nobody+lost+any+animal)
- [Résumé avec BART](https://huggingface.co/facebook/bart-large-cnn?text=The+tower+is+324+metres+%281%2C063+ft%29+tall%2C+about+the+same+height+as+an+81-storey+building%2C+and+the+tallest+structure+in+Paris.+Its+base+is+square%2C+measuring+125+metres+%28410+ft%29+on+each+side.+During+its+construction%2C+the+Eiffel+Tower+surpassed+the+Washington+Monument+to+become+the+tallest+man-made+structure+in+the+world%2C+a+title+it+held+for+41+years+until+the+Chrysler+Building+in+New+York+City+was+finished+in+1930.+It+was+the+first+structure+to+reach+a+height+of+300+metres.+Due+to+the+addition+of+a+broadcasting+aerial+at+the+top+of+the+tower+in+1957%2C+it+is+now+taller+than+the+Chrysler+Building+by+5.2+metres+%2817+ft%29.+Excluding+transmitters%2C+the+Eiffel+Tower+is+the+second+tallest+free-standing+structure+in+France+after+the+Millau+Viaduct)
- [Réponse aux questions avec DistilBERT](https://huggingface.co/distilbert/distilbert-base-uncased-distilled-squad?text=Which+name+is+also+used+to+describe+the+Amazon+rainforest+in+English%3F&context=The+Amazon+rainforest+%28Portuguese%3A+Floresta+Amaz%C3%B4nica+or+Amaz%C3%B4nia%3B+Spanish%3A+Selva+Amaz%C3%B3nica%2C+Amazon%C3%ADa+or+usually+Amazonia%3B+French%3A+For%C3%AAt+amazonienne%3B+Dutch%3A+Amazoneregenwoud%29%2C+also+known+in+English+as+Amazonia+or+the+Amazon+Jungle%2C+is+a+moist+broadleaf+forest+that+covers+most+of+the+Amazon+basin+of+South+America.+This+basin+encompasses+7%2C000%2C000+square+kilometres+%282%2C700%2C000+sq+mi%29%2C+of+which+5%2C500%2C000+square+kilometres+%282%2C100%2C000+sq+mi%29+are+covered+by+the+rainforest.+This+region+includes+territory+belonging+to+nine+nations.+The+majority+of+the+forest+is+contained+within+Brazil%2C+with+60%25+of+the+rainforest%2C+followed+by+Peru+with+13%25%2C+Colombia+with+10%25%2C+and+with+minor+amounts+in+Venezuela%2C+Ecuador%2C+Bolivia%2C+Guyana%2C+Suriname+and+French+Guiana.+States+or+departments+in+four+nations+contain+%22Amazonas%22+in+their+names.+The+Amazon+represents+over+half+of+the+planet%27s+remaining+rainforests%2C+and+comprises+the+largest+and+most+biodiverse+tract+of+tropical+rainforest+in+the+world%2C+with+an+estimated+390+billion+individual+trees+divided+into+16%2C000+species)
- [Traduction avec T5](https://huggingface.co/google-t5/t5-base?text=My+name+is+Wolfgang+and+I+live+in+Berlin)
En vision par ordinateur :
- [Classification d'images avec ViT](https://huggingface.co/google/vit-base-patch16-224)
- [Détection d'objets avec DETR](https://huggingface.co/facebook/detr-resnet-50)
- [Segmentation sémantique avec SegFormer](https://huggingface.co/nvidia/segformer-b0-finetuned-ade-512-512)
- [Segmentation panoptique avec MaskFormer](https://huggingface.co/facebook/maskformer-swin-small-coco)
- [Estimation de profondeur avec DPT](https://huggingface.co/docs/transformers/model_doc/dpt)
- [Classification vidéo avec VideoMAE](https://huggingface.co/docs/transformers/model_doc/videomae)
- [Segmentation universelle avec OneFormer](https://huggingface.co/shi-labs/oneformer_ade20k_dinat_large)
En audio :
- [Reconnaissance automatique de la parole avec Wav2Vec2](https://huggingface.co/facebook/wav2vec2-base-960h)
- [Spotting de mots-clés avec Wav2Vec2](https://huggingface.co/superb/wav2vec2-base-superb-ks)
- [Classification audio avec Audio Spectrogram Transformer](https://huggingface.co/MIT/ast-finetuned-audioset-10-10-0.4593)
Dans les tâches multimodales :
- [Réponses aux questions sur table avec TAPAS](https://huggingface.co/google/tapas-base-finetuned-wtq)
- [Réponses aux questions visuelles avec ViLT](https://huggingface.co/dandelin/vilt-b32-finetuned-vqa)
- [Classification d'images sans étiquette avec CLIP](https://huggingface.co/openai/clip-vit-large-patch14)
- [Réponses aux questions sur les documents avec LayoutLM](https://huggingface.co/impira/layoutlm-document-qa)
- [Classification vidéo sans étiquette avec X-CLIP](https://huggingface.co/docs/transformers/model_doc/xclip)
## 100 projets utilisant Transformers
Transformers est plus qu'une boîte à outils pour utiliser des modèles pré-entraînés : c'est une communauté de projets construits autour de lui et du Hub Hugging Face. Nous voulons que Transformers permette aux développeurs, chercheurs, étudiants, professeurs, ingénieurs et à quiconque d'imaginer et de réaliser leurs projets de rêve.
Afin de célébrer les 100 000 étoiles de transformers, nous avons décidé de mettre en avant la communauté et avons créé la page [awesome-transformers](./awesome-transformers.md) qui répertorie 100 projets incroyables construits autour de transformers.
Si vous possédez ou utilisez un projet que vous pensez devoir figurer dans la liste, veuillez ouvrir une pull request pour l'ajouter !
## Si vous recherchez un support personnalisé de la part de l'équipe Hugging Face
<a target="_blank" href="https://huggingface.co/support">
<img alt="Programme d'accélération des experts HuggingFace" src="https://cdn-media.huggingface.co/marketing/transformers/new-support-improved.png" style="max-width: 600px; border: 1px solid #eee; border-radius: 4px; box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.05);">
</a><br>
## Tour rapide
Pour utiliser immédiatement un modèle sur une entrée donnée (texte, image, audio,...), nous fournissons l'API `pipeline`. Les pipelines regroupent un modèle pré-entraîné avec la préparation des données qui a été utilisée lors de l'entraînement de ce modèle. Voici comment utiliser rapidement un pipeline pour classer des textes en positif ou négatif :
```python
>>> from transformers import pipeline
# Allouer un pipeline pour l'analyse de sentiment
>>> classifieur = pipeline('sentiment-analysis')
>>> classifieur("Nous sommes très heureux d'introduire le pipeline dans le référentiel transformers.")
[{'label': 'POSITIF', 'score': 0.9996980428695679}]
```
La deuxième ligne de code télécharge et met en cache le modèle pré-entraîné utilisé par le pipeline, tandis que la troisième l'évalue sur le texte donné. Ici, la réponse est "positive" avec une confiance de 99,97%.
De nombreuses tâches disposent d'un pipeline pré-entraîné prêt à l'emploi, en NLP, mais aussi en vision par ordinateur et en parole. Par exemple, nous pouvons facilement extraire les objets détectés dans une image :
```python
>>> import requests
>>> from PIL import Image
>>> from transformers import pipeline
# Télécharger une image avec de jolis chats
>>> url = "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample.png"
>>> donnees_image = requests.get(url, stream=True).raw
>>> image = Image.open(donnees_image)
# Allouer un pipeline pour la détection d'objets
>>> detecteur_objets = pipeline('object-detection')
>>> detecteur_objets(image)
[{'score': 0.9982201457023621,
'label': 'télécommande',
'box': {'xmin': 40, 'ymin': 70, 'xmax': 175, 'ymax': 117}},
{'score': 0.9960021376609802,
'label': 'télécommande',
'box': {'xmin': 333, 'ymin': 72, 'xmax': 368, 'ymax': 187}},
{'score': 0.9954745173454285,
'label': 'canapé',
'box': {'xmin': 0, 'ymin': 1, 'xmax': 639, 'ymax': 473}},
{'score': 0.9988006353378296,
'label': 'chat',
'box': {'xmin': 13, 'ymin': 52, 'xmax': 314, 'ymax': 470}},
{'score': 0.9986783862113953,
'label': 'chat',
'box': {'xmin': 345, 'ymin': 23, 'xmax': 640, 'ymax': 368}}]
```
Ici, nous obtenons une liste d'objets détectés dans l'image, avec une boîte entourant l'objet et un score de confiance. Voici l'image originale à gauche, avec les prédictions affichées à droite :
<h3 align="center">
<a><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample.png" width="400"></a>
<a><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/coco_sample_post_processed.png" width="400"></a>
</h3>
Vous pouvez en savoir plus sur les tâches supportées par l'API pipeline dans [ce tutoriel](https://huggingface.co/docs/transformers/task_summary).
En plus de `pipeline`, pour télécharger et utiliser n'importe lequel des modèles pré-entraînés sur votre tâche donnée, il suffit de trois lignes de code. Voici la version PyTorch :
```python
>>> from transformers import AutoTokenizer, AutoModel
>>> tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")
>>> model = AutoModel.from_pretrained("google-bert/bert-base-uncased")
inputs = tokenizer("Bonjour le monde !", return_tensors="pt")
outputs = model(**inputs)
```
Et voici le code équivalent pour TensorFlow :
```python
from transformers import AutoTokenizer, TFAutoModel
tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased")
model = TFAutoModel.from_pretrained("google-bert/bert-base-uncased")
inputs = tokenizer("Bonjour le monde !", return_tensors="tf")
outputs = model(**inputs)
```
Le tokenizer est responsable de toutes les étapes de prétraitement que le modèle préentraîné attend et peut être appelé directement sur une seule chaîne de caractères (comme dans les exemples ci-dessus) ou sur une liste. Il produira un dictionnaire que vous pouvez utiliser dans votre code ou simplement passer directement à votre modèle en utilisant l'opérateur de déballage **.
Le modèle lui-même est un module [`nn.Module` PyTorch](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) ou un modèle [`tf.keras.Model` TensorFlow](https://www.tensorflow.org/api_docs/python/tf/keras/Model) (selon votre backend) que vous pouvez utiliser comme d'habitude. [Ce tutoriel](https://huggingface.co/docs/transformers/training) explique comment intégrer un tel modèle dans une boucle d'entraînement classique PyTorch ou TensorFlow, ou comment utiliser notre API `Trainer` pour affiner rapidement sur un nouvel ensemble de données.
## Pourquoi devrais-je utiliser transformers ?
1. Des modèles de pointe faciles à utiliser :
- Hautes performances en compréhension et génération de langage naturel, en vision par ordinateur et en tâches audio.
- Faible barrière à l'entrée pour les éducateurs et les praticiens.
- Peu d'abstractions visibles pour l'utilisateur avec seulement trois classes à apprendre.
- Une API unifiée pour utiliser tous nos modèles préentraînés.
1. Coûts informatiques réduits, empreinte carbone plus petite :
- Les chercheurs peuvent partager des modèles entraînés au lieu de toujours les réentraîner.
- Les praticiens peuvent réduire le temps de calcul et les coûts de production.
- Des dizaines d'architectures avec plus de 400 000 modèles préentraînés dans toutes les modalités.
1. Choisissez le bon framework pour chaque partie de la vie d'un modèle :
- Entraînez des modèles de pointe en 3 lignes de code.
- Transférez un seul modèle entre les frameworks TF2.0/PyTorch/JAX à volonté.
- Choisissez facilement le bon framework pour l'entraînement, l'évaluation et la production.
1. Personnalisez facilement un modèle ou un exemple selon vos besoins :
- Nous fournissons des exemples pour chaque architecture afin de reproduire les résultats publiés par ses auteurs originaux.
- Les détails internes du modèle sont exposés de manière aussi cohérente que possible.
- Les fichiers de modèle peuvent être utilisés indépendamment de la bibliothèque pour des expériences rapides.
## Pourquoi ne devrais-je pas utiliser transformers ?
- Cette bibliothèque n'est pas une boîte à outils modulaire de blocs de construction pour les réseaux neuronaux. Le code dans les fichiers de modèle n'est pas refactorisé avec des abstractions supplémentaires à dessein, afin que les chercheurs puissent itérer rapidement sur chacun des modèles sans plonger dans des abstractions/fichiers supplémentaires.
- L'API d'entraînement n'est pas destinée à fonctionner avec n'importe quel modèle, mais elle est optimisée pour fonctionner avec les modèles fournis par la bibliothèque. Pour des boucles génériques d'apprentissage automatique, vous devriez utiliser une autre bibliothèque (éventuellement, [Accelerate](https://huggingface.co/docs/accelerate)).
- Bien que nous nous efforcions de présenter autant de cas d'utilisation que possible, les scripts de notre [dossier d'exemples](https://github.com/huggingface/transformers/tree/main/examples) ne sont que cela : des exemples. Il est prévu qu'ils ne fonctionnent pas immédiatement sur votre problème spécifique et que vous devrez probablement modifier quelques lignes de code pour les adapter à vos besoins.
## Installation
### Avec pip
Ce référentiel est testé sur Python 3.10+ et PyTorch 2.4+.
Vous devriez installer 🤗 Transformers dans un [environnement virtuel](https://docs.python.org/3/library/venv.html). Si vous n'êtes pas familier avec les environnements virtuels Python, consultez le [guide utilisateur](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/).
D'abord, créez un environnement virtuel avec la version de Python que vous allez utiliser et activez-le.
Ensuite, vous devrez installer au moins l'un de Flax, PyTorch ou TensorFlow.
Veuillez vous référer à la page d'installation de [TensorFlow](https://www.tensorflow.org/install/), de [PyTorch](https://pytorch.org/get-started/locally/#start-locally) et/ou de [Flax](https://github.com/google/flax#quick-install) et [Jax](https://github.com/google/jax#installation) pour connaître la commande d'installation spécifique à votre plateforme.
Lorsqu'un de ces backends est installé, 🤗 Transformers peut être installé avec pip comme suit :
```bash
pip install transformers
```
Si vous souhaitez jouer avec les exemples ou avez besoin de la dernière version du code et ne pouvez pas attendre une nouvelle version, vous devez [installer la bibliothèque à partir de la source](https://huggingface.co/docs/transformers/installation#installing-from-source).
### Avec conda
🤗 Transformers peut être installé avec conda comme suit :
```shell
conda install conda-forge::transformers
```
> **_NOTE:_** L'installation de `transformers` depuis le canal `huggingface` est obsolète.
Suivez les pages d'installation de Flax, PyTorch ou TensorFlow pour voir comment les installer avec conda.
> **_NOTE:_** Sur Windows, on peut vous demander d'activer le mode développeur pour bénéficier de la mise en cache. Si ce n'est pas une option pour vous, veuillez nous le faire savoir dans [cette issue](https://github.com/huggingface/huggingface_hub/issues/1062).
## Architectures de modèles
**[Tous les points de contrôle](https://huggingface.co/models)** de modèle fournis par 🤗 Transformers sont intégrés de manière transparente depuis le [hub de modèles](https://huggingface.co/models) huggingface.co, où ils sont téléchargés directement par les [utilisateurs](https://huggingface.co/users) et les [organisations](https://huggingface.co/organizations).
Nombre actuel de points de contrôle : 
🤗 Transformers fournit actuellement les architectures suivantes: consultez [ici](https://huggingface.co/docs/transformers/model_summary) pour un résumé global de chacune d'entre elles.
Pour vérifier si chaque modèle a une implémentation en Flax, PyTorch ou TensorFlow, ou s'il a un tokenizer associé pris en charge par la bibliothèque 🤗 Tokenizers, consultez [ce tableau](https://huggingface.co/docs/transformers/index#supported-frameworks).
Ces implémentations ont été testées sur plusieurs ensembles de données (voir les scripts d'exemple) et devraient correspondre aux performances des implémentations originales. Vous pouvez trouver plus de détails sur les performances dans la section Exemples de la [documentation](https://github.com/huggingface/transformers/tree/main/examples).
## En savoir plus
| Section | Description |
|-|-|
| [Documentation](https://huggingface.co/docs/transformers/) | Documentation complète de l'API et tutoriels |
| [Résumé des tâches](https://huggingface.co/docs/transformers/task_summary) | Tâches prises en charge par les 🤗 Transformers |
| [Tutoriel de prétraitement](https://huggingface.co/docs/transformers/preprocessing) | Utilisation de la classe `Tokenizer` pour préparer les données pour les modèles |
| [Entraînement et ajustement fin](https://huggingface.co/docs/transformers/training) | Utilisation des modèles fournis par les 🤗 Transformers dans une boucle d'entraînement PyTorch/TensorFlow et de l'API `Trainer` |
| [Tour rapide : Scripts d'ajustement fin/d'utilisation](https://github.com/huggingface/transformers/tree/main/examples) | Scripts d'exemple pour ajuster finement les modèles sur une large gamme de tâches |
| [Partage et téléversement de modèles](https://huggingface.co/docs/transformers/model_sharing) | Téléchargez et partagez vos modèles ajustés avec la communauté |
## Citation
Nous disposons désormais d'un [article](https://www.aclweb.org/anthology/2020.emnlp-demos.6/) que vous pouvez citer pour la bibliothèque 🤗 Transformers :
```bibtex
@inproceedings{wolf-etal-2020-transformers,
title = "Transformers: State-of-the-Art Natural Language Processing",
author = "Thomas Wolf and Lysandre Debut and Victor Sanh and Julien Chaumond and Clement Delangue and Anthony Moi and Pierric Cistac and Tim Rault and Rémi Louf and Morgan Funtowicz and Joe Davison and Sam Shleifer and Patrick von Platen and Clara Ma and Yacine Jernite and Julien Plu and Canwen Xu and Teven Le Scao and Sylvain Gugger and Mariama Drame and Quentin Lhoest and Alexander M. Rush",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: System Demonstrations",
month = oct,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/2020.emnlp-demos.6",
pages = "38--45"
}
``` | unknown | github | https://github.com/huggingface/transformers | i18n/README_fr.md |
import os
import sys
from select import select
from subprocess import Popen, PIPE
import rpyc
err = ""
def handleInterpreter(conn, fd, data):
    """Accumulate interpreter stderr output and ship complete error blocks.

    Only stderr data is inspected; the bare '>>> ' prompt and the startup
    banner line are discarded.  Everything else is buffered in the
    module-level ``err`` until a '>>> ' prompt marks the end of a
    traceback, at which point the buffer is sent to the remote debug
    service and cleared.
    """
    global err
    if fd != p.stderr.fileno():
        return
    text = str(data, 'utf8')
    banner = 'Type "help", "copyright", "credits" or "license" for more information.'
    # Ignore the lone prompt and the interpreter's greeting.
    if text == '>>> ' or banner in text:
        return
    err += text
    # Errors seem to always end with the '>>> ' prompt reappearing.
    if '>>> ' in text:
        conn.root.add_err(err)
        err = ""
def handleScript(conn, fd, data):
    """Forward any stderr output of the child script to the debug service."""
    if fd != p.stderr.fileno():
        return
    # Send to the local debug service.
    conn.root.add_err(str(data, 'utf8'))
def handle(conn, fd, data, mode):
    """Dispatch child output to the handler matching the run mode."""
    handler = handleInterpreter if mode == 'interpreter' else handleScript
    handler(conn, fd, data)
if __name__ == "__main__":
    # Connect to the local rpyc debug service (assumed to be listening).
    conn = rpyc.connect("localhost", 18861)
    command = ['python']
    mode = 'interpreter'
    if len(sys.argv) > 1:
        # Extra argv entries mean we run a script instead of an interactive
        # interpreter; stderr handling differs between the two modes.
        command = ['python'] + sys.argv[1:]
        mode = 'script'
    with Popen(command, stdout=PIPE, stderr=PIPE) as p:
        # Map child fd -> local binary stream the output is mirrored to.
        readable = {
            p.stdout.fileno(): sys.stdout.buffer,
            p.stderr.fileno(): sys.stderr.buffer,
        }
        # Multiplex both pipes until each has hit EOF.
        while readable:
            for fd in select(readable, [], [])[0]:
                data = os.read(fd, 1024) # read available
                if not data: # EOF
                    del readable[fd]
                    continue
                # Mirror locally, then let the mode-specific handler
                # forward errors to the debug service.
                readable[fd].write(data)
                readable[fd].flush()
                handle(conn, fd, data, mode)
"""distutils.msvccompiler
Contains MSVCCompiler, an implementation of the abstract CCompiler class
for the Microsoft Visual Studio.
"""
# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
import sys, os
from distutils.errors import \
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils import log
# Locate a usable Windows-registry API.  Prefer the stdlib winreg module;
# fall back to the pywin32 extension modules (win32api/win32con); if
# neither is importable (e.g. non-Windows platform), compiler
# autodetection further below will be limited.
_can_read_reg = False
try:
    import winreg
    _can_read_reg = True
    hkey_mod = winreg
    RegOpenKeyEx = winreg.OpenKeyEx
    RegEnumKey = winreg.EnumKey
    RegEnumValue = winreg.EnumValue
    RegError = winreg.error
except ImportError:
    try:
        import win32api
        import win32con
        _can_read_reg = True
        hkey_mod = win32con
        RegOpenKeyEx = win32api.RegOpenKeyEx
        RegEnumKey = win32api.RegEnumKey
        RegEnumValue = win32api.RegEnumValue
        RegError = win32api.error
    except ImportError:
        log.info("Warning: Can't read registry to find the "
                 "necessary compiler setting\n"
                 "Make sure that Python modules winreg, "
                 "win32api or win32con are installed.")
        pass
if _can_read_reg:
    # Registry roots searched, in this order, for compiler settings.
    HKEYS = (hkey_mod.HKEY_USERS,
             hkey_mod.HKEY_CURRENT_USER,
             hkey_mod.HKEY_LOCAL_MACHINE,
             hkey_mod.HKEY_CLASSES_ROOT)
def read_keys(base, key):
    """Return the list of subkey names under `key`, or None if the key
    cannot be opened under registry root `base`."""
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    keys = []
    index = 0
    while True:
        try:
            keys.append(RegEnumKey(handle, index))
        except RegError:
            # Enumeration past the last subkey raises: we're done.
            return keys
        index += 1
def read_values(base, key):
    """Return a dict of the values stored under `key`, or None if the key
    cannot be opened under registry root `base`.

    All value names are converted to lowercase.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    while True:
        try:
            name, value, _type = RegEnumValue(handle, index)
        except RegError:
            # Enumeration past the last value raises: we're done.
            return values
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index += 1
def convert_mbcs(s):
    """Best-effort decode of a byte string via the Windows "mbcs" codec.

    Objects without a ``decode`` method (already-decoded str, ints, None)
    are returned unchanged, as is any value whose bytes fail to decode.
    """
    decoder = getattr(s, "decode", None)
    if decoder is None:
        return s
    try:
        return decoder("mbcs")
    except UnicodeError:
        return s
class MacroExpander:
    """Expands $(...) placeholders using values read from the registry.

    The macro table is populated once at construction time from the
    Visual Studio / .NET Framework registry entries for `version`.
    """

    def __init__(self, version):
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        # Record the first non-empty value found across the registry roots.
        for base in HKEYS:
            values = read_values(base, path)
            if values:
                self.macros["$(%s)" % macro] = values[key]
                return

    def load_macros(self, version):
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            # The SDK root key name changed between .NET 1.0 and 1.1.
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError:
            raise DistutilsPlatformError(
            """Python was built with Visual Studio 2003;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
        product = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                hkey = RegOpenKeyEx(base, product)
            except RegError:
                continue
            # Read the framework version from the first product subkey.
            first = RegEnumKey(hkey, 0)
            values = read_values(base, r"%s\%s" % (product, first))
            self.macros["$(FrameworkVersion)"] = values["version"]

    def sub(self, s):
        # Replace every known $(...) placeholder occurring in s.
        result = s
        for placeholder, value in self.macros.items():
            result = result.replace(placeholder, value)
        return result
def get_build_version():
    """Return the version of MSVC that was used to build this Python.

    Python 2.3+ embeds an "MSC v.XXXX" marker in sys.version; when the
    marker is absent the compiler is assumed to be MSVC 6.  Returns a
    number like 7.1 or 8.0, the int 6 for MSVC 6, or None for compiler
    versions older than 6.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        return 6
    digits = sys.version[pos + len(marker):].split(" ", 1)[0]
    major = int(digits[:-2]) - 6
    minor = int(digits[2:3]) / 10.0
    # Paths were not affected by the minor version for MSVC 6.
    if major == 6:
        minor = 0
    if major >= 6:
        return major + minor
    # Too old: we don't know what version of the compiler this is.
    return None
def get_build_architecture():
    """Return the processor architecture this Python was built for.

    Parses the " bit (ARCH)" marker in sys.version; possible results are
    "Intel", "Itanium", or "AMD64".  Defaults to "Intel" when the marker
    is absent.
    """
    marker = " bit ("
    start = sys.version.find(marker)
    if start == -1:
        return "Intel"
    end = sys.version.find(")", start)
    return sys.version[start + len(marker):end]
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The first-seen order of `paths` is maintained.  Paths are normalized
    so variants such as /a and /a/ collapse to a single entry.
    """
    # A companion set gives O(1) membership checks, replacing the O(n**2)
    # list scan the original XXX comment warned about; the list still
    # preserves insertion order.
    seen = set()
    reduced_paths = []
    for p in paths:
        np = os.path.normpath(p)
        if np not in seen:
            seen.add(np)
            reduced_paths.append(np)
    return reduced_paths
class MSVCCompiler(CCompiler) :
    """Concrete class that implements an interface to Microsoft Visual C++,
    as defined by the CCompiler abstract class."""
    compiler_type = 'msvc'
    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}
    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']    # Windows resource scripts
    _mc_extensions = ['.mc']    # Windows message catalogs
    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        # Determine the MSVC version and target architecture from this
        # interpreter's own build metadata (see get_build_version /
        # get_build_architecture above); extensions must match both.
        CCompiler.__init__ (self, verbose, dry_run, force)
        self.__version = get_build_version()
        self.__arch = get_build_architecture()
        if self.__arch == "Intel":
            # x86
            if self.__version >= 7:
                # VS .NET (7.0+) stores settings under VisualStudio and
                # uses $(...) macros that need expanding via MacroExpander.
                self.__root = r"Software\Microsoft\VisualStudio"
                self.__macros = MacroExpander(self.__version)
            else:
                self.__root = r"Software\Microsoft\Devstudio"
            self.__product = "Visual Studio version %s" % self.__version
        else:
            # Win64. Assume this was built with the platform SDK
            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
        # Real tool discovery is deferred to initialize(), run lazily on
        # first use.
        self.initialized = False
def initialize(self):
self.__paths = []
if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
# Assume that the SDK set up everything alright; don't try to be
# smarter
self.cc = "cl.exe"
self.linker = "link.exe"
self.lib = "lib.exe"
self.rc = "rc.exe"
self.mc = "mc.exe"
else:
self.__paths = self.get_msvc_paths("path")
if len(self.__paths) == 0:
raise DistutilsPlatformError("Python was built with %s, "
"and extensions need to be built with the same "
"version of the compiler, but it isn't installed."
% self.__product)
self.cc = self.find_exe("cl.exe")
self.linker = self.find_exe("link.exe")
self.lib = self.find_exe("lib.exe")
self.rc = self.find_exe("rc.exe") # resource compiler
self.mc = self.find_exe("mc.exe") # message compiler
self.set_path_env_var('lib')
self.set_path_env_var('include')
# extend the MSVC path with the current path
try:
for p in os.environ['path'].split(';'):
self.__paths.append(p)
except KeyError:
pass
self.__paths = normalize_and_reduce_paths(self.__paths)
os.environ['path'] = ";".join(self.__paths)
self.preprocess_options = None
if self.__arch == "Intel":
self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' ,
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
'/Z7', '/D_DEBUG']
else:
# Win64
self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
'/Z7', '/D_DEBUG']
self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
if self.__version >= 7:
self.ldflags_shared_debug = [
'/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
]
else:
self.ldflags_shared_debug = [
'/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
]
self.ldflags_static = [ '/nologo']
self.initialized = True
# -- Worker methods ------------------------------------------------
def object_filenames(self,
source_filenames,
strip_dir=0,
output_dir=''):
# Copied from ccompiler.py, extended to return .res as 'object'-file
# for .rc input file
if output_dir is None: output_dir = ''
obj_names = []
for src_name in source_filenames:
(base, ext) = os.path.splitext (src_name)
base = os.path.splitdrive(base)[1] # Chop off the drive
base = base[os.path.isabs(base):] # If abs, chop off leading /
if ext not in self.src_extensions:
# Better to raise an exception instead of silently continuing
# and later complain about sources and targets having
# different lengths
raise CompileError ("Don't know how to compile %s" % src_name)
if strip_dir:
base = os.path.basename (base)
if ext in self._rc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
elif ext in self._mc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
else:
obj_names.append (os.path.join (output_dir,
base + self.obj_extension))
return obj_names
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
if not self.initialized:
self.initialize()
compile_info = self._setup_compile(output_dir, macros, include_dirs,
sources, depends, extra_postargs)
macros, objects, extra_postargs, pp_opts, build = compile_info
compile_opts = extra_preargs or []
compile_opts.append ('/c')
if debug:
compile_opts.extend(self.compile_options_debug)
else:
compile_opts.extend(self.compile_options)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
if debug:
# pass the full pathname to MSVC in debug mode,
# this allows the debugger to find the source file
# without asking the user to browse for it
src = os.path.abspath(src)
if ext in self._c_extensions:
input_opt = "/Tc" + src
elif ext in self._cpp_extensions:
input_opt = "/Tp" + src
elif ext in self._rc_extensions:
# compile .RC to .RES file
input_opt = src
output_opt = "/fo" + obj
try:
self.spawn([self.rc] + pp_opts +
[output_opt] + [input_opt])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
elif ext in self._mc_extensions:
# Compile .MC to .RC file to .RES file.
# * '-h dir' specifies the directory for the
# generated include file
# * '-r dir' specifies the target directory of the
# generated RC file and the binary message resource
# it includes
#
# For now (since there are no options to change this),
# we use the source-directory for the include file and
# the build directory for the RC file and message
# resources. This works at least for win32all.
h_dir = os.path.dirname(src)
rc_dir = os.path.dirname(obj)
try:
# first compile .MC to .RC and .H file
self.spawn([self.mc] +
['-h', h_dir, '-r', rc_dir] + [src])
base, _ = os.path.splitext (os.path.basename (src))
rc_file = os.path.join (rc_dir, base + '.rc')
# then compile .RC to .RES file
self.spawn([self.rc] +
["/fo" + obj] + [rc_file])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
else:
# how to handle this file?
raise CompileError("Don't know how to compile %s to %s"
% (src, obj))
output_opt = "/Fo" + obj
try:
self.spawn([self.cc] + compile_opts + pp_opts +
[input_opt, output_opt] +
extra_postargs)
except DistutilsExecError as msg:
raise CompileError(msg)
return objects
def create_static_lib(self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
output_filename = self.library_filename(output_libname,
output_dir=output_dir)
if self._need_link(objects, output_filename):
lib_args = objects + ['/OUT:' + output_filename]
if debug:
pass # XXX what goes here?
try:
self.spawn([self.lib] + lib_args)
except DistutilsExecError as msg:
raise LibError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def link(self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
fixed_args = self._fix_lib_args(libraries, library_dirs,
runtime_library_dirs)
(libraries, library_dirs, runtime_library_dirs) = fixed_args
if runtime_library_dirs:
self.warn ("I don't know what to do with 'runtime_library_dirs': "
+ str (runtime_library_dirs))
lib_opts = gen_lib_options(self,
library_dirs, runtime_library_dirs,
libraries)
if output_dir is not None:
output_filename = os.path.join(output_dir, output_filename)
if self._need_link(objects, output_filename):
if target_desc == CCompiler.EXECUTABLE:
if debug:
ldflags = self.ldflags_shared_debug[1:]
else:
ldflags = self.ldflags_shared[1:]
else:
if debug:
ldflags = self.ldflags_shared_debug
else:
ldflags = self.ldflags_shared
export_opts = []
for sym in (export_symbols or []):
export_opts.append("/EXPORT:" + sym)
ld_args = (ldflags + lib_opts + export_opts +
objects + ['/OUT:' + output_filename])
# The MSVC linker generates .lib and .exp files, which cannot be
# suppressed by any linker switches. The .lib files may even be
# needed! Make sure they are generated in the temporary build
# directory. Since they have different names for debug and release
# builds, they can go into the same directory.
if export_symbols is not None:
(dll_name, dll_ext) = os.path.splitext(
os.path.basename(output_filename))
implib_file = os.path.join(
os.path.dirname(objects[0]),
self.library_filename(dll_name))
ld_args.append ('/IMPLIB:' + implib_file)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath(os.path.dirname(output_filename))
try:
self.spawn([self.linker] + ld_args)
except DistutilsExecError as msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
# -- Miscellaneous methods -----------------------------------------
# These are all used by the 'gen_lib_options() function, in
# ccompiler.py.
def library_dir_option(self, dir):
return "/LIBPATH:" + dir
def runtime_library_dir_option(self, dir):
raise DistutilsPlatformError(
"don't know how to set runtime library search path for MSVC++")
def library_option(self, lib):
return self.library_filename(lib)
def find_library_file(self, dirs, lib, debug=0):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
try_names = [lib + "_d", lib]
else:
try_names = [lib]
for dir in dirs:
for name in try_names:
libfile = os.path.join(dir, self.library_filename (name))
if os.path.exists(libfile):
return libfile
else:
# Oops, didn't find it in *any* of 'dirs'
return None
# Helper methods for using the MSVC registry settings
def find_exe(self, exe):
"""Return path to an MSVC executable program.
Tries to find the program in several places: first, one of the
MSVC program search paths from the registry; next, the directories
in the PATH environment variable. If any of those work, return an
absolute path that is known to exist. If none of them work, just
return the original program name, 'exe'.
"""
for p in self.__paths:
fn = os.path.join(os.path.abspath(p), exe)
if os.path.isfile(fn):
return fn
# didn't find it; try existing path
for p in os.environ['Path'].split(';'):
fn = os.path.join(os.path.abspath(p),exe)
if os.path.isfile(fn):
return fn
return exe
def get_msvc_paths(self, path, platform='x86'):
"""Get a list of devstudio directories (include, lib or path).
Return a list of strings. The list will be empty if unable to
access the registry or appropriate registry keys not found.
"""
if not _can_read_reg:
return []
path = path + " dirs"
if self.__version >= 7:
key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
% (self.__root, self.__version))
else:
key = (r"%s\6.0\Build System\Components\Platforms"
r"\Win32 (%s)\Directories" % (self.__root, platform))
for base in HKEYS:
d = read_values(base, key)
if d:
if self.__version >= 7:
return self.__macros.sub(d[path]).split(";")
else:
return d[path].split(";")
# MSVC 6 seems to create the registry entries we need only when
# the GUI is run.
if self.__version == 6:
for base in HKEYS:
if read_values(base, r"%s\6.0" % self.__root) is not None:
self.warn("It seems you have Visual Studio 6 installed, "
"but the expected registry settings are not present.\n"
"You must at least run the Visual Studio GUI once "
"so that these entries are created.")
break
return []
def set_path_env_var(self, name):
"""Set environment variable 'name' to an MSVC path type value.
This is equivalent to a SET command prior to execution of spawned
commands.
"""
if name == "lib":
p = self.get_msvc_paths("library")
else:
p = self.get_msvc_paths(name)
if p:
os.environ[name] = ';'.join(p)
# VS 2005 (build version 8.0) and later are handled by the newer
# msvc9compiler module; re-bind MSVCCompiler to that implementation while
# keeping the original class reachable as OldMSVCCompiler.
if get_build_version() >= 8.0:
    log.debug("Importing new compiler from distutils.msvc9compiler")
    OldMSVCCompiler = MSVCCompiler
    from distutils.msvc9compiler import MSVCCompiler
    # get_build_architecture not really relevant now we support cross-compile
    from distutils.msvc9compiler import MacroExpander
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import os
import os.path
import oauth2 as oauth
import json
from font_colors import font_colors
import getrel
import setfav
import time
from torrentApi import torrent_api as api
#name of config file where all keys get stored
config = '~/.config/getrel/getrel.json'
nzb_path = '~/.get_fav/nzbs'
def decode_json(resp):
    """Extract all release dirnames from an xREL favourites JSON response.

    `resp` is the raw JSON text whose 'payload' is a list of favourite
    entries; entries with no (or an empty) 'releases' key are skipped.
    Returns a flat list of dirname strings in payload order.
    """
    payload = json.loads(resp)['payload']
    names = []
    for entry in payload:
        # When a list has no releases, the 'releases' key is absent entirely.
        releases = entry.get('releases')
        if not releases:
            continue
        names.extend(rel['dirname'] for rel in releases)
    return names
# ---- main script (Python 2): sync xREL favourite lists to local downloads ----
config = os.path.expanduser(config)
# Load OAuth/user credentials written by the auth_xrel setup step.
try:
    with open(config, 'r') as f:
        config_dict = json.loads(f.read())
except IOError:
    print 'please run auth_xrel first'
    exit(-42)
config_args = getrel.init_argparse(config)
parsed_config = getrel.init_configparser(config)
config_xrel = config_dict['xrel']
consumer_key = config_xrel['consumer_key']
consumer_secret = config_xrel['consumer_secret']
oauth_token = config_xrel['oauth_token']
oauth_token_secret = config_xrel['oauth_token_secret']
url = 'http://api.xrel.to/api/favs/lists.json'
consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
token = oauth.Token(key=oauth_token, secret=oauth_token_secret)
client = oauth.Client(consumer, token)
resp, content = client.request(url)
favdict = {}
# NOTE(review): content[11:-3] strips a fixed-width wrapper around the JSON
# body — presumably a JSONP-style envelope from the xREL API; confirm the
# offsets against an actual response before changing.
favlists = json.loads(content[11:-3])['payload']
nzb_path = os.path.expanduser(nzb_path)
# Pass 1: fetch each favourite list and collect its release names/ids.
for favlist in favlists:
    listname = favlist['name']
    if listname in config_dict['skip']:
        continue
    listid = favlist['id']
    new_dir = os.path.join(nzb_path, listname)
    if not os.path.exists(new_dir):
        os.makedirs(new_dir)
    url = 'http://api.xrel.to/api/favs/list_entries.json?id=%d&get_releases=true' % listid
    resp, content = client.request(url)
    favdict[listid] = {'name': listname, 'rels': []}
    for fav in json.loads(content[11:-3])['payload']:
        if ('releases' not in fav):
            continue
        if (fav['releases']):
            for dirname in fav['releases']:
                # release id is the 5th path component of the release link
                relid = int(dirname['link_href'].split('/')[4])
                favdict[listid]['rels'].append({'name': dirname['dirname'], 'id': relid})
# NOTE(review): if login() raises, xrel_session is never bound and the
# 'if xrel_session' test below raises NameError — the bare except hides
# the failure but does not make the later use safe.
try:
    xrel_session = setfav.login({'username': config_xrel['username'], 'password': config_xrel['password']})
except:
    pass
prefer_torrent = config_dict['torrent']['prefer']
torrent_download_path = config_dict['torrent']['dir']
if prefer_torrent:
    torrentApi = api.TorrentApi(base_path=torrent_download_path)
# Pass 2: search/download each collected release, then mark it read on xREL.
for favlist in favdict:
    listname = favdict[favlist]['name']
    print '%s%s%s:' % (font_colors.f_magenta, listname, font_colors.f_reset)
    new_dir = os.path.join(nzb_path, listname)
    config_args['category'] = listname.lower()
    for reldict in favdict[favlist]['rels']:
        rel = reldict['name']
        print '%s%s%s searching...' % (font_colors.f_yellow, rel, font_colors.f_reset)
        config_args['query'] = rel
        checked_args = getrel.check_args(config_args.copy(), parsed_config)
        set_fav_data = {
            'anticache': long(time.time()), # unix time stamp (long)
            'isnew': 0, # mark as new, otherwise mark as read (boolean)
            'wid': favlist, # watchlist id (int)
            'rid': reldict['id'] # release id (int)
        }
        found_release = False
        # Try torrents first when configured, fall back to usenet search.
        if prefer_torrent:
            found_release = torrentApi.search(rel)
        if not found_release:
            found_release = getrel.main(checked_args)
        if found_release:
            if xrel_session:
                setfav.set_fav_state(xrel_session, set_fav_data)
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Wei Gao <gaowei3@qq.com>
# Copyright: (c) 2018, Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_facts
short_description: Gathers facts about remote ESXi hostsystem
description:
- This module can be used to gathers facts like CPU, memory, datastore, network and system etc. about ESXi host system.
- Please specify hostname or IP address of ESXi host system as C(hostname).
- If hostname or IP address of vCenter is provided as C(hostname) and C(esxi_hostname) is not specified, then the
module will throw an error.
- VSAN facts added in 2.7 version.
version_added: 2.5
author:
- Wei Gao (@woshihaoren)
requirements:
- python >= 2.6
- PyVmomi
options:
esxi_hostname:
description:
- ESXi hostname.
- Host facts about the specified ESXi server will be returned.
- By specifying this option, you can select which ESXi hostsystem is returned if connecting to a vCenter.
version_added: 2.8
type: str
show_tag:
description:
- Tags related to Host are shown if set to C(True).
default: False
type: bool
required: False
version_added: 2.9
schema:
description:
- Specify the output schema desired.
- The 'summary' output schema is the legacy output from the module
- The 'vsphere' output schema is the vSphere API class definition
which requires pyvmomi>6.7.1
choices: ['summary', 'vsphere']
default: 'summary'
type: str
version_added: '2.10'
properties:
description:
- Specify the properties to retrieve.
- If not specified, all properties are retrieved (deeply).
- Results are returned in a structure identical to the vsphere API.
- 'Example:'
- ' properties: ['
- ' "hardware.memorySize",'
- ' "hardware.cpuInfo.numCpuCores",'
- ' "config.product.apiVersion",'
- ' "overallStatus"'
- ' ]'
- Only valid when C(schema) is C(vsphere).
type: list
required: False
version_added: '2.10'
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather vmware host facts
vmware_host_facts:
hostname: "{{ esxi_server }}"
username: "{{ esxi_username }}"
password: "{{ esxi_password }}"
register: host_facts
delegate_to: localhost
- name: Gather vmware host facts from vCenter
vmware_host_facts:
hostname: "{{ vcenter_server }}"
username: "{{ vcenter_user }}"
password: "{{ vcenter_pass }}"
esxi_hostname: "{{ esxi_hostname }}"
register: host_facts
delegate_to: localhost
- name: Gather vmware host facts from vCenter with tag information
vmware_host_facts:
hostname: "{{ vcenter_server }}"
username: "{{ vcenter_user }}"
password: "{{ vcenter_pass }}"
esxi_hostname: "{{ esxi_hostname }}"
show_tag: True
register: host_facts_tag
delegate_to: localhost
- name: Get VSAN Cluster UUID from host facts
vmware_host_facts:
hostname: "{{ esxi_server }}"
username: "{{ esxi_username }}"
password: "{{ esxi_password }}"
register: host_facts
- set_fact:
cluster_uuid: "{{ host_facts['ansible_facts']['vsan_cluster_uuid'] }}"
- name: Gather some info from a host using the vSphere API output schema
vmware_host_facts:
hostname: "{{ vcenter_server }}"
username: "{{ esxi_username }}"
password: "{{ esxi_password }}"
esxi_hostname: "{{ esxi_hostname }}"
schema: vsphere
properties:
- hardware.memorySize
- hardware.cpuInfo.numCpuCores
- config.product.apiVersion
- overallStatus
register: host_facts
'''
RETURN = r'''
ansible_facts:
description: system info about the host machine
returned: always
type: dict
sample:
{
"ansible_all_ipv4_addresses": [
"10.76.33.200"
],
"ansible_bios_date": "2011-01-01T00:00:00+00:00",
"ansible_bios_version": "0.5.1",
"ansible_datastore": [
{
"free": "11.63 GB",
"name": "datastore1",
"total": "12.50 GB"
}
],
"ansible_distribution": "VMware ESXi",
"ansible_distribution_build": "4887370",
"ansible_distribution_version": "6.5.0",
"ansible_hostname": "10.76.33.100",
"ansible_in_maintenance_mode": true,
"ansible_interfaces": [
"vmk0"
],
"ansible_memfree_mb": 2702,
"ansible_memtotal_mb": 4095,
"ansible_os_type": "vmnix-x86",
"ansible_processor": "Intel Xeon E312xx (Sandy Bridge)",
"ansible_processor_cores": 2,
"ansible_processor_count": 2,
"ansible_processor_vcpus": 2,
"ansible_product_name": "KVM",
"ansible_product_serial": "NA",
"ansible_system_vendor": "Red Hat",
"ansible_uptime": 1791680,
"ansible_vmk0": {
"device": "vmk0",
"ipv4": {
"address": "10.76.33.100",
"netmask": "255.255.255.0"
},
"macaddress": "52:54:00:56:7d:59",
"mtu": 1500
},
"vsan_cluster_uuid": null,
"vsan_node_uuid": null,
"vsan_health": "unknown",
"tags": [
{
"category_id": "urn:vmomi:InventoryServiceCategory:8eb81431-b20d-49f5-af7b-126853aa1189:GLOBAL",
"category_name": "host_category_0001",
"description": "",
"id": "urn:vmomi:InventoryServiceTag:e9398232-46fd-461a-bf84-06128e182a4a:GLOBAL",
"name": "host_tag_0001"
}
],
}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.formatters import bytes_to_human
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, find_obj
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.vmware_rest_client import VmwareRestClient
class VMwareHostFactManager(PyVmomi):
    """Gather facts from one ESXi host, reached directly or through vCenter."""

    def __init__(self, module):
        # Resolve self.host: via esxi_hostname lookup when talking to a
        # vCenter, otherwise the single HostSystem of a direct connection.
        super(VMwareHostFactManager, self).__init__(module)
        esxi_host_name = self.params.get('esxi_hostname', None)
        if self.is_vcenter():
            if esxi_host_name is None:
                self.module.fail_json(msg="Connected to a vCenter system without specifying esxi_hostname")
            self.host = self.get_all_host_objs(esxi_host_name=esxi_host_name)
            if len(self.host) > 1:
                self.module.fail_json(msg="esxi_hostname matched multiple hosts")
            self.host = self.host[0]
        else:
            self.host = find_obj(self.content, [vim.HostSystem], None)

        if self.host is None:
            self.module.fail_json(msg="Failed to find host system.")

    def all_facts(self):
        """Collect the legacy 'summary' fact set and exit the module."""
        ansible_facts = {}
        ansible_facts.update(self.get_cpu_facts())
        ansible_facts.update(self.get_memory_facts())
        ansible_facts.update(self.get_datastore_facts())
        ansible_facts.update(self.get_network_facts())
        ansible_facts.update(self.get_system_facts())
        ansible_facts.update(self.get_vsan_facts())
        ansible_facts.update(self.get_cluster_facts())
        if self.params.get('show_tag'):
            vmware_client = VmwareRestClient(self.module)
            tag_info = {
                'tags': vmware_client.get_tags_for_hostsystem(hostsystem_mid=self.host._moId)
            }
            ansible_facts.update(tag_info)

        self.module.exit_json(changed=False, ansible_facts=ansible_facts)

    def get_cluster_facts(self):
        """Return the host's cluster name, or None for a standalone host."""
        cluster_facts = {'cluster': None}
        if self.host.parent and isinstance(self.host.parent, vim.ClusterComputeResource):
            cluster_facts.update(cluster=self.host.parent.name)

        return cluster_facts

    def get_vsan_facts(self):
        """Return VSAN cluster/node UUIDs and health; null facts if VSAN is absent."""
        config_mgr = self.host.configManager.vsanSystem
        if config_mgr is None:
            return {
                'vsan_cluster_uuid': None,
                'vsan_node_uuid': None,
                'vsan_health': "unknown",
            }

        status = config_mgr.QueryHostStatus()
        return {
            'vsan_cluster_uuid': status.uuid,
            'vsan_node_uuid': status.nodeUuid,
            'vsan_health': status.health,
        }

    def get_cpu_facts(self):
        """Return CPU model and core/package/thread counts."""
        return {
            'ansible_processor': self.host.summary.hardware.cpuModel,
            'ansible_processor_cores': self.host.summary.hardware.numCpuCores,
            'ansible_processor_count': self.host.summary.hardware.numCpuPkgs,
            'ansible_processor_vcpus': self.host.summary.hardware.numCpuThreads,
        }

    def get_memory_facts(self):
        """Return total and free memory in MB (free = total - quick-stats usage)."""
        return {
            'ansible_memfree_mb': self.host.hardware.memorySize // 1024 // 1024 - self.host.summary.quickStats.overallMemoryUsage,
            'ansible_memtotal_mb': self.host.hardware.memorySize // 1024 // 1024,
        }

    def get_datastore_facts(self):
        """Return name/total/free (human-readable) for each attached datastore."""
        facts = dict()
        facts['ansible_datastore'] = []
        for store in self.host.datastore:
            _tmp = {
                'name': store.summary.name,
                'total': bytes_to_human(store.summary.capacity),
                'free': bytes_to_human(store.summary.freeSpace),
            }
            facts['ansible_datastore'].append(_tmp)
        return facts

    def get_network_facts(self):
        """Return per-vmkernel-NIC details plus interface and IPv4 summaries."""
        facts = dict()
        facts['ansible_interfaces'] = []
        facts['ansible_all_ipv4_addresses'] = []
        for nic in self.host.config.network.vnic:
            device = nic.device
            facts['ansible_interfaces'].append(device)
            facts['ansible_all_ipv4_addresses'].append(nic.spec.ip.ipAddress)
            _tmp = {
                'device': device,
                'ipv4': {
                    'address': nic.spec.ip.ipAddress,
                    'netmask': nic.spec.ip.subnetMask,
                },
                'macaddress': nic.spec.mac,
                'mtu': nic.spec.mtu,
            }
            facts['ansible_' + device] = _tmp
        return facts

    def get_system_facts(self):
        """Return product/BIOS/vendor/uptime facts; serial falls back to 'NA'."""
        sn = 'NA'
        for info in self.host.hardware.systemInfo.otherIdentifyingInfo:
            if info.identifierType.key == 'ServiceTag':
                sn = info.identifierValue
        facts = {
            'ansible_distribution': self.host.config.product.name,
            'ansible_distribution_version': self.host.config.product.version,
            'ansible_distribution_build': self.host.config.product.build,
            'ansible_os_type': self.host.config.product.osType,
            'ansible_system_vendor': self.host.hardware.systemInfo.vendor,
            'ansible_hostname': self.host.summary.config.name,
            'ansible_product_name': self.host.hardware.systemInfo.model,
            'ansible_product_serial': sn,
            'ansible_bios_date': self.host.hardware.biosInfo.releaseDate,
            'ansible_bios_version': self.host.hardware.biosInfo.biosVersion,
            'ansible_uptime': self.host.summary.quickStats.uptime,
            'ansible_in_maintenance_mode': self.host.runtime.inMaintenanceMode,
        }
        return facts

    def properties_facts(self):
        """Collect the requested vSphere-schema properties and exit the module."""
        ansible_facts = self.to_json(self.host, self.params.get('properties'))
        if self.params.get('show_tag'):
            vmware_client = VmwareRestClient(self.module)
            tag_info = {
                'tags': vmware_client.get_tags_for_hostsystem(hostsystem_mid=self.host._moId)
            }
            ansible_facts.update(tag_info)

        self.module.exit_json(changed=False, ansible_facts=ansible_facts)
def main():
    """Ansible entry point: build the argument spec and dispatch fact gathering."""
    spec = vmware_argument_spec()
    extra_options = {
        'esxi_hostname': dict(type='str', required=False),
        'show_tag': dict(type='bool', default=False),
        'schema': dict(type='str', choices=['summary', 'vsphere'], default='summary'),
        'properties': dict(type='list'),
    }
    spec.update(**extra_options)

    module = AnsibleModule(argument_spec=spec, supports_check_mode=True)
    manager = VMwareHostFactManager(module)

    # 'summary' keeps the legacy flat fact layout; 'vsphere' mirrors the API schema.
    if module.params['schema'] == 'summary':
        manager.all_facts()
    else:
        manager.properties_facts()


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import struct
from time import time
from .utils import get_args
class FakePogoApi:
def __init__(self, mock):
# Fake a 24 hour auth token
self._auth_provider = type('', (object,), {"_ticket_expire": (time() + (3600 * 24)) * 1000})()
self.inited = False
self.mock = mock
def set_proxy(self, proxy_config):
pass
def activate_signature(self, library):
pass
def set_position(self, lat, lng, alt):
# meters radius (very, very rough approximation -- deal with it)
if not self.inited:
args = get_args()
radius = 140 * args.step_limit
requests.get('{}/login/{}/{}/{}'.format(self.mock, lat, lng, radius))
self.inited = True
def set_authentication(self, provider=None, oauth2_refresh_token=None, username=None, password=None):
pass
def i2f(self, i):
return struct.unpack('<d', struct.pack('<Q', i))[0]
def get_map_objects(self, latitude=None, longitude=None, since_timestamp_ms=None, cell_id=None):
location = (self.i2f(latitude), self.i2f(longitude))
response = requests.get('{}/scan/{}/{}'.format(self.mock, *location))
return response.json() | unknown | codeparrot/codeparrot-clean | ||
/*
* contrib/btree_gist/btree_float8.c
*/
#include "postgres.h"
#include "btree_gist.h"
#include "btree_utils_num.h"
#include "utils/float.h"
#include "utils/rel.h"
#include "utils/sortsupport.h"
/*
 * GiST index key for float8: the [lower, upper] interval covered by an
 * index entry.  For leaf entries lower == upper.
 */
typedef struct float8key
{
	float8		lower;
	float8		upper;
} float8KEY;

/* GiST support functions */
PG_FUNCTION_INFO_V1(gbt_float8_compress);
PG_FUNCTION_INFO_V1(gbt_float8_fetch);
PG_FUNCTION_INFO_V1(gbt_float8_union);
PG_FUNCTION_INFO_V1(gbt_float8_picksplit);
PG_FUNCTION_INFO_V1(gbt_float8_consistent);
PG_FUNCTION_INFO_V1(gbt_float8_distance);
PG_FUNCTION_INFO_V1(gbt_float8_penalty);
PG_FUNCTION_INFO_V1(gbt_float8_same);
PG_FUNCTION_INFO_V1(gbt_float8_sortsupport);
/*
 * Scalar comparison callbacks handed to the generic btree_gist numeric
 * machinery via 'tinfo' below; each argument points at a bare float8.
 */
static bool
gbt_float8gt(const void *a, const void *b, FmgrInfo *flinfo)
{
	return (*((const float8 *) a) > *((const float8 *) b));
}

static bool
gbt_float8ge(const void *a, const void *b, FmgrInfo *flinfo)
{
	return (*((const float8 *) a) >= *((const float8 *) b));
}

static bool
gbt_float8eq(const void *a, const void *b, FmgrInfo *flinfo)
{
	return (*((const float8 *) a) == *((const float8 *) b));
}

static bool
gbt_float8le(const void *a, const void *b, FmgrInfo *flinfo)
{
	return (*((const float8 *) a) <= *((const float8 *) b));
}

static bool
gbt_float8lt(const void *a, const void *b, FmgrInfo *flinfo)
{
	return (*((const float8 *) a) < *((const float8 *) b));
}
/*
 * Ordering callback for key ranges (used when sorting entries during
 * picksplit): compare by lower bound first, then by upper bound.
 * Returns -1, 0, or +1.
 */
static int
gbt_float8key_cmp(const void *a, const void *b, FmgrInfo *flinfo)
{
	const float8KEY *ka = (const float8KEY *) (((const Nsrt *) a)->t);
	const float8KEY *kb = (const float8KEY *) (((const Nsrt *) b)->t);

	if (ka->lower != kb->lower)
		return (ka->lower > kb->lower) ? 1 : -1;

	if (ka->upper != kb->upper)
		return (ka->upper > kb->upper) ? 1 : -1;

	return 0;
}
/*
 * Distance callback for KNN searches: |a - b|, erroring out if the
 * subtraction of two finite inputs overflows to infinity.
 */
static float8
gbt_float8_dist(const void *a, const void *b, FmgrInfo *flinfo)
{
	float8		arg1 = *(const float8 *) a;
	float8		arg2 = *(const float8 *) b;
	float8		r;

	r = arg1 - arg2;
	if (unlikely(isinf(r)) && !isinf(arg1) && !isinf(arg2))
		float_overflow_error();

	return fabs(r);
}
/* Type descriptor wiring the float8 callbacks into btree_utils_num.c. */
static const gbtree_ninfo tinfo =
{
	gbt_t_float8,
	sizeof(float8),
	16,							/* sizeof(gbtreekey16) */
	gbt_float8gt,
	gbt_float8ge,
	gbt_float8eq,
	gbt_float8le,
	gbt_float8lt,
	gbt_float8key_cmp,
	gbt_float8_dist
};
PG_FUNCTION_INFO_V1(float8_dist);
/*
 * SQL-callable float8 <-> float8 distance operator: |a - b|, with an
 * overflow error when subtracting two finite values yields infinity.
 */
Datum
float8_dist(PG_FUNCTION_ARGS)
{
	float8		a = PG_GETARG_FLOAT8(0);
	float8		b = PG_GETARG_FLOAT8(1);
	float8		r;

	r = a - b;
	if (unlikely(isinf(r)) && !isinf(a) && !isinf(b))
		float_overflow_error();

	PG_RETURN_FLOAT8(fabs(r));
}
/**************************************************
* GiST support functions
**************************************************/
Datum
gbt_float8_compress(PG_FUNCTION_ARGS)
{
GISTENTRY *entry = (GISTENTRY *) PG_GETARG_POINTER(0);
PG_RETURN_POINTER(gbt_num_compress(entry, &tinfo));
}
Datum
gbt_float8_fetch(PG_FUNCTION_ARGS)
{
GISTENTRY *entry = (GISTENTRY *) PG_GETARG_POINTER(0);
PG_RETURN_POINTER(gbt_num_fetch(entry, &tinfo));
}
Datum
gbt_float8_consistent(PG_FUNCTION_ARGS)
{
GISTENTRY *entry = (GISTENTRY *) PG_GETARG_POINTER(0);
float8 query = PG_GETARG_FLOAT8(1);
StrategyNumber strategy = (StrategyNumber) PG_GETARG_UINT16(2);
#ifdef NOT_USED
Oid subtype = PG_GETARG_OID(3);
#endif
bool *recheck = (bool *) PG_GETARG_POINTER(4);
float8KEY *kkk = (float8KEY *) DatumGetPointer(entry->key);
GBT_NUMKEY_R key;
/* All cases served by this function are exact */
*recheck = false;
key.lower = (GBT_NUMKEY *) &kkk->lower;
key.upper = (GBT_NUMKEY *) &kkk->upper;
PG_RETURN_BOOL(gbt_num_consistent(&key, &query, &strategy,
GIST_LEAF(entry), &tinfo,
fcinfo->flinfo));
}
Datum
gbt_float8_distance(PG_FUNCTION_ARGS)
{
GISTENTRY *entry = (GISTENTRY *) PG_GETARG_POINTER(0);
float8 query = PG_GETARG_FLOAT8(1);
#ifdef NOT_USED
Oid subtype = PG_GETARG_OID(3);
#endif
float8KEY *kkk = (float8KEY *) DatumGetPointer(entry->key);
GBT_NUMKEY_R key;
key.lower = (GBT_NUMKEY *) &kkk->lower;
key.upper = (GBT_NUMKEY *) &kkk->upper;
PG_RETURN_FLOAT8(gbt_num_distance(&key, &query, GIST_LEAF(entry),
&tinfo, fcinfo->flinfo));
}
Datum
gbt_float8_union(PG_FUNCTION_ARGS)
{
GistEntryVector *entryvec = (GistEntryVector *) PG_GETARG_POINTER(0);
void *out = palloc(sizeof(float8KEY));
*(int *) PG_GETARG_POINTER(1) = sizeof(float8KEY);
PG_RETURN_POINTER(gbt_num_union(out, entryvec, &tinfo, fcinfo->flinfo));
}
Datum
gbt_float8_penalty(PG_FUNCTION_ARGS)
{
float8KEY *origentry = (float8KEY *) DatumGetPointer(((GISTENTRY *) PG_GETARG_POINTER(0))->key);
float8KEY *newentry = (float8KEY *) DatumGetPointer(((GISTENTRY *) PG_GETARG_POINTER(1))->key);
float *result = (float *) PG_GETARG_POINTER(2);
penalty_num(result, origentry->lower, origentry->upper, newentry->lower, newentry->upper);
PG_RETURN_POINTER(result);
}
Datum
gbt_float8_picksplit(PG_FUNCTION_ARGS)
{
PG_RETURN_POINTER(gbt_num_picksplit((GistEntryVector *) PG_GETARG_POINTER(0),
(GIST_SPLITVEC *) PG_GETARG_POINTER(1),
&tinfo, fcinfo->flinfo));
}
Datum
gbt_float8_same(PG_FUNCTION_ARGS)
{
float8KEY *b1 = (float8KEY *) PG_GETARG_POINTER(0);
float8KEY *b2 = (float8KEY *) PG_GETARG_POINTER(1);
bool *result = (bool *) PG_GETARG_POINTER(2);
*result = gbt_num_same((void *) b1, (void *) b2, &tinfo, fcinfo->flinfo);
PG_RETURN_POINTER(result);
}
/*
 * Sort-support comparator for float8 index keys, used when building the
 * index via sorting.
 */
static int
gbt_float8_ssup_cmp(Datum x, Datum y, SortSupport ssup)
{
    float8KEY  *arg1 = (float8KEY *) DatumGetPointer(x);
    float8KEY  *arg2 = (float8KEY *) DatumGetPointer(y);

    /* for leaf items we expect lower == upper, so only compare lower */
    return float8_cmp_internal(arg1->lower, arg2->lower);
}
/*
 * GiST sortsupport function: installs the comparator above so sorted index
 * builds can order float8 keys.
 */
Datum
gbt_float8_sortsupport(PG_FUNCTION_ARGS)
{
    SortSupport ssup = (SortSupport) PG_GETARG_POINTER(0);

    ssup->comparator = gbt_float8_ssup_cmp;
    ssup->ssup_extra = NULL;

    PG_RETURN_VOID();
}
#
# This file is part of Bakefile (http://bakefile.org)
#
# Copyright (C) 2009-2013 Vaclav Slavik
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import plugins | unknown | codeparrot/codeparrot-clean | ||
# frozen_string_literal: true
module Jekyll
  module Drops
    class Drop < Liquid::Drop
      include Enumerable

      NON_CONTENT_METHODS = [:fallback_data, :collapse_document].freeze

      NON_CONTENT_METHOD_NAMES = NON_CONTENT_METHODS.map(&:to_s).freeze
      private_constant :NON_CONTENT_METHOD_NAMES

      # A private stash to avoid repeatedly generating the setter method name string for
      # a call to `Drops::Drop#[]=`.
      # The keys of the stash below have a very high probability of being called upon during
      # the course of various `Jekyll::Renderer#run` calls.
      SETTER_KEYS_STASH = {
        "content"            => "content=",
        "layout"             => "layout=",
        "page"               => "page=",
        "paginator"          => "paginator=",
        "highlighter_prefix" => "highlighter_prefix=",
        "highlighter_suffix" => "highlighter_suffix=",
      }.freeze
      private_constant :SETTER_KEYS_STASH

      class << self
        # Get or set whether the drop class is mutable.
        # Mutability determines whether or not pre-defined fields may be
        # overwritten.
        #
        # is_mutable - Boolean set mutability of the class (default: nil)
        #
        # Returns the mutability of the class
        def mutable(is_mutable = nil)
          @is_mutable = is_mutable || false
        end

        def mutable?
          @is_mutable
        end

        # public delegation helper methods that call onto Drop's instance
        # variable `@obj`.

        # Generate private Drop instance_methods for each symbol in the given list.
        #
        # Returns nothing.
        def private_delegate_methods(*symbols)
          symbols.each { |symbol| private delegate_method(symbol) }
          nil
        end

        # Generate public Drop instance_methods for each symbol in the given list.
        #
        # Returns nothing.
        def delegate_methods(*symbols)
          symbols.each { |symbol| delegate_method(symbol) }
          nil
        end

        # Generate public Drop instance_method for given symbol that calls `@obj.<sym>`.
        #
        # Returns delegated method symbol.
        def delegate_method(symbol)
          define_method(symbol) { @obj.send(symbol) }
        end

        # Generate public Drop instance_method named `delegate` that calls `@obj.<original>`.
        #
        # Returns delegated method symbol.
        def delegate_method_as(original, delegate)
          define_method(delegate) { @obj.send(original) }
        end

        # Generate public Drop instance_methods for each string entry in the given list.
        # The generated method(s) access(es) `@obj`'s data hash.
        #
        # Returns nothing.
        def data_delegators(*strings)
          strings.each do |key|
            data_delegator(key) if key.is_a?(String)
          end
          nil
        end

        # Generate public Drop instance_methods for given string `key`.
        # The generated method access(es) `@obj`'s data hash.
        #
        # Returns method symbol.
        def data_delegator(key)
          define_method(key.to_sym) { @obj.data[key] }
        end

        # Array of stringified instance methods that do not end with the assignment operator.
        #
        # (<klass>.instance_methods always generates a new Array object so it can be mutated)
        #
        # Returns array of strings.
        def getter_method_names
          @getter_method_names ||= instance_methods.map!(&:to_s).tap do |list|
            list.reject! { |item| item.end_with?("=") }
          end
        end
      end

      # Create a new Drop
      #
      # obj - the Jekyll Site, Collection, or Document required by the
      # drop.
      #
      # Returns nothing
      def initialize(obj)
        @obj = obj
      end

      # Access a method in the Drop or a field in the underlying hash data.
      # If mutable, checks the mutations first. Then checks the methods,
      # and finally check the underlying hash (e.g. document front matter)
      # if all the previous places didn't match.
      #
      # key - the string key whose value to fetch
      #
      # Returns the value for the given key, or nil if none exists
      def [](key)
        if self.class.mutable? && mutations.key?(key)
          mutations[key]
        elsif self.class.invokable? key
          public_send key
        else
          fallback_data[key]
        end
      end
      alias_method :invoke_drop, :[]

      # Set a field in the Drop. If mutable, sets in the mutations and
      # returns. If not mutable, checks first if it's trying to override a
      # Drop method and raises a DropMutationException if so. If not
      # mutable and the key is not a method on the Drop, then it sets the
      # key to the value in the underlying hash (e.g. document front
      # matter)
      #
      # key - the String key whose value to set
      # val - the Object to set the key's value to
      #
      # Returns the value the key was set to unless the Drop is not mutable
      # and the key matches a method in which case it raises a
      # DropMutationException.
      def []=(key, val)
        setter = SETTER_KEYS_STASH[key] || "#{key}="
        if respond_to?(setter)
          public_send(setter, val)
        elsif respond_to?(key.to_s)
          if self.class.mutable?
            mutations[key] = val
          else
            raise Errors::DropMutationException, "Key #{key} cannot be set in the drop."
          end
        else
          fallback_data[key] = val
        end
      end

      # Generates a list of strings which correspond to content getter
      # methods.
      #
      # Returns an Array of strings which represent method-specific keys.
      def content_methods
        @content_methods ||= \
          self.class.getter_method_names \
            - Jekyll::Drops::Drop.getter_method_names \
            - NON_CONTENT_METHOD_NAMES
      end

      # Check if key exists in Drop
      #
      # key - the string key whose value to fetch
      #
      # Returns true if the given key is present
      def key?(key)
        return false if key.nil?
        return true if self.class.mutable? && mutations.key?(key)

        respond_to?(key) || fallback_data.key?(key)
      end

      # Generates a list of keys with user content as their values.
      # This gathers up the Drop methods and keys of the mutations and
      # underlying data hashes and performs a set union to ensure a list
      # of unique keys for the Drop.
      #
      # Returns an Array of unique keys for content for the Drop.
      def keys
        (content_methods |
          mutations.keys |
          fallback_data.keys).flatten
      end

      # Generate a Hash representation of the Drop by resolving each key's
      # value. It includes Drop methods, mutations, and the underlying object's
      # data. See the documentation for Drop#keys for more.
      #
      # Returns a Hash with all the keys and values resolved.
      def to_h
        keys.each_with_object({}) do |(key, _), result|
          result[key] = self[key]
        end
      end
      alias_method :to_hash, :to_h

      # Inspect the drop's keys and values through a JSON representation
      # of its keys and values.
      #
      # Returns a pretty generation of the hash representation of the Drop.
      def inspect
        JSON.pretty_generate to_h
      end

      # Generate a Hash for use in generating JSON.
      # This is useful if fields need to be cleared before the JSON can generate.
      #
      # Returns a Hash ready for JSON generation.
      def hash_for_json(*)
        to_h
      end

      # Generate a JSON representation of the Drop.
      #
      # state - the JSON::State object which determines the state of current processing.
      #
      # Returns a JSON representation of the Drop in a String.
      def to_json(state = nil)
        JSON.generate(hash_for_json(state), state)
      end

      # Collects all the keys and passes each to the block in turn.
      #
      # block - a block which accepts one argument, the key
      #
      # Returns nothing.
      def each_key(&block)
        keys.each(&block)
      end

      def each
        each_key.each do |key|
          yield key, self[key]
        end
      end

      def merge(other, &block)
        dup.tap do |me|
          if block.nil?
            me.merge!(other)
          else
            # Forward the block with `&`: `merge!` accepts exactly one
            # positional argument and reads the block via `block_given?`,
            # so passing `block` positionally would raise ArgumentError.
            me.merge!(other, &block)
          end
        end
      end

      def merge!(other)
        other.each_key do |key|
          if block_given?
            self[key] = yield key, self[key], other[key]
          else
            if Utils.mergable?(self[key]) && Utils.mergable?(other[key])
              self[key] = Utils.deep_merge_hashes(self[key], other[key])
              next
            end

            self[key] = other[key] unless other[key].nil?
          end
        end
      end

      # Imitate Hash.fetch method in Drop
      #
      # Returns value if key is present in Drop, otherwise returns default value
      # KeyError is raised if key is not present and no default value given
      def fetch(key, default = nil, &block)
        return self[key] if key?(key)
        raise KeyError, %(key not found: "#{key}") if default.nil? && block.nil?
        return yield(key) unless block.nil?

        default unless default.nil?
      end

      private

      def mutations
        @mutations ||= {}
      end
    end
  end
end
"""
===========
Basic Units
===========
"""
import math
import numpy as np
import matplotlib.units as units
import matplotlib.ticker as ticker
class ProxyDelegate:
    """Descriptor that, on attribute access, wraps the owning instance in a
    proxy object bound to a particular method name.
    """

    def __init__(self, fn_name, proxy_type):
        # Both values are plain attributes consumed by __get__.
        self.fn_name = fn_name
        self.proxy_type = proxy_type

    def __get__(self, obj, objtype=None):
        # Build a fresh proxy around the accessed instance each time.
        proxy_cls = self.proxy_type
        return proxy_cls(self.fn_name, obj)
class TaggedValueMeta(type):
    """Metaclass that, for every entry in the class's ``_proxies`` mapping,
    installs a ``ProxyDelegate`` descriptor for any method name the class
    does not already provide.
    """

    def __init__(self, name, bases, dict):
        for fn_name in self._proxies:
            if not hasattr(self, fn_name):
                # Delegate the missing dunder to the configured proxy type.
                setattr(self, fn_name,
                        ProxyDelegate(fn_name, self._proxies[fn_name]))
class PassThroughProxy:
    """Callable that forwards a call to ``obj.proxy_target``'s method named
    *fn_name*, converting neither the arguments nor the return value.
    """

    def __init__(self, fn_name, obj):
        self.fn_name = fn_name
        self.target = obj.proxy_target

    def __call__(self, *args):
        # Look the method up lazily at call time, then invoke it directly.
        return getattr(self.target, self.fn_name)(*args)
class ConvertArgsProxy(PassThroughProxy):
    """Proxy that converts every argument to this object's unit before
    forwarding the call; the return value is passed through unchanged.
    """

    def __init__(self, fn_name, obj):
        PassThroughProxy.__init__(self, fn_name, obj)
        self.unit = obj.unit

    def __call__(self, *args):
        converted_args = []
        for a in args:
            try:
                converted_args.append(a.convert_to(self.unit))
            except AttributeError:
                # Bare (unit-less) values are assumed to already be in
                # self.unit and are simply tagged with it.
                converted_args.append(TaggedValue(a, self.unit))
        # Unwrap back to raw values for the underlying call.
        converted_args = tuple([c.get_value() for c in converted_args])
        return PassThroughProxy.__call__(self, *converted_args)
class ConvertReturnProxy(PassThroughProxy):
    """Proxy that forwards the call unchanged but tags the return value with
    this object's unit (NotImplemented is propagated as-is).
    """

    def __init__(self, fn_name, obj):
        PassThroughProxy.__init__(self, fn_name, obj)
        self.unit = obj.unit

    def __call__(self, *args):
        ret = PassThroughProxy.__call__(self, *args)
        return (NotImplemented if ret is NotImplemented
                else TaggedValue(ret, self.unit))
class ConvertAllProxy(PassThroughProxy):
    """Proxy that converts arguments to this object's unit where possible,
    forwards the call, and re-tags the result with a unit resolved from all
    operand units via ``unit_resolver``.
    """

    def __init__(self, fn_name, obj):
        PassThroughProxy.__init__(self, fn_name, obj)
        self.unit = obj.unit

    def __call__(self, *args):
        converted_args = []
        arg_units = [self.unit]
        for a in args:
            if hasattr(a, 'get_unit') and not hasattr(a, 'convert_to'):
                # if this arg has a unit type but no conversion ability,
                # this operation is prohibited
                return NotImplemented

            if hasattr(a, 'convert_to'):
                try:
                    a = a.convert_to(self.unit)
                except Exception:
                    # Incompatible units: keep the argument's own unit and
                    # let unit_resolver decide below.
                    pass
                arg_units.append(a.get_unit())
                converted_args.append(a.get_value())
            else:
                # Plain value: record it with no unit.
                converted_args.append(a)
                if hasattr(a, 'get_unit'):
                    arg_units.append(a.get_unit())
                else:
                    arg_units.append(None)
        converted_args = tuple(converted_args)
        ret = PassThroughProxy.__call__(self, *converted_args)
        if ret is NotImplemented:
            return NotImplemented
        ret_unit = unit_resolver(self.fn_name, arg_units)
        if ret_unit is NotImplemented:
            return NotImplemented
        return TaggedValue(ret, ret_unit)
class TaggedValue(metaclass=TaggedValueMeta):
    """A value annotated with a unit.

    Arithmetic support is injected by ``TaggedValueMeta`` from ``_proxies``:
    each dunder listed there is implemented by the associated proxy class.
    """

    _proxies = {'__add__': ConvertAllProxy,
                '__sub__': ConvertAllProxy,
                '__mul__': ConvertAllProxy,
                '__rmul__': ConvertAllProxy,
                '__cmp__': ConvertAllProxy,
                '__lt__': ConvertAllProxy,
                '__gt__': ConvertAllProxy,
                '__len__': PassThroughProxy}

    def __new__(cls, value, unit):
        # generate a new subclass for value
        value_class = type(value)
        try:
            subcls = type(f'TaggedValue_of_{value_class.__name__}',
                          (cls, value_class), {})
            return object.__new__(subcls)
        except TypeError:
            # value_class cannot be subclassed; fall back to plain TaggedValue.
            return object.__new__(cls)

    def __init__(self, value, unit):
        self.value = value
        self.unit = unit
        # PassThroughProxy forwards calls to this target.
        self.proxy_target = self.value

    def __getattribute__(self, name):
        if name.startswith('__'):
            return object.__getattribute__(self, name)
        variable = object.__getattribute__(self, 'value')
        if hasattr(variable, name) and name not in self.__class__.__dict__:
            # Delegate attributes not defined on this class to the wrapped
            # value.
            return getattr(variable, name)
        return object.__getattribute__(self, name)

    def __array__(self, dtype=object):
        return np.asarray(self.value).astype(dtype)

    def __array_wrap__(self, array, context):
        return TaggedValue(array, self.unit)

    def __repr__(self):
        return 'TaggedValue({!r}, {!r})'.format(self.value, self.unit)

    def __str__(self):
        return str(self.value) + ' in ' + str(self.unit)

    def __len__(self):
        return len(self.value)

    def __iter__(self):
        # Return a generator expression rather than use `yield`, so that
        # TypeError is raised by iter(self) if appropriate when checking for
        # iterability.
        return (TaggedValue(inner, self.unit) for inner in self.value)

    def get_compressed_copy(self, mask):
        # Drop masked-out elements, keeping the unit.
        new_value = np.ma.masked_array(self.value, mask=mask).compressed()
        return TaggedValue(new_value, self.unit)

    def convert_to(self, unit):
        if unit == self.unit or not unit:
            return self
        try:
            new_value = self.unit.convert_value_to(self.value, unit)
        except AttributeError:
            # self.unit has no conversion support; keep the wrapper as-is.
            new_value = self
        return TaggedValue(new_value, unit)

    def get_value(self):
        return self.value

    def get_unit(self):
        return self.unit
class BasicUnit:
    """A named unit with a registry of conversions to other units.

    Calling a BasicUnit tags a value with it; multiplying a value by a unit
    does the same via the unit-resolution machinery.
    """

    def __init__(self, name, fullname=None):
        self.name = name
        if fullname is None:
            fullname = name
        self.fullname = fullname
        # Maps target unit -> callable converting a value into that unit.
        self.conversions = dict()

    def __repr__(self):
        return f'BasicUnit({self.name})'

    def __str__(self):
        return self.fullname

    def __call__(self, value):
        return TaggedValue(value, self)

    def __mul__(self, rhs):
        value = rhs
        unit = self
        if hasattr(rhs, 'get_unit'):
            value = rhs.get_value()
            unit = rhs.get_unit()
            unit = unit_resolver('__mul__', (self, unit))
        if unit is NotImplemented:
            return NotImplemented
        return TaggedValue(value, unit)

    def __rmul__(self, lhs):
        return self*lhs

    def __array_wrap__(self, array, context):
        return TaggedValue(array, self)

    def __array__(self, t=None, context=None):
        # For numpy interop, a bare unit behaves as the length-1 array [1].
        ret = np.array([1])
        if t is not None:
            return ret.astype(t)
        else:
            return ret

    def add_conversion_factor(self, unit, factor):
        # Register a purely multiplicative conversion into `unit`.
        def convert(x):
            return x*factor
        self.conversions[unit] = convert

    def add_conversion_fn(self, unit, fn):
        # Register an arbitrary callable conversion into `unit`.
        self.conversions[unit] = fn

    def get_conversion_fn(self, unit):
        return self.conversions[unit]

    def convert_value_to(self, value, unit):
        # KeyError here means no conversion to `unit` was registered.
        conversion_fn = self.conversions[unit]
        ret = conversion_fn(value)
        return ret

    def get_unit(self):
        return self
class UnitResolver:
    """Resolve the result unit of an arithmetic operation from the units of
    its operands.

    Returns NotImplemented when the operation is unsupported or the operand
    units are incompatible.
    """

    def addition_rule(self, units):
        # Addition/subtraction requires every operand to share one unit.
        for unit_1, unit_2 in zip(units[:-1], units[1:]):
            if unit_1 != unit_2:
                return NotImplemented
        return units[0]

    def multiplication_rule(self, units):
        # Multiplication is only supported when at most one operand carries
        # a unit; that unit (if any) becomes the result unit.
        non_null = [u for u in units if u]
        if len(non_null) > 1:
            return NotImplemented
        if not non_null:
            # No operand carries a unit, so there is nothing to propagate.
            # (Previously this raised IndexError on non_null[0]; returning
            # NotImplemented matches the protocol the callers expect.)
            return NotImplemented
        return non_null[0]

    # Per-operation dispatch table; values are plain (unbound) functions
    # invoked as fn(self, units) in __call__.
    op_dict = {
        '__mul__': multiplication_rule,
        '__rmul__': multiplication_rule,
        '__add__': addition_rule,
        '__radd__': addition_rule,
        '__sub__': addition_rule,
        '__rsub__': addition_rule}

    def __call__(self, operation, units):
        if operation not in self.op_dict:
            return NotImplemented
        return self.op_dict[operation](self, units)
# Shared resolver instance used by the proxy machinery above.
unit_resolver = UnitResolver()

# Length units, convertible in both directions.
cm = BasicUnit('cm', 'centimeters')
inch = BasicUnit('inch', 'inches')
inch.add_conversion_factor(cm, 2.54)
cm.add_conversion_factor(inch, 1/2.54)

# Angle units.
radians = BasicUnit('rad', 'radians')
degrees = BasicUnit('deg', 'degrees')
radians.add_conversion_factor(degrees, 180.0/np.pi)
degrees.add_conversion_factor(radians, np.pi/180.0)

# Time/frequency units; seconds -> Hertz is a reciprocal, not a factor.
secs = BasicUnit('s', 'seconds')
hertz = BasicUnit('Hz', 'Hertz')
minutes = BasicUnit('min', 'minutes')

secs.add_conversion_fn(hertz, lambda x: 1./x)
secs.add_conversion_factor(minutes, 1/60.0)
# radians formatting
def rad_fn(x, pos=None):
    """Tick formatter mapping a value in radians onto a TeX multiple of pi/2.

    *pos* is accepted for the Matplotlib formatter signature and unused.
    """
    # Round x to the nearest half-pi, biasing symmetrically toward zero.
    half_pis = (x / np.pi) * 2.0
    n = int(half_pis + 0.25) if x >= 0 else int(half_pis - 0.25)

    special = {
        0: '0',
        1: r'$\pi/2$',
        2: r'$\pi$',
        -1: r'$-\pi/2$',
        -2: r'$-\pi$',
    }
    if n in special:
        return special[n]
    if n % 2 == 0:
        return fr'${n//2}\pi$'
    return fr'${n}\pi/2$'
class BasicUnitConverter(units.ConversionInterface):
    """Matplotlib units interface for BasicUnit/TaggedValue values."""

    @staticmethod
    def axisinfo(unit, axis):
        """Return AxisInfo instance for x and unit."""
        if unit == radians:
            # Label ticks as multiples of pi/2.
            return units.AxisInfo(
                majloc=ticker.MultipleLocator(base=np.pi/2),
                majfmt=ticker.FuncFormatter(rad_fn),
                label=unit.fullname,
            )
        elif unit == degrees:
            return units.AxisInfo(
                majloc=ticker.AutoLocator(),
                majfmt=ticker.FormatStrFormatter(r'$%i^\circ$'),
                label=unit.fullname,
            )
        elif unit is not None:
            if hasattr(unit, 'fullname'):
                return units.AxisInfo(label=unit.fullname)
            elif hasattr(unit, 'unit'):
                return units.AxisInfo(label=unit.unit.fullname)
        return None

    @staticmethod
    def convert(val, unit, axis):
        # Convert tagged value(s) to plain floats in the target unit.
        if units.ConversionInterface.is_numlike(val):
            return val
        if np.iterable(val):
            if isinstance(val, np.ma.MaskedArray):
                val = val.astype(float).filled(np.nan)
            out = np.empty(len(val))
            for i, thisval in enumerate(val):
                if np.ma.is_masked(thisval):
                    out[i] = np.nan
                else:
                    try:
                        out[i] = thisval.convert_to(unit).get_value()
                    except AttributeError:
                        # Plain numbers pass through unconverted.
                        out[i] = thisval
            return out
        if np.ma.is_masked(val):
            return np.nan
        else:
            return val.convert_to(unit).get_value()

    @staticmethod
    def default_units(x, axis):
        """Return the default unit for x or None."""
        if np.iterable(x):
            # Uses the first element's unit; assumes a non-empty homogeneous
            # sequence.
            for thisx in x:
                return thisx.unit
        return x.unit
def cos(x):
    """Cosine accepting unit-tagged values, or an iterable of them.

    Each value is converted to radians before math.cos is applied; iterables
    yield a list of plain floats.
    """
    if not np.iterable(x):
        return math.cos(x.convert_to(radians).get_value())
    return [math.cos(val.convert_to(radians).get_value()) for val in x]
# Register the converter so Matplotlib knows how to handle these value types.
units.registry[BasicUnit] = units.registry[TaggedValue] = BasicUnitConverter()
"""
These test the method maybe_promote from core/dtypes/cast.py
"""
import datetime
from decimal import Decimal
import numpy as np
import pytest
from pandas._libs.tslibs import NaT
from pandas.core.dtypes.cast import maybe_promote
from pandas.core.dtypes.common import is_scalar
from pandas.core.dtypes.dtypes import DatetimeTZDtype
from pandas.core.dtypes.missing import isna
import pandas as pd
def _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar=None):
    """
    Auxiliary function to unify testing of scalar/array promotion.

    Parameters
    ----------
    dtype : dtype
        The value to pass on as the first argument to maybe_promote.
    fill_value : scalar
        The value to pass on as the second argument to maybe_promote as
        a scalar.
    expected_dtype : dtype
        The expected dtype returned by maybe_promote (by design this is the
        same regardless of whether fill_value was passed as a scalar or in an
        array!).
    exp_val_for_scalar : scalar
        The expected value for the (potentially upcast) fill_value returned by
        maybe_promote.
    """
    assert is_scalar(fill_value)

    # here, we pass on fill_value as a scalar directly; the expected value
    # returned from maybe_promote is fill_value, potentially upcast to the
    # returned dtype.
    result_dtype, result_fill_value = maybe_promote(dtype, fill_value)
    expected_fill_value = exp_val_for_scalar

    assert result_dtype == expected_dtype
    # Value equality alone is not enough; _assert_match also checks that the
    # type (or dtype kind/itemsize) agrees.
    _assert_match(result_fill_value, expected_fill_value)
def _assert_match(result_fill_value, expected_fill_value):
# GH#23982/25425 require the same type in addition to equality/NA-ness
res_type = type(result_fill_value)
ex_type = type(expected_fill_value)
if hasattr(result_fill_value, "dtype"):
# Compare types in a way that is robust to platform-specific
# idiosyncrasies where e.g. sometimes we get "ulonglong" as an alias
# for "uint64" or "intc" as an alias for "int32"
assert result_fill_value.dtype.kind == expected_fill_value.dtype.kind
assert result_fill_value.dtype.itemsize == expected_fill_value.dtype.itemsize
else:
# On some builds, type comparison fails, e.g. np.int32 != np.int32
assert res_type == ex_type or res_type.__name__ == ex_type.__name__
match_value = result_fill_value == expected_fill_value
if match_value is pd.NA:
match_value = False
# Note: type check above ensures that we have the _same_ NA value
# for missing values, None == None (which is checked
# through match_value above), but np.nan != np.nan and pd.NaT != pd.NaT
match_missing = isna(result_fill_value) and isna(expected_fill_value)
assert match_value or match_missing
# Exhaustive fixed-width integer promotion matrix: each case gives the
# starting dtype, a fill value (chosen at the boundary of a narrower/wider
# integer type), and the dtype maybe_promote is expected to upcast to.
@pytest.mark.parametrize(
    "dtype, fill_value, expected_dtype",
    [
        # size 8
        ("int8", 1, "int8"),
        ("int8", np.iinfo("int8").max + 1, "int16"),
        ("int8", np.iinfo("int16").max + 1, "int32"),
        ("int8", np.iinfo("int32").max + 1, "int64"),
        ("int8", np.iinfo("int64").max + 1, "object"),
        ("int8", -1, "int8"),
        ("int8", np.iinfo("int8").min - 1, "int16"),
        ("int8", np.iinfo("int16").min - 1, "int32"),
        ("int8", np.iinfo("int32").min - 1, "int64"),
        ("int8", np.iinfo("int64").min - 1, "object"),
        # keep signed-ness as long as possible
        ("uint8", 1, "uint8"),
        ("uint8", np.iinfo("int8").max + 1, "uint8"),
        ("uint8", np.iinfo("uint8").max + 1, "uint16"),
        ("uint8", np.iinfo("int16").max + 1, "uint16"),
        ("uint8", np.iinfo("uint16").max + 1, "uint32"),
        ("uint8", np.iinfo("int32").max + 1, "uint32"),
        ("uint8", np.iinfo("uint32").max + 1, "uint64"),
        ("uint8", np.iinfo("int64").max + 1, "uint64"),
        ("uint8", np.iinfo("uint64").max + 1, "object"),
        # max of uint8 cannot be contained in int8
        ("uint8", -1, "int16"),
        ("uint8", np.iinfo("int8").min - 1, "int16"),
        ("uint8", np.iinfo("int16").min - 1, "int32"),
        ("uint8", np.iinfo("int32").min - 1, "int64"),
        ("uint8", np.iinfo("int64").min - 1, "object"),
        # size 16
        ("int16", 1, "int16"),
        ("int16", np.iinfo("int8").max + 1, "int16"),
        ("int16", np.iinfo("int16").max + 1, "int32"),
        ("int16", np.iinfo("int32").max + 1, "int64"),
        ("int16", np.iinfo("int64").max + 1, "object"),
        ("int16", -1, "int16"),
        ("int16", np.iinfo("int8").min - 1, "int16"),
        ("int16", np.iinfo("int16").min - 1, "int32"),
        ("int16", np.iinfo("int32").min - 1, "int64"),
        ("int16", np.iinfo("int64").min - 1, "object"),
        ("uint16", 1, "uint16"),
        ("uint16", np.iinfo("int8").max + 1, "uint16"),
        ("uint16", np.iinfo("uint8").max + 1, "uint16"),
        ("uint16", np.iinfo("int16").max + 1, "uint16"),
        ("uint16", np.iinfo("uint16").max + 1, "uint32"),
        ("uint16", np.iinfo("int32").max + 1, "uint32"),
        ("uint16", np.iinfo("uint32").max + 1, "uint64"),
        ("uint16", np.iinfo("int64").max + 1, "uint64"),
        ("uint16", np.iinfo("uint64").max + 1, "object"),
        ("uint16", -1, "int32"),
        ("uint16", np.iinfo("int8").min - 1, "int32"),
        ("uint16", np.iinfo("int16").min - 1, "int32"),
        ("uint16", np.iinfo("int32").min - 1, "int64"),
        ("uint16", np.iinfo("int64").min - 1, "object"),
        # size 32
        ("int32", 1, "int32"),
        ("int32", np.iinfo("int8").max + 1, "int32"),
        ("int32", np.iinfo("int16").max + 1, "int32"),
        ("int32", np.iinfo("int32").max + 1, "int64"),
        ("int32", np.iinfo("int64").max + 1, "object"),
        ("int32", -1, "int32"),
        ("int32", np.iinfo("int8").min - 1, "int32"),
        ("int32", np.iinfo("int16").min - 1, "int32"),
        ("int32", np.iinfo("int32").min - 1, "int64"),
        ("int32", np.iinfo("int64").min - 1, "object"),
        ("uint32", 1, "uint32"),
        ("uint32", np.iinfo("int8").max + 1, "uint32"),
        ("uint32", np.iinfo("uint8").max + 1, "uint32"),
        ("uint32", np.iinfo("int16").max + 1, "uint32"),
        ("uint32", np.iinfo("uint16").max + 1, "uint32"),
        ("uint32", np.iinfo("int32").max + 1, "uint32"),
        ("uint32", np.iinfo("uint32").max + 1, "uint64"),
        ("uint32", np.iinfo("int64").max + 1, "uint64"),
        ("uint32", np.iinfo("uint64").max + 1, "object"),
        ("uint32", -1, "int64"),
        ("uint32", np.iinfo("int8").min - 1, "int64"),
        ("uint32", np.iinfo("int16").min - 1, "int64"),
        ("uint32", np.iinfo("int32").min - 1, "int64"),
        ("uint32", np.iinfo("int64").min - 1, "object"),
        # size 64
        ("int64", 1, "int64"),
        ("int64", np.iinfo("int8").max + 1, "int64"),
        ("int64", np.iinfo("int16").max + 1, "int64"),
        ("int64", np.iinfo("int32").max + 1, "int64"),
        ("int64", np.iinfo("int64").max + 1, "object"),
        ("int64", -1, "int64"),
        ("int64", np.iinfo("int8").min - 1, "int64"),
        ("int64", np.iinfo("int16").min - 1, "int64"),
        ("int64", np.iinfo("int32").min - 1, "int64"),
        ("int64", np.iinfo("int64").min - 1, "object"),
        ("uint64", 1, "uint64"),
        ("uint64", np.iinfo("int8").max + 1, "uint64"),
        ("uint64", np.iinfo("uint8").max + 1, "uint64"),
        ("uint64", np.iinfo("int16").max + 1, "uint64"),
        ("uint64", np.iinfo("uint16").max + 1, "uint64"),
        ("uint64", np.iinfo("int32").max + 1, "uint64"),
        ("uint64", np.iinfo("uint32").max + 1, "uint64"),
        ("uint64", np.iinfo("int64").max + 1, "uint64"),
        ("uint64", np.iinfo("uint64").max + 1, "object"),
        ("uint64", -1, "object"),
        ("uint64", np.iinfo("int8").min - 1, "object"),
        ("uint64", np.iinfo("int16").min - 1, "object"),
        ("uint64", np.iinfo("int32").min - 1, "object"),
        ("uint64", np.iinfo("int64").min - 1, "object"),
    ],
)
def test_maybe_promote_int_with_int(dtype, fill_value, expected_dtype):
    dtype = np.dtype(dtype)
    expected_dtype = np.dtype(expected_dtype)

    # output is not a generic int, but corresponds to expected_dtype
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
# Filling an integer dtype with a float value: always upcasts to float64.
def test_maybe_promote_int_with_float(any_int_numpy_dtype, float_numpy_dtype):
    dtype = np.dtype(any_int_numpy_dtype)
    fill_dtype = np.dtype(float_numpy_dtype)

    # create array of given dtype; casts "1" to correct dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # filling int with float always upcasts to float64
    expected_dtype = np.float64
    # fill_value can be different float type
    exp_val_for_scalar = np.float64(fill_value)

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)


# Filling a float dtype with an integer value: the float dtype is retained.
def test_maybe_promote_float_with_int(float_numpy_dtype, any_int_numpy_dtype):
    dtype = np.dtype(float_numpy_dtype)
    fill_dtype = np.dtype(any_int_numpy_dtype)

    # create array of given dtype; casts "1" to correct dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # filling float with int always keeps float dtype
    # because: np.finfo('float32').max > np.iinfo('uint64').max
    expected_dtype = dtype
    # output is not a generic float, but corresponds to expected_dtype
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)


# Float/complex promotion matrix: fill values that exceed float32 range
# force an upcast to the 64-bit variant; complex fill values force a
# complex result dtype.
@pytest.mark.parametrize(
    "dtype, fill_value, expected_dtype",
    [
        # float filled with float
        ("float32", 1, "float32"),
        ("float32", float(np.finfo("float32").max) * 1.1, "float64"),
        ("float64", 1, "float64"),
        ("float64", float(np.finfo("float32").max) * 1.1, "float64"),
        # complex filled with float
        ("complex64", 1, "complex64"),
        ("complex64", float(np.finfo("float32").max) * 1.1, "complex128"),
        ("complex128", 1, "complex128"),
        ("complex128", float(np.finfo("float32").max) * 1.1, "complex128"),
        # float filled with complex
        ("float32", 1 + 1j, "complex64"),
        ("float32", float(np.finfo("float32").max) * (1.1 + 1j), "complex128"),
        ("float64", 1 + 1j, "complex128"),
        ("float64", float(np.finfo("float32").max) * (1.1 + 1j), "complex128"),
        # complex filled with complex
        ("complex64", 1 + 1j, "complex64"),
        ("complex64", float(np.finfo("float32").max) * (1.1 + 1j), "complex128"),
        ("complex128", 1 + 1j, "complex128"),
        ("complex128", float(np.finfo("float32").max) * (1.1 + 1j), "complex128"),
    ],
)
def test_maybe_promote_float_with_float(dtype, fill_value, expected_dtype):
    dtype = np.dtype(dtype)
    expected_dtype = np.dtype(expected_dtype)

    # output is not a generic float, but corresponds to expected_dtype
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
# bool mixed with any other dtype promotes to object (bool+bool stays bool).
def test_maybe_promote_bool_with_any(any_numpy_dtype):
    dtype = np.dtype(bool)
    fill_dtype = np.dtype(any_numpy_dtype)

    # create array of given dtype; casts "1" to correct dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # filling bool with anything but bool casts to object
    expected_dtype = np.dtype(object) if fill_dtype != bool else fill_dtype
    exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)


def test_maybe_promote_any_with_bool(any_numpy_dtype):
    dtype = np.dtype(any_numpy_dtype)
    fill_value = True

    # filling anything but bool with bool casts to object
    expected_dtype = np.dtype(object) if dtype != bool else dtype
    # output is not a generic bool, but corresponds to expected_dtype
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)


# bytes dtypes are never used internally, so either direction gives object.
def test_maybe_promote_bytes_with_any(bytes_dtype, any_numpy_dtype):
    dtype = np.dtype(bytes_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)

    # create array of given dtype; casts "1" to correct dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # we never use bytes dtype internally, always promote to object
    expected_dtype = np.dtype(np.object_)
    exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)


def test_maybe_promote_any_with_bytes(any_numpy_dtype):
    dtype = np.dtype(any_numpy_dtype)

    # create array of given dtype
    fill_value = b"abc"

    # we never use bytes dtype internally, always promote to object
    expected_dtype = np.dtype(np.object_)
    # output is not a generic bytes, but corresponds to expected_dtype
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
# datetime64 keeps its dtype only when filled with another datetime; any
# other fill dtype promotes to object.
def test_maybe_promote_datetime64_with_any(datetime64_dtype, any_numpy_dtype):
    dtype = np.dtype(datetime64_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)

    # create array of given dtype; casts "1" to correct dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # filling datetime with anything but datetime casts to object
    if fill_dtype.kind == "M":
        expected_dtype = dtype
        # for datetime dtypes, scalar values get cast to to_datetime64
        exp_val_for_scalar = pd.Timestamp(fill_value).to_datetime64()
    else:
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)


@pytest.mark.parametrize(
    "fill_value",
    [
        pd.Timestamp("now"),
        np.datetime64("now"),
        datetime.datetime.now(),
        datetime.date.today(),
    ],
    ids=["pd.Timestamp", "np.datetime64", "datetime.datetime", "datetime.date"],
)
def test_maybe_promote_any_with_datetime64(any_numpy_dtype, fill_value):
    dtype = np.dtype(any_numpy_dtype)

    # filling datetime with anything but datetime casts to object
    if dtype.kind == "M":
        expected_dtype = dtype
        # for datetime dtypes, scalar values get cast to pd.Timestamp.value
        exp_val_for_scalar = pd.Timestamp(fill_value).to_datetime64()
    else:
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value

    if type(fill_value) is datetime.date and dtype.kind == "M":
        # Casting date to dt64 is deprecated, in 2.0 enforced to cast to object
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)


# Timezone-aware fill values always promote a plain numpy dtype to object.
@pytest.mark.parametrize(
    "fill_value",
    [
        pd.Timestamp(2023, 1, 1),
        np.datetime64("2023-01-01"),
        datetime.datetime(2023, 1, 1),
        datetime.date(2023, 1, 1),
    ],
    ids=["pd.Timestamp", "np.datetime64", "datetime.datetime", "datetime.date"],
)
def test_maybe_promote_any_numpy_dtype_with_datetimetz(
    any_numpy_dtype, tz_aware_fixture, fill_value
):
    dtype = np.dtype(any_numpy_dtype)
    fill_dtype = DatetimeTZDtype(tz=tz_aware_fixture)

    # Localize the fill value via a tz-aware Series before extracting it.
    fill_value = pd.Series([fill_value], dtype=fill_dtype)[0]

    # filling any numpy dtype with datetimetz casts to object
    expected_dtype = np.dtype(object)
    exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
def test_maybe_promote_timedelta64_with_any(timedelta64_dtype, any_numpy_dtype):
    # Only timedelta fills keep a timedelta64 dtype; everything else -> object.
    dtype = np.dtype(timedelta64_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)
    # Materialize a scalar of the fill dtype by casting 1 through an array.
    fill_value = np.array([1], dtype=fill_dtype)[0]
    if fill_dtype.kind == "m":
        # timedelta scalars are normalized to an np.timedelta64 value
        expected = (dtype, pd.Timedelta(fill_value).to_timedelta64())
    else:
        expected = (np.dtype(object), fill_value)
    _check_promote(dtype, fill_value, *expected)
@pytest.mark.parametrize(
    "fill_value",
    [pd.Timedelta(days=1), np.timedelta64(24, "h"), datetime.timedelta(1)],
    ids=["pd.Timedelta", "np.timedelta64", "datetime.timedelta"],
)
def test_maybe_promote_any_with_timedelta64(any_numpy_dtype, fill_value):
    # Timedelta fills only survive into timedelta64 dtypes; any other target
    # dtype is upcast to object.
    dtype = np.dtype(any_numpy_dtype)
    if dtype.kind != "m":
        expected_dtype, exp_val_for_scalar = np.dtype(object), fill_value
    else:
        # the scalar is normalized to an np.timedelta64 value
        expected_dtype = dtype
        exp_val_for_scalar = pd.Timedelta(fill_value).to_timedelta64()
    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
def test_maybe_promote_string_with_any(string_dtype, any_numpy_dtype):
    # Promoting a string dtype with any fill value always falls back to object.
    dtype = np.dtype(string_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)
    # scalar of the fill dtype, built by round-tripping 1 through an array
    fill_value = np.array([1], dtype=fill_dtype)[0]
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
def test_maybe_promote_any_with_string(any_numpy_dtype):
    # A string fill value upcasts every numpy dtype to object; the scalar
    # passes through unchanged.
    dtype = np.dtype(any_numpy_dtype)
    fill_value = "abc"
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
def test_maybe_promote_object_with_any(object_dtype, any_numpy_dtype):
    # Object dtype absorbs any fill value and stays object.
    dtype = np.dtype(object_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)
    # scalar of the fill dtype, built by round-tripping 1 through an array
    fill_value = np.array([1], dtype=fill_dtype)[0]
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
def test_maybe_promote_any_with_object(any_numpy_dtype):
    # Fill with a scalar object (passes dtypes.common.is_scalar) that cannot
    # be cast to int/float/etc.; every target dtype must promote to object.
    dtype = np.dtype(any_numpy_dtype)
    fill_value = pd.DateOffset(1)
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
def test_maybe_promote_any_numpy_dtype_with_na(any_numpy_dtype, nulls_fixture):
    """Check promotion of every numpy dtype against every null-like fill.

    Branch order matters: Decimal is handled first, then integer targets,
    object+NaT, datetime/timedelta targets, NaT fills, float/complex targets,
    and finally the generic cast-to-object fallback.
    """
    fill_value = nulls_fixture
    dtype = np.dtype(any_numpy_dtype)
    if isinstance(fill_value, Decimal):
        # Subject to change, but ATM (When Decimal(NAN) is being added to nulls_fixture)
        # this is the existing behavior in maybe_promote,
        # hinges on is_valid_na_for_dtype
        if dtype.kind in "iufc":
            if dtype.kind in "iu":
                # integers cannot hold NaN, so they widen to float64
                expected_dtype = np.dtype(np.float64)
            else:
                expected_dtype = dtype
            exp_val_for_scalar = np.nan
        else:
            expected_dtype = np.dtype(object)
            exp_val_for_scalar = fill_value
    elif dtype.kind in "iu" and fill_value is not NaT:
        # integer + other missing value (np.nan / None) casts to float
        expected_dtype = np.float64
        exp_val_for_scalar = np.nan
    elif dtype == object and fill_value is NaT:
        # inserting into object does not cast the value
        # but *does* cast None to np.nan
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value
    elif dtype.kind in "mM":
        # datetime / timedelta cast all missing values to dtyped-NaT
        expected_dtype = dtype
        exp_val_for_scalar = dtype.type("NaT", "ns")
    elif fill_value is NaT:
        # NaT upcasts everything that's not datetime/timedelta to object
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = NaT
    elif dtype.kind in "fc":
        # float / complex + missing value (!= NaT) stays the same
        expected_dtype = dtype
        exp_val_for_scalar = np.nan
    else:
        # all other cases cast to object, and use np.nan as missing value
        expected_dtype = np.dtype(object)
        if fill_value is pd.NA:
            # pd.NA is preserved as-is rather than replaced by np.nan
            exp_val_for_scalar = pd.NA
        else:
            exp_val_for_scalar = np.nan
    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import testtools
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron import context
from neutron.db import common_db_mixin
from neutron.db import securitygroups_db
from neutron.extensions import securitygroup
from neutron.tests.unit import testlib_api
class SecurityGroupDbMixinImpl(securitygroups_db.SecurityGroupDbMixin,
                               common_db_mixin.CommonDbMixin):
    """Concrete composition of the security-group and common DB mixins,
    used as the unit under test below."""
    pass
class SecurityGroupDbMixinTestCase(testlib_api.SqlTestCase):
    """Tests that callback failures raised via the registry are translated
    into the appropriate securitygroup API exceptions by the mixin."""
    def setUp(self):
        super(SecurityGroupDbMixinTestCase, self).setUp()
        self.ctx = context.get_admin_context()
        self.mixin = SecurityGroupDbMixinImpl()
    def test_create_security_group_conflict(self):
        # A CallbackFailure during creation must surface as SecurityGroupConflict.
        with mock.patch.object(registry, "notify") as mock_notify:
            mock_notify.side_effect = exceptions.CallbackFailure(Exception())
            secgroup = {'security_group': mock.ANY}
            with testtools.ExpectedException(
                securitygroup.SecurityGroupConflict):
                self.mixin.create_security_group(self.ctx, secgroup)
    def test_delete_security_group_in_use(self):
        # Port-binding lookup and group fetch are stubbed; only the failing
        # notify should drive the SecurityGroupInUse exception.
        with mock.patch.object(self.mixin,
                               '_get_port_security_group_bindings'),\
                mock.patch.object(self.mixin, '_get_security_group'),\
                mock.patch.object(registry, "notify") as mock_notify:
            mock_notify.side_effect = exceptions.CallbackFailure(Exception())
            with testtools.ExpectedException(
                securitygroup.SecurityGroupInUse):
                self.mixin.delete_security_group(self.ctx, mock.ANY)
    def test_update_security_group_conflict(self):
        # A CallbackFailure during update must surface as SecurityGroupConflict.
        with mock.patch.object(registry, "notify") as mock_notify:
            mock_notify.side_effect = exceptions.CallbackFailure(Exception())
            secgroup = {'security_group': mock.ANY}
            with testtools.ExpectedException(
                securitygroup.SecurityGroupConflict):
                self.mixin.update_security_group(self.ctx, 'foo_id', secgroup)
    def test_create_security_group_rule_conflict(self):
        # Validation and duplicate checks are stubbed out so the failing
        # notify is the only source of the exception.
        with mock.patch.object(self.mixin, '_validate_security_group_rule'),\
                mock.patch.object(self.mixin,
                                  '_check_for_duplicate_rules_in_db'),\
                mock.patch.object(registry, "notify") as mock_notify:
            mock_notify.side_effect = exceptions.CallbackFailure(Exception())
            with testtools.ExpectedException(
                securitygroup.SecurityGroupConflict):
                self.mixin.create_security_group_rule(
                    self.ctx, mock.MagicMock())
    def test_delete_security_group_rule_in_use(self):
        # A CallbackFailure on rule deletion maps to SecurityGroupRuleInUse.
        with mock.patch.object(registry, "notify") as mock_notify:
            mock_notify.side_effect = exceptions.CallbackFailure(Exception())
            with testtools.ExpectedException(
                securitygroup.SecurityGroupRuleInUse):
                self.mixin.delete_security_group_rule(self.ctx, mock.ANY)
    def test_delete_security_group_rule_raise_error_on_not_found(self):
        # Deleting an unknown rule id raises SecurityGroupRuleNotFound.
        with testtools.ExpectedException(
            securitygroup.SecurityGroupRuleNotFound):
            self.mixin.delete_security_group_rule(self.ctx, 'foo_rule')
#!/usr/bin/python3
# Single purpose HTTP server
# - accepts POST of ureport JSON and dumps it to a file
import sys
import json
import http.server as BaseHTTPServer
import cgi
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Single-purpose request handler: accepts a multipart POST of a uReport
    JSON, dumps it (pretty-printed) to ``Handler.save_ureport``, and replies
    with a canned FAF-style JSON response.
    """

    def do_POST(self):
        # Parse the multipart form body; the ureport is uploaded as 'file'.
        # NOTE(review): the cgi module is deprecated/removed in newer Pythons —
        # consider email.parser or a third-party multipart parser if upgrading.
        form = cgi.FieldStorage(
            fp=self.rfile,
            headers=self.headers,
            environ={
                'REQUEST_METHOD': 'POST',
                'CONTENT_TYPE': self.headers['Content-Type'],
            }
        )

        # 202 Accepted is sent unconditionally, before inspecting the path.
        self.send_response(202)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Connection', 'close')
        self.end_headers()

        if self.path != '/faf/reports/new/':
            # Record the unexpected path so the calling test can inspect it.
            # The file is opened in binary mode, so the payload must be bytes
            # (the original wrote a str here, raising TypeError on Python 3).
            with open(self.save_ureport, 'wb') as fh:
                fh.write(('{"invalid_request_path": "%s"}' % self.path).encode())
            return

        # Dump the uploaded ureport, pretty-printed, for later inspection.
        ureport = json.load(form['file'].file)
        with open(self.save_ureport, 'wb') as fh:
            ureport_dump = json.dumps(ureport, indent=2).encode()
            fh.write(ureport_dump)

        # Canned response mimicking the real FAF server's answer.
        response = {
            'bthash': '691cf824e3e07457156125636e86c50279e29496',
            'message': 'https://retrace.fedoraproject.org/faf/reports/6437/\nhttps://bugzilla.redhat.com/show_bug.cgi?id=851210',
            'reported_to': [
                {
                    'type': 'url',
                    'value': 'https://retrace.fedoraproject.org/faf/reports/6437/',
                    'reporter': 'ABRT Server'
                },
                {
                    'type': 'url',
                    'value': 'https://bugzilla.redhat.com/show_bug.cgi?id=851210',
                    'reporter': 'Bugzilla'
                }
            ],
            'result': True
        }
        response_dump = json.dumps(response, indent=2).encode()
        self.wfile.write(response_dump)
# Fixed port known to the test driver.
PORT = 12345
print("Serving at port", PORT)
# Destination file for the dumped ureport; optional first CLI argument.
Handler.save_ureport = sys.argv[1] if len(sys.argv) > 1 else 'ureport.json'
# Bind on all interfaces and serve until killed.
httpd = BaseHTTPServer.HTTPServer(("", PORT), Handler)
httpd.serve_forever()
# SPDX-License-Identifier: GPL-2.0-only OR BSD-2-Clause
%YAML 1.2
---
$id: http://devicetree.org/schemas/mfd/brcm,twd.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Broadcom's Timer-Watchdog (aka TWD)
maintainers:
  - Rafał Miłecki <rafal@milecki.pl>
description: |
  Broadcom has a Timer-Watchdog block used in multiple SoCs (e.g., BCM4908,
  BCM63xx, BCM7038). There are few variants available (they differ slightly in
  registers layout). This block consists of: timers, watchdog and optionally a
  software reset handler.
properties:
  compatible:
    items:
      - enum:
          - brcm,bcm4908-twd
          - brcm,bcm7038-twd
      - const: simple-mfd
      - const: syscon
  reg:
    maxItems: 1
  ranges: true
  "#address-cells":
    const: 1
  "#size-cells":
    const: 1
# Child nodes: timer and watchdog sub-blocks are addressed within the
# parent's register window via the `ranges` translation above.
patternProperties:
  '^timer@[a-f0-9]+$':
    $ref: /schemas/timer/brcm,bcmbca-timer.yaml
  '^watchdog@[a-f0-9]+$':
    $ref: /schemas/watchdog/brcm,bcm7038-wdt.yaml
additionalProperties: false
required:
  - reg
examples:
  - |
    timer-mfd@ff800400 {
        compatible = "brcm,bcm4908-twd", "simple-mfd", "syscon";
        reg = <0xff800400 0x4c>;
        ranges = <0x00000000 0xff800400 0x4c>;
        #address-cells = <1>;
        #size-cells = <1>;
        timer@0 {
            compatible = "brcm,bcm63138-timer";
            reg = <0x0 0x28>;
        };
        watchdog@28 {
            compatible = "brcm,bcm7038-wdt";
            reg = <0x28 0x8>;
        };
    };
#!/bin/sh
# Writes out all of the exported symbols to a file.
# This is needed on AIX as symbols are not exported
# by an executable by default and need to be listed
# specifically for export so that they can be used
# by native add-ons.
#
# Usage: create_expfile.sh <search-dir> <output-expfile>
#
# The raw symbol data is obtained by using nm on
# the .a files which make up the node executable.
#
# -Xany processes symbols for both 32-bit and
# 64-bit (the default is for 32-bit only).
#
# -g selects only exported symbols.
#
# -C, -B and -p ensure that the output is in a
# format that can be easily parsed and converted
# into the required symbol.
#
# -C suppresses the demangling of C++ names.
# -B writes the output in BSD format.
# -p displays the info in a standard portable
#    output format.
#
# Only include symbols if they are of the following
# types and don't start with a dot.
#
# T - Global text symbol.
# D - Global data symbol.
# B - Global bss symbol.
#
# The final sort allows removal of any duplicates.
#
# Symbols for the gtest libraries are excluded as
# they are not linked into the node executable.
#
echo "Searching $1 to write out expfile to $2"

# This special sequence must be at the start of the exp file.
echo "#!." > "$2.tmp"

# Pull the symbols from the .a files.
find "$1" -name "*.a" | grep -v gtest \
  | xargs nm -Xany -BCpg \
  | awk '{
      if ((($2 == "T") || ($2 == "D") || ($2 == "B")) &&
          (substr($3,1,1) != ".")) { print $3 }
      }' \
  | sort -u >> "$2.tmp"

# Build into a temp file first, then move into place in one step.
mv -f "$2.tmp" "$2"
# Copyright (c) 2001-2006 Twisted Matrix Laboratories.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Tests for epoll wrapper.
"""
import errno
import os
import select
import socket
import time
import unittest
from test import support
# Skip the whole module unless the platform exposes a working epoll:
# the select module must provide epoll() and the kernel must implement it.
if not hasattr(select, "epoll"):
    raise unittest.SkipTest("test works only on Linux 2.6")
try:
    select.epoll()
except OSError as e:
    if e.errno == errno.ENOSYS:
        # The symbol exists but the running kernel lacks the syscall.
        raise unittest.SkipTest("kernel doesn't support epoll()")
    raise
class TestEPoll(unittest.TestCase):
    """Exercises select.epoll: creation, registration, polling, modification,
    fromfd sharing, close semantics, and fd inheritability.

    Each test gets a fresh listening server socket; every socket created is
    tracked in self.connections so tearDown can close it.
    """
    def setUp(self):
        # Listening socket on an ephemeral port; peers connect to it.
        self.serverSocket = socket.socket()
        self.serverSocket.bind(('127.0.0.1', 0))
        self.serverSocket.listen(1)
        self.connections = [self.serverSocket]
    def tearDown(self):
        for skt in self.connections:
            skt.close()
    def _connected_pair(self):
        """Return a (client, server) pair of connected sockets.

        The client is non-blocking, so connect() must raise EINPROGRESS;
        the accepted server side completes the pair.
        """
        client = socket.socket()
        client.setblocking(False)
        try:
            client.connect(('127.0.0.1', self.serverSocket.getsockname()[1]))
        except OSError as e:
            self.assertEqual(e.args[0], errno.EINPROGRESS)
        else:
            raise AssertionError("Connect should have raised EINPROGRESS")
        server, addr = self.serverSocket.accept()
        self.connections.extend((client, server))
        return client, server
    def test_create(self):
        # Basic lifecycle: valid fd while open, ValueError after close.
        try:
            ep = select.epoll(16)
        except OSError as e:
            raise AssertionError(str(e))
        self.assertTrue(ep.fileno() > 0, ep.fileno())
        self.assertTrue(not ep.closed)
        ep.close()
        self.assertTrue(ep.closed)
        self.assertRaises(ValueError, ep.fileno)
        if hasattr(select, "EPOLL_CLOEXEC"):
            select.epoll(select.EPOLL_CLOEXEC).close()
        self.assertRaises(OSError, select.epoll, flags=12356)
    def test_badcreate(self):
        # Non-integer sizehint arguments must raise TypeError.
        self.assertRaises(TypeError, select.epoll, 1, 2, 3)
        self.assertRaises(TypeError, select.epoll, 'foo')
        self.assertRaises(TypeError, select.epoll, None)
        self.assertRaises(TypeError, select.epoll, ())
        self.assertRaises(TypeError, select.epoll, ['foo'])
        self.assertRaises(TypeError, select.epoll, {})
    def test_context_manager(self):
        # `with` closes the epoll object on exit.
        with select.epoll(16) as ep:
            self.assertGreater(ep.fileno(), 0)
            self.assertFalse(ep.closed)
        self.assertTrue(ep.closed)
        self.assertRaises(ValueError, ep.fileno)
    def test_add(self):
        server, client = self._connected_pair()
        ep = select.epoll(2)
        try:
            ep.register(server.fileno(), select.EPOLLIN | select.EPOLLOUT)
            ep.register(client.fileno(), select.EPOLLIN | select.EPOLLOUT)
        finally:
            ep.close()
        # adding by object w/ fileno works, too.
        ep = select.epoll(2)
        try:
            ep.register(server, select.EPOLLIN | select.EPOLLOUT)
            ep.register(client, select.EPOLLIN | select.EPOLLOUT)
        finally:
            ep.close()
        ep = select.epoll(2)
        try:
            # TypeError: argument must be an int, or have a fileno() method.
            self.assertRaises(TypeError, ep.register, object(),
                              select.EPOLLIN | select.EPOLLOUT)
            self.assertRaises(TypeError, ep.register, None,
                              select.EPOLLIN | select.EPOLLOUT)
            # ValueError: file descriptor cannot be a negative integer (-1)
            self.assertRaises(ValueError, ep.register, -1,
                              select.EPOLLIN | select.EPOLLOUT)
            # OSError: [Errno 9] Bad file descriptor
            self.assertRaises(OSError, ep.register, 10000,
                              select.EPOLLIN | select.EPOLLOUT)
            # registering twice also raises an exception
            ep.register(server, select.EPOLLIN | select.EPOLLOUT)
            self.assertRaises(OSError, ep.register, server,
                              select.EPOLLIN | select.EPOLLOUT)
        finally:
            ep.close()
    def test_fromfd(self):
        # fromfd() shares the underlying epoll fd: registrations via ep2 are
        # visible to ep, and closing ep invalidates ep2 (EBADF).
        server, client = self._connected_pair()
        ep = select.epoll(2)
        ep2 = select.epoll.fromfd(ep.fileno())
        ep2.register(server.fileno(), select.EPOLLIN | select.EPOLLOUT)
        ep2.register(client.fileno(), select.EPOLLIN | select.EPOLLOUT)
        events = ep.poll(1, 4)
        events2 = ep2.poll(0.9, 4)
        self.assertEqual(len(events), 2)
        self.assertEqual(len(events2), 2)
        ep.close()
        try:
            ep2.poll(1, 4)
        except OSError as e:
            self.assertEqual(e.args[0], errno.EBADF, e)
        else:
            self.fail("epoll on closed fd didn't raise EBADF")
    def test_control_and_wait(self):
        # Full register/poll/modify/unregister round-trip with timing checks:
        # writable immediately, readable after data is sent, and only the
        # modified mask reported afterwards.
        client, server = self._connected_pair()
        ep = select.epoll(16)
        ep.register(server.fileno(),
                    select.EPOLLIN | select.EPOLLOUT | select.EPOLLET)
        ep.register(client.fileno(),
                    select.EPOLLIN | select.EPOLLOUT | select.EPOLLET)
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.1, then - now)
        events.sort()
        expected = [(client.fileno(), select.EPOLLOUT),
                    (server.fileno(), select.EPOLLOUT)]
        expected.sort()
        self.assertEqual(events, expected)
        events = ep.poll(timeout=2.1, maxevents=4)
        self.assertFalse(events)
        client.send(b"Hello!")
        server.send(b"world!!!")
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.01)
        events.sort()
        expected = [(client.fileno(), select.EPOLLIN | select.EPOLLOUT),
                    (server.fileno(), select.EPOLLIN | select.EPOLLOUT)]
        expected.sort()
        self.assertEqual(events, expected)
        ep.unregister(client.fileno())
        ep.modify(server.fileno(), select.EPOLLOUT)
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.01)
        expected = [(server.fileno(), select.EPOLLOUT)]
        self.assertEqual(events, expected)
    def test_errors(self):
        self.assertRaises(ValueError, select.epoll, -2)
        self.assertRaises(ValueError, select.epoll().register, -1,
                          select.EPOLLIN)
    def test_unregister_closed(self):
        # Unregistering a closed socket's (stale) fd must not blow up.
        server, client = self._connected_pair()
        fd = server.fileno()
        ep = select.epoll(16)
        ep.register(server)
        now = time.monotonic()
        events = ep.poll(1, 4)
        then = time.monotonic()
        self.assertFalse(then - now > 0.01)
        server.close()
        ep.unregister(fd)
    def test_close(self):
        open_file = open(__file__, "rb")
        self.addCleanup(open_file.close)
        fd = open_file.fileno()
        epoll = select.epoll()
        # test fileno() method and closed attribute
        self.assertIsInstance(epoll.fileno(), int)
        self.assertFalse(epoll.closed)
        # test close()
        epoll.close()
        self.assertTrue(epoll.closed)
        self.assertRaises(ValueError, epoll.fileno)
        # close() can be called more than once
        epoll.close()
        # operations must fail with ValueError("I/O operation on closed ...")
        self.assertRaises(ValueError, epoll.modify, fd, select.EPOLLIN)
        self.assertRaises(ValueError, epoll.poll, 1.0)
        self.assertRaises(ValueError, epoll.register, fd, select.EPOLLIN)
        self.assertRaises(ValueError, epoll.unregister, fd)
    def test_fd_non_inheritable(self):
        # epoll fds are created non-inheritable by default.
        epoll = select.epoll()
        self.addCleanup(epoll.close)
        self.assertEqual(os.get_inheritable(epoll.fileno()), False)
def test_main():
    # Entry point used by the regression-test driver.
    support.run_unittest(TestEPoll)
if __name__ == "__main__":
    test_main()
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <tobias.ruetschi@adfinis-sygroup.ch>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: udm_dns_record
version_added: "2.2"
author: "Tobias Rueetschi (@2-B)"
short_description: Manage dns entries on a univention corporate server
description:
- "This module allows to manage dns records on a univention corporate server (UCS).
It uses the python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
- Univention
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the dns record is present or not.
name:
required: true
description:
- "Name of the record, this is also the DNS record. E.g. www for
www.example.com."
zone:
required: true
description:
- Corresponding DNS zone for this record, e.g. example.com.
type:
required: true
choices: [ host_record, alias, ptr_record, srv_record, txt_record ]
description:
- "Define the record type. C(host_record) is a A or AAAA record,
C(alias) is a CNAME, C(ptr_record) is a PTR record, C(srv_record)
is a SRV record and C(txt_record) is a TXT record."
data:
required: false
default: []
description:
- "Additional data for this record, e.g. ['a': '192.0.2.1'].
Required if C(state=present)."
'''
EXAMPLES = '''
# Create a DNS record on a UCS
- udm_dns_zone:
name: www
zone: example.com
type: host_record
data:
- a: 192.0.2.1
'''
RETURN = '''# '''
# Records whether the Univention python bindings are importable;
# main() fails fast when they are missing.
HAVE_UNIVENTION = False
try:
    from univention.admin.handlers.dns import (
        forward_zone,
        reverse_zone,
    )
    HAVE_UNIVENTION = True
except ImportError:
    pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
config,
uldap,
)
def main():
    """Ansible entry point: create, update or remove a DNS record on a UCS.

    Reads `type`, `zone`, `name`, `data` and `state` from the module params,
    looks up the existing LDAP object, and creates/edits/removes it via the
    Univention UMC helpers. Exits through module.exit_json / fail_json.
    """
    module = AnsibleModule(
        argument_spec = dict(
            type = dict(required=True,
                        type='str'),
            zone = dict(required=True,
                        type='str'),
            name = dict(required=True,
                        type='str'),
            data = dict(default=[],
                        type='dict'),
            state = dict(default='present',
                         choices=['present', 'absent'],
                         type='str')
        ),
        supports_check_mode=True,
        required_if = ([
            ('state', 'present', ['data'])
        ])
    )
    if not HAVE_UNIVENTION:
        module.fail_json(msg="This module requires univention python bindings")
    # Local name avoids shadowing the `type` builtin.
    record_type = module.params['type']
    zone = module.params['zone']
    name = module.params['name']
    data = module.params['data']
    state = module.params['state']
    changed = False
    # Initialize so exit_json below never hits an undefined name: the
    # original only assigned `diff` on the state=present success path,
    # raising NameError for state=absent.
    diff = []
    # Does a record with this name already exist in the zone?
    obj = list(ldap_search(
        '(&(objectClass=dNSZone)(zoneName={})(relativeDomainName={}))'.format(zone, name),
        attr=['dNSZone']
    ))
    exists = bool(len(obj))
    container = 'zoneName={},cn=dns,{}'.format(zone, base_dn())
    dn = 'relativeDomainName={},{}'.format(name, container)
    if state == 'present':
        try:
            if not exists:
                # A brand-new record needs its zone object as superordinate;
                # try the forward zone first, then the reverse zone.
                so = forward_zone.lookup(
                    config(),
                    uldap(),
                    '(zone={})'.format(zone),
                    scope='domain',
                ) or reverse_zone.lookup(
                    config(),
                    uldap(),
                    '(zone={})'.format(zone),
                    scope='domain',
                )
                obj = umc_module_for_add('dns/{}'.format(record_type), container, superordinate=so[0])
            else:
                obj = umc_module_for_edit('dns/{}'.format(record_type), dn)
            obj['name'] = name
            for k, v in data.items():
                obj[k] = v
            # Compute the diff once and reuse it (the original called
            # obj.diff() twice).
            diff = obj.diff()
            changed = diff != []
            if not module.check_mode:
                if not exists:
                    obj.create()
                else:
                    obj.modify()
        except BaseException as e:
            # NOTE(review): BaseException also catches KeyboardInterrupt and
            # SystemExit; Exception would likely suffice — kept as-is for
            # backward compatibility.
            module.fail_json(
                msg='Creating/editing dns entry {} in {} failed: {}'.format(name, container, e)
            )
    if state == 'absent' and exists:
        try:
            obj = umc_module_for_edit('dns/{}'.format(record_type), dn)
            if not module.check_mode:
                obj.remove()
            changed = True
        except BaseException as e:
            module.fail_json(
                msg='Removing dns entry {} in {} failed: {}'.format(name, container, e)
            )
    module.exit_json(
        changed=changed,
        name=name,
        diff=diff,
        container=container
    )
# Run only when executed directly (not when imported by test tooling).
if __name__ == '__main__':
    main()
/*!
* Bootstrap carousel.js v5.3.8 (https://getbootstrap.com/)
* Copyright 2011-2025 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('./base-component.js'), require('./dom/event-handler.js'), require('./dom/manipulator.js'), require('./dom/selector-engine.js'), require('./util/index.js'), require('./util/swipe.js')) :
typeof define === 'function' && define.amd ? define(['./base-component', './dom/event-handler', './dom/manipulator', './dom/selector-engine', './util/index', './util/swipe'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.Carousel = factory(global.BaseComponent, global.EventHandler, global.Manipulator, global.SelectorEngine, global.Index, global.Swipe));
})(this, (function (BaseComponent, EventHandler, Manipulator, SelectorEngine, index_js, Swipe) { 'use strict';
/**
* --------------------------------------------------------------------------
* Bootstrap carousel.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* Constants
*/
const NAME = 'carousel';
const DATA_KEY = 'bs.carousel';
const EVENT_KEY = `.${DATA_KEY}`;
const DATA_API_KEY = '.data-api';
const ARROW_LEFT_KEY = 'ArrowLeft';
const ARROW_RIGHT_KEY = 'ArrowRight';
const TOUCHEVENT_COMPAT_WAIT = 500; // Time for mouse compat events to fire after touch
const ORDER_NEXT = 'next';
const ORDER_PREV = 'prev';
const DIRECTION_LEFT = 'left';
const DIRECTION_RIGHT = 'right';
const EVENT_SLIDE = `slide${EVENT_KEY}`;
const EVENT_SLID = `slid${EVENT_KEY}`;
const EVENT_KEYDOWN = `keydown${EVENT_KEY}`;
const EVENT_MOUSEENTER = `mouseenter${EVENT_KEY}`;
const EVENT_MOUSELEAVE = `mouseleave${EVENT_KEY}`;
const EVENT_DRAG_START = `dragstart${EVENT_KEY}`;
const EVENT_LOAD_DATA_API = `load${EVENT_KEY}${DATA_API_KEY}`;
const EVENT_CLICK_DATA_API = `click${EVENT_KEY}${DATA_API_KEY}`;
const CLASS_NAME_CAROUSEL = 'carousel';
const CLASS_NAME_ACTIVE = 'active';
const CLASS_NAME_SLIDE = 'slide';
const CLASS_NAME_END = 'carousel-item-end';
const CLASS_NAME_START = 'carousel-item-start';
const CLASS_NAME_NEXT = 'carousel-item-next';
const CLASS_NAME_PREV = 'carousel-item-prev';
const SELECTOR_ACTIVE = '.active';
const SELECTOR_ITEM = '.carousel-item';
const SELECTOR_ACTIVE_ITEM = SELECTOR_ACTIVE + SELECTOR_ITEM;
const SELECTOR_ITEM_IMG = '.carousel-item img';
const SELECTOR_INDICATORS = '.carousel-indicators';
const SELECTOR_DATA_SLIDE = '[data-bs-slide], [data-bs-slide-to]';
const SELECTOR_DATA_RIDE = '[data-bs-ride="carousel"]';
const KEY_TO_DIRECTION = {
[ARROW_LEFT_KEY]: DIRECTION_RIGHT,
[ARROW_RIGHT_KEY]: DIRECTION_LEFT
};
const Default = {
interval: 5000,
keyboard: true,
pause: 'hover',
ride: false,
touch: true,
wrap: true
};
const DefaultType = {
interval: '(number|boolean)',
// TODO:v6 remove boolean support
keyboard: 'boolean',
pause: '(string|boolean)',
ride: '(boolean|string)',
touch: 'boolean',
wrap: 'boolean'
};
/**
* Class definition
*/
class Carousel extends BaseComponent {
    constructor(element, config) {
      super(element, config);
      // Cycling/transition state.
      this._interval = null;
      this._activeElement = null;
      this._isSliding = false;
      this.touchTimeout = null;
      this._swipeHelper = null;
      this._indicatorsElement = SelectorEngine.findOne(SELECTOR_INDICATORS, this._element);
      this._addEventListeners();
      // `ride: 'carousel'` starts auto-cycling immediately.
      if (this._config.ride === CLASS_NAME_CAROUSEL) {
        this.cycle();
      }
    }
    // Getters
    static get Default() {
      return Default;
    }
    static get DefaultType() {
      return DefaultType;
    }
    static get NAME() {
      return NAME;
    }
    // Public
    next() {
      this._slide(ORDER_NEXT);
    }
    nextWhenVisible() {
      // FIXME TODO use `document.visibilityState`
      // Don't call next when the page isn't visible
      // or the carousel or its parent isn't visible
      if (!document.hidden && index_js.isVisible(this._element)) {
        this.next();
      }
    }
    prev() {
      this._slide(ORDER_PREV);
    }
    pause() {
      // Fast-forward an in-flight transition before stopping the timer.
      if (this._isSliding) {
        index_js.triggerTransitionEnd(this._element);
      }
      this._clearInterval();
    }
    cycle() {
      // Restart the auto-advance timer with the current interval.
      this._clearInterval();
      this._updateInterval();
      this._interval = setInterval(() => this.nextWhenVisible(), this._config.interval);
    }
    _maybeEnableCycle() {
      if (!this._config.ride) {
        return;
      }
      // Defer the restart until the current slide transition finishes.
      if (this._isSliding) {
        EventHandler.one(this._element, EVENT_SLID, () => this.cycle());
        return;
      }
      this.cycle();
    }
    to(index) {
      // Jump to the slide at `index`; out-of-range indexes are ignored.
      const items = this._getItems();
      if (index > items.length - 1 || index < 0) {
        return;
      }
      // Queue the jump until the in-flight transition completes.
      if (this._isSliding) {
        EventHandler.one(this._element, EVENT_SLID, () => this.to(index));
        return;
      }
      const activeIndex = this._getItemIndex(this._getActive());
      if (activeIndex === index) {
        return;
      }
      const order = index > activeIndex ? ORDER_NEXT : ORDER_PREV;
      this._slide(order, items[index]);
    }
    dispose() {
      // Tear down the swipe helper before the base-class cleanup.
      if (this._swipeHelper) {
        this._swipeHelper.dispose();
      }
      super.dispose();
    }
    // Private
    _configAfterMerge(config) {
      // Remember the configured interval so per-slide overrides can be undone.
      config.defaultInterval = config.interval;
      return config;
    }
    _addEventListeners() {
      if (this._config.keyboard) {
        EventHandler.on(this._element, EVENT_KEYDOWN, event => this._keydown(event));
      }
      // pause:'hover' stops cycling while the pointer is over the carousel.
      if (this._config.pause === 'hover') {
        EventHandler.on(this._element, EVENT_MOUSEENTER, () => this.pause());
        EventHandler.on(this._element, EVENT_MOUSELEAVE, () => this._maybeEnableCycle());
      }
      if (this._config.touch && Swipe.isSupported()) {
        this._addTouchEventListeners();
      }
    }
    _addTouchEventListeners() {
      // Prevent image drag from hijacking the swipe gesture.
      for (const img of SelectorEngine.find(SELECTOR_ITEM_IMG, this._element)) {
        EventHandler.on(img, EVENT_DRAG_START, event => event.preventDefault());
      }
      const endCallBack = () => {
        if (this._config.pause !== 'hover') {
          return;
        }

        // If it's a touch-enabled device, mouseenter/leave are fired as
        // part of the mouse compatibility events on first tap - the carousel
        // would stop cycling until user tapped out of it;
        // here, we listen for touchend, explicitly pause the carousel
        // (as if it's the second time we tap on it, mouseenter compat event
        // is NOT fired) and after a timeout (to allow for mouse compatibility
        // events to fire) we explicitly restart cycling
        this.pause();
        if (this.touchTimeout) {
          clearTimeout(this.touchTimeout);
        }
        this.touchTimeout = setTimeout(() => this._maybeEnableCycle(), TOUCHEVENT_COMPAT_WAIT + this._config.interval);
      };
      // Swipes are expressed in visual directions, then converted to orders.
      const swipeConfig = {
        leftCallback: () => this._slide(this._directionToOrder(DIRECTION_LEFT)),
        rightCallback: () => this._slide(this._directionToOrder(DIRECTION_RIGHT)),
        endCallback: endCallBack
      };
      this._swipeHelper = new Swipe(this._element, swipeConfig);
    }
    _keydown(event) {
      // Ignore keystrokes while the user is typing in a form field.
      if (/input|textarea/i.test(event.target.tagName)) {
        return;
      }
      const direction = KEY_TO_DIRECTION[event.key];
      if (direction) {
        event.preventDefault();
        this._slide(this._directionToOrder(direction));
      }
    }
    _getItemIndex(element) {
      return this._getItems().indexOf(element);
    }
    _setActiveIndicatorElement(index) {
      // No-op when the carousel has no indicator list.
      if (!this._indicatorsElement) {
        return;
      }
      const activeIndicator = SelectorEngine.findOne(SELECTOR_ACTIVE, this._indicatorsElement);
      activeIndicator.classList.remove(CLASS_NAME_ACTIVE);
      activeIndicator.removeAttribute('aria-current');
      // Promote the indicator whose data-bs-slide-to matches the new index.
      const newActiveIndicator = SelectorEngine.findOne(`[data-bs-slide-to="${index}"]`, this._indicatorsElement);
      if (newActiveIndicator) {
        newActiveIndicator.classList.add(CLASS_NAME_ACTIVE);
        newActiveIndicator.setAttribute('aria-current', 'true');
      }
    }
    _updateInterval() {
      // A slide may override the cycle interval via data-bs-interval;
      // fall back to the configured default otherwise.
      const element = this._activeElement || this._getActive();
      if (!element) {
        return;
      }
      const elementInterval = Number.parseInt(element.getAttribute('data-bs-interval'), 10);
      this._config.interval = elementInterval || this._config.defaultInterval;
    }
    _slide(order, element = null) {
      // Core transition routine: fires slide/slid events, swaps CSS classes,
      // and restarts cycling if it was active. One transition at a time.
      if (this._isSliding) {
        return;
      }
      const activeElement = this._getActive();
      const isNext = order === ORDER_NEXT;
      const nextElement = element || index_js.getNextActiveElement(this._getItems(), activeElement, isNext, this._config.wrap);
      if (nextElement === activeElement) {
        return;
      }
      const nextElementIndex = this._getItemIndex(nextElement);
      const triggerEvent = eventName => {
        return EventHandler.trigger(this._element, eventName, {
          relatedTarget: nextElement,
          direction: this._orderToDirection(order),
          from: this._getItemIndex(activeElement),
          to: nextElementIndex
        });
      };
      // Listeners may cancel the transition via preventDefault on `slide`.
      const slideEvent = triggerEvent(EVENT_SLIDE);
      if (slideEvent.defaultPrevented) {
        return;
      }
      if (!activeElement || !nextElement) {
        // Some weirdness is happening, so we bail
        // TODO: change tests that use empty divs to avoid this check
        return;
      }
      const isCycling = Boolean(this._interval);
      this.pause();
      this._isSliding = true;
      this._setActiveIndicatorElement(nextElementIndex);
      this._activeElement = nextElement;
      const directionalClassName = isNext ? CLASS_NAME_START : CLASS_NAME_END;
      const orderClassName = isNext ? CLASS_NAME_NEXT : CLASS_NAME_PREV;
      nextElement.classList.add(orderClassName);
      // Force a reflow so the directional classes animate from a clean state.
      index_js.reflow(nextElement);
      activeElement.classList.add(directionalClassName);
      nextElement.classList.add(directionalClassName);
      const completeCallBack = () => {
        nextElement.classList.remove(directionalClassName, orderClassName);
        nextElement.classList.add(CLASS_NAME_ACTIVE);
        activeElement.classList.remove(CLASS_NAME_ACTIVE, orderClassName, directionalClassName);
        this._isSliding = false;
        triggerEvent(EVENT_SLID);
      };
      this._queueCallback(completeCallBack, activeElement, this._isAnimated());
      if (isCycling) {
        this.cycle();
      }
    }
_isAnimated() {
return this._element.classList.contains(CLASS_NAME_SLIDE);
}
_getActive() {
return SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);
}
_getItems() {
return SelectorEngine.find(SELECTOR_ITEM, this._element);
}
_clearInterval() {
if (this._interval) {
clearInterval(this._interval);
this._interval = null;
}
}
_directionToOrder(direction) {
if (index_js.isRTL()) {
return direction === DIRECTION_LEFT ? ORDER_PREV : ORDER_NEXT;
}
return direction === DIRECTION_LEFT ? ORDER_NEXT : ORDER_PREV;
}
_orderToDirection(order) {
if (index_js.isRTL()) {
return order === ORDER_PREV ? DIRECTION_LEFT : DIRECTION_RIGHT;
}
return order === ORDER_PREV ? DIRECTION_RIGHT : DIRECTION_LEFT;
}
    // Static
    static jQueryInterface(config) {
      // jQuery bridge: $(...).carousel(config)
      return this.each(function () {
        const data = Carousel.getOrCreateInstance(this, config);
        // A numeric config means "go to slide N"
        if (typeof config === 'number') {
          data.to(config);
          return;
        }
        // A string config names a public method to invoke; private
        // (underscore-prefixed) members and the constructor are refused
        if (typeof config === 'string') {
          if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {
            throw new TypeError(`No method named "${config}"`);
          }
          data[config]();
        }
      });
    }
}
/**
* Data API implementation
*/
EventHandler.on(document, EVENT_CLICK_DATA_API, SELECTOR_DATA_SLIDE, function (event) {
const target = SelectorEngine.getElementFromSelector(this);
if (!target || !target.classList.contains(CLASS_NAME_CAROUSEL)) {
return;
}
event.preventDefault();
const carousel = Carousel.getOrCreateInstance(target);
const slideIndex = this.getAttribute('data-bs-slide-to');
if (slideIndex) {
carousel.to(slideIndex);
carousel._maybeEnableCycle();
return;
}
if (Manipulator.getDataAttribute(this, 'slide') === 'next') {
carousel.next();
carousel._maybeEnableCycle();
return;
}
carousel.prev();
carousel._maybeEnableCycle();
});
EventHandler.on(window, EVENT_LOAD_DATA_API, () => {
const carousels = SelectorEngine.find(SELECTOR_DATA_RIDE);
for (const carousel of carousels) {
Carousel.getOrCreateInstance(carousel);
}
});
/**
* jQuery
*/
index_js.defineJQueryPlugin(Carousel);
return Carousel;
}));
//# sourceMappingURL=carousel.js.map | javascript | github | https://github.com/twbs/bootstrap | js/dist/carousel.js |
#
# This file is part of Netsukuku, it is based on q2sim.py
#
# ntksim
# (c) Copyright 2007 Alessandro Ordine
#
# q2sim.py
# (c) Copyright 2007 Andrea Lo Pumo aka AlpT <alpt@freaknet.org>
#
# This source code is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License,
# or (at your option) any later version.
#
# This source code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# Please refer to the GNU Public License for more details.
#
# You should have received a copy of the GNU Public License along with
# this source code; if not, write to:
# Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
from G import *
#rem_t class
class rem_t:
    # Route Efficiency Measure: bundles the link metrics (rtt, up/down
    # bandwidth, death probability) used to rank routes. The metric that
    # __cmp__ consults is selected process-wide via the class attribute
    # `cmp_metric` (must be one of G.metrics).
    cmp_metric=G.metrics[0]
    def __init__(self,rtt_,bwup_,bwdw_,dp_):
        # rtt_: round trip time; bwup_/bwdw_: bandwidths; dp_: death probability.
        self.rtt=rtt_ #rtt in ms
        self.bwup=bwup_ #bandiwidth
        self.bwdw=bwdw_
        self.dp=dp_
        self.rem_avg_compute() #an ad-hoc average of the previous metrics
    def rem_avg_compute(self):
        # Weighted blend of the metrics; rtt enters inverted
        # (G.REM_MAX_RTT8 - rtt) so that a lower rtt raises the average.
        self.avg=G.AVG_UPBW_COEF*self.bwup + G.AVG_DWBW_COEF*self.bwdw + G.AVG_RTT_COEF*(G.REM_MAX_RTT8 - self.rtt)
    def print_rem(self):
        # Debug dump of all metric fields (Python 2 print statement).
        print "rtt:",self.rtt, " bwup:",self.bwup," bwdw:",self.bwdw,"death_p:",self.dp,"avg:",self.avg
    def bw_up_cmp(self, b):
        #alessandro 1/bw modification 5-21-2007
        #return (self.bwup > b.bwup) - (self.bwup < b.bwup)
        # NOTE(review): comparison is deliberately inverted (1/bw semantics),
        # so a *smaller* bwup compares as "better" -- see commented original.
        return (self.bwup < b.bwup) - (self.bwup > b.bwup)
        # The string below is unreachable (after return); kept as legacy notes.
        """
        * self < b -1 --> The bw self' is worse than `new' --> APPEND b ROUTE
        * self > b 1 --> The bw self' is better than `new'
        * self = b 0 --> They are the same
        """
    def bw_dw_cmp(self, b):
        #alessandro 1/bw modification 5-21-2007
        #return (self.bwdw > b.bwdw) - (self.bwdw < b.bwdw)
        # Same inverted (1/bw) convention as bw_up_cmp.
        return (self.bwdw < b.bwdw) - (self.bwdw > b.bwdw)
        """
        * self < b -1 --> The bw self' is worse than `new' --> APPEND b ROUTE
        * self > b 1 --> The bw self' is better than `new'
        * self = b 0 --> They are the same
        """
    def rtt_cmp(self, b):
        # Lower rtt is better: returns 1 when self.rtt < b.rtt.
        return (self.rtt < b.rtt) - (self.rtt > b.rtt)
        """
        * self > b -1 --> The rtt self is worse than b -->APPEND b ROUTE
        * self < b 1 --> The rtt self is better than b
        * self = b 0 --> They are the same
        """
    def dp_cmp(self, b):
        # Lower death probability is better: returns 1 when self.dp < b.dp.
        return (self.dp < b.dp) - (self.dp > b.dp)
        """
        we need to take the lower dp!!
        * self > b -1 --> The route self is worse than `new' --> APPEND b ROUTE
        * self < b 1 --> The route self is better than `new'
        * self = b 0 --> They are the same
        """
    def avg_cmp(self, b):
        # Higher average is better: returns 1 when self.avg > b.avg.
        return (self.avg > b.avg) - (self.avg < b.avg)
        """
        * self < b -1 --> The avg self is worse than b
        * self > b 1 --> The avg self is better than b
        * self = b 0 --> They are the same
        """
    def __cmp__(self,b):
        """Remember to set rem_t.cmp_metric before comparing two
        routes!!!"""
        # The bare identifiers below are deliberate sentinels: evaluating
        # them raises NameError, making a bad cmp_metric fail loudly.
        if rem_t.cmp_metric not in G.metrics:
            EHM_SOMETHING_WRONG
        if rem_t.cmp_metric == "rtt":
            ret=self.rtt_cmp(b)
        elif rem_t.cmp_metric == "bwup":
            ret=self.bw_up_cmp(b)
        elif rem_t.cmp_metric == "bwdw":
            ret=self.bw_dw_cmp(b)
        elif rem_t.cmp_metric == "avg":
            ret=self.avg_cmp(b)
        elif rem_t.cmp_metric == "dp":
            ret=self.dp_cmp(b)
        else:
            EHM_SOMETHING_WRONG
        return ret
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Classes for controlling a cluster of cloud instances.
"""
from __future__ import with_statement
import gzip
import StringIO
import urllib
from hadoop.cloud.storage import Storage
# Maps a provider code to the (module, class name) implementing its Cluster.
CLUSTER_PROVIDER_MAP = {
  "dummy": ('hadoop.cloud.providers.dummy', 'DummyCluster'),
  "ec2": ('hadoop.cloud.providers.ec2', 'Ec2Cluster'),
  "rackspace": ('hadoop.cloud.providers.rackspace', 'RackspaceCluster'),
}

def get_cluster(provider):
  """
  Retrieve the Cluster class for a provider.

  Raises KeyError when the provider code is unknown.
  """
  module_name, class_name = CLUSTER_PROVIDER_MAP[provider]
  module = __import__(module_name, globals(), locals(), [class_name])
  return getattr(module, class_name)
class Cluster(object):
  """
  A cluster of server instances. A cluster has a unique name.
  One may launch instances which run in a certain role.

  This is an abstract base: provider-specific subclasses (see
  CLUSTER_PROVIDER_MAP) implement the methods that raise or pass here.
  """
  def __init__(self, name, config_dir):
    self.name = name
    self.config_dir = config_dir
  def get_provider_code(self):
    """
    The code that uniquely identifies the cloud provider.
    """
    raise Exception("Unimplemented")
  def authorize_role(self, role, from_port, to_port, cidr_ip):
    """
    Authorize access to machines in a given role from a given network.
    """
    pass
  def get_instances_in_role(self, role, state_filter=None):
    """
    Get all the instances in a role, filtered by state.
    @param role: the name of the role
    @param state_filter: the state that the instance should be in
    (e.g. "running"), or None for all states
    """
    raise Exception("Unimplemented")
  def print_status(self, roles=None, state_filter="running"):
    """
    Print the status of instances in the given roles, filtered by state.
    """
    pass
  def check_running(self, role, number):
    """
    Check that a certain number of instances in a role are running.

    Returns the (truthy) list of instances when the count matches, or
    False after printing a diagnostic when it does not.
    """
    instances = self.get_instances_in_role(role, "running")
    if len(instances) != number:
      print "Expected %s instances in role %s, but was %s %s" % \
        (number, role, len(instances), instances)
      return False
    else:
      return instances
  def launch_instances(self, roles, number, image_id, size_id,
                       instance_user_data, **kwargs):
    """
    Launch instances (having the given roles) in the cluster.
    Returns a list of IDs for the instances started.
    """
    pass
  def wait_for_instances(self, instance_ids, timeout=600):
    """
    Wait for instances to start.
    Raise TimeoutException if the timeout is exceeded.
    """
    pass
  def terminate(self):
    """
    Terminate all instances in the cluster.
    """
    pass
  def delete(self):
    """
    Delete the cluster permanently. This operation is only permitted if no
    instances are running.
    """
    pass
  def get_storage(self):
    """
    Return the external storage for the cluster.
    """
    return Storage(self)
class InstanceUserData(object):
  """
  The data passed to an instance on start up.

  The file contents may contain placeholder strings that are substituted
  (literal substring replacement) before being handed to the instance.
  """
  def __init__(self, filename, replacements={}):
    # NOTE(review): mutable default argument -- the shared dict must not
    # be mutated by callers. filename may be a local path or a URL
    # (it is read via urllib.urlopen below).
    self.filename = filename
    self.replacements = replacements
  def _read_file(self, filename):
    """
    Read the user data.
    """
    return urllib.urlopen(filename).read()
  def read(self):
    """
    Read the user data, making replacements.
    """
    contents = self._read_file(self.filename)
    for (match, replacement) in self.replacements.iteritems():
      # A None replacement removes the placeholder entirely.
      if replacement == None:
        replacement = ''
      contents = contents.replace(match, replacement)
    return contents
  def read_as_gzip_stream(self):
    """
    Read and compress the data.

    Returns the gzip-compressed user data as a byte string.
    """
    output = StringIO.StringIO()
    compressed = gzip.GzipFile(mode='wb', fileobj=output)
    compressed.write(self.read())
    compressed.close()
    return output.getvalue()
class Instance(object):
  """
  A single server instance, identified by a provider-assigned id and its
  public and private IP addresses.
  """
  def __init__(self, id, public_ip, private_ip):
    # Provider-assigned identifier for this instance.
    self.id = id
    # Address reachable from outside the cloud.
    self.public_ip = public_ip
    # Address used for intra-cluster communication.
    self.private_ip = private_ip
class RoleSyntaxException(Exception):
  """
  Raised when a role name is invalid. Role names may consist of a sequence
  of alphanumeric characters and underscores. Dashes are not permitted in
  role names.
  """
  def __init__(self, message):
    # Keep the offending text around for callers that inspect .message.
    self.message = message
    super(RoleSyntaxException, self).__init__()
  def __str__(self):
    return repr(self.message)
class TimeoutException(Exception):
  """
  Raised when an operation does not complete within its allotted time
  (see Cluster.wait_for_instances).
  """
  pass
from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class FirstOfTagTests(SimpleTestCase):
    """
    Tests for the {% firstof %} template tag: it outputs the first truthy
    argument (or nothing when all are falsy), escapes its output by
    default, and can store the result with "as var".
    """

    @setup({"firstof01": "{% firstof a b c %}"})
    def test_firstof01(self):
        # All arguments falsy -> empty output.
        output = self.engine.render_to_string("firstof01", {"a": 0, "c": 0, "b": 0})
        self.assertEqual(output, "")

    @setup({"firstof02": "{% firstof a b c %}"})
    def test_firstof02(self):
        output = self.engine.render_to_string("firstof02", {"a": 1, "c": 0, "b": 0})
        self.assertEqual(output, "1")

    @setup({"firstof03": "{% firstof a b c %}"})
    def test_firstof03(self):
        output = self.engine.render_to_string("firstof03", {"a": 0, "c": 0, "b": 2})
        self.assertEqual(output, "2")

    @setup({"firstof04": "{% firstof a b c %}"})
    def test_firstof04(self):
        output = self.engine.render_to_string("firstof04", {"a": 0, "c": 3, "b": 0})
        self.assertEqual(output, "3")

    @setup({"firstof05": "{% firstof a b c %}"})
    def test_firstof05(self):
        # The first truthy argument wins even when later ones are truthy.
        output = self.engine.render_to_string("firstof05", {"a": 1, "c": 3, "b": 2})
        self.assertEqual(output, "1")

    @setup({"firstof06": "{% firstof a b c %}"})
    def test_firstof06(self):
        # A variable missing from the context counts as falsy.
        output = self.engine.render_to_string("firstof06", {"c": 3, "b": 0})
        self.assertEqual(output, "3")

    @setup({"firstof07": '{% firstof a b "c" %}'})
    def test_firstof07(self):
        # String literals can serve as fallback values.
        output = self.engine.render_to_string("firstof07", {"a": 0})
        self.assertEqual(output, "c")

    @setup({"firstof08": '{% firstof a b "c and d" %}'})
    def test_firstof08(self):
        output = self.engine.render_to_string("firstof08", {"a": 0, "b": 0})
        self.assertEqual(output, "c and d")

    @setup({"firstof09": "{% firstof %}"})
    def test_firstof09(self):
        # {% firstof %} with no arguments is a template syntax error.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template("firstof09")

    @setup({"firstof10": "{% firstof a %}"})
    def test_firstof10(self):
        # Output is autoescaped by default. (Fix: the expected value had
        # been corrupted by HTML-entity decoding -- firstof escapes "<".)
        output = self.engine.render_to_string("firstof10", {"a": "<"})
        self.assertEqual(output, "&lt;")

    @setup({"firstof11": "{% firstof a b %}"})
    def test_firstof11(self):
        # First truthy value, escaped.
        output = self.engine.render_to_string("firstof11", {"a": "<", "b": ">"})
        self.assertEqual(output, "&lt;")

    @setup({"firstof12": "{% firstof a b %}"})
    def test_firstof12(self):
        # Empty string is falsy, so "b" is chosen and escaped.
        output = self.engine.render_to_string("firstof12", {"a": "", "b": ">"})
        self.assertEqual(output, "&gt;")

    @setup({"firstof13": "{% autoescape off %}{% firstof a %}{% endautoescape %}"})
    def test_firstof13(self):
        # With autoescaping disabled the raw value is emitted.
        output = self.engine.render_to_string("firstof13", {"a": "<"})
        self.assertEqual(output, "<")

    @setup({"firstof14": "{% firstof a|safe b %}"})
    def test_firstof14(self):
        # |safe marks the value as not needing escaping.
        output = self.engine.render_to_string("firstof14", {"a": "<"})
        self.assertEqual(output, "<")

    @setup({"firstof15": "{% firstof a b c as myvar %}"})
    def test_firstof15(self):
        # "as var" stores the result in the context and renders nothing.
        ctx = {"a": 0, "b": 2, "c": 3}
        output = self.engine.render_to_string("firstof15", ctx)
        self.assertEqual(ctx["myvar"], "2")
        self.assertEqual(output, "")

    @setup({"firstof16": "{% firstof a b c as myvar %}"})
    def test_all_false_arguments_asvar(self):
        ctx = {"a": 0, "b": 0, "c": 0}
        output = self.engine.render_to_string("firstof16", ctx)
        self.assertEqual(ctx["myvar"], "")
        self.assertEqual(output, "")
# Copyright 2010-2012 RethinkDB, all rights reserved.
import gdb
import itertools, re
class StdPointerPrinter:
    "Print a smart pointer of some kind"
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def to_string(self):
        # _M_pi points at the shared control block; null means the smart
        # pointer owns nothing.
        if self.val['_M_refcount']['_M_pi'] == 0:
            return '%s (empty) %s' % (self.typename, self.val['_M_ptr'])
        return '%s (count %d) %s' % (self.typename,
                                     self.val['_M_refcount']['_M_pi']['_M_use_count'],
                                     self.val['_M_ptr'])
class UniquePointerPrinter:
    "Print a unique_ptr"
    def __init__(self, val):
        self.val = val
    def to_string(self):
        # All of a unique_ptr's state lives in its _M_t member;
        # displaying that is sufficient.
        inner = self.val['_M_t']
        return inner
class StdListPrinter:
    "Print a std::list"
    class _iterator:
        # Walks the circular doubly-linked list; iteration stops when the
        # cursor comes back around to the sentinel head node.
        def __init__(self, nodetype, head):
            self.nodetype = nodetype
            self.base = head['_M_next']
            self.head = head.address
            self.count = 0
        def __iter__(self):
            return self
        def next(self):
            # Python 2 iterator protocol (no __next__).
            if self.base == self.head:
                raise StopIteration
            elt = self.base.cast(self.nodetype).dereference()
            self.base = elt['_M_next']
            count = self.count
            self.count = self.count + 1
            return ('[%d]' % count, elt['_M_data'])
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def children(self):
        itype = self.val.type.template_argument(0)
        # If the inferior program is compiled with -D_GLIBCXX_DEBUG
        # some of the internal implementation details change.
        if self.typename == "std::list":
            nodetype = gdb.lookup_type('std::_List_node<%s>' % itype).pointer()
        elif self.typename == "std::__debug::list":
            nodetype = gdb.lookup_type('std::__norm::_List_node<%s>' % itype).pointer()
        else:
            raise ValueError("Cannot cast list node for list printer.")
        return self._iterator(nodetype, self.val['_M_impl']['_M_node'])
    def to_string(self):
        # The list is empty when the sentinel node points back at itself.
        if self.val['_M_impl']['_M_node'].address == self.val['_M_impl']['_M_node']['_M_next']:
            return 'empty %s' % (self.typename)
        return '%s' % (self.typename)
class StdListIteratorPrinter:
    "Print std::list::iterator"
    def __init__(self, typename, val):
        self.val = val
        self.typename = typename
    def to_string(self):
        itype = self.val.type.template_argument(0)
        # If the inferior program is compiled with -D_GLIBCXX_DEBUG
        # some of the internal implementation details change.
        if self.typename == "std::_List_iterator" or self.typename == "std::_List_const_iterator":
            nodetype = gdb.lookup_type('std::_List_node<%s>' % itype).pointer()
        elif self.typename == "std::__norm::_List_iterator" or self.typename == "std::__norm::_List_const_iterator":
            nodetype = gdb.lookup_type('std::__norm::_List_node<%s>' % itype).pointer()
        else:
            raise ValueError("Cannot cast list node for list iterator printer.")
        # Cast the node base pointer to the concrete node type so the
        # stored element becomes reachable.
        return self.val['_M_node'].cast(nodetype).dereference()['_M_data']
class StdSlistPrinter:
    "Print a __gnu_cxx::slist"
    class _iterator:
        # Walks the singly-linked chain; a null next-pointer ends iteration.
        def __init__(self, nodetype, head):
            self.nodetype = nodetype
            self.base = head['_M_head']['_M_next']
            self.count = 0
        def __iter__(self):
            return self
        def next(self):
            # Python 2 iterator protocol (no __next__).
            if self.base == 0:
                raise StopIteration
            elt = self.base.cast(self.nodetype).dereference()
            self.base = elt['_M_next']
            count = self.count
            self.count = self.count + 1
            return ('[%d]' % count, elt['_M_data'])
    def __init__(self, val):
        self.val = val
    def children(self):
        itype = self.val.type.template_argument(0)
        nodetype = gdb.lookup_type('__gnu_cxx::_Slist_node<%s>' % itype).pointer()
        return self._iterator(nodetype, self.val)
    def to_string(self):
        # Empty when the head node has no successor.
        if self.val['_M_head']['_M_next'] == 0:
            return 'empty __gnu_cxx::slist'
        return '__gnu_cxx::slist'
class StdSlistIteratorPrinter:
    "Print __gnu_cxx::slist::iterator"
    def __init__(self, val):
        self.val = val
    def to_string(self):
        # Cast the node pointer to the concrete node type to reach the data.
        itype = self.val.type.template_argument(0)
        nodetype = gdb.lookup_type('__gnu_cxx::_Slist_node<%s>' % itype).pointer()
        return self.val['_M_node'].cast(nodetype).dereference()['_M_data']
class StdVectorPrinter:
    "Print a std::vector"
    class _iterator:
        # Walks the contiguous [start, finish) element range.
        def __init__(self, start, finish):
            self.item = start
            self.finish = finish
            self.count = 0
        def __iter__(self):
            return self
        def next(self):
            # Python 2 iterator protocol (no __next__).
            if self.item == self.finish:
                raise StopIteration
            count = self.count
            self.count = self.count + 1
            elt = self.item.dereference()
            self.item = self.item + 1
            return ('[%d]' % count, elt)
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def children(self):
        return self._iterator(self.val['_M_impl']['_M_start'],
                              self.val['_M_impl']['_M_finish'])
    def to_string(self):
        # length = elements in use; capacity = allocated storage.
        start = self.val['_M_impl']['_M_start']
        finish = self.val['_M_impl']['_M_finish']
        end = self.val['_M_impl']['_M_end_of_storage']
        return ('%s of length %d, capacity %d' % (self.typename, int(finish - start), int(end - start)))
    def display_hint(self):
        return 'array'
class StdVectorIteratorPrinter:
    "Print std::vector::iterator"
    def __init__(self, val):
        self.val = val
    def to_string(self):
        # A vector iterator wraps a raw pointer; render the pointee.
        current = self.val['_M_current']
        return current.dereference()
class StdTuplePrinter:
    "Print a std::tuple"
    class _iterator:
        # std::tuple is implemented as a chain of base classes; each step
        # peels off one base and yields the element it stores.
        def __init__(self, head):
            self.head = head
            # Set the base class as the initial head of the
            # tuple.
            nodes = self.head.type.fields()
            if len(nodes) != 1:
                raise ValueError("Top of tuple tree does not consist of a single node.")
            # Set the actual head to the first pair.
            self.head = self.head.cast(nodes[0].type)
            self.count = 0
        def __iter__(self):
            return self
        def next(self):
            nodes = self.head.type.fields()
            # Check for further recursions in the inheritance tree.
            if len(nodes) == 0:
                raise StopIteration
            # Check that this iteration has an expected structure.
            if len(nodes) != 2:
                raise ValueError("Cannot parse more than 2 nodes in a tuple tree.")
            # - Left node is the next recursion parent.
            # - Right node is the actual class contained in the tuple.
            # Process right node.
            impl = self.head.cast(nodes[1].type)
            # Process left node and set it as head.
            self.head = self.head.cast(nodes[0].type)
            self.count = self.count + 1
            # Finally, check the implementation. If it is
            # wrapped in _M_head_impl return that, otherwise return
            # the value "as is".
            fields = impl.type.fields()
            if len(fields) < 1 or fields[0].name != "_M_head_impl":
                return ('[%d]' % self.count, impl)
            else:
                return ('[%d]' % self.count, impl['_M_head_impl'])
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val;
    def children(self):
        return self._iterator(self.val)
    def to_string(self):
        # gdb appends the children after this header line.
        return '%s containing' % (self.typename)
class StdStackOrQueuePrinter:
    "Print a std::stack or std::queue"
    # Both adaptors just wrap an underlying container member `c`;
    # all printing is delegated to that container's own visualizer.
    def __init__(self, typename, val):
        self.typename = typename
        self.visualizer = gdb.default_visualizer(val['c'])
    def children(self):
        return self.visualizer.children()
    def to_string(self):
        return '%s wrapping: %s' % (self.typename,
                                    self.visualizer.to_string())
    def display_hint(self):
        # Forward the hint when the wrapped visualizer provides one.
        if hasattr(self.visualizer, 'display_hint'):
            return self.visualizer.display_hint()
        return None
class RbtreeIterator:
    # In-order traversal of the red-black tree backing std::map/std::set:
    # start at the leftmost (smallest) node and repeatedly compute the
    # in-order successor.
    def __init__(self, rbtree):
        self.size = rbtree['_M_t']['_M_impl']['_M_node_count']
        self.node = rbtree['_M_t']['_M_impl']['_M_header']['_M_left']
        self.count = 0
    def __iter__(self):
        return self
    def __len__(self):
        return int(self.size)
    def next(self):
        # Python 2 iterator protocol (no __next__).
        if self.count == self.size:
            raise StopIteration
        result = self.node
        self.count = self.count + 1
        if self.count < self.size:
            # Compute the next node.
            node = self.node
            if node.dereference()['_M_right']:
                # Successor is the leftmost node of the right subtree.
                node = node.dereference()['_M_right']
                while node.dereference()['_M_left']:
                    node = node.dereference()['_M_left']
            else:
                # Otherwise climb until we arrive from a left child.
                parent = node.dereference()['_M_parent']
                while node == parent.dereference()['_M_right']:
                    node = parent
                    parent = parent.dereference()['_M_parent']
                # Guard against stepping onto the header sentinel.
                if node.dereference()['_M_right'] != parent:
                    node = parent
            self.node = node
        return result
# This is a pretty printer for std::_Rb_tree_iterator (which is
# std::map::iterator), and has nothing to do with the RbtreeIterator
# class above.
class StdRbtreeIteratorPrinter:
    "Print std::map::iterator"
    def __init__(self, val):
        self.val = val
    def to_string(self):
        # Cast the node base pointer to the full node type so the stored
        # value field becomes accessible.
        valuetype = self.val.type.template_argument(0)
        nodetype = gdb.lookup_type('std::_Rb_tree_node < %s >' % valuetype)
        nodetype = nodetype.pointer()
        return self.val.cast(nodetype).dereference()['_M_value_field']
class StdDebugIteratorPrinter:
    "Print a debug enabled version of an iterator"
    def __init__(self, val):
        self.val = val
    # Just strip away the encapsulating __gnu_debug::_Safe_iterator
    # and return the wrapped iterator value.
    def to_string(self):
        itype = self.val.type.template_argument(0)
        return self.val['_M_current'].cast(itype)
class StdMapPrinter:
    "Print a std::map or std::multimap"
    # Turn an RbtreeIterator into a pretty-print iterator.
    class _iter:
        # Yields the key and the value as two consecutive children; the
        # 'map' display hint below makes gdb pair them back up.
        def __init__(self, rbiter, type):
            self.rbiter = rbiter
            self.count = 0
            self.type = type
        def __iter__(self):
            return self
        def next(self):
            if self.count % 2 == 0:
                # Even step: advance the tree iterator, emit the key, and
                # remember the pair for the following value step.
                n = self.rbiter.next()
                n = n.cast(self.type).dereference()['_M_value_field']
                self.pair = n
                item = n['first']
            else:
                item = self.pair['second']
            result = ('[%d]' % self.count, item)
            self.count = self.count + 1
            return result
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def to_string(self):
        return '%s with %d elements' % (self.typename, len(RbtreeIterator(self.val)))
    def children(self):
        keytype = self.val.type.template_argument(0).const()
        valuetype = self.val.type.template_argument(1)
        nodetype = gdb.lookup_type('std::_Rb_tree_node< std::pair< %s, %s > >' % (keytype, valuetype))
        nodetype = nodetype.pointer()
        return self._iter(RbtreeIterator(self.val), nodetype)
    def display_hint(self):
        return 'map'
class StdSetPrinter:
    "Print a std::set or std::multiset"
    # Turn an RbtreeIterator into a pretty-print iterator.
    class _iter:
        def __init__(self, rbiter, type):
            self.rbiter = rbiter
            self.count = 0
            self.type = type
        def __iter__(self):
            return self
        def next(self):
            # Cast each tree node to its concrete type to reach the element.
            item = self.rbiter.next()
            item = item.cast(self.type).dereference()['_M_value_field']
            # FIXME: this is weird ... what to do?
            # Maybe a 'set' display hint?
            result = ('[%d]' % self.count, item)
            self.count = self.count + 1
            return result
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def to_string(self):
        return '%s with %d elements' % (self.typename, len(RbtreeIterator(self.val)))
    def children(self):
        keytype = self.val.type.template_argument(0)
        nodetype = gdb.lookup_type('std::_Rb_tree_node< %s >' % keytype).pointer()
        return self._iter(RbtreeIterator(self.val), nodetype)
class StdBitsetPrinter:
    "Print a std::bitset"
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def to_string(self):
        # If template_argument handled values, we could print the
        # size. Or we could use a regexp on the type.
        return '%s' % (self.typename)
    def children(self):
        words = self.val['_M_w']
        wtype = words.type
        # The _M_w member can be either an unsigned long, or an
        # array. This depends on the template specialization used.
        # If it is a single long, convert to a single element list.
        if wtype.code == gdb.TYPE_CODE_ARRAY:
            tsize = wtype.target().sizeof
        else:
            words = [words]
            tsize = wtype.sizeof
        nwords = wtype.sizeof / tsize
        result = []
        byte = 0
        # Emit one child per set bit, labelled with its absolute bit index.
        while byte < nwords:
            w = words[byte]
            bit = 0
            while w != 0:
                if (w & 1) != 0:
                    # Another spot where we could use 'set'?
                    result.append(('[%d]' % (byte * tsize * 8 + bit), 1))
                bit = bit + 1
                w = w >> 1
            byte = byte + 1
        return result
class StdDequePrinter:
    "Print a std::deque"
    class _iter:
        # Walks the deque's segmented storage: `node` is the current slot
        # in the bucket map, `p` the current element, `end` one-past the
        # current bucket, and `last` one-past the final element overall.
        def __init__(self, node, start, end, last, buffer_size):
            self.node = node
            self.p = start
            self.end = end
            self.last = last
            self.buffer_size = buffer_size
            self.count = 0
        def __iter__(self):
            return self
        def next(self):
            # Python 2 iterator protocol (no __next__).
            if self.p == self.last:
                raise StopIteration
            result = ('[%d]' % self.count, self.p.dereference())
            self.count = self.count + 1
            # Advance the 'cur' pointer.
            self.p = self.p + 1
            if self.p == self.end:
                # If we got to the end of this bucket, move to the
                # next bucket.
                self.node = self.node + 1
                self.p = self.node[0]
                self.end = self.p + self.buffer_size
            return result
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
        self.elttype = val.type.template_argument(0)
        size = self.elttype.sizeof
        # Buckets hold 512 bytes worth of elements (at least one element).
        if size < 512:
            self.buffer_size = int(512 / size)
        else:
            self.buffer_size = 1
    def to_string(self):
        start = self.val['_M_impl']['_M_start']
        end = self.val['_M_impl']['_M_finish']
        # Element count = full middle buckets plus the partially used
        # first and last buckets.
        delta_n = end['_M_node'] - start['_M_node'] - 1
        delta_s = start['_M_last'] - start['_M_cur']
        delta_e = end['_M_cur'] - end['_M_first']
        size = self.buffer_size * delta_n + delta_s + delta_e
        return '%s with %d elements' % (self.typename, long(size))
    def children(self):
        start = self.val['_M_impl']['_M_start']
        end = self.val['_M_impl']['_M_finish']
        return self._iter(start['_M_node'], start['_M_cur'], start['_M_last'],
                          end['_M_cur'], self.buffer_size)
    def display_hint(self):
        return 'array'
class StdDequeIteratorPrinter:
    "Print std::deque::iterator"
    def __init__(self, val):
        self.val = val
    def to_string(self):
        # Render the element at the iterator's current position pointer.
        cur = self.val['_M_cur']
        return cur.dereference()
class StdStringPrinter:
    "Print a std::basic_string of some kind"
    def __init__(self, val):
        self.val = val
    def to_string(self):
        # Make sure &string works, too.
        type = self.val.type
        if type.code == gdb.TYPE_CODE_REF:
            type = type.target()
        # Calculate the length of the string so that to_string returns
        # the string according to length, not according to first null
        # encountered.
        ptr = self.val['_M_dataplus']['_M_p']
        realtype = type.unqualified().strip_typedefs()
        try: # clang thinks std::string is std::basic_string<char>
            realtype.template_argument(1)
        except RuntimeError:
            basetype = str(realtype.template_argument(0))
            realtype = gdb.lookup_type(("std::basic_string<%s, std::char_traits" +
                                        "<%s>, std::allocator<%s> >")
                                       % (basetype, basetype, basetype))
        # The _Rep header sits immediately before the character data and
        # records the real length.
        reptype = gdb.lookup_type(str(realtype) + '::_Rep').pointer()
        header = ptr.cast(reptype) - 1
        len = header.dereference()['_M_length']
        return self.val['_M_dataplus']['_M_p'].lazy_string(length=len)
    def display_hint(self):
        return 'string'
class Tr1HashtableIterator:
    # Iterates over the chained hash table used by the std::tr1 unordered
    # containers, yielding each stored value in bucket order.
    def __init__(self, hash):
        self.count = 0
        # NOTE(review): despite the name, _M_element_count holds the number
        # of *elements*, and self.count tracks elements consumed -- verify.
        self.n_buckets = hash['_M_element_count']
        if self.n_buckets == 0:
            self.node = False
        else:
            self.bucket = hash['_M_buckets']
            self.node = self.bucket[0]
            self.update()
    def __iter__(self):
        return self
    def update(self):
        # If we advanced off the end of the chain, move to the next
        # bucket.
        while self.node == 0:
            self.bucket = self.bucket + 1
            self.node = self.bucket[0]
        # If we advanced off the end of the bucket array, then
        # we're done.
        if self.count == self.n_buckets:
            self.node = False
        else:
            self.count = self.count + 1
    def next(self):
        # Python 2 iterator protocol (no __next__).
        if not self.node:
            raise StopIteration
        result = self.node.dereference()['_M_v']
        self.node = self.node.dereference()['_M_next']
        self.update()
        return result
class Tr1UnorderedSetPrinter:
    "Print a tr1::unordered_set"
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def to_string(self):
        return '%s with %d elements' % (self.typename, self.val['_M_element_count'])
    @staticmethod
    def format_count(i):
        return '[%d]' % i
    def children(self):
        # Pair every hashtable element with a '[n]' label.
        counter = itertools.imap(self.format_count, itertools.count())
        return itertools.izip(counter, Tr1HashtableIterator(self.val))
class Tr1UnorderedMapPrinter:
    "Print a tr1::unordered_map"
    def __init__(self, typename, val):
        self.typename = typename
        self.val = val
    def to_string(self):
        return '%s with %d elements' % (self.typename, self.val['_M_element_count'])
    @staticmethod
    def flatten(list):
        # Flatten an iterable of (key, value) pairs into a single stream
        # k1, v1, k2, v2, ... as expected by the 'map' display hint.
        for elt in list:
            for i in elt:
                yield i
    @staticmethod
    def format_one(elt):
        return (elt['first'], elt['second'])
    @staticmethod
    def format_count(i):
        return '[%d]' % i
    def children(self):
        counter = itertools.imap(self.format_count, itertools.count())
        # Map over the hash table and flatten the result.
        data = self.flatten(itertools.imap(self.format_one, Tr1HashtableIterator(self.val)))
        # Zip the two iterators together.
        return itertools.izip(counter, data)
    def display_hint(self):
        return 'map'
def lookup_function (val):
    """Look up and return a pretty-printer that can print val.

    Returns None when printing is disabled (see disable_lookup_function)
    or when no registered regexp matches the value's type name.
    """
    global pretty_printers_dict
    # Bug fix: disable_lookup_function()/enable_lookup_function() toggle
    # this attribute, but it was never consulted here, so disabling had
    # no effect. Default to enabled when the flag was never set.
    if not getattr(lookup_function, 'enabled', True):
        return None
    # Get the type.
    type = val.type
    # If it points to a reference, get the reference.
    if type.code == gdb.TYPE_CODE_REF:
        type = type.target ()
    # Get the unqualified type, stripped of typedefs.
    type = type.unqualified ().strip_typedefs ()
    # Get the type name.
    typename = type.tag
    if typename == None:
        return None
    # Iterate over local dictionary of types to determine
    # if a printer is registered for that type. Return an
    # instantiation of the printer if found.
    for function in pretty_printers_dict:
        if function.match (typename):
            return pretty_printers_dict[function] (val)
    # Cannot find a pretty printer. Return None.
    return None
def disable_lookup_function ():
    """Turn the registered lookup function off."""
    lookup_function.enabled = False

def enable_lookup_function ():
    """Turn the registered lookup function back on."""
    lookup_function.enabled = True

# Regexp -> printer-factory registry, filled by register_libstdcxx_printers.
pretty_printers_dict = {}
def register_libstdcxx_printers(obj):
    """Register libstdc++ pretty-printers with objfile obj.

    If obj is None, the lookup hook is attached globally to gdb itself.
    Populates the shared pretty_printers_dict (regex -> printer factory)
    registry that lookup_function() consults.
    """
    if obj is None:
        obj = gdb
    obj.pretty_printers.append(lookup_function)
    # libstdc++ objects requiring pretty-printing.
    # In order from:
    # http://gcc.gnu.org/onlinedocs/libstdc++/latest-doxygen/a01847.html
    pretty_printers_dict[re.compile('^std::basic_string<.*>$')] = lambda val: StdStringPrinter(val)
    pretty_printers_dict[re.compile('^std::bitset<.*>$')] = lambda val: StdBitsetPrinter("std::bitset", val)
    pretty_printers_dict[re.compile('^std::deque<.*>$')] = lambda val: StdDequePrinter("std::deque", val)
    pretty_printers_dict[re.compile('^std::list<.*>$')] = lambda val: StdListPrinter("std::list", val)
    pretty_printers_dict[re.compile('^std::map<.*>$')] = lambda val: StdMapPrinter("std::map", val)
    pretty_printers_dict[re.compile('^std::multimap<.*>$')] = lambda val: StdMapPrinter("std::multimap", val)
    pretty_printers_dict[re.compile('^std::multiset<.*>$')] = lambda val: StdSetPrinter("std::multiset", val)
    pretty_printers_dict[re.compile('^std::priority_queue<.*>$')] = lambda val: StdStackOrQueuePrinter("std::priority_queue", val)
    pretty_printers_dict[re.compile('^std::queue<.*>$')] = lambda val: StdStackOrQueuePrinter("std::queue", val)
    pretty_printers_dict[re.compile('^std::tuple<.*>$')] = lambda val: StdTuplePrinter("std::tuple", val)
    pretty_printers_dict[re.compile('^std::set<.*>$')] = lambda val: StdSetPrinter("std::set", val)
    pretty_printers_dict[re.compile('^std::stack<.*>$')] = lambda val: StdStackOrQueuePrinter("std::stack", val)
    pretty_printers_dict[re.compile('^std::unique_ptr<.*>$')] = UniquePointerPrinter
    pretty_printers_dict[re.compile('^std::vector<.*>$')] = lambda val: StdVectorPrinter("std::vector", val)
    # vector<bool>
    # Printer registrations for classes compiled with -D_GLIBCXX_DEBUG.
    pretty_printers_dict[re.compile('^std::__debug::bitset<.*>$')] = lambda val: StdBitsetPrinter("std::__debug::bitset", val)
    pretty_printers_dict[re.compile('^std::__debug::deque<.*>$')] = lambda val: StdDequePrinter("std::__debug::deque", val)
    pretty_printers_dict[re.compile('^std::__debug::list<.*>$')] = lambda val: StdListPrinter("std::__debug::list", val)
    pretty_printers_dict[re.compile('^std::__debug::map<.*>$')] = lambda val: StdMapPrinter("std::__debug::map", val)
    pretty_printers_dict[re.compile('^std::__debug::multimap<.*>$')] = lambda val: StdMapPrinter("std::__debug::multimap", val)
    pretty_printers_dict[re.compile('^std::__debug::multiset<.*>$')] = lambda val: StdSetPrinter("std::__debug::multiset", val)
    pretty_printers_dict[re.compile('^std::__debug::priority_queue<.*>$')] = lambda val: StdStackOrQueuePrinter("std::__debug::priority_queue", val)
    pretty_printers_dict[re.compile('^std::__debug::queue<.*>$')] = lambda val: StdStackOrQueuePrinter("std::__debug::queue", val)
    pretty_printers_dict[re.compile('^std::__debug::set<.*>$')] = lambda val: StdSetPrinter("std::__debug::set", val)
    pretty_printers_dict[re.compile('^std::__debug::stack<.*>$')] = lambda val: StdStackOrQueuePrinter("std::__debug::stack", val)
    pretty_printers_dict[re.compile('^std::__debug::unique_ptr<.*>$')] = UniquePointerPrinter
    pretty_printers_dict[re.compile('^std::__debug::vector<.*>$')] = lambda val: StdVectorPrinter("std::__debug::vector", val)
    # These are the TR1 and C++0x printers.
    # For array - the default GDB pretty-printer seems reasonable.
    pretty_printers_dict[re.compile('^std::shared_ptr<.*>$')] = lambda val: StdPointerPrinter('std::shared_ptr', val)
    pretty_printers_dict[re.compile('^std::weak_ptr<.*>$')] = lambda val: StdPointerPrinter('std::weak_ptr', val)
    pretty_printers_dict[re.compile('^std::unordered_map<.*>$')] = lambda val: Tr1UnorderedMapPrinter('std::unordered_map', val)
    pretty_printers_dict[re.compile('^std::unordered_set<.*>$')] = lambda val: Tr1UnorderedSetPrinter('std::unordered_set', val)
    pretty_printers_dict[re.compile('^std::unordered_multimap<.*>$')] = lambda val: Tr1UnorderedMapPrinter('std::unordered_multimap', val)
    pretty_printers_dict[re.compile('^std::unordered_multiset<.*>$')] = lambda val: Tr1UnorderedSetPrinter('std::unordered_multiset', val)
    pretty_printers_dict[re.compile('^std::tr1::shared_ptr<.*>$')] = lambda val: StdPointerPrinter('std::tr1::shared_ptr', val)
    pretty_printers_dict[re.compile('^std::tr1::weak_ptr<.*>$')] = lambda val: StdPointerPrinter('std::tr1::weak_ptr', val)
    pretty_printers_dict[re.compile('^std::tr1::unordered_map<.*>$')] = lambda val: Tr1UnorderedMapPrinter('std::tr1::unordered_map', val)
    pretty_printers_dict[re.compile('^std::tr1::unordered_set<.*>$')] = lambda val: Tr1UnorderedSetPrinter('std::tr1::unordered_set', val)
    pretty_printers_dict[re.compile('^std::tr1::unordered_multimap<.*>$')] = lambda val: Tr1UnorderedMapPrinter('std::tr1::unordered_multimap', val)
    pretty_printers_dict[re.compile('^std::tr1::unordered_multiset<.*>$')] = lambda val: Tr1UnorderedSetPrinter('std::tr1::unordered_multiset', val)
    # These are the C++0x printer registrations for -D_GLIBCXX_DEBUG cases.
    # The tr1 namespace printers do not seem to have any debug
    # equivalents, so do not register them.
    pretty_printers_dict[re.compile('^std::__debug::unordered_map<.*>$')] = lambda val: Tr1UnorderedMapPrinter('std::__debug::unordered_map', val)
    pretty_printers_dict[re.compile('^std::__debug::unordered_set<.*>$')] = lambda val: Tr1UnorderedSetPrinter('std::__debug::unordered_set', val)
    pretty_printers_dict[re.compile('^std::__debug::unordered_multimap<.*>$')] = lambda val: Tr1UnorderedMapPrinter('std::__debug::unordered_multimap', val)
    # BUGFIX: the display typename read 'std::__debug:unordered_multiset'
    # (single colon), so the printed header misnamed the container.
    pretty_printers_dict[re.compile('^std::__debug::unordered_multiset<.*>$')] = lambda val: Tr1UnorderedSetPrinter('std::__debug::unordered_multiset', val)
    # Extensions.
    pretty_printers_dict[re.compile('^__gnu_cxx::slist<.*>$')] = StdSlistPrinter
    # Iterator printers.  These shouldn't be necessary, if GDB "print *i"
    # worked.  But it often doesn't, so here they are.  (The original code
    # wrapped these in a vestigial 'if True:' block.)
    pretty_printers_dict[re.compile('^std::_List_iterator<.*>$')] = lambda val: StdListIteratorPrinter("std::_List_iterator", val)
    pretty_printers_dict[re.compile('^std::_List_const_iterator<.*>$')] = lambda val: StdListIteratorPrinter("std::_List_const_iterator", val)
    pretty_printers_dict[re.compile('^std::_Rb_tree_iterator<.*>$')] = lambda val: StdRbtreeIteratorPrinter(val)
    pretty_printers_dict[re.compile('^std::_Rb_tree_const_iterator<.*>$')] = lambda val: StdRbtreeIteratorPrinter(val)
    pretty_printers_dict[re.compile('^std::_Deque_iterator<.*>$')] = lambda val: StdDequeIteratorPrinter(val)
    pretty_printers_dict[re.compile('^std::_Deque_const_iterator<.*>$')] = lambda val: StdDequeIteratorPrinter(val)
    pretty_printers_dict[re.compile('^__gnu_cxx::__normal_iterator<.*>$')] = lambda val: StdVectorIteratorPrinter(val)
    pretty_printers_dict[re.compile('^__gnu_cxx::_Slist_iterator<.*>$')] = lambda val: StdSlistIteratorPrinter(val)
    # Debug (compiled with -D_GLIBCXX_DEBUG) printer registrations.
    # The Rb_tree debug iterator when unwrapped from the encapsulating __gnu_debug::_Safe_iterator
    # does not have the __norm namespace. Just use the existing printer registration for that.
    pretty_printers_dict[re.compile('^__gnu_debug::_Safe_iterator<.*>$')] = lambda val: StdDebugIteratorPrinter(val)
    pretty_printers_dict[re.compile('^std::__norm::_List_iterator<.*>$')] = lambda val: StdListIteratorPrinter("std::__norm::_List_iterator", val)
    pretty_printers_dict[re.compile('^std::__norm::_List_const_iterator<.*>$')] = lambda val: StdListIteratorPrinter("std::__norm::_List_const_iterator", val)
    pretty_printers_dict[re.compile('^std::__norm::_Deque_const_iterator<.*>$')] = lambda val: StdDequeIteratorPrinter(val)
    pretty_printers_dict[re.compile('^std::__norm::_Deque_iterator<.*>$')] = lambda val: StdDequeIteratorPrinter(val)
# NOTE(review): register_libstdcxx_printers() also appends lookup_function to
# the target's pretty_printers list; doing it again here at import time may
# register the hook twice -- confirm this is intended.
gdb.pretty_printers.append(lookup_function)
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jmx.support;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.remote.JMXConnectorServer;
import javax.management.remote.JMXConnectorServerFactory;
import javax.management.remote.JMXServiceURL;
import javax.management.remote.MBeanServerForwarder;
import org.jspecify.annotations.Nullable;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.jmx.JmxException;
import org.springframework.util.CollectionUtils;
/**
* {@link FactoryBean} that creates a JSR-160 {@link JMXConnectorServer},
* optionally registers it with the {@link MBeanServer}, and then starts it.
*
* <p>The {@code JMXConnectorServer} can be started in a separate thread by setting the
* {@code threaded} property to {@code true}. You can configure this thread to be a
* daemon thread by setting the {@code daemon} property to {@code true}.
*
* <p>The {@code JMXConnectorServer} is correctly shut down when an instance of this
* class is destroyed on shutdown of the containing {@code ApplicationContext}.
*
* @author Rob Harrop
* @author Juergen Hoeller
* @since 1.2
* @see JMXConnectorServer
* @see MBeanServer
*/
public class ConnectorServerFactoryBean extends MBeanRegistrationSupport
implements FactoryBean<JMXConnectorServer>, InitializingBean, DisposableBean {
/** The default service URL. */
public static final String DEFAULT_SERVICE_URL = "service:jmx:jmxmp://localhost:9875";
private String serviceUrl = DEFAULT_SERVICE_URL;
private final Map<String, Object> environment = new HashMap<>();
private @Nullable MBeanServerForwarder forwarder;
private @Nullable ObjectName objectName;
private boolean threaded = false;
private boolean daemon = false;
// Created in afterPropertiesSet(); remains null until then.
private @Nullable JMXConnectorServer connectorServer;
/**
* Set the service URL for the {@code JMXConnectorServer}.
*/
public void setServiceUrl(String serviceUrl) {
this.serviceUrl = serviceUrl;
}
/**
* Set the environment properties used to construct the {@code JMXConnectorServer}
* as {@code java.util.Properties} (String key/value pairs).
*/
public void setEnvironment(@Nullable Properties environment) {
CollectionUtils.mergePropertiesIntoMap(environment, this.environment);
}
/**
* Set the environment properties used to construct the {@code JMXConnector}
* as a {@code Map} of String keys and arbitrary Object values.
*/
public void setEnvironmentMap(@Nullable Map<String, ?> environment) {
if (environment != null) {
this.environment.putAll(environment);
}
}
/**
* Set an MBeanServerForwarder to be applied to the {@code JMXConnectorServer}.
*/
public void setForwarder(MBeanServerForwarder forwarder) {
this.forwarder = forwarder;
}
/**
* Set the {@code ObjectName} used to register the {@code JMXConnectorServer}
* itself with the {@code MBeanServer}, as {@code ObjectName} instance
* or as {@code String}.
* @throws MalformedObjectNameException if the {@code ObjectName} is malformed
*/
public void setObjectName(Object objectName) throws MalformedObjectNameException {
this.objectName = ObjectNameManager.getInstance(objectName);
}
/**
* Set whether the {@code JMXConnectorServer} should be started in a separate thread.
*/
public void setThreaded(boolean threaded) {
this.threaded = threaded;
}
/**
* Set whether any threads started for the {@code JMXConnectorServer} should be
* started as daemon threads.
*/
public void setDaemon(boolean daemon) {
this.daemon = daemon;
}
/**
* Start the connector server. If the {@code threaded} flag is set to {@code true},
* the {@code JMXConnectorServer} will be started in a separate thread.
* If the {@code daemon} flag is set to {@code true}, that thread will be
* started as a daemon thread.
* @throws JMException if a problem occurred when registering the connector server
* with the {@code MBeanServer}
* @throws IOException if there is a problem starting the connector server
*/
@Override
public void afterPropertiesSet() throws JMException, IOException {
if (this.server == null) {
this.server = JmxUtils.locateMBeanServer();
}
// Create the JMX service URL.
JMXServiceURL url = new JMXServiceURL(this.serviceUrl);
// Create the connector server now.
this.connectorServer = JMXConnectorServerFactory.newJMXConnectorServer(url, this.environment, this.server);
// Set the given MBeanServerForwarder, if any.
if (this.forwarder != null) {
this.connectorServer.setMBeanServerForwarder(this.forwarder);
}
// Do we want to register the connector with the MBean server?
if (this.objectName != null) {
doRegister(this.connectorServer, this.objectName);
}
try {
if (this.threaded) {
// Start the connector server asynchronously (in a separate thread).
final JMXConnectorServer serverToStart = this.connectorServer;
Thread connectorThread = new Thread() {
@Override
public void run() {
try {
serverToStart.start();
}
catch (IOException ex) {
// NOTE: this JmxException propagates only to the connector thread's
// uncaught-exception handler, never to the caller of afterPropertiesSet().
throw new JmxException("Could not start JMX connector server after delay", ex);
}
}
};
connectorThread.setName("JMX Connector Thread [" + this.serviceUrl + "]");
connectorThread.setDaemon(this.daemon);
connectorThread.start();
}
else {
// Start the connector server in the same thread.
this.connectorServer.start();
}
if (logger.isInfoEnabled()) {
logger.info("JMX connector server started: " + this.connectorServer);
}
}
catch (IOException ex) {
// Unregister the connector server if startup failed.
unregisterBeans();
throw ex;
}
}
// Returns null if called before afterPropertiesSet() has run.
@Override
public @Nullable JMXConnectorServer getObject() {
return this.connectorServer;
}
@Override
public Class<? extends JMXConnectorServer> getObjectType() {
return (this.connectorServer != null ? this.connectorServer.getClass() : JMXConnectorServer.class);
}
@Override
public boolean isSingleton() {
return true;
}
/**
* Stop the {@code JMXConnectorServer} managed by an instance of this class.
* Automatically called on {@code ApplicationContext} shutdown.
* @throws IOException if there is an error stopping the connector server
*/
@Override
public void destroy() throws IOException {
try {
if (this.connectorServer != null) {
if (logger.isInfoEnabled()) {
logger.info("Stopping JMX connector server: " + this.connectorServer);
}
this.connectorServer.stop();
}
}
finally {
// Always unregister from the MBeanServer, even if stop() throws.
unregisterBeans();
}
}
}
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from html import escape
import basket
import basket.errors
from django.conf import settings
from django.http import JsonResponse
from django.views.decorators.http import require_POST, require_safe
from bedrock.base.waffle import switch
from bedrock.newsletter.views import general_error, invalid_email_address
from bedrock.products.forms import VPNWaitlistForm
from lib import l10n_utils
from lib.l10n_utils.fluent import ftl
def vpn_fixed_price_countries():
    """Return the fixed-price country codes as a '|'-delimited string.

    Returns the empty sentinel '||' once variable pricing wave 1 is live.
    """
    if switch('vpn-variable-pricing-wave-1'):
        return '||'
    codes = [code.lower() for code in settings.VPN_FIXED_PRICE_COUNTRY_CODES]
    return '|{}|'.format('|'.join(codes))
def vpn_variable_price_countries():
    """Return the variable-price country codes as a '|'-delimited string.

    The base list is extended per active pricing waffle switches.
    """
    codes = list(settings.VPN_VARIABLE_PRICE_COUNTRY_CODES)
    if switch('vpn-variable-pricing-wave-1'):
        codes += settings.VPN_FIXED_PRICE_COUNTRY_CODES
    if switch('vpn-variable-pricing-eu-expansion'):
        codes += settings.VPN_VARIABLE_PRICE_COUNTRY_CODES_EXPANSION
    lowered = (code.lower() for code in codes)
    return '|{}|'.format('|'.join(lowered))
@require_safe
def vpn_landing_page(request):
    """Render the VPN landing page, or a pricing-experiment variant of it."""
    ftl_files = ['products/vpn/landing', 'products/vpn/shared']
    sub_not_found = request.GET.get('vpn-sub-not-found', None)
    experiment = request.GET.get('entrypoint_experiment', None)
    variation = request.GET.get('entrypoint_variation', None)
    locale = l10n_utils.get_locale(request)
    pricing_params = settings.VPN_VARIABLE_PRICING.get(locale, settings.VPN_VARIABLE_PRICING['us'])

    # Only serve an experiment template for the known experiment/variant pairs.
    if experiment == 'vpn-landing-page-sub-position' and variation in ('a', 'b', 'c'):
        template_name = 'products/vpn/variants/pricing-{}.html'.format(variation)
    else:
        template_name = 'products/vpn/landing.html'

    # Error message for visitors who try to sign in without a subscription
    # (issue 10002): only the literal 'true' enables it.
    sub_not_found = sub_not_found == 'true'

    if switch('vpn-variable-pricing-eu-expansion'):
        available_countries = settings.VPN_AVAILABLE_COUNTRIES_EXPANSION
    else:
        available_countries = settings.VPN_AVAILABLE_COUNTRIES

    context = {
        'fixed_price_countries': vpn_fixed_price_countries(),
        'fixed_monthly_price': settings.VPN_FIXED_MONTHLY_PRICE,
        'variable_price_countries': vpn_variable_price_countries(),
        'default_monthly_price': pricing_params['default']['monthly']['price'],
        'default_6_month_price': pricing_params['default']['6-month']['price'],
        'default_12_month_price': pricing_params['default']['12-month']['price'],
        'available_countries': available_countries,
        'connect_servers': settings.VPN_CONNECT_SERVERS,
        'connect_countries': settings.VPN_CONNECT_COUNTRIES,
        'connect_devices': settings.VPN_CONNECT_DEVICES,
        'sub_not_found': sub_not_found,
    }
    return l10n_utils.render(request, template_name, context, ftl_files=ftl_files)
@require_safe
def vpn_invite_page(request):
    """Render the VPN waitlist invite page with its newsletter signup form."""
    locale = l10n_utils.get_locale(request)
    context = {'newsletter_form': VPNWaitlistForm(locale)}
    return l10n_utils.render(
        request,
        'products/vpn/invite.html',
        context,
        ftl_files=['products/vpn/landing', 'products/vpn/shared'],
    )
@require_POST
def vpn_invite_waitlist(request):
# Handle the VPN waitlist signup POST: validate the form, subscribe the
# address via basket, and return a JSON {'success': ..., 'errors': [...]}
# payload.
errors = []
locale = l10n_utils.get_locale(request)
form = VPNWaitlistForm(locale, request.POST)
if form.is_valid():
data = form.cleaned_data
kwargs = {
'email': data['email'],
'fpn_platform': ','.join(data['platforms']),
'fpn_country': data['country'],
'lang': data['lang'],
'newsletters': 'guardian-vpn-waitlist',
}
if settings.BASKET_API_KEY:
kwargs['api_key'] = settings.BASKET_API_KEY
# NOTE this is not a typo; Referrer is misspelled in the HTTP spec
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.36
if not kwargs.get('source_url') and request.META.get('HTTP_REFERER'):
kwargs['source_url'] = request.META['HTTP_REFERER']
try:
basket.subscribe(**kwargs)
except basket.BasketException as e:
# Map basket error codes onto the shared localized error strings.
if e.code == basket.errors.BASKET_INVALID_EMAIL:
errors.append(str(invalid_email_address))
else:
errors.append(str(general_error))
else:
# Form invalid: translate field errors into user-facing messages.
if 'email' in form.errors:
errors.append(ftl('newsletter-form-please-enter-a-valid'))
if 'privacy' in form.errors:
errors.append(ftl('newsletter-form-you-must-agree-to'))
for fieldname in ('fmt', 'lang', 'country'):
if fieldname in form.errors:
errors.extend(form.errors[fieldname])
if errors:
# HTML-escape before returning: the strings may be echoed into the page.
errors = [escape(e) for e in errors]
resp = {
'success': False,
'errors': errors,
}
else:
resp = {'success': True}
return JsonResponse(resp)
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Template for Test</title>
</head>
<body></body>
</html> | html | github | https://github.com/django/django | tests/admin_docs/templates/view_for_loader_test.html |
###############################################################################
# Copyright 2016 - Climate Research Division
# Environment and Climate Change Canada
#
# This file is part of the "EC-CAS diags" package.
#
# "EC-CAS diags" is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# "EC-CAS diags" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with "EC-CAS diags". If not, see <http://www.gnu.org/licenses/>.
###############################################################################
from .eccas_dry import ECCAS_Data
class ECCAS_EnKF_Data(ECCAS_Data):
    """
    EC-CAS model output for Ensemble Kalman Filter experiments.
    """

    # Method to open a single file.
    @staticmethod
    def open_file(filename):
        from pygeode.formats import fstd
        # Ugly hack to force the PyGeode FSTD interface to always associate the
        # !! record with the fields (override the IG*/IP* pairing).
        orig_attach_vertical_axes = fstd.attach_vertical_axes
        def hacked_attach_vertical_axes(varlist, vertical_records):
            vertical_records['ip1'] = varlist[0].atts['ig1']
            vertical_records['ip2'] = varlist[0].atts['ig2']
            vertical_records['ip3'] = varlist[0].atts['ig3']
            return orig_attach_vertical_axes(varlist, vertical_records)
        # Apply the hack, read the data, then remove the hack after we're done.
        # try/finally guarantees the module-level monkey-patch is undone even
        # if fstd.open raises (the original left the hack installed on error).
        fstd.attach_vertical_axes = hacked_attach_vertical_axes
        try:
            dataset = fstd.open(filename, raw_list=True)
        finally:
            fstd.attach_vertical_axes = orig_attach_vertical_axes
        # We need to rename the CO2 field from the ensemble spread file, so it
        # doesn't get mixed up with the ensemble mean data (also called CO2).
        # Determine if we have ensemble spread data from EC-CAS.
        # Add a suffix to the variable names, if we have ensemble spread data.
        for var in dataset:
            etiket = var.atts.get('etiket')
            if etiket in ('STDDEV', 'E2090KFN192'):
                var.name += "_ensemblespread"
            elif etiket in ('MEAN', 'E2AVGANNALL', 'E2AVGANPALL', 'INITIAL'):
                pass  # No name clobbering for ensemble mean
            else:
                from warnings import warn
                warn("Unable to determine if etiket '%s' is mean or spread. Data will not be used." % etiket)
                var.name += "_unknown"
        return dataset

    # Method to decode an opened dataset (standardize variable names, and add
    # any extra info needed (pressure values, cell area, etc.)).
    @classmethod
    def decode(cls, dataset):
        from .eccas_dry import ECCAS_Data
        dataset = list(dataset)
        # Limit the forecast period so there's no overlap.
        # Assuming a 6-hour interval between analyses, and exclude last
        # (hour 6) forecast.
        for i, var in enumerate(dataset):
            # Only consider doing this if the forecast extends over a range of
            # values.  Not applicable for the standard trial and analysis
            # output, which has a single forecast value of 6 and 0 respectively.
            if var.hasaxis('forecast') and len(var.forecast) > 1:
                dataset[i] = var(forecast=(0.0, 4.5))
        # Detect ensemble spread fields.
        suffix = '_ensemblespread'
        for var in dataset:
            if var.name.endswith(suffix):
                # BUGFIX: the original used var.name.rstrip('_ensemblespread').
                # str.rstrip strips a *set of characters*, not a suffix, so it
                # could also eat trailing letters of the tracer name itself
                # (e.g. a name ending in 'e', 's', 'a', ...).  Slice the
                # suffix off instead.
                var.name = var.name[:-len(suffix)]
                var.atts['ensemble_op'] = 'spread'
        # Do EC-CAS field decoding.
        dataset = ECCAS_Data.decode.__func__(cls, dataset)
        # Add back ensemble spread suffix.
        dataset = list(dataset)
        for var in dataset:
            if var.atts.get('ensemble_op') == 'spread':
                var.name += suffix
        return dataset

    # For our EnKF cycles, we need to hard-code the ig1/ig2 of the tracers.
    # This is so we match the ip1/ip2 of the EnKF initial file we're injecting
    # into.
    @staticmethod
    def _fstd_tweak_records(records):
        # Select non-coordinate records (things that aren't already using IP2).
        ind = (records['ip2'] == 0)
        # Hard code the ig1 / ig2.
        records['ig1'][ind] = 38992
        records['ig2'][ind] = 45710
        records['ig3'][ind] = 1
        # Update the coordinate records to be consistent.
        records['ip1'][~ind] = 38992
        records['ip2'][~ind] = 45710
        records['ip3'][~ind] = 1
        # Just for completion, set the typvar and deet as well.
        records['typvar'][ind] = 'A'
        records['deet'][ind] = 0

    # Method to find all files in the given directory, which can be accessed
    # through this interface.
    @staticmethod
    def find_files(dirname):
        from os.path import exists
        from glob import glob
        files = []
        ##############################
        # Model output
        ##############################
        if exists(dirname + '/model'):
            model_dir = dirname + '/model'
        else:
            model_dir = dirname
        files.extend(glob(model_dir + "/[0-9]*_[0-9]*chmmean"))
        files.extend(glob(model_dir + "/[0-9]*_[0-9]*chmstd"))
        files.extend(glob(model_dir + "/[0-9]*_[0-9]*analrms"))
        return files
# Add this interface to the table.
from . import table
# Expose this reader under the 'eccas-enkf' key of the interface registry.
table['eccas-enkf'] = ECCAS_EnKF_Data
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
import json
from thrift.Thrift import TType
# NOTE(review): Python 2-only module -- it relies on 'unicode', 'long',
# dict.iteritems, and the 'raise Exception, msg' statement form, none of
# which exist under Python 3.
class ThriftJSONDecoder(json.JSONDecoder):
"""A decoder that makes python thrift structs JSON deserializable via the
standard python json module.
Pass this decoder when reading json, like this:
json.loads(str, cls=text.ThriftJSONDecoder, <other kwargs>)
Note that this is not a full protocol implementation in the thrift sense. This
is just a quick-and-easy parser for unittests etc.
"""
# Keyword-argument name callers must pass with the root struct class.
ROOT_THRIFT_CLASS = 'root_thrift_class'
def __init__(self, *args, **kwargs):
# Pop our private kwarg before delegating so JSONDecoder never sees it.
self.root_thrift_class = kwargs[ThriftJSONDecoder.ROOT_THRIFT_CLASS]
del kwargs[ThriftJSONDecoder.ROOT_THRIFT_CLASS]
super(ThriftJSONDecoder, self).__init__(*args, **kwargs)
def decode(self, json_str):
# Plain-JSON parse first, then recursive conversion guided by thrift_spec.
# (local name shadows the builtin 'dict'; harmless here but worth renaming)
dict = super(ThriftJSONDecoder, self).decode(json_str)
return self._convert(dict, TType.STRUCT,
(self.root_thrift_class, self.root_thrift_class.thrift_spec))
def _convert(self, val, ttype, ttype_info):
# Recursively map a plain-JSON value onto a thrift value of type 'ttype',
# using 'ttype_info' (shape depends on ttype, mirroring thrift_spec).
if ttype == TType.STRUCT:
(thrift_class, thrift_spec) = ttype_info
ret = thrift_class()
for field in thrift_spec:
if field is not None:
(tag, field_ttype, field_name, field_ttype_info, dummy) = field
# Fields absent from the JSON are simply left at their defaults.
if field_name in val:
converted_val = self._convert(val[field_name], field_ttype, field_ttype_info)
setattr(ret, field_name, converted_val)
elif ttype == TType.LIST:
(element_ttype, element_ttype_info) = ttype_info
ret = [self._convert(x, element_ttype, element_ttype_info) for x in val]
elif ttype == TType.SET:
(element_ttype, element_ttype_info) = ttype_info
ret = set([self._convert(x, element_ttype, element_ttype_info) for x in val])
elif ttype == TType.MAP:
(key_ttype, key_ttype_info, val_ttype, val_ttype_info) = ttype_info
ret = dict([(self._convert(k, key_ttype, key_ttype_info),
self._convert(v, val_ttype, val_ttype_info)) for (k, v) in val.iteritems()])
elif ttype == TType.STRING:
ret = unicode(val)
elif ttype == TType.DOUBLE:
ret = float(val)
elif ttype == TType.I64:
ret = long(val)
elif ttype == TType.I32 or ttype == TType.I16 or ttype == TType.BYTE:
ret = int(val)
elif ttype == TType.BOOL:
# Double negation coerces any truthy/falsy JSON value to a bool.
ret = not not val
else:
raise Exception, 'Unrecognized thrift field type: %d' % ttype
return ret
def json_to_thrift(json_str, root_thrift_class):
"""A utility shortcut function to parse a thrift json object of the specified class."""
# Delegates to json.loads with ThriftJSONDecoder, forwarding the root class
# through the decoder's private 'root_thrift_class' kwarg.
return json.loads(json_str, cls=ThriftJSONDecoder, root_thrift_class=root_thrift_class)
# $Id$
#
# pjsua Python GUI Demo
#
# Copyright (C)2013 Teluu Inc. (http://www.teluu.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import sys
if sys.version_info[0] >= 3: # Python 3
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox as msgbox
else:
import Tkinter as tk
import tkMessageBox as msgbox
import ttk
import random
import pjsua2 as pj
import application
import endpoint as ep
# Call class
class Call(pj.Call):
    """
    High level Python Call object, derived from pjsua2's Call object.
    """
    def __init__(self, acc, peer_uri='', chat=None, call_id = pj.PJSUA_INVALID_ID):
        pj.Call.__init__(self, acc, call_id)
        self.acc = acc            # owning Account
        self.peerUri = peer_uri   # remote party SIP URI
        self.chat = chat          # associated chat window, may be None
        self.connected = False    # True once INVITE session is confirmed
        self.onhold = False       # True while the remote has us on hold

    def onCallState(self, prm):
        # Track confirmed state and mirror it to the chat UI (if any).
        ci = self.getInfo()
        self.connected = ci.state == pj.PJSIP_INV_STATE_CONFIRMED
        if self.chat:
            self.chat.updateCallState(self, ci)

    def onCallMediaState(self, prm):
        ci = self.getInfo()
        for mi in ci.media:
            if mi.type == pj.PJMEDIA_TYPE_AUDIO and \
               (mi.status == pj.PJSUA_CALL_MEDIA_ACTIVE or \
                mi.status == pj.PJSUA_CALL_MEDIA_REMOTE_HOLD):
                m = self.getMedia(mi.index)
                am = pj.AudioMedia.typecastFromMedia(m)
                # Connect ports: mic -> call, call -> speaker.
                ep.Endpoint.instance.audDevManager().getCaptureDevMedia().startTransmit(am)
                am.startTransmit(ep.Endpoint.instance.audDevManager().getPlaybackDevMedia())
                # Report remote hold/resume transitions to the chat window.
                if mi.status == pj.PJSUA_CALL_MEDIA_REMOTE_HOLD and not self.onhold:
                    self.chat.addMessage(None, "'%s' sets call onhold" % (self.peerUri))
                    self.onhold = True
                elif mi.status == pj.PJSUA_CALL_MEDIA_ACTIVE and self.onhold:
                    self.chat.addMessage(None, "'%s' sets call active" % (self.peerUri))
                    self.onhold = False
        if self.chat:
            self.chat.updateCallMediaState(self, ci)

    def onInstantMessage(self, prm):
        # chat instance should have been initialized
        if not self.chat: return
        self.chat.addMessage(self.peerUri, prm.msgBody)
        self.chat.showWindow()

    def onInstantMessageStatus(self, prm):
        # BUGFIX: the original tested 'prm.code/100 == 2', which only worked
        # under Python 2's integer division; under Python 3 (also supported
        # by this demo, see the version check at import time) 202/100 is 2.02,
        # so every 2xx status other than exactly 200 was misreported as a
        # failure.  Floor division behaves identically on both interpreters.
        if prm.code // 100 == 2: return
        # chat instance should have been initialized
        if not self.chat: return
        self.chat.addMessage(None, "Failed sending message to '%s' (%d): %s" % (self.peerUri, prm.code, prm.reason))

    def onTypingIndication(self, prm):
        # chat instance should have been initialized
        if not self.chat: return
        self.chat.setTypingIndication(self.peerUri, prm.isTyping)

    def onDtmfDigit(self, prm):
        #msgbox.showinfo("pygui", 'Got DTMF:' + prm.digit)
        pass

    def onCallMediaTransportState(self, prm):
        #msgbox.showinfo("pygui", "Media transport state")
        pass
# Allow running this module directly to launch the GUI demo application.
if __name__ == '__main__':
application.main()
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from sqlalchemy import select
from airflow.models import Log
from airflow.utils.session import provide_session
pytestmark = pytest.mark.db_test
class TestSession:
# Passthrough target for @provide_session: returns whatever `session` it
# received, so tests can observe what the decorator injected.
def dummy_session(self, session=None):
return session
# provide_session must reject callables that have no `session` parameter.
def test_raised_provide_session(self):
with pytest.raises(ValueError, match="Function .*dummy has no `session` argument"):
@provide_session
def dummy():
pass
# Without an explicit session the wrapper must create and inject one.
def test_provide_session_without_args_and_kwargs(self):
assert self.dummy_session() is None
wrapper = provide_session(self.dummy_session)
assert wrapper() is not None
# A positionally supplied session must be passed through unchanged.
def test_provide_session_with_args(self):
wrapper = provide_session(self.dummy_session)
session = object()
assert wrapper(session) is session
# A keyword-supplied session must be passed through unchanged.
def test_provide_session_with_kwargs(self):
wrapper = provide_session(self.dummy_session)
session = object()
assert wrapper(session=session) is session
@pytest.mark.asyncio
async def test_async_session(self):
# End-to-end check of the async SQLAlchemy session: insert a Log row,
# commit, and read the same row back by its event name.
from airflow.settings import AsyncSession
session = AsyncSession()
session.add(Log(event="hihi1234"))
await session.commit()
my_special_log_event = await session.scalar(select(Log).where(Log.event == "hihi1234").limit(1))
assert my_special_log_event.event == "hihi1234"
# Variables for the ansible-galaxy collection SCM integration tests.
# All paths are rooted under the per-run `galaxy_dir` temporary directory.
install_path: "{{ galaxy_dir }}/collections/ansible_collections"
alt_install_path: "{{ galaxy_dir }}/other_collections/ansible_collections"
scm_path: "{{ galaxy_dir }}/development"
test_repo_path: "{{ galaxy_dir }}/development/ansible_test"
test_error_repo_path: "{{ galaxy_dir }}/development/error_test"
# resolvelib versions the tests are exercised against.
supported_resolvelib_versions:
- "0.8.0" # Oldest supported
- "< 2.0.0"
//
// Response.swift
//
// Copyright (c) 2014-2018 Alamofire Software Foundation (http://alamofire.org/)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
import Foundation
/// Default type of `DataResponse` returned by Alamofire, with an `AFError` `Failure` type.
public typealias AFDataResponse<Success> = DataResponse<Success, AFError>
/// Default type of `DownloadResponse` returned by Alamofire, with an `AFError` `Failure` type.
public typealias AFDownloadResponse<Success> = DownloadResponse<Success, AFError>
/// Type used to store all values associated with a serialized response of a `DataRequest` or `UploadRequest`.
public struct DataResponse<Success, Failure: Error>: Sendable where Success: Sendable, Failure: Sendable {
/// The URL request sent to the server.
public let request: URLRequest?
/// The server's response to the URL request.
public let response: HTTPURLResponse?
/// The data returned by the server.
public let data: Data?
/// The final metrics of the response.
///
/// - Note: Due to `FB7624529`, collection of `URLSessionTaskMetrics` on watchOS is currently disabled.`
///
public let metrics: URLSessionTaskMetrics?
/// The time taken to serialize the response.
public let serializationDuration: TimeInterval
/// The result of response serialization.
public let result: Result<Success, Failure>
/// Returns the associated value of the result if it is a success, `nil` otherwise.
// NOTE(review): `result.success` / `result.failure` are presumably
// Alamofire's own `Result` conveniences, not stdlib members — confirm.
public var value: Success? { result.success }
/// Returns the associated error value if the result is a failure, `nil` otherwise.
public var error: Failure? { result.failure }
/// Creates a `DataResponse` instance with the specified parameters derived from the response serialization.
///
/// - Parameters:
///   - request:               The `URLRequest` sent to the server.
///   - response:              The `HTTPURLResponse` from the server.
///   - data:                  The `Data` returned by the server.
///   - metrics:               The `URLSessionTaskMetrics` of the `DataRequest` or `UploadRequest`.
///   - serializationDuration: The duration taken by serialization.
///   - result:                The `Result` of response serialization.
public init(request: URLRequest?,
response: HTTPURLResponse?,
data: Data?,
metrics: URLSessionTaskMetrics?,
serializationDuration: TimeInterval,
result: Result<Success, Failure>) {
self.request = request
self.response = response
self.data = data
self.metrics = metrics
self.serializationDuration = serializationDuration
self.result = result
}
}
// MARK: -
// NOTE(review): the leading whitespace inside the multiline string literals
// below is significant to the rendered layout but appears to have been
// stripped in this copy — restore the original indentation before relying
// on exact output.
extension DataResponse: CustomStringConvertible, CustomDebugStringConvertible {
/// The textual representation used when written to an output stream, which includes whether the result was a
/// success or failure.
public var description: String {
"\(result)"
}
/// The debug textual representation used when written to an output stream, which includes (if available) a summary
/// of the `URLRequest`, the request's headers and body (if decodable as a `String` below 100KB); the
/// `HTTPURLResponse`'s status code, headers, and body; the duration of the network and serialization actions; and
/// the `Result` of serialization.
public var debugDescription: String {
// Without a request there is nothing else worth printing.
guard let urlRequest = request else { return "[Request]: None\n[Result]: \(result)" }
let requestDescription = DebugDescription.description(of: urlRequest)
let responseDescription = response.map { response in
let responseBodyDescription = DebugDescription.description(for: data, headers: response.headers)
return """
\(DebugDescription.description(of: response))
\(responseBodyDescription.indentingNewlines())
"""
} ?? "[Response]: None"
let networkDuration = metrics.map { "\($0.taskInterval.duration)s" } ?? "None"
return """
\(requestDescription)
\(responseDescription)
[Network Duration]: \(networkDuration)
[Serialization Duration]: \(serializationDuration)s
[Result]: \(result)
"""
}
}
// MARK: -
extension DataResponse {
    /// Rebuilds this response around a different `result`, keeping the captured
    /// request, response, data, metrics, and serialization duration intact.
    private func replacing<S, F: Error>(result newResult: Result<S, F>) -> DataResponse<S, F> {
        DataResponse<S, F>(request: request,
                           response: response,
                           data: data,
                           metrics: metrics,
                           serializationDuration: serializationDuration,
                           result: newResult)
    }

    /// Applies a non-throwing `transform` to the success value, if any.
    ///
    /// - Parameter transform: Closure run on the success value of this instance's result.
    ///
    /// - Returns: A `DataResponse` whose result wraps the transformed value; a
    ///   failure result is carried through unchanged.
    public func map<NewSuccess>(_ transform: (Success) -> NewSuccess) -> DataResponse<NewSuccess, Failure> {
        replacing(result: result.map(transform))
    }

    /// Applies a throwing `transform` to the success value, if any.
    ///
    /// - Parameter transform: Throwing closure run on the success value of this instance's result.
    ///
    /// - Returns: A `DataResponse` whose result wraps the transformed value, or
    ///   the thrown error; an existing failure is carried through unchanged.
    public func tryMap<NewSuccess>(_ transform: (Success) throws -> NewSuccess) -> DataResponse<NewSuccess, any Error> {
        replacing(result: result.tryMap(transform))
    }

    /// Applies a non-throwing `transform` to the failure value, if any.
    ///
    /// - Parameter transform: Closure run on the error of this instance's result.
    ///
    /// - Returns: A `DataResponse` whose failure has been transformed; a success
    ///   result is carried through unchanged.
    public func mapError<NewFailure: Error>(_ transform: (Failure) -> NewFailure) -> DataResponse<Success, NewFailure> {
        replacing(result: result.mapError(transform))
    }

    /// Applies a throwing `transform` to the failure value, if any.
    ///
    /// - Parameter transform: Throwing closure run on the error of this instance's result.
    ///
    /// - Returns: A `DataResponse` whose failure has been transformed (or replaced
    ///   by the thrown error); a success result is carried through unchanged.
    public func tryMapError<NewFailure: Error>(_ transform: (Failure) throws -> NewFailure) -> DataResponse<Success, any Error> {
        replacing(result: result.tryMapError(transform))
    }
}
// MARK: -
/// Used to store all data associated with a serialized response of a download request.
public struct DownloadResponse<Success, Failure: Error>: Sendable where Success: Sendable, Failure: Sendable {
/// The URL request sent to the server.
public let request: URLRequest?
/// The server's response to the URL request.
public let response: HTTPURLResponse?
/// The final destination URL of the data returned from the server after it is moved.
public let fileURL: URL?
/// The resume data generated if the request was cancelled.
public let resumeData: Data?
/// The final metrics of the response.
///
/// - Note: Due to `FB7624529`, collection of `URLSessionTaskMetrics` on watchOS is currently disabled.`
///
public let metrics: URLSessionTaskMetrics?
/// The time taken to serialize the response.
public let serializationDuration: TimeInterval
/// The result of response serialization.
public let result: Result<Success, Failure>
/// Returns the associated value of the result if it is a success, `nil` otherwise.
// NOTE(review): `result.success` / `result.failure` are presumably
// Alamofire's own `Result` conveniences, not stdlib members — confirm.
public var value: Success? { result.success }
/// Returns the associated error value if the result is a failure, `nil` otherwise.
public var error: Failure? { result.failure }
/// Creates a `DownloadResponse` instance with the specified parameters derived from response serialization.
///
/// - Parameters:
///   - request:               The `URLRequest` sent to the server.
///   - response:              The `HTTPURLResponse` from the server.
///   - fileURL:               The final destination URL of the data returned from the server after it is moved.
///   - resumeData:            The resume `Data` generated if the request was cancelled.
///   - metrics:               The `URLSessionTaskMetrics` of the `DownloadRequest`.
///   - serializationDuration: The duration taken by serialization.
///   - result:                The `Result` of response serialization.
public init(request: URLRequest?,
response: HTTPURLResponse?,
fileURL: URL?,
resumeData: Data?,
metrics: URLSessionTaskMetrics?,
serializationDuration: TimeInterval,
result: Result<Success, Failure>) {
self.request = request
self.response = response
self.fileURL = fileURL
self.resumeData = resumeData
self.metrics = metrics
self.serializationDuration = serializationDuration
self.result = result
}
}
// MARK: -
// NOTE(review): the leading whitespace inside the multiline string literal
// below is significant to the rendered layout but appears to have been
// stripped in this copy — restore the original indentation before relying
// on exact output.
extension DownloadResponse: CustomStringConvertible, CustomDebugStringConvertible {
/// The textual representation used when written to an output stream, which includes whether the result was a
/// success or failure.
public var description: String {
"\(result)"
}
/// The debug textual representation used when written to an output stream, which includes the URL request, the URL
/// response, the temporary and destination URLs, the resume data, the durations of the network and serialization
/// actions, and the response serialization result.
public var debugDescription: String {
// Without a request there is nothing else worth printing.
guard let urlRequest = request else { return "[Request]: None\n[Result]: \(result)" }
let requestDescription = DebugDescription.description(of: urlRequest)
let responseDescription = response.map(DebugDescription.description(of:)) ?? "[Response]: None"
let networkDuration = metrics.map { "\($0.taskInterval.duration)s" } ?? "None"
let resumeDataDescription = resumeData.map { "\($0)" } ?? "None"
return """
\(requestDescription)
\(responseDescription)
[File URL]: \(fileURL?.path ?? "None")
[Resume Data]: \(resumeDataDescription)
[Network Duration]: \(networkDuration)
[Serialization Duration]: \(serializationDuration)s
[Result]: \(result)
"""
}
}
// MARK: -
extension DownloadResponse {
    /// Rebuilds this response around a different `result`, keeping the captured
    /// request, response, file URL, resume data, metrics, and serialization
    /// duration intact.
    private func replacing<S, F: Error>(result newResult: Result<S, F>) -> DownloadResponse<S, F> {
        DownloadResponse<S, F>(request: request,
                               response: response,
                               fileURL: fileURL,
                               resumeData: resumeData,
                               metrics: metrics,
                               serializationDuration: serializationDuration,
                               result: newResult)
    }

    /// Applies a non-throwing `transform` to the success value, if any.
    ///
    /// - Parameter transform: Closure run on the success value of this instance's result.
    ///
    /// - Returns: A `DownloadResponse` whose result wraps the transformed value;
    ///   a failure result is carried through unchanged.
    public func map<NewSuccess>(_ transform: (Success) -> NewSuccess) -> DownloadResponse<NewSuccess, Failure> {
        replacing(result: result.map(transform))
    }

    /// Applies a throwing `transform` to the success value, if any.
    ///
    /// - Parameter transform: Throwing closure run on the success value of this instance's result.
    ///
    /// - Returns: A `DownloadResponse` whose result wraps the transformed value,
    ///   or the thrown error; an existing failure is carried through unchanged.
    public func tryMap<NewSuccess>(_ transform: (Success) throws -> NewSuccess) -> DownloadResponse<NewSuccess, any Error> {
        replacing(result: result.tryMap(transform))
    }

    /// Applies a non-throwing `transform` to the failure value, if any.
    ///
    /// - Parameter transform: Closure run on the error of this instance's result.
    ///
    /// - Returns: A `DownloadResponse` whose failure has been transformed; a
    ///   success result is carried through unchanged.
    public func mapError<NewFailure: Error>(_ transform: (Failure) -> NewFailure) -> DownloadResponse<Success, NewFailure> {
        replacing(result: result.mapError(transform))
    }

    /// Applies a throwing `transform` to the failure value, if any.
    ///
    /// - Parameter transform: Throwing closure run on the error of this instance's result.
    ///
    /// - Returns: A `DownloadResponse` whose failure has been transformed (or
    ///   replaced by the thrown error); a success result is carried through unchanged.
    public func tryMapError<NewFailure: Error>(_ transform: (Failure) throws -> NewFailure) -> DownloadResponse<Success, any Error> {
        replacing(result: result.tryMapError(transform))
    }
}
// Namespaced helpers that build the debugDescription sections above.
// NOTE(review): the leading whitespace inside the multiline string literals
// below is significant to the rendered layout but appears to have been
// stripped in this copy — restore the original indentation before relying
// on exact output.
private enum DebugDescription {
/// Renders the request line, headers, and (printable) body of a `URLRequest`.
static func description(of request: URLRequest) -> String {
// NOTE(review): force-unwraps `httpMethod`; presumably safe because
// `URLRequest.httpMethod` defaults to "GET" — confirm for requests
// constructed outside Alamofire.
let requestSummary = "\(request.httpMethod!) \(request)"
let requestHeadersDescription = DebugDescription.description(for: request.headers)
let requestBodyDescription = DebugDescription.description(for: request.httpBody, headers: request.headers)
return """
[Request]: \(requestSummary)
\(requestHeadersDescription.indentingNewlines())
\(requestBodyDescription.indentingNewlines())
"""
}
/// Renders the status code and headers of an `HTTPURLResponse`.
static func description(of response: HTTPURLResponse) -> String {
"""
[Response]:
[Status Code]: \(response.statusCode)
\(DebugDescription.description(for: response.headers).indentingNewlines())
"""
}
/// Renders a sorted header list, or "[Headers]: None" when empty.
static func description(for headers: HTTPHeaders) -> String {
guard !headers.isEmpty else { return "[Headers]: None" }
let headerDescription = "\(headers.sorted())".indentingNewlines()
return """
[Headers]:
\(headerDescription)
"""
}
/// Renders a body as UTF-8 text when it is small enough and the
/// Content-Type mentions one of `printableTypes`; otherwise reports only
/// its byte count.
static func description(for data: Data?,
headers: HTTPHeaders,
allowingPrintableTypes printableTypes: [String] = ["json", "xml", "text"],
maximumLength: Int = 100_000) -> String {
guard let data, !data.isEmpty else { return "[Body]: None" }
guard
data.count <= maximumLength,
// True when any printable-type substring occurs in the Content-Type
// header; a missing header contributes nothing via compactMap.
printableTypes.compactMap({ headers["Content-Type"]?.contains($0) }).contains(true)
else { return "[Body]: \(data.count) bytes" }
return """
[Body]:
\(String(decoding: data, as: UTF8.self)
.trimmingCharacters(in: .whitespacesAndNewlines)
.indentingNewlines())
"""
}
}
extension String {
    /// Returns a copy in which every line after the first is indented by
    /// `spaceCount` spaces (i.e. each "\n" gains a trailing run of spaces).
    fileprivate func indentingNewlines(by spaceCount: Int = 4) -> String {
        let pad = String(repeating: " ", count: spaceCount)
        return split(separator: "\n", omittingEmptySubsequences: false)
            .joined(separator: "\n" + pad)
    }
}
# -*- coding: utf-8 -*-
from django.db import migrations, models
from django.db.migrations.optimizer import MigrationOptimizer
from django.test import SimpleTestCase
from .models import CustomModelBase, EmptyManager
class OptimizerTests(SimpleTestCase):
"""
Tests the migration autodetector.
"""
def optimize(self, operations):
    """Run the optimizer over ``operations``.

    Returns a 2-tuple ``(optimized_operations, iteration_count)`` so
    callers can assert on the result and on how many passes were needed.
    """
    opt = MigrationOptimizer()
    optimized = opt.optimize(operations)
    return optimized, opt._iterations
def assertOptimizesTo(self, operations, expected, exact=None, less_than=None):
    """Assert that ``operations`` optimize to ``expected``.

    Operations are compared via the repr of their ``deconstruct()``
    output. ``exact`` / ``less_than`` optionally constrain how many
    optimizer iterations were required.
    """
    result, iterations = self.optimize(operations)
    self.assertEqual(
        [repr(op.deconstruct()) for op in expected],
        [repr(op.deconstruct()) for op in result],
    )
    if exact is not None and iterations != exact:
        raise self.failureException(
            "Optimization did not take exactly %s iterations (it took %s)" % (exact, iterations)
        )
    if less_than is not None and iterations >= less_than:
        raise self.failureException(
            "Optimization did not take less than %s iterations (it took %s)" % (less_than, iterations)
        )
def assertDoesNotOptimize(self, operations):
    """Assert the optimizer leaves ``operations`` unchanged."""
    self.assertOptimizesTo(operations, operations)
def test_single(self):
"""
Tests that the optimizer does nothing on a single operation,
and that it does it in just one pass.
"""
self.assertOptimizesTo(
[migrations.DeleteModel("Foo")],
[migrations.DeleteModel("Foo")],
exact=1,
)
def test_create_delete_model(self):
"""
CreateModel and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_rename_model(self):
    """
    CreateModel should absorb RenameModels.
    """
    managers = [('objects', EmptyManager())]
    # bases must be a tuple: "(CustomModelBase)" is just the class itself,
    # not a one-element tuple — the trailing comma matters (compare the
    # other call sites that pass bases=("testapp.Foo", )).
    self.assertOptimizesTo(
        [
            migrations.CreateModel(
                name="Foo",
                fields=[("name", models.CharField(max_length=255))],
                options={'verbose_name': 'Foo'},
                bases=(CustomModelBase,),
                managers=managers,
            ),
            migrations.RenameModel("Foo", "Bar"),
        ],
        [
            migrations.CreateModel(
                "Bar",
                [("name", models.CharField(max_length=255))],
                options={'verbose_name': 'Foo'},
                bases=(CustomModelBase,),
                managers=managers,
            )
        ],
    )
def test_rename_model_self(self):
"""
RenameModels should absorb themselves.
"""
self.assertOptimizesTo(
[
migrations.RenameModel("Foo", "Baa"),
migrations.RenameModel("Baa", "Bar"),
],
[
migrations.RenameModel("Foo", "Bar"),
],
)
def _test_create_alter_foo_delete_model(self, alter_foo):
"""
CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/
AlterOrderWithRespectTo, and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.AlterModelTable("Foo", "woohoo"),
alter_foo,
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_alter_unique_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterUniqueTogether("Foo", [["a", "b"]]))
def test_create_alter_index_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterIndexTogether("Foo", [["a", "b"]]))
def test_create_alter_owrt_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterOrderWithRespectTo("Foo", "a"))
def _test_alter_alter_model(self, alter_foo, alter_bar):
"""
Two AlterUniqueTogether/AlterIndexTogether/AlterOrderWithRespectTo
should collapse into the second.
"""
self.assertOptimizesTo(
[
alter_foo,
alter_bar,
],
[
alter_bar,
],
)
def test_alter_alter_table_model(self):
self._test_alter_alter_model(
migrations.AlterModelTable("Foo", "a"),
migrations.AlterModelTable("Foo", "b"),
)
def test_alter_alter_unique_model(self):
self._test_alter_alter_model(
migrations.AlterUniqueTogether("Foo", [["a", "b"]]),
migrations.AlterUniqueTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_index_model(self):
self._test_alter_alter_model(
migrations.AlterIndexTogether("Foo", [["a", "b"]]),
migrations.AlterIndexTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_owrt_model(self):
self._test_alter_alter_model(
migrations.AlterOrderWithRespectTo("Foo", "a"),
migrations.AlterOrderWithRespectTo("Foo", "b"),
)
def test_optimize_through_create(self):
"""
We should be able to optimize away create/delete through a create or delete
of a different model, but only if the create operation does not mention the model
at all.
"""
# These should work
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Bar"),
migrations.DeleteModel("Foo"),
],
[],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
migrations.DeleteModel("Bar"),
],
[],
)
# This should not work - FK should block it
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]),
migrations.DeleteModel("Foo"),
],
)
# This should not work - bases should block it
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )),
migrations.DeleteModel("Foo"),
],
)
def test_create_model_add_field(self):
"""
AddField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
migrations.AddField("Foo", "age", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
],
)
def test_create_model_add_field_not_through_fk(self):
"""
AddField should NOT optimize into CreateModel if it's an FK to a model
that's between them.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Link", [("url", models.TextField())]),
migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Link", [("url", models.TextField())]),
migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)),
],
)
def test_create_model_add_field_not_through_m2m_through(self):
"""
AddField should NOT optimize into CreateModel if it's an M2M using a
through that's created between them.
"""
# Note: The middle model is not actually a valid through model,
# but that doesn't matter, as we never render it.
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("LinkThrough", []),
migrations.AddField("Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough")),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("LinkThrough", []),
migrations.AddField("Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough")),
],
)
def test_create_model_alter_field(self):
"""
AlterField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
migrations.AlterField("Foo", "name", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
],
)
def test_create_model_rename_field(self):
"""
RenameField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("title", models.CharField(max_length=255)),
],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
],
)
def test_add_field_rename_field(self):
"""
RenameField should optimize into AddField
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.AddField("Foo", "title", models.CharField(max_length=255)),
],
)
def test_alter_field_rename_field(self):
"""
RenameField should optimize to the other side of AlterField,
and into itself.
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
migrations.RenameField("Foo", "title", "nom"),
],
[
migrations.RenameField("Foo", "name", "nom"),
migrations.AlterField("Foo", "nom", models.CharField(max_length=255)),
],
)
def test_create_model_remove_field(self):
"""
RemoveField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
migrations.RemoveField("Foo", "age"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
],
options={'verbose_name': 'Foo'},
bases=(CustomModelBase),
managers=managers,
),
],
)
def test_add_field_alter_field(self):
"""
AlterField should optimize into AddField.
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.AlterField("Foo", "age", models.FloatField(default=2.4)),
],
[
migrations.AddField("Foo", name="age", field=models.FloatField(default=2.4)),
],
)
def test_add_field_delete_field(self):
"""
RemoveField should cancel AddField
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[],
)
def test_alter_field_delete_field(self):
"""
RemoveField should absorb AlterField
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[
migrations.RemoveField("Foo", "age"),
],
)
def _test_create_alter_foo_field(self, alter):
"""
CreateModel, AlterFooTogether/AlterOrderWithRespectTo followed by an
add/alter/rename field should optimize to CreateModel and the Alter*
"""
# AddField
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.AddField("Foo", "c", models.IntegerField()),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
]),
alter,
],
)
# AlterField
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.AlterField("Foo", "b", models.CharField(max_length=255)),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
]),
alter,
migrations.AlterField("Foo", "c", models.CharField(max_length=255)),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.CharField(max_length=255)),
]),
alter,
],
)
# RenameField
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.RenameField("Foo", "b", "c"),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.RenameField("Foo", "b", "x"),
migrations.RenameField("Foo", "x", "c"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.RenameField("Foo", "b", "c"),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
]),
alter,
migrations.RenameField("Foo", "c", "d"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("d", models.IntegerField()),
]),
alter,
],
)
# RemoveField
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.RemoveField("Foo", "b"),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
]),
alter,
migrations.RemoveField("Foo", "c"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
],
)
    def test_create_alter_unique_field(self):
        # Run the shared CreateModel/Alter*/field-op matrix with
        # AlterUniqueTogether referencing fields "a" and "b" in between.
        self._test_create_alter_foo_field(migrations.AlterUniqueTogether("Foo", [["a", "b"]]))
    def test_create_alter_index_field(self):
        # Same matrix of checks, with AlterIndexTogether as the blocker.
        self._test_create_alter_foo_field(migrations.AlterIndexTogether("Foo", [["a", "b"]]))
    def test_create_alter_owrt_field(self):
        # Same matrix of checks, with AlterOrderWithRespectTo on field "b".
        self._test_create_alter_foo_field(migrations.AlterOrderWithRespectTo("Foo", "b"))
    def test_optimize_through_fields(self):
        """
        Checks that field-level through checking is working.
        This should manage to collapse model Foo to nonexistence,
        and model Bar to a single IntegerField called "width".
        """
        # The input interleaves operations on two models; the optimizer must
        # reduce "through" unrelated operations: every Foo operation
        # eventually cancels out (the model is deleted), while Bar collapses
        # to a single CreateModel containing only "width" (the "size" field
        # is renamed to "dimensions" and then removed).
        self.assertOptimizesTo(
            [
                migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
                migrations.CreateModel("Bar", [("size", models.IntegerField())]),
                migrations.AddField("Foo", "age", models.IntegerField()),
                migrations.AddField("Bar", "width", models.IntegerField()),
                migrations.AlterField("Foo", "age", models.IntegerField()),
                migrations.RenameField("Bar", "size", "dimensions"),
                migrations.RemoveField("Foo", "age"),
                migrations.RenameModel("Foo", "Phou"),
                migrations.RemoveField("Bar", "dimensions"),
                migrations.RenameModel("Phou", "Fou"),
                migrations.DeleteModel("Fou"),
            ],
            [
                migrations.CreateModel("Bar", [("width", models.IntegerField())]),
            ],
        )
# frozen_string_literal: true

# Test job that records every Active Job callback invocation into #history
# so the test suite can assert on which hooks fired and in what order.
# Both the enqueue and perform lifecycles are covered, including the
# around_* hooks (which record a *_start marker, yield, then a *_stop marker).
class CallbackJob < ActiveJob::Base
  before_perform ->(job) { job.history << "CallbackJob ran before_perform" }
  after_perform ->(job) { job.history << "CallbackJob ran after_perform" }
  before_enqueue ->(job) { job.history << "CallbackJob ran before_enqueue" }
  after_enqueue ->(job) { job.history << "CallbackJob ran after_enqueue" }
  around_perform do |job, block|
    job.history << "CallbackJob ran around_perform_start"
    block.call
    job.history << "CallbackJob ran around_perform_stop"
  end
  around_enqueue do |job, block|
    job.history << "CallbackJob ran around_enqueue_start"
    block.call
    job.history << "CallbackJob ran around_enqueue_stop"
  end

  # Intentionally empty: only the callbacks matter for these tests.
  def perform(person = "david")
    # NOTHING!
  end

  # Lazily-initialized log of callback markers, inspected by tests.
  def history
    @history ||= []
  end
end
// Number of context lines shown above and below the highlighted line.
const range: number = 2

/**
 * Render a "code frame" for an error location: the source lines around
 * [start, end) with line numbers, plus `^` underline markers beneath the
 * highlighted span. Returns '' when the clamped range is empty.
 *
 * @param source - full source text (may mix \n and \r\n line endings)
 * @param start - start offset of the highlighted span (clamped to source)
 * @param end - end offset of the highlighted span (clamped to source)
 */
export function generateCodeFrame(
  source: string,
  start = 0,
  end: number = source.length,
): string {
  // Ensure start and end are within the source length
  start = Math.max(0, Math.min(start, source.length))
  end = Math.max(0, Math.min(end, source.length))
  if (start > end) return ''
  // Split the content into individual lines but capture the newline sequence
  // that separated each line. This is important because the actual sequence is
  // needed to properly take into account the full line length for offset
  // comparison
  let lines = source.split(/(\r?\n)/)
  // Separate the lines and newline sequences into separate arrays for easier referencing
  const newlineSequences = lines.filter((_, idx) => idx % 2 === 1)
  lines = lines.filter((_, idx) => idx % 2 === 0)
  // `count` tracks the running offset of the end of line i (content + its
  // newline); the offsets decide which lines fall inside [start, end).
  let count = 0
  const res: string[] = []
  for (let i = 0; i < lines.length; i++) {
    count +=
      lines[i].length +
      ((newlineSequences[i] && newlineSequences[i].length) || 0)
    if (count >= start) {
      // Emit `range` lines of context around line i; keep going past the
      // context window while the highlighted span extends beyond `count`.
      for (let j = i - range; j <= i + range || end > count; j++) {
        if (j < 0 || j >= lines.length) continue
        const line = j + 1
        res.push(
          `${line}${' '.repeat(Math.max(3 - String(line).length, 0))}|  ${
            lines[j]
          }`,
        )
        const lineLength = lines[j].length
        const newLineSeqLength =
          (newlineSequences[j] && newlineSequences[j].length) || 0
        if (j === i) {
          // push underline
          const pad = start - (count - (lineLength + newLineSeqLength))
          const length = Math.max(
            1,
            end > count ? lineLength - pad : end - start,
          )
          res.push(`   |  ` + ' '.repeat(pad) + '^'.repeat(length))
        } else if (j > i) {
          if (end > count) {
            // Span continues onto this line: underline up to its end.
            const length = Math.max(Math.min(end - count, lineLength), 1)
            res.push(`   |  ` + '^'.repeat(length))
          }
          // NOTE: count is advanced here (only for j > i) so the loop
          // condition `end > count` sees the offset of line j's end.
          count += lineLength + newLineSeqLength
        }
      }
      break
    }
  }
  return res.join('\n')
}
#!/bin/sh
#
# Copyright (c) 2005 Junio C Hamano
#

test_description='git apply should handle files with incomplete lines.
'
. ./test-lib.sh

# setup
# Four fixture files; frotz.2 and frotz.3 deliberately end WITHOUT a
# trailing newline (tr -d '\012' strips it), i.e. an "incomplete line".
(echo a; echo b) >frotz.0
(echo a; echo b; echo c) >frotz.1
(echo a; echo b | tr -d '\012') >frotz.2
(echo a; echo c; echo b | tr -d '\012') >frotz.3

# Apply every pre-generated diff between distinct fixture pairs (stored in
# t4101/diff.$i-$j) and verify the result matches the target fixture.
for i in 0 1 2 3
do
	for j in 0 1 2 3
	do
		test $i -eq $j && continue
		cat frotz.$i >frotz
		test_expect_success "apply diff between $i and $j" '
			git apply <"$TEST_DIRECTORY"/t4101/diff.$i-$j &&
			test_cmp frotz.$j frotz
		'
	done
done

test_done
<?php

namespace Illuminate\Container\Attributes;

use Attribute;

/**
 * Class-level marker attribute for the container.
 *
 * NOTE(review): the attribute itself carries no data; presumably the
 * container registers classes tagged with it as scoped bindings (one
 * instance per request/lifecycle scope) — confirm against the container's
 * attribute-handling code.
 */
#[Attribute(Attribute::TARGET_CLASS)]
final class Scoped
{
}
---
navigation_title: "Percentiles"
mapped_pages:
- https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-percentile-aggregation.html
---
# Percentiles aggregation [search-aggregations-metrics-percentile-aggregation]
A `multi-value` metrics aggregation that calculates one or more percentiles over numeric values extracted from the aggregated documents. These values can be extracted from specific numeric or [histogram fields](/reference/elasticsearch/mapping-reference/histogram.md) in the documents.
Percentiles show the point at which a certain percentage of observed values occur. For example, the 95th percentile is the value which is greater than 95% of the observed values.
Percentiles are often used to find outliers. In normal distributions, the 0.13th and 99.87th percentiles represents three standard deviations from the mean. Any data which falls outside three standard deviations is often considered an anomaly.
When a range of percentiles are retrieved, they can be used to estimate the data distribution and determine if the data is skewed, bimodal, etc.
Assume your data consists of website load times. The average and median load times are not overly useful to an administrator. The max may be interesting, but it can be easily skewed by a single slow response.
Let’s look at a range of percentiles representing load time:
```console
GET latency/_search
{
"size": 0,
"aggs": {
"load_time_outlier": {
"percentiles": {
"field": "load_time" <1>
}
}
}
}
```
% TEST[setup:latency]
1. The field `load_time` must be a numeric field
By default, the `percentile` metric will generate a range of percentiles: `[ 1, 5, 25, 50, 75, 95, 99 ]`. The response will look like this:
```console-result
{
...
"aggregations": {
"load_time_outlier": {
"values": {
"1.0": 10.0,
"5.0": 30.0,
"25.0": 170.0,
"50.0": 445.0,
"75.0": 720.0,
"95.0": 940.0,
"99.0": 980.0
}
}
}
}
```
% TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]
% TESTRESPONSE[s/"1.0": 10.0/"1.0": 9.9/]
% TESTRESPONSE[s/"5.0": 30.0/"5.0": 29.5/]
% TESTRESPONSE[s/"25.0": 170.0/"25.0": 167.5/]
% TESTRESPONSE[s/"50.0": 445.0/"50.0": 445.0/]
% TESTRESPONSE[s/"75.0": 720.0/"75.0": 722.5/]
% TESTRESPONSE[s/"95.0": 940.0/"95.0": 940.5/]
% TESTRESPONSE[s/"99.0": 980.0/"99.0": 980.1/]
As you can see, the aggregation will return a calculated value for each percentile in the default range. If we assume response times are in milliseconds, it is immediately obvious that the webpage normally loads in 10-720ms, but occasionally spikes to 940-980ms.
Often, administrators are only interested in outliers — the extreme percentiles. We can specify just the percents we are interested in (requested percentiles must be a value between 0-100 inclusive):
```console
GET latency/_search
{
"size": 0,
"aggs": {
"load_time_outlier": {
"percentiles": {
"field": "load_time",
"percents": [ 95, 99, 99.9 ] <1>
}
}
}
}
```
% TEST[setup:latency]
1. Use the `percents` parameter to specify particular percentiles to calculate
## Keyed Response [_keyed_response_6]
By default the `keyed` flag is set to `true` which associates a unique string key with each bucket and returns the ranges as a hash rather than an array. Setting the `keyed` flag to `false` will disable this behavior:
```console
GET latency/_search
{
"size": 0,
"aggs": {
"load_time_outlier": {
"percentiles": {
"field": "load_time",
"keyed": false
}
}
}
}
```
% TEST[setup:latency]
Response:
```console-result
{
...
"aggregations": {
"load_time_outlier": {
"values": [
{
"key": 1.0,
"value": 10.0
},
{
"key": 5.0,
"value": 30.0
},
{
"key": 25.0,
"value": 170.0
},
{
"key": 50.0,
"value": 445.0
},
{
"key": 75.0,
"value": 720.0
},
{
"key": 95.0,
"value": 940.0
},
{
"key": 99.0,
"value": 980.0
}
]
}
}
}
```
% TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]
% TESTRESPONSE[s/"value": 10.0/"value": 9.9/]
% TESTRESPONSE[s/"value": 30.0/"value": 29.5/]
% TESTRESPONSE[s/"value": 170.0/"value": 167.5/]
% TESTRESPONSE[s/"value": 445.0/"value": 445.0/]
% TESTRESPONSE[s/"value": 720.0/"value": 722.5/]
% TESTRESPONSE[s/"value": 940.0/"value": 940.5/]
% TESTRESPONSE[s/"value": 980.0/"value": 980.1/]
## Script [_script_10]
If you need to run the aggregation against values that aren’t indexed, use a [runtime field](docs-content://manage-data/data-store/mapping/runtime-fields.md). For example, if our load times are in milliseconds but you want percentiles calculated in seconds:
```console
GET latency/_search
{
"size": 0,
"runtime_mappings": {
"load_time.seconds": {
"type": "long",
"script": {
"source": "emit(doc['load_time'].value / params.timeUnit)",
"params": {
"timeUnit": 1000
}
}
}
},
"aggs": {
"load_time_outlier": {
"percentiles": {
"field": "load_time.seconds"
}
}
}
}
```
% TEST[setup:latency]
% TEST[s/_search/_search?filter_path=aggregations/]
% TEST[s/"timeUnit": 1000/"timeUnit": 10/]
## Percentiles are (usually) approximate [search-aggregations-metrics-percentile-aggregation-approximation]
There are many different algorithms to calculate percentiles. The naive implementation simply stores all the values in a sorted array. To find the 50th percentile, you simply find the value that is at `my_array[count(my_array) * 0.5]`.
Clearly, the naive implementation does not scale — the sorted array grows linearly with the number of values in your dataset. To calculate percentiles across potentially billions of values in an Elasticsearch cluster, *approximate* percentiles are calculated.
The algorithm used by the `percentile` metric is called TDigest (introduced by Ted Dunning in [Computing Accurate Quantiles using T-Digests](https://github.com/tdunning/t-digest/blob/master/docs/t-digest-paper/histo.pdf)).
When using this metric, there are a few guidelines to keep in mind:
* Accuracy is proportional to `q(1-q)`. This means that extreme percentiles (e.g. 99%) are more accurate than less extreme percentiles, such as the median
* For small sets of values, percentiles are highly accurate (and potentially 100% accurate if the data is small enough).
* As the quantity of values in a bucket grows, the algorithm begins to approximate the percentiles. It is effectively trading accuracy for memory savings. The exact level of inaccuracy is difficult to generalize, since it depends on your data distribution and volume of data being aggregated
The following chart shows the relative error on a uniform distribution depending on the number of collected values and the requested percentile:

It shows how precision is better for extreme percentiles. The reason why error diminishes for large number of values is that the law of large numbers makes the distribution of values more and more uniform and the t-digest tree can do a better job at summarizing it. It would not be the case on more skewed distributions.
::::{warning}
Percentile aggregations are also [non-deterministic](https://en.wikipedia.org/wiki/Nondeterministic_algorithm). This means you can get slightly different results using the same data.
::::
## Compression [search-aggregations-metrics-percentile-aggregation-compression]
Approximate algorithms must balance memory utilization with estimation accuracy. This balance can be controlled using a `compression` parameter:
```console
GET latency/_search
{
"size": 0,
"aggs": {
"load_time_outlier": {
"percentiles": {
"field": "load_time",
"tdigest": {
"compression": 200 <1>
}
}
}
}
}
```
% TEST[setup:latency]
1. Compression controls memory usage and approximation error
The TDigest algorithm uses a number of "nodes" to approximate percentiles — the more nodes available, the higher the accuracy (and the larger the memory footprint), proportional to the volume of data. The `compression` parameter limits the maximum number of nodes to `20 * compression`.
Therefore, by increasing the compression value, you can increase the accuracy of your percentiles at the cost of more memory. Larger compression values also make the algorithm slower since the underlying tree data structure grows in size, resulting in more expensive operations. The default compression value is `100`.
A "node" uses roughly 32 bytes of memory, so under worst-case scenarios (large amount of data which arrives sorted and in-order) the default settings will produce a TDigest roughly 64KB in size. In practice data tends to be more random and the TDigest will use less memory.
## Execution hint [search-aggregations-metrics-percentile-aggregation-execution-hint]
The default implementation of TDigest is optimized for performance, scaling to millions or even billions of sample values while maintaining acceptable accuracy levels (close to 1% relative error for millions of samples in some cases). There’s an option to use an implementation optimized for accuracy by setting parameter `execution_hint` to value `high_accuracy`:
```console
GET latency/_search
{
"size": 0,
"aggs": {
"load_time_outlier": {
"percentiles": {
"field": "load_time",
"tdigest": {
"execution_hint": "high_accuracy" <1>
}
}
}
}
}
```
% TEST[setup:latency]
1. Optimize TDigest for accuracy, at the expense of performance
This option can lead to improved accuracy (relative error close to 0.01% for millions of samples in some cases) but then percentile queries take 2x-10x longer to complete.
## HDR histogram [_hdr_histogram_2]
[HDR Histogram](https://github.com/HdrHistogram/HdrHistogram) (High Dynamic Range Histogram) is an alternative implementation that can be useful when calculating percentiles for latency measurements as it can be faster than the t-digest implementation with the trade-off of a larger memory footprint. This implementation maintains a fixed worst-case percentage error (specified as a number of significant digits). This means that if data is recorded with values from 1 microsecond up to 1 hour (3,600,000,000 microseconds) in a histogram set to 3 significant digits, it will maintain a value resolution of 1 microsecond for values up to 1 millisecond and 3.6 seconds (or better) for the maximum tracked value (1 hour).
The HDR Histogram can be used by specifying the `hdr` parameter in the request:
```console
GET latency/_search
{
"size": 0,
"aggs": {
"load_time_outlier": {
"percentiles": {
"field": "load_time",
"percents": [ 95, 99, 99.9 ],
"hdr": { <1>
"number_of_significant_value_digits": 3 <2>
}
}
}
}
}
```
% TEST[setup:latency]
1. `hdr` object indicates that HDR Histogram should be used to calculate the percentiles and specific settings for this algorithm can be specified inside the object
2. `number_of_significant_value_digits` specifies the resolution of values for the histogram in number of significant digits
The HDRHistogram only supports positive values and will error if it is passed a negative value. It is also not a good idea to use the HDRHistogram if the range of values is unknown as this could lead to high memory usage.
## Missing value [_missing_value_14]
The `missing` parameter defines how documents that are missing a value should be treated. By default they will be ignored but it is also possible to treat them as if they had a value.
```console
GET latency/_search
{
"size": 0,
"aggs": {
"grade_percentiles": {
"percentiles": {
"field": "grade",
"missing": 10 <1>
}
}
}
}
```
% TEST[setup:latency]
1. Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`. | unknown | github | https://github.com/elastic/elasticsearch | docs/reference/aggregations/search-aggregations-metrics-percentile-aggregation.md |
imports:
    - { resource: default.yml }

parameters:
    # Default values for the env vars the pools below read their
    # default_lifetime from (an ISO-8601 interval and a textual expression).
    env(LIFETIME_INTERVAL): 'PT10S'
    env(LIFETIME_EXPRESSION): '13 seconds'

framework:
    cache:
        pools:
            # Lifetime resolved from an env var holding a textual expression.
            cache.pool1:
                public: true
                adapter: cache.system
                default_lifetime: '%env(LIFETIME_EXPRESSION)%'
            # Chained onto pool3; lifetime from an ISO-8601 interval env var.
            cache.pool2:
                public: true
                adapter: cache.pool3
                default_lifetime: '%env(LIFETIME_INTERVAL)%'
            # No clearer attached.
            cache.pool3:
                clearer: ~
            # Tag-aware pool (implicit tag pool).
            cache.pool4:
                tags: true
            # Tag-aware pools delegating tag storage to another pool.
                public: true
            cache.pool5:
                tags: cache.pool2
                public: true
            cache.pool6:
                tags: cache.pool4
                public: true
            # Plain pool layered on the tag-aware pool4.
            cache.pool7:
                adapter: cache.pool4
                public: true
#!/usr/bin/env python
import sys
import os
import re
### output cpp file
# qualifiers:
# c : embedded constant
# m : embedded mutable
# n : native
# plus:
# v : vector
# vv : vector vector
# vvv : vector vector vector
# types:
# - native: void, bool, int, string
# - objects: the rest
def is_native(arg_qual):
    # Qualifier marks a native type (void, bool, int, string).
    return (arg_qual[0] == 'n')
def is_mutable(arg_qual):
    # Qualifier marks an embedded mutable object.
    return (arg_qual[0] == 'm')
def is_const(arg_qual):
    # Qualifier marks an embedded constant object.
    return (arg_qual[0] == 'c')
def forall(p, s):
    """Return True iff predicate p holds for every element of sequence s.

    Bug fix: the original tested p(s) -- the whole sequence -- on every
    iteration instead of p(x), the current element, so the result never
    depended on the individual elements.
    """
    for x in s:
        if not p(x):
            return False
    return True
def check_arg_qual(arg_qual):
    # Validate a qualifier string: first char must be one of n/m/c,
    # optionally followed by 'v' characters (vector nesting levels).
    # NOTE(review): callers discard the return value, so this check is
    # currently not enforced -- confirm whether it should abort on failure.
    return \
        (is_native(arg_qual) or is_mutable(arg_qual) or is_const(arg_qual)) \
        and \
        forall(lambda q: q == 'v', arg_qual[1:])
def is_vector(arg_qual):
    # At least one 'v' after the base qualifier: a vector type.
    return (len(arg_qual) > 1 and arg_qual[1] == 'v')
def is_vector2(arg_qual):
    # Two nesting levels: vector of vectors.
    return (is_vector(arg_qual) and len(arg_qual) > 2 and arg_qual[2] == 'v')
def is_vector3(arg_qual):
    # Three nesting levels: vector of vectors of vectors.
    return (is_vector2(arg_qual) and len(arg_qual) > 3 and arg_qual[3] == 'v')
# returns the jni type corresponding to the pseudo type signature
def arg_type_to_java((arg_qual, arg_type, arg_name)):
    """Map a (qualifier, type, name) pseudo-signature entry to its JNI type.

    Native scalars map to jboolean/jint/jstring (or the matching *Array for
    vectors); everything else is passed as jobject / jobjectArray.
    Exits with status 1 on an inconsistent qualifier/type combination.
    """
    check_arg_qual(arg_qual);
    if arg_type == "jobject":
        # Raw jobject must be a plain native, never a vector.
        if (not is_native(arg_qual)) or is_vector(arg_qual):
            print("Error: native defined in implementation with qualifier: " + arg_qual)
            sys.exit(1)
        return "jobject"
    elif arg_type == "bool":
        if not is_native(arg_qual):
            print("Error: bool defined in implementation with qualifier: " + arg_qual)
            sys.exit(1)
        if is_vector(arg_qual):
            return "jbooleanArray"
        else:
            return "jboolean"
    elif arg_type == "int":
        if not is_native(arg_qual):
            print("Error: int defined in implementation with qualifier: " + arg_qual)
            sys.exit(1)
        if is_vector(arg_qual):
            return "jintArray"
        else:
            return "jint"
    elif arg_type == "string":
        if not is_native(arg_qual):
            print("Error: string defined in implementation with qualifier: " + arg_qual)
            sys.exit(1)
        if is_vector(arg_qual):
            # string vectors are passed as arrays of jstring objects
            return "jobjectArray"
        else:
            return "jstring"
    else:
        # Embedded C++ object types (const or mutable).
        if is_vector(arg_qual):
            return "jobjectArray"
        else:
            return "jobject"
def print_unembed_arg(cpp_file, (arg_qual, arg_type, arg_name)):
    """Emit C++ code that converts one JNI parameter j<name> into a local
    C++ variable <name>, dispatching on type and vector-nesting depth.

    Native scalars/vectors go through the toCpp* helpers; embedded object
    types are unembedded as const or mutable pointers.
    """
    check_arg_qual(arg_qual);
    if arg_type == "jobject":
        # Raw jobject: used as-is, no conversion code needed.
        ()
    elif arg_type == "bool":
        if is_vector3(arg_qual):
            cpp_file.write("    vector<vector<vector<bool> > > " + arg_name \
                           + "(toCppVVV(env, j" + arg_name + "));\n");
        elif is_vector2(arg_qual):
            cpp_file.write("    vector<vector<bool> > " + arg_name \
                           + "(toCppVV(env, j" + arg_name + "));\n");
        elif is_vector(arg_qual):
            cpp_file.write("    vector<bool> " + arg_name \
                           + "(toCppV(env, j" + arg_name + "));\n");
        else:
            # jboolean converts implicitly.
            cpp_file.write("    bool " + arg_name + "(j" + arg_name + ");\n");
    elif arg_type == "int":
        if is_vector3(arg_qual):
            cpp_file.write("    vector<vector<vector<int> > > " + arg_name \
                           + "(toCppVVV(env, j" + arg_name + "));\n");
        elif is_vector2(arg_qual):
            cpp_file.write("    vector<vector<int> > " + arg_name \
                           + "(toCppVV(env, j" + arg_name + "));\n");
        elif is_vector(arg_qual):
            cpp_file.write("    vector<int> " + arg_name \
                           + "(toCppV(env, j" + arg_name + "));\n");
        else:
            cpp_file.write("    int " + arg_name + "(j" + arg_name + ");\n");
    elif arg_type == "string":
        if is_vector3(arg_qual):
            cpp_file.write("    vector<vector<vector<string> > > " + arg_name \
                           + "(toCppVVV(env, j" + arg_name + "));\n");
        elif is_vector2(arg_qual):
            cpp_file.write("    vector<vector<string> > " + arg_name \
                           + "(toCppVV(env, j" + arg_name + "));\n");
        elif is_vector(arg_qual):
            cpp_file.write("    vector<string> " + arg_name \
                           + "(toCppV(env, j" + arg_name + "));\n");
        else:
            cpp_file.write("    string " + arg_name + "(toCpp(env, j" + arg_name + "));\n");
    else:
        # Embedded object type: helpers are templated on the C++ type.
        if is_vector3(arg_qual):
            cpp_file.write("    vector<vector<vector<" + arg_type + "> > > " + arg_name \
                           + "(toCppVVV<" + arg_type + ">(env, j" + arg_name + "));\n");
        elif is_vector2(arg_qual):
            cpp_file.write("    vector<vector<" + arg_type + "> > " + arg_name \
                           + "(toCppVV<" + arg_type + ">(env, j" + arg_name + "));\n");
        elif is_vector(arg_qual):
            cpp_file.write("    vector<" + arg_type + "> " + arg_name \
                           + "(toCppV<" + arg_type + ">(env, j" + arg_name + "));\n");
        elif is_const(arg_qual):
            cpp_file.write("    const " + arg_type + "* " + arg_name \
                           + " = unembed_const<" + arg_type + ">(env, j" + arg_name + ");\n");
        else:
            cpp_file.write("    " + arg_type + "* " + arg_name \
                           + " = unembed_mut<" + arg_type + ">(env, j" + arg_name + ");\n");
def print_unembed_args(cpp_file, args):
    # Emit the conversion code for every argument in declaration order.
    for arg in args:
        print_unembed_arg(cpp_file, arg)
# check that declaration and definition signatures match
def match_signatures((decl_result, decl_args), (def_result, def_args, _)):
    """Return True iff the header declaration and the implementation
    definition agree on result type and on every argument's JNI type."""
    if decl_result != def_result or len(decl_args) != len(def_args):
        return False
    for i in xrange(0, len(decl_args)):
        # Map the implementation's pseudo type to its JNI type and compare.
        java_type = arg_type_to_java(def_args[i])
        #print java_type
        if decl_args[i] != java_type:
            return False
    return True
def print_header(cpp_file, includes):
    """Write the generated file's preamble: one #include per requested
    header, then the fixed JNI-glue include and using-directives."""
    for include_name in includes:
        cpp_file.write("#include " + include_name + "\n")
    cpp_file.write(
        "#include \"JniUtils.h\"\n"
        "\n"
        "using namespace std;\n"
        "using namespace Java_cvc3_JniUtils;\n"
        "using namespace CVC3;\n"
        "\n")
def print_signature(cpp_file, name, result, args):
    """Write the JNIEXPORT signature line for one exported function.

    The first two C parameters are always JNIEnv* and the (unnamed) jclass;
    remaining parameters are j-prefixed JNI translations of `args`.
    """
    arg_strings = ["JNIEnv* env", "jclass"]
    arg_strings.extend( \
        map(lambda (argQual, argType, argName): \
            arg_type_to_java((argQual, argType, argName)) \
            + " j" + argName, args))
    cpp_file.writelines([
        "JNIEXPORT " + result + " JNICALL " + name + "\n",
        "(" + ", ".join(arg_strings) + ")\n"])
def print_definition(cpp_file, name, (result, args, body)):
    """Write one extern "C" function definition.

    The body is wrapped in try/catch: CVC3 exceptions are forwarded to Java
    via toJava(), and a type-appropriate dummy value is returned so the C++
    compiler is satisfied after the exception path.
    """
    cpp_file.writelines(["extern \"C\"\n"])
    print_signature(cpp_file, name, result, args)
    cpp_file.writelines([
        "{\n",
        "  try {\n"])
    # Convert each JNI argument into a C++ local before the user body runs.
    print_unembed_args(cpp_file, args)
    cpp_file.writelines([
        "    " + "    ".join(body),
        "  } catch (const Exception& e) {\n",
        "    toJava(env, e);\n"])
    # Dummy return value after the exception has been forwarded.
    if result in [ "jobject", "jobjectArray", "jstring" ]:
        cpp_file.writelines(["    return NULL;\n"])
    elif result == "jboolean":
        cpp_file.writelines(["    return false;\n"])
    elif result == "jint":
        cpp_file.writelines(["    return -1;\n"])
    elif result <> "void":
        print("BUG: return type " + result + " is not handled in print_definition")
        sys.exit(1)
    cpp_file.writelines(["  };\n",
                         "}\n\n"])
def print_cpp(cpp_name, declarations, definitions, includes):
    """Generate the output .cpp: preamble plus one definition per declared
    function, checking that declarations and definitions pair up exactly.

    NOTE(review): `definitions` is consumed (entries popped) as a side
    effect; also, failure to open the output exits with status 0 while
    mismatch errors exit with 1 -- confirm the intended exit codes.
    """
    try:
        cpp_file = open(cpp_name, 'w')
        print_header(cpp_file, includes)
        #names = declarations.keys()
        #names.sort()
        # Emit in the order the declarations appeared in the header.
        for name in declarations[0]:
            if not definitions.has_key(name):
                #continue
                print("Error: " + name + " is declared in header" \
                      + " but not defined in implementation.")
                sys.exit(1)
            declaration = declarations[1][name]
            definition = definitions[name]
            definitions.pop(name)
            if not match_signatures(declaration, definition):
                print("Error: signature for " + name \
                      + " in definition and implementation do not match:")
                print declaration
                print (definition[0], definition[1])
                sys.exit(1)
            print_definition(cpp_file, name, definition)
        # Anything left over was defined but never declared.
        if not len(definitions) == 0:
            print("Error: found definitions in implementation" \
                  " without declaration in header file:")
            print definitions
            sys.exit(1)
    except IOError, (error_nr, error_string):
        print ("Couldn't open " + cpp_name + ": " + error_string)
        sys.exit(0)
### header file function declarations
# header file function declaration:
# - name: function name
# - result: result type
# - args: list of argument types, except for the first two (JNIEnv*, jclass)
def register_declaration(declarations, name, result, args):
    """Record one declared JNI function.

    declarations is a pair: (ordered list of names, {name: (result, args)}).
    The list preserves the header's declaration order for later emission.
    Duplicate declarations are a programming error (asserted).
    """
    # `in` instead of the deprecated dict.has_key (same behavior in
    # Python 2, and keeps the helper importable under Python 3).
    assert(not name in declarations[1]);
    declarations[0].append(name)
    declarations[1][name] = (result, args)
# extract function signatures from generated JNI header file
def read_header(header_name):
    """Parse a javah-generated JNI header and collect function signatures.

    Returns a pair: (names in file order, {name: (result, [arg types])}).
    Exits with status 0 if the header cannot be opened.
    """
    # 0.: names of declared functions in same order as in input
    # 1.: map from names to signature
    declarations = ([], {})
    try:
        header_file = open(header_name)
        line = header_file.readline()
        while (line):
            # look for start of signature
            # declaration will look like:
            #  JNIEXPORT <result> JNICALL <name> (JNIENV *env, jclass, jobject);
            # with an optional linebreak before the parameter list, and
            # perhaps missing the "env"
            if re.search("^JNIEXPORT", line):
                # extract name and return type
                # (punctuation is squashed to spaces before tokenizing)
                elements = re.sub('[,();]+',' ',line).split();
                assert(elements[0] == "JNIEXPORT");
                assert(elements[2] == "JNICALL");
                name = elements[3]
                result = elements[1]
                # If there are no more elements on this line,
                # read and tokenize the next line
                if len(elements) > 4:
                    elements = elements[4:]
                else:
                    line = header_file.readline ()
                    elements = re.sub('[,();]+',' ',line).split();
                # extract argument types
                assert(elements[0] == "JNIEnv");
                assert(elements[1] == "*" or elements[1] == "*env");
                assert(elements[2] == "jclass")
                args = elements[3:]
                register_declaration(declarations, name, result, args)
            line = header_file.readline ()
        header_file.close()
    except IOError, (error_nr, error_string):
        print ("Couldn't open " + header_name + ": " + error_string)
        sys.exit(0)
    return declarations
# function definitions:
# cpp file function definition:
# - name: function name
# - result: result type
# - args: list of pairs of argument types and argument names,
# except for the first two (JNIEnv*, jclass)
def register_definition(definitions, name, result, args, body):
    """Record one implementation-file definition; redefinition is fatal."""
    if definitions.has_key(name):
        print("Error: redefinition of " + name + " in implementation.")
        sys.exit(1)
    definitions[name] = (result, args, body)
    #print_definition(name, declarations[name])
# extract function definition from implementation file
def read_impl(impl_name):
    """Parse the implementation template file.

    The file interleaves `INCLUDE: <header>` lines with definitions of the
    form `DEFINITION: <name>` followed by a signature line
    (`<result> {<qual> <type> <name>}*`) and the raw C++ body, terminated by
    the next DEFINITION or end of file.

    Returns (definitions map, list of include names). Exits with status 0
    if the file cannot be opened, 1 on malformed input.
    """
    definitions = {}
    includes = []
    try:
        impl_file = open(impl_name)
        line = impl_file.readline()
        while (line):
            # look for include
            if re.search("^INCLUDE:", line):
                elements = line.split();
                assert(len(elements) == 2);
                assert(elements[0] == "INCLUDE:")
                includes.append(elements[1])
                line = impl_file.readline()
                #print line
            # look for start of definition
            elif re.search("^DEFINITION:", line):
                #print line,
                # get name
                elements = line.split();
                if not (len(elements) == 2):
                    print("Error: misformed signature: " + line)
                    sys.exit(1)
                assert(elements[0] == "DEFINITION:")
                name = elements[1]
                # get signature: result type followed by (qual, type, name)
                # triples, so the token count must be 1 mod 3
                line = impl_file.readline ()
                elements = line.split();
                assert(len(elements) > 0);
                if not (len(elements) % 3 == 1):
                    print("Error: misformed signature for: " + name)
                    print(line)
                    sys.exit(1)
                result = elements.pop(0)
                args = []
                while len(elements) > 0:
                    argQual = elements.pop(0)
                    argType = elements.pop(0)
                    argName = elements.pop(0)
                    args.append((argQual, argType, argName))
                # get body: raw lines up to the next DEFINITION marker,
                # with trailing blank lines stripped
                body = []
                line = impl_file.readline ()
                while line and not re.search("^DEFINITION:", line):
                    body.append(line)
                    line = impl_file.readline()
                while body and body[len(body) - 1] == "\n":
                    body.pop(len(body) - 1)
                assert(len(body) > 0)
                register_definition(definitions, name, result, args, body)
            else:
                # Unrecognized line: skip it.
                line = impl_file.readline()
        impl_file.close()
    except IOError, (error_nr, error_string):
        print ("Couldn't open " + impl_name + ": " + error_string)
        sys.exit(0)
    return definitions, includes
# read name of input file
# Usage: ./create_impl.py <H_FILE> <IMPL_FILE> <CPP_FILE>
# NOTE(review): the usage-error path exits with status 0 -- confirm whether
# a nonzero code (as used for parse errors) was intended.
if (len(sys.argv) != 4):
    print("Expected path to header, implementation, and target file.")
    print("")
    print("./create_impl.py <H_FILE> <IMPL_FILE> <CPP_FILE>")
    sys.exit(0)
else:
    #print(sys.argv)
    header_name = sys.argv[1]
    impl_name = sys.argv[2]
    cpp_file = sys.argv[3]
    # extract information from header
    declarations = read_header(header_name)
    #print declarations
    # extract information from template
    definitions, includes = read_impl(impl_name)
    #print definitions
    # create implementation
    print_cpp(cpp_file, declarations, definitions, includes)
- set_fact:
test_ppa_name: 'ppa:git-core/ppa'
test_ppa_filename: 'git-core'
test_ppa_spec: 'deb https://ppa.launchpadcontent.net/git-core/ppa/ubuntu {{ansible_distribution_release}} main'
test_ppa_key: 'E1DF1F24' # http://keyserver.ubuntu.com:11371/pks/lookup?search=0xD06AAF4C11DAB86DF421421EFE6B20ECA7AD98A1&op=index
python_apt: python3-apt
- name: verify that comments are preserved unmodified when writing their source file
vars:
test_source_filename: ansible_test_comment
test_source_path: "/etc/apt/sources.list.d/{{ test_source_filename }}.list"
block:
- name: ensure the test source is absent
file:
path: "{{ test_source_path }}"
state: absent
- name: add the test PPA to the test source
apt_repository:
repo: "{{ test_ppa_name }}"
filename: "{{ test_source_filename }}"
update_cache: false
register: add_ppa
- name: verify the expected test source was added
assert:
that:
- add_ppa.sources_added | length == 1
- add_ppa.sources_added[0] == test_source_path
- name: overwrite the test source with a comment
copy:
content: "## comment"
dest: "{{ test_source_path }}"
- name: add the test PPA to the test source again
apt_repository:
repo: "{{ test_ppa_name }}"
filename: "{{ test_source_filename }}"
update_cache: false
register: add_ppa
- name: verify no sources were added
assert:
that:
- add_ppa.sources_added | length == 0
- name: read the test source
slurp:
src: "{{ test_source_path }}"
register: test_source
- name: decode the contents of the test source
set_fact:
test_source_content: "{{ test_source.content | b64decode }}"
- name: verify the comment in the test source was preserved
assert:
that:
- '"# # comment\n" in test_source_content' # bug, see: https://github.com/ansible/ansible/issues/54403
# - '"## comment\n" in test_source_content' # correct behavior
always:
- name: ensure the test source is absent
file:
path: "{{ test_source_path }}"
state: absent
# UNINSTALL 'python-apt'
# The `apt_repository` module has the smarts to auto-install `python-apt`. To
# test, we will first uninstall `python-apt`.
- name: check {{ python_apt }} with dpkg
shell: dpkg -s {{ python_apt }}
register: dpkg_result
ignore_errors: true
- name: uninstall {{ python_apt }} with apt
apt: pkg={{ python_apt }} state=absent purge=yes
register: apt_result
when: dpkg_result is successful
#
# TEST: apt_repository: repo=<name>
#
- import_tasks: 'cleanup.yml'
- name: 'record apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_before
- name: 'name=<name> (expect: pass)'
apt_repository: repo='{{test_ppa_name}}' state=present
register: result
- name: 'assert the repository was reported as added'
assert:
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == test_ppa_name'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_after
- name: 'assert the apt cache did change'
assert:
that:
- 'cache_before.stat.mtime != cache_after.stat.mtime'
- name: 'ensure ppa key is installed (expect: pass)'
apt_key:
id: '{{test_ppa_key}}'
state: present
keyserver: keyserver.ubuntu.com
#
# TEST: apt_repository: repo=<name> update_cache=no
#
- import_tasks: 'cleanup.yml'
- name: 'record apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_before
- name: 'name=<name> update_cache=no (expect: pass)'
apt_repository: repo='{{test_ppa_name}}' state=present update_cache=no
register: result
- assert:
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == test_ppa_name'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_after
- name: 'assert the apt cache did *NOT* change'
assert:
that:
- 'cache_before.stat.mtime == cache_after.stat.mtime'
- name: 'ensure ppa key is installed (expect: pass)'
apt_key:
id: '{{test_ppa_key}}'
state: present
keyserver: keyserver.ubuntu.com
#
# TEST: apt_repository: repo=<name> update_cache=yes
#
- import_tasks: 'cleanup.yml'
- name: 'record apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_before
- name: 'name=<name> update_cache=yes (expect: pass)'
apt_repository: repo='{{test_ppa_name}}' state=present update_cache=yes
register: result
- assert:
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == test_ppa_name'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_after
- name: 'assert the apt cache did change'
assert:
that:
- 'cache_before.stat.mtime != cache_after.stat.mtime'
- name: 'ensure ppa key is installed (expect: pass)'
apt_key:
id: '{{test_ppa_key}}'
state: present
keyserver: keyserver.ubuntu.com
#
# TEST: apt_repository: repo=<spec>
#
- import_tasks: 'cleanup.yml'
- name: 'record apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_before
- name: ensure ppa key is present before adding repo that requires authentication
apt_key:
id: '{{test_ppa_key}}'
state: present
keyserver: keyserver.ubuntu.com
- name: 'name=<spec> (expect: pass)'
apt_repository: repo='{{test_ppa_spec}}' state=present
register: result
- name: update the cache
apt:
update_cache: true
register: result_cache
- assert:
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == test_ppa_spec'
- '"sources_added" in result'
- 'result.sources_added | length == 1'
- '"git" in result.sources_added[0]'
- '"sources_removed" in result'
- 'result.sources_removed | length == 0'
- result_cache is not changed
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_after
- name: 'assert the apt cache did change'
assert:
that:
- 'cache_before.stat.mtime != cache_after.stat.mtime'
- name: remove repo by spec
apt_repository: repo='{{test_ppa_spec}}' state=absent
register: result
- assert:
that:
- 'result.changed'
- 'result.state == "absent"'
- 'result.repo == test_ppa_spec'
- '"sources_added" in result'
- 'result.sources_added | length == 0'
- '"sources_removed" in result'
- 'result.sources_removed | length == 1'
- '"git" in result.sources_removed[0]'
# When installing a repo with the spec, the key is *NOT* added
- name: 'ensure ppa key is absent (expect: pass)'
apt_key: id='{{test_ppa_key}}' state=absent
#
# TEST: apt_repository: repo=<spec> filename=<filename>
#
- import_tasks: 'cleanup.yml'
- name: 'record apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_before
- name: ensure ppa key is present before adding repo that requires authentication
apt_key:
id: '{{test_ppa_key}}'
state: present
keyserver: keyserver.ubuntu.com
- name: 'name=<spec> filename=<filename> (expect: pass)'
apt_repository: repo='{{test_ppa_spec}}' filename='{{test_ppa_filename}}' state=present
register: result
- assert:
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == test_ppa_spec'
- name: 'examine source file'
stat: path='/etc/apt/sources.list.d/{{test_ppa_filename}}.list'
register: source_file
- name: 'assert source file exists'
assert:
that:
- 'source_file.stat.exists == True'
- name: 'examine apt cache mtime'
stat: path='/var/cache/apt/pkgcache.bin'
register: cache_after
- name: 'assert the apt cache did change'
assert:
that:
- 'cache_before.stat.mtime != cache_after.stat.mtime'
# When installing a repo with the spec, the key is *NOT* added
- name: 'ensure ppa key is absent (expect: pass)'
apt_key: id='{{test_ppa_key}}' state=absent
- name: Test apt_repository with a null value for repo
apt_repository:
repo:
register: result
ignore_errors: yes
- assert:
that:
- result is failed
- result.msg == 'Please set argument \'repo\' to a non-empty value'
- name: Test apt_repository with an empty value for repo
apt_repository:
repo: ""
register: result
ignore_errors: yes
- assert:
that:
- result is failed
- result.msg == 'Please set argument \'repo\' to a non-empty value'
#
# TEST: keep symlink
#
- import_tasks: 'cleanup.yml'
- name: install local-apt-repository with apt
apt: pkg=local-apt-repository state=present
- name: Check if local apt repo file is a symlink
stat:
path: /etc/apt/sources.list.d/local-apt-repository.list
register: stat_result
- name: Assert if local apt repo file is a symlink
assert:
that:
- stat_result.stat.islnk is defined and stat_result.stat.islnk
- stat_result.stat.lnk_source == "/usr/lib/local-apt-repository/local-apt-repository.list"
- name: Try installing an invalid repo
apt_repository:
repo: deb http://dl.google.com/linux/chrome/deb2/ stable main
state: present
filename: google-chrome
ignore_errors: true
- name: Check the stat for the given symlink
stat:
path: /etc/apt/sources.list.d/local-apt-repository.list
register: stat_result2
- name: Assert that the symlink is intact after apt_repository operation
assert:
that:
- stat_result2.stat.islnk is defined and stat_result2.stat.islnk
- stat_result2.stat.lnk_source == "/usr/lib/local-apt-repository/local-apt-repository.list"
- name: uninstall local-apt-repository with apt
apt: pkg=local-apt-repository state=absent purge=yes
#
# TEST: PPA HTTPS URL
#
- name: Add PPA using HTTPS URL
apt_repository:
repo: 'ppa:deadsnakes'
filename: 'deadsnakes'
state: present
register: result
- name: Check if PPA using HTTPS URL is added
assert:
that:
- 'result.changed'
- 'result.state == "present"'
- 'result.repo == "ppa:deadsnakes"'
- name: 'examine source file'
stat:
path: '/etc/apt/sources.list.d/deadsnakes.list'
register: source_file
- name: 'assert source file exists'
assert:
that:
- 'source_file.stat.exists == True'
- name: Check if the PPA URL
shell: "grep 'https://ppa.launchpadcontent.net' /etc/apt/sources.list.d/deadsnakes.list"
register: r
- name: Test if PPA URL points to https URL
assert:
that:
- r.changed
- "'https://ppa.launchpadcontent.net' in r.stdout"
- name: Remove PPA file
file:
path: '/etc/apt/sources.list.d/deadsnakes.list'
state: absent
#
# TEARDOWN
#
- import_tasks: 'cleanup.yml' | unknown | github | https://github.com/ansible/ansible | test/integration/targets/apt_repository/tasks/apt.yml |
#! /usr/bin/env python3
import os
import subprocess
import sys
def svn(*args, svn_dir=None, capture_output=False):
    """Run an svn subcommand, exiting the whole process if it fails.

    args           -- subcommand and flags, e.g. ('info', url)
    svn_dir        -- optional working directory for the command
    capture_output -- when True, return svn's stdout as a string; otherwise
                      stdout goes straight to the caller's stdout and None
                      is returned
    """
    # Avoid forgetting this arg.
    assert svn_dir is None or os.path.isdir(svn_dir)
    command = ['svn', '--non-interactive']
    command.extend(args)
    stdout = subprocess.PIPE if capture_output else None
    # subprocess.run replaces the manual Popen/communicate dance. stderr is
    # deliberately not redirected: always let it print to the caller.
    completed = subprocess.run(
        command,
        stdin=subprocess.DEVNULL,
        stdout=stdout,
        cwd=svn_dir,
        universal_newlines=True)
    if completed.returncode != 0:
        sys.exit(1)
    # None when not capturing, matching the old communicate() behavior.
    return completed.stdout
def remote_head_rev(url):
    """Ask the remote server for the HEAD revision number of `url`."""
    print('svn info', url)
    info_output = svn('info', url, capture_output=True)
    # The revision appears as a "Revision: NNN" line in `svn info` output.
    for info_line in info_output.split('\n'):
        if info_line.startswith('Revision: '):
            return info_line.split()[1]
    print('svn revision info not found', file=sys.stderr)
    sys.exit(1)
def plugin_sync():
    """Export the requested revision into the sync destination.

    Plugin-level caching for Subversion is futile, so this simply fetches
    the target revision and strips the svn metadata (via `svn export`).
    """
    revision = os.environ['PERU_MODULE_REV'] or 'HEAD'
    svn('export', '--force', '--revision', revision,
        os.environ['PERU_MODULE_URL'], os.environ['PERU_SYNC_DEST'])
def plugin_reup():
    """Record the remote HEAD revision in the reup output file."""
    head_rev = remote_head_rev(os.environ['PERU_MODULE_URL'])
    # Quote the revision so it is kept as a string instead of being
    # interpreted as an integer by whatever parses the output.
    with open(os.environ['PERU_REUP_OUTPUT'], 'w') as out:
        out.write('rev: "{}"\n'.format(head_rev))
# Dispatch on the command peru asked this plugin to run.
_handlers = {'sync': plugin_sync, 'reup': plugin_reup}
command = os.environ['PERU_PLUGIN_COMMAND']
handler = _handlers.get(command)
if handler is None:
    raise RuntimeError('Unknown command: ' + repr(command))
handler()
// Copyright 2023 The Cockroach Authors.
//
// Use of this software is governed by the CockroachDB Software License
// included in the /LICENSE file.
// Code generated by "stringer"; DO NOT EDIT.
package sql
import "strconv"
// _ is a compile-time guard emitted by stringer: each index expression
// below compiles only while the corresponding constant still has the value
// recorded here, so any change to the constants breaks the build until
// this file is regenerated. The values are powers of two (bit flags).
func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[funcDistSQLBlocklist-1]
	_ = x[routineProhibited-2]
	_ = x[oidProhibited-4]
	_ = x[castToOidProhibited-8]
	_ = x[arrayOfUntypedTuplesProhibited-16]
	_ = x[untypedTupleProhibited-32]
	_ = x[jsonpathProhibited-64]
	_ = x[unsupportedPlanNode-128]
	_ = x[aggDistSQLBlocklist-256]
	_ = x[rowLevelLockingProhibited-512]
	_ = x[invertedFilterProhibited-1024]
	_ = x[localityOptimizedOpProhibited-2048]
	_ = x[ordinalityProhibited-4096]
	_ = x[vectorSearchProhibited-8192]
	_ = x[systemColumnsAndBufferedWritesProhibited-16384]
	_ = x[valuesNodeProhibited-32768]
	_ = x[numDistSQLBlockers-65536]
}
// String returns the name of a single distSQLBlocker value. Note that the
// constants are distinct bits: a value that is a combination of several
// flags (or any unlisted value) falls through to the numeric default case.
func (i distSQLBlocker) String() string {
	switch i {
	case funcDistSQLBlocklist:
		return "funcDistSQLBlocklist"
	case routineProhibited:
		return "routineProhibited"
	case oidProhibited:
		return "oidProhibited"
	case castToOidProhibited:
		return "castToOidProhibited"
	case arrayOfUntypedTuplesProhibited:
		return "arrayOfUntypedTuplesProhibited"
	case untypedTupleProhibited:
		return "untypedTupleProhibited"
	case jsonpathProhibited:
		return "jsonpathProhibited"
	case unsupportedPlanNode:
		return "unsupportedPlanNode"
	case aggDistSQLBlocklist:
		return "aggDistSQLBlocklist"
	case rowLevelLockingProhibited:
		return "rowLevelLockingProhibited"
	case invertedFilterProhibited:
		return "invertedFilterProhibited"
	case localityOptimizedOpProhibited:
		return "localityOptimizedOpProhibited"
	case ordinalityProhibited:
		return "ordinalityProhibited"
	case vectorSearchProhibited:
		return "vectorSearchProhibited"
	case systemColumnsAndBufferedWritesProhibited:
		return "systemColumnsAndBufferedWritesProhibited"
	case valuesNodeProhibited:
		return "valuesNodeProhibited"
	case numDistSQLBlockers:
		return "numDistSQLBlockers"
	default:
		return "distSQLBlocker(" + strconv.FormatInt(int64(i), 10) + ")"
	}
}
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from accounts.forms import AddressForm
import datetime
from django.utils import timezone
from .forms import LoginForm, RegisterForm, EmailForm, ForgotPasswordForm, ChangePasswordForm
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from . import accounts_messages as ac_msg
from helper import random_alphanumeric as ran
from django.core.mail import send_mail, EmailMessage
from easy_ecom import settings_sensitive
from .models import EmailVerification, ForgotPasswordVerification, UserExtended, Address
# Create your views here.
def changePassword(user, password):
    """Set a new password on `user` and record when it was changed."""
    user.set_password(password)
    user.save()
    # The profile keeps its own "password last updated" timestamp.
    extended = user.userextended
    extended.last_updated_password_datetime = timezone.now()
    extended.save()
def loginView(request):
    """Combined login/registration page.

    POST with a 'login' key authenticates an existing user (refused until
    their email address is verified); POST with a 'register' key creates a
    new account and emails a verification link; any other POST redirects
    back here. GET renders both blank forms.
    """
    # if this is a POST request we need to process the form data
    login_error_messages, register_error_messages = [], []
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        if 'login' in request.POST:
            loginForm = LoginForm(request.POST)
            if loginForm.is_valid():
                username = loginForm.cleaned_data['username']
                loginPassword = loginForm.cleaned_data['loginPassword']
                # authenticate() returns None for a bad username/password.
                user = authenticate(username= username, password=loginPassword)
                if user is not None:
                    # the password verified for the user
                    if user.is_active:
                        # Block login until the email address is verified.
                        if not user.userextended.is_email_verified:
                            return render(request, "accounts/email_not_verified.html",{})
                        login(request, user)
                        # print("User is valid, active and authenticated")
                        return HttpResponseRedirect(reverse('accounts:dashboard'))
                    else:
                        login_error_messages.append(ac_msg.login_account_disabled)
                        # print("The password is valid, but the account has been disabled!")
                else:
                    # the accounts system was unable to verify the username and password
                    login_error_messages.append(ac_msg.login_wrong_username_password)
                    # print("username/password combination was incorrect")
            registerForm = RegisterForm()
        elif 'register' in request.POST:
            registerForm = RegisterForm(request.POST)
            if registerForm.is_valid():
                password = registerForm.cleaned_data['password']
                email = registerForm.cleaned_data['email']
                firstName = registerForm.cleaned_data['firstName']
                lastName = registerForm.cleaned_data['lastName']
                # Retry username generation until create_user succeeds.
                # NOTE(review): the bare `except Exception: pass` retries on
                # *any* failure, not just username collisions, and can loop
                # forever if create_user keeps failing for another reason —
                # confirm collisions are the only expected error here.
                while(True):
                    try:
                        username = ran.rand_from_name(firstName.lower(), lastName.lower()) #generating username
                        user = User.objects.create_user(username, email=email, password=password)
                        break
                    except Exception:
                        pass
                user.first_name = firstName
                user.last_name = lastName
                user.save()
                # Create the profile row and send the verification link.
                UserExtended(user = user).save()
                send_verification_email(user)
                return render(request, "accounts/new_user_registered.html",{})
            loginForm = LoginForm()
        else:
            # POST without a recognized submit button: start over.
            return HttpResponseRedirect(reverse('accounts:login'))
    # if a GET (or any other method) we'll create a blank form
    else:
        loginForm = LoginForm()
        registerForm = RegisterForm()
    # template = Template('Hello {{ name }}!')
    # template.render({'knights': 'that say nih'})
    return render(request, "accounts/login.html",{'loginForm' : loginForm, 'registerForm' : registerForm, 'login_error_messages' : login_error_messages, 'register_error_messages': register_error_messages})
def send_verification_email(user):
    """Email `user` a link that verifies their address.

    Reuses the stored verification code while it is still valid (resetting
    its timestamp); otherwise creates a fresh code. Uses `raise Exception`
    into the handler below as control flow for the "create a new code" path.
    """
    try:
        result = EmailVerification.objects.get(user = user)
        if result.is_not_expired_email_verification(): #if verification code is not expired, send the same code
            result.sent_datetime = timezone.now() #reset the time
            result.save()
            verification_code = result.verification_code
        else: #if expired, delete the previous code
            result.delete()
            raise Exception
    except Exception:
        # No usable record: generate and persist a new code.
        verification_code = ran.rand_alphanumeric()
        email_ver_storage = EmailVerification.objects.create(user=user, verification_code = verification_code)
    # NOTE(review): the link host is hard-coded to the dev server
    # (127.0.0.1:8000) — emailed links will be wrong in any other
    # environment; consider building absolute URLs from the request/site.
    email_msg = ac_msg.registration_email_verfication
    email_msg += "http://127.0.0.1:8000" + reverse('accounts:verify', kwargs= {'verification_code' : verification_code, 'username' : user.username})
    send_mail('Verify your email', email_msg, settings_sensitive.EMAIL_HOST_USER, [user.email], fail_silently=True)
def send_forgot_password_verification_email(user):
    """Email `user` a password-reset link.

    Mirrors send_verification_email: reuses the stored reset code while it
    is still valid, otherwise creates a fresh one (via the exception-driven
    fallback path below).
    """
    try:
        result = ForgotPasswordVerification.objects.get(user = user)
        if result.is_not_expired_forgot_password(): #if verification code is not expired, send the same code
            result.sent_datetime = timezone.now() #reset the time
            result.save()
            verification_code = result.verification_code
        else: #if expired, delete the previous code
            result.delete()
            raise Exception
    except Exception:
        # No usable record: generate and persist a new code.
        verification_code = ran.rand_alphanumeric()
        forgot_password_ver_storage = ForgotPasswordVerification.objects.create(user=user, verification_code = verification_code)
    # NOTE(review): hard-coded dev host, as in send_verification_email —
    # emailed links break outside local development.
    email_msg = ac_msg.forgot_password_message
    email_msg += "http://127.0.0.1:8000" + reverse('accounts:forget_password_check')+ "?verification_code=" + verification_code + '&username=' + user.username
    send_mail('Reset Password', email_msg, settings_sensitive.EMAIL_HOST_USER, [user.email], fail_silently=True)
@login_required()
def dashboardView(request):
    """Render the logged-in user's account dashboard."""
    return render(request, "accounts/dashboard.html",{})
@login_required()
def logoutView(request):
    """Log the current user out and send them to the login page."""
    logout(request)
    # BUG FIX: this previously redirected back to 'accounts:logout'. The
    # now-anonymous user was bounced by @login_required to the login page
    # with ?next=<logout-url>, so their *next* successful login immediately
    # logged them out again. Redirect straight to the login page instead.
    return HttpResponseRedirect(reverse('accounts:login'))
def emailVerificationCheckView(request, verification_code, username):
    """Handle the link sent by send_verification_email.

    Marks the account's email as verified when the (username, code) pair
    matches an unexpired EmailVerification record; otherwise shows the
    invalid-link page.
    """
    if request.user.is_authenticated() and request.user.userextended.is_email_verified: #in case user clicks more than one email verification link
        return HttpResponseRedirect(reverse('accounts:dashboard'))
    try:
        result = EmailVerification.objects.get(user__username = username, verification_code = verification_code)
        # BUG FIX: the expiry check previously read the bound method object
        # (`result.is_not_expired_email_verification` without parentheses),
        # which is always truthy — expired codes were silently accepted.
        # Compare the correct call in send_verification_email.
        if not result.is_not_expired_email_verification():
            raise Exception
    except Exception:
        # Covers both "no matching record" and "code expired".
        return render(request, "accounts/invalid_verification_email.html",{})
    user = User.objects.get(username = username)
    user.userextended.is_email_verified = True
    user.userextended.email_verified_datetime = timezone.now()
    user.userextended.save()
    # Verification codes are single-use.
    result.delete()
    return render(request, "accounts/email_verified.html",{})
# (?P<username>[\w]*)/(?P<verification_code>[a-z0-9]*)
def forgotPasswordCheckView(request):
    """Validate a password-reset link and let the visitor set a new password.

    Expects ``verification_code`` and ``username`` as GET parameters (they
    are carried in the emailed link); on POST, additionally processes the
    new-password form. Logged-in users are bounced to the dashboard.
    """
    if request.user.is_authenticated(): #only if the user didn't login
        return HttpResponseRedirect(reverse('accounts:dashboard'))
    verification_code = request.GET.get('verification_code')
    username = request.GET.get('username')
    if verification_code is None or username is None:
        return render(request, "accounts/invalid_forgot_password_reset.html",{})
    try:
        result = ForgotPasswordVerification.objects.get(user__username = username, verification_code = verification_code)
        # BUG FIX: the expiry check previously read the bound method object
        # (`result.is_not_expired_forgot_password` without parentheses),
        # which is always truthy — expired reset codes were accepted.
        # Compare the correct call in send_forgot_password_verification_email.
        if not result.is_not_expired_forgot_password():
            raise Exception
    except Exception:
        # Covers both "no matching record" and "code expired".
        return render(request, "accounts/invalid_forgot_password_reset.html",{})
    user = User.objects.get(username = username)
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        form = ForgotPasswordForm(request.POST)
        if form.is_valid():
            password = form.cleaned_data['password']
            changePassword(user, password)
            # Reset links are single-use.
            result.delete()
            return render(request, "accounts/forgot_password_reset_done.html",{})
    # if a GET (or any other method) we'll create a blank form
    else:
        form = ForgotPasswordForm()
    return render(request, 'accounts/forgot_password_reset.html',
                  {'form': form, 'verification_code' : verification_code, 'username' : username})
def troubleLoginView(request):
    """Landing page for login problems (forgot password / resend verification).

    Only meaningful for anonymous visitors; authenticated users are sent to
    the dashboard.
    """
    if request.user.is_authenticated(): #trouble login is for forgot password and resend verification alone
        return HttpResponseRedirect(reverse('accounts:dashboard'))
    return render(request, "accounts/trouble_login.html",{})
def forgetPasswordView(request):
    """Ask for an email address and send a password-reset link to it.

    Anonymous users only; authenticated users go to the dashboard. Unverified
    accounts are pointed at the resend-verification flow instead.
    """
    if request.user.is_authenticated(): #forgot password is only if user couldn't login
        return HttpResponseRedirect(reverse('accounts:dashboard'))
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        form = EmailForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            email = form.cleaned_data['email']
            # BUG FIX: .get() raised an unhandled User.DoesNotExist (HTTP 500)
            # for unknown addresses. Render the same success page either way
            # so the form also cannot be used to probe which email addresses
            # are registered.
            try:
                user = User.objects.get(email=email)
            except User.DoesNotExist:
                return render(request, "accounts/forgot_password.html", {'success': True})
            if not user.userextended.is_email_verified:
                # Unverified accounts must verify first, not reset.
                return render(request, "accounts/resend_verification_email.html", {'already_verified': False})
            send_forgot_password_verification_email(user)
            return render(request, "accounts/forgot_password.html", {'success': True})
    # if a GET (or any other method) we'll create a blank form
    else:
        form = EmailForm()
    return render(request, "accounts/forgot_password.html", {'form': form, 'already_verified': True})
def resendVerificationEmailView(request):
    """Re-send the email-verification link for an unverified account.

    Anonymous users only; authenticated users go to the dashboard. Already
    verified accounts are told so instead of getting another email.
    """
    if request.user.is_authenticated(): #only if the no user logged
        return HttpResponseRedirect(reverse('accounts:dashboard'))
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        form = EmailForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            email = form.cleaned_data['email']
            # BUG FIX: .get() raised an unhandled User.DoesNotExist (HTTP 500)
            # for unknown addresses. Render the same success page either way
            # so the form cannot be used to probe registered addresses.
            try:
                user = User.objects.get(email=email)
            except User.DoesNotExist:
                return render(request, "accounts/resend_verification_email.html", {'success': True})
            if user.userextended.is_email_verified:
                return render(request, "accounts/resend_verification_email.html", {'already_verified': True})
            send_verification_email(user)
            return render(request, "accounts/resend_verification_email.html", {'success': True})
    # if a GET (or any other method) we'll create a blank form
    else:
        form = EmailForm()
    return render(request, "accounts/resend_verification_email.html", {'form': form})
@login_required()
def changePasswordView(request):
    """Let a logged-in user change their password.

    On success the user is logged out and shown a confirmation page. On GET,
    or on an invalid POST, the (re-)rendered form is shown.
    """
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request;
        # the current user is passed in — presumably so the form can check
        # the existing password (verify against forms.py).
        form = ChangePasswordForm(request.POST, user = request.user)
        # check whether it's valid:
        if form.is_valid():
            # process the data in form.cleaned_data as required
            # ...
            # redirect to a new URL:
            newPassword = form.cleaned_data['newPassword']
            changePassword(request.user, newPassword)
            # Force a fresh login with the new password.
            logout(request)
            return render(request, "accounts/change_password_done.html", {})
    # if a GET (or any other method) we'll create a blank form
    else:
        form = ChangePasswordForm(user = request.user)
    return render(request, "accounts/change_password.html", {'form': form})
@login_required()
def newAddress(request):
    """Add a shipping/contact address to the logged-in user's profile.

    POST creates an Address linked to request.user.userextended and shows a
    confirmation page; GET (or an invalid POST) renders the form.
    """
    user = request.user
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        form = AddressForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            # process the data in form.cleaned_data as required
            # ...
            # redirect to a new URL:
            contact_name = form.cleaned_data['contact_name']
            country_name = form.cleaned_data['country_name']
            city_name = form.cleaned_data['city_name']
            state_name = form.cleaned_data['state_name']
            street_address_line_1 = form.cleaned_data['street_address_line_1']
            street_address_line_2 = form.cleaned_data['street_address_line_2']
            zipcode = form.cleaned_data['zipcode']
            phone_number = form.cleaned_data['phone_number']
            country_code_phone_number = form.cleaned_data['country_code_phone_number']
            # Persist the address against the user's profile row.
            Address.objects.create\
            (user = request.user.userextended, contact_name = contact_name, country_name=country_name,
            city_name = city_name,state_name = state_name, street_address_line_1 = street_address_line_1,
            street_address_line_2 = street_address_line_2, zipcode = zipcode, phone_number = phone_number,
            country_code_phone_number = country_code_phone_number,
            )
            return render(request, "accounts/new_address_added.html", {})
    # if a GET (or any other method) we'll create a blank form
    else:
        form = AddressForm()
    return render(request, "accounts/new_address.html", {'form': form})
"""
.. todo::
WRITEME
"""
import logging
import sys
import numpy
from theano.compat.six.moves import xrange
from pylearn2.utils.image import Image, ensure_Image
logger = logging.getLogger(__name__)
def scale_to_unit_interval(ndar, eps=1e-8):
    """Return a copy of `ndar` linearly rescaled to span [0, 1].

    The input array itself is not modified. `eps` bounds the divisor from
    below so that a constant array does not trigger a division by zero.
    """
    lowest = ndar.min()
    scaled = ndar - lowest  # fresh array; the caller's data is untouched
    peak = scaled.max()
    scaled *= 1.0 / max(peak, eps)
    return scaled
def tile_raster_images(X, img_shape,
                       tile_shape=None, tile_spacing=(1,1),
                       scale_rows_to_unit_interval=True,
                       output_pixel_vals=True,
                       min_dynamic_range=1e-4,
                       ):
    """
    Transform an array with one flattened image per row, into an array in which
    images are reshaped and layed out like tiles on a floor.

    This function is useful for visualizing datasets whose rows are images, and
    also columns of matrices for transforming those rows (such as the first
    layer of a neural net).

    Parameters
    ----------
    X : numpy.ndarray or tuple of 4 channels or None
        A 2-D array in which every row is a flattened image.
    img_shape : tuple
        The original shape of each image
    tile_shape: tuple
        The number of images to tile (rows, cols). Defaults to a square-ish \
        shape with the right area for the number of images.
    min_dynamic_range: float, positive
        Dynamic range of each image is used in scaling to the unit interval, \
        but images with less dynamic range than this will be scaled as if \
        this were the dynamic range.

    Returns
    -------
    out_array : 2D array with same dtype as X
        Array suitable for viewing as an image (See:`PIL.Image.fromarray`).
    """
    # This is premature when tile_slices_to_image is not documented at all yet,
    # but ultimately true:
    #print >> sys.stderr, "WARN: tile_raster_images sucks, use tile_slices_to_image"
    # Interleaved-RGB shortcut: a (h, w, 3) img_shape means every row stores
    # r,g,b,r,g,b,...; split into per-channel planes and recurse as a tuple.
    if len(img_shape)==3 and img_shape[2]==3:
        # make this save an rgb image
        if scale_rows_to_unit_interval:
            logger.warning("tile_raster_images' scaling routine "
                           "messes up colour - try tile_slices_to_image")
        return tile_raster_images(
            (X[:,0::3], X[:,1::3], X[:,2::3], None),
            img_shape=img_shape[:2],
            tile_shape=tile_shape,
            tile_spacing=tile_spacing,
            scale_rows_to_unit_interval=scale_rows_to_unit_interval,
            output_pixel_vals=output_pixel_vals,
            min_dynamic_range=min_dynamic_range)
    if isinstance(X, tuple):
        n_images_in_x = X[0].shape[0]
    else:
        n_images_in_x = X.shape[0]
    if tile_shape is None:
        tile_shape = most_square_shape(n_images_in_x)
    assert len(img_shape) == 2
    assert len(tile_shape) == 2
    assert len(tile_spacing) == 2
    #out_shape is the shape in pixels of the returned image array
    out_shape = [(ishp + tsp) * tshp - tsp for ishp, tshp, tsp
                 in zip(img_shape, tile_shape, tile_spacing)]
    # Tuple input: four channel planes (R, G, B, A), each tiled recursively
    # into one channel of an RGBA output; a None plane takes its default.
    if isinstance(X, tuple):
        if scale_rows_to_unit_interval:
            raise NotImplementedError()
        assert len(X) == 4
        if output_pixel_vals:
            out_array = numpy.zeros((out_shape[0], out_shape[1], 4), dtype='uint8')
        else:
            # NOTE(review): X is a tuple on this branch, so `X.dtype` looks
            # like it would raise AttributeError when output_pixel_vals is
            # False — confirm and fix (probably meant X[0].dtype).
            out_array = numpy.zeros((out_shape[0], out_shape[1], 4), dtype=X.dtype)
        #colors default to 0, alpha defaults to 1 (opaque)
        if output_pixel_vals:
            channel_defaults = [0,0,0,255]
        else:
            channel_defaults = [0.,0.,0.,1.]
        for i in xrange(4):
            if X[i] is None:
                out_array[:,:,i] = numpy.zeros(out_shape,
                    dtype='uint8' if output_pixel_vals else out_array.dtype
                    )+channel_defaults[i]
            else:
                out_array[:,:,i] = tile_raster_images(X[i], img_shape, tile_shape, tile_spacing, scale_rows_to_unit_interval, output_pixel_vals)
        return out_array
    else:
        # Single-channel (grayscale) path: paste each row, reshaped to
        # img_shape, into its slot in the output grid.
        H, W = img_shape
        Hs, Ws = tile_spacing
        out_scaling = 1
        # Float input destined for uint8 output is assumed to lie in [0, 1].
        if output_pixel_vals and str(X.dtype).startswith('float'):
            out_scaling = 255
        out_array = numpy.zeros(out_shape, dtype='uint8' if output_pixel_vals else X.dtype)
        for tile_row in xrange(tile_shape[0]):
            for tile_col in xrange(tile_shape[1]):
                # The grid may have more slots than images; extras stay zero.
                if tile_row * tile_shape[1] + tile_col < X.shape[0]:
                    if scale_rows_to_unit_interval:
                        # NOTE(review): re-raising a fresh ValueError here
                        # discards the original exception's details.
                        try:
                            this_img = scale_to_unit_interval(
                                X[tile_row * tile_shape[1] + tile_col].reshape(img_shape),
                                eps=min_dynamic_range)
                        except ValueError:
                            raise ValueError('Failed to reshape array of shape %s to shape %s'
                                             % (
                                                X[tile_row*tile_shape[1] + tile_col].shape
                                                , img_shape
                                                ))
                    else:
                        this_img = X[tile_row * tile_shape[1] + tile_col].reshape(img_shape)
                    out_array[
                        tile_row * (H+Hs):tile_row*(H+Hs)+H,
                        tile_col * (W+Ws):tile_col*(W+Ws)+W
                        ] \
                        = this_img * out_scaling
        return out_array
def most_square_shape(N):
    """
    Return a rectangle (height, width) with area N that is closest to square.

    Parameters
    ----------
    N : int
        Positive number of cells; returns None for N == 0.

    Returns
    -------
    tuple of int
        (height, width) with height * width == N and height <= width.
    """
    # Scan divisors downward from floor(sqrt(N)); the first hit gives the
    # most-square factorization.
    for i in range(int(numpy.sqrt(N)), 0, -1):
        if 0 == N % i:
            # BUG FIX: use floor division — on Python 3, `N / i` yields a
            # float, and callers use this result as a tile/array shape,
            # which must be integers. (Also swaps the six.moves xrange shim
            # for the builtin range, identical for both Python versions.)
            return (i, N // i)
def save_tiled_raster_images(tiled_img, filename):
    """
    Save a a return value from `tile_raster_images` to `filename`.

    A 2-D array is written as a grayscale ('L') image and a 3-D array as an
    'RGBA' image; any other rank raises TypeError.

    Returns
    -------
    img : WRITEME
        The PIL image that was saved
    """
    if tiled_img.ndim == 2:
        mode = 'L'
    elif tiled_img.ndim == 3:
        mode = 'RGBA'
    else:
        raise TypeError('bad ndim', tiled_img)
    ensure_Image()
    img = Image.fromarray(tiled_img, mode)
    img.save(filename)
    return img
def tile_slices_to_image_uint8(X, tile_shape=None):
    """Tile the slices of a uint8 array into a single PIL RGB image.

    Parameters
    ----------
    X : numpy.ndarray of uint8
        Shape (n_images, height, width) or (n_images, height, width, 3).
    tile_shape : tuple of int, optional
        (rows, cols) of the tile grid; defaults to the most-square grid
        whose area is n_images.

    Returns
    -------
    PIL image with one pixel of black spacing between tiles.
    """
    if str(X.dtype) != 'uint8':
        raise TypeError(X)
    # BUG FIX: `tile_shape` used to be computed in a dead branch and then
    # ignored — the grid was always recomputed square-ish below, so the
    # argument had no effect. Honor it when provided. (Also drops the
    # unused `tr_stride` local.)
    if tile_shape is None:
        trows, tcols = most_square_shape(X.shape[0])
    else:
        trows, tcols = tile_shape
    H, W = X.shape[1], X.shape[2]
    Hs = H + 1  # spacing between tiles
    Ws = W + 1  # spacing between tiles
    outrows = trows * Hs - 1
    outcols = tcols * Ws - 1
    out = numpy.zeros((outrows, outcols, 3), dtype='uint8')
    for tr in range(trows):
        for tc in range(tcols):
            Xrc = X[tr * tcols + tc]
            if Xrc.ndim == 2:  # if no color channel make it broadcast
                Xrc = Xrc[:, :, None]
            out[tr * Hs:tr * Hs + H, tc * Ws:tc * Ws + W] = Xrc
    ensure_Image()
    img = Image.fromarray(out, 'RGB')
    return img
def tile_slices_to_image(X,
                         tile_shape=None,
                         scale_each=True,
                         min_dynamic_range=1e-4):
    """Tile the slices of an arbitrary-dtype array into a PIL RGB image.

    Each slice (or, when `scale_each` is False, the whole array) is first
    rescaled to the 0..255 uint8 range; `min_dynamic_range` bounds the
    divisor so nearly-constant data does not blow up.
    """
    #always returns an RGB image
    def scale_0_255(x):
        lo = x.min()
        hi = x.max()
        return numpy.asarray(
            255 * (x - lo) / max(hi - lo, min_dynamic_range),
            dtype='uint8')

    if scale_each:
        # Rescale every slice independently.
        as_uint8 = numpy.empty(X.shape, dtype='uint8')
        for idx, plane in enumerate(X):
            as_uint8[idx] = scale_0_255(plane)
    else:
        # One global rescale across the whole array.
        as_uint8 = scale_0_255(X)
    return tile_slices_to_image_uint8(as_uint8, tile_shape=tile_shape)
##########################################################################
#
# Copyright (c) 2013, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import imath
import IECore
import IECoreScene
import Gaffer
import GafferTest
import GafferScene
import GafferSceneTest
class TextTest( GafferSceneTest.SceneTestCase ) :
def testConstruct( self ) :
t = GafferScene.Text()
self.assertEqual( t.getName(), "Text" )
self.assertEqual( t["name"].getValue(), "text" )
def testCompute( self ) :
t = GafferScene.Text()
self.assertEqual( t["out"].object( "/" ), IECore.NullObject() )
self.assertEqual( t["out"].transform( "/" ), imath.M44f() )
self.assertEqual( t["out"].childNames( "/" ), IECore.InternedStringVectorData( [ "text" ] ) )
m1 = t["out"].object( "/text" )
self.assertTrue( isinstance( m1, IECoreScene.MeshPrimitive ) )
t["text"].setValue( "Hello World 2" )
m2 = t["out"].object( "/text" )
self.assertTrue( isinstance( m2, IECoreScene.MeshPrimitive ) )
self.assertGreater( m2.bound().size().x, m1.bound().size().x )
def testAffects( self ) :
t = GafferScene.Text()
s = GafferTest.CapturingSlot( t.plugDirtiedSignal() )
t["name"].setValue( "ground" )
self.assertEqual(
{ x[0] for x in s if not x[0].getName().startswith( "__" ) },
{ t["name"], t["out"]["childNames"], t["out"]["exists"], t["out"]["childBounds"], t["out"]["set"], t["out"] }
)
del s[:]
t["text"].setValue( "cat" )
self.assertTrue( "out.object" in [ x[0].relativeName( x[0].node() ) for x in s ] )
self.assertTrue( "out.bound" in [ x[0].relativeName( x[0].node() ) for x in s ] )
self.assertFalse( "out.childNames" in [ x[0].relativeName( x[0].node() ) for x in s ] )
self.assertFalse( "out.transform" in [ x[0].relativeName( x[0].node() ) for x in s ] )
del s[:]
t["font"].setValue( os.path.expandvars( "$GAFFER_ROOT/fonts/VeraBI.ttf" ) )
self.assertTrue( "out.object" in [ x[0].relativeName( x[0].node() ) for x in s ] )
self.assertTrue( "out.bound" in [ x[0].relativeName( x[0].node() ) for x in s ] )
self.assertFalse( "out.childNames" in [ x[0].relativeName( x[0].node() ) for x in s ] )
self.assertFalse( "out.transform" in [ x[0].relativeName( x[0].node() ) for x in s ] )
if __name__ == "__main__":
unittest.main() | unknown | codeparrot/codeparrot-clean | ||
from PyZ3950 import asn1
# Nested registry of ASN.1 object identifiers for Z39.50 (root arc
# 1.2.840.10003).  Every node in the tree is a dict that carries the same
# OID twice: 'oid' as an asn1.OidVal instance and 'val' as the raw list of
# integer arcs.  Child OIDs are stored as additional string keys on the
# parent node's dict, alongside its own 'oid'/'val' entries.
oids = {}
oids['Z3950'] = {'oid': asn1.OidVal([1, 2, 840, 10003]), 'val': [1, 2, 840, 10003]}
# Top-level Z39.50 sub-arcs: 3=attribute sets, 4=diagnostics, 5=record
# syntaxes, 6=transfer syntaxes, 7=resource report formats, 8=access
# control, 9=extended services, 10=user info formats, 11=element specs,
# 12=variant sets, 13=schemas, 14=tag sets, 15=negotiation, 16=query types.
oids['Z3950']['ATTRS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3]), 'val': [1, 2, 840, 10003, 3]}
oids['Z3950']['DIAG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4]), 'val': [1, 2, 840, 10003, 4]}
oids['Z3950']['RECSYN'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5]), 'val': [1, 2, 840, 10003, 5]}
oids['Z3950']['TRANSFER'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 6]), 'val': [1, 2, 840, 10003, 6]}
oids['Z3950']['RRF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 7]), 'val': [1, 2, 840, 10003, 7]}
oids['Z3950']['ACCESS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8]), 'val': [1, 2, 840, 10003, 8]}
oids['Z3950']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9]), 'val': [1, 2, 840, 10003, 9]}
oids['Z3950']['USR'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10]), 'val': [1, 2, 840, 10003, 10]}
oids['Z3950']['SPEC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11]), 'val': [1, 2, 840, 10003, 11]}
oids['Z3950']['VAR'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 12]), 'val': [1, 2, 840, 10003, 12]}
oids['Z3950']['SCHEMA'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13]), 'val': [1, 2, 840, 10003, 13]}
oids['Z3950']['TAGSET'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14]), 'val': [1, 2, 840, 10003, 14]}
oids['Z3950']['NEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15]), 'val': [1, 2, 840, 10003, 15]}
oids['Z3950']['QUERY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 16]), 'val': [1, 2, 840, 10003, 16]}
# Attribute-set OIDs (1.2.840.10003.3.*): bib-1, exp-1, ext-1, ccl-1,
# GILS, STAS, and the later additions up to arc 19 (ZeeRex).
oids['Z3950']['ATTRS']['BIB1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 1]), 'val': [1, 2, 840, 10003, 3, 1]}
oids['Z3950']['ATTRS']['EXP1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 2]), 'val': [1, 2, 840, 10003, 3, 2]}
oids['Z3950']['ATTRS']['EXT1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 3]), 'val': [1, 2, 840, 10003, 3, 3]}
oids['Z3950']['ATTRS']['CCL1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 4]), 'val': [1, 2, 840, 10003, 3, 4]}
oids['Z3950']['ATTRS']['GILS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 5]), 'val': [1, 2, 840, 10003, 3, 5]}
oids['Z3950']['ATTRS']['STAS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 6]), 'val': [1, 2, 840, 10003, 3, 6]}
oids['Z3950']['ATTRS']['COLLECTIONS1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 7]), 'val': [1, 2, 840, 10003, 3, 7]}
oids['Z3950']['ATTRS']['CIMI1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 8]), 'val': [1, 2, 840, 10003, 3, 8]}
oids['Z3950']['ATTRS']['GEO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 9]), 'val': [1, 2, 840, 10003, 3, 9]}
oids['Z3950']['ATTRS']['ZBIG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 10]), 'val': [1, 2, 840, 10003, 3, 10]}
oids['Z3950']['ATTRS']['UTIL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 11]), 'val': [1, 2, 840, 10003, 3, 11]}
oids['Z3950']['ATTRS']['XD1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 12]), 'val': [1, 2, 840, 10003, 3, 12]}
oids['Z3950']['ATTRS']['ZTHES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 13]), 'val': [1, 2, 840, 10003, 3, 13]}
oids['Z3950']['ATTRS']['FIN1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 14]), 'val': [1, 2, 840, 10003, 3, 14]}
oids['Z3950']['ATTRS']['DAN1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 15]), 'val': [1, 2, 840, 10003, 3, 15]}
oids['Z3950']['ATTRS']['HOLD'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 16]), 'val': [1, 2, 840, 10003, 3, 16]}
oids['Z3950']['ATTRS']['MARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 17]), 'val': [1, 2, 840, 10003, 3, 17]}
oids['Z3950']['ATTRS']['BIB2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 18]), 'val': [1, 2, 840, 10003, 3, 18]}
oids['Z3950']['ATTRS']['ZEEREX'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 3, 19]), 'val': [1, 2, 840, 10003, 3, 19]}
# Diagnostic-set OIDs (1.2.840.10003.4.*).
oids['Z3950']['DIAG']['BIB1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 1]), 'val': [1, 2, 840, 10003, 4, 1]}
oids['Z3950']['DIAG']['DIAG1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 2]), 'val': [1, 2, 840, 10003, 4, 2]}
oids['Z3950']['DIAG']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 3]), 'val': [1, 2, 840, 10003, 4, 3]}
oids['Z3950']['DIAG']['GENERAL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 4, 4]), 'val': [1, 2, 840, 10003, 4, 4]}
# Record-syntax OIDs (1.2.840.10003.5.*).  Low arcs (1-33) are the MARC
# family of bibliographic formats; arcs 100+ are non-MARC syntaxes
# (Explain, SUTRS, OPAC, GRS-1, ...); 109.* wraps common MIME media types,
# 110.* Z39.50-specific media, and 111 is SQL.
oids['Z3950']['RECSYN']['UNIMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 1]), 'val': [1, 2, 840, 10003, 5, 1]}
oids['Z3950']['RECSYN']['INTERMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 2]), 'val': [1, 2, 840, 10003, 5, 2]}
oids['Z3950']['RECSYN']['CCF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 3]), 'val': [1, 2, 840, 10003, 5, 3]}
oids['Z3950']['RECSYN']['USMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10]), 'val': [1, 2, 840, 10003, 5, 10]}
# USMARC record kinds (5.10.*) are nested as extra keys on the USMARC node.
oids['Z3950']['RECSYN']['USMARC']['BIBLIO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 1]), 'val': [1, 2, 840, 10003, 5, 10, 1]}
oids['Z3950']['RECSYN']['USMARC']['AUTH'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 2]), 'val': [1, 2, 840, 10003, 5, 10, 2]}
oids['Z3950']['RECSYN']['USMARC']['HOLD'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 3]), 'val': [1, 2, 840, 10003, 5, 10, 3]}
oids['Z3950']['RECSYN']['USMARC']['COMMUNITY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 4]), 'val': [1, 2, 840, 10003, 5, 10, 4]}
oids['Z3950']['RECSYN']['USMARC']['CLASS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 10, 5]), 'val': [1, 2, 840, 10003, 5, 10, 5]}
oids['Z3950']['RECSYN']['UKMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 11]), 'val': [1, 2, 840, 10003, 5, 11]}
oids['Z3950']['RECSYN']['NORMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 12]), 'val': [1, 2, 840, 10003, 5, 12]}
oids['Z3950']['RECSYN']['LIBRISMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 13]), 'val': [1, 2, 840, 10003, 5, 13]}
oids['Z3950']['RECSYN']['DANMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 14]), 'val': [1, 2, 840, 10003, 5, 14]}
oids['Z3950']['RECSYN']['FINMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 15]), 'val': [1, 2, 840, 10003, 5, 15]}
oids['Z3950']['RECSYN']['MAB'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 16]), 'val': [1, 2, 840, 10003, 5, 16]}
oids['Z3950']['RECSYN']['CANMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 17]), 'val': [1, 2, 840, 10003, 5, 17]}
oids['Z3950']['RECSYN']['SBNMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 18]), 'val': [1, 2, 840, 10003, 5, 18]}
oids['Z3950']['RECSYN']['PICAMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 19]), 'val': [1, 2, 840, 10003, 5, 19]}
oids['Z3950']['RECSYN']['AUSMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 20]), 'val': [1, 2, 840, 10003, 5, 20]}
oids['Z3950']['RECSYN']['IBERMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 21]), 'val': [1, 2, 840, 10003, 5, 21]}
oids['Z3950']['RECSYN']['CATMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 22]), 'val': [1, 2, 840, 10003, 5, 22]}
oids['Z3950']['RECSYN']['MALMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 23]), 'val': [1, 2, 840, 10003, 5, 23]}
oids['Z3950']['RECSYN']['JPMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 24]), 'val': [1, 2, 840, 10003, 5, 24]}
oids['Z3950']['RECSYN']['SWEMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 25]), 'val': [1, 2, 840, 10003, 5, 25]}
oids['Z3950']['RECSYN']['SIGLEMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 26]), 'val': [1, 2, 840, 10003, 5, 26]}
oids['Z3950']['RECSYN']['ISDSMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 27]), 'val': [1, 2, 840, 10003, 5, 27]}
oids['Z3950']['RECSYN']['RUSMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 28]), 'val': [1, 2, 840, 10003, 5, 28]}
oids['Z3950']['RECSYN']['HUNMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 29]), 'val': [1, 2, 840, 10003, 5, 29]}
oids['Z3950']['RECSYN']['NACSISCATP'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 30]), 'val': [1, 2, 840, 10003, 5, 30]}
oids['Z3950']['RECSYN']['FINMARC2000'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 31]), 'val': [1, 2, 840, 10003, 5, 31]}
oids['Z3950']['RECSYN']['MARC21FIN'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 32]), 'val': [1, 2, 840, 10003, 5, 32]}
oids['Z3950']['RECSYN']['COMARC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 33]), 'val': [1, 2, 840, 10003, 5, 33]}
# Non-MARC record syntaxes (5.100+).
oids['Z3950']['RECSYN']['EXPLAIN'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 100]), 'val': [1, 2, 840, 10003, 5, 100]}
oids['Z3950']['RECSYN']['SUTRS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 101]), 'val': [1, 2, 840, 10003, 5, 101]}
oids['Z3950']['RECSYN']['OPAC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 102]), 'val': [1, 2, 840, 10003, 5, 102]}
oids['Z3950']['RECSYN']['SUMMARY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 103]), 'val': [1, 2, 840, 10003, 5, 103]}
oids['Z3950']['RECSYN']['GRS0'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 104]), 'val': [1, 2, 840, 10003, 5, 104]}
oids['Z3950']['RECSYN']['GRS1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 105]), 'val': [1, 2, 840, 10003, 5, 105]}
oids['Z3950']['RECSYN']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 106]), 'val': [1, 2, 840, 10003, 5, 106]}
oids['Z3950']['RECSYN']['FRAGMENT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 107]), 'val': [1, 2, 840, 10003, 5, 107]}
# MIME media-type syntaxes (5.109.*), nested under the MIME node.
oids['Z3950']['RECSYN']['MIME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109]), 'val': [1, 2, 840, 10003, 5, 109]}
oids['Z3950']['RECSYN']['MIME']['PDF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 1]), 'val': [1, 2, 840, 10003, 5, 109, 1]}
oids['Z3950']['RECSYN']['MIME']['POSTSCRIPT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 2]), 'val': [1, 2, 840, 10003, 5, 109, 2]}
oids['Z3950']['RECSYN']['MIME']['HTML'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 3]), 'val': [1, 2, 840, 10003, 5, 109, 3]}
oids['Z3950']['RECSYN']['MIME']['TIFF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 4]), 'val': [1, 2, 840, 10003, 5, 109, 4]}
oids['Z3950']['RECSYN']['MIME']['GIF'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 5]), 'val': [1, 2, 840, 10003, 5, 109, 5]}
oids['Z3950']['RECSYN']['MIME']['JPEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 6]), 'val': [1, 2, 840, 10003, 5, 109, 6]}
oids['Z3950']['RECSYN']['MIME']['PNG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 7]), 'val': [1, 2, 840, 10003, 5, 109, 7]}
oids['Z3950']['RECSYN']['MIME']['MPEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 8]), 'val': [1, 2, 840, 10003, 5, 109, 8]}
oids['Z3950']['RECSYN']['MIME']['SGML'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 9]), 'val': [1, 2, 840, 10003, 5, 109, 9]}
oids['Z3950']['RECSYN']['MIME']['XML'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 109, 10]), 'val': [1, 2, 840, 10003, 5, 109, 10]}
# Z39.50-specific media syntaxes (5.110.*) and SQL (5.111).
oids['Z3950']['RECSYN']['ZMIME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 110]), 'val': [1, 2, 840, 10003, 5, 110]}
oids['Z3950']['RECSYN']['ZMIME']['TIFFB'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 110, 1]), 'val': [1, 2, 840, 10003, 5, 110, 1]}
oids['Z3950']['RECSYN']['ZMIME']['WAV'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 110, 2]), 'val': [1, 2, 840, 10003, 5, 110, 2]}
oids['Z3950']['RECSYN']['SQL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 5, 111]), 'val': [1, 2, 840, 10003, 5, 111]}
# Resource report format OIDs (1.2.840.10003.7.*).
oids['Z3950']['RRF']['RESOURCE1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 7, 1]), 'val': [1, 2, 840, 10003, 7, 1]}
oids['Z3950']['RRF']['RESOURCE2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 7, 2]), 'val': [1, 2, 840, 10003, 7, 2]}
# Access-control format OIDs (1.2.840.10003.8.*).
oids['Z3950']['ACCESS']['PROMPT1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8, 1]), 'val': [1, 2, 840, 10003, 8, 1]}
oids['Z3950']['ACCESS']['DES1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8, 2]), 'val': [1, 2, 840, 10003, 8, 2]}
oids['Z3950']['ACCESS']['KRB1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 8, 3]), 'val': [1, 2, 840, 10003, 8, 3]}
# Extended-service OIDs (1.2.840.10003.9.*); DBUPDATE carries a nested
# 'REV' sub-arc for the revised database-update service.
oids['Z3950']['ES']['PERSISTRS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 1]), 'val': [1, 2, 840, 10003, 9, 1]}
oids['Z3950']['ES']['PERSISTQRY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 2]), 'val': [1, 2, 840, 10003, 9, 2]}
oids['Z3950']['ES']['PERIODQRY'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 3]), 'val': [1, 2, 840, 10003, 9, 3]}
oids['Z3950']['ES']['ITEMORDER'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 4]), 'val': [1, 2, 840, 10003, 9, 4]}
oids['Z3950']['ES']['DBUPDATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 5]), 'val': [1, 2, 840, 10003, 9, 5]}
oids['Z3950']['ES']['DBUPDATE']['REV'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 5, 1]), 'val': [1, 2, 840, 10003, 9, 5, 1]}
oids['Z3950']['ES']['DBUPDATE']['REV']['1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 5, 1, 1]), 'val': [1, 2, 840, 10003, 9, 5, 1, 1]}
oids['Z3950']['ES']['EXPORTSPEC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 6]), 'val': [1, 2, 840, 10003, 9, 6]}
oids['Z3950']['ES']['EXPORTINV'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 9, 7]), 'val': [1, 2, 840, 10003, 9, 7]}
# User-information format OIDs (1.2.840.10003.10.*); the 1000 arc holds
# privately registered formats (here: OCLC).
oids['Z3950']['USR']['SEARCHRES1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1]), 'val': [1, 2, 840, 10003, 10, 1]}
oids['Z3950']['USR']['CHARSETNEG'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 2]), 'val': [1, 2, 840, 10003, 10, 2]}
oids['Z3950']['USR']['INFO1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 3]), 'val': [1, 2, 840, 10003, 10, 3]}
oids['Z3950']['USR']['SEARCHTERMS1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 4]), 'val': [1, 2, 840, 10003, 10, 4]}
oids['Z3950']['USR']['SEARCHTERMS2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 5]), 'val': [1, 2, 840, 10003, 10, 5]}
oids['Z3950']['USR']['DATETIME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 6]), 'val': [1, 2, 840, 10003, 10, 6]}
oids['Z3950']['USR']['INSERTACTIONQUAL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 9]), 'val': [1, 2, 840, 10003, 10, 9]}
oids['Z3950']['USR']['EDITACTIONQUAL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 10]), 'val': [1, 2, 840, 10003, 10, 10]}
oids['Z3950']['USR']['AUTHFILE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 11]), 'val': [1, 2, 840, 10003, 10, 11]}
oids['Z3950']['USR']['PRIVATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1000]), 'val': [1, 2, 840, 10003, 10, 1000]}
oids['Z3950']['USR']['PRIVATE']['OCLC'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17]), 'val': [1, 2, 840, 10003, 10, 1000, 17]}
oids['Z3950']['USR']['PRIVATE']['OCLC']['INFO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17, 1]), 'val': [1, 2, 840, 10003, 10, 1000, 17, 1]}
# Element-spec OIDs (1.2.840.10003.11.*).
oids['Z3950']['SPEC']['ESPEC1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11, 1]), 'val': [1, 2, 840, 10003, 11, 1]}
oids['Z3950']['SPEC']['ESPEC2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11, 2]), 'val': [1, 2, 840, 10003, 11, 2]}
oids['Z3950']['SPEC']['ESPECQ'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 11, 3]), 'val': [1, 2, 840, 10003, 11, 3]}
# Variant-set OIDs (1.2.840.10003.12.*).
oids['Z3950']['VAR']['VARIANT1'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 12, 1]), 'val': [1, 2, 840, 10003, 12, 1]}
# Database-schema OIDs (1.2.840.10003.13.*); HOLDINGS has nested
# version sub-arcs ('11', '12', '14' -> 13.7.1, 13.7.2, 13.7.4).
oids['Z3950']['SCHEMA']['WAIS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
oids['Z3950']['SCHEMA']['GILS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 2]), 'val': [1, 2, 840, 10003, 13, 2]}
oids['Z3950']['SCHEMA']['COLLECTIONS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 3]), 'val': [1, 2, 840, 10003, 13, 3]}
oids['Z3950']['SCHEMA']['GEO'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 4]), 'val': [1, 2, 840, 10003, 13, 4]}
oids['Z3950']['SCHEMA']['CIMI'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 5]), 'val': [1, 2, 840, 10003, 13, 5]}
oids['Z3950']['SCHEMA']['UPDATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 6]), 'val': [1, 2, 840, 10003, 13, 6]}
oids['Z3950']['SCHEMA']['HOLDINGS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7]), 'val': [1, 2, 840, 10003, 13, 7]}
oids['Z3950']['SCHEMA']['HOLDINGS']['11'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7, 1]), 'val': [1, 2, 840, 10003, 13, 7, 1]}
oids['Z3950']['SCHEMA']['HOLDINGS']['12'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7, 2]), 'val': [1, 2, 840, 10003, 13, 7, 2]}
oids['Z3950']['SCHEMA']['HOLDINGS']['14'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 7, 4]), 'val': [1, 2, 840, 10003, 13, 7, 4]}
# NOTE(review): ZTHES, INSERT and EDIT below all reuse arc
# [1, 2, 840, 10003, 13, 1], identical to WAIS above.  This looks like a
# generation artifact rather than an intentional assignment — confirm the
# correct arcs against the Z39.50 Maintenance Agency OID registry before
# relying on these three entries.
oids['Z3950']['SCHEMA']['ZTHES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
oids['Z3950']['SCHEMA']['INSERT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
oids['Z3950']['SCHEMA']['EDIT'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 13, 1]), 'val': [1, 2, 840, 10003, 13, 1]}
# Tag-set OIDs (1.2.840.10003.14.*).
oids['Z3950']['TAGSET']['M'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 1]), 'val': [1, 2, 840, 10003, 14, 1]}
oids['Z3950']['TAGSET']['G'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 2]), 'val': [1, 2, 840, 10003, 14, 2]}
oids['Z3950']['TAGSET']['STAS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 3]), 'val': [1, 2, 840, 10003, 14, 3]}
oids['Z3950']['TAGSET']['GILS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 4]), 'val': [1, 2, 840, 10003, 14, 4]}
oids['Z3950']['TAGSET']['COLLECTIONS'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 5]), 'val': [1, 2, 840, 10003, 14, 5]}
oids['Z3950']['TAGSET']['CIMI'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 6]), 'val': [1, 2, 840, 10003, 14, 6]}
oids['Z3950']['TAGSET']['UPDATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 7]), 'val': [1, 2, 840, 10003, 14, 7]}
oids['Z3950']['TAGSET']['ZTHES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 14, 8]), 'val': [1, 2, 840, 10003, 14, 8]}
# Negotiation-record OIDs (1.2.840.10003.15.*); 1000 holds private
# registrations (here: Index Data's character-set-name negotiation).
oids['Z3950']['NEG']['CHARSET2'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1]), 'val': [1, 2, 840, 10003, 15, 1]}
oids['Z3950']['NEG']['ES'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 2]), 'val': [1, 2, 840, 10003, 15, 2]}
oids['Z3950']['NEG']['CHARSET3'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 3]), 'val': [1, 2, 840, 10003, 15, 3]}
oids['Z3950']['NEG']['PRIVATE'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1000]), 'val': [1, 2, 840, 10003, 15, 1000]}
oids['Z3950']['NEG']['PRIVATE']['INDEXDATA'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81]), 'val': [1, 2, 840, 10003, 15, 1000, 81]}
oids['Z3950']['NEG']['PRIVATE']['INDEXDATA']['CHARSETNAME'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81, 1]), 'val': [1, 2, 840, 10003, 15, 1000, 81, 1]}
# Query-type OIDs (1.2.840.10003.16.*).
oids['Z3950']['QUERY']['SQL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 16, 1]), 'val': [1, 2, 840, 10003, 16, 1]}
oids['Z3950']['QUERY']['CQL'] = {'oid': asn1.OidVal([1, 2, 840, 10003, 16, 2]), 'val': [1, 2, 840, 10003, 16, 2]}
# ISO/IEC 10646 (Unicode) branch of the OID registry: root 1.0.10646,
# part 1, and part 1's transfer syntaxes (UCS-2, UCS-4, UTF-16, UTF-8).
# Each node carries the OID both as an asn1.OidVal ('oid') and as the raw
# arc list ('val'), matching the layout of the Z3950 branch above.
_node = oids['UNICODE'] = {'oid': asn1.OidVal([1, 0, 10646]), 'val': [1, 0, 10646]}
_node = _node['PART1'] = {'oid': asn1.OidVal([1, 0, 10646, 1]), 'val': [1, 0, 10646, 1]}
_node = _node['XFERSYN'] = {'oid': asn1.OidVal([1, 0, 10646, 1, 0]), 'val': [1, 0, 10646, 1, 0]}
for _name, _last in (('UCS2', 2), ('UCS4', 4), ('UTF16', 5), ('UTF8', 8)):
    _arc = [1, 0, 10646, 1, 0, _last]
    # Give 'oid' and 'val' independent list objects, as the literal
    # per-line assignments did.
    _node[_name] = {'oid': asn1.OidVal(_arc), 'val': list(_arc)}
del _node, _name, _last, _arc
# Flat module-level aliases for the Unicode OIDs: NAME is the raw list of
# integer arcs, NAME_ov the corresponding asn1.OidVal.  Child arcs are
# derived from their parent lists rather than repeated as literals; each
# OidVal receives its own copy so no list object is shared.
UNICODE = [1, 0, 10646]
UNICODE_ov = asn1.OidVal(list(UNICODE))
UNICODE_PART1 = UNICODE + [1]
UNICODE_PART1_ov = asn1.OidVal(list(UNICODE_PART1))
UNICODE_PART1_XFERSYN = UNICODE_PART1 + [0]
UNICODE_PART1_XFERSYN_ov = asn1.OidVal(list(UNICODE_PART1_XFERSYN))
UNICODE_PART1_XFERSYN_UCS2 = UNICODE_PART1_XFERSYN + [2]
UNICODE_PART1_XFERSYN_UCS2_ov = asn1.OidVal(list(UNICODE_PART1_XFERSYN_UCS2))
UNICODE_PART1_XFERSYN_UCS4 = UNICODE_PART1_XFERSYN + [4]
UNICODE_PART1_XFERSYN_UCS4_ov = asn1.OidVal(list(UNICODE_PART1_XFERSYN_UCS4))
UNICODE_PART1_XFERSYN_UTF16 = UNICODE_PART1_XFERSYN + [5]
UNICODE_PART1_XFERSYN_UTF16_ov = asn1.OidVal(list(UNICODE_PART1_XFERSYN_UTF16))
UNICODE_PART1_XFERSYN_UTF8 = UNICODE_PART1_XFERSYN + [8]
UNICODE_PART1_XFERSYN_UTF8_ov = asn1.OidVal(list(UNICODE_PART1_XFERSYN_UTF8))
# Flat module-level constants mirroring the nested `oids` tree, listed in
# alphabetical order: NAME is the raw list of integer arcs, NAME_ov the
# corresponding asn1.OidVal.  This section covers the Z39.50 root, the
# access-control formats (8.*) and the attribute sets (3.*).
Z3950 = [1, 2, 840, 10003]
Z3950_ov = asn1.OidVal([1, 2, 840, 10003])
Z3950_ACCESS = [1, 2, 840, 10003, 8]
Z3950_ACCESS_ov = asn1.OidVal([1, 2, 840, 10003, 8])
Z3950_ACCESS_DES1 = [1, 2, 840, 10003, 8, 2]
Z3950_ACCESS_DES1_ov = asn1.OidVal([1, 2, 840, 10003, 8, 2])
Z3950_ACCESS_KRB1 = [1, 2, 840, 10003, 8, 3]
Z3950_ACCESS_KRB1_ov = asn1.OidVal([1, 2, 840, 10003, 8, 3])
Z3950_ACCESS_PROMPT1 = [1, 2, 840, 10003, 8, 1]
Z3950_ACCESS_PROMPT1_ov = asn1.OidVal([1, 2, 840, 10003, 8, 1])
Z3950_ATTRS = [1, 2, 840, 10003, 3]
Z3950_ATTRS_ov = asn1.OidVal([1, 2, 840, 10003, 3])
Z3950_ATTRS_BIB1 = [1, 2, 840, 10003, 3, 1]
Z3950_ATTRS_BIB1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 1])
Z3950_ATTRS_BIB2 = [1, 2, 840, 10003, 3, 18]
Z3950_ATTRS_BIB2_ov = asn1.OidVal([1, 2, 840, 10003, 3, 18])
Z3950_ATTRS_CCL1 = [1, 2, 840, 10003, 3, 4]
Z3950_ATTRS_CCL1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 4])
Z3950_ATTRS_CIMI1 = [1, 2, 840, 10003, 3, 8]
Z3950_ATTRS_CIMI1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 8])
Z3950_ATTRS_COLLECTIONS1 = [1, 2, 840, 10003, 3, 7]
Z3950_ATTRS_COLLECTIONS1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 7])
Z3950_ATTRS_DAN1 = [1, 2, 840, 10003, 3, 15]
Z3950_ATTRS_DAN1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 15])
Z3950_ATTRS_EXP1 = [1, 2, 840, 10003, 3, 2]
Z3950_ATTRS_EXP1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 2])
Z3950_ATTRS_EXT1 = [1, 2, 840, 10003, 3, 3]
Z3950_ATTRS_EXT1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 3])
Z3950_ATTRS_FIN1 = [1, 2, 840, 10003, 3, 14]
Z3950_ATTRS_FIN1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 14])
Z3950_ATTRS_GEO = [1, 2, 840, 10003, 3, 9]
Z3950_ATTRS_GEO_ov = asn1.OidVal([1, 2, 840, 10003, 3, 9])
Z3950_ATTRS_GILS = [1, 2, 840, 10003, 3, 5]
Z3950_ATTRS_GILS_ov = asn1.OidVal([1, 2, 840, 10003, 3, 5])
Z3950_ATTRS_HOLD = [1, 2, 840, 10003, 3, 16]
Z3950_ATTRS_HOLD_ov = asn1.OidVal([1, 2, 840, 10003, 3, 16])
Z3950_ATTRS_MARC = [1, 2, 840, 10003, 3, 17]
Z3950_ATTRS_MARC_ov = asn1.OidVal([1, 2, 840, 10003, 3, 17])
Z3950_ATTRS_STAS = [1, 2, 840, 10003, 3, 6]
Z3950_ATTRS_STAS_ov = asn1.OidVal([1, 2, 840, 10003, 3, 6])
Z3950_ATTRS_UTIL = [1, 2, 840, 10003, 3, 11]
Z3950_ATTRS_UTIL_ov = asn1.OidVal([1, 2, 840, 10003, 3, 11])
Z3950_ATTRS_XD1 = [1, 2, 840, 10003, 3, 12]
Z3950_ATTRS_XD1_ov = asn1.OidVal([1, 2, 840, 10003, 3, 12])
Z3950_ATTRS_ZBIG = [1, 2, 840, 10003, 3, 10]
Z3950_ATTRS_ZBIG_ov = asn1.OidVal([1, 2, 840, 10003, 3, 10])
Z3950_ATTRS_ZEEREX = [1, 2, 840, 10003, 3, 19]
Z3950_ATTRS_ZEEREX_ov = asn1.OidVal([1, 2, 840, 10003, 3, 19])
Z3950_ATTRS_ZTHES = [1, 2, 840, 10003, 3, 13]
Z3950_ATTRS_ZTHES_ov = asn1.OidVal([1, 2, 840, 10003, 3, 13])
# Flat NAME / NAME_ov constant pairs for diagnostic sets (4.*), extended
# services (9.*), negotiation records (15.*) and query types (16.*).
Z3950_DIAG = [1, 2, 840, 10003, 4]
Z3950_DIAG_ov = asn1.OidVal([1, 2, 840, 10003, 4])
Z3950_DIAG_BIB1 = [1, 2, 840, 10003, 4, 1]
Z3950_DIAG_BIB1_ov = asn1.OidVal([1, 2, 840, 10003, 4, 1])
Z3950_DIAG_DIAG1 = [1, 2, 840, 10003, 4, 2]
Z3950_DIAG_DIAG1_ov = asn1.OidVal([1, 2, 840, 10003, 4, 2])
Z3950_DIAG_ES = [1, 2, 840, 10003, 4, 3]
Z3950_DIAG_ES_ov = asn1.OidVal([1, 2, 840, 10003, 4, 3])
Z3950_DIAG_GENERAL = [1, 2, 840, 10003, 4, 4]
Z3950_DIAG_GENERAL_ov = asn1.OidVal([1, 2, 840, 10003, 4, 4])
Z3950_ES = [1, 2, 840, 10003, 9]
Z3950_ES_ov = asn1.OidVal([1, 2, 840, 10003, 9])
Z3950_ES_DBUPDATE = [1, 2, 840, 10003, 9, 5]
Z3950_ES_DBUPDATE_ov = asn1.OidVal([1, 2, 840, 10003, 9, 5])
Z3950_ES_DBUPDATE_REV = [1, 2, 840, 10003, 9, 5, 1]
Z3950_ES_DBUPDATE_REV_ov = asn1.OidVal([1, 2, 840, 10003, 9, 5, 1])
Z3950_ES_DBUPDATE_REV_1 = [1, 2, 840, 10003, 9, 5, 1, 1]
Z3950_ES_DBUPDATE_REV_1_ov = asn1.OidVal([1, 2, 840, 10003, 9, 5, 1, 1])
Z3950_ES_EXPORTINV = [1, 2, 840, 10003, 9, 7]
Z3950_ES_EXPORTINV_ov = asn1.OidVal([1, 2, 840, 10003, 9, 7])
Z3950_ES_EXPORTSPEC = [1, 2, 840, 10003, 9, 6]
Z3950_ES_EXPORTSPEC_ov = asn1.OidVal([1, 2, 840, 10003, 9, 6])
Z3950_ES_ITEMORDER = [1, 2, 840, 10003, 9, 4]
Z3950_ES_ITEMORDER_ov = asn1.OidVal([1, 2, 840, 10003, 9, 4])
Z3950_ES_PERIODQRY = [1, 2, 840, 10003, 9, 3]
Z3950_ES_PERIODQRY_ov = asn1.OidVal([1, 2, 840, 10003, 9, 3])
Z3950_ES_PERSISTQRY = [1, 2, 840, 10003, 9, 2]
Z3950_ES_PERSISTQRY_ov = asn1.OidVal([1, 2, 840, 10003, 9, 2])
Z3950_ES_PERSISTRS = [1, 2, 840, 10003, 9, 1]
Z3950_ES_PERSISTRS_ov = asn1.OidVal([1, 2, 840, 10003, 9, 1])
Z3950_NEG = [1, 2, 840, 10003, 15]
Z3950_NEG_ov = asn1.OidVal([1, 2, 840, 10003, 15])
Z3950_NEG_CHARSET2 = [1, 2, 840, 10003, 15, 1]
Z3950_NEG_CHARSET2_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1])
Z3950_NEG_CHARSET3 = [1, 2, 840, 10003, 15, 3]
Z3950_NEG_CHARSET3_ov = asn1.OidVal([1, 2, 840, 10003, 15, 3])
Z3950_NEG_ES = [1, 2, 840, 10003, 15, 2]
Z3950_NEG_ES_ov = asn1.OidVal([1, 2, 840, 10003, 15, 2])
Z3950_NEG_PRIVATE = [1, 2, 840, 10003, 15, 1000]
Z3950_NEG_PRIVATE_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1000])
Z3950_NEG_PRIVATE_INDEXDATA = [1, 2, 840, 10003, 15, 1000, 81]
Z3950_NEG_PRIVATE_INDEXDATA_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81])
Z3950_NEG_PRIVATE_INDEXDATA_CHARSETNAME = [1, 2, 840, 10003, 15, 1000, 81, 1]
Z3950_NEG_PRIVATE_INDEXDATA_CHARSETNAME_ov = asn1.OidVal([1, 2, 840, 10003, 15, 1000, 81, 1])
Z3950_QUERY = [1, 2, 840, 10003, 16]
Z3950_QUERY_ov = asn1.OidVal([1, 2, 840, 10003, 16])
Z3950_QUERY_CQL = [1, 2, 840, 10003, 16, 2]
Z3950_QUERY_CQL_ov = asn1.OidVal([1, 2, 840, 10003, 16, 2])
Z3950_QUERY_SQL = [1, 2, 840, 10003, 16, 1]
Z3950_QUERY_SQL_ov = asn1.OidVal([1, 2, 840, 10003, 16, 1])
# Flat NAME / NAME_ov constant pairs for the record syntaxes (5.*): MARC
# variants, Explain/SUTRS/OPAC/GRS and the MIME media-type arcs (5.109.*).
# (The SUTRS pair's _ov companion continues past this section.)
Z3950_RECSYN = [1, 2, 840, 10003, 5]
Z3950_RECSYN_ov = asn1.OidVal([1, 2, 840, 10003, 5])
Z3950_RECSYN_AUSMARC = [1, 2, 840, 10003, 5, 20]
Z3950_RECSYN_AUSMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 20])
Z3950_RECSYN_CANMARC = [1, 2, 840, 10003, 5, 17]
Z3950_RECSYN_CANMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 17])
Z3950_RECSYN_CATMARC = [1, 2, 840, 10003, 5, 22]
Z3950_RECSYN_CATMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 22])
Z3950_RECSYN_CCF = [1, 2, 840, 10003, 5, 3]
Z3950_RECSYN_CCF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 3])
Z3950_RECSYN_COMARC = [1, 2, 840, 10003, 5, 33]
Z3950_RECSYN_COMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 33])
Z3950_RECSYN_DANMARC = [1, 2, 840, 10003, 5, 14]
Z3950_RECSYN_DANMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 14])
Z3950_RECSYN_ES = [1, 2, 840, 10003, 5, 106]
Z3950_RECSYN_ES_ov = asn1.OidVal([1, 2, 840, 10003, 5, 106])
Z3950_RECSYN_EXPLAIN = [1, 2, 840, 10003, 5, 100]
Z3950_RECSYN_EXPLAIN_ov = asn1.OidVal([1, 2, 840, 10003, 5, 100])
Z3950_RECSYN_FINMARC = [1, 2, 840, 10003, 5, 15]
Z3950_RECSYN_FINMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 15])
Z3950_RECSYN_FINMARC2000 = [1, 2, 840, 10003, 5, 31]
Z3950_RECSYN_FINMARC2000_ov = asn1.OidVal([1, 2, 840, 10003, 5, 31])
Z3950_RECSYN_FRAGMENT = [1, 2, 840, 10003, 5, 107]
Z3950_RECSYN_FRAGMENT_ov = asn1.OidVal([1, 2, 840, 10003, 5, 107])
Z3950_RECSYN_GRS0 = [1, 2, 840, 10003, 5, 104]
Z3950_RECSYN_GRS0_ov = asn1.OidVal([1, 2, 840, 10003, 5, 104])
Z3950_RECSYN_GRS1 = [1, 2, 840, 10003, 5, 105]
Z3950_RECSYN_GRS1_ov = asn1.OidVal([1, 2, 840, 10003, 5, 105])
Z3950_RECSYN_HUNMARC = [1, 2, 840, 10003, 5, 29]
Z3950_RECSYN_HUNMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 29])
Z3950_RECSYN_IBERMARC = [1, 2, 840, 10003, 5, 21]
Z3950_RECSYN_IBERMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 21])
Z3950_RECSYN_INTERMARC = [1, 2, 840, 10003, 5, 2]
Z3950_RECSYN_INTERMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 2])
Z3950_RECSYN_ISDSMARC = [1, 2, 840, 10003, 5, 27]
Z3950_RECSYN_ISDSMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 27])
Z3950_RECSYN_JPMARC = [1, 2, 840, 10003, 5, 24]
Z3950_RECSYN_JPMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 24])
Z3950_RECSYN_LIBRISMARC = [1, 2, 840, 10003, 5, 13]
Z3950_RECSYN_LIBRISMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 13])
Z3950_RECSYN_MAB = [1, 2, 840, 10003, 5, 16]
Z3950_RECSYN_MAB_ov = asn1.OidVal([1, 2, 840, 10003, 5, 16])
Z3950_RECSYN_MALMARC = [1, 2, 840, 10003, 5, 23]
Z3950_RECSYN_MALMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 23])
Z3950_RECSYN_MARC21FIN = [1, 2, 840, 10003, 5, 32]
Z3950_RECSYN_MARC21FIN_ov = asn1.OidVal([1, 2, 840, 10003, 5, 32])
Z3950_RECSYN_MIME = [1, 2, 840, 10003, 5, 109]
Z3950_RECSYN_MIME_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109])
Z3950_RECSYN_MIME_GIF = [1, 2, 840, 10003, 5, 109, 5]
Z3950_RECSYN_MIME_GIF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 5])
Z3950_RECSYN_MIME_HTML = [1, 2, 840, 10003, 5, 109, 3]
Z3950_RECSYN_MIME_HTML_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 3])
Z3950_RECSYN_MIME_JPEG = [1, 2, 840, 10003, 5, 109, 6]
Z3950_RECSYN_MIME_JPEG_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 6])
Z3950_RECSYN_MIME_MPEG = [1, 2, 840, 10003, 5, 109, 8]
Z3950_RECSYN_MIME_MPEG_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 8])
Z3950_RECSYN_MIME_PDF = [1, 2, 840, 10003, 5, 109, 1]
Z3950_RECSYN_MIME_PDF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 1])
Z3950_RECSYN_MIME_PNG = [1, 2, 840, 10003, 5, 109, 7]
Z3950_RECSYN_MIME_PNG_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 7])
Z3950_RECSYN_MIME_POSTSCRIPT = [1, 2, 840, 10003, 5, 109, 2]
Z3950_RECSYN_MIME_POSTSCRIPT_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 2])
Z3950_RECSYN_MIME_SGML = [1, 2, 840, 10003, 5, 109, 9]
Z3950_RECSYN_MIME_SGML_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 9])
Z3950_RECSYN_MIME_TIFF = [1, 2, 840, 10003, 5, 109, 4]
Z3950_RECSYN_MIME_TIFF_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 4])
Z3950_RECSYN_MIME_XML = [1, 2, 840, 10003, 5, 109, 10]
Z3950_RECSYN_MIME_XML_ov = asn1.OidVal([1, 2, 840, 10003, 5, 109, 10])
Z3950_RECSYN_NACSISCATP = [1, 2, 840, 10003, 5, 30]
Z3950_RECSYN_NACSISCATP_ov = asn1.OidVal([1, 2, 840, 10003, 5, 30])
Z3950_RECSYN_NORMARC = [1, 2, 840, 10003, 5, 12]
Z3950_RECSYN_NORMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 12])
Z3950_RECSYN_OPAC = [1, 2, 840, 10003, 5, 102]
Z3950_RECSYN_OPAC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 102])
Z3950_RECSYN_PICAMARC = [1, 2, 840, 10003, 5, 19]
Z3950_RECSYN_PICAMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 19])
Z3950_RECSYN_RUSMARC = [1, 2, 840, 10003, 5, 28]
Z3950_RECSYN_RUSMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 28])
Z3950_RECSYN_SBNMARC = [1, 2, 840, 10003, 5, 18]
Z3950_RECSYN_SBNMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 18])
Z3950_RECSYN_SIGLEMARC = [1, 2, 840, 10003, 5, 26]
Z3950_RECSYN_SIGLEMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 26])
Z3950_RECSYN_SQL = [1, 2, 840, 10003, 5, 111]
Z3950_RECSYN_SQL_ov = asn1.OidVal([1, 2, 840, 10003, 5, 111])
Z3950_RECSYN_SUMMARY = [1, 2, 840, 10003, 5, 103]
Z3950_RECSYN_SUMMARY_ov = asn1.OidVal([1, 2, 840, 10003, 5, 103])
Z3950_RECSYN_SUTRS = [1, 2, 840, 10003, 5, 101]
Z3950_RECSYN_SUTRS_ov = asn1.OidVal([1, 2, 840, 10003, 5, 101])
Z3950_RECSYN_SWEMARC = [1, 2, 840, 10003, 5, 25]
Z3950_RECSYN_SWEMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 25])
Z3950_RECSYN_UKMARC = [1, 2, 840, 10003, 5, 11]
Z3950_RECSYN_UKMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 11])
Z3950_RECSYN_UNIMARC = [1, 2, 840, 10003, 5, 1]
Z3950_RECSYN_UNIMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 1])
Z3950_RECSYN_USMARC = [1, 2, 840, 10003, 5, 10]
Z3950_RECSYN_USMARC_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10])
Z3950_RECSYN_USMARC_AUTH = [1, 2, 840, 10003, 5, 10, 2]
Z3950_RECSYN_USMARC_AUTH_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 2])
Z3950_RECSYN_USMARC_BIBLIO = [1, 2, 840, 10003, 5, 10, 1]
Z3950_RECSYN_USMARC_BIBLIO_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 1])
Z3950_RECSYN_USMARC_CLASS = [1, 2, 840, 10003, 5, 10, 5]
Z3950_RECSYN_USMARC_CLASS_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 5])
Z3950_RECSYN_USMARC_COMMUNITY = [1, 2, 840, 10003, 5, 10, 4]
Z3950_RECSYN_USMARC_COMMUNITY_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 4])
Z3950_RECSYN_USMARC_HOLD = [1, 2, 840, 10003, 5, 10, 3]
Z3950_RECSYN_USMARC_HOLD_ov = asn1.OidVal([1, 2, 840, 10003, 5, 10, 3])
Z3950_RECSYN_ZMIME = [1, 2, 840, 10003, 5, 110]
Z3950_RECSYN_ZMIME_ov = asn1.OidVal([1, 2, 840, 10003, 5, 110])
Z3950_RECSYN_ZMIME_TIFFB = [1, 2, 840, 10003, 5, 110, 1]
Z3950_RECSYN_ZMIME_TIFFB_ov = asn1.OidVal([1, 2, 840, 10003, 5, 110, 1])
Z3950_RECSYN_ZMIME_WAV = [1, 2, 840, 10003, 5, 110, 2]
Z3950_RECSYN_ZMIME_WAV_ov = asn1.OidVal([1, 2, 840, 10003, 5, 110, 2])
Z3950_RRF = [1, 2, 840, 10003, 7]
Z3950_RRF_ov = asn1.OidVal([1, 2, 840, 10003, 7])
Z3950_RRF_RESOURCE1 = [1, 2, 840, 10003, 7, 1]
Z3950_RRF_RESOURCE1_ov = asn1.OidVal([1, 2, 840, 10003, 7, 1])
Z3950_RRF_RESOURCE2 = [1, 2, 840, 10003, 7, 2]
Z3950_RRF_RESOURCE2_ov = asn1.OidVal([1, 2, 840, 10003, 7, 2])
Z3950_SCHEMA = [1, 2, 840, 10003, 13]
Z3950_SCHEMA_ov = asn1.OidVal([1, 2, 840, 10003, 13])
Z3950_SCHEMA_CIMI = [1, 2, 840, 10003, 13, 5]
Z3950_SCHEMA_CIMI_ov = asn1.OidVal([1, 2, 840, 10003, 13, 5])
Z3950_SCHEMA_COLLECTIONS = [1, 2, 840, 10003, 13, 3]
Z3950_SCHEMA_COLLECTIONS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 3])
Z3950_SCHEMA_EDIT = [1, 2, 840, 10003, 13, 1]
Z3950_SCHEMA_EDIT_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
Z3950_SCHEMA_GEO = [1, 2, 840, 10003, 13, 4]
Z3950_SCHEMA_GEO_ov = asn1.OidVal([1, 2, 840, 10003, 13, 4])
Z3950_SCHEMA_GILS = [1, 2, 840, 10003, 13, 2]
Z3950_SCHEMA_GILS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 2])
Z3950_SCHEMA_HOLDINGS = [1, 2, 840, 10003, 13, 7]
Z3950_SCHEMA_HOLDINGS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7])
Z3950_SCHEMA_HOLDINGS_11 = [1, 2, 840, 10003, 13, 7, 1]
Z3950_SCHEMA_HOLDINGS_11_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7, 1])
Z3950_SCHEMA_HOLDINGS_12 = [1, 2, 840, 10003, 13, 7, 2]
Z3950_SCHEMA_HOLDINGS_12_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7, 2])
Z3950_SCHEMA_HOLDINGS_14 = [1, 2, 840, 10003, 13, 7, 4]
Z3950_SCHEMA_HOLDINGS_14_ov = asn1.OidVal([1, 2, 840, 10003, 13, 7, 4])
Z3950_SCHEMA_INSERT = [1, 2, 840, 10003, 13, 1]
Z3950_SCHEMA_INSERT_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
Z3950_SCHEMA_UPDATE = [1, 2, 840, 10003, 13, 6]
Z3950_SCHEMA_UPDATE_ov = asn1.OidVal([1, 2, 840, 10003, 13, 6])
Z3950_SCHEMA_WAIS = [1, 2, 840, 10003, 13, 1]
Z3950_SCHEMA_WAIS_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
Z3950_SCHEMA_ZTHES = [1, 2, 840, 10003, 13, 1]
Z3950_SCHEMA_ZTHES_ov = asn1.OidVal([1, 2, 840, 10003, 13, 1])
Z3950_SPEC = [1, 2, 840, 10003, 11]
Z3950_SPEC_ov = asn1.OidVal([1, 2, 840, 10003, 11])
Z3950_SPEC_ESPEC1 = [1, 2, 840, 10003, 11, 1]
Z3950_SPEC_ESPEC1_ov = asn1.OidVal([1, 2, 840, 10003, 11, 1])
Z3950_SPEC_ESPEC2 = [1, 2, 840, 10003, 11, 2]
Z3950_SPEC_ESPEC2_ov = asn1.OidVal([1, 2, 840, 10003, 11, 2])
Z3950_SPEC_ESPECQ = [1, 2, 840, 10003, 11, 3]
Z3950_SPEC_ESPECQ_ov = asn1.OidVal([1, 2, 840, 10003, 11, 3])
Z3950_TAGSET = [1, 2, 840, 10003, 14]
Z3950_TAGSET_ov = asn1.OidVal([1, 2, 840, 10003, 14])
Z3950_TAGSET_CIMI = [1, 2, 840, 10003, 14, 6]
Z3950_TAGSET_CIMI_ov = asn1.OidVal([1, 2, 840, 10003, 14, 6])
Z3950_TAGSET_COLLECTIONS = [1, 2, 840, 10003, 14, 5]
Z3950_TAGSET_COLLECTIONS_ov = asn1.OidVal([1, 2, 840, 10003, 14, 5])
Z3950_TAGSET_G = [1, 2, 840, 10003, 14, 2]
Z3950_TAGSET_G_ov = asn1.OidVal([1, 2, 840, 10003, 14, 2])
Z3950_TAGSET_GILS = [1, 2, 840, 10003, 14, 4]
Z3950_TAGSET_GILS_ov = asn1.OidVal([1, 2, 840, 10003, 14, 4])
Z3950_TAGSET_M = [1, 2, 840, 10003, 14, 1]
Z3950_TAGSET_M_ov = asn1.OidVal([1, 2, 840, 10003, 14, 1])
Z3950_TAGSET_STAS = [1, 2, 840, 10003, 14, 3]
Z3950_TAGSET_STAS_ov = asn1.OidVal([1, 2, 840, 10003, 14, 3])
Z3950_TAGSET_UPDATE = [1, 2, 840, 10003, 14, 7]
Z3950_TAGSET_UPDATE_ov = asn1.OidVal([1, 2, 840, 10003, 14, 7])
Z3950_TAGSET_ZTHES = [1, 2, 840, 10003, 14, 8]
Z3950_TAGSET_ZTHES_ov = asn1.OidVal([1, 2, 840, 10003, 14, 8])
Z3950_TRANSFER = [1, 2, 840, 10003, 6]
Z3950_TRANSFER_ov = asn1.OidVal([1, 2, 840, 10003, 6])
Z3950_USR = [1, 2, 840, 10003, 10]
Z3950_USR_ov = asn1.OidVal([1, 2, 840, 10003, 10])
Z3950_USR_AUTHFILE = [1, 2, 840, 10003, 10, 11]
Z3950_USR_AUTHFILE_ov = asn1.OidVal([1, 2, 840, 10003, 10, 11])
Z3950_USR_CHARSETNEG = [1, 2, 840, 10003, 10, 2]
Z3950_USR_CHARSETNEG_ov = asn1.OidVal([1, 2, 840, 10003, 10, 2])
Z3950_USR_DATETIME = [1, 2, 840, 10003, 10, 6]
Z3950_USR_DATETIME_ov = asn1.OidVal([1, 2, 840, 10003, 10, 6])
Z3950_USR_EDITACTIONQUAL = [1, 2, 840, 10003, 10, 10]
Z3950_USR_EDITACTIONQUAL_ov = asn1.OidVal([1, 2, 840, 10003, 10, 10])
Z3950_USR_INFO1 = [1, 2, 840, 10003, 10, 3]
Z3950_USR_INFO1_ov = asn1.OidVal([1, 2, 840, 10003, 10, 3])
Z3950_USR_INSERTACTIONQUAL = [1, 2, 840, 10003, 10, 9]
Z3950_USR_INSERTACTIONQUAL_ov = asn1.OidVal([1, 2, 840, 10003, 10, 9])
Z3950_USR_PRIVATE = [1, 2, 840, 10003, 10, 1000]
Z3950_USR_PRIVATE_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1000])
Z3950_USR_PRIVATE_OCLC = [1, 2, 840, 10003, 10, 1000, 17]
Z3950_USR_PRIVATE_OCLC_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17])
Z3950_USR_PRIVATE_OCLC_INFO = [1, 2, 840, 10003, 10, 1000, 17, 1]
Z3950_USR_PRIVATE_OCLC_INFO_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1000, 17, 1])
Z3950_USR_SEARCHRES1 = [1, 2, 840, 10003, 10, 1]
Z3950_USR_SEARCHRES1_ov = asn1.OidVal([1, 2, 840, 10003, 10, 1])
Z3950_USR_SEARCHTERMS1 = [1, 2, 840, 10003, 10, 4]
Z3950_USR_SEARCHTERMS1_ov = asn1.OidVal([1, 2, 840, 10003, 10, 4])
Z3950_USR_SEARCHTERMS2 = [1, 2, 840, 10003, 10, 5]
Z3950_USR_SEARCHTERMS2_ov = asn1.OidVal([1, 2, 840, 10003, 10, 5])
Z3950_VAR = [1, 2, 840, 10003, 12]
Z3950_VAR_ov = asn1.OidVal([1, 2, 840, 10003, 12])
Z3950_VAR_VARIANT1 = [1, 2, 840, 10003, 12, 1]
Z3950_VAR_VARIANT1_ov = asn1.OidVal([1, 2, 840, 10003, 12, 1]) | unknown | codeparrot/codeparrot-clean | ||
kind: BUG FIXES
body: 'cli: Fixed `terraform init -json` to properly format all backend configuration messages as JSON instead of plain text'
time: 2025-11-19T10:30:00.000000Z
custom:
Issue: "37911" | unknown | github | https://github.com/hashicorp/terraform | .changes/v1.15/BUG FIXES-20251119-103000.yaml |
type: lib/benchmark_driver/runner/ractor
benchmark:
ractor_const: Object
ractor: 1 | unknown | github | https://github.com/ruby/ruby | benchmark/ractor_const.yml |
# jhbuild - a tool to ease building collections of source packages
# Copyright (C) 2008 Frederic Peters
#
# build.py: custom builder pages
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import urllib, time
from twisted.web import html
from twisted.web.util import Redirect
from buildbot.status.web.builder import BuildersResource, StatusResourceBuilder
from buildbot.status.web.base import make_row, make_force_build_form, \
path_to_slave, path_to_builder
from build import JhBuildsResource
class JhStatusResourceBuilder(StatusResourceBuilder):
    """Status page for a single jhbuild builder.

    jhbuild builder names end with the slave name; getTitle() splits the
    name back apart so pages can be titled "project - module @ slave", and
    getChild() routes the "builds" path to the custom JhBuildsResource.
    """

    def getTitle(self, request):
        """Return "project - module @ slave" when the builder name ends
        with a known slave name, otherwise "project - builder"."""
        buildbot_service = request.site.buildbot_service
        builder_name = self.builder_status.getName()
        for slave in buildbot_service.slaves:
            if builder_name.endswith(slave):
                slave_name = slave
                # drop the slave suffix plus the single separator character
                module_name = builder_name[:-len(slave)-1]
                break
        else:
            slave_name = None
            module_name = None
        status = self.getStatus(request)
        p = status.getProjectName()
        if slave_name:
            return '%s - %s @ %s' % (
                p, module_name, slave_name)
        return '%s - %s' % (p, builder_name)

    def body(self, req):
        """Render the builder page HTML: current builds, recent builds,
        per-slave connection status and, when a builder control is
        available, the force-build and ping forms."""
        b = self.builder_status
        control = self.builder_control
        slaves = b.getSlaves()
        connected_slaves = [s for s in slaves if s.isConnected()]
        data = ''
        # the first section shows builds which are currently running, if any.
        current = b.getCurrentBuilds()
        if current:
            data += "<h2>Currently Building:</h2>\n"
            data += "<ul>\n"
            for build in current:
                data += " <li>" + self.build_line(build, req) + "</li>\n"
            data += "</ul>\n"
        else:
            data += "<h2>No current builds</h2>\n"
        # Then a section with the last 5 builds, with the most recent build
        # distinguished from the rest.
        data += "<h2>Recent Builds</h2>\n"
        data += "<ul>\n"
        # number of finished builds to show; overridable via ?numbuilds=N
        numbuilds = int(req.args.get('numbuilds', ['5'])[0])
        # (was num_builds=int(numbuilds): numbuilds is already an int)
        for i, build in enumerate(b.generateFinishedBuilds(num_builds=numbuilds)):
            data += " <li>" + self.make_line(req, build, False) + "</li>\n"
            if i == 0:
                data += "<br />\n" # separator
                # TODO: or empty list?
        data += "</ul>\n"
        data += "<h2>Buildslaves:</h2>\n"
        data += "<ol>\n"
        for slave in slaves:
            slaveurl = path_to_slave(req, slave)
            data += "<li><b><a href=\"%s\">%s</a></b>: " % (html.escape(slaveurl), html.escape(slave.getName()))
            if slave.isConnected():
                data += "CONNECTED\n"
                if slave.getAdmin():
                    data += make_row("Admin:", html.escape(slave.getAdmin()))
                if slave.getHost():
                    data += "<span class='label'>Host info:</span>\n"
                    data += html.PRE(html.escape(slave.getHost()))
            else:
                data += ("NOT CONNECTED\n")
            data += "</li>\n"
        data += "</ol>\n"
        # Force-build form only makes sense with a control and a live slave.
        if control is not None and connected_slaves:
            forceURL = path_to_builder(req, b) + '/force'
            data += make_force_build_form(forceURL, self.isUsingUserPasswd(req))
        elif control is not None:
            data += """
            <p>All buildslaves appear to be offline, so it's not possible
            to force this build to execute at this time.</p>
            """
        if control is not None:
            pingURL = path_to_builder(req, b) + '/ping'
            data += """
            <form method="post" action="%s" class='command pingbuilder'>
            <p>To ping the buildslave(s), push the 'Ping' button</p>
            <input type="submit" value="Ping Builder" />
            </form>
            """ % pingURL
        return data

    def getChild(self, path, req):
        """Serve the jhbuild-specific builds resource under .../builds;
        defer to the stock StatusResourceBuilder for everything else."""
        if path == 'builds':
            return JhBuildsResource(self.builder_status, self.builder_control)
        return StatusResourceBuilder.getChild(self, path, req)
class JhBuildersResource(BuildersResource):
    """Builder listing that serves JhStatusResourceBuilder pages for
    known builder names."""

    def getChild(self, path, req):
        """Route a child request: empty path redirects up one level, a
        known builder name gets the jhbuild-specific status page, and
        anything else falls back to the stock BuildersResource."""
        if path == '':
            return Redirect('..')
        status = self.getStatus(req)
        if path not in status.getBuilderNames():
            return BuildersResource.getChild(self, path, req)
        builder_control = None
        control = self.getControl(req)
        if control:
            builder_control = control.getBuilder(path)
        return JhStatusResourceBuilder(status.getBuilder(path), builder_control)
from agon_ratings.models import OverallRating
from dialogos.models import Comment
from django.contrib.contenttypes.models import ContentType
from django.db.models import Avg
from haystack import indexes
from geonode.maps.models import Layer
class LayerIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index for geonode ``Layer`` resources.

    Field declarations mirror attributes on the Layer model (via
    ``model_attr``); the ``prepare_*`` hooks compute derived values
    (ratings, comment counts, facet type/subtype) at indexing time.
    """

    id = indexes.IntegerField(model_attr='resourcebase_ptr_id')
    abstract = indexes.CharField(model_attr="abstract", boost=1.5)
    category__gn_description = indexes.CharField(model_attr="category__gn_description", null=True)
    csw_type = indexes.CharField(model_attr="csw_type")
    csw_wkt_geometry = indexes.CharField(model_attr="csw_wkt_geometry")
    detail_url = indexes.CharField(model_attr="get_absolute_url")
    distribution_description = indexes.CharField(model_attr="distribution_description", null=True)
    distribution_url = indexes.CharField(model_attr="distribution_url", null=True)
    owner__username = indexes.CharField(model_attr="owner", faceted=True, null=True)
    # NOTE(review): popular_count, share_count and rating were each declared
    # twice in this class with identical definitions; the duplicates have
    # been removed, keeping a single declaration of each.
    popular_count = indexes.IntegerField(
        model_attr="popular_count",
        default=0,
        boost=20)
    share_count = indexes.IntegerField(model_attr="share_count", default=0)
    # NOTE(review): declared IntegerField but prepare_rating returns a float
    # average -- confirm the search backend tolerates this before changing.
    rating = indexes.IntegerField(null=True)
    srid = indexes.CharField(model_attr="srid")
    supplemental_information = indexes.CharField(model_attr="supplemental_information", null=True)
    thumbnail_url = indexes.CharField(model_attr="thumbnail_url", null=True)
    uuid = indexes.CharField(model_attr="uuid")
    title = indexes.CharField(model_attr="title", boost=2)
    date = indexes.DateTimeField(model_attr="date")
    # Primary document field, rendered from a search index template.
    text = indexes.EdgeNgramField(document=True, use_template=True, stored=False)
    type = indexes.CharField(faceted=True)
    subtype = indexes.CharField(faceted=True)
    typename = indexes.CharField(model_attr='typename')
    title_sortable = indexes.CharField(indexed=False, stored=False)  # Necessary for sorting
    category = indexes.CharField(
        model_attr="category__identifier",
        faceted=True,
        null=True,
        stored=True)
    # Bounding box corners, used for spatial filtering.
    bbox_left = indexes.FloatField(model_attr="bbox_x0", null=True, stored=False)
    bbox_right = indexes.FloatField(model_attr="bbox_x1", null=True, stored=False)
    bbox_bottom = indexes.FloatField(model_attr="bbox_y0", null=True, stored=False)
    bbox_top = indexes.FloatField(model_attr="bbox_y1", null=True, stored=False)
    temporal_extent_start = indexes.DateTimeField(
        model_attr="temporal_extent_start",
        null=True,
        stored=False)
    temporal_extent_end = indexes.DateTimeField(
        model_attr="temporal_extent_end",
        null=True,
        stored=False)
    keywords = indexes.MultiValueField(
        model_attr="keyword_slug_list",
        null=True,
        faceted=True,
        stored=True)
    regions = indexes.MultiValueField(
        model_attr="region_name_list",
        null=True,
        faceted=True,
        stored=True)
    num_ratings = indexes.IntegerField(stored=False)
    num_comments = indexes.IntegerField(stored=False)

    def get_model(self):
        """Model class indexed by this SearchIndex."""
        return Layer

    def prepare_type(self, obj):
        """Facet value: every record produced by this index is a "layer"."""
        return "layer"

    def prepare_subtype(self, obj):
        """Map the layer's storeType to a coarse subtype facet value.

        Returns None (field left empty) for unrecognised store types.
        """
        if obj.storeType == "dataStore":
            return "vector"
        elif obj.storeType == "coverageStore":
            return "raster"
        elif obj.storeType == "remoteStore":
            return "remote"
        return None

    def prepare_rating(self, obj):
        """Average overall rating for the layer; 0.0 when unrated."""
        ct = ContentType.objects.get_for_model(obj)
        try:
            rating = OverallRating.objects.filter(
                object_id=obj.pk,
                content_type=ct
            ).aggregate(r=Avg("rating"))["r"]
            # aggregate() yields None when there are no ratings
            return float(str(rating or "0"))
        except OverallRating.DoesNotExist:
            return 0.0

    def prepare_num_ratings(self, obj):
        """Number of individual OverallRating rows for the layer."""
        ct = ContentType.objects.get_for_model(obj)
        try:
            return OverallRating.objects.filter(
                object_id=obj.pk,
                content_type=ct
            ).all().count()
        except OverallRating.DoesNotExist:
            return 0

    def prepare_num_comments(self, obj):
        """Number of comments on the layer; best effort, 0 on any failure."""
        try:
            return Comment.objects.filter(
                object_id=obj.pk,
                content_type=ContentType.objects.get_for_model(obj)
            ).all().count()
        # Was a bare ``except:``; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed while staying best-effort otherwise.
        except Exception:
            return 0

    def prepare_title_sortable(self, obj):
        """Lower-cased title used for case-insensitive sorting."""
        return obj.title.lower()
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Classes for hadoop-tos object response.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "YARN", "Hive"})
@InterfaceStability.Evolving
package org.apache.hadoop.fs.tosfs.object.response;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; | java | github | https://github.com/apache/hadoop | hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/object/response/package-info.java |
"""
Amd64 Support Module
"""
# Copyright (C) 2007 Invisigoth - See LICENSE file for details
import struct
import envi.archs.amd64 as e_amd64
import vtrace.archs.i386 as vt_i386
class Amd64Mixin(
    e_amd64.Amd64Module,
    e_amd64.Amd64RegisterContext,
    vt_i386.i386WatchMixin):
    """
    Tracer mixin providing amd64 architecture support: module info,
    register context and the i386-style hardware watchpoint mixin.
    """
    def __init__(self):
        e_amd64.Amd64Module.__init__(self)
        e_amd64.Amd64RegisterContext.__init__(self)
        vt_i386.i386WatchMixin.__init__(self)
        self.setMeta('Architecture','amd64')

    def archGetStackTrace(self):
        """
        Walk saved RBP frame pointers and return a list of (rip, rbp)
        tuples, starting with the current frame.  The walk stops at a
        NULL frame pointer, an unreadable frame, or after a sanity
        limit of 1000 frames (guards against corrupt/looping chains).
        """
        self.requireAttached()
        current = 0
        sanity = 1000
        frames = []
        rbp = self.getRegisterByName("rbp")
        rip = self.getRegisterByName("rip")
        frames.append((rip, rbp))
        while rbp != 0 and current < sanity:
            try:
                # Each frame stores [saved rbp][return address] at rbp.
                rbp, rip = self.readMemoryFormat(rbp, "<QQ")
            except Exception:
                # Was a bare except: an unreadable frame simply ends the
                # walk (best effort) without masking SystemExit et al.
                break
            frames.append((rip, rbp))
            current += 1
        return frames
import { makeMap } from './makeMap'
// Comma-separated list of global names that expressions are allowed to
// reference; consumed by makeMap() below to build the lookup predicate.
const GLOBALS_ALLOWED =
  'Infinity,undefined,NaN,isFinite,isNaN,parseFloat,parseInt,decodeURI,' +
  'decodeURIComponent,encodeURI,encodeURIComponent,Math,Number,Date,Array,' +
  'Object,Boolean,String,RegExp,Map,Set,JSON,Intl,BigInt,console,Error,Symbol'
// Predicate: is `key` one of the allowed global names listed above?
export const isGloballyAllowed: (key: string) => boolean =
  /*@__PURE__*/ makeMap(GLOBALS_ALLOWED)
/** @deprecated use `isGloballyAllowed` instead */
export const isGloballyWhitelisted: (key: string) => boolean = isGloballyAllowed
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.docs.core.beans.dependencies.beansfactorylazyinit;
/**
 * Intentionally empty sample bean for the {@code beansfactorylazyinit}
 * documentation package; the name stands in for a bean that is presumably
 * costly to construct and therefore a candidate for lazy initialization.
 */
public class ExpensiveToCreateBean {
}
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package stackmigrate
import (
"encoding/json"
"fmt"
"path/filepath"
"sort"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/hashicorp/go-slug/sourceaddrs"
"github.com/hashicorp/go-slug/sourcebundle"
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty-debug/ctydebug"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/collections"
"github.com/hashicorp/terraform/internal/depsfile"
"github.com/hashicorp/terraform/internal/getproviders/providerreqs"
"github.com/hashicorp/terraform/internal/providers"
"github.com/hashicorp/terraform/internal/stacks/stackaddrs"
"github.com/hashicorp/terraform/internal/stacks/stackconfig"
stacks_testing_provider "github.com/hashicorp/terraform/internal/stacks/stackruntime/testing"
"github.com/hashicorp/terraform/internal/stacks/stackstate"
"github.com/hashicorp/terraform/internal/states"
"github.com/hashicorp/terraform/internal/tfdiags"
)
func TestMigrate(t *testing.T) {
deposedKey := states.NewDeposedKey()
tcs := map[string]struct {
path string
state func(ss *states.SyncState)
resources map[string]string
modules map[string]string
expected []stackstate.AppliedChange
expectedDiags tfdiags.Diagnostics
}{
"module": {
path: filepath.Join("with-single-input", "valid"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.AbsResourceInstance{
Module: addrs.ModuleInstance{
{
Name: "child",
},
},
Resource: addrs.ResourceInstance{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
},
Key: addrs.NoKey,
},
},
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceDeposed(
addrs.AbsResourceInstance{
Module: addrs.ModuleInstance{
{
Name: "child",
},
},
Resource: addrs.ResourceInstance{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
},
Key: addrs.NoKey,
},
},
deposedKey,
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
},
modules: map[string]string{
"child": "self",
},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.self"),
ComponentInstanceAddr: mustAbsComponentInstance("component.self"),
OutputValues: map[addrs.OutputValue]cty.Value{},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
Component: mustAbsResourceInstanceObject("component.self.testing_resource.data").Component,
Item: addrs.AbsResourceInstanceObject{
ResourceInstance: mustAbsResourceInstanceObject("component.self.testing_resource.data").Item.ResourceInstance,
DeposedKey: deposedKey,
},
},
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
},
"root resources": {
path: filepath.Join("with-single-input", "valid"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceDeposed(
addrs.AbsResourceInstance{
Module: addrs.RootModuleInstance,
Resource: addrs.ResourceInstance{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
},
Key: addrs.NoKey,
},
},
deposedKey,
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.AbsResourceInstance{
Module: addrs.RootModuleInstance,
Resource: addrs.ResourceInstance{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
},
Key: addrs.NoKey,
},
},
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
"testing_resource.data": "component.self",
},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.self"),
ComponentInstanceAddr: mustAbsComponentInstance("component.self"),
OutputValues: map[addrs.OutputValue]cty.Value{},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: stackaddrs.AbsResourceInstanceObject{
Component: mustAbsResourceInstanceObject("component.self.testing_resource.data").Component,
Item: addrs.AbsResourceInstanceObject{
ResourceInstance: mustAbsResourceInstanceObject("component.self.testing_resource.data").Item.ResourceInstance,
DeposedKey: deposedKey,
},
},
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
},
"component_dependency": {
path: filepath.Join("for-stacks-migrate", "with-dependency", "input-dependency"),
state: func(ss *states.SyncState) {
ss.SetOutputValue(addrs.AbsOutputValue{
Module: addrs.RootModuleInstance,
OutputValue: addrs.OutputValue{Name: "output"},
}, cty.StringVal("before"), false)
ss.SetResourceInstanceCurrent(
addrs.AbsResourceInstance{
Module: addrs.RootModuleInstance,
Resource: addrs.ResourceInstance{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
},
Key: addrs.NoKey,
},
},
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.AbsResourceInstance{
Module: addrs.RootModuleInstance,
Resource: addrs.ResourceInstance{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
},
Key: addrs.IntKey(0),
},
},
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.AbsResourceInstance{
Module: addrs.RootModuleInstance,
Resource: addrs.ResourceInstance{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
},
Key: addrs.IntKey(1),
},
},
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
"testing_resource.data": "component.parent",
"testing_resource.another[0]": "component.child",
"testing_resource.another[1]": "component.child",
},
modules: map[string]string{},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.child"),
ComponentInstanceAddr: mustAbsComponentInstance("component.child"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
Dependencies: collections.NewSet(mustAbsComponent("component.parent")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child.testing_resource.another[0]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child.testing_resource.another[1]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.parent"),
ComponentInstanceAddr: mustAbsComponentInstance("component.parent"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
Dependents: collections.NewSet(mustAbsComponent("component.child")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
},
"nested module resources": {
path: filepath.Join("for-stacks-migrate", "with-nested-module"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
}.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
}.Instance(addrs.IntKey(1)).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
for _, child := range []string{"child_mod", "child_mod2"} {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "child_data",
}.Instance(addrs.NoKey).Absolute(addrs.ModuleInstance{
{
Name: child,
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another_child_data",
}.Instance(addrs.IntKey(0)).Absolute(addrs.ModuleInstance{
{
Name: child,
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another_child_data",
}.Instance(addrs.IntKey(1)).Absolute(addrs.ModuleInstance{
{
Name: child,
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
}
},
resources: map[string]string{
"testing_resource.data": "component.parent",
"testing_resource.another[0]": "component.parent",
"testing_resource.another[1]": "component.parent",
},
modules: map[string]string{
"child_mod": "child",
"child_mod2": "child2",
},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.child"),
ComponentInstanceAddr: mustAbsComponentInstance("component.child"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
Dependencies: collections.NewSet(mustAbsComponent("component.parent")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child.testing_resource.another_child_data[0]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child.testing_resource.another_child_data[1]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child.testing_resource.child_data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.child2"),
ComponentInstanceAddr: mustAbsComponentInstance("component.child2"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
Dependencies: collections.NewSet(mustAbsComponent("component.parent")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child2.testing_resource.another_child_data[0]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child2.testing_resource.another_child_data[1]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.child2.testing_resource.child_data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.parent"),
ComponentInstanceAddr: mustAbsComponentInstance("component.parent"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
Dependents: collections.NewSet(mustAbsComponent("component.child"), mustAbsComponent("component.child2")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.another[0]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.another[1]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
},
"missing config resource": {
path: filepath.Join("for-stacks-migrate", "with-nested-module"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
}.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
}.Instance(addrs.IntKey(1)).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "for_child",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
"testing_resource.data": "component.parent",
"testing_resource.another[0]": "component.parent",
"testing_resource.another[1]": "component.parent",
"testing_resource.for_child": "component.child",
},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.child"),
ComponentInstanceAddr: mustAbsComponentInstance("component.child"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
Dependencies: collections.NewSet(mustAbsComponent("component.parent")),
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.parent"),
ComponentInstanceAddr: mustAbsComponentInstance("component.parent"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
Dependents: collections.NewSet(mustAbsComponent("component.child")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.another[0]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.another[1]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
expectedDiags: tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Resource mapped to non-existent target",
Detail: "Could not migrate resource \"testing_resource.for_child\". Target resource \"testing_resource.for_child\" not found in component \"component.child\".",
}),
},
"missing mapping for state resource": {
path: filepath.Join("for-stacks-migrate", "with-nested-module"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
}.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "another",
}.Instance(addrs.IntKey(1)).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "for_child",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
"testing_resource.data": "component.parent",
"testing_resource.another[0]": "component.parent",
"testing_resource.another[1]": "component.parent",
},
modules: map[string]string{},
expected: []stackstate.AppliedChange{
// this component has a dependent "child", but that other component
// is not present in the modules mapping, so it is not included here
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.parent"),
ComponentInstanceAddr: mustAbsComponentInstance("component.parent"),
OutputValues: map[addrs.OutputValue]cty.Value{
{Name: "id"}: cty.DynamicVal,
},
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "id"}: cty.DynamicVal,
{Name: "input"}: cty.DynamicVal,
},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.another[0]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.another[1]"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.parent.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "hello",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
expectedDiags: tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Resource not found",
Detail: "Resource \"testing_resource.for_child\" exists in state, but was not included in any provided mapping.",
}),
},
"config depends on": {
path: filepath.Join("for-stacks-migrate", "with-depends-on"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "second",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "third",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
"testing_resource.data": "component.first",
"testing_resource.second": "component.second",
"testing_resource.third": "component.second",
},
modules: map[string]string{},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.first"),
ComponentInstanceAddr: mustAbsComponentInstance("component.first"),
OutputValues: make(map[addrs.OutputValue]cty.Value),
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "input"}: cty.DynamicVal,
{Name: "id"}: cty.DynamicVal,
},
Dependents: collections.NewSet(mustAbsComponent("component.second")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.first.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.second"),
ComponentInstanceAddr: mustAbsComponentInstance("component.second"),
OutputValues: make(map[addrs.OutputValue]cty.Value),
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "input"}: cty.DynamicVal,
{Name: "id"}: cty.DynamicVal,
},
Dependencies: collections.NewSet(mustAbsComponent("component.first")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.second.testing_resource.second"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.second.testing_resource.third"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
expectedDiags: tfdiags.Diagnostics{}.Append(),
},
"unsupported component ref": {
path: filepath.Join("for-stacks-migrate", "with-depends-on"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "data",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "second",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "third",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
"testing_resource.data": "component.first",
"testing_resource.second": "component.second",
"testing_resource.third": "stack.embedded.component.self",
},
modules: map[string]string{},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.first"),
ComponentInstanceAddr: mustAbsComponentInstance("component.first"),
OutputValues: make(map[addrs.OutputValue]cty.Value),
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "input"}: cty.DynamicVal,
{Name: "id"}: cty.DynamicVal,
},
Dependents: collections.NewSet(mustAbsComponent("component.second")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.first.testing_resource.data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.second"),
ComponentInstanceAddr: mustAbsComponentInstance("component.second"),
OutputValues: make(map[addrs.OutputValue]cty.Value),
InputVariables: map[addrs.InputVariable]cty.Value{
{Name: "input"}: cty.DynamicVal,
{Name: "id"}: cty.DynamicVal,
},
Dependencies: collections.NewSet(mustAbsComponent("component.first")),
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.second.testing_resource.second"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "foo",
"value": "depends_test",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
expectedDiags: tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid component instance",
Detail: "Only root component instances are allowed, got \"stack.embedded.component.self\"",
}),
},
"child module as component source": {
path: filepath.Join("for-stacks-migrate", "child-module-as-component-source"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "root_id",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "root_id",
"value": "root_output",
}),
},
mustDefaultRootProvider("testing"),
)
childProv := mustDefaultRootProvider("testing")
childProv.Module = addrs.Module{"child_module"}
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "child_data",
}.Instance(addrs.NoKey).Absolute(addrs.ModuleInstance{
{
Name: "child_module",
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "child_data",
"value": "child_output",
}),
},
childProv,
)
},
resources: map[string]string{
"testing_resource.root_id": "component.self",
"testing_resource.child_data": "component.self", // this should just be ignored
},
modules: map[string]string{
"child_module": "triage",
},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.self"),
ComponentInstanceAddr: mustAbsComponentInstance("component.self"),
OutputValues: map[addrs.OutputValue]cty.Value{},
InputVariables: map[addrs.InputVariable]cty.Value{},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.self.testing_resource.root_id"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "root_id",
"value": "root_output",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.triage"),
ComponentInstanceAddr: mustAbsComponentInstance("component.triage"),
OutputValues: map[addrs.OutputValue]cty.Value{},
InputVariables: map[addrs.InputVariable]cty.Value{
addrs.InputVariable{Name: "input"}: cty.DynamicVal,
},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.triage.testing_resource.child_data"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "child_data",
"value": "child_output",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
},
"unclaimed resources fall into modules": {
path: filepath.Join("for-stacks-migrate", "multiple-components"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "one",
}.Instance(addrs.NoKey).Absolute(addrs.ModuleInstance{
{
Name: "self",
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "one",
"value": "one",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "resource",
}.Instance(addrs.NoKey).Absolute(addrs.ModuleInstance{
{
Name: "self",
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "two",
"value": "two",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
// this specific resource goes to component.one
"module.self.testing_resource.one": "component.one.testing_resource.resource",
},
modules: map[string]string{
"self": "two", // all other resources go to component.two
},
expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.one"),
ComponentInstanceAddr: mustAbsComponentInstance("component.one"),
OutputValues: map[addrs.OutputValue]cty.Value{},
InputVariables: map[addrs.InputVariable]cty.Value{},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.one.testing_resource.resource"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "one",
"value": "one",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.two"),
ComponentInstanceAddr: mustAbsComponentInstance("component.two"),
OutputValues: map[addrs.OutputValue]cty.Value{},
InputVariables: map[addrs.InputVariable]cty.Value{},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.two.testing_resource.resource"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "two",
"value": "two",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
},
"single component": {
path: filepath.Join("for-stacks-migrate", "single-component"),
state: func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "one",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "one",
"value": "one",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "two",
}.Instance(addrs.NoKey).Absolute(addrs.ModuleInstance{
{
Name: "two",
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "two",
"value": "two",
}),
},
mustDefaultRootProvider("testing"),
)
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "testing_resource",
Name: "three",
}.Instance(addrs.NoKey).Absolute(addrs.ModuleInstance{
{
Name: "three",
},
}),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "three",
"value": "three",
}),
},
mustDefaultRootProvider("testing"),
)
},
resources: map[string]string{
"testing_resource.one": "component.single.testing_resource.one",
},
modules: map[string]string{
"two": "single",
"three": "single",
}, expected: []stackstate.AppliedChange{
&stackstate.AppliedChangeComponentInstance{
ComponentAddr: mustAbsComponent("component.single"),
ComponentInstanceAddr: mustAbsComponentInstance("component.single"),
OutputValues: map[addrs.OutputValue]cty.Value{},
InputVariables: map[addrs.InputVariable]cty.Value{},
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.single.testing_resource.one"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "one",
"value": "one",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.single.testing_resource.three"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "three",
"value": "three",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
&stackstate.AppliedChangeResourceInstanceObject{
ResourceInstanceObjectAddr: mustAbsResourceInstanceObject("component.single.testing_resource.two"),
NewStateSrc: &states.ResourceInstanceObjectSrc{
AttrsJSON: mustMarshalJSONAttrs(map[string]interface{}{
"id": "two",
"value": "two",
}),
Status: states.ObjectReady,
Private: nil,
},
ProviderConfigAddr: mustDefaultRootProvider("testing"),
Schema: stacks_testing_provider.TestingResourceSchema,
},
},
},
}
for name, tc := range tcs {
t.Run(name, func(t *testing.T) {
cfg := loadMainBundleConfigForTest(t, tc.path)
lock := depsfile.NewLocks()
lock.SetProvider(
addrs.NewDefaultProvider("testing"),
providerreqs.MustParseVersion("0.0.0"),
providerreqs.MustParseVersionConstraints("=0.0.0"),
providerreqs.PreferredHashes([]providerreqs.Hash{}),
)
state := states.BuildState(tc.state)
migration := Migration{
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("testing"): func() (providers.Interface, error) {
return stacks_testing_provider.NewProvider(t), nil
},
},
PreviousState: state,
Config: cfg,
}
var applied []stackstate.AppliedChange
var gotDiags tfdiags.Diagnostics
migration.Migrate(tc.resources, tc.modules, func(change stackstate.AppliedChange) {
applied = append(applied, change)
}, func(diagnostic tfdiags.Diagnostic) {
gotDiags = append(gotDiags, diagnostic)
})
sort.SliceStable(applied, func(i, j int) bool {
key := func(change stackstate.AppliedChange) string {
switch change := change.(type) {
case *stackstate.AppliedChangeComponentInstance:
return change.ComponentInstanceAddr.String()
case *stackstate.AppliedChangeResourceInstanceObject:
return change.ResourceInstanceObjectAddr.String()
default:
panic("unsupported change type")
}
}
return key(applied[i]) < key(applied[j])
})
if diff := cmp.Diff(tc.expected, applied, cmp.Options{
ctydebug.CmpOptions,
collections.CmpOptions,
cmpopts.IgnoreUnexported(addrs.InputVariable{}),
cmpopts.IgnoreUnexported(states.ResourceInstanceObjectSrc{}),
}); len(diff) > 0 {
t.Errorf("unexpected applied changes:\n%s", diff)
}
tfdiags.AssertDiagnosticsMatch(t, gotDiags, tc.expectedDiags)
})
}
}
// mustMarshalJSONAttrs encodes the given attribute map as JSON, panicking on
// any encoding error. Intended only for building test fixtures, where a
// failure to encode indicates a broken test case rather than a runtime
// condition worth propagating.
func mustMarshalJSONAttrs(attrs map[string]interface{}) []byte {
	encoded, err := json.Marshal(attrs)
	if err != nil {
		panic(err)
	}
	return encoded
}
// mustDefaultRootProvider returns the absolute provider configuration address
// for the given default-namespace provider name, attached to the root module.
func mustDefaultRootProvider(provider string) addrs.AbsProviderConfig {
	cfg := addrs.AbsProviderConfig{
		Provider: addrs.NewDefaultProvider(provider),
		Module:   addrs.RootModule,
	}
	return cfg
}
// mustAbsResourceInstanceObject parses addr as an absolute resource instance
// object address, panicking if the string does not parse cleanly.
func mustAbsResourceInstanceObject(addr string) stackaddrs.AbsResourceInstanceObject {
	parsed, diags := stackaddrs.ParseAbsResourceInstanceObjectStr(addr)
	if len(diags) != 0 {
		panic(fmt.Sprintf("failed to parse resource instance object address %q: %s", addr, diags))
	}
	return parsed
}
// mustAbsComponentInstance parses addr as a (possibly partial) absolute
// component instance address, panicking if the string does not parse cleanly.
func mustAbsComponentInstance(addr string) stackaddrs.AbsComponentInstance {
	parsed, diags := stackaddrs.ParsePartialComponentInstanceStr(addr)
	if len(diags) != 0 {
		panic(fmt.Sprintf("failed to parse component instance address %q: %s", addr, diags))
	}
	return parsed
}
// mustAbsComponent parses addr as a component instance address and then
// discards the instance key, returning the address of the component itself.
// It panics if the string does not parse cleanly.
func mustAbsComponent(addr string) stackaddrs.AbsComponent {
	inst, diags := stackaddrs.ParsePartialComponentInstanceStr(addr)
	if len(diags) != 0 {
		panic(fmt.Sprintf("failed to parse component instance address %q: %s", addr, diags))
	}
	result := stackaddrs.AbsComponent{
		Stack: inst.Stack,
		Item:  inst.Item.Component,
	}
	return result
}
// TODO: Perhaps export this from helper_test instead
//
// loadMainBundleConfigForTest loads the stack configuration stored under the
// given directory name inside the shared stackruntime test bundle.
func loadMainBundleConfigForTest(t *testing.T, dirName string) *stackconfig.Config {
	t.Helper()
	return loadConfigForTest(t, "../stackruntime/testdata/mainbundle", mainBundleSourceAddrStr(dirName))
}
// mainBundleSourceAddrStr constructs the remote source address string for a
// test configuration directory inside the main source bundle.
func mainBundleSourceAddrStr(dirName string) string {
	return fmt.Sprintf("git::https://example.com/test.git//%s", dirName)
}
// loadConfigForTest is a test helper that tries to open bundleRoot as a
// source bundle, and then if successful tries to load the given source address
// from it as a stack configuration. If any part of the operation fails then
// it halts execution of the test and doesn't return.
func loadConfigForTest(t *testing.T, bundleRoot string, configSourceAddr string) *stackconfig.Config {
	t.Helper()
	// A failure to open the bundle is a test-environment problem rather than
	// behavior under test, so fail immediately.
	sources, err := sourcebundle.OpenDir(bundleRoot)
	if err != nil {
		t.Fatalf("cannot load source bundle: %s", err)
	}

	// We force using remote source addresses here because that avoids
	// us having to deal with the extra version constraints argument
	// that registry sources require. Exactly what source address type
	// we use isn't relevant for tests in this package, since it's
	// the sourcebundle package's responsibility to make sure its
	// abstraction works for all of the source types.
	sourceAddr, err := sourceaddrs.ParseRemoteSource(configSourceAddr)
	if err != nil {
		t.Fatalf("invalid config source address: %s", err)
	}

	cfg, diags := stackconfig.LoadConfigDir(sourceAddr, sources)
	// Logs every diagnostic and halts the test if any are errors.
	reportDiagnosticsForTest(t, diags)
	return cfg
}
// reportDiagnosticsForTest creates a test log entry for every diagnostic in
// the given diags, and halts the test if any of them are error diagnostics.
func reportDiagnosticsForTest(t *testing.T, diags tfdiags.Diagnostics) {
t.Helper()
for _, diag := range diags {
var b strings.Builder
desc := diag.Description()
locs := diag.Source()
switch sev := diag.Severity(); sev {
case tfdiags.Error:
b.WriteString("Error: ")
case tfdiags.Warning:
b.WriteString("Warning: ")
default:
t.Errorf("unsupported diagnostic type %s", sev)
}
b.WriteString(desc.Summary)
if desc.Address != "" {
b.WriteString("\nwith ")
b.WriteString(desc.Summary)
}
if locs.Subject != nil {
b.WriteString("\nat ")
b.WriteString(locs.Subject.StartString())
}
if desc.Detail != "" {
b.WriteString("\n\n")
b.WriteString(desc.Detail)
}
t.Log(b.String())
}
if diags.HasErrors() {
t.FailNow()
}
} | go | github | https://github.com/hashicorp/terraform | internal/stacks/stackmigrate/migrate_test.go |
# Generated by Django 2.2.2 on 2019-06-16 16:34
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration (Django 2.2.2, 2019-06-16).
    # Regenerates the academic-year choice lists so they run through 2019
    # (default 2019) and re-declares two foreign-key/field definitions.

    # Applies on top of the migration that introduced the HomeAlert model.
    dependencies = [
        ('site_info', '0010_homealert'),
    ]

    operations = [
        # Exec.academic_year: integer year choices 1997..2019, default 2019.
        migrations.AlterField(
            model_name='exec',
            name='academic_year',
            field=models.IntegerField(choices=[(1997, 1997), (1998, 1998), (1999, 1999), (2000, 2000), (2001, 2001), (2002, 2002), (2003, 2003), (2004, 2004), (2005, 2005), (2006, 2006), (2007, 2007), (2008, 2008), (2009, 2009), (2010, 2010), (2011, 2011), (2012, 2012), (2013, 2013), (2014, 2014), (2015, 2015), (2016, 2016), (2017, 2017), (2018, 2018), (2019, 2019)], default=2019, verbose_name='Academic year starting'),
        ),
        # Exec.user: restrict choices to staff users; PROTECT prevents
        # deleting a user that still has an Exec row.
        migrations.AlterField(
            model_name='exec',
            name='user',
            field=models.ForeignKey(limit_choices_to={'is_staff': True}, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
        ),
        # HistoryEntry.academic_year: same regenerated choices as above.
        migrations.AlterField(
            model_name='historyentry',
            name='academic_year',
            field=models.IntegerField(choices=[(1997, 1997), (1998, 1998), (1999, 1999), (2000, 2000), (2001, 2001), (2002, 2002), (2003, 2003), (2004, 2004), (2005, 2005), (2006, 2006), (2007, 2007), (2008, 2008), (2009, 2009), (2010, 2010), (2011, 2011), (2012, 2012), (2013, 2013), (2014, 2014), (2015, 2015), (2016, 2016), (2017, 2017), (2018, 2018), (2019, 2019)], default=2019, verbose_name='Academic year starting'),
        ),
    ]
#! /usr/bin/env python
from __future__ import print_function
import getopt
import os
import re
import six
from six.moves import socketserver
import subprocess
import sys
import tempfile
import threading
from ryu.ofproto import ofproto_parser
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import ofproto_v1_0_parser
from ryu.ofproto import ofproto_v1_5
from ryu.ofproto import ofproto_v1_5_parser
from ryu.ofproto import ofproto_protocol
# Compatibility shim: subprocess timeout support only exists on Python 3,
# so resolve the exception type (or None) once at import time.
if six.PY3:
    TimeoutExpired = subprocess.TimeoutExpired
else:
    # As python2 doesn't have timeout for subprocess.call,
    # this script may hang.
    TimeoutExpired = None
# Common match fields shared by most of the flow-mod test cases in MESSAGES
# below; combined with per-case args to build the ovs-ofctl flow spec.
STD_MATCH = [
    'in_port=43981',
    'dl_vlan=999',
    'dl_dst=aa:bb:cc:99:88:77',
    'dl_type=0x0800',  # ETH_TYPE_IP
    'nw_dst=192.168.2.1',
    'tun_src=192.168.2.3',
    'tun_dst=192.168.2.4',
    'tun_id=50000']
# Flow-mod test cases to replay through ovs-ofctl. Each entry provides:
#   name     - base name for the generated .packet file
#   versions - ofproto wire versions to exercise; the main loop maps these
#              via 'OpenFlow%2d' % (v + 9), so 1 -> OpenFlow10, 4 -> OpenFlow13
#   cmd      - the ovs-ofctl subcommand to run
#   args     - flow specification fragments joined by newlines
#   bundled  - (optional) send the flow mod inside an OF1.3 Ext-230 bundle
MESSAGES = [
    {'name': 'action_learn',
     'versions': [4],
     'cmd': 'add-flow',
     'args': ['table=2',
              'importance=39032'] + STD_MATCH + [
              'actions=strip_vlan,mod_nw_dst:192.168.2.9,' +
              'learn(table=99,priority=1,hard_timeout=300,' +
              'OXM_OF_VLAN_VID[0..11],' +
              'OXM_OF_ETH_DST[]=OXM_OF_ETH_SRC[],' +
              'load:0->OXM_OF_VLAN_VID[],' +
              'load:OXM_OF_TUNNEL_ID[]->OXM_OF_TUNNEL_ID[],' +
              'output:OXM_OF_IN_PORT[]),goto_table:100']},
    {'name': 'match_conj',
     'versions': [4],
     'cmd': 'mod-flows',
     'args': ['table=3',
              'cookie=0x123456789abcdef0/0xffffffffffffffff',
              'dl_vlan=1234',
              'conj_id=0xabcdef',
              'actions=strip_vlan,goto_table:100']},
    {'name': 'match_pkt_mark',
     'versions': [4],
     'cmd': 'mod-flows',
     'args': ['table=3',
              'cookie=0x123456789abcdef0/0xffffffffffffffff',
              'dl_vlan=1234',
              'pkt_mark=54321',
              'actions=strip_vlan,goto_table:100']},
    {'name': 'match_pkt_mark_masked',
     'versions': [4],
     'cmd': 'mod-flows',
     'args': ['table=3',
              'cookie=0x123456789abcdef0/0xffffffffffffffff',
              'dl_vlan=1234',
              'pkt_mark=0xd431/0xffff',
              'actions=strip_vlan,goto_table:100']},
    {'name': 'action_conjunction',
     'versions': [4],
     'cmd': 'mod-flows',
     'args': (['table=2',
               'cookie=0x123456789abcdef0/0xffffffffffffffff'] +
              STD_MATCH +
              ['actions=conjunction(0xabcdef,1/2)'])},
    {'name': 'match_load_nx_register',
     'versions': [4],
     'cmd': 'mod-flows',
     'args': ['table=3',
              'cookie=0x123456789abcdef0/0xffffffffffffffff',
              'reg0=0x1234',
              'reg5=0xabcd/0xffff',
              'actions=load:0xdeadbee->NXM_NX_REG0[4..31]']},
    {'name': 'match_move_nx_register',
     'versions': [4],
     'cmd': 'mod-flows',
     'args': ['table=3',
              'cookie=0x123456789abcdef0/0xffffffffffffffff',
              'reg0=0x1234',
              'reg5=0xabcd/0xffff',
              'actions=move:NXM_NX_REG0[10..15]->NXM_NX_REG1[0..5]']},
    {'name': 'action_resubmit',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['table=3',
               'importance=39032'] +
              STD_MATCH +
              ['actions=resubmit(1234,99)'])},
    {'name': 'action_ct',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['table=3,',
               'importance=39032'] +
              ['dl_type=0x0800,ct_state=-trk'] +
              ['actions=ct(table=4,zone=NXM_NX_REG0[4..31])'])},
    {'name': 'action_ct_exec',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['table=3,',
               'importance=39032'] +
              ['dl_type=0x0800,ct_state=+trk+est'] +
              ['actions=ct(commit,exec(set_field:0x654321->ct_mark))'])},
    {'name': 'action_ct_nat',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['table=3,',
               'importance=39032'] +
              ['dl_type=0x0800'] +
              ['actions=ct(commit,nat(src=10.1.12.0-10.1.13.255:1-1023)'])},
    {'name': 'action_ct_nat_v6',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['table=3,',
               'importance=39032'] +
              ['dl_type=0x86dd'] +
              ['actions=ct(commit,nat(dst=2001:1::1-2001:1::ffff)'])},
    {'name': 'action_ct_clear',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['table=3,',
               'importance=39032'] +
              ['dl_type=0x0800,ct_state=+trk'] +
              ['actions=ct_clear'])},
    {'name': 'action_note',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=note:04.05.06.07.00.00'])},
    {'name': 'action_controller',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=controller(reason=packet_out,max_len=1024,id=1)'])},
    {'name': 'action_fintimeout',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100,tcp'] +
              ['actions=fin_timeout(idle_timeout=30,hard_timeout=60)'])},
    {'name': 'action_dec_nw_ttl',
     'versions': [1],
     'cmd': 'add-flow',
     'args': (['priority=100,mpls'] +
              ['actions=dec_ttl'])},
    {'name': 'action_push_mpls',
     'versions': [1],
     'cmd': 'add-flow',
     'args': (['priority=100,ip'] +
              ['actions=push_mpls:0x8847'])},
    {'name': 'action_pop_mpls',
     'versions': [1],
     'cmd': 'add-flow',
     'args': (['priority=100,mpls'] +
              ['actions=pop_mpls:0x0800'])},
    {'name': 'action_set_mpls_ttl',
     'versions': [1],
     'cmd': 'add-flow',
     'args': (['priority=100,mpls'] +
              ['actions=set_mpls_ttl(127)'])},
    {'name': 'action_dec_mpls_ttl',
     'versions': [1],
     'cmd': 'add-flow',
     'args': (['priority=100,mpls'] +
              ['actions=dec_mpls_ttl'])},
    {'name': 'action_set_mpls_label',
     'versions': [1],
     'cmd': 'add-flow',
     'args': (['priority=100,mpls'] +
              ['actions=set_mpls_label(10)'])},
    {'name': 'action_set_mpls_tc',
     'versions': [1],
     'cmd': 'add-flow',
     'args': (['priority=100,mpls'] +
              ['actions=set_mpls_tc(10)'])},
    {'name': 'action_dec_ttl_cnt_ids',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100,tcp'] +
              ['actions=dec_ttl(1,2,3,4,5)'])},
    {'name': 'action_stack_push',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=push:NXM_NX_REG2[1..5]'])},
    {'name': 'action_stack_pop',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=pop:NXM_NX_REG2[1..5]'])},
    {'name': 'action_sample',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=sample(probability=3,collector_set_id=1,' +
               'obs_domain_id=2,obs_point_id=3)'])},
    {'name': 'action_sample2',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=sample(probability=3,collector_set_id=1,' +
               'obs_domain_id=2,obs_point_id=3,sampling_port=8080)'])},
    {'name': 'action_controller2',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=controller(reason=packet_out,max_len=1024,' +
               'id=10,userdata=01.02.03.04.05,pause)'])},
    {'name': 'action_output_trunc',
     'versions': [4],
     'cmd': 'add-flow',
     'args': (['priority=100'] +
              ['actions=output(port=8080,max_len=1024)'])},
    {'name': 'bundle-add',
     'versions': [4],
     'bundled': True,
     'cmd': 'add-flow',
     'args': ['table=33',
              'dl_vlan=1234',
              'actions=strip_vlan,goto_table:100']},
    # ToDo: The following actions are not eligible
    # {'name': 'action_regload2'},
    # {'name': 'action_outputreg2'},
]
# Accumulates captured flow-mod / bundle message bytes; each message's xid is
# zeroed by the handler so the generated packet files are reproducible.
buf = []
class MyHandler(socketserver.BaseRequestHandler):
    # Minimal fake OpenFlow switch: answers HELLO and BARRIER_REQUEST,
    # acknowledges OF1.3 (wire version 4) bundle control messages, and
    # records FLOW_MOD / EXPERIMENTER messages into the global `buf`.
    verbose = False

    @staticmethod
    def _add_msg_to_buf(data, msg_len):
        # HACK: Clear xid into zero
        buf.append(data[:4] + b'\x00\x00\x00\x00' + data[8:msg_len])

    def handle(self):
        # Read OpenFlow messages off the stream until the peer disconnects.
        # `residue` carries bytes of the next message when one recv() call
        # returned more than a single message.
        desc = ofproto_protocol.ProtocolDesc()
        residue = b''
        while True:
            if residue:
                data = residue
                residue = b''
            else:
                data = self.request.recv(1024)
                if data == b'':
                    break
            if self.verbose:
                print(data)
            h = ofproto_parser.header(data)
            if self.verbose:
                print(h)
            version, msg_type, msg_len, xid = h
            residue = data[msg_len:]
            desc.set_version(version=version)
            if msg_type == desc.ofproto.OFPT_HELLO:
                # Complete the OpenFlow handshake.
                hello = desc.ofproto_parser.OFPHello(desc)
                hello.serialize()
                self.request.send(hello.buf)
            elif msg_type == desc.ofproto.OFPT_FLOW_MOD:
                self._add_msg_to_buf(data, msg_len)
            elif version == 4 and msg_type == desc.ofproto.OFPT_EXPERIMENTER:
                # This is for OF13 Ext-230 bundle
                # TODO: support bundle for OF>1.3
                exp = desc.ofproto_parser.OFPExperimenter.parser(
                    object(), version, msg_type, msg_len, xid, data)
                self._add_msg_to_buf(data, msg_len)
                if isinstance(exp, desc.ofproto_parser.ONFBundleCtrlMsg):
                    # Reply with the corresponding *_REPLY control type
                    # (request type + 1), echoing the request's xid.
                    ctrlrep = desc.ofproto_parser.ONFBundleCtrlMsg(
                        desc, exp.bundle_id, exp.type + 1, 0, [])
                    ctrlrep.xid = xid
                    ctrlrep.serialize()
                    self.request.send(ctrlrep.buf)
            elif msg_type == desc.ofproto.OFPT_BARRIER_REQUEST:
                brep = desc.ofproto_parser.OFPBarrierReply(desc)
                brep.xid = xid
                brep.serialize()
                self.request.send(brep.buf)
class MyVerboseHandler(MyHandler):
    # Same protocol behavior as MyHandler, but prints each received payload
    # and its parsed OpenFlow header.
    verbose = True
if __name__ == '__main__':
    # Options: -d debug (print captured bytes, keep serving),
    #          -v verbose handler/ovs-ofctl output,
    #          -o <path> alternative ovs-ofctl binary.
    optlist, args = getopt.getopt(sys.argv[1:], 'dvo:')
    debug = False
    ofctl_cmd = '/usr/bin/ovs-ofctl'
    verbose = False
    for o, a in optlist:
        if o == '-d':
            debug = True
        elif o == '-v':
            verbose = True
        elif o == '-o':
            ofctl_cmd = a
    if not os.access(ofctl_cmd, os.X_OK):
        raise Exception("%s is not executable" % ofctl_cmd)
    # Detect ovs-ofctl >= 2.8, which supports (and needs) --no-names.
    ovs_version = subprocess.Popen([ofctl_cmd, '--version'],
                                   stdout=subprocess.PIPE)
    has_names = False
    try:
        ver_tuple = re.search(r'\s(\d+)\.(\d+)(\.\d*|\s*$)',
                              ovs_version.stdout.readline().decode()).groups()
        if int(ver_tuple[0]) > 2 or \
                int(ver_tuple[0]) == 2 and int(ver_tuple[1]) >= 8:
            has_names = True
    except AttributeError:
        # Version string didn't match; assume an old ovs-ofctl.
        pass
    outpath = '../packet_data'
    # Serve a one-shot fake switch on a temporary unix socket.
    socketdir = tempfile.mkdtemp()
    socketname = os.path.join(socketdir, 'ovs')
    server = socketserver.UnixStreamServer(socketname,
                                           MyVerboseHandler if verbose else
                                           MyHandler)
    if debug or verbose:
        print("Serving at %s" % socketname)
    for msg in MESSAGES:
        bundled = msg.get('bundled', False)
        for v in msg['versions']:
            # Map wire version to the -O protocol name (1 -> OpenFlow10, ...).
            cmdargs = [ofctl_cmd, '-O', 'OpenFlow%2d' % (v + 9)]
            if verbose:
                cmdargs.append('-v')
            if has_names:
                cmdargs.append('--no-names')
            if bundled:
                cmdargs.append('--bundle')
            cmdargs.append(msg['cmd'])
            cmdargs.append('unix:%s' % socketname)
            cmdargs.append('\n'.join(msg['args']))
            if verbose:
                print("Running cmd: " + ' '.join(cmdargs) + "\n")
            # Run ovs-ofctl in a thread so we can service its connection here.
            t = threading.Thread(target=subprocess.call, args=[cmdargs],
                                 kwargs={'timeout': 5})
            t.start()
            server.handle_request()
            if debug:
                for buf1 in buf:
                    print(buf1)
                buf = []
            else:
                # Write each captured message as a .packet file; extra
                # messages from the same run get a -2, -3, ... suffix.
                for i, buf1 in enumerate(buf):
                    suffix = ('-%d' % (i + 1)) if i else ''
                    outf = os.path.join(
                        outpath, "of%d" % (v + 9),
                        "ovs-ofctl-of%d-%s%s.packet" % (
                            v + 9, msg['name'], suffix))
                    print("Writing %s..." % outf)
                    with open(outf, 'wb') as f:
                        f.write(buf1)
                buf = []
            try:
                t.join()
            except TimeoutExpired as e:
                print(e)
    if debug:
        # Keep serving so further manual ovs-ofctl runs can be inspected.
        while True:
            server.handle_request()
            print(buf.pop())
    os.unlink(socketname)
    os.rmdir(socketdir)
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_KERNELS_SOFTSIGN_OP_H_
#define TENSORFLOW_CORE_KERNELS_SOFTSIGN_OP_H_
// Functor definition for SoftsignOp and SoftsignGradOp, must be compilable by
// nvcc.
#include "unsupported/Eigen/CXX11/Tensor" // from @eigen_archive
#include "tensorflow/core/framework/tensor_types.h"
namespace tensorflow {
namespace functor {
// Functor used by SoftsignOp to do the computations.
template <typename Device, typename T>
struct Softsign {
// Computes Softsign activation.
//
// features: any shape.
// activations: same shape as "features".
void operator()(const Device& d, typename TTypes<T>::ConstTensor features,
typename TTypes<T>::Tensor activations) {
activations.device(d) =
features / (features.abs() + features.constant(T(1)));
}
};
// Functor used by SoftsignGradOp to do the computations.
template <typename Device, typename T>
struct SoftsignGrad {
  // Computes SoftsignGrad backprops elementwise:
  // gradients / (1 + |features|)^2, i.e. the incoming gradient scaled by
  // the derivative of x / (1 + |x|) evaluated at features.
  //
  // gradients: gradients backpropagated to the Softsign op.
  // features: inputs that were passed to the Softsign op.
  // backprops: gradients to backpropagate to the Softsign inputs.
  void operator()(const Device& d, typename TTypes<T>::ConstTensor gradients,
                  typename TTypes<T>::ConstTensor features,
                  typename TTypes<T>::Tensor backprops) {
    backprops.device(d) =
        gradients / (features.abs() + features.constant(T(1))).square();
  }
};
} // namespace functor
} // namespace tensorflow
#endif // TENSORFLOW_CORE_KERNELS_SOFTSIGN_OP_H_ | c | github | https://github.com/tensorflow/tensorflow | tensorflow/core/kernels/softsign_op.h |
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
// loggraphdiff is a tool for interpreting changes to the Terraform graph
// based on the simple graph printing format used in the TF_LOG=trace log
// output from Terraform, which looks like this:
//
// aws_instance.b (destroy) - *terraform.NodeDestroyResourceInstance
// aws_instance.b (prepare state) - *terraform.NodeApplyableResource
// provider.aws - *terraform.NodeApplyableProvider
// aws_instance.b (prepare state) - *terraform.NodeApplyableResource
// provider.aws - *terraform.NodeApplyableProvider
// module.child.aws_instance.a (destroy) - *terraform.NodeDestroyResourceInstance
// module.child.aws_instance.a (prepare state) - *terraform.NodeApplyableResource
// module.child.output.a_output - *terraform.NodeApplyableOutput
// provider.aws - *terraform.NodeApplyableProvider
// module.child.aws_instance.a (prepare state) - *terraform.NodeApplyableResource
// provider.aws - *terraform.NodeApplyableProvider
// module.child.output.a_output - *terraform.NodeApplyableOutput
// module.child.aws_instance.a (prepare state) - *terraform.NodeApplyableResource
// provider.aws - *terraform.NodeApplyableProvider
//
// It takes the names of two files containing this style of output and
// produces a single graph description in graphviz format that shows the
// differences between the two graphs: nodes and edges which are only in the
// first graph are shown in red, while those only in the second graph are
// shown in green. This color combination is not useful for those who are
// red/green color blind, so the result can be adjusted by replacing the
// keywords "red" and "green" with a combination that the user is able to
// distinguish.
package main
import (
"bufio"
"fmt"
"log"
"os"
"sort"
"strings"
)
// Graph is a set-based representation of a directed graph as parsed from
// Terraform's trace-log graph dump: a set of node names and a set of
// (source, target) edge pairs.
type Graph struct {
	nodes map[string]struct{}
	edges map[[2]string]struct{}
}
// main reads the two graph log files named on the command line, computes the
// union of their nodes and edges, and prints a Graphviz digraph in which
// elements present only in the first graph are red and elements present only
// in the second are green.
func main() {
	if len(os.Args) != 3 {
		log.Fatal("usage: loggraphdiff <old-graph-file> <new-graph-file>")
	}

	// Renamed from "old"/"new" — "new" shadowed the Go builtin.
	oldGraph, err := readGraph(os.Args[1])
	if err != nil {
		log.Fatalf("failed to read %s: %s", os.Args[1], err)
	}
	newGraph, err := readGraph(os.Args[2])
	if err != nil {
		// Bug fix: this error previously reported os.Args[1] even though it
		// was the second file that failed to read.
		log.Fatalf("failed to read %s: %s", os.Args[2], err)
	}

	// Union of node names from both graphs, sorted for stable output.
	var nodes []string
	for n := range oldGraph.nodes {
		nodes = append(nodes, n)
	}
	for n := range newGraph.nodes {
		if _, exists := oldGraph.nodes[n]; !exists {
			nodes = append(nodes, n)
		}
	}
	sort.Strings(nodes)

	// Union of edges, sorted by (source, target) for stable output.
	var edges [][2]string
	for e := range oldGraph.edges {
		edges = append(edges, e)
	}
	for e := range newGraph.edges {
		if _, exists := oldGraph.edges[e]; !exists {
			edges = append(edges, e)
		}
	}
	sort.Slice(edges, func(i, j int) bool {
		if edges[i][0] != edges[j][0] {
			return edges[i][0] < edges[j][0]
		}
		return edges[i][1] < edges[j][1]
	})

	fmt.Println("digraph G {")
	fmt.Print(" rankdir = \"BT\";\n\n")
	for _, n := range nodes {
		var attrs string
		_, inOld := oldGraph.nodes[n]
		_, inNew := newGraph.nodes[n]
		switch {
		case inOld && inNew:
			// no attrs required
		case inOld:
			attrs = " [color=red]"
		case inNew:
			attrs = " [color=green]"
		}
		fmt.Printf(" %q%s;\n", n, attrs)
	}
	fmt.Println("")
	for _, e := range edges {
		var attrs string
		_, inOld := oldGraph.edges[e]
		_, inNew := newGraph.edges[e]
		switch {
		case inOld && inNew:
			// no attrs required
		case inOld:
			attrs = " [color=red]"
		case inNew:
			attrs = " [color=green]"
		}
		fmt.Printf(" %q -> %q%s;\n", e[0], e[1], attrs)
	}
	fmt.Println("}")
}
func readGraph(fn string) (Graph, error) {
ret := Graph{
nodes: map[string]struct{}{},
edges: map[[2]string]struct{}{},
}
r, err := os.Open(fn)
if err != nil {
return ret, err
}
sc := bufio.NewScanner(r)
var latestNode string
for sc.Scan() {
l := sc.Text()
dash := strings.Index(l, " - ")
if dash == -1 {
// invalid line, so we'll ignore it
continue
}
name := l[:dash]
if strings.HasPrefix(name, " ") {
// It's an edge
name = name[2:]
edge := [2]string{latestNode, name}
ret.edges[edge] = struct{}{}
} else {
// It's a node
latestNode = name
ret.nodes[name] = struct{}{}
}
}
return ret, nil
} | go | github | https://github.com/hashicorp/terraform | tools/loggraphdiff/loggraphdiff.go |
from ltk.actions.action import *
class RmAction(Action):
    """Implements the `ltk rm` command: stops tracking documents/folders
    locally and cancels (or, when the remote flag is set, deletes) the
    corresponding documents on the Lingotek TMS.

    NOTE(review): indentation in this file appears to have been mangled by
    an export; the structure below follows the upstream ltk layout.
    """

    def __init__(self, path):
        # use_delete selects DELETE vs CANCEL on the TMS; toggled per
        # invocation by rm_action based on the 'remote' kwarg.
        Action.__init__(self, path)
        self.use_delete = False

    def rm_action(self, file_patterns, **kwargs):
        """Remove the folders/documents matching file_patterns.

        Recognized kwargs: directory, force, id, remote, all, local, name.
        Raises exceptions.ResourceNotFound when nothing matches.
        """
        try:
            # First pass: any pattern that is a directory is removed from
            # folder tracking (the TMS is not involved for folders).
            removed_folder = False
            for pattern in file_patterns:
                if os.path.isdir(pattern):
                    # print("checking folder "+self.norm_path(pattern))
                    if self.folder_manager.folder_exists(self.norm_path(pattern)):
                        self.folder_manager.remove_element(self.norm_path(pattern))
                        logger.info("Removed folder "+pattern)
                        removed_folder = True
                    else:
                        logger.warning("Folder "+str(pattern)+" has not been added and so can not be removed")
            # --directory: only folders were requested, so stop here.
            if 'directory' in kwargs and kwargs['directory']:
                if not removed_folder:
                    logger.info("No folders to remove at the given path(s)")
                return
            matched_files = None
            if isinstance(file_patterns, str):
                file_patterns = [file_patterns]
            if 'force' in kwargs and kwargs['force']:
                force = True
            else:
                force = False
            if 'id' in kwargs and kwargs['id']:
                useID = True
            else:
                useID = False
            # --remote means delete on the TMS instead of just cancelling.
            if 'remote' in kwargs and kwargs['remote']:
                self.use_delete = True
            else:
                self.use_delete = False
            # Resolve the set of tracked file names to operate on.
            if 'all' in kwargs and kwargs['all']:
                local = False
                self.folder_manager.clear_all()
                removed_folder = True
                logger.info("Removed all folders.")
                useID = False
                matched_files = self.doc_manager.get_file_names()
            elif 'local' in kwargs and kwargs['local']:
                local = True
                if 'name' in kwargs and kwargs['name']:
                    matched_files = []
                    for pattern in file_patterns:
                        doc = self.doc_manager.get_doc_by_prop("name", pattern)
                        if doc:
                            matched_files.append(doc['file_name'])
                else:
                    if len(file_patterns) == 0:
                        self.folder_manager.clear_all()
                        removed_folder = True
                        logger.info("Removed all folders.")
                        useID = False
                        matched_files = self.doc_manager.get_file_names()
            elif not useID:
                local = False
                # use current working directory as root for files instead of project root
                if 'name' in kwargs and kwargs['name']:
                    matched_files = []
                    for pattern in file_patterns:
                        doc = self.doc_manager.get_doc_by_prop("name", pattern)
                        if doc:
                            matched_files.append(doc['file_name'])
                else:
                    matched_files = self.get_doc_filenames_in_path(file_patterns)
            else:
                # Patterns are document ids when --id was given.
                local = False
                matched_files = file_patterns
            if not matched_files or len(matched_files) == 0:
                if useID:
                    raise exceptions.ResourceNotFound("No documents to remove with the specified id")
                elif removed_folder:
                    logger.info("No documents to remove")
                elif local:
                    # Bug fix: corrected the "agruments" typo in this
                    # user-facing error message.
                    raise exceptions.ResourceNotFound("Too many arguments, to specify a document to be removed locally use -l in association with -n")
                elif not 'all' in kwargs or not kwargs['all']:
                    raise exceptions.ResourceNotFound("No documents to remove with the specified file path")
                else:
                    raise exceptions.ResourceNotFound("No documents to remove")
            # NOTE(review): is_directory is computed but never used below;
            # kept as-is to avoid behavior drift.
            is_directory = False
            for pattern in file_patterns:  # If attempting to remove any directory, don't print failure message
                basename = os.path.basename(pattern)
                if not basename or basename == "":
                    is_directory = True
            for file_name in matched_files:
                # title = os.path.basename(os.path.normpath(file_name)).split('.')[0]
                self._rm_document(self.norm_path(file_name).replace(self.path, ""), useID, (force or local))
        except Exception as e:
            # Python 3
            # log_error(self.error_file_name, e)
            # End Python 3
            if 'string indices must be integers' in str(e):
                logger.error("Error connecting to Lingotek's TMS")
            else:
                logger.error("Error on remove: "+str(e))

    def _rm_clone(self, file_name):
        """Collect translation file paths for file_name under the 'clone'
        download layout (one download root per target locale)."""
        trans_files = []
        entry = self.doc_manager.get_doc_by_prop("file_name", file_name)
        if entry:
            if 'locales' in entry and entry['locales']:
                locales = entry['locales']
                for locale_code in locales:
                    # Per-locale override, then the configured download dir,
                    # then a bare locale-named folder.
                    if locale_code in self.locale_folders:
                        download_root = self.locale_folders[locale_code]
                    elif self.download_dir and len(self.download_dir):
                        download_root = os.path.join((self.download_dir if self.download_dir and self.download_dir != 'null' else ''), locale_code)
                    else:
                        download_root = locale_code
                    download_root = os.path.join(self.path, download_root)
                    source_file_name = entry['file_name']
                    source_path = os.path.join(self.path, os.path.dirname(source_file_name))
                    trans_files.extend(get_translation_files(file_name, download_root, self.download_option, self.doc_manager))
        return trans_files

    def _rm_document(self, file_name, useID, force):
        """Cancel/delete a single document on the TMS and drop it from the
        local tracking database; with force, also remove local copies."""
        try:
            doc = None
            if not useID:
                relative_path = self.norm_path(file_name)
                doc = self.doc_manager.get_doc_by_prop('file_name', relative_path)
                title = os.path.basename(self.norm_path(file_name))
                try:
                    document_id = doc['id']
                except TypeError:  # Documents specified by name must be found in the local database to be removed.
                    logger.warning("Document name specified for remove isn't in the local database: {0}".format(relative_path))
                    return
                    # raise exceptions.ResourceNotFound("Document name specified doesn't exist: {0}".format(document_name))
            else:
                document_id = file_name
                doc = self.doc_manager.get_doc_by_prop('id', document_id)
                if doc:
                    file_name = doc['file_name']
            if self.use_delete:
                response = self.api.document_delete(document_id)
            else:
                response = self.api.document_cancel(document_id)
            # print (response)
            if response.status_code != 204 and response.status_code != 202:
                # raise_error(response.json(), "Failed to delete document {0}".format(document_name), True)
                logger.error("Failed to {0} {1} remotely".format('delete' if self.use_delete else 'cancel', file_name))
            else:
                logger.info("{0} has been {1} remotely".format(file_name, 'deleted' if self.use_delete else 'cancelled'))
            if force:
                # delete local translation file(s) for the document being deleted NOTE:this does not seem to be implemented. The translation files are gathered below but nothing is ever done with them
                trans_files = []
                if 'clone' in self.download_option:
                    trans_files = self._rm_clone(file_name)
                elif 'folder' in self.download_option:
                    trans_files = self._rm_folder(file_name)
                elif 'same' in self.download_option:
                    download_path = self.path
                    trans_files = get_translation_files(file_name, download_path, self.download_option, self.doc_manager)
                self.delete_local(file_name, document_id)
            self.doc_manager.remove_element(document_id)
        except json.decoder.JSONDecodeError:
            logger.error("JSON error on removing document")
        except KeyboardInterrupt:
            raise_error("", "Canceled removing document")
            return
        except Exception as e:
            log_error(self.error_file_name, e)
            logger.error("Error on removing document "+str(file_name)+": "+str(e))

    def _rm_folder(self, file_name):
        """Collect translation file paths for file_name under the 'folder'
        download layout (a single shared download directory)."""
        trans_files = []
        entry = self.doc_manager.get_doc_by_prop("file_name", file_name)
        if entry:
            if 'locales' in entry and entry['locales']:
                locales = entry['locales']
                for locale_code in locales:
                    if locale_code in self.locale_folders:
                        if self.locale_folders[locale_code] == 'null':
                            logger.warning("Download failed: folder not specified for "+locale_code)
                        else:
                            download_path = self.locale_folders[locale_code]
                    else:
                        download_path = self.download_dir
                    download_path = os.path.join(self.path, download_path)
                    trans_files.extend(get_translation_files(file_name, download_path, self.download_option, self.doc_manager))
        return trans_files
//! For middleware documentation, see [`Compress`].
use std::{
future::Future,
marker::PhantomData,
pin::Pin,
task::{Context, Poll},
};
use actix_http::encoding::Encoder;
use actix_service::{Service, Transform};
use actix_utils::future::{ok, Either, Ready};
use futures_core::ready;
use mime::Mime;
use once_cell::sync::Lazy;
use pin_project_lite::pin_project;
use crate::{
body::{EitherBody, MessageBody},
http::{
header::{self, AcceptEncoding, ContentEncoding, Encoding, HeaderValue},
StatusCode,
},
service::{ServiceRequest, ServiceResponse},
Error, HttpMessage, HttpResponse,
};
/// Middleware for compressing response payloads.
///
/// # Encoding Negotiation
/// `Compress` will read the `Accept-Encoding` header to negotiate which compression codec to use.
/// Payloads are not compressed if the header is not sent. The `compress-*` [feature flags] are also
/// considered in this selection process.
///
/// # Pre-compressed Payload
/// If you are serving some data that is already using a compressed representation (e.g., a gzip
/// compressed HTML file from disk) you can signal this to `Compress` by setting an appropriate
/// `Content-Encoding` header. In addition to preventing double compressing the payload, this header
/// is required by the spec when using compressed representations and will inform the client that
/// the content should be uncompressed.
///
/// However, it is not advised to unconditionally serve encoded representations of content because
/// the client may not support it. The [`AcceptEncoding`] typed header has some utilities to help
/// perform manual encoding negotiation, if required. When negotiating content encoding, it is also
/// required by the spec to send a `Vary: Accept-Encoding` header.
///
/// A (naïve) example serving a pre-compressed Gzip file is included below.
///
/// # Examples
/// To enable automatic payload compression just include `Compress` as a top-level middleware:
/// ```
/// use actix_web::{middleware, web, App, HttpResponse};
///
/// let app = App::new()
///     .wrap(middleware::Compress::default())
///     .default_service(web::to(|| async { HttpResponse::Ok().body("hello world") }));
/// ```
///
/// Pre-compressed Gzip file being served from disk with correct headers added to bypass middleware:
/// ```no_run
/// use actix_web::{middleware, http::header, web, App, HttpResponse, Responder};
///
/// async fn index_handler() -> actix_web::Result<impl Responder> {
///     Ok(actix_files::NamedFile::open_async("./assets/index.html.gz").await?
///         .customize()
///         .insert_header(header::ContentEncoding::Gzip))
/// }
///
/// let app = App::new()
///     .wrap(middleware::Compress::default())
///     .default_service(web::to(index_handler));
/// ```
///
/// [feature flags]: ../index.html#crate-features
#[derive(Debug, Clone, Default)]
#[non_exhaustive]
pub struct Compress;
impl<S, B> Transform<S, ServiceRequest> for Compress
where
    B: MessageBody,
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
{
    type Response = ServiceResponse<EitherBody<Encoder<B>>>;
    type Error = Error;
    type Transform = CompressMiddleware<S>;
    type InitError = ();
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    // Wraps the inner service in the compression middleware; construction
    // is infallible, so the future is immediately ready.
    fn new_transform(&self, service: S) -> Self::Future {
        ok(CompressMiddleware { service })
    }
}
/// Service wrapper produced by [`Compress`]; holds the wrapped inner service.
pub struct CompressMiddleware<S> {
    service: S,
}
impl<S, B> Service<ServiceRequest> for CompressMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    B: MessageBody,
{
    type Response = ServiceResponse<EitherBody<Encoder<B>>>;
    type Error = Error;
    #[allow(clippy::type_complexity)]
    // Left: request forwarded with an encoding to apply; Right: immediate 406.
    type Future = Either<CompressResponse<S, B>, Ready<Result<Self::Response, Self::Error>>>;
    actix_service::forward_ready!(service);
    #[allow(clippy::borrow_interior_mutable_const)]
    fn call(&self, req: ServiceRequest) -> Self::Future {
        // negotiate content-encoding
        let accept_encoding = req.get_header::<AcceptEncoding>();
        let accept_encoding = match accept_encoding {
            // missing header; fallback to identity
            None => {
                return Either::left(CompressResponse {
                    encoding: Encoding::identity(),
                    fut: self.service.call(req),
                    _phantom: PhantomData,
                })
            }
            // valid accept-encoding header
            Some(accept_encoding) => accept_encoding,
        };
        // Pick the best mutually supported encoding; if negotiation yields
        // nothing (not even identity), answer 406 listing what we do support.
        match accept_encoding.negotiate(SUPPORTED_ENCODINGS.iter()) {
            None => {
                let mut res = HttpResponse::with_body(
                    StatusCode::NOT_ACCEPTABLE,
                    SUPPORTED_ENCODINGS_STRING.as_str(),
                );
                // The 406 response still varies on Accept-Encoding.
                res.headers_mut()
                    .insert(header::VARY, HeaderValue::from_static("Accept-Encoding"));
                Either::right(ok(req
                    .into_response(res)
                    .map_into_boxed_body()
                    .map_into_right_body()))
            }
            Some(encoding) => Either::left(CompressResponse {
                fut: self.service.call(req),
                encoding,
                _phantom: PhantomData,
            }),
        }
    }
}
pin_project! {
    /// Future resolving to the inner service's response with the negotiated
    /// `encoding` applied to its body.
    pub struct CompressResponse<S, B>
    where
        S: Service<ServiceRequest>,
    {
        #[pin]
        fut: S::Future,
        encoding: Encoding,
        _phantom: PhantomData<B>,
    }
}
impl<S, B> Future for CompressResponse<S, B>
where
    B: MessageBody,
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
{
    type Output = Result<ServiceResponse<EitherBody<Encoder<B>>>, Error>;
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = self.as_mut().project();
        match ready!(this.fut.poll(cx)) {
            Ok(resp) => {
                let enc = match this.encoding {
                    Encoding::Known(enc) => *enc,
                    Encoding::Unknown(enc) => {
                        // Negotiation only selects from SUPPORTED_ENCODINGS,
                        // which are all `Known` variants, so this is unreachable.
                        unimplemented!("encoding '{enc}' should not be here");
                    }
                };
                Poll::Ready(Ok(resp.map_body(move |head, body| {
                    let content_type = head.headers.get(header::CONTENT_TYPE);
                    // Default predicate: skip compression for video/* and for
                    // image/* except SVG; everything else (or a missing/unparsable
                    // content-type) is compressed.
                    fn default_compress_predicate(content_type: Option<&HeaderValue>) -> bool {
                        match content_type {
                            None => true,
                            Some(hdr) => {
                                match hdr.to_str().ok().and_then(|hdr| hdr.parse::<Mime>().ok()) {
                                    Some(mime) if mime.type_() == mime::IMAGE => {
                                        matches!(mime.subtype(), mime::SVG)
                                    }
                                    Some(mime) if mime.type_() == mime::VIDEO => false,
                                    _ => true,
                                }
                            }
                        }
                    }
                    // Fall back to identity when the predicate rejects compression.
                    let enc = if default_compress_predicate(content_type) {
                        enc
                    } else {
                        ContentEncoding::Identity
                    };
                    EitherBody::left(Encoder::response(enc, head, body))
                })))
            }
            Err(err) => Poll::Ready(Err(err)),
        }
    }
}
// Comma-joined list of enabled encodings; used as the body of the 406 response
// emitted by `CompressMiddleware::call` when negotiation fails.
static SUPPORTED_ENCODINGS_STRING: Lazy<String> = Lazy::new(|| {
    #[allow(unused_mut)] // only unused when no compress features enabled
    let mut encoding: Vec<&str> = vec![];
    #[cfg(feature = "compress-brotli")]
    {
        encoding.push("br");
    }
    #[cfg(feature = "compress-gzip")]
    {
        encoding.push("gzip");
        encoding.push("deflate");
    }
    #[cfg(feature = "compress-zstd")]
    {
        encoding.push("zstd");
    }
    assert!(
        !encoding.is_empty(),
        "encoding can not be empty unless __compress feature has been explicitly enabled by itself"
    );
    encoding.join(", ")
});
// Encodings offered during negotiation. Identity is always present; the rest
// depend on which compress-* crate features are enabled.
static SUPPORTED_ENCODINGS: &[Encoding] = &[
    Encoding::identity(),
    #[cfg(feature = "compress-brotli")]
    {
        Encoding::brotli()
    },
    #[cfg(feature = "compress-gzip")]
    {
        Encoding::gzip()
    },
    #[cfg(feature = "compress-gzip")]
    {
        Encoding::deflate()
    },
    #[cfg(feature = "compress-zstd")]
    {
        Encoding::zstd()
    },
];
// move cfg(feature) to prevents_double_compressing if more tests are added
#[cfg(feature = "compress-gzip")]
#[cfg(test)]
mod tests {
    use std::collections::HashSet;
    use static_assertions::assert_impl_all;
    use super::*;
    use crate::{http::header::ContentType, middleware::DefaultHeaders, test, web, App};
    // Fixtures repeated 100x so payloads are large enough that compression
    // visibly changes the body.
    const HTML_DATA_PART: &str = "<html><h1>hello world</h1></html";
    const HTML_DATA: &str = const_str::repeat!(HTML_DATA_PART, 100);
    const TEXT_DATA_PART: &str = "hello world ";
    const TEXT_DATA: &str = const_str::repeat!(TEXT_DATA_PART, 100);
    assert_impl_all!(Compress: Send, Sync);
    /// Gunzip helper for asserting on compressed response bodies.
    pub fn gzip_decode(bytes: impl AsRef<[u8]>) -> Vec<u8> {
        use std::io::Read as _;
        let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
        let mut buf = Vec::new();
        decoder.read_to_end(&mut buf).unwrap();
        buf
    }
    // Asserts a 2xx status and that the content-type header contains `ct`.
    #[track_caller]
    fn assert_successful_res_with_content_type<B>(res: &ServiceResponse<B>, ct: &str) {
        assert!(res.status().is_success());
        assert!(
            res.headers()
                .get(header::CONTENT_TYPE)
                .expect("content-type header should be present")
                .to_str()
                .expect("content-type header should be utf-8")
                .contains(ct),
            "response's content-type did not match {}",
            ct
        );
    }
    // As above, and additionally requires `Content-Encoding: gzip`.
    #[track_caller]
    fn assert_successful_gzip_res_with_content_type<B>(res: &ServiceResponse<B>, ct: &str) {
        assert_successful_res_with_content_type(res, ct);
        assert_eq!(
            res.headers()
                .get(header::CONTENT_ENCODING)
                .expect("response should be gzip compressed"),
            "gzip",
        );
    }
    // As above, and additionally requires that no content-encoding was applied.
    #[track_caller]
    fn assert_successful_identity_res_with_content_type<B>(res: &ServiceResponse<B>, ct: &str) {
        assert_successful_res_with_content_type(res, ct);
        assert!(
            res.headers().get(header::CONTENT_ENCODING).is_none(),
            "response should not be compressed",
        );
    }
    // Nesting Compress twice (app-level and resource-level) must not
    // double-encode the payload: one gzip layer decodes back to TEXT_DATA.
    #[actix_rt::test]
    async fn prevents_double_compressing() {
        let app = test::init_service({
            App::new()
                .wrap(Compress::default())
                .route(
                    "/single",
                    web::get().to(move || HttpResponse::Ok().body(TEXT_DATA)),
                )
                .service(
                    web::resource("/double")
                        .wrap(Compress::default())
                        .wrap(DefaultHeaders::new().add(("x-double", "true")))
                        .route(web::get().to(move || HttpResponse::Ok().body(TEXT_DATA))),
                )
        })
        .await;
        let req = test::TestRequest::default()
            .uri("/single")
            .insert_header((header::ACCEPT_ENCODING, "gzip"))
            .to_request();
        let res = test::call_service(&app, req).await;
        assert_eq!(res.status(), StatusCode::OK);
        assert_eq!(res.headers().get("x-double"), None);
        assert_eq!(res.headers().get(header::CONTENT_ENCODING).unwrap(), "gzip");
        let bytes = test::read_body(res).await;
        assert_eq!(gzip_decode(bytes), TEXT_DATA.as_bytes());
        let req = test::TestRequest::default()
            .uri("/double")
            .insert_header((header::ACCEPT_ENCODING, "gzip"))
            .to_request();
        let res = test::call_service(&app, req).await;
        assert_eq!(res.status(), StatusCode::OK);
        assert_eq!(res.headers().get("x-double").unwrap(), "true");
        assert_eq!(res.headers().get(header::CONTENT_ENCODING).unwrap(), "gzip");
        let bytes = test::read_body(res).await;
        assert_eq!(gzip_decode(bytes), TEXT_DATA.as_bytes());
    }
    // A handler-set Vary header must survive alongside the middleware's
    // `Vary: accept-encoding` addition.
    #[actix_rt::test]
    async fn retains_previously_set_vary_header() {
        let app = test::init_service({
            App::new()
                .wrap(Compress::default())
                .default_service(web::to(move || {
                    HttpResponse::Ok()
                        .insert_header((header::VARY, "x-test"))
                        .body(TEXT_DATA)
                }))
        })
        .await;
        let req = test::TestRequest::default()
            .insert_header((header::ACCEPT_ENCODING, "gzip"))
            .to_request();
        let res = test::call_service(&app, req).await;
        assert_eq!(res.status(), StatusCode::OK);
        #[allow(clippy::mutable_key_type)]
        let vary_headers = res.headers().get_all(header::VARY).collect::<HashSet<_>>();
        assert!(vary_headers.contains(&HeaderValue::from_static("x-test")));
        assert!(vary_headers.contains(&HeaderValue::from_static("accept-encoding")));
    }
    // Routes exercising the default content-type predicate: /html should be
    // compressed, /image (jpeg) should not.
    fn configure_predicate_test(cfg: &mut web::ServiceConfig) {
        cfg.route(
            "/html",
            web::get().to(|| {
                HttpResponse::Ok()
                    .content_type(ContentType::html())
                    .body(HTML_DATA)
            }),
        )
        .route(
            "/image",
            web::get().to(|| {
                HttpResponse::Ok()
                    .content_type(ContentType::jpeg())
                    .body(TEXT_DATA)
            }),
        );
    }
    #[actix_rt::test]
    async fn prevents_compression_jpeg() {
        let app = test::init_service(
            App::new()
                .wrap(Compress::default())
                .configure(configure_predicate_test),
        )
        .await;
        let req =
            test::TestRequest::with_uri("/html").insert_header((header::ACCEPT_ENCODING, "gzip"));
        let res = test::call_service(&app, req.to_request()).await;
        assert_successful_gzip_res_with_content_type(&res, "text/html");
        assert_ne!(test::read_body(res).await, HTML_DATA.as_bytes());
        let req =
            test::TestRequest::with_uri("/image").insert_header((header::ACCEPT_ENCODING, "gzip"));
        let res = test::call_service(&app, req.to_request()).await;
        assert_successful_identity_res_with_content_type(&res, "image/jpeg");
        assert_eq!(test::read_body(res).await, TEXT_DATA.as_bytes());
    }
    // Empty bodies are never encoded.
    #[actix_rt::test]
    async fn prevents_compression_empty() {
        let app = test::init_service({
            App::new()
                .wrap(Compress::default())
                .default_service(web::to(move || HttpResponse::Ok().finish()))
        })
        .await;
        let req = test::TestRequest::default()
            .insert_header((header::ACCEPT_ENCODING, "gzip"))
            .to_request();
        let res = test::call_service(&app, req).await;
        assert_eq!(res.status(), StatusCode::OK);
        assert!(!res.headers().contains_key(header::CONTENT_ENCODING));
        assert!(test::read_body(res).await.is_empty());
    }
}
#[cfg(feature = "compress-brotli")]
#[cfg(test)]
mod tests_brotli {
    use super::*;
    use crate::{test, web, App};
    // An empty body must pass through unencoded even when brotli is accepted.
    #[actix_rt::test]
    async fn prevents_compression_empty() {
        let app = test::init_service({
            App::new()
                .wrap(Compress::default())
                .default_service(web::to(move || HttpResponse::Ok().finish()))
        })
        .await;
        let req = test::TestRequest::default()
            .insert_header((header::ACCEPT_ENCODING, "br"))
            .to_request();
        let res = test::call_service(&app, req).await;
        assert_eq!(res.status(), StatusCode::OK);
        assert!(!res.headers().contains_key(header::CONTENT_ENCODING));
        assert!(test::read_body(res).await.is_empty());
    }
}
#### Note: this error code is no longer emitted by the compiler.
The `not` cfg-predicate was malformed.
Erroneous code example (using `cargo doc`):
```ignore, E0536 (only triggers on cargo doc)
#![feature(doc_cfg)]
#[doc(cfg(not()))]
pub fn main() {
}
```
The `not` predicate expects one cfg-pattern. Example:
```
#![feature(doc_cfg)]
#[doc(cfg(not(target_os = "linux")))] // ok!
pub fn main() {
}
```
For more information about the `cfg` macro, read the section on
[Conditional Compilation][conditional-compilation] in the Reference.
[conditional-compilation]: https://doc.rust-lang.org/reference/conditional-compilation.html | unknown | github | https://github.com/rust-lang/rust | compiler/rustc_error_codes/src/error_codes/E0536.md |
# run doom process on a series of maps
# can be used for regression testing, or to fetch media
# keeps a log of each run ( see getLogfile )
# currently uses a basic stdout activity timeout to decide when to move on
# using a periodic check of /proc/<pid>/status SleepAVG
# when the sleep average is reaching 0, issue a 'quit' to stdout
# keeps serialized run status in runner.pickle
# NOTE: can be used to initiate runs on failed maps only for instance etc.
# TODO: use the serialized and not the logs to sort the run order
# TODO: better logging. Use idLogger?
# TODO: configurable event when the process is found interactive
# instead of emitting a quit, perform some warning action?
import sys, os, commands, string, time, traceback, pickle
from twisted.application import internet, service
from twisted.internet import protocol, reactor, utils, defer
from twisted.internet.task import LoopingCall
class doomClientProtocol( protocol.ProcessProtocol ):
    # Twisted ProcessProtocol for one child doom process: mirrors the child's
    # stdout/stderr to the console and a per-map logfile, timestamps the run,
    # and fires `deferred` once the process has fully ended.
    # NOTE: Python 2 code (print statements).

    # ProcessProtocol API
    def connectionMade( self ):
        self.logfile.write( 'connectionMade\n' )
    def outReceived( self, data ):
        # echo child stdout and append it to the log
        print data
        self.logfile.write( data )
    def errReceived( self, data ):
        # child stderr is tagged so it can be told apart in the log
        print 'stderr: ' + data
        self.logfile.write( 'stderr: ' + data )
    def inConnectionLost( self ):
        self.logfile.write( 'inConnectionLost\n' )
    def outConnectionLost( self ):
        self.logfile.write( 'outConnectionLost\n' )
    def errConnectionLost( self ):
        self.logfile.write( 'errConnectionLost\n' )
    def processEnded( self, status_object ):
        # final log entry with end time, then hand control back via the deferred
        self.logfile.write( 'processEnded %s\n' % repr( status_object ) )
        self.logfile.write( time.strftime( '%H:%M:%S', time.localtime( time.time() ) ) + '\n' )
        self.logfile.close()
        self.deferred.callback( None )
    # mac management
    def __init__( self, logfilename, deferred ):
        # open the log in append mode and stamp the start time
        self.logfilename = logfilename
        self.logfile = open( logfilename, 'a' )
        self.logfile.write( time.strftime( '%H:%M:%S', time.localtime( time.time() ) ) + '\n' )
        self.deferred = deferred
class doomService( service.Service ):
    # Twisted service that runs the doom binary over a list of maps, one at a
    # time. Uses /proc/<pid>/status SleepAVG polling to decide when a map has
    # finished loading (interactive), then issues 'quit' on stdin and moves on.
    # NOTE: Python 2 code (print statements); Linux-only (/proc polling).

    # current monitoring state
    # 0: nothing running
    # 1: we have a process running, we're monitoring it's CPU usage
    # 2: we issued a 'quit' to the process's stdin
    # either going to get a processEnded, or a timeout
    # 3: we forced a kill because of error, timeout etc.
    state = 0
    # load check period
    check_period = 10
    # pickled status file
    pickle_file = 'runner.pickle'
    # stores status indexed by filename
    # { 'mapname' : ( state, last_update ), .. }
    status = {}
    # start the maps as multiplayer server
    multiplayer = 0
    def __init__( self, bin, cmdline, maps, sort = 0, multiplayer = 0, blank_run = 0 ):
        self.p_transport = None
        self.multiplayer = multiplayer
        self.blank_run = blank_run
        if ( self.multiplayer ):
            print 'Operate in multiplayer mode'
        self.bin = os.path.abspath( bin )
        # accept the command line as either a space separated string or a list
        if ( type( cmdline ) is type( '' ) ):
            self.cmdline = string.split( cmdline, ' ' )
        else:
            self.cmdline = cmdline
        self.maps = maps
        # restore the per-map status from a previous run, if present
        if ( os.path.exists( self.pickle_file ) ):
            print 'Loading pickled status %s' % self.pickle_file
            handle = open( self.pickle_file, 'r' )
            self.status = pickle.load( handle )
            handle.close()
        if ( sort ):
            # order maps by age of their logfile (never-run maps first)
            print 'Sorting maps oldest runs first'
            maps_sorted = [ ]
            for i in self.maps:
                i_log = self.getLogfile( i )
                if ( os.path.exists( i_log ) ):
                    maps_sorted.append( ( i, os.path.getmtime( i_log ) ) )
                else:
                    maps_sorted.append( ( i, 0 ) )
            maps_sorted.sort( lambda x,y : cmp( x[1], y[1] ) )
            self.maps = [ ]
            # optional warm-up run with no map before the real list
            if ( blank_run ):
                self.maps.append( 'blankrun' )
            for i in maps_sorted:
                self.maps.append( i[ 0 ] )
            print 'Sorted as: %s\n' % repr( self.maps )
    def getLogfile( self, name ):
        # map names may contain '/', which is mapped to '-' for the log filename
        return 'logs/' + string.translate( name, string.maketrans( '/', '-' ) ) + '.log'
    # deferred call when child process dies
    def processEnded( self, val ):
        print 'child has died - state %d' % self.state
        # record the state the run ended in, then advance to the next map
        self.status[ self.maps[ self.i_map ] ] = ( self.state, time.time() )
        self.i_map += 1
        if ( self.i_map >= len( self.maps ) ):
            reactor.stop()
        else:
            self.nextMap()
    def processTimeout( self ):
        self.p_transport.signalProcess( "KILL" )
    def sleepAVGReply( self, val ):
        # callback with the 'SleepAVG: NN%' line read from /proc
        try:
            # strip the 'SleepAVG:' prefix and the trailing '%\n'
            s = val[10:][:-2]
            print 'sleepAVGReply %s%%' % s
            if ( s == '0' ):
                # need twice in a row
                if ( self.state == 2 ):
                    print 'child process is interactive'
                    self.p_transport.write( 'quit\n' )
                else:
                    self.state = 2
            else:
                self.state = 1
            # else:
            # reactor.callLater( self.check_period, self.checkCPU )
        except:
            print traceback.format_tb( sys.exc_info()[2] )
            print sys.exc_info()[0]
            print 'exception raised in sleepAVGReply - killing process'
            self.state = 3
            self.p_transport.signalProcess( 'KILL' )
    def sleepAVGTimeout( self ):
        print 'sleepAVGTimeout - killing process'
        self.state = 3
        self.p_transport.signalProcess( 'KILL' )
    # called at regular intervals to monitor the sleep average of the child process
    # when sleep reaches 0, it means the map is loaded and interactive
    def checkCPU( self ):
        if ( self.state == 0 or self.p_transport is None or self.p_transport.pid is None ):
            print 'checkCPU: no child process atm'
            return
        # NOTE(review): local 'defer' shadows the twisted.internet.defer module
        # imported at the top of the file; 'defer.setTimeout' is an old Twisted
        # API — verify against the Twisted version in use.
        defer = utils.getProcessOutput( '/bin/bash', [ '-c', 'cat /proc/%d/status | grep SleepAVG' % self.p_transport.pid ] )
        defer.addCallback( self.sleepAVGReply )
        defer.setTimeout( 2, self.sleepAVGTimeout )
    def nextMap( self ):
        # spawn the doom process for the current map index
        self.state = 0
        name = self.maps[ self.i_map ]
        print 'Starting map: ' + name
        logfile = self.getLogfile( name )
        print 'Logging to: ' + logfile
        if ( self.multiplayer ):
            cmdline = [ self.bin ] + self.cmdline + [ '+set', 'si_map', name ]
            if ( name != 'blankrun' ):
                cmdline.append( '+spawnServer' )
        else:
            cmdline = [ self.bin ] + self.cmdline
            if ( name != 'blankrun' ):
                cmdline += [ '+devmap', name ]
        print 'Command line: ' + repr( cmdline )
        self.deferred = defer.Deferred()
        self.deferred.addCallback( self.processEnded )
        self.p_transport = reactor.spawnProcess( doomClientProtocol( logfile, self.deferred ), self.bin, cmdline , path = os.path.dirname( self.bin ), env = os.environ )
        self.state = 1
        # # setup the CPU usage loop
        # reactor.callLater( self.check_period, self.checkCPU )
    def startService( self ):
        print 'doomService startService'
        # periodic SleepAVG polling drives the state machine
        loop = LoopingCall( self.checkCPU )
        loop.start( self.check_period )
        self.i_map = 0
        self.nextMap()
    def stopService( self ):
        print 'doomService stopService'
        # kill any still-running child before persisting the run status
        if ( not self.p_transport.pid is None ):
            self.p_transport.signalProcess( 'KILL' )
        # serialize
        print 'saving status to %s' % self.pickle_file
        handle = open( self.pickle_file, 'w+' )
        pickle.dump( self.status, handle )
        handle.close()
base_suite: sharding_jscore_passthrough_base
description: >-
Test the correctness of the query settings fallback mechanism by introducing planner
failures and then expecting that the fallback-generated plan is identical to the
original plan. This suite runs jscore tests using a sharded cluster fixture.
overrides:
# TODO (SERVER-119259) - Remove this override once query settings fallback is fixed for different mongos.
- "query_settings.sharding_temporarily_enforce_1_mongos"
excludes:
- "query_settings.common_core_excludes"
- "query_settings.fallback_core_excludes"
eval:
- "query_settings.eval_implicit_query_settings_fallback" | unknown | github | https://github.com/mongodb/mongo | buildscripts/resmokeconfig/matrix_suites/mappings/sharding_pqs_fallback.yml |
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
class ChatstateProtocolEntity(ProtocolEntity):
    '''
    Represents a <chatstate> stanza carrying a typing indicator.

    INCOMING
    <chatstate from="xxxxxxxxxxx@s.whatsapp.net">
    <{{composing|paused}}></{{composing|paused}}>
    </chatstate>
    OUTGOING
    <chatstate to="xxxxxxxxxxx@s.whatsapp.net">
    <{{composing|paused}}></{{composing|paused}}>
    </chatstate>
    '''
    STATE_TYPING = "composing"
    STATE_PAUSED = "paused"
    # All states this entity accepts.
    STATES = (STATE_TYPING, STATE_PAUSED)

    def __init__(self, _state):
        super(ChatstateProtocolEntity, self).__init__("chatstate")
        # Reject anything outside the known chat states up front.
        assert _state in self.__class__.STATES, "Expected chat state to be in %s, got %s" % (self.__class__.STATES, _state)
        self._state = _state

    def getState(self):
        # Accessor for the wrapped state string.
        return self._state

    def toProtocolTreeNode(self):
        # The state is encoded as a single child tag under <chatstate>.
        root = self._createProtocolTreeNode({}, None, data=None)
        root.addChild(ProtocolTreeNode(self._state))
        return root

    def __str__(self):
        return "".join(["CHATSTATE:\n", "State: %s\n" % self._state])

    @staticmethod
    def fromProtocolTreeNode(node):
        # The first (only) child's tag names the chat state.
        children = node.getAllChildren()
        return ChatstateProtocolEntity(children[0].tag)
/*
* Copyright 2010-2024 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.fir.test.cases.generated.cases.annotations;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.util.KtTestUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.analysis.api.fir.test.configurators.AnalysisApiFirTestConfiguratorFactory;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfiguratorFactoryData;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfigurator;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.TestModuleKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.FrontendKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisSessionMode;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiMode;
import org.jetbrains.kotlin.analysis.api.impl.base.test.cases.annotations.AbstractAnalysisApiAnnotationsOnTypesTest;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.regex.Pattern;
/**
 * This class is generated by {@link org.jetbrains.kotlin.generators.tests.analysis.api.GenerateAnalysisApiTestsKt}. DO NOT MODIFY MANUALLY.
 * One {@code @Test} method is generated per test-data file under
 * {@code analysis/analysis-api/testData/annotations/annotationsOnTypes}.
 */
@SuppressWarnings("all")
@TestMetadata("analysis/analysis-api/testData/annotations/annotationsOnTypes")
@TestDataPath("$PROJECT_ROOT")
public class FirIdeNormalAnalysisSourceModuleAnalysisApiAnnotationsOnTypesTestGenerated extends AbstractAnalysisApiAnnotationsOnTypesTest {
    @NotNull
    @Override
    public AnalysisApiTestConfigurator getConfigurator() {
        return AnalysisApiFirTestConfiguratorFactory.INSTANCE.createConfigurator(
            new AnalysisApiTestConfiguratorFactoryData(
                FrontendKind.Fir,
                TestModuleKind.Source,
                AnalysisSessionMode.Normal,
                AnalysisApiMode.Ide
            )
        );
    }
    @Test
    public void testAllFilesPresentInAnnotationsOnTypes() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/annotations/annotationsOnTypes"), Pattern.compile("^(.+)\\.kt$"), null, true);
    }
    @Test
    @TestMetadata("annotaionOnType.kt")
    public void testAnnotaionOnType() {
        runTest("analysis/analysis-api/testData/annotations/annotationsOnTypes/annotaionOnType.kt");
    }
    @Test
    @TestMetadata("annotaionOnTypeArgument.kt")
    public void testAnnotaionOnTypeArgument() {
        runTest("analysis/analysis-api/testData/annotations/annotationsOnTypes/annotaionOnTypeArgument.kt");
    }
    @Test
    @TestMetadata("annotaionOnTypeArgumentOfTypeArgument.kt")
    public void testAnnotaionOnTypeArgumentOfTypeArgument() {
        runTest("analysis/analysis-api/testData/annotations/annotationsOnTypes/annotaionOnTypeArgumentOfTypeArgument.kt");
    }
    @Test
    @TestMetadata("annotaionWithComplexArgumentOnType.kt")
    public void testAnnotaionWithComplexArgumentOnType() {
        runTest("analysis/analysis-api/testData/annotations/annotationsOnTypes/annotaionWithComplexArgumentOnType.kt");
    }
    @Test
    @TestMetadata("annotaionWithComplexArgumentOnTypeArgument.kt")
    public void testAnnotaionWithComplexArgumentOnTypeArgument() {
        runTest("analysis/analysis-api/testData/annotations/annotationsOnTypes/annotaionWithComplexArgumentOnTypeArgument.kt");
    }
    @Test
    @TestMetadata("annotaionWithLiteralArgumentOnType.kt")
    public void testAnnotaionWithLiteralArgumentOnType() {
        runTest("analysis/analysis-api/testData/annotations/annotationsOnTypes/annotaionWithLiteralArgumentOnType.kt");
    }
    @Test
    @TestMetadata("annotaionWithLiteralArgumentOnTypeArgument.kt")
    public void testAnnotaionWithLiteralArgumentOnTypeArgument() {
        runTest("analysis/analysis-api/testData/annotations/annotationsOnTypes/annotaionWithLiteralArgumentOnTypeArgument.kt");
    }
}
import { flushSync } from 'svelte';
import { test } from '../../test';
// https://github.com/sveltejs/svelte/issues/7884
// Regression test: `bind:group` on checkboxes rendered inside nested each
// blocks must keep each group's bound array independent, and rebinding after
// the component swaps its groups (component.update()) must still work.
export default test({
	test({ assert, target, component, window }) {
		let inputs = target.querySelectorAll('input');
		// Initial render: two groups (foo, bar), nothing checked yet.
		assert.htmlEqual(
			target.innerHTML,
			`
			<p>{"foo":[],"bar":[]}</p>
			<h2>foo</h2>
			<ul>
				<li><label><input name="foo" type="checkbox" value="1"> 1</label></li>
				<li><label><input name="foo" type="checkbox" value="2"> 2</label></li>
				<li><label><input name="foo" type="checkbox" value="3"> 3</label></li>
			</ul>
			<h2>bar</h2>
			<ul>
				<li><label><input name="bar" type="checkbox" value="1"> 1</label></li>
				<li><label><input name="bar" type="checkbox" value="2"> 2</label></li>
				<li><label><input name="bar" type="checkbox" value="3"> 3</label></li>
			</ul>
			`
		);
		const event = new window.Event('change');
		// Check foo[1], foo[3] and bar[1]; each change must update only its own group.
		inputs[0].checked = true;
		inputs[0].dispatchEvent(event);
		flushSync();
		inputs[2].checked = true;
		inputs[2].dispatchEvent(event);
		flushSync();
		inputs[3].checked = true;
		inputs[3].dispatchEvent(event);
		flushSync();
		assert.htmlEqual(
			target.innerHTML,
			`
			<p>{"foo":[1,3],"bar":[1]}</p>
			<h2>foo</h2>
			<ul>
				<li><label><input name="foo" type="checkbox" value="1"> 1</label></li>
				<li><label><input name="foo" type="checkbox" value="2"> 2</label></li>
				<li><label><input name="foo" type="checkbox" value="3"> 3</label></li>
			</ul>
			<h2>bar</h2>
			<ul>
				<li><label><input name="bar" type="checkbox" value="1"> 1</label></li>
				<li><label><input name="bar" type="checkbox" value="2"> 2</label></li>
				<li><label><input name="bar" type="checkbox" value="3"> 3</label></li>
			</ul>
			`
		);
		// Swap in a new group (qux); previous selections stay in state even
		// though their inputs are gone from the DOM.
		component.update();
		flushSync();
		assert.htmlEqual(
			target.innerHTML,
			`
			<p>{"foo":[1,3],"bar":[1],"qux":[]}</p>
			<h2>qux</h2>
			<ul>
				<li><label><input name="qux" type="checkbox" value="4"> 4</label></li>
				<li><label><input name="qux" type="checkbox" value="5"> 5</label></li>
				<li><label><input name="qux" type="checkbox" value="6"> 6</label></li>
			</ul>
			`
		);
		// The freshly rendered group must bind correctly too.
		inputs = target.querySelectorAll('input');
		inputs[0].checked = true;
		inputs[0].dispatchEvent(event);
		flushSync();
		assert.htmlEqual(
			target.innerHTML,
			`
			<p>{"foo":[1,3],"bar":[1],"qux":[4]}</p>
			<h2>qux</h2>
			<ul>
				<li><label><input name="qux" type="checkbox" value="4"> 4</label></li>
				<li><label><input name="qux" type="checkbox" value="5"> 5</label></li>
				<li><label><input name="qux" type="checkbox" value="6"> 6</label></li>
			</ul>
			`
		);
		assert.equal(inputs[0].checked, true);
		assert.equal(inputs[1].checked, false);
		assert.equal(inputs[2].checked, false);
	}
});
# Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.integration import get_server_versions, use_singledc, PROTOCOL_VERSION, BasicSharedKeyspaceUnitTestCaseWFunctionTable, BasicSharedKeyspaceUnitTestCase, execute_until_pass
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from cassandra.cluster import Cluster, ResultSet
from cassandra.query import tuple_factory, named_tuple_factory, dict_factory, ordered_dict_factory
from cassandra.util import OrderedDict
def setup_module():
    # Spin up the shared single-DC CCM test cluster used by all tests in this module.
    use_singledc()
class NameTupleFactory(BasicSharedKeyspaceUnitTestCase):
    # Exercises named_tuple_factory field-name sanitizing against a live cluster.
    def setUp(self):
        super(NameTupleFactory, self).setUp()
        self.session.row_factory = named_tuple_factory
        ddl = '''
            CREATE TABLE {0}.{1} (
            k int PRIMARY KEY,
            v1 text,
            v2 text,
            v3 text)'''.format(self.ks_name, self.function_table_name)
        self.session.execute(ddl)
        # NOTE(review): the DDL is executed twice (plain execute above, then
        # execute_until_pass) — the second CREATE TABLE looks redundant; verify
        # whether one of these lines is a leftover.
        execute_until_pass(self.session, ddl)

    def test_sanitizing(self):
        """
        Test to ensure that same named results are surfaced in the NamedTupleFactory
        Creates a table with a few different text fields. Inserts a few values in that table.
        It then fetches the values and confirms that despite all be being selected as the same name
        they are propagated in the result set differently.
        @since 3.3
        @jira_ticket PYTHON-467
        @expected_result duplicate named results have unique row names.
        @test_category queries
        """
        for x in range(5):
            insert1 = '''
                INSERT INTO {0}.{1}
                ( k , v1, v2, v3 )
                VALUES
                ( 1 , 'v1{2}', 'v2{2}','v3{2}' )
                '''.format(self.keyspace_name, self.function_table_name, str(x))
            self.session.execute(insert1)
        # Three columns all aliased 'duplicate' must surface as duplicate,
        # duplicate_, duplicate__ on the named tuple.
        query = "SELECT v1 AS duplicate, v2 AS duplicate, v3 AS duplicate from {0}.{1}".format(self.ks_name, self.function_table_name)
        rs = self.session.execute(query)
        row = rs[0]
        self.assertTrue(hasattr(row, 'duplicate'))
        self.assertTrue(hasattr(row, 'duplicate_'))
        self.assertTrue(hasattr(row, 'duplicate__'))
class RowFactoryTests(BasicSharedKeyspaceUnitTestCaseWFunctionTable):
    """
    Test different row_factories and access code
    """
    def setUp(self):
        super(RowFactoryTests, self).setUp()
        # Two fixed rows (k == v) shared by every row-factory test below.
        self.insert1 = '''
            INSERT INTO {0}.{1}
            ( k , v )
            VALUES
            ( 1 , 1 )
            '''.format(self.keyspace_name, self.function_table_name)
        self.insert2 = '''
            INSERT INTO {0}.{1}
            ( k , v )
            VALUES
            ( 2 , 2 )
            '''.format(self.keyspace_name, self.function_table_name)
        self.select = '''
            SELECT * FROM {0}.{1}
            '''.format(self.keyspace_name, self.function_table_name)

    def tearDown(self):
        self.drop_function_table()

    # Rows come back as plain tuples, accessed positionally.
    def test_tuple_factory(self):
        session = self.session
        session.row_factory = tuple_factory
        session.execute(self.insert1)
        session.execute(self.insert2)
        result = session.execute(self.select)
        self.assertIsInstance(result, ResultSet)
        self.assertIsInstance(result[0], tuple)
        for row in result:
            self.assertEqual(row[0], row[1])
        self.assertEqual(result[0][0], result[0][1])
        self.assertEqual(result[0][0], 1)
        self.assertEqual(result[1][0], result[1][1])
        self.assertEqual(result[1][0], 2)

    # Rows come back as namedtuples, accessed by column attribute.
    def test_named_tuple_factory(self):
        session = self.session
        session.row_factory = named_tuple_factory
        session.execute(self.insert1)
        session.execute(self.insert2)
        result = session.execute(self.select)
        self.assertIsInstance(result, ResultSet)
        result = list(result)
        for row in result:
            self.assertEqual(row.k, row.v)
        self.assertEqual(result[0].k, result[0].v)
        self.assertEqual(result[0].k, 1)
        self.assertEqual(result[1].k, result[1].v)
        self.assertEqual(result[1].k, 2)

    # Rows come back as dicts keyed by column name.
    def test_dict_factory(self):
        session = self.session
        session.row_factory = dict_factory
        session.execute(self.insert1)
        session.execute(self.insert2)
        result = session.execute(self.select)
        self.assertIsInstance(result, ResultSet)
        self.assertIsInstance(result[0], dict)
        for row in result:
            self.assertEqual(row['k'], row['v'])
        self.assertEqual(result[0]['k'], result[0]['v'])
        self.assertEqual(result[0]['k'], 1)
        self.assertEqual(result[1]['k'], result[1]['v'])
        self.assertEqual(result[1]['k'], 2)

    # Rows come back as OrderedDicts preserving column order.
    def test_ordered_dict_factory(self):
        session = self.session
        session.row_factory = ordered_dict_factory
        session.execute(self.insert1)
        session.execute(self.insert2)
        result = session.execute(self.select)
        self.assertIsInstance(result, ResultSet)
        self.assertIsInstance(result[0], OrderedDict)
        for row in result:
            self.assertEqual(row['k'], row['v'])
        self.assertEqual(result[0]['k'], result[0]['v'])
        self.assertEqual(result[0]['k'], 1)
        self.assertEqual(result[1]['k'], result[1]['v'])
        self.assertEqual(result[1]['k'], 2)
class NamedTupleFactoryAndNumericColNamesTests(unittest.TestCase):
    """
    Test for PYTHON-122: Improve Error Handling/Reporting for named_tuple_factory and Numeric Column Names

    Uses a table whose (quoted) column name is purely numeric, which cannot be a
    namedtuple attribute, and verifies the driver still handles it gracefully.
    """
    @classmethod
    def setup_class(cls):
        # One shared cluster/session for the whole class; torn down in teardown_class.
        cls.cluster = Cluster(protocol_version=PROTOCOL_VERSION)
        cls.session = cls.cluster.connect()
        cls._cass_version, cls._cql_version = get_server_versions()
        ddl = '''
            CREATE TABLE test1rf.table_num_col ( key blob PRIMARY KEY, "626972746864617465" blob )
            WITH COMPACT STORAGE'''
        cls.session.execute(ddl)

    @classmethod
    def teardown_class(cls):
        cls.session.execute("DROP TABLE test1rf.table_num_col")
        cls.cluster.shutdown()

    def test_no_exception_on_select(self):
        """
        no exception on SELECT for numeric column name
        """
        try:
            self.session.execute('SELECT * FROM test1rf.table_num_col')
        except ValueError as e:
            # Format the exception itself, not e.message: BaseException.message
            # was removed in Python 3 (deprecated by PEP 352), so e.message
            # raised AttributeError here and masked the real failure.
            self.fail("Unexpected ValueError exception: %s" % e)

    def test_can_select_using_alias(self):
        """
        can SELECT "<numeric col name>" AS aliases
        """
        if self._cass_version < (2, 0, 0):
            raise unittest.SkipTest("Alias in SELECT not supported before 2.0")
        try:
            self.session.execute('SELECT key, "626972746864617465" AS my_col from test1rf.table_num_col')
        except ValueError as e:
            self.fail("Unexpected ValueError exception: %s" % e)

    def test_can_select_with_dict_factory(self):
        """
        can SELECT numeric column using dict_factory
        """
        # dict keys are plain strings, so numeric column names pose no problem.
        self.session.row_factory = dict_factory
        try:
            self.session.execute('SELECT * FROM test1rf.table_num_col')
        except ValueError as e:
            self.fail("Unexpected ValueError exception: %s" % e)
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import parse_qsl
try:
import simplejson as json
except ImportError:
import json
from libcloud.common.types import ProviderError
from libcloud.compute.drivers.cloudstack import CloudStackNodeDriver, \
CloudStackAffinityGroupType
from libcloud.compute.types import LibcloudError, Provider, InvalidCredsError
from libcloud.compute.types import KeyPairDoesNotExistError
from libcloud.compute.types import NodeState
from libcloud.compute.providers import get_driver
from libcloud.test import unittest
from libcloud.test import MockHttpTestCase
from libcloud.test.compute import TestCaseMixin
from libcloud.test.file_fixtures import ComputeFileFixtures
class CloudStackCommonTestCase(TestCaseMixin):
driver_klass = CloudStackNodeDriver
    def setUp(self):
        """Point the driver's connection class at CloudStackMockHttp and
        reset the mock's dispatch state before each test."""
        self.driver_klass.connectionCls.conn_classes = \
            (None, CloudStackMockHttp)
        self.driver = self.driver_klass('apikey', 'secret',
                                        path='/test/path',
                                        host='api.dummy.com')
        self.driver.path = '/test/path'
        self.driver.type = -1
        # Reset per-test mock routing/fixture selection.
        CloudStackMockHttp.type = None
        CloudStackMockHttp.fixture_tag = 'default'
        # Zero poll interval — presumably so async-job polling does not
        # sleep during tests; confirm against the connection class.
        self.driver.connection.poll_interval = 0.0
def test_invalid_credentials(self):
CloudStackMockHttp.type = 'invalid_credentials'
driver = self.driver_klass('invalid', 'invalid', path='/test/path',
host='api.dummy.com')
self.assertRaises(InvalidCredsError, driver.list_nodes)
    def test_import_keypair_from_string_api_error(self):
        """Importing an empty public key surfaces the API error as a
        ProviderError with the server's message."""
        CloudStackMockHttp.type = 'api_error'
        name = 'test-pair'
        key_material = ''
        expected_msg = 'Public key is invalid'
        self.assertRaisesRegexp(ProviderError, expected_msg,
                                self.driver.import_key_pair_from_string,
                                name=name, key_material=key_material)
    def test_create_node_immediate_failure(self):
        """A deploy request that fails synchronously raises an Exception."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        CloudStackMockHttp.fixture_tag = 'deployfail'
        self.assertRaises(
            Exception,
            self.driver.create_node,
            name='node-name', image=image, size=size)
    def test_create_node_delayed_failure(self):
        """A deploy that fails later, during async-job polling, also raises."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        CloudStackMockHttp.fixture_tag = 'deployfail2'
        self.assertRaises(
            Exception,
            self.driver.create_node,
            name='node-name', image=image, size=size)
    def test_create_node_default_location_success(self):
        """When no location is given, the node lands in the first zone."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        default_location = self.driver.list_locations()[0]
        node = self.driver.create_node(name='fred',
                                       image=image,
                                       size=size)
        self.assertEqual(node.name, 'fred')
        self.assertEqual(node.public_ips, [])
        self.assertEqual(node.private_ips, ['192.168.1.2'])
        self.assertEqual(node.extra['zone_id'], default_location.id)
    def test_create_node_ex_networks(self):
        """Deploying into explicit networks attaches one private IP per
        network (fixture supplies two)."""
        CloudStackMockHttp.fixture_tag = 'deploynetworks'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        # Only networks in the target zone are valid for the deploy.
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='deploynetworks',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks)
        self.assertEqual(node.name, 'deploynetworks')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(len(node.private_ips), 2)
    def test_create_node_ex_ipaddress(self):
        """Deploying with ex_ip_address pins the node's first private IP."""
        CloudStackMockHttp.fixture_tag = 'deployip'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        ipaddress = '10.1.0.128'
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='deployip',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks,
                                       ex_ip_address=ipaddress)
        self.assertEqual(node.name, 'deployip')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(node.private_ips[0], ipaddress)
    def test_create_node_ex_rootdisksize(self):
        """Deploying with ex_rootdisksize yields a root volume of that size
        (50 GB == 53687091200 bytes per the fixture)."""
        CloudStackMockHttp.fixture_tag = 'rootdisksize'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        volumes = self.driver.list_volumes()
        rootdisksize = '50'
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='rootdisksize',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks,
                                       ex_rootdisksize=rootdisksize)
        self.assertEqual(node.name, 'rootdisksize')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(1, len(volumes))
        self.assertEqual('ROOT-69941', volumes[0].name)
        self.assertEqual(53687091200, volumes[0].size)
    def test_create_node_ex_start_vm_false(self):
        """Deploying with ex_start_vm=False leaves the node STOPPED."""
        CloudStackMockHttp.fixture_tag = 'stoppedvm'
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        networks = [nw for nw in self.driver.ex_list_networks()
                    if str(nw.zoneid) == str(location.id)]
        node = self.driver.create_node(name='stopped_vm',
                                       location=location,
                                       image=image,
                                       size=size,
                                       networks=networks,
                                       ex_start_vm=False)
        self.assertEqual(node.name, 'stopped_vm')
        self.assertEqual(node.extra['size_id'], size.id)
        self.assertEqual(node.extra['zone_id'], location.id)
        self.assertEqual(node.extra['image_id'], image.id)
        self.assertEqual(node.state, NodeState.STOPPED)
    def test_create_node_ex_security_groups(self):
        """Deploying with ex_security_groups records them in node.extra."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        sg = [sg['name'] for sg in self.driver.ex_list_security_groups()]
        CloudStackMockHttp.fixture_tag = 'deploysecuritygroup'
        node = self.driver.create_node(name='test',
                                       location=location,
                                       image=image,
                                       size=size,
                                       ex_security_groups=sg)
        self.assertEqual(node.name, 'test')
        self.assertEqual(node.extra['security_group'], sg)
        self.assertEqual(node.id, 'fc4fd31a-16d3-49db-814a-56b39b9ef986')
    def test_create_node_ex_keyname(self):
        """Deploying with ex_keyname records the key name in node.extra."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        CloudStackMockHttp.fixture_tag = 'deploykeyname'
        node = self.driver.create_node(name='test',
                                       location=location,
                                       image=image,
                                       size=size,
                                       ex_keyname='foobar')
        self.assertEqual(node.name, 'test')
        self.assertEqual(node.extra['key_name'], 'foobar')
    def test_create_node_project(self):
        """Deploying into a project records the project name in node.extra."""
        size = self.driver.list_sizes()[0]
        image = self.driver.list_images()[0]
        location = self.driver.list_locations()[0]
        project = self.driver.ex_list_projects()[0]
        CloudStackMockHttp.fixture_tag = 'deployproject'
        node = self.driver.create_node(name='test',
                                       location=location,
                                       image=image,
                                       size=size,
                                       project=project)
        self.assertEqual(node.name, 'TestNode')
        self.assertEqual(node.extra['project'], 'Test Project')
def test_list_images_no_images_available(self):
CloudStackMockHttp.fixture_tag = 'notemplates'
images = self.driver.list_images()
self.assertEqual(0, len(images))
    def test_list_images(self):
        """Every listed image matches the corresponding fixture template
        by (stringified) id and name."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listTemplates_default.json')
        templates = fixture['listtemplatesresponse']['template']
        images = self.driver.list_images()
        for i, image in enumerate(images):
            # NodeImage expects id to be a string,
            # the CloudStack fixture has an int
            tid = str(templates[i]['id'])
            tname = templates[i]['name']
            self.assertIsInstance(image.driver, CloudStackNodeDriver)
            self.assertEqual(image.id, tid)
            self.assertEqual(image.name, tname)
def test_ex_list_disk_offerings(self):
diskOfferings = self.driver.ex_list_disk_offerings()
self.assertEqual(1, len(diskOfferings))
diskOffering, = diskOfferings
self.assertEqual('Disk offer 1', diskOffering.name)
self.assertEqual(10, diskOffering.size)
    def test_ex_list_networks(self):
        """Each listed network matches the fixture entry field-for-field."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listNetworks_default.json')
        fixture_networks = fixture['listnetworksresponse']['network']
        networks = self.driver.ex_list_networks()
        for i, network in enumerate(networks):
            self.assertEqual(network.id, fixture_networks[i]['id'])
            self.assertEqual(
                network.displaytext, fixture_networks[i]['displaytext'])
            self.assertEqual(network.name, fixture_networks[i]['name'])
            self.assertEqual(
                network.networkofferingid,
                fixture_networks[i]['networkofferingid'])
            self.assertEqual(network.zoneid, fixture_networks[i]['zoneid'])
    def test_ex_list_network_offerings(self):
        """Each listed network offering matches the fixture entry."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listNetworkOfferings_default.json')
        fixture_networkoffers = \
            fixture['listnetworkofferingsresponse']['networkoffering']
        networkoffers = self.driver.ex_list_network_offerings()
        for i, networkoffer in enumerate(networkoffers):
            self.assertEqual(networkoffer.id, fixture_networkoffers[i]['id'])
            self.assertEqual(networkoffer.name,
                             fixture_networkoffers[i]['name'])
            self.assertEqual(networkoffer.display_text,
                             fixture_networkoffers[i]['displaytext'])
            self.assertEqual(networkoffer.for_vpc,
                             fixture_networkoffers[i]['forvpc'])
            self.assertEqual(networkoffer.guest_ip_type,
                             fixture_networkoffers[i]['guestiptype'])
            self.assertEqual(networkoffer.service_offering_id,
                             fixture_networkoffers[i]['serviceofferingid'])
    def test_ex_create_network(self):
        """Creating a network echoes back the fixture's attributes,
        including VPC/project associations stored in .extra."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createNetwork_default.json')
        fixture_network = fixture['createnetworkresponse']['network']
        netoffer = self.driver.ex_list_network_offerings()[0]
        location = self.driver.list_locations()[0]
        network = self.driver.ex_create_network(display_text='test',
                                                name='test',
                                                network_offering=netoffer,
                                                location=location,
                                                gateway='10.1.1.1',
                                                netmask='255.255.255.0',
                                                network_domain='cloud.local',
                                                vpc_id="2",
                                                project_id="2")
        self.assertEqual(network.name, fixture_network['name'])
        self.assertEqual(network.displaytext, fixture_network['displaytext'])
        self.assertEqual(network.id, fixture_network['id'])
        self.assertEqual(network.extra['gateway'], fixture_network['gateway'])
        self.assertEqual(network.extra['netmask'], fixture_network['netmask'])
        self.assertEqual(network.networkofferingid,
                         fixture_network['networkofferingid'])
        self.assertEqual(network.extra['vpc_id'], fixture_network['vpcid'])
        self.assertEqual(network.extra['project_id'],
                         fixture_network['projectid'])
def test_ex_delete_network(self):
network = self.driver.ex_list_networks()[0]
result = self.driver.ex_delete_network(network=network)
self.assertTrue(result)
    def test_ex_list_nics(self):
        """Each NIC listed for the first VM matches the fixture entry."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listNics_default.json')
        fixture_nic = fixture['listnicsresponse']['nic']
        vm = self.driver.list_nodes()[0]
        nics = self.driver.ex_list_nics(vm)
        for i, nic in enumerate(nics):
            self.assertEqual(nic.id, fixture_nic[i]['id'])
            self.assertEqual(nic.network_id,
                             fixture_nic[i]['networkid'])
            self.assertEqual(nic.net_mask,
                             fixture_nic[i]['netmask'])
            self.assertEqual(nic.gateway,
                             fixture_nic[i]['gateway'])
            self.assertEqual(nic.ip_address,
                             fixture_nic[i]['ipaddress'])
            self.assertEqual(nic.is_default,
                             fixture_nic[i]['isdefault'])
            self.assertEqual(nic.mac_address,
                             fixture_nic[i]['macaddress'])
def test_ex_add_nic_to_node(self):
vm = self.driver.list_nodes()[0]
network = self.driver.ex_list_networks()[0]
ip = "10.1.4.123"
result = self.driver.ex_attach_nic_to_node(node=vm, network=network, ip_address=ip)
self.assertTrue(result)
def test_ex_remove_nic_from_node(self):
vm = self.driver.list_nodes()[0]
nic = self.driver.ex_list_nics(node=vm)[0]
result = self.driver.ex_detach_nic_from_node(node=vm, nic=nic)
self.assertTrue(result)
    def test_ex_list_vpc_offerings(self):
        """Each listed VPC offering matches the fixture entry."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listVPCOfferings_default.json')
        fixture_vpcoffers = \
            fixture['listvpcofferingsresponse']['vpcoffering']
        vpcoffers = self.driver.ex_list_vpc_offerings()
        for i, vpcoffer in enumerate(vpcoffers):
            self.assertEqual(vpcoffer.id, fixture_vpcoffers[i]['id'])
            self.assertEqual(vpcoffer.name,
                             fixture_vpcoffers[i]['name'])
            self.assertEqual(vpcoffer.display_text,
                             fixture_vpcoffers[i]['displaytext'])
    def test_ex_list_vpcs(self):
        """Each listed VPC matches the fixture entry field-for-field."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listVPCs_default.json')
        fixture_vpcs = fixture['listvpcsresponse']['vpc']
        vpcs = self.driver.ex_list_vpcs()
        for i, vpc in enumerate(vpcs):
            self.assertEqual(vpc.id, fixture_vpcs[i]['id'])
            self.assertEqual(vpc.display_text, fixture_vpcs[i]['displaytext'])
            self.assertEqual(vpc.name, fixture_vpcs[i]['name'])
            self.assertEqual(vpc.vpc_offering_id,
                             fixture_vpcs[i]['vpcofferingid'])
            self.assertEqual(vpc.zone_id, fixture_vpcs[i]['zoneid'])
    def test_ex_list_routers(self):
        """Each listed router matches the fixture entry field-for-field."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listRouters_default.json')
        fixture_routers = fixture['listroutersresponse']['router']
        routers = self.driver.ex_list_routers()
        for i, router in enumerate(routers):
            self.assertEqual(router.id, fixture_routers[i]['id'])
            self.assertEqual(router.name, fixture_routers[i]['name'])
            self.assertEqual(router.state, fixture_routers[i]['state'])
            self.assertEqual(router.public_ip, fixture_routers[i]['publicip'])
            self.assertEqual(router.vpc_id, fixture_routers[i]['vpcid'])
    def test_ex_create_vpc(self):
        """Creating a VPC returns an object with the fixture's id."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createVPC_default.json')
        fixture_vpc = fixture['createvpcresponse']
        vpcoffer = self.driver.ex_list_vpc_offerings()[0]
        vpc = self.driver.ex_create_vpc(cidr='10.1.1.0/16',
                                        display_text='cloud.local',
                                        name='cloud.local',
                                        vpc_offering=vpcoffer,
                                        zone_id="2")
        self.assertEqual(vpc.id, fixture_vpc['id'])
def test_ex_delete_vpc(self):
vpc = self.driver.ex_list_vpcs()[0]
result = self.driver.ex_delete_vpc(vpc=vpc)
self.assertTrue(result)
    def test_ex_create_network_acllist(self):
        """Creating an ACL list on a VPC returns the fixture's id."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createNetworkACLList_default.json')
        fixture_network_acllist = fixture['createnetworkacllistresponse']
        vpc = self.driver.ex_list_vpcs()[0]
        network_acllist = self.driver.ex_create_network_acllist(
            name='test_acllist',
            vpc_id=vpc.id,
            description='test description')
        self.assertEqual(network_acllist.id, fixture_network_acllist['id'])
def test_ex_list_network_acllist(self):
_, fixture = CloudStackMockHttp()._load_fixture(
'listNetworkACLLists_default.json')
fixture_acllist = \
fixture['listnetworkacllistsresponse']['networkacllist']
acllist = self.driver.ex_list_network_acllists()
for i, acllist in enumerate(acllist):
self.assertEqual(acllist.id,
fixture_acllist[i]['id'])
self.assertEqual(acllist.name,
fixture_acllist[i]['name'])
self.assertEqual(acllist.description,
fixture_acllist[i]['description'])
    def test_ex_create_network_acl(self):
        """Creating an ACL rule in the first ACL list returns the
        fixture's id."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'createNetworkACL_default.json')
        fixture_network_acllist = fixture['createnetworkaclresponse']
        acllist = self.driver.ex_list_network_acllists()[0]
        network_acl = self.driver.ex_create_network_acl(
            protocol='test_acllist',
            acl_id=acllist.id,
            cidr_list='',
            start_port='80',
            end_port='80')
        self.assertEqual(network_acl.id, fixture_network_acllist['id'])
    def test_ex_list_projects(self):
        """Each listed project matches the fixture; resource limits land
        in .extra."""
        _, fixture = CloudStackMockHttp()._load_fixture(
            'listProjects_default.json')
        fixture_projects = fixture['listprojectsresponse']['project']
        projects = self.driver.ex_list_projects()
        for i, project in enumerate(projects):
            self.assertEqual(project.id, fixture_projects[i]['id'])
            self.assertEqual(
                project.display_text, fixture_projects[i]['displaytext'])
            self.assertEqual(project.name, fixture_projects[i]['name'])
            self.assertEqual(
                project.extra['domainid'],
                fixture_projects[i]['domainid'])
            self.assertEqual(
                project.extra['cpulimit'],
                fixture_projects[i]['cpulimit'])
            # Note -1 represents unlimited
            self.assertEqual(project.extra['networklimit'], -1)
def test_create_volume(self):
volumeName = 'vol-0'
location = self.driver.list_locations()[0]
volume = self.driver.create_volume(10, volumeName, location)
self.assertEqual(volumeName, volume.name)
self.assertEqual(10, volume.size)
def test_create_volume_no_noncustomized_offering_with_size(self):
"""If the sizes of disk offerings are not configurable and there
are no disk offerings with the requested size, an exception should
be thrown."""
location = self.driver.list_locations()[0]
self.assertRaises(
LibcloudError,
self.driver.create_volume,
'vol-0', location, 11)
    def test_create_volume_with_custom_disk_size_offering(self):
        """With a customizable disk offering, an arbitrary size succeeds."""
        CloudStackMockHttp.fixture_tag = 'withcustomdisksize'
        volumeName = 'vol-0'
        location = self.driver.list_locations()[0]
        volume = self.driver.create_volume(10, volumeName, location)
        self.assertEqual(volumeName, volume.name)
def test_attach_volume(self):
node = self.driver.list_nodes()[0]
volumeName = 'vol-0'
location = self.driver.list_locations()[0]
volume = self.driver.create_volume(10, volumeName, location)
attachReturnVal = self.driver.attach_volume(volume, node)
self.assertTrue(attachReturnVal)
def test_detach_volume(self):
volumeName = 'gre-test-volume'
location = self.driver.list_locations()[0]
volume = self.driver.create_volume(10, volumeName, location)
res = self.driver.detach_volume(volume)
self.assertTrue(res)
def test_destroy_volume(self):
volumeName = 'gre-test-volume'
location = self.driver.list_locations()[0]
volume = self.driver.create_volume(10, volumeName, location)
res = self.driver.destroy_volume(volume)
self.assertTrue(res)
def test_list_volumes(self):
volumes = self.driver.list_volumes()
self.assertEqual(1, len(volumes))
self.assertEqual('ROOT-69942', volumes[0].name)
def test_ex_get_volume(self):
volume = self.driver.ex_get_volume(2600)
self.assertEqual('ROOT-69942', volume.name)
    def test_list_nodes(self):
        """Two nodes are listed; the first matches the default fixture."""
        nodes = self.driver.list_nodes()
        self.assertEqual(2, len(nodes))
        self.assertEqual('test', nodes[0].name)
        self.assertEqual('2600', nodes[0].id)
        self.assertEqual([], nodes[0].extra['security_group'])
        self.assertEqual(None, nodes[0].extra['key_name'])
    def test_ex_get_node(self):
        """Fetching node 2600 by id returns the expected node."""
        node = self.driver.ex_get_node(2600)
        self.assertEqual('test', node.name)
        self.assertEqual('2600', node.id)
        self.assertEqual([], node.extra['security_group'])
        self.assertEqual(None, node.extra['key_name'])
def test_ex_get_node_doesnt_exist(self):
self.assertRaises(Exception, self.driver.ex_get_node(26), node_id=26)
def test_list_locations(self):
location = self.driver.list_locations()[0]
self.assertEqual('1', location.id)
self.assertEqual('Sydney', location.name)
    def test_list_sizes(self):
        """Service offerings map to NodeSizes with fixture names/ids/RAM."""
        sizes = self.driver.list_sizes()
        self.assertEqual('Compute Micro PRD', sizes[0].name)
        self.assertEqual('105', sizes[0].id)
        self.assertEqual(384, sizes[0].ram)
        self.assertEqual('Compute Large PRD', sizes[2].name)
        self.assertEqual('69', sizes[2].id)
        self.assertEqual(6964, sizes[2].ram)
def test_ex_start_node(self):
node = self.driver.list_nodes()[0]
res = node.ex_start()
self.assertEqual('Starting', res)
def test_ex_stop_node(self):
node = self.driver.list_nodes()[0]
res = node.ex_stop()
self.assertEqual('Stopped', res)
def test_destroy_node(self):
node = self.driver.list_nodes()[0]
res = node.destroy()
self.assertTrue(res)
def test_expunge_node(self):
node = self.driver.list_nodes()[0]
res = self.driver.destroy_node(node, ex_expunge=True)
self.assertTrue(res)
def test_reboot_node(self):
node = self.driver.list_nodes()[0]
res = node.reboot()
self.assertTrue(res)
    def test_list_key_pairs(self):
        """Both the KeyPair API and the deprecated ex_list_keypairs return
        the fixture key pair with its all-zero fingerprint."""
        keypairs = self.driver.list_key_pairs()
        fingerprint = '00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:' + \
                      '00:00:00:00:00'
        self.assertEqual(keypairs[0].name, 'cs-keypair')
        self.assertEqual(keypairs[0].fingerprint, fingerprint)
        # Test old and deprecated way
        keypairs = self.driver.ex_list_keypairs()
        self.assertEqual(keypairs[0]['name'], 'cs-keypair')
        self.assertEqual(keypairs[0]['fingerprint'], fingerprint)
def test_list_key_pairs_no_keypair_key(self):
CloudStackMockHttp.fixture_tag = 'no_keys'
keypairs = self.driver.list_key_pairs()
self.assertEqual(keypairs, [])
def test_get_key_pair(self):
CloudStackMockHttp.fixture_tag = 'get_one'
key_pair = self.driver.get_key_pair(name='cs-keypair')
self.assertEqual(key_pair.name, 'cs-keypair')
    def test_get_key_pair_doesnt_exist(self):
        """Fetching a missing key pair raises KeyPairDoesNotExistError."""
        CloudStackMockHttp.fixture_tag = 'get_one_doesnt_exist'
        self.assertRaises(KeyPairDoesNotExistError, self.driver.get_key_pair,
                          name='does-not-exist')
    def test_create_keypair(self):
        """Creating a key pair returns name, fingerprint and private key —
        via both the KeyPair API and the deprecated ex_create_keypair."""
        key_pair = self.driver.create_key_pair(name='test-keypair')
        self.assertEqual(key_pair.name, 'test-keypair')
        self.assertTrue(key_pair.fingerprint is not None)
        self.assertTrue(key_pair.private_key is not None)
        # Test old and deprecated way
        res = self.driver.ex_create_keypair(name='test-keypair')
        self.assertEqual(res['name'], 'test-keypair')
        self.assertTrue(res['fingerprint'] is not None)
        self.assertTrue(res['privateKey'] is not None)
    def test_import_keypair_from_file(self):
        """Importing the bundled dummy_rsa.pub yields the expected
        fingerprint — via both APIs."""
        fingerprint = 'c4:a1:e5:d4:50:84:a9:4c:6b:22:ee:d6:57:02:b8:15'
        path = os.path.join(os.path.dirname(__file__), 'fixtures',
                            'cloudstack',
                            'dummy_rsa.pub')
        key_pair = self.driver.import_key_pair_from_file('foobar', path)
        self.assertEqual(key_pair.name, 'foobar')
        self.assertEqual(key_pair.fingerprint, fingerprint)
        # Test old and deprecated way
        res = self.driver.ex_import_keypair('foobar', path)
        self.assertEqual(res['keyName'], 'foobar')
        self.assertEqual(res['keyFingerprint'], fingerprint)
def test_ex_import_keypair_from_string(self):
fingerprint = 'c4:a1:e5:d4:50:84:a9:4c:6b:22:ee:d6:57:02:b8:15'
path = os.path.join(os.path.dirname(__file__), 'fixtures',
'cloudstack',
'dummy_rsa.pub')
fh = open(path)
key_material = fh.read()
fh.close()
key_pair = self.driver.import_key_pair_from_string('foobar', key_material=key_material)
self.assertEqual(key_pair.name, 'foobar')
self.assertEqual(key_pair.fingerprint, fingerprint)
# Test old and deprecated way
res = self.driver.ex_import_keypair_from_string('foobar', key_material=key_material)
self.assertEqual(res['keyName'], 'foobar')
self.assertEqual(res['keyFingerprint'], fingerprint)
    def test_delete_key_pair(self):
        """Deleting a key pair succeeds via both APIs."""
        key_pair = self.driver.list_key_pairs()[0]
        res = self.driver.delete_key_pair(key_pair=key_pair)
        self.assertTrue(res)
        # Test old and deprecated way
        res = self.driver.ex_delete_keypair(keypair='cs-keypair')
        self.assertTrue(res)
def test_ex_list_security_groups(self):
groups = self.driver.ex_list_security_groups()
self.assertEqual(2, len(groups))
self.assertEqual(groups[0]['name'], 'default')
self.assertEqual(groups[1]['name'], 'mongodb')
def test_ex_list_security_groups_no_securitygroup_key(self):
CloudStackMockHttp.fixture_tag = 'no_groups'
groups = self.driver.ex_list_security_groups()
self.assertEqual(groups, [])
def test_ex_create_security_group(self):
group = self.driver.ex_create_security_group(name='MySG')
self.assertEqual(group['name'], 'MySG')
def test_ex_delete_security_group(self):
res = self.driver.ex_delete_security_group(name='MySG')
self.assertTrue(res)
    def test_ex_authorize_security_group_ingress(self):
        """Authorizing a TCP/22 ingress rule from anywhere succeeds."""
        res = self.driver.ex_authorize_security_group_ingress('MySG',
                                                              'TCP',
                                                              '22',
                                                              '22',
                                                              '0.0.0.0/0')
        self.assertTrue(res)
    def test_ex_create_affinity_group(self):
        """Creating an affinity group returns its name and typed type."""
        res = self.driver.ex_create_affinity_group('MyAG2',
                                                   CloudStackAffinityGroupType('MyAGType'))
        self.assertEqual(res.name, 'MyAG2')
        self.assertIsInstance(res.type, CloudStackAffinityGroupType)
        self.assertEqual(res.type.type, 'MyAGType')
    def test_ex_create_affinity_group_already_exists(self):
        """Creating a duplicate affinity group raises LibcloudError."""
        self.assertRaises(LibcloudError,
                          self.driver.ex_create_affinity_group,
                          'MyAG', CloudStackAffinityGroupType('MyAGType'))
    def test_delete_ex_affinity_group(self):
        """A newly created affinity group can be deleted."""
        afg = self.driver.ex_create_affinity_group('MyAG3',
                                                   CloudStackAffinityGroupType('MyAGType'))
        res = self.driver.ex_delete_affinity_group(afg)
        self.assertTrue(res)
    def test_ex_update_node_affinity_group(self):
        """Updating a node's affinity groups reflects the first group id
        in node.extra."""
        affinity_group_list = self.driver.ex_list_affinity_groups()
        nodes = self.driver.list_nodes()
        node = self.driver.ex_update_node_affinity_group(nodes[0],
                                                         affinity_group_list)
        self.assertEqual(node.extra['affinity_group'][0],
                         affinity_group_list[0].id)
    def test_ex_list_affinity_groups(self):
        """Exactly one affinity group is listed, matching the fixture."""
        res = self.driver.ex_list_affinity_groups()
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0].id, '11112')
        self.assertEqual(res[0].name, 'MyAG')
        self.assertIsInstance(res[0].type, CloudStackAffinityGroupType)
        self.assertEqual(res[0].type.type, 'MyAGType')
    def test_ex_list_affinity_group_types(self):
        """Exactly one affinity group type, 'MyAGType', is listed."""
        res = self.driver.ex_list_affinity_group_types()
        self.assertEqual(len(res), 1)
        self.assertIsInstance(res[0], CloudStackAffinityGroupType)
        self.assertEqual(res[0].type, 'MyAGType')
def test_ex_list_public_ips(self):
ips = self.driver.ex_list_public_ips()
self.assertEqual(ips[0].address, '1.1.1.116')
self.assertEqual(ips[0].virtualmachine_id, '2600')
def test_ex_allocate_public_ip(self):
addr = self.driver.ex_allocate_public_ip()
self.assertEqual(addr.address, '7.5.6.1')
self.assertEqual(addr.id, '10987171-8cc9-4d0a-b98f-1698c09ddd2d')
def test_ex_release_public_ip(self):
addresses = self.driver.ex_list_public_ips()
res = self.driver.ex_release_public_ip(addresses[0])
self.assertTrue(res)
    def test_ex_create_port_forwarding_rule(self):
        """Creating a TCP 33-34 port-forwarding rule echoes all rule
        attributes back."""
        node = self.driver.list_nodes()[0]
        address = self.driver.ex_list_public_ips()[0]
        private_port = 33
        private_end_port = 34
        public_port = 33
        public_end_port = 34
        openfirewall = True
        protocol = 'TCP'
        rule = self.driver.ex_create_port_forwarding_rule(node,
                                                          address,
                                                          private_port,
                                                          public_port,
                                                          protocol,
                                                          public_end_port,
                                                          private_end_port,
                                                          openfirewall)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertEqual(rule.public_port, public_port)
        self.assertEqual(rule.public_end_port, public_end_port)
        self.assertEqual(rule.private_port, private_port)
        self.assertEqual(rule.private_end_port, private_end_port)
    def test_ex_list_firewall_rules(self):
        """The single TCP firewall rule is listed with ports and no
        ICMP fields."""
        rules = self.driver.ex_list_firewall_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertEqual(rule.address.address, '1.1.1.116')
        self.assertEqual(rule.protocol, 'tcp')
        self.assertEqual(rule.cidr_list, '192.168.0.0/16')
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, '33')
        self.assertEqual(rule.end_port, '34')
    def test_ex_list_firewall_rules_icmp(self):
        """An ICMP firewall rule carries icmp_code/icmp_type and no ports."""
        CloudStackMockHttp.fixture_tag = 'firewallicmp'
        rules = self.driver.ex_list_firewall_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertEqual(rule.address.address, '1.1.1.116')
        self.assertEqual(rule.protocol, 'icmp')
        self.assertEqual(rule.cidr_list, '192.168.0.0/16')
        self.assertEqual(rule.icmp_code, 0)
        self.assertEqual(rule.icmp_type, 8)
        self.assertIsNone(rule.start_port)
        self.assertIsNone(rule.end_port)
def test_ex_delete_firewall_rule(self):
rules = self.driver.ex_list_firewall_rules()
res = self.driver.ex_delete_firewall_rule(rules[0])
self.assertTrue(res)
    def test_ex_create_firewall_rule(self):
        """Creating a TCP firewall rule echoes ports and leaves ICMP
        fields unset."""
        address = self.driver.ex_list_public_ips()[0]
        cidr_list = '192.168.0.0/16'
        protocol = 'TCP'
        start_port = 33
        end_port = 34
        rule = self.driver.ex_create_firewall_rule(address,
                                                   cidr_list,
                                                   protocol,
                                                   start_port=start_port,
                                                   end_port=end_port)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, start_port)
        self.assertEqual(rule.end_port, end_port)
    def test_ex_create_firewall_rule_icmp(self):
        """Creating an ICMP firewall rule echoes code/type and leaves
        port fields unset."""
        address = self.driver.ex_list_public_ips()[0]
        cidr_list = '192.168.0.0/16'
        protocol = 'icmp'
        icmp_code = 0
        icmp_type = 8
        rule = self.driver.ex_create_firewall_rule(address,
                                                   cidr_list,
                                                   protocol,
                                                   icmp_code=icmp_code,
                                                   icmp_type=icmp_type)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertEqual(rule.icmp_code, 0)
        self.assertEqual(rule.icmp_type, 8)
        self.assertIsNone(rule.start_port)
        self.assertIsNone(rule.end_port)
    def test_ex_list_egress_firewall_rules(self):
        """The single egress firewall rule matches the fixture values."""
        rules = self.driver.ex_list_egress_firewall_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertEqual(rule.network_id, '874be2ca-20a7-4360-80e9-7356c0018c0b')
        self.assertEqual(rule.cidr_list, '192.168.0.0/16')
        self.assertEqual(rule.protocol, 'tcp')
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, '80')
        self.assertEqual(rule.end_port, '80')
def test_ex_delete_egress_firewall_rule(self):
rules = self.driver.ex_list_egress_firewall_rules()
res = self.driver.ex_delete_egress_firewall_rule(rules[0])
self.assertTrue(res)
    def test_ex_create_egress_firewall_rule(self):
        """Creating a TCP egress rule echoes network, CIDR and ports."""
        network_id = '874be2ca-20a7-4360-80e9-7356c0018c0b'
        cidr_list = '192.168.0.0/16'
        protocol = 'TCP'
        start_port = 33
        end_port = 34
        rule = self.driver.ex_create_egress_firewall_rule(
            network_id,
            cidr_list,
            protocol,
            start_port=start_port,
            end_port=end_port)
        self.assertEqual(rule.network_id, network_id)
        self.assertEqual(rule.cidr_list, cidr_list)
        self.assertEqual(rule.protocol, protocol)
        self.assertIsNone(rule.icmp_code)
        self.assertIsNone(rule.icmp_type)
        self.assertEqual(rule.start_port, start_port)
        self.assertEqual(rule.end_port, end_port)
    def test_ex_list_port_forwarding_rules(self):
        """The single port-forwarding rule matches the fixture values."""
        rules = self.driver.ex_list_port_forwarding_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertTrue(rule.node)
        self.assertEqual(rule.protocol, 'tcp')
        self.assertEqual(rule.public_port, '33')
        self.assertEqual(rule.public_end_port, '34')
        self.assertEqual(rule.private_port, '33')
        self.assertEqual(rule.private_end_port, '34')
        self.assertEqual(rule.address.address, '1.1.1.116')
    def test_ex_delete_port_forwarding_rule(self):
        """Deleting a port-forwarding rule via the driver succeeds."""
        node = self.driver.list_nodes()[0]
        rule = self.driver.ex_list_port_forwarding_rules()[0]
        res = self.driver.ex_delete_port_forwarding_rule(node, rule)
        self.assertTrue(res)
    def test_node_ex_delete_port_forwarding_rule(self):
        """Deleting a rule via the rule object removes it from the node's
        cached extra['port_forwarding_rules'] list."""
        node = self.driver.list_nodes()[0]
        self.assertEqual(len(node.extra['port_forwarding_rules']), 1)
        node.extra['port_forwarding_rules'][0].delete()
        self.assertEqual(len(node.extra['port_forwarding_rules']), 0)
    def test_node_ex_create_port_forwarding_rule(self):
        """Creating a rule via the node object echoes the rule and appends
        it to extra['port_forwarding_rules']."""
        node = self.driver.list_nodes()[0]
        self.assertEqual(len(node.extra['port_forwarding_rules']), 1)
        address = self.driver.ex_list_public_ips()[0]
        private_port = 33
        private_end_port = 34
        public_port = 33
        public_end_port = 34
        openfirewall = True
        protocol = 'TCP'
        rule = node.ex_create_port_forwarding_rule(address,
                                                   private_port,
                                                   public_port,
                                                   protocol,
                                                   public_end_port,
                                                   private_end_port,
                                                   openfirewall)
        self.assertEqual(rule.address, address)
        self.assertEqual(rule.protocol, protocol)
        self.assertEqual(rule.public_port, public_port)
        self.assertEqual(rule.public_end_port, public_end_port)
        self.assertEqual(rule.private_port, private_port)
        self.assertEqual(rule.private_end_port, private_end_port)
        self.assertEqual(len(node.extra['port_forwarding_rules']), 2)
    def test_ex_list_ip_forwarding_rules(self):
        """The single IP-forwarding rule matches the fixture values."""
        rules = self.driver.ex_list_ip_forwarding_rules()
        self.assertEqual(len(rules), 1)
        rule = rules[0]
        self.assertTrue(rule.node)
        self.assertEqual(rule.protocol, 'tcp')
        self.assertEqual(rule.start_port, 33)
        self.assertEqual(rule.end_port, 34)
        self.assertEqual(rule.address.address, '1.1.1.116')
def test_ex_limits(self):
limits = self.driver.ex_limits()
self.assertEqual(limits['max_images'], 20)
self.assertEqual(limits['max_networks'], 20)
self.assertEqual(limits['max_public_ips'], -1)
self.assertEqual(limits['max_vpc'], 20)
self.assertEqual(limits['max_instances'], 20)
self.assertEqual(limits['max_projects'], -1)
self.assertEqual(limits['max_volumes'], 20)
self.assertEqual(limits['max_snapshots'], 20)
def test_ex_create_tags(self):
node = self.driver.list_nodes()[0]
tags = {'Region': 'Canada'}
resp = self.driver.ex_create_tags([node.id], 'UserVm', tags)
self.assertTrue(resp)
def test_ex_delete_tags(self):
node = self.driver.list_nodes()[0]
tag_keys = ['Region']
resp = self.driver.ex_delete_tags([node.id], 'UserVm', tag_keys)
self.assertTrue(resp)
    def test_list_snapshots(self):
        # The fixture contains three snapshots; spot-check the parsed id,
        # name and originating volume of the first one.
        snapshots = self.driver.list_snapshots()
        self.assertEqual(len(snapshots), 3)
        snap = snapshots[0]
        self.assertEqual(snap.id, 188402)
        self.assertEqual(snap.extra['name'], "i-123-87654-VM_ROOT-12344_20140917105548")
        self.assertEqual(snap.extra['volume_id'], 89341)
    def test_create_volume_snapshot(self):
        # Snapshotting the first volume must return the snapshot described
        # by the fixture (id, name and source volume id).
        volume = self.driver.list_volumes()[0]
        snapshot = self.driver.create_volume_snapshot(volume)
        self.assertEqual(snapshot.id, 190547)
        self.assertEqual(snapshot.extra['name'], "i-123-87654-VM_ROOT-23456_20140917105548")
        self.assertEqual(snapshot.extra['volume_id'], "fe1ada16-57a0-40ae-b577-01a153690fb4")
def test_destroy_volume_snapshot(self):
snapshot = self.driver.list_snapshots()[0]
resp = self.driver.destroy_volume_snapshot(snapshot)
self.assertTrue(resp)
    def test_ex_create_snapshot_template(self):
        # Creating a template from a snapshot (with OS type id 99) must
        # return the template described by the fixture.
        snapshot = self.driver.list_snapshots()[0]
        template = self.driver.ex_create_snapshot_template(snapshot, "test-libcloud-template", 99)
        self.assertEqual(template.id, '10260')
        self.assertEqual(template.name, "test-libcloud-template")
        self.assertEqual(template.extra['displaytext'], "test-libcloud-template")
        self.assertEqual(template.extra['hypervisor'], "VMware")
        self.assertEqual(template.extra['os'], "Other Linux (64-bit)")
def test_ex_list_os_types(self):
os_types = self.driver.ex_list_os_types()
self.assertEqual(len(os_types), 146)
self.assertEqual(os_types[0]['id'], 69)
self.assertEqual(os_types[0]['oscategoryid'], 7)
self.assertEqual(os_types[0]['description'], "Asianux 3(32-bit)")
def test_ex_list_vpn_gateways(self):
vpn_gateways = self.driver.ex_list_vpn_gateways()
self.assertEqual(len(vpn_gateways), 1)
self.assertEqual(vpn_gateways[0].id, 'cffa0cab-d1da-42a7-92f6-41379267a29f')
self.assertEqual(vpn_gateways[0].account, 'some_account')
self.assertEqual(vpn_gateways[0].domain, 'some_domain')
self.assertEqual(vpn_gateways[0].domain_id, '9b397dea-25ef-4c5d-b47d-627eaebe8ed8')
self.assertEqual(vpn_gateways[0].public_ip, '1.2.3.4')
self.assertEqual(vpn_gateways[0].vpc_id, '4d25e181-8850-4d52-8ecb-a6f35bbbabde')
    def test_ex_create_vpn_gateway(self):
        # Creating a VPN gateway for the first VPC must return a gateway
        # bound to that VPC, with the fields given by the fixture.
        vpc = self.driver.ex_list_vpcs()[0]
        vpn_gateway = self.driver.ex_create_vpn_gateway(vpc)
        self.assertEqual(vpn_gateway.id, '5ef6794e-cec8-4018-9fef-c4dacbadee14')
        self.assertEqual(vpn_gateway.account, 'some_account')
        self.assertEqual(vpn_gateway.domain, 'some_domain')
        self.assertEqual(vpn_gateway.domain_id, '9b397dea-25ef-4c5d-b47d-627eaebe8ed8')
        self.assertEqual(vpn_gateway.public_ip, '2.3.4.5')
        self.assertEqual(vpn_gateway.vpc_id, vpc.id)
def test_ex_delete_vpn_gateway(self):
vpn_gateway = self.driver.ex_list_vpn_gateways()[0]
self.assertTrue(vpn_gateway.delete())
def test_ex_list_vpn_customer_gateways(self):
vpn_customer_gateways = self.driver.ex_list_vpn_customer_gateways()
self.assertEqual(len(vpn_customer_gateways), 1)
self.assertEqual(vpn_customer_gateways[0].id, 'ea67eaae-1c2a-4e65-b910-441e77f69bea')
self.assertEqual(vpn_customer_gateways[0].cidr_list, '10.2.2.0/24')
self.assertEqual(vpn_customer_gateways[0].esp_policy, '3des-md5')
self.assertEqual(vpn_customer_gateways[0].gateway, '10.2.2.1')
self.assertEqual(vpn_customer_gateways[0].ike_policy, '3des-md5')
self.assertEqual(vpn_customer_gateways[0].ipsec_psk, 'some_psk')
    def test_ex_create_vpn_customer_gateway(self):
        # Creating a customer gateway must echo the requested IPsec
        # parameters back on the returned object.
        vpn_customer_gateway = self.driver.ex_create_vpn_customer_gateway(
            cidr_list='10.0.0.0/24',
            esp_policy='3des-md5',
            gateway='10.0.0.1',
            ike_policy='3des-md5',
            ipsec_psk='ipsecpsk')
        self.assertEqual(vpn_customer_gateway.id, 'cef3c766-116a-4e83-9844-7d08ab7d3fd4')
        self.assertEqual(vpn_customer_gateway.esp_policy, '3des-md5')
        self.assertEqual(vpn_customer_gateway.gateway, '10.0.0.1')
        self.assertEqual(vpn_customer_gateway.ike_policy, '3des-md5')
        self.assertEqual(vpn_customer_gateway.ipsec_psk, 'ipsecpsk')
def test_ex_ex_delete_vpn_customer_gateway(self):
vpn_customer_gateway = self.driver.ex_list_vpn_customer_gateways()[0]
self.assertTrue(vpn_customer_gateway.delete())
def test_ex_list_vpn_connections(self):
vpn_connections = self.driver.ex_list_vpn_connections()
self.assertEqual(len(vpn_connections), 1)
self.assertEqual(vpn_connections[0].id, '8f482d9a-6cee-453b-9e78-b0e1338ffce9')
self.assertEqual(vpn_connections[0].passive, False)
self.assertEqual(vpn_connections[0].vpn_customer_gateway_id, 'ea67eaae-1c2a-4e65-b910-441e77f69bea')
self.assertEqual(vpn_connections[0].vpn_gateway_id, 'cffa0cab-d1da-42a7-92f6-41379267a29f')
self.assertEqual(vpn_connections[0].state, 'Connected')
    def test_ex_create_vpn_connection(self):
        # Connecting the existing customer gateway to the existing VPN
        # gateway must return a 'Connected', non-passive connection linking
        # the two ids.
        vpn_customer_gateway = self.driver.ex_list_vpn_customer_gateways()[0]
        vpn_gateway = self.driver.ex_list_vpn_gateways()[0]
        vpn_connection = self.driver.ex_create_vpn_connection(
            vpn_customer_gateway,
            vpn_gateway)
        self.assertEqual(vpn_connection.id, 'f45c3af8-f909-4f16-9d40-ed4409c575f8')
        self.assertEqual(vpn_connection.passive, False)
        self.assertEqual(vpn_connection.vpn_customer_gateway_id, 'ea67eaae-1c2a-4e65-b910-441e77f69bea')
        self.assertEqual(vpn_connection.vpn_gateway_id, 'cffa0cab-d1da-42a7-92f6-41379267a29f')
        self.assertEqual(vpn_connection.state, 'Connected')
def test_ex_delete_vpn_connection(self):
vpn_connection = self.driver.ex_list_vpn_connections()[0]
self.assertTrue(vpn_connection.delete())
class CloudStackTestCase(CloudStackCommonTestCase, unittest.TestCase):
    # Runs the shared CloudStack driver tests plus checks of how the
    # driver itself is instantiated (by URL or by host + path).
    def test_driver_instantiation(self):
        # A 'url' argument must be decomposed into host, path and port,
        # with the port defaulting to 80/443 depending on the scheme.
        urls = [
            'http://api.exoscale.ch/compute1', # http, default port
            'https://api.exoscale.ch/compute2', # https, default port
            'http://api.exoscale.ch:8888/compute3', # https, custom port
            'https://api.exoscale.ch:8787/compute4', # https, custom port
            'https://api.test.com/compute/endpoint' # https, default port
        ]
        expected_values = [
            {'host': 'api.exoscale.ch', 'port': 80, 'path': '/compute1'},
            {'host': 'api.exoscale.ch', 'port': 443, 'path': '/compute2'},
            {'host': 'api.exoscale.ch', 'port': 8888, 'path': '/compute3'},
            {'host': 'api.exoscale.ch', 'port': 8787, 'path': '/compute4'},
            {'host': 'api.test.com', 'port': 443, 'path': '/compute/endpoint'}
        ]
        cls = get_driver(Provider.CLOUDSTACK)
        for url, expected in zip(urls, expected_values):
            driver = cls('key', 'secret', url=url)
            self.assertEqual(driver.host, expected['host'])
            self.assertEqual(driver.path, expected['path'])
            self.assertEqual(driver.connection.port, expected['port'])
    def test_user_must_provide_host_and_path_or_url(self):
        # Instantiating with neither url nor host/path must raise; either
        # form on its own must be accepted.
        expected_msg = ('When instantiating CloudStack driver directly '
                        'you also need to provide url or host and path '
                        'argument')
        cls = get_driver(Provider.CLOUDSTACK)
        self.assertRaisesRegexp(Exception, expected_msg, cls,
                                'key', 'secret')
        try:
            cls('key', 'secret', True, 'localhost', '/path')
        except Exception:
            self.fail('host and path provided but driver raised an exception')
        try:
            cls('key', 'secret', url='https://api.exoscale.ch/compute')
        except Exception:
            self.fail('url provided but driver raised an exception')
class CloudStackMockHttp(MockHttpTestCase):
    # Mock HTTP layer for the CloudStack API: validates the signed query
    # string and replies with JSON fixtures named
    # '<command>_<fixture_tag>.json'.
    fixtures = ComputeFileFixtures('cloudstack')
    fixture_tag = 'default'
    def _load_fixture(self, fixture):
        # Return both the raw body and its decoded JSON object.
        body = self.fixtures.load(fixture)
        return body, json.loads(body)
    def _test_path_invalid_credentials(self, method, url, body, headers):
        # Simulates a 401 for the invalid-credentials test.
        body = ''
        return (httplib.UNAUTHORIZED, body, {},
                httplib.responses[httplib.UNAUTHORIZED])
    def _test_path_api_error(self, method, url, body, headers):
        # Simulates a CloudStack API error (non-standard status code 431).
        body = self.fixtures.load('registerSSHKeyPair_error.json')
        return (431, body, {},
                httplib.responses[httplib.OK])
    def _test_path(self, method, url, body, headers):
        # Main dispatcher: every request must carry apiKey, command,
        # response=json and signature. Commands with a dedicated
        # '_cmd_<command>' handler are delegated to it; all others are
        # answered from a fixture file.
        url = urlparse.urlparse(url)
        query = dict(parse_qsl(url.query))
        self.assertTrue('apiKey' in query)
        self.assertTrue('command' in query)
        self.assertTrue('response' in query)
        self.assertTrue('signature' in query)
        self.assertTrue(query['response'] == 'json')
        del query['apiKey']
        del query['response']
        del query['signature']
        command = query.pop('command')
        if hasattr(self, '_cmd_' + command):
            return getattr(self, '_cmd_' + command)(**query)
        else:
            fixture = command + '_' + self.fixture_tag + '.json'
            body, obj = self._load_fixture(fixture)
            return (httplib.OK, body, obj, httplib.responses[httplib.OK])
    def _cmd_queryAsyncJobResult(self, jobid):
        # Async job polling: the fixture is selected by job id.
        fixture = 'queryAsyncJobResult' + '_' + str(jobid) + '.json'
        body, obj = self._load_fixture(fixture)
        return (httplib.OK, body, obj, httplib.responses[httplib.OK])
# Allow running this test module directly; the unittest result becomes
# the process exit status.
if __name__ == '__main__':
    sys.exit(unittest.main())
from Tools.Profile import profile
profile("LOAD:ElementTree")
import xml.etree.cElementTree
import os
profile("LOAD:enigma_skin")
from enigma import eSize, ePoint, eRect, gFont, eWindow, eLabel, ePixmap, eWindowStyleManager, addFont, gRGB, eWindowStyleSkinned, getDesktop
from Components.config import ConfigSubsection, ConfigText, config, ConfigYesNo, ConfigSelection, ConfigNothing
from Components.Converter.Converter import Converter
from Components.Sources.Source import Source, ObsoleteSource
from Components.SystemInfo import SystemInfo
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_SKIN_IMAGE, SCOPE_FONTS, SCOPE_ACTIVE_SKIN, SCOPE_ACTIVE_LCDSKIN, SCOPE_CURRENT_SKIN, SCOPE_CONFIG, fileExists
from Tools.Import import my_import
from Tools.LoadPixmap import LoadPixmap
from Components.RcModel import rc_model
from boxbranding import getBoxType
# Front-panel (VFD) display selection; reduced to a no-op setting when the
# text skin is not installed on this image.
config.vfd = ConfigSubsection()
config.vfd.show = ConfigSelection([("skin_text.xml", _("Channel Name")), ("skin_text_clock.xml", _("Clock"))], "skin_text.xml")
if not os.path.exists("/usr/share/enigma2/skin_text.xml"):
	config.vfd.show = ConfigNothing()
# Global registries filled while parsing skin XML files.
colorNames = {}
colorNamesHuman = {}
# Predefined fonts, typically used in built-in screens and for components like
# the movie list and so.
fonts = {
	"Body": ("Regular", 18, 22, 16),
	"ChoiceList": ("Regular", 20, 24, 18),
}
parameters = {}
constant_widgets = {}
variables = {}
DEFAULT_SKIN = "MetrixHD/skin.xml"
DEFAULT_DISPLAY_SKIN = "skin_display.xml"
if SystemInfo["grautec"]:
	DEFAULT_DISPLAY_SKIN = "skin_display_grautec.xml"
# Set to True when a loaded skin identifies itself as a VTI skin (see
# AttributeParser.title); enables a textOffset workaround.
isVTISkin = False
def dump(x, i=0):
print " " * i + str(x)
try:
for n in x.childNodes:
dump(n, i + 1)
except:
None
class SkinError(Exception):
	# Raised for problems in the primary skin; __str__ names the skin so the
	# on-screen error points users at the skin author, not at enigma2.
	def __init__(self, message):
		self.msg = message
	def __str__(self):
		return "{%s}: %s. Please contact the skin's author!" % (config.skin.primary_skin.value, self.msg)
class DisplaySkinError(Exception):
	# Same as SkinError, but reports the configured front-panel display skin.
	def __init__(self, message):
		self.msg = message
	def __str__(self):
		return "{%s}: %s. Please contact the skin's author!" % (config.skin.display_skin.value, self.msg)
dom_skins = [ ]
def addSkin(name, scope = SCOPE_SKIN):
# read the skin
if name is None or not len(name):
print "[SKIN ERROR] attempt to add a skin without filename"
return False
filename = resolveFilename(scope, name)
if fileExists(filename):
mpath = os.path.dirname(filename) + "/"
try:
dom_skins.append((mpath, xml.etree.cElementTree.parse(filename).getroot()))
except:
print "[SKIN ERROR] error in %s" % filename
return False
else:
return True
return False
def get_modular_files(name, scope = SCOPE_SKIN):
	"""Return the 'mySkin/skin_*.xml' fragment files of the given skin
	directory, sorted case-insensitively; an empty list when the directory
	is missing or the default skin is active."""
	fragment_dir = resolveFilename(scope, name + 'mySkin/')
	file_list = []
	if fileExists(fragment_dir) and config.skin.primary_skin.value != DEFAULT_SKIN:
		for entry in os.listdir(fragment_dir):
			if entry.startswith('skin_') and entry.endswith('.xml'):
				file_list.append("mySkin/" + entry)
		file_list = sorted(file_list, key=str.lower)
	return file_list
# get own skin_user_skinname.xml file, if exist
def skin_user_skinname():
	"""Return the per-skin user override filename
	('skin_user_<skindir>.xml') when it exists in the config directory,
	else None."""
	primary = config.skin.primary_skin.value
	name = "skin_user_" + primary[:primary.rfind('/')] + ".xml"
	if fileExists(resolveFilename(SCOPE_CONFIG, name)):
		return name
	return None
# we do our best to always select the "right" value
# skins are loaded in order of priority: skin with
# highest priority is loaded last, usually the user-provided
# skin.
# currently, loadSingleSkinData (colors, bordersets etc.)
# are applied one-after-each, in order of ascending priority.
# the dom_skin will keep all screens in descending priority,
# so the first screen found will be used.
# example: loadSkin("nemesis_greenline/skin.xml")
config.skin = ConfigSubsection()
config.skin.primary_skin = ConfigText(default = DEFAULT_SKIN)
config.skin.display_skin = ConfigText(default = DEFAULT_DISPLAY_SKIN)
##################################################################################################
# One-shot skin-settings restore: the marker file is created by a settings
# restore; each plugin's compiled ActivateSkinSettings module is imported
# and asked to re-apply its skin configuration, then the marker is removed.
if fileExists('/etc/.restore_skins'):
	os.unlink('/etc/.restore_skins')
	import glob
	lastpath = ''
	for skin in sorted(glob.glob('/usr/lib/enigma2/python/Plugins/Extensions/*/ActivateSkinSettings.py*')):
		try:
			print '[RESTORE_SKIN] restore skin from "%s" ...' % skin
			skinpath, ext = os.path.splitext(skin)
			# Only handle each module once (py/pyo pairs share a path) and
			# only compiled files.
			if skinpath == lastpath or not ext in '.pyo':
				print '[RESTORE_SKIN] ...skip!'
				continue
			lastpath = skinpath
			if getattr(__import__(skin.replace('/usr/lib/enigma2/python/','').replace(ext,'').replace('/','.'), fromlist=['ActivateSkinSettings']), 'ActivateSkinSettings')().WriteSkin(True):
				print '[RESTORE_SKIN] ... failed!'
			else:
				print '[RESTORE_SKIN] ... done!'
		except Exception, err:
			print '[RESTORE_SKIN] ...error occurred: ', err
##################################################################################################
def skinExists(skin = False):
	"""Ensure config.skin.primary_skin points at an existing skin file.

	If 'skin' is not given (or is not a string), the currently configured
	primary skin is checked. When the file is missing, fall back to
	DEFAULT_SKIN if it exists, otherwise to the plain 'skin.xml', save the
	setting and re-check recursively.
	"""
	# BUG FIX: the original tested isinstance(skin, skin) - passing the
	# value itself as the "class" argument - which raises TypeError for any
	# truthy argument. The intended check is "is it a string".
	if not skin or not isinstance(skin, str):
		skin = config.skin.primary_skin.value
	skin = resolveFilename(SCOPE_SKIN, skin)
	if not fileExists(skin):
		if fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
			config.skin.primary_skin.value = DEFAULT_SKIN
		else:
			config.skin.primary_skin.value = "skin.xml"
		config.skin.primary_skin.save()
		skinExists()
def getSkinPath():
	"""Return the directory part of the configured primary skin with a
	trailing slash, e.g. 'MetrixHD/' for 'MetrixHD/skin.xml'."""
	configured = config.skin.primary_skin.value
	skin_dir = configured[:configured.rfind('/') + 1]
	if not skin_dir.endswith('/'):
		skin_dir += '/'
	return skin_dir
primary_skin_path = getSkinPath()
profile("LoadSkin")
# Load order (ascending priority): user overrides, box fixups, display and
# text skins, user colors/header, modular fragments, the primary skin and
# finally skin_default.xml as the fallback for everything not defined.
res = None
name = skin_user_skinname()
if name:
	res = addSkin(name, SCOPE_CONFIG)
if not name or not res:
	addSkin('skin_user.xml', SCOPE_CONFIG)
# some boxes lie about their dimensions
addSkin('skin_box.xml')
# add optional discrete second infobar
addSkin('skin_second_infobar.xml')
# Dreambox models use a different display id for the front panel.
display_skin_id = 1
if getBoxType().startswith('dm'):
	display_skin_id = 2
try:
	if not addSkin(os.path.join('display', config.skin.display_skin.value)):
		raise DisplaySkinError, "display skin not found"
except Exception, err:
	# Fall back to the standard display skin (or skin_display.xml if that
	# was already the one that failed) and persist the corrected setting.
	print "SKIN ERROR:", err
	skin = DEFAULT_DISPLAY_SKIN
	if config.skin.display_skin.value == skin:
		skin = 'skin_display.xml'
	print "defaulting to standard display skin...", skin
	config.skin.display_skin.value = skin
	skin = os.path.join('display', skin)
	addSkin(skin)
	del skin
# Add Skin for Display
try:
	addSkin(config.vfd.show.value)
except:
	addSkin('skin_text.xml')
addSkin('skin_subtitles.xml')
try:
	if config.skin.primary_skin.value != DEFAULT_SKIN:
		addSkin(primary_skin_path + 'skin_user_colors.xml', SCOPE_SKIN)
		print "[SKIN] loading user defined colors for skin", (primary_skin_path + 'skin_user_colors.xml')
except (SkinError, IOError, AssertionError), err:
	print "[SKIN] not loading user defined colors for skin"
try:
	if config.skin.primary_skin.value != DEFAULT_SKIN:
		addSkin(primary_skin_path + 'skin_user_header.xml', SCOPE_SKIN)
		print "[SKIN] loading user defined header file for skin", (primary_skin_path + 'skin_user_header.xml')
except (SkinError, IOError, AssertionError), err:
	print "[SKIN] not loading user defined header file for skin"
def load_modular_files():
	# Load every mySkin/skin_*.xml fragment of the active (non-default) skin.
	modular_files = get_modular_files(primary_skin_path, SCOPE_SKIN)
	if len(modular_files):
		for f in modular_files:
			try:
				addSkin(primary_skin_path + f, SCOPE_SKIN)
				print "[SKIN] loading modular skin file : ", (primary_skin_path + f)
			except (SkinError, IOError, AssertionError), err:
				print "[SKIN] failed to load modular skin file : ", err
load_modular_files()
try:
	if not addSkin(config.skin.primary_skin.value):
		raise SkinError, "primary skin not found"
except Exception, err:
	# Same fallback strategy as for the display skin above.
	print "SKIN ERROR:", err
	skin = DEFAULT_SKIN
	if config.skin.primary_skin.value == skin:
		skin = 'skin.xml'
	print "defaulting to standard skin...", skin
	config.skin.primary_skin.value = skin
	addSkin(skin)
	del skin
addSkin('skin_default.xml')
profile("LoadSkinDefaultDone")
def parseCoordinate(s, e, size=0, font=None):
	"""Parse one skin coordinate expression into a pixel value.

	s    -- coordinate string. Supported forms:
	        "center"  centre 'size' within the extent 'e'
	        "*"       no value (returns None)
	        a number, optionally prefixed with 'e' (start at the extent)
	        or 'c' (start at half the extent), and optionally suffixed
	        with '%' (percent of extent), 'w' (multiples of the font's
	        width metric) or 'h' (multiples of the font's height metric)
	e    -- extent (parent width or height) in pixels
	size -- widget size; only used by "center"
	font -- key into the global 'fonts' table; only used by 'w'/'h'

	Negative results are clamped to 0.
	"""
	s = s.strip()
	if s == "center":
		if not size:
			val = 0
		else:
			val = (e - size)/2
	elif s == '*':
		return None
	else:
		# BUG FIX: the original compared characters with 'is'/'is not',
		# which tests object identity and only worked by accident of
		# CPython's interning of short strings; '==' is the correct
		# (and portable) comparison.
		if s[0] == 'e':
			val = e
			s = s[1:]
		elif s[0] == 'c':
			val = e/2
			s = s[1:]
		else:
			val = 0
		if s:
			if s[-1] == '%':
				val += e * int(s[:-1]) / 100
			elif s[-1] == 'w':
				val += fonts[font][3] * int(s[:-1])
			elif s[-1] == 'h':
				val += fonts[font][2] * int(s[:-1])
			else:
				val += int(s)
	if val < 0:
		val = 0
	return val
def getParentSize(object, desktop):
	"""Return the size of the widget's effective parent, falling back to
	the desktop size for parentless widgets; an empty eSize when neither
	is available."""
	if not object:
		return eSize()
	parent = object.getParent()
	# For some widgets (e.g. ScrollLabel) the skin attributes are applied
	# to a child widget instead of the widget itself; its direct parent is
	# then the main widget, which has no size yet (it is calculated once
	# the child has parsed its skin attributes). Detect that via the empty
	# size and step one level further up to the real parent.
	if parent and parent.size().isEmpty():
		parent = parent.getParent()
	if parent:
		return parent.size()
	if desktop:
		# Widget has no parent; use the desktop size for relative coordinates.
		return desktop.size()
	return eSize()
def parsePosition(s, scale, object = None, desktop = None, size = None):
	"""Parse an "x,y" skin position into an ePoint, expanding skin
	<variables> names and applying the (num, den) scale factors."""
	s = variables.get(s, s)
	x, y = s.split(',')
	parentsize = eSize()
	# Only query the parent when a coordinate is relative ('c'/'e' prefix).
	if object and (x[0] in ('c', 'e') or y[0] in ('c', 'e')):
		parentsize = getParentSize(object, desktop)
	xval = parseCoordinate(x, parentsize.width(), size and size.width())
	yval = parseCoordinate(y, parentsize.height(), size and size.height())
	return ePoint(xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1])
def parseSize(s, scale, object = None, desktop = None):
	"""Parse a "width,height" skin size into an eSize, expanding skin
	<variables> names and applying the (num, den) scale factors."""
	s = variables.get(s, s)
	x, y = s.split(',')
	parentsize = eSize()
	# Only query the parent when a coordinate is relative ('c'/'e' prefix).
	if object and (x[0] in ('c', 'e') or y[0] in ('c', 'e')):
		parentsize = getParentSize(object, desktop)
	xval = parseCoordinate(x, parentsize.width())
	yval = parseCoordinate(y, parentsize.height())
	return eSize(xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1])
def parseFont(s, scale):
	"""Resolve a font specification to a gFont.

	's' is either an alias from the global 'fonts' table or a literal
	"Name;size" pair; the size is multiplied by the horizontal scale
	factor scale[0] = (num, den).
	"""
	try:
		f = fonts[s]
		name = f[0]
		size = f[1]
	except KeyError:
		# Narrowed from a bare 'except:': only an unknown alias should
		# fall through to the literal "name;size" form.
		name, size = s.split(';')
	return gFont(name, int(size) * scale[0][0] / scale[0][1])
def parseColor(s):
	"""Convert '#aarrggbb' (hex) or a named color into a gRGB value.

	Raises SkinError for an unknown color name.
	"""
	if s[0] != '#':
		try:
			return colorNames[s]
		except KeyError:
			# Narrowed from a bare 'except:': only a missing name should
			# produce the "unknown color" error.
			raise SkinError("color '%s' must be #aarrggbb or valid named color" % s)
	return gRGB(int(s[1:], 0x10))
def collectAttributes(skinAttributes, node, context, skin_path_prefix=None, ignore=(), filenames=frozenset(("pixmap", "pointer", "seek_pointer", "backgroundPixmap", "selectionPixmap", "sliderPixmap", "scrollbarbackgroundPixmap"))):
	# Collect a widget's skin attributes from XML 'node' into the
	# skinAttributes list as (name, value) pairs. File-typed attributes are
	# resolved against the active (LCD) skin scope; 'position' and 'size'
	# are held back and parsed through 'context' (which needs the font for
	# 'w'/'h' relative coordinates) before being appended last.
	# walk all attributes
	size = None
	pos = None
	font = None
	for attrib, value in node.items():
		if attrib not in ignore:
			if attrib in filenames:
				# Prefer the LCD skin scope when the file exists there.
				pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, value, path_prefix=skin_path_prefix)
				if fileExists(resolveFilename(SCOPE_ACTIVE_LCDSKIN, value, path_prefix=skin_path_prefix)):
					pngfile = resolveFilename(SCOPE_ACTIVE_LCDSKIN, value, path_prefix=skin_path_prefix)
				value = pngfile
			# Bit of a hack this, really. When a window has a flag (e.g. wfNoBorder)
			# it needs to be set at least before the size is set, in order for the
			# window dimensions to be calculated correctly in all situations.
			# If wfNoBorder is applied after the size has been set, the window will fail to clear the title area.
			# Similar situation for a scrollbar in a listbox; when the scrollbar setting is applied after
			# the size, a scrollbar will not be shown until the selection moves for the first time
			if attrib == 'size':
				size = value.encode("utf-8")
			elif attrib == 'position':
				pos = value.encode("utf-8")
			elif attrib == 'font':
				font = value.encode("utf-8")
				skinAttributes.append((attrib, font))
			else:
				skinAttributes.append((attrib, value.encode("utf-8")))
	if pos is not None:
		pos, size = context.parse(pos, size, font)
		skinAttributes.append(('position', pos))
	if size is not None:
		skinAttributes.append(('size', size))
def morphRcImagePath(value):
	"""Map the default-skin remote control images onto the model specific
	remote control image when a non-default RC model is active."""
	if rc_model.rcIsDefault() is False:
		default_rc_images = (
			'/usr/share/enigma2/skin_default/rc.png',
			'/usr/share/enigma2/skin_default/rcold.png',
			'/usr/share/enigma2/skin_default/rc0.png',
			'/usr/share/enigma2/skin_default/rc1.png',
			'/usr/share/enigma2/skin_default/rc2.png',
		)
		if value in default_rc_images:
			value = rc_model.getRcLocation() + 'rc.png'
	return value
def loadPixmap(path, desktop):
	"""Load a pixmap, honouring an optional '#opt1,opt2' suffix on the path
	(only 'cached' is recognised); returns None (after logging) when the
	file cannot be loaded."""
	cached = False
	marker = path.find("#")
	if marker != -1:
		cached = "cached" in path[marker + 1:].split(',')
		path = path[:marker]
	ptr = LoadPixmap(morphRcImagePath(path), desktop, cached)
	if ptr is None:
		print("pixmap file %s not found!" % path)
	return ptr
pngcache = []
def cachemenu():
pixmaplist = []
for (path, skin) in dom_skins:
for x in skin.findall("screen"):
if x.attrib.get('name') == 'menu_mainmenu':
print x.attrib.get('name')
for s in x.findall("ePixmap"):
if s.attrib.get('pixmap','') is not '':
pixmaplist.append(s.attrib.get('pixmap',''))
for s in x.findall('widget'):
if s.attrib.get('pixmap','') is not '':
pixmaplist.append(s.attrib.get('pixmap',''))
desktop = getDesktop(0)
for s in pixmaplist:
value ='/usr/share/enigma2/'+s
ptr = loadPixmap(value, desktop)
pngcache.append((value,ptr))
try:
if config.skin.primary_skin.value == "MetrixHD/skin.xml" or config.skin.primary_skin.value == DEFAULT_SKIN:
cachemenu()
except:
print "fail cache main menu"
class AttributeParser:
def __init__(self, guiObject, desktop, scale=((1,1),(1,1))):
self.guiObject = guiObject
self.desktop = desktop
self.scaleTuple = scale
def applyOne(self, attrib, value):
try:
getattr(self, attrib)(value)
except AttributeError:
print "[SKIN] Attribute \"%s\" with value \"%s\" in object of type \"%s\" is not implemented" % (attrib, value, self.guiObject.__class__.__name__)
except SkinError, ex:
print "\033[91m[SKIN] Error:", ex,
print "\033[0m"
except:
print "[Skin] attribute \"%s\" with wrong (or unknown) value \"%s\" in object of type \"%s\"" % (attrib, value, self.guiObject.__class__.__name__)
def applyAll(self, attrs):
for attrib, value in attrs:
try:
getattr(self, attrib)(value)
except AttributeError:
print "[SKIN] Attribute \"%s\" with value \"%s\" in object of type \"%s\" is not implemented" % (attrib, value, self.guiObject.__class__.__name__)
except SkinError, ex:
print "\033[91m[Skin] Error:", ex,
print "\033[0m"
except:
print "[Skin] attribute \"%s\" with wrong (or unknown) value \"%s\" in object of type \"%s\"" % (attrib, value, self.guiObject.__class__.__name__)
def conditional(self, value):
pass
def objectTypes(self, value):
pass
def position(self, value):
if isinstance(value, tuple):
self.guiObject.move(ePoint(*value))
else:
self.guiObject.move(parsePosition(value, self.scaleTuple, self.guiObject, self.desktop, self.guiObject.csize()))
def size(self, value):
if isinstance(value, tuple):
self.guiObject.resize(eSize(*value))
else:
self.guiObject.resize(parseSize(value, self.scaleTuple, self.guiObject, self.desktop))
def animationPaused(self, value):
pass
def NoAnimationAfter(self, value):
pass
def Animation(self, value):
self.guiObject.setAnimationMode(
{ "disable": 0x00,
"off": 0x00,
"offshow": 0x10,
"offhide": 0x01,
"onshow": 0x01,
"onhide": 0x10,
"disable_onshow": 0x10,
"disable_onhide": 0x01,
}[value])
def animationMode(self, value):
self.guiObject.setAnimationMode(
{ "disable": 0x00,
"off": 0x00,
"offshow": 0x10,
"offhide": 0x01,
"onshow": 0x01,
"onhide": 0x10,
"disable_onshow": 0x10,
"disable_onhide": 0x01,
}[value])
def title(self, value):
global isVTISkin
if value[:3].lower() == "vti":
isVTISkin = True
self.guiObject.setTitle(_(value))
def text(self, value):
self.guiObject.setText(_(value))
def font(self, value):
self.guiObject.setFont(parseFont(value, self.scaleTuple))
def secondfont(self, value):
self.guiObject.setSecondFont(parseFont(value, self.scaleTuple))
def zPosition(self, value):
self.guiObject.setZPosition(int(value))
def itemHeight(self, value):
self.guiObject.setItemHeight(int(value))
def pixmap(self, value):
global pngcache
ptr = None
for cvalue, cptr in pngcache:
if cvalue== value:
ptr=cptr
if ptr is None:
if not fileExists(value):
ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
else:
ptr = loadPixmap(value, self.desktop)
self.guiObject.setPixmap(ptr)
def backgroundPixmap(self, value):
global pngcache
ptr = None
for cvalue, cptr in pngcache:
if cvalue== value:
ptr=cptr
if ptr is None:
if not fileExists(value):
ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
else:
ptr = loadPixmap(value, self.desktop)
self.guiObject.setBackgroundPicture(ptr)
def selectionPixmap(self, value):
global pngcache
ptr = None
for cvalue, cptr in pngcache:
if cvalue== value:
ptr=cptr
if ptr is None:
if not fileExists(value):
ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
else:
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSelectionPicture(ptr)
def sliderPixmap(self, value):
global pngcache
ptr = None
for cvalue, cptr in pngcache:
if cvalue== value:
ptr=cptr
if ptr is None:
if not fileExists(value):
ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
else:
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSliderPicture(ptr)
def scrollbarbackgroundPixmap(self, value):
global pngcache
ptr = None
for cvalue, cptr in pngcache:
if cvalue== value:
ptr=cptr
if ptr is None:
if not fileExists(value):
ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
else:
ptr = loadPixmap(value, self.desktop)
self.guiObject.setScrollbarBackgroundPicture(ptr)
def scrollbarSliderPicture(self, value):
global pngcache
ptr = None
for cvalue, cptr in pngcache:
if cvalue== value:
ptr=cptr
if ptr is None:
if not fileExists(value):
ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
else:
ptr = loadPixmap(value, self.desktop)
self.guiObject.setScrollbarSliderPicture(ptr)
def scrollbarBackgroundPicture(self, value):
global pngcache
ptr = None
for cvalue, cptr in pngcache:
if cvalue== value:
ptr=cptr
if ptr is None:
if not fileExists(value):
ptr = loadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, value), self.desktop)
else:
ptr = loadPixmap(value, self.desktop)
self.guiObject.setScrollbarBackgroundPicture(ptr)
def alphatest(self, value):
self.guiObject.setAlphatest(
{ "on": 1,
"off": 0,
"blend": 2,
}[value])
def scale(self, value):
self.guiObject.setScale(1)
def orientation(self, value): # used by eSlider
try:
self.guiObject.setOrientation(*
{ "orVertical": (self.guiObject.orVertical, False),
"orTopToBottom": (self.guiObject.orVertical, False),
"orBottomToTop": (self.guiObject.orVertical, True),
"orHorizontal": (self.guiObject.orHorizontal, False),
"orLeftToRight": (self.guiObject.orHorizontal, False),
"orRightToLeft": (self.guiObject.orHorizontal, True),
}[value])
except KeyError:
print "oprientation must be either orVertical or orHorizontal!, not %s. Please contact the skin's author!" % value
def valign(self, value):
try:
self.guiObject.setVAlign(
{ "top": self.guiObject.alignTop,
"center": self.guiObject.alignCenter,
"bottom": self.guiObject.alignBottom
}[value])
except KeyError:
print "valign must be either top, center or bottom!, not %s. Please contact the skin's author!" % value
def halign(self, value):
try:
self.guiObject.setHAlign(
{ "left": self.guiObject.alignLeft,
"center": self.guiObject.alignCenter,
"right": self.guiObject.alignRight,
"block": self.guiObject.alignBlock
}[value])
except KeyError:
print "halign must be either left, center, right or block!, not %s. Please contact the skin's author!" % value
def textOffset(self, value):
global isVTISkin
if value in variables:
value = variables[value]
x, y = value.split(',')
self.guiObject.setTextOffset(ePoint(int(x) * self.scaleTuple[0][0] / self.scaleTuple[0][1], int(y) * self.scaleTuple[1][0] / self.scaleTuple[1][1]))
if isVTISkin:
self.guiObject.setUseVTIWorkaround()
def flags(self, value):
if value in variables:
value = variables[value]
flags = value.split(',')
for f in flags:
try:
fv = eWindow.__dict__[f]
self.guiObject.setFlag(fv)
except KeyError:
print "illegal flag %s!" % f
def backgroundColor(self, value):
self.guiObject.setBackgroundColor(parseColor(value))
def backgroundColorSelected(self, value):
self.guiObject.setBackgroundColorSelected(parseColor(value))
def foregroundColor(self, value):
self.guiObject.setForegroundColor(parseColor(value))
def foregroundColorSelected(self, value):
self.guiObject.setForegroundColorSelected(parseColor(value))
def foregroundNotCrypted(self, value):
self.guiObject.setForegroundColor(parseColor(value))
def backgroundNotCrypted(self, value):
self.guiObject.setBackgroundColor(parseColor(value))
def foregroundCrypted(self, value):
self.guiObject.setForegroundColor(parseColor(value))
def backgroundCrypted(self, value):
self.guiObject.setBackgroundColor(parseColor(value))
def foregroundEncrypted(self, value):
self.guiObject.setForegroundColor(parseColor(value))
def backgroundEncrypted(self, value):
self.guiObject.setBackgroundColor(parseColor(value))
def shadowColor(self, value):
self.guiObject.setShadowColor(parseColor(value))
def selectionDisabled(self, value):
self.guiObject.setSelectionEnable(0)
def transparent(self, value):
self.guiObject.setTransparent(int(value))
def borderColor(self, value):
self.guiObject.setBorderColor(parseColor(value))
def borderWidth(self, value):
self.guiObject.setBorderWidth(int(value))
def scrollbarSliderBorderWidth(self, value):
self.guiObject.setScrollbarSliderBorderWidth(int(value))
def scrollbarWidth(self, value):
self.guiObject.setScrollbarWidth(int(value))
def scrollbarSliderBorderColor(self, value):
self.guiObject.setSliderBorderColor(parseColor(value))
def scrollbarSliderForegroundColor(self, value):
self.guiObject.setSliderForegroundColor(parseColor(value))
def scrollbarMode(self, value):
self.guiObject.setScrollbarMode(getattr(self.guiObject, value))
# { "showOnDemand": self.guiObject.showOnDemand,
# "showAlways": self.guiObject.showAlways,
# "showNever": self.guiObject.showNever,
# "showLeft": self.guiObject.showLeft
# }[value])
def enableWrapAround(self, value):
self.guiObject.setWrapAround(True)
def pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(0, ptr, pos)
def seek_pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(1, ptr, pos)
def shadowOffset(self, value):
self.guiObject.setShadowOffset(parsePosition(value, self.scaleTuple))
def noWrap(self, value):
self.guiObject.setNoWrap(int(value))
def linelength(self, value):
pass
def OverScan(self, value):
self.guiObject.setOverscan(value)
def applySingleAttribute(guiObject, desktop, attrib, value, scale = ((1,1),(1,1))):
	# Legacy convenience wrapper: apply one skin attribute to a GUI object.
	# Someone still using applySingleAttribute?
	AttributeParser(guiObject, desktop, scale).applyOne(attrib, value)
def applyAllAttributes(guiObject, desktop, attributes, scale):
	# Apply a list of (attribute, value) pairs to a GUI object.
	AttributeParser(guiObject, desktop, scale).applyAll(attributes)
def loadSingleSkinData(desktop, skin, path_prefix):
    """loads skin data like colors, windowstyle etc.

    Walks one parsed <skin> root and applies its global sections as side
    effects: framebuffer resolution (<output>), nested skin files
    (<include>), named colors, fonts and aliases, named parameters,
    constant widgets, variables, subtitle styles, window styles and
    per-display margins.
    """
    assert skin.tag == "skin", "root element in skin must be 'skin'!"
    # <output id="0"> configures the framebuffer resolution.
    for c in skin.findall("output"):
        id = c.attrib.get('id')
        if id:
            id = int(id)
        else:
            id = 0
        if id == 0: # framebuffer
            for res in c.findall("resolution"):
                get_attr = res.attrib.get
                xres = get_attr("xres")
                if xres:
                    xres = int(xres)
                else:
                    xres = 720
                yres = get_attr("yres")
                if yres:
                    yres = int(yres)
                else:
                    yres = 576
                bpp = get_attr("bpp")
                if bpp:
                    bpp = int(bpp)
                else:
                    bpp = 32
                #print "Resolution:", xres,yres,bpp
                from enigma import gMainDC
                gMainDC.getInstance().setResolution(xres, yres)
                desktop.resize(eSize(xres, yres))
                if bpp != 32:
                    # load palette (not yet implemented)
                    pass
    # <include filename="..."> pulls in another skin file recursively.
    for skininclude in skin.findall("include"):
        filename = skininclude.attrib.get("filename")
        if filename:
            skinfile = resolveFilename(SCOPE_ACTIVE_SKIN, filename, path_prefix=path_prefix)
            if not fileExists(skinfile):
                skinfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)
            if fileExists(skinfile):
                print "[SKIN] loading include:", skinfile
                loadSkin(skinfile)
    # Named colors fill the module-level colorNames / colorNamesHuman maps.
    for c in skin.findall("colors"):
        for color in c.findall("color"):
            get_attr = color.attrib.get
            name = get_attr("name")
            color = get_attr("value")
            if name and color:
                colorNames[name] = parseColor(color)
                if color[0] != '#':
                    # Value is itself a color name: alias the human value.
                    for key in colorNames:
                        if key == color:
                            colorNamesHuman[name] = colorNamesHuman[key]
                            break
                else:
                    # Literal "#..." value: keep the numeric form too.
                    humancolor = color[1:]
                    if len(humancolor) >= 6:
                        colorNamesHuman[name] = int(humancolor,16)
            else:
                print("need color and name, got %s %s" % (name, color))
    # Font registration plus font aliases (name -> (font, size, height, width)).
    for c in skin.findall("fonts"):
        for font in c.findall("font"):
            get_attr = font.attrib.get
            filename = get_attr("filename", "<NONAME>")
            name = get_attr("name", "Regular")
            scale = get_attr("scale")
            if scale:
                scale = int(scale)
            else:
                scale = 100
            is_replacement = get_attr("replacement") and True or False
            render = get_attr("render")
            if render:
                render = int(render)
            else:
                render = 0
            resolved_font = resolveFilename(SCOPE_FONTS, filename, path_prefix=path_prefix)
            if not fileExists(resolved_font): #when font is not available look at current skin path
                resolved_font = resolveFilename(SCOPE_ACTIVE_SKIN, filename)
                if fileExists(resolveFilename(SCOPE_CURRENT_SKIN, filename)):
                    resolved_font = resolveFilename(SCOPE_CURRENT_SKIN, filename)
                elif fileExists(resolveFilename(SCOPE_ACTIVE_LCDSKIN, filename)):
                    resolved_font = resolveFilename(SCOPE_ACTIVE_LCDSKIN, filename)
            addFont(resolved_font, name, scale, is_replacement, render)
            #print "Font: ", resolved_font, name, scale, is_replacement
        fallbackFont = resolveFilename(SCOPE_FONTS, "fallback.font", path_prefix=path_prefix)
        if fileExists(fallbackFont):
            addFont(fallbackFont, "Fallback", 100, -1, 0)
        for alias in c.findall("alias"):
            get = alias.attrib.get
            try:
                name = get("name")
                font = get("font")
                size = int(get("size"))
                height = int(get("height", size)) # to be calculated some day
                width = int(get("width", size))
                global fonts
                fonts[name] = (font, size, height, width)
            except Exception, ex:
                print "[SKIN] bad font alias", ex
    # Named parameters: "Font"-parameters become (name, size) tuples,
    # everything else becomes a list of ints.
    for c in skin.findall("parameters"):
        for parameter in c.findall("parameter"):
            get = parameter.attrib.get
            try:
                name = get("name")
                value = get("value")
                if name.find('Font') != -1:
                    font = value.split(";")
                    if isinstance(font, list) and len(font) == 2:
                        parameters[name] = (str(font[0]), int(font[1]))
                else:
                    parameters[name] = map(int, value.split(","))
            except Exception, ex:
                print "[SKIN] bad parameter", ex
    for c in skin.findall("constant-widgets"):
        for constant_widget in c.findall("constant-widget"):
            get = constant_widget.attrib.get
            name = get("name")
            if name:
                constant_widgets[name] = constant_widget
    # Skin variables: "x,y" strings usable in place of positions/sizes.
    for c in skin.findall("variables"):
        for parameter in c.findall("variable"):
            get = parameter.attrib.get
            name = get("name")
            value = get("value")
            x, y = value.split(',')
            if value and name:
                variables[name] = str(x) + "," + str(y)
    for c in skin.findall("subtitles"):
        from enigma import eSubtitleWidget
        scale = ((1,1),(1,1))
        for substyle in c.findall("sub"):
            get_attr = substyle.attrib.get
            font = parseFont(get_attr("font"), scale)
            col = get_attr("foregroundColor")
            if col:
                foregroundColor = parseColor(col)
                haveColor = 1
            else:
                foregroundColor = gRGB(0xFFFFFF)
                haveColor = 0
            col = get_attr("borderColor")
            if col:
                borderColor = parseColor(col)
            else:
                borderColor = gRGB(0)
            borderwidth = get_attr("borderWidth")
            if borderwidth is None:
                # default: use a subtitle border
                borderWidth = 3
            else:
                borderWidth = int(borderwidth)
            face = eSubtitleWidget.__dict__[get_attr("name")]
            eSubtitleWidget.setFontStyle(face, font, haveColor, foregroundColor, borderColor, borderWidth)
    # Window styles: title font/offset, border pixmaps and named colors.
    for windowstyle in skin.findall("windowstyle"):
        style = eWindowStyleSkinned()
        style_id = windowstyle.attrib.get("id")
        if style_id:
            style_id = int(style_id)
        else:
            style_id = 0
        # defaults
        font = gFont("Regular", 20)
        offset = eSize(20, 5)
        for title in windowstyle.findall("title"):
            get_attr = title.attrib.get
            offset = parseSize(get_attr("offset"), ((1,1),(1,1)))
            font = parseFont(get_attr("font"), ((1,1),(1,1)))
        style.setTitleFont(font)
        style.setTitleOffset(offset)
        #print "  ", font, offset
        for borderset in windowstyle.findall("borderset"):
            bsName = str(borderset.attrib.get("name"))
            for pixmap in borderset.findall("pixmap"):
                get_attr = pixmap.attrib.get
                bpName = get_attr("pos")
                filename = get_attr("filename")
                if filename and bpName:
                    pngfile = resolveFilename(SCOPE_ACTIVE_SKIN, filename, path_prefix=path_prefix)
                    if fileExists(resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)):
                        pngfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)
                    png = loadPixmap(pngfile, desktop)
                    try:
                        style.setPixmap(eWindowStyleSkinned.__dict__[bsName], eWindowStyleSkinned.__dict__[bpName], png)
                    except:
                        pass
                #print "  borderset:", bpName, filename
        for color in windowstyle.findall("color"):
            get_attr = color.attrib.get
            colorType = get_attr("name")
            color = parseColor(get_attr("color"))
            try:
                style.setColor(eWindowStyleSkinned.__dict__["col" + colorType], color)
            except:
                raise SkinError("Unknown color %s" % colorType)
                #pass
            #print "  color:", type, color
        x = eWindowStyleManager.getInstance()
        x.setStyle(style_id, style)
    # Per-display margins; style_id selects which desktop they apply to.
    for margin in skin.findall("margin"):
        style_id = margin.attrib.get("id")
        if style_id:
            style_id = int(style_id)
        else:
            style_id = 0
        r = eRect(0,0,0,0)
        v = margin.attrib.get("left")
        if v:
            r.setLeft(int(v))
        v = margin.attrib.get("top")
        if v:
            r.setTop(int(v))
        v = margin.attrib.get("right")
        if v:
            r.setRight(int(v))
        v = margin.attrib.get("bottom")
        if v:
            r.setBottom(int(v))
        # the "desktop" parameter is hardcoded to the UI screen, so we must ask
        # for the one that this actually applies to.
        getDesktop(style_id).setMargins(r)
dom_screens = {}
def loadSkin(name, scope = SCOPE_SKIN):
    """Parse a skin XML file and register its <screen> elements in the
    global dom_screens cache (screen name -> (element, path)).

    Screens carrying an 'id' that does not match the current display are
    dropped, as are unnamed screens.  Skins whose screen names start with
    "vti" set the isVTISkin flag.
    """
    # Now a utility for plugins to add skin data to the screens
    global dom_screens, display_skin_id, isVTISkin
    filename = resolveFilename(scope, name)
    if fileExists(filename):
        path = os.path.dirname(filename) + "/"
        file = open(filename, 'r')
        for elem in xml.etree.cElementTree.parse(file).getroot():
            if elem.tag == 'screen':
                name = elem.attrib.get('name', None)
                if name:
                    # Fix: this check used to run before the None-check above
                    # and raised a TypeError on unnamed <screen> elements
                    # (name[:3] on None); loadSkinData() already guards it.
                    if name[:3].lower() == "vti":
                        isVTISkin = True
                    sid = elem.attrib.get('id', None)
                    if sid and (sid != display_skin_id):
                        # not for this display
                        elem.clear()
                        continue
                    if name in dom_screens:
                        # Clear old versions, save memory
                        dom_screens[name][0].clear()
                    dom_screens[name] = (elem, path)
                else:
                    elem.clear()
            else:
                elem.clear()
        file.close()
def loadSkinData(desktop):
    """One-time skin bootstrap: apply every queued skin dom (in reverse
    priority order) and move its <screen> elements into dom_screens.

    Consumes the module-level dom_skins list (deleted at the end) and
    clears every element it no longer needs to save memory.
    """
    # Kinda hackish, but this is called once by mytest.py
    global isVTISkin
    global dom_skins
    # Copy + reverse so the highest-priority skin is processed last and wins.
    skins = dom_skins[:]
    skins.reverse()
    for (path, dom_skin) in skins:
        loadSingleSkinData(desktop, dom_skin, path)
        for elem in dom_skin:
            if elem.tag == 'screen':
                name = elem.attrib.get('name', None)
                if name:
                    if name[:3].lower() == "vti":
                        isVTISkin = True
                    sid = elem.attrib.get('id', None)
                    if sid and (sid != display_skin_id):
                        # not for this display
                        elem.clear()
                        continue
                    if name in dom_screens:
                        # Kill old versions, save memory
                        dom_screens[name][0].clear()
                    dom_screens[name] = (elem, path)
                else:
                    # without name, it's useless!
                    elem.clear()
            else:
                # non-screen element, no need for it any longer
                elem.clear()
    # no longer needed, we know where the screens are now.
    del dom_skins
class additionalWidget:
    # Plain attribute container for extra widgets (eLabel/ePixmap) created
    # straight from the skin; readSkin() fills in .widget and .skinAttributes.
    def __init__(self):
        pass
# Class that makes a tuple look like something else. Some plugins just assume
# that size is a string and try to parse it. This class makes that work.
class SizeTuple(tuple):
    """A 2-tuple that also quacks like the "w,h" string some plugins expect.

    Plugins occasionally call split()/strip() on a size value or print it,
    so this tuple renders itself as "first,second" for all of those.
    """
    def split(self, *args):
        # Any separator argument is ignored; always yields the two fields.
        return (str(self[0]), str(self[1]))
    def strip(self, *args):
        # Stripping a "w,h" string is a no-op: just hand back the string form.
        return str(self)
    def __str__(self):
        return '%s,%s' % self
class SkinContext:
    """Layout context used while walking a screen's widget tree.

    Tracks the remaining free rectangle (x, y, w, h).  parse() hands out
    space for "fill"/"top"/"bottom"/"left"/"right" aligned children and
    shrinks the remaining area accordingly (it mutates self).
    """
    def __init__(self, parent=None, pos=None, size=None, font=None):
        if parent is not None:
            if pos is not None:
                # Carve our own rectangle out of the parent's free space.
                pos, size = parent.parse(pos, size, font)
                self.x, self.y = pos
                self.w, self.h = size
        else:
            self.x = None #0 #none
            self.y = None #0 #none
            self.w = None #560 #none
            self.h = None #550 #none
    def __str__(self):
        return "Context (%s,%s)+(%s,%s) " % (self.x, self.y, self.w, self.h)
    def parse(self, pos, size, font):
        # 'size'/'pos' may name a skin <variable>; resolve before parsing.
        if size in variables:
            size = variables[size]
        if pos == "fill":
            # Consume the entire remaining area.
            pos = (self.x, self.y)
            size = (self.w, self.h)
            self.w = 0
            self.h = 0
        else:
            w,h = size.split(',')
            w = parseCoordinate(w, self.w, 0, font)
            h = parseCoordinate(h, self.h, 0, font)
            if pos == "bottom":
                pos = (self.x, self.y + self.h - h)
                size = (self.w, h)
                self.h -= h
            elif pos == "top":
                pos = (self.x, self.y)
                size = (self.w, h)
                self.h -= h
                self.y += h
            elif pos == "left":
                pos = (self.x, self.y)
                size = (w, self.h)
                self.x += w
                self.w -= w
            elif pos == "right":
                pos = (self.x + self.w - w, self.y)
                size = (w, self.h)
                self.w -= w
            else:
                # Explicit "x,y" position (possibly via a variable); does not
                # consume any of the remaining area.
                if pos in variables:
                    pos = variables[pos]
                size = (w, h)
                pos = pos.split(',')
                pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
        return SizeTuple(pos), SizeTuple(size)
class SkinContextStack(SkinContext):
    # A context that stacks things instead of aligning them
    # (same alignment keywords as SkinContext.parse, but the free rectangle
    # is never shrunk, so siblings overlap instead of flowing).
    def parse(self, pos, size, font):
        if size in variables:
            size = variables[size]
        if pos == "fill":
            pos = (self.x, self.y)
            size = (self.w, self.h)
        else:
            w,h = size.split(',')
            w = parseCoordinate(w, self.w, 0, font)
            h = parseCoordinate(h, self.h, 0, font)
            if pos == "bottom":
                pos = (self.x, self.y + self.h - h)
                size = (self.w, h)
            elif pos == "top":
                pos = (self.x, self.y)
                size = (self.w, h)
            elif pos == "left":
                pos = (self.x, self.y)
                size = (w, self.h)
            elif pos == "right":
                pos = (self.x + self.w - w, self.y)
                size = (w, self.h)
            else:
                if pos in variables:
                    pos = variables[pos]
                size = (w, h)
                pos = pos.split(',')
                pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
        return SizeTuple(pos), SizeTuple(size)
def readSkin(screen, skin, names, desktop):
    """Resolve and apply a skin to a Screen instance.

    Tries each candidate name from 'names' against the dom_screens cache,
    then falls back to the screen's pre-parsed or inline 'skin' attribute.
    Walks the chosen <screen> element, attaching skin attributes to the
    screen's components, instantiating renderers/converters for
    source-based widgets, and collecting additional eLabel/ePixmap widgets.
    """
    if not isinstance(names, list):
        names = [names]
    # try all skins, first existing one have priority
    global dom_screens
    for n in names:
        myscreen, path = dom_screens.get(n, (None,None))
        if myscreen is not None:
            # use this name for debug output
            name = n
            break
    else:
        name = "<embedded-in-'%s'>" % screen.__class__.__name__
    # otherwise try embedded skin
    if myscreen is None:
        myscreen = getattr(screen, "parsedSkin", None)
    # try uncompiled embedded skin
    if myscreen is None and getattr(screen, "skin", None):
        skin = screen.skin
        print "[SKIN] Parsing embedded skin", name
        if isinstance(skin, tuple):
            # A tuple of alternatives: pick the first <screen> matching the
            # current display id (or one without an id).
            for s in skin:
                candidate = xml.etree.cElementTree.fromstring(s)
                if candidate.tag == 'screen':
                    sid = candidate.attrib.get('id', None)
                    if (not sid) or (int(sid) == display_skin_id):
                        myscreen = candidate
                        break
            else:
                print "[SKIN] Hey, no suitable screen!"
        else:
            myscreen = xml.etree.cElementTree.fromstring(skin)
        if myscreen:
            screen.parsedSkin = myscreen
    if myscreen is None:
        print "[SKIN] No skin to read..."
        myscreen = screen.parsedSkin = xml.etree.cElementTree.fromstring("<screen></screen>")
    screen.skinAttributes = [ ]
    skin_path_prefix = getattr(screen, "skin_path", path)
    # Root context covers the whole desktop; the screen's own position/size
    # then carves a child context out of it.
    context = SkinContextStack()
    s = desktop.bounds()
    context.x = s.left()
    context.y = s.top()
    context.w = s.width()
    context.h = s.height()
    del s
    collectAttributes(screen.skinAttributes, myscreen, context, skin_path_prefix, ignore=("name",))
    context = SkinContext(context, myscreen.attrib.get('position'), myscreen.attrib.get('size'))
    screen.additionalWidgets = [ ]
    screen.renderer = [ ]
    visited_components = set()
    def process_constant_widget(constant_widget, context):
        # Replace a <constant-widget> reference with the widgets registered
        # under that name in the global constant_widgets map.
        get_attr = constant_widget.attrib.get
        wname = get_attr('name')
        if wname:
            try:
                cwvalue = constant_widgets[wname]
            except KeyError:
                if config.crash.skin_error_crash.value:
                    print "[SKIN] ERROR - given constant-widget: '%s' not found in skin" % wname
                else:
                    print "\033[91m[SKIN] ERROR - given constant-widget: '%s' not found in skin\033[0m" % wname
                return
            if cwvalue:
                for x in cwvalue:
                    myscreen.append((x))
                try:
                    myscreen.remove(constant_widget)
                except ValueError:
                    pass
    # now walk all widgets and stuff
    def process_none(widget, context):
        pass
    def process_widget(widget, context):
        get_attr = widget.attrib.get
        # ok, we either have 1:1-mapped widgets ('old style'), or 1:n-mapped
        # widgets (source->renderer).
        wname = get_attr('name')
        wsource = get_attr('source')
        if wname is None and wsource is None:
            print "widget has no name and no source!"
            return
        if wname:
            # print "Widget name=", wname
            visited_components.add(wname)
            # get corresponding 'gui' object
            try:
                attributes = screen[wname].skinAttributes = [ ]
            except:
                print "component with name '" + wname + "' was not found in skin of screen '" + name + "'!"
            # assert screen[wname] is not Source
            # NOTE(review): if the lookup above failed, 'attributes' is
            # unbound here and this raises NameError - confirm intended.
            collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('name',))
        elif wsource:
            # get corresponding source
            # print "Widget source=", wsource
            while True: # until we found a non-obsolete source
                # parse our current "wsource", which might specifiy a "related screen" before the dot,
                # for example to reference a parent, global or session-global screen.
                scr = screen
                # resolve all path components
                path = wsource.split('.')
                while len(path) > 1:
                    scr = screen.getRelatedScreen(path[0])
                    if scr is None:
                        #print wsource
                        #print name
                        print("specified related screen '" + wsource + "' was not found in screen '" + name + "'!")
                    path = path[1:]
                # resolve the source.
                source = scr.get(path[0])
                if isinstance(source, ObsoleteSource):
                    # however, if we found an "obsolete source", issue warning, and resolve the real source.
                    print "WARNING: SKIN '%s' USES OBSOLETE SOURCE '%s', USE '%s' INSTEAD!" % (name, wsource, source.new_source)
                    print "OBSOLETE SOURCE WILL BE REMOVED %s, PLEASE UPDATE!" % source.removal_date
                    if source.description:
                        print source.description
                    wsource = source.new_source
                else:
                    # otherwise, use that source.
                    break
            if source is None:
                if config.crash.skin_error_crash.value:
                    raise SkinError("source '" + wsource + "' was not found in screen '" + name + "'!")
                else:
                    print("\033[91m[Skin] Error: Source '" + wsource + "' was not found in screen '" + name + "'!")
            wrender = get_attr('render')
            if not wrender:
                print("you must define a renderer with render= for source '%s'" % wsource)
            # Chain <convert> elements onto the source, reusing an existing
            # converter instance with identical arguments when available.
            for converter in widget.findall("convert"):
                ctype = converter.get('type')
                assert ctype, "'convert'-tag needs a 'type'-attribute"
                #print "Converter:", ctype
                try:
                    parms = converter.text.strip()
                except:
                    parms = ""
                #print "Params:", parms
                try:
                    converter_class = my_import('.'.join(("Components", "Converter", ctype))).__dict__.get(ctype)
                except ImportError:
                    if config.crash.skin_error_crash.value:
                        raise SkinError("[Skin] Error: Converter '%s' not found" % ctype)
                    else:
                        print("\033[91m[Skin] Error: Converter '%s' not found\033[0m" % ctype)
                c = None
                for i in source.downstream_elements:
                    if isinstance(i, converter_class) and i.converter_arguments == parms:
                        c = i
                if c is None:
                    c = converter_class(parms)
                    c.connect(source)
                source = c
            try:
                renderer_class = my_import('.'.join(("Components", "Renderer", wrender))).__dict__.get(wrender)
            except ImportError:
                if config.crash.skin_error_crash.value:
                    raise SkinError("[Skin] Error: Renderer '%s' not found" % wrender)
                else:
                    print("\033[91m[Skin] Error: Renderer '%s' not found\033[0m" % wrender)
                    return
            renderer = renderer_class() # instantiate renderer
            renderer.connect(source) # connect to source
            attributes = renderer.skinAttributes = [ ]
            collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('render', 'source'))
            screen.renderer.append(renderer)
    def process_applet(widget, context):
        # Compile inline python; only the "onLayoutFinish" hook is supported.
        try:
            codeText = widget.text.strip()
            widgetType = widget.attrib.get('type')
            code = compile(codeText, "skin applet", "exec")
        except Exception, ex:
            raise SkinError("applet failed to compile: " + str(ex))
        if widgetType == "onLayoutFinish":
            screen.onLayoutFinish.append(code)
        else:
            print("applet type '%s' unknown!" % widgetType)
    def process_elabel(widget, context):
        # Bare eLabel from the skin; instantiated later from .widget.
        w = additionalWidget()
        w.widget = eLabel
        w.skinAttributes = [ ]
        collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
        screen.additionalWidgets.append(w)
    def process_epixmap(widget, context):
        # Bare ePixmap from the skin; instantiated later from .widget.
        w = additionalWidget()
        w.widget = ePixmap
        w.skinAttributes = [ ]
        collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
        screen.additionalWidgets.append(w)
    def process_screen(widget, context):
        def process(w):
            # 'conditional' / 'objectTypes' let a widget opt out when the
            # screen lacks the named components or they have the wrong type.
            conditional = w.attrib.get('conditional')
            if conditional and not [i for i in conditional.split(",") if i in screen.keys()]:
                return
            objecttypes = w.attrib.get('objectTypes', '').split(",")
            if len(objecttypes) > 1 and (objecttypes[0] not in screen.keys() or not [i for i in objecttypes[1:] if i == screen[objecttypes[0]].__class__.__name__]):
                return
            p = processors.get(w.tag, process_none)
            try:
                p(w, context)
            except SkinError, e:
                print "[SKIN] SKIN ERROR in screen '%s' widget '%s':" % (name, w.tag), e
        cw = widget.findall("constant-widget")
        if cw: #prozess non-openatv skins
            for w in cw:
                process(w)
            for w in myscreen.findall("widget"):
                process(w)
        for w in widget.getchildren():
            if cw and w.tag in ("constant-widget","widget"): #for non-openatv skins
                continue
            process(w)
    def process_panel(widget, context):
        # A panel either embeds another named screen and/or opens a nested
        # layout context ('stack' overlaps children, default flows them).
        n = widget.attrib.get('name')
        if n:
            try:
                s = dom_screens.get(n, None)
                if s is None:
                    print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name)
                    return
            except KeyError:
                print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name)
            else:
                process_screen(s[0], context)
        layout = widget.attrib.get('layout')
        if layout == 'stack':
            cc = SkinContextStack
        else:
            cc = SkinContext
        try:
            c = cc(context, widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'))
        except Exception, ex:
            raise SkinError("Failed to create skincontext (%s,%s,%s) in %s: %s" % (widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'), context, ex) )
        process_screen(widget, c)
    processors = {
        None: process_none,
        "constant-widget": process_constant_widget,
        "widget": process_widget,
        "applet": process_applet,
        "eLabel": process_elabel,
        "ePixmap": process_epixmap,
        "panel": process_panel
    }
    try:
        print "[SKIN] processing screen %s:" % name
        context.x = 0 # reset offsets, all components are relative to screen
        context.y = 0 # coordinates.
        process_screen(myscreen, context)
    except Exception, e:
        print "[SKIN] SKIN ERROR in %s:" % name, e
    from Components.GUIComponent import GUIComponent
    nonvisited_components = [x for x in set(screen.keys()) - visited_components if isinstance(x, GUIComponent)]
    assert not nonvisited_components, "the following components in %s don't have a skin entry: %s" % (name, ', '.join(nonvisited_components))
    # This may look pointless, but it unbinds 'screen' from the nested scope. A better
    # solution is to avoid the nested scope above and use the context object to pass
    # things around.
    screen = None
    visited_components = None
def parseAvailableSkinColor(color):
if color in colorNamesHuman:
return colorNamesHuman[color]
else:
print "color %s ist not available at used skin" % color
return None | unknown | codeparrot/codeparrot-clean | ||
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_TPU_TPU_NODE_DEVICE_UTIL_H_
#define TENSORFLOW_CORE_TPU_TPU_NODE_DEVICE_UTIL_H_
#include "tensorflow/core/framework/kernel_def.pb.h"
namespace tensorflow {

// This is a BackendOpFilter (see tensorflow/compiler/tf2xla/xla_op_registry.h).
// It returns true if the op should be registered on the device; it may
// optionally modify the KernelDef in place.
bool TpuOpFilter(KernelDef* kdef);

}  // namespace tensorflow
#endif // TENSORFLOW_CORE_TPU_TPU_NODE_DEVICE_UTIL_H_ | c | github | https://github.com/tensorflow/tensorflow | tensorflow/core/tpu/tpu_node_device_util.h |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.