code
stringlengths 1
25.8M
| language
stringclasses 18
values | source
stringclasses 4
values | repo
stringclasses 78
values | path
stringlengths 0
268
|
|---|---|---|---|---|
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The arg spec for the vyos_lldp_global module
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class Lldp_globalArgs(object):  # pylint: disable=R0903
    """The arg spec for the vyos_lldp_global module."""

    def __init__(self, **kwargs):
        # Nothing to initialize; the spec is exposed as a class attribute.
        pass

    # Ansible argument specification: top-level "config" dict holds the
    # LLDP options; "state" selects the resource-module action.
    argument_spec = {
        "config": {
            "options": {
                "address": {"type": "str"},
                "enable": {"type": "bool"},
                "legacy_protocols": {
                    "choices": ["cdp", "edp", "fdp", "sonmp"],
                    "type": "list",
                },
                "snmp": {"type": "str"},
            },
            "type": "dict",
        },
        "state": {
            "choices": ["merged", "replaced", "deleted"],
            "default": "merged",
            "type": "str",
        },
    }  # pylint: disable=C0301
|
unknown
|
codeparrot/codeparrot-clean
| ||
package image
import (
dockerspec "github.com/moby/docker-image-spec/specs-go/v1"
"github.com/moby/moby/api/types/storage"
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
)
// RootFS describes the image's root filesystem, including the
// filesystem type and the IDs of the layers it is composed of.
type RootFS struct {
Type string `json:",omitempty"`
Layers []string `json:",omitempty"`
}
// InspectResponse contains response of Engine API:
// GET "/images/{name:.*}/json"
type InspectResponse struct {
// ID is the content-addressable ID of an image.
//
// This identifier is a content-addressable digest calculated from the
// image's configuration (which includes the digests of layers used by
// the image).
//
// Note that this digest differs from the `RepoDigests` below, which
// holds digests of image manifests that reference the image.
ID string `json:"Id"`
// RepoTags is a list of image names/tags in the local image cache that
// reference this image.
//
// Multiple image tags can refer to the same image, and this list may be
// empty if no tags reference the image, in which case the image is
// "untagged", in which case it can still be referenced by its ID.
RepoTags []string
// RepoDigests is a list of content-addressable digests of locally available
// image manifests that the image is referenced from. Multiple manifests can
// refer to the same image.
//
// These digests are usually only available if the image was either pulled
// from a registry, or if the image was pushed to a registry, which is when
// the manifest is generated and its digest calculated.
RepoDigests []string
// Comment is an optional message that can be set when committing or
// importing the image. This field is omitted if not set.
Comment string `json:",omitempty"`
// Created is the date and time at which the image was created, formatted in
// RFC 3339 nano-seconds (time.RFC3339Nano).
//
// This information is only available if present in the image,
// and omitted otherwise.
Created string `json:",omitempty"`
// Author is the name of the author that was specified when committing the
// image, or as specified through MAINTAINER (deprecated) in the Dockerfile.
// This field is omitted if not set.
Author string `json:",omitempty"`
// Config is the image's container configuration, in the Docker extension
// of the OCI image configuration format (see dockerspec.DockerOCIImageConfig).
Config *dockerspec.DockerOCIImageConfig
// Architecture is the hardware CPU architecture that the image runs on.
Architecture string
// Variant is the CPU architecture variant (presently ARM-only).
Variant string `json:",omitempty"`
// Os is the operating system the image is built to run on.
Os string
// OsVersion is the version of the Operating System the image is built to
// run on (especially for Windows).
OsVersion string `json:",omitempty"`
// Size is the total size of the image including all layers it is composed of.
Size int64
// GraphDriver holds information about the storage driver used to store the
// container's and image's filesystem.
GraphDriver *storage.DriverData `json:"GraphDriver,omitempty"`
// RootFS contains information about the image's RootFS, including the
// layer IDs.
RootFS RootFS
// Metadata of the image in the local cache.
//
// This information is local to the daemon, and not part of the image itself.
Metadata Metadata
// Descriptor is the OCI descriptor of the image target.
// It's only set if the daemon provides a multi-platform image store.
//
// WARNING: This is experimental and may change at any time without any backward
// compatibility.
Descriptor *ocispec.Descriptor `json:"Descriptor,omitempty"`
// Manifests is a list of image manifests available in this image. It
// provides a more detailed view of the platform-specific image manifests or
// other image-attached data like build attestations.
//
// Only available if the daemon provides a multi-platform image store, the client
// requests manifests AND does not request a specific platform.
//
// WARNING: This is experimental and may change at any time without any backward
// compatibility.
Manifests []ManifestSummary `json:"Manifests,omitempty"`
// Identity holds information about the identity and origin of the image.
// This is trusted information verified by the daemon and cannot be modified
// by tagging an image to a different name.
Identity *Identity `json:"Identity,omitempty"`
}
// SignatureTimestampType is the type of timestamp used in the signature.
type SignatureTimestampType string
const (
// SignatureTimestampTlog indicates the signature timestamp comes from a
// transparency log ("Tlog") entry.
SignatureTimestampTlog SignatureTimestampType = "Tlog"
// SignatureTimestampAuthority indicates the signature timestamp comes from
// a timestamp authority.
SignatureTimestampAuthority SignatureTimestampType = "TimestampAuthority"
)
// SignatureType is the type of signature format.
type SignatureType string
const (
// SignatureTypeBundleV03 identifies the "bundle-v0.3" signature format.
SignatureTypeBundleV03 SignatureType = "bundle-v0.3"
// SignatureTypeSimpleSigningV1 identifies the "simplesigning-v1" signature format.
SignatureTypeSimpleSigningV1 SignatureType = "simplesigning-v1"
)
// KnownSignerIdentity is an identifier for a special signer identity that is known to the implementation.
type KnownSignerIdentity string
const (
// KnownSignerDHI is the known identity for Docker Hardened Images.
KnownSignerDHI KnownSignerIdentity = "DHI"
)
|
go
|
github
|
https://github.com/moby/moby
|
api/types/image/image_inspect.go
|
import pyxform
from meerkat_abacus.config import config
country_config = config.country_config
from pyxform import builder, xls2json
from pyxform.utils import has_external_choices, sheet_to_csv

# Generate a dummy ODK XForm for every form listed in the country's
# fake-data configuration. Field labels/hints are placeholders; only the
# form structure (groups, field names, field types) matters.
for form in country_config["fake_data"]:
    json_survey = {
        'type': 'survey', 'name': form, 'title': form, 'id_string': form, 'sms_keyword': 'sample', 'default_language': 'default', 'children': []
    }
    # Maps group name -> that group's 'children' list inside json_survey.
    groups = {}
    # Standard ODK metadata fields added to every form.
    for field in ["start", "end", "today", "deviceid"]:
        json_survey["children"].append(
            {'hint': 'hint',
             'type': "text",
             'name': field,
             'label': 'Label'})
    for field, type_info in country_config["fake_data"][form].items():
        # The first key of type_info encodes the field type; anything other
        # than "integer" is treated as free text.
        ty = "integer" if next(iter(type_info)) == "integer" else "text"
        if "./" in field:
            # "group./field" names a field nested inside a group; create the
            # group on first sight, then append to it.
            group_name, field_name = field.split("./")
            child = {'hint': 'hint',
                     'type': ty,
                     'name': field_name,
                     'label': 'Label'}
            if group_name not in groups:
                json_survey["children"].append(
                    {'control': {'appearance': 'field-list'},
                     'type': 'group',
                     'name': group_name,
                     'label': 'A group',
                     'children': [child]}
                )
                groups[group_name] = json_survey["children"][-1]["children"]
            else:
                groups[group_name].append(child)
        else:
            json_survey["children"].append(
                {'hint': 'hint',
                 'type': ty,
                 'name': field,
                 'label': 'Label'})
    survey = builder.create_survey_element_from_dict(json_survey)
    # Setting validate to false will cause the form not to be processed by
    # ODK Validate.
    # This may be desirable since ODK Validate requires launching a subprocess
    # that runs some java code.
    survey.print_xform_to_file(
        form + ".xml", validate=False, warnings=True)
|
unknown
|
codeparrot/codeparrot-clean
| ||
import numpy as np
from scipy.ndimage.filters import convolve
from scipy.ndimage import maximum_filter, gaussian_filter
from .. import MaskSeparationBase, SeparationException
from ..benchmark import HighLowPassFilter
from ... import AudioSignal
from ... import vamp_imported
import numpy as np
import scipy.signal
if vamp_imported:
import vamp
# function for generating the vocal chord impulse response
def rosenmodel(t1, t2, fs):
    """
    Generate the Rosenberg-style vocal chord (glottal) impulse response.

    This model for generating singing vowel sounds from sine tones comes
    from:
    https://simonl02.users.greyc.fr/files/Documents/Teaching/SignalProcessingLabs/lab3.pdf
    (referred to as THE DOCUMENT; Equation 2). Original author: Fatemeh
    Pishdadian. Used in Melodia to generate the melody signal that produces
    a mask.

    Args:
        t1 (float): Duration of the rising (opening) phase, in seconds.
        t2 (float): Duration of the falling (closing) phase, in seconds.
        fs (int): Sample rate in Hz.

    Returns:
        np.ndarray: Impulse response of length floor(t1*fs) + floor(t2*fs) + 2.
    """
    N1 = np.floor(t1 * fs)
    N2 = np.floor(t2 * fs)
    samp_vec1 = np.arange(N1 + 1)
    samp_vec2 = np.arange(N1, N1 + N2 + 1)
    # Rising phase: raised cosine from 0 up to 1 over samples [0, N1].
    ir_func1 = 0.5 * (1 - np.cos((np.pi * samp_vec1) / N1))
    # Falling phase: quarter-cosine from 1 down to 0 over samples [N1, N1+N2].
    ir_func2 = np.cos(np.pi * (samp_vec2 - N1) / (2 * N2))
    return np.concatenate((ir_func1, ir_func2))
# function for computing the denominator coeffs of the vocal cavity filter transfer function
def oral_cavity_filt(pole_amps, pole_freqs, fs):
    """
    Compute the denominator coefficients of the all-pole oral-cavity
    filter H(z).

    This model for generating singing vowel sounds from sine tones comes
    from:
    https://simonl02.users.greyc.fr/files/Documents/Teaching/SignalProcessingLabs/lab3.pdf
    (referred to as THE DOCUMENT; solves "Q. Write a function to synthesize
    filter H(z)"). Original author: Fatemeh Pishdadian. Used in Melodia to
    generate the melody signal that produces a mask.

    Each (amplitude, frequency) pair defines a conjugate pole pair
    a * exp(+-j*2*pi*f/fs); the denominator polynomial is the product of the
    corresponding second-order factors.

    Args:
        pole_amps (np.ndarray): Pole magnitudes, one per conjugate pair.
        pole_freqs (np.ndarray): Pole frequencies in Hz, one per conjugate pair.
        fs (int): Sample rate in Hz.

    Returns:
        np.ndarray: Complex coefficients of length 2 * len(pole_amps) + 1
        (imaginary parts are numerically ~0).
    """
    poles = pole_amps * np.exp(1j * 2 * np.pi * pole_freqs / fs)
    poles_conj = np.conj(poles)
    denom_coeffs = np.array([1.0])
    for pole, pole_conj in zip(poles, poles_conj):
        # Second-order factor (1 - p z^-1)(1 - conj(p) z^-1).
        pair_coeffs = np.convolve(np.array([1, -pole]), np.array([1, -pole_conj]))
        denom_coeffs = np.convolve(denom_coeffs, pair_coeffs)
    return denom_coeffs
def _apply_vowel_filter(impulse_train, fs, t1=0.0075, t2=.013,
                        pole_amps=None, pole_freqs=None):
    """
    Shape an impulse train into a vowel-like signal (source-filter model).

    This model for generating singing vowel sounds from sine tones comes
    from:
    https://simonl02.users.greyc.fr/files/Documents/Teaching/SignalProcessingLabs/lab3.pdf
    (referred to as THE DOCUMENT). Original author: Fatemeh Pishdadian.
    Used in Melodia to generate the melody signal to produce a mask.

    The chain is: glottal pulse (rosenmodel) -> all-pole oral-cavity filter
    (oral_cavity_filt) -> first-difference lip-radiation filter.

    Args:
        impulse_train (np.ndarray): Numpy array with data to be filtered.
        fs (int): Sample rate of audio.
        t1 (float, optional): N1 in Equation 2 in THE DOCUMENT. Defaults to 0.0075.
        t2 (float, optional): N2 in Equation 2 in THE DOCUMENT. Defaults to .013.
        pole_amps (np.ndarray, optional): Pole amplitudes, see Figures 2-4 in THE DOCUMENT.
            Defaults to None, which maps to E vowel.
        pole_freqs (np.ndarray, optional): Pole frequencies, see Figures 2-4 in THE DOCUMENT.
            Defaults to None, which maps to E vowel.

    Returns:
        np.ndarray: Filtered impulse train that should sound sort of like the
        desired vowel, truncated to the input's length.
    """
    if pole_amps is None:
        pole_amps = np.array([0.99, 0.98, 0.9, 0.9])
    if pole_freqs is None:
        pole_freqs = np.array([800, 1200, 2800, 3600])
    # Glottal excitation: convolve with the vocal-chord impulse response.
    vchord_filt = rosenmodel(t1, t2, fs)
    vchord_out = np.convolve(impulse_train, vchord_filt)
    # Oral cavity: all-pole IIR filter 1 / A(z).
    denom_coeffs = oral_cavity_filt(pole_amps, pole_freqs, fs)
    oral_out = scipy.signal.lfilter(
        np.array([1]), denom_coeffs, vchord_out)
    # Lip radiation: first-difference FIR filter (1 - z^-1).
    lip_out = np.real(scipy.signal.lfilter(
        np.array([1, -1]), np.array([1]), oral_out))
    # Trim the convolution tail so output length matches the input.
    lip_out = lip_out[:impulse_train.shape[0]]
    return np.real(lip_out)
class Melodia(MaskSeparationBase):
"""
Implements melody extraction using Melodia [1].
This needs Melodia installed as a vamp plugin, as well as having vampy for
Python installed. Install Melodia via: https://www.upf.edu/web/mtg/melodia.
Note that Melodia can be used only for NON-COMMERCIAL use.
References:
[1] J. Salamon and E. Gómez, "Melody Extraction from Polyphonic Music Signals using
Pitch Contour Characteristics", IEEE Transactions on Audio, Speech and
Language Processing, 20(6):1759-1770, Aug. 2012.
Args:
input_audio_signal (AudioSignal object): The AudioSignal object that has the
audio data that Melodia will be run on.
high_pass_cutoff (optional, float): value (in Hz) for the high pass cutoff
filter.
minimum_frequency (optional, float): minimum frequency in Hertz (default 55.0)
maximum_frequency (optional, float): maximum frequency in Hertz (default 1760.0)
voicing_tolerance (optional, float): Greater values will result in more pitch contours
included in the final melody. Smaller values will result in less pitch
contours included in the final melody (default 0.2).
minimum_peak_salience (optional, float): a hack to avoid silence turning into junk
contours when analyzing monophonic recordings (e.g. solo voice with
no accompaniment). Generally you want to leave this untouched (default 0.0).
compression (optional, float): exponent applied to the magnitude STFT of the
synthesized melody before masking (default 0.5).
num_overtones (optional, int): Number of overtones to use when creating
melody mask.
apply_vowel_filter (optional, bool): Whether or not to apply a vowel filter
on the resynthesized melody signal when masking.
smooth_length (optional, int): Number of frames to smooth discontinuities in the
mask.
add_lower_octave (optional, bool): Use octave below fundamental frequency as well
to take care of octave errors in pitch tracking, since we only care about
the mask. Defaults to False.
mask_type (optional, str): Type of mask to use.
mask_threshold (optional, float): Threshold for mask to convert to binary.
"""
def __init__(self, input_audio_signal, high_pass_cutoff=100, minimum_frequency=55.0,
maximum_frequency=1760.0, voicing_tolerance=0.2, minimum_peak_salience=0.0,
compression=0.5, num_overtones=40, apply_vowel_filter=False, smooth_length=5,
add_lower_octave=False, mask_type='soft', mask_threshold=0.5):
"""See the class docstring for the meaning of each parameter."""
# lazy load vamp to check if it exists
from ... import vamp_imported
# Fail fast with installation instructions when either vampy or the
# Melodia plugin itself is missing.
melodia_installed = False
if vamp_imported:
melodia_installed = 'mtg-melodia:melodia' in vamp.list_plugins()
if not vamp_imported or not melodia_installed:
self._raise_vamp_melodia_error()
super().__init__(
input_audio_signal=input_audio_signal,
mask_type=mask_type,
mask_threshold=mask_threshold
)
self.high_pass_cutoff = high_pass_cutoff
self.minimum_frequency = float(minimum_frequency)
self.maximum_frequency = float(maximum_frequency)
self.voicing_tolerance = float(voicing_tolerance)
self.minimum_peak_salience = float(minimum_peak_salience)
self.compression = compression
self.apply_vowel_filter = apply_vowel_filter
self.add_lower_octave = add_lower_octave
# Filled lazily by extract_melody() / create_melody_signal().
self.melody = None
self.melody_signal = None
self.timestamps = None
self.num_overtones = num_overtones
self.smooth_length = smooth_length
def _raise_vamp_melodia_error(self):
"""Raise a SeparationException pointing at Vamp/Melodia install instructions."""
raise SeparationException(
'\n**~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~**'
'\n* Are Vamp and Melodia installed correctly? *'
'\n* Check https://bit.ly/2DXbrAk for installation instructions! *'
'\n**~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~**')
def extract_melody(self):
"""
Extracts melody from the audio using the melodia vamp plugin. Uses arguments kept
in self:
- `self.minimum_frequency` (default: 55 Hz)
- `self.maximum_frequency` (default: 1760 Hz)
- `self.voicing_tolerance` (default: 0.2)
- `self.minimum_peak_salience` (default: 0.0)
This function sets two class members used in other parts:
- `self.melody`: (numpy array) contains the melody in Hz for every timestep
(0 indicates no voice).
- `self.timestamps`: (numpy array) contains the timestamps for each melody note
"""
# Map our settings onto the plugin's parameter names.
params = {
'minfqr': self.minimum_frequency,
'maxfqr': self.maximum_frequency,
'voicing': self.voicing_tolerance,
'minpeaksalience': self.minimum_peak_salience
}
# Run the Melodia vamp plugin over the whole mixture.
data = vamp.collect(self.audio_signal.audio_data, self.sample_rate,
"mtg-melodia:melodia", parameters=params)
_, melody = data['vector']
hop = 128. / 44100. # hard coded hop in Melodia vamp plugin, converting it to frames.
# NOTE(review): the 8-hop start offset presumably compensates for the
# plugin's analysis latency -- confirm against the Melodia documentation.
timestamps = 8 * hop + np.arange(len(melody)) * hop
# Negative values appear to mark unvoiced frames; clamp them to 0
# ("no voice") so downstream synthesis skips them.
melody[melody < 0] = 0
self.melody = melody
self.timestamps = timestamps
def create_melody_signal(self, num_overtones):
"""
Adapted from Melosynth by Justin Salamon: https://github.com/justinsalamon/melosynth.
To mask the mixture, we need to identify time-frequency bins that belong to the
melody. Melodia outputs only the fundamental frequency of the melodic line.
To construct the mask we take the fundamental frequency and add all the
overtones of it (up to num_overtones) to the mask. The melody is faded in and
out at onsets and offsets to make the separation sound more natural
(hard-coded by transition_length).
Args:
num_overtones (int): Number of overtones to expand out to build the mask.
Returns:
AudioSignal: The synthesized melody signal (also stored on
`self.melody_signal`), one copy per mixture channel.
"""
# Pad the start so the synthesized signal lines up with time 0.
if self.timestamps[0] > 0:
estimated_hop = np.median(np.diff(self.timestamps))
previous_time = max(self.timestamps[0] - estimated_hop, 0)
self.timestamps = np.insert(self.timestamps, 0, previous_time)
self.melody = np.insert(self.melody, 0, 0)
sample_rate = self.audio_signal.sample_rate
melody_signal = []
transition_length = .001 # duration for fade in/out and frequency interpretation
# Per-overtone phase accumulators keep the waveform continuous across frames.
phase = np.zeros(num_overtones)
previous_frequency = 0
previous_time = 0
overtone_weights = np.ones(num_overtones)
for time, frequency in zip(self.timestamps, self.melody):
if self.add_lower_octave:
frequency = frequency / 2
# taking care of octave errors since we only care about masking
num_samples = int(np.round((time - previous_time) * sample_rate))
if num_samples > 0:
num_transition_samples = float(
min(np.round(transition_length * sample_rate), num_samples))
# Glide linearly from the previous pitch to the new one over the
# transition window; a fresh onset just holds the new pitch.
frequency_series = np.ones(num_samples) * previous_frequency
if previous_frequency > 0 and frequency > 0:
frequency_series += np.minimum(
np.arange(num_samples) / num_transition_samples, 1) * \
(frequency - previous_frequency)
elif frequency > 0:
frequency_series = np.ones(num_samples) * frequency
# Additive synthesis: each overtone is a square-ish wave
# (np.sign of a sine) whose phase carries over between frames.
samples = np.zeros(num_samples)
for overtone in range(num_overtones):
overtone_num = overtone + 1
phasors = 2 * np.pi * overtone_num * frequency_series / float(sample_rate)
phases = phase[overtone] + np.cumsum(phasors)
samples += overtone_weights[overtone] * np.sign(np.sin(phases))
phase[overtone] = phases[-1]
# Fade in at voicing onsets, fade out at offsets, and silence
# fully-unvoiced frames, to avoid clicks.
if previous_frequency == 0 and frequency > 0:
samples *= np.minimum(np.arange(num_samples) / num_transition_samples, 1)
elif previous_frequency > 0 and frequency == 0:
samples *= np.maximum(1 - np.arange(num_samples) / num_transition_samples, 0)
elif previous_frequency == 0 and frequency == 0:
samples *= 0
melody_signal.extend(samples)
previous_frequency = frequency
previous_time = time
melody_signal = np.asarray(melody_signal)
if self.apply_vowel_filter:
melody_signal = _apply_vowel_filter(melody_signal, sample_rate)
# Normalize; the 1e-7 floor guards against an all-zero melody.
melody_signal /= float(max(np.max(melody_signal), 1e-7))
# Duplicate across channels and trim to the mixture's length.
melody_signal = [melody_signal for _ in range(self.audio_signal.num_channels)]
melody_signal = np.asarray(melody_signal)
melody_signal = melody_signal[:, 0:self.audio_signal.signal_length]
melody_signal = AudioSignal(
audio_data_array=melody_signal,
sample_rate=sample_rate,
stft_params=self.audio_signal.stft_params
)
self.melody_signal = melody_signal
return melody_signal
def create_harmonic_mask(self, melody_signal):
"""
Creates a harmonic mask from the melody signal. The mask is smoothed to reduce
the effects of discontinuities in the melody synthesizer.
Args:
melody_signal (AudioSignal): Synthesized melody, e.g. the output of
create_melody_signal().
Returns:
np.ndarray: Mask array with the same shape as the mixture STFT.
"""
stft = np.abs(melody_signal.stft())
# Need to threshold the melody stft since the synthesized
# F0 sequence overtones are at different weights.
stft = stft ** self.compression
stft /= np.maximum(np.max(stft, axis=1, keepdims=True), 1e-7)
# NOTE(review): self.stft is assumed to be the mixture STFT provided by
# the MaskSeparationBase parent class -- confirm against the base class.
mask = np.empty(self.stft.shape)
# Smoothing the mask row-wise using a low-pass filter to
# get rid of discontinuities in the mask.
kernel = np.full((1, self.smooth_length), 1 / self.smooth_length)
for ch in range(self.audio_signal.num_channels):
mask[..., ch] = convolve(stft[..., ch], kernel)
return mask
def run(self):
    """Run Melodia-based separation.

    Builds a foreground (melody) and background mask from the synthesized
    melody's harmonic mask, combined with a high/low-pass guard so that
    content below `self.high_pass_cutoff` always stays in the background.

    Returns:
        list: `self.result_masks` — [background_mask, foreground_mask].
    """
    high_low = HighLowPassFilter(self.audio_signal, self.high_pass_cutoff)
    high_pass_masks = high_low.run()

    # Extract and synthesize the melody only once; reuse it on later runs.
    if self.melody_signal is None:
        self.extract_melody()
        self.create_melody_signal(self.num_overtones)

    foreground_mask = self.create_harmonic_mask(self.melody_signal)
    foreground_mask = self.MASKS['soft'](foreground_mask)
    background_mask = foreground_mask.invert_mask()

    _masks = np.stack(
        [background_mask.mask, foreground_mask.mask], axis=-1)
    self.result_masks = []
    for i in range(_masks.shape[-1]):
        mask_data = _masks[..., i]
        if self.mask_type == self.MASKS['binary']:
            # Binary mask: a bin belongs to whichever source is stronger.
            mask_data = _masks[..., i] == np.max(_masks, axis=-1)
        # Background (i == 0) keeps everything below the high-pass cutoff;
        # foreground (i == 1) is restricted to frequencies above it.
        if i == 0:
            mask_data = np.maximum(mask_data, high_pass_masks[i].mask)
        elif i == 1:
            mask_data = np.minimum(mask_data, high_pass_masks[i].mask)
        mask = self.mask_type(mask_data)
        self.result_masks.append(mask)
    return self.result_masks
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
# Copyright Yassine Lamgarchal <lamgarchal.yassine@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from chillaxd.raft import message
def test_build_append_entry_request():
    """An append-entry request round-trips through encode/decode."""
    test_params = (1, 1, 2, 3, (4, 5))
    ae_message = message.build_append_entry_request(*test_params)
    decoded_message = message.decode_message(ae_message)
    assert (message.APPEND_ENTRY_REQUEST, test_params) == decoded_message


def test_build_append_entry_response():
    """An append-entry response round-trips through encode/decode."""
    test_params = (0, True, 0, None)
    aer_message = message.build_append_entry_response(*test_params)
    decoded_message = message.decode_message(aer_message)
    assert (message.APPEND_ENTRY_RESPONSE, test_params) == decoded_message


def test_build_request_vote():
    """A request-vote message round-trips through encode/decode."""
    test_params = (0, 1, 2)
    rv_message = message.build_request_vote(*test_params)
    decoded_message = message.decode_message(rv_message)
    assert (message.REQUEST_VOTE, test_params) == decoded_message


def test_build_request_vote_response():
    """A request-vote response round-trips through encode/decode."""
    test_params = (0, False)
    rvr_message = message.build_request_vote_response(*test_params)
    decoded_message = message.decode_message(rvr_message)
    assert (message.REQUEST_VOTE_RESPONSE, test_params) == decoded_message
|
unknown
|
codeparrot/codeparrot-clean
| ||
from incidentstransports.conf.settings import *
# Local/development settings: start from the project defaults and override.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
# django project root, automatically adapts to your own platform
PROJECT_ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
MANAGERS = ADMINS
# NOTE(review): the short ENGINE name 'sqlite3' only works on old Django
# versions; newer releases require 'django.db.backends.sqlite3' -- confirm
# the Django version this project targets.
DATABASES = {
'default': {
'ENGINE': 'sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': PROJECT_ROOT_PATH + '/db/db.sqlite', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# for debug toolbar
INTERNAL_IPS = ('127.0.0.1',)
# serving static local file :
DJANGO_STATIC_MEDIA_URL = '/static/'
# Tasks run by the django-jenkins CI integration.
JENKINS_TASKS = (
'django_jenkins.tasks.run_pylint',
'django_jenkins.tasks.with_coverage',
'django_jenkins.tasks.django_tests',
'django_jenkins.tasks.run_pep8',
'django_jenkins.tasks.run_pyflakes' ,
'django_jenkins.tasks.run_sloccount',
)
# adding debug toolbar :
DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False }
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar', )
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
=========================================
Compute source power using DICS beamfomer
=========================================
Compute a Dynamic Imaging of Coherent Sources (DICS) filter from single trial
activity to estimate source power for two frequencies of interest.
The original reference for DICS is:
Gross et al. Dynamic imaging of coherent sources: Studying neural interactions
in the human brain. PNAS (2001) vol. 98 (2) pp. 694-699
"""
# Author: Roman Goj <roman.goj@gmail.com>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import mne
from mne.datasets import sample
from mne.time_frequency import csd_epochs
from mne.beamformer import dics_source_power
print(__doc__)
# Paths into the MNE "sample" dataset (downloaded on first use).
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_raw-eve.fif'
fname_fwd = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'
subjects_dir = data_path + '/subjects'
###############################################################################
# Read raw data
raw = mne.io.read_raw_fif(raw_fname)
raw.info['bads'] = ['MEG 2443'] # 1 bad MEG channel
# Set picks: MEG channels only, excluding the bad channel marked above.
picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=False,
stim=False, exclude='bads')
# Read epochs: 0.2 s pre-stimulus baseline to 0.5 s post-stimulus.
event_id, tmin, tmax = 1, -0.2, 0.5
events = mne.read_events(event_fname)
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
picks=picks, baseline=(None, 0), preload=True,
reject=dict(grad=4000e-13, mag=4e-12))
evoked = epochs.average()
# Read forward operator
forward = mne.read_forward_solution(fname_fwd, surf_ori=True)
# Computing the data and noise cross-spectral density matrices
# The time-frequency window was chosen on the basis of spectrograms from
# example time_frequency/plot_time_frequency.py
# As fsum is False csd_epochs returns a list of CrossSpectralDensity
# instances than can then be passed to dics_source_power
data_csds = csd_epochs(epochs, mode='multitaper', tmin=0.04, tmax=0.15,
fmin=15, fmax=30, fsum=False)
noise_csds = csd_epochs(epochs, mode='multitaper', tmin=-0.11,
tmax=-0.001, fmin=15, fmax=30, fsum=False)
# Compute DICS spatial filter and estimate source power
stc = dics_source_power(epochs.info, forward, noise_csds, data_csds)
# Fixed color limits so the plots for both frequencies are comparable.
clim = dict(kind='value', lims=[1.6, 1.9, 2.2])
for i, csd in enumerate(data_csds):
message = 'DICS source power at %0.1f Hz' % csd.frequencies[0]
brain = stc.plot(surface='inflated', hemi='rh', subjects_dir=subjects_dir,
time_label=message, figure=i, clim=clim)
brain.set_data_time_index(i)
brain.show_view('lateral')
# Uncomment line below to save images
# brain.save_image('DICS_source_power_freq_%d.png' % csd.frequencies[0])
|
unknown
|
codeparrot/codeparrot-clean
| ||
from graphics.widgets.checkbox_item_output_f import CheckboxItemOutputF
from utils.train.data_augmentation import DataAugmentation
import constants.output_constants as const
import tkinter as tk
class DataAugmentationOutputF(tk.Frame):
    """Read-only frame that displays the data-augmentation options as a
    titled list of checkbox items.
    """

    def __init__(self,
                 parent,
                 disabled=False):
        """
        :param parent: Parent.
        :param disabled: - Default: False;
                         - If True all the widgets will be disabled.
        """
        tk.Frame.__init__(self,
                          parent,
                          relief=const.DAO_FRAME_RELIEF,
                          padx=const.DAO_FRAME_PADX,
                          pady=const.DAO_FRAME_PADY,
                          bd=const.DAO_FRAME_BD)
        # One CheckboxItemOutputF per augmentation option, rebuilt on each
        # update_status() call.
        self._cio_output = []

        # Widget creation
        self._lbl_title = tk.Label(
            self,
            font=const.DAO_TITLE_FONT,
            text=const.DAO_TITLE_TEXT,
            padx=const.DAO_TITLE_PADX,
            pady=const.DAO_TITLE_PADY,
        )
        self._f_option = tk.Frame(
            self,
            relief=const.DAO_SUBFRAME_RELIEF,
            padx=const.DAO_SUBFRAME_PADX,
            pady=const.DAO_SUBFRAME_PADY,
            bd=const.DAO_SUBFRAME_BD
        )

        # Widget placement
        self._lbl_title.pack(side='top',
                             fill='both',
                             expand=True)
        self._f_option.pack(side='top',
                            fill='both',
                            expand=True)

        # Show the default option set until update_status() is called again.
        initial_output = DataAugmentation()
        self.update_status(initial_output)
        if disabled:
            self.disable()

    #########################################################################
    # Public methods

    def update_status(
            self,
            data_augmentation_options: DataAugmentation):
        """
        - Updates the option's state: destroys the current checkbox items and
          rebuilds one per option in the given DataAugmentation.
        :param data_augmentation_options: DataAugmentation list.
        """
        for widget in self._cio_output:
            widget.destroy()
        self._cio_output = []
        for option in data_augmentation_options.get_options_list():
            widget = CheckboxItemOutputF(
                parent=self._f_option,
                item_text=option[0],
                checked=option[1],
            )
            self._cio_output.append(widget)
            widget.pack(side='top')

    def enable(self):
        """ Enables all the widgets."""
        for widget in self._cio_output:
            widget.enable()
        self._lbl_title.config(state='normal')

    def disable(self):
        """ Disables all the widgets."""
        for widget in self._cio_output:
            widget.disable()
        self._lbl_title.config(state='disabled')

    #########################################################################
|
unknown
|
codeparrot/codeparrot-clean
| ||
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_GRAPPLER_UTILS_FUNCTIONS_H_
#define TENSORFLOW_CORE_GRAPPLER_UTILS_FUNCTIONS_H_
#include <memory>
#include <string>
#include "absl/container/flat_hash_map.h"
#include "absl/container/flat_hash_set.h"
#include "absl/container/inlined_vector.h"
#include "tensorflow/core/framework/attr_value.pb.h"
#include "tensorflow/core/framework/function.h"
#include "tensorflow/core/framework/function.pb.h"
#include "tensorflow/core/framework/node_def_util.h"
#include "tensorflow/core/framework/op_def.pb.h"
#include "tensorflow/core/grappler/grappler_item.h"
#include "tensorflow/core/lib/gtl/flatset.h"
namespace tensorflow {
namespace grappler {
// Function input argument instantiated into an '_Arg' node in the function body
// graph, with an 'index' attribute corresponding to the input position.
struct InputArgInstantiation {
InputArgInstantiation(std::string node_name, DataType data_type)
: node_name(std::move(node_name)), data_type(data_type) {}
// Name of the '_Arg' node in the function body graph.
std::string node_name;
// Resolved type of the instantiated input argument.
DataType data_type;
};
// Function output instantiated into a '_Retval' node in the function body
// graph, with an 'index' attribute corresponding to the output position.
struct OutputArgInstantiation {
OutputArgInstantiation(std::string node_name, DataType data_type)
: node_name(std::move(node_name)), data_type(data_type) {}
// Name of the '_Retval' node in the function body graph.
std::string node_name;
// Resolved type of the instantiated output.
DataType data_type;
};
// A mapping from control output name to node name in function body graph.
struct ControlOutput {
std::string output_name;
std::string node_name;
// Orders control outputs by output name, giving containers of
// ControlOutput a deterministic ordering.
bool operator<(const ControlOutput& a) const {
return output_name < a.output_name;
}
};
// A special case of GrapplerItem, constructed from a TensorFlow Function.
class GrapplerFunctionItem : public GrapplerItem {
public:
GrapplerFunctionItem() = default;
// Accessors for the function's metadata and instantiated arguments.
// NOTE(review): the 'const' qualifier on the by-value std::size_t return
// types below is meaningless (triggers -Wignored-qualifiers); dropping it
// would need a coordinated header/implementation change.
const std::string& description() const;
const std::vector<InputArgInstantiation>& inputs() const;
const InputArgInstantiation& input(int i) const;
const std::size_t input_size() const;
const std::vector<OutputArgInstantiation>& outputs() const;
const OutputArgInstantiation& output(int i) const;
const std::size_t output_size() const;
const std::vector<ControlOutput>& control_outputs() const;
const std::size_t control_output_size() const;
const AttrSlice& func_attr() const;
const std::vector<const FunctionDef::ArgAttrs*>& arg_attr() const;
const GraphDef& function_body() const;
GraphDef& mutable_function_body();
bool is_stateful() const;
// Replaces the function body graph with 'other'.
GrapplerFunctionItem& SwapFunctionBody(GraphDef&& other);
private:
// Only these factory/mutation helpers may construct or rewrite an item.
friend absl::Status MakeGrapplerFunctionItem(const FunctionDef&,
const AttrSlice&,
const FunctionLibraryDefinition&,
int, GrapplerFunctionItem*);
friend absl::Status ReplaceInputWithConst(const NodeDef&, int,
GrapplerFunctionItem*);
friend absl::Status RemoveFunctionOutputs(const absl::flat_hash_set<int>&,
GrapplerFunctionItem*,
std::vector<std::pair<int, int>>*);
GrapplerFunctionItem(std::string func_name, std::string description,
AttrSlice func_attr,
std::vector<const FunctionDef::ArgAttrs*> arg_attr,
std::vector<InputArgInstantiation> input_args,
std::vector<OutputArgInstantiation> output_args,
std::vector<ControlOutput> control_outputs,
int graph_def_version, bool is_stateful,
GraphDef&& function_body);
std::string description_;
AttrSlice func_attr_; // Attributes specific to function definition that
// produced this item (FuncDef.attr field).
// Attributes of function arguments
std::vector<const FunctionDef::ArgAttrs*> arg_attr_;
std::vector<InputArgInstantiation> input_args_;
std::vector<OutputArgInstantiation> output_args_;
std::vector<ControlOutput> control_outputs_;
bool is_stateful_ = false;
};
// Check if function input/output types are fully defined only at instantiation
// time (parametrized by its instantiation node).
bool HasParametrizedType(const FunctionDef& func);

// Check if a function body is parametrized by its instantiation node. Function
// body is parametrized, if it has at least one node with a 'placeholder'
// attribute.
bool HasParametrizedBody(const FunctionDef& func);

// Check if function has parametrized type or body.
bool IsParametrized(const FunctionDef& func);

// Resolve function instantiation type parameters from the attributes of the
// caller node. Return error if type can't be resolved.
absl::Status InstantiationTypeParameters(
    const FunctionDef& func, const AttrSlice& func_instantiation_attr,
    absl::flat_hash_map<std::string, DataType>* type_parameters);

// Resolve function instantiation body parameters (values for the function body
// attr placeholders) from the attributes of the caller node. Return error if
// type can't be resolved.
absl::Status InstantiationBodyParameters(
    const FunctionDef& func, const AttrSlice& func_instantiation_attr,
    absl::flat_hash_map<std::string, AttrValue>* body_parameters);

// Replace one of the function inputs with a constant.
absl::Status ReplaceInputWithConst(const NodeDef& input_const, int input_index,
                                   GrapplerFunctionItem* item);

// Removes outputs from instantiated grappler function item. For all active
// function outputs that changed its output index, this function adds an output
// mapping (std::pair<old index, new index>).
absl::Status RemoveFunctionOutputs(
    const absl::flat_hash_set<int>& remove_outputs, GrapplerFunctionItem* item,
    std::vector<std::pair<int, int>>* output_mapping);

// TODO(ezhulenev, b/120103818): Add RemoveFunctionInputs.

// Make a GrapplerFunctionItem from the function definition and function
// instantiation attributes (caller node attributes). Returns error if the given
// function def cannot be converted (e.g. not all attributes are defined).
absl::Status MakeGrapplerFunctionItem(const FunctionDef& func,
                                      const AttrSlice& func_instantiation_attr,
                                      const FunctionLibraryDefinition& flib,
                                      int graph_def_version,
                                      GrapplerFunctionItem* item);

// Make a GrapplerFunction item from the function definition. Function must be
// fully defined (no type or body parametrization).
// TODO(ezhulenev): Support parametrized functions without fully defined
// instantiation attributes? Do we ever want to optimize parametrized function
// without specializing it to its instantiation attributes (at least types)?
absl::Status MakeGrapplerFunctionItem(const FunctionDef& func,
                                      const FunctionLibraryDefinition& flib,
                                      int graph_def_version,
                                      GrapplerFunctionItem* item);

// Make a FunctionDef from the GrapplerFunctionItem. Use function library
// definition to lookup function body nodes output names and ranges.
absl::Status MakeFunctionDef(const GrapplerFunctionItem& item,
                             const FunctionLibraryDefinition& flib,
                             FunctionDef* func);
} // end namespace grappler
} // end namespace tensorflow
#endif // TENSORFLOW_CORE_GRAPPLER_UTILS_FUNCTIONS_H_
|
c
|
github
|
https://github.com/tensorflow/tensorflow
|
tensorflow/core/grappler/utils/functions.h
|
from collections import OrderedDict
import pytest
from django.test import TestCase
from mongoengine import Document, EmbeddedDocument, fields
from rest_framework import fields as drf_fields
from rest_framework.compat import unicode_repr
from rest_framework.serializers import Field, Serializer
from rest_framework_mongoengine.fields import DocumentField
from rest_framework_mongoengine.serializers import (
DocumentSerializer, EmbeddedDocumentSerializer
)
from .models import DumbEmbedded, OtherEmbedded
from .utils import dedent
class NestedEmbeddedDoc(EmbeddedDocument):
    # Embedded document that itself embeds another one (two nesting levels).
    name = fields.StringField()
    embedded = fields.EmbeddedDocumentField(DumbEmbedded)


class SelfEmbeddingDoc(EmbeddedDocument):
    # Recursive embedding: 'self' makes the field refer back to this class.
    name = fields.StringField()
    embedded = fields.EmbeddedDocumentField('self')


class EmbeddingDoc(Document):
    # Top-level document holding a single embedded document.
    embedded = fields.EmbeddedDocumentField(DumbEmbedded)


class NestedEmbeddingDoc(Document):
    embedded = fields.EmbeddedDocumentField(NestedEmbeddedDoc)


class RequiredEmbeddingDoc(Document):
    embedded = fields.EmbeddedDocumentField(DumbEmbedded, required=True)


class ListEmbeddingDoc(Document):
    embedded_list = fields.EmbeddedDocumentListField(DumbEmbedded)


class RecursiveEmbeddingDoc(Document):
    embedded = fields.EmbeddedDocumentField(SelfEmbeddingDoc)


class GenericEmbeddingDoc(Document):
    # Generic field: may hold any EmbeddedDocument subclass.
    embedded = fields.GenericEmbeddedDocumentField()
class TestEmbeddingMapping(TestCase):
    """Check how embedded documents are mapped to serializer fields, by
    comparing the serializer's repr against an expected layout."""

    def test_embbedded(self):
        class TestSerializer(EmbeddedDocumentSerializer):
            class Meta:
                model = DumbEmbedded
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                name = CharField(required=False)
                foo = IntegerField(required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding(self):
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = NestedEmbeddingDoc
                fields = '__all__'
                depth = 1

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
                    embedded = EmbeddedSerializer(required=False):
                        name = CharField(required=False)
                        foo = IntegerField(required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_nodepth(self):
        # `depth` applies to referenced documents; embedded documents are
        # expanded regardless, so the mapping matches test_embedding.
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = NestedEmbeddingDoc
                fields = '__all__'
                depth = 0

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
                    embedded = EmbeddedSerializer(required=False):
                        name = CharField(required=False)
                        foo = IntegerField(required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_restricted(self):
        # depth_embedding limits how deep embedded docs are expanded; below
        # the limit the field degrades to a HiddenField.
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = NestedEmbeddingDoc
                fields = '__all__'
                depth_embedding = 1

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
                    embedded = HiddenField(default=None, required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_recursive(self):
        # Self-embedding docs expand to the default embedding depth (5
        # levels here) before degrading to HiddenField.
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = RecursiveEmbeddingDoc
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
                    embedded = EmbeddedSerializer(required=False):
                        name = CharField(required=False)
                        embedded = EmbeddedSerializer(required=False):
                            name = CharField(required=False)
                            embedded = EmbeddedSerializer(required=False):
                                name = CharField(required=False)
                                embedded = EmbeddedSerializer(required=False):
                                    name = CharField(required=False)
                                    embedded = HiddenField(default=None, required=False)
        """)
        serializer = TestSerializer()
        assert unicode_repr(serializer) == expected

    def test_embedding_recursive_restricted(self):
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = RecursiveEmbeddingDoc
                fields = '__all__'
                depth_embedding = 2

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
                    embedded = EmbeddedSerializer(required=False):
                        name = CharField(required=False)
                        embedded = HiddenField(default=None, required=False)
        """)
        serializer = TestSerializer()
        assert unicode_repr(serializer) == expected

    def test_embedding_nested(self):
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = NestedEmbeddingDoc
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
                    embedded = EmbeddedSerializer(required=False):
                        name = CharField(required=False)
                        foo = IntegerField(required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_list(self):
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = ListEmbeddingDoc
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded_list = EmbeddedSerializer(many=True, required=False):
                    name = CharField(required=False)
                    foo = IntegerField(required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_required(self):
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = RequiredEmbeddingDoc
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=True):
                    name = CharField(required=False)
                    foo = IntegerField(required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_generic(self):
        # Generic embedded fields cannot be expanded (model unknown), so a
        # DocumentField wrapper is used instead of a nested serializer.
        class TestSerializer(DocumentSerializer):
            class Meta:
                model = GenericEmbeddingDoc
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = GenericEmbeddedDocumentField(model_field=<mongoengine.fields.GenericEmbeddedDocumentField: embedded>, required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_custom_generic(self):
        class CustomEmbedding(DocumentField):
            pass

        class TestSerializer(DocumentSerializer):
            serializer_embedded_generic = CustomEmbedding

            class Meta:
                model = GenericEmbeddingDoc
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = CustomEmbedding(model_field=<mongoengine.fields.GenericEmbeddedDocumentField: embedded>, required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_custom_nested(self):
        class CustomTestSerializer(Serializer):
            bla = drf_fields.CharField()

        class TestSerializer(DocumentSerializer):
            serializer_embedded_nested = CustomTestSerializer

            class Meta:
                model = NestedEmbeddingDoc
                fields = '__all__'

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = EmbeddedSerializer(required=False):
                    bla = CharField()
        """)
        assert unicode_repr(TestSerializer()) == expected

    def test_embedding_custom_bottom(self):
        # serializer_embedded_bottom replaces the HiddenField used at the
        # depth_embedding cutoff.
        class CustomEmbedding(Field):
            bla = drf_fields.CharField()

        class TestSerializer(DocumentSerializer):
            serializer_embedded_bottom = CustomEmbedding

            class Meta:
                model = NestedEmbeddingDoc
                fields = '__all__'
                depth_embedding = 0

        expected = dedent("""
            TestSerializer():
                id = ObjectIdField(read_only=True)
                embedded = CustomEmbedding(default=None, required=False)
        """)
        assert unicode_repr(TestSerializer()) == expected
class EmbeddingSerializer(DocumentSerializer):
    # Serializers shared by the integration tests below.
    class Meta:
        model = EmbeddingDoc
        fields = '__all__'


class NestedEmbeddingSerializer(DocumentSerializer):
    class Meta:
        model = NestedEmbeddingDoc
        fields = '__all__'
class TestEmbeddedIntegration(TestCase):
    """ should work on isolated embedded docs """

    def test_retrieve(self):
        """ serializing standalone doc """
        class TestSerializer(EmbeddedDocumentSerializer):
            class Meta:
                model = OtherEmbedded
                fields = '__all__'

        instance = OtherEmbedded(name="qwe", bar=123)
        serializer = TestSerializer(instance)
        assert serializer.data == OrderedDict([('name', "qwe"), ('bar', 123)])

    def test_create(self):
        """ creating standalone instance """
        class TestSerializer(EmbeddedDocumentSerializer):
            class Meta:
                model = OtherEmbedded
                fields = '__all__'

        data = {'name': "qwe", 'bar': 123}
        serializer = TestSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance, OtherEmbedded)
        assert instance.name == "qwe"
        assert instance.bar == 123

    def test_update(self):
        """ updating standalone instance with partial data """
        class TestSerializer(EmbeddedDocumentSerializer):
            class Meta:
                model = OtherEmbedded
                fields = '__all__'

        instance = OtherEmbedded(name="qwe", bar=123)
        data = {'bar': 234}
        # partial=True: untouched fields keep their values.
        serializer = TestSerializer(instance, data=data, partial=True)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance, OtherEmbedded)
        assert instance.name == "qwe"
        assert instance.bar == 234
class TestEmbeddingIntegration(TestCase):
    """Round-trip tests for a document with a single embedded document."""

    def doCleanups(self):
        EmbeddingDoc.drop_collection()

    def test_retrieve(self):
        instance = EmbeddingDoc.objects.create(
            embedded=DumbEmbedded(name="Foo")
        )
        serializer = EmbeddingSerializer(instance)
        expected = {
            'id': str(instance.id),
            'embedded': OrderedDict((('name', "Foo"), ('foo', None))),
        }
        assert serializer.data == expected

    def test_create(self):
        data = {
            'embedded': {'name': "emb"}
        }
        serializer = EmbeddingSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance.embedded, DumbEmbedded)
        assert instance.embedded.name == "emb"
        expected = {
            'id': str(instance.id),
            'embedded': OrderedDict((('name', "emb"), ('foo', None))),
        }
        assert serializer.data == expected

    def test_update(self):
        # Non-partial update: the whole embedded doc is replaced, so the
        # previous `name` is dropped.
        instance = EmbeddingDoc.objects.create(
            embedded=DumbEmbedded(name="emb", foo=123)
        )
        data = {
            'embedded': {'foo': 321}
        }
        serializer = EmbeddingSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance.embedded, DumbEmbedded)
        assert instance.embedded.name is None
        assert instance.embedded.foo == 321
        expected = {
            'id': str(instance.id),
            'embedded': OrderedDict((('name', None), ('foo', 321))),
        }
        assert serializer.data == expected

    @pytest.mark.skipif(True, reason="TODO")
    def test_update_partial(self):
        pass
class TestNestedEmbeddingIntegration(TestCase):
    """Round-trip tests for two levels of embedded documents."""

    def doCleanups(self):
        NestedEmbeddingDoc.drop_collection()

    def test_retrieve(self):
        instance = NestedEmbeddingDoc.objects.create(
            embedded=NestedEmbeddedDoc(
                name='Foo',
                embedded=DumbEmbedded(name="Bar")
            )
        )
        serializer = NestedEmbeddingSerializer(instance)
        expected = {
            'id': str(instance.id),
            'embedded': OrderedDict((
                ('name', "Foo"),
                ('embedded', OrderedDict((
                    ('name', "Bar"),
                    ('foo', None)
                )))
            )),
        }
        assert serializer.data == expected

    def test_create(self):
        data = {
            'embedded': {
                'name': 'Foo',
                'embedded': {'name': "emb"}
            }
        }
        serializer = NestedEmbeddingSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance.embedded, NestedEmbeddedDoc)
        assert instance.embedded.name == "Foo"
        assert isinstance(instance.embedded.embedded, DumbEmbedded)
        assert instance.embedded.embedded.name == 'emb'
        assert instance.embedded.embedded.foo is None
        expected = {
            'id': str(instance.id),
            'embedded': OrderedDict((
                ('name', "Foo"),
                ('embedded', OrderedDict((('name', "emb"), ('foo', None))))
            )),
        }
        assert serializer.data == expected

    def test_update(self):
        # Non-partial update replaces the nested structure entirely: the
        # inner doc's old `name` is gone after the update.
        instance = NestedEmbeddingDoc.objects.create(
            embedded=NestedEmbeddedDoc(
                name='Foo',
                embedded=DumbEmbedded(name="Bar")
            )
        )
        data = {
            'embedded': {
                'name': 'Bar',
                'embedded': {"foo": 321}
            }
        }
        serializer = NestedEmbeddingSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance.embedded, NestedEmbeddedDoc)
        assert instance.embedded.name == "Bar"
        assert isinstance(instance.embedded.embedded, DumbEmbedded)
        assert instance.embedded.embedded.name is None
        assert instance.embedded.embedded.foo == 321
        expected = {
            'id': str(instance.id),
            'embedded': OrderedDict((
                ('name', 'Bar'),
                ('embedded', OrderedDict((
                    ('name', None),
                    ('foo', 321)
                )))
            )),
        }
        assert serializer.data == expected

    @pytest.mark.skipif(True, reason="TODO")
    def test_update_partial(self):
        pass
class ListEmbeddingSerializer(DocumentSerializer):
    class Meta:
        model = ListEmbeddingDoc
        fields = '__all__'


class TestListEmbeddingIntegration(TestCase):
    """Round-trip tests for EmbeddedDocumentListField."""

    def doCleanups(self):
        ListEmbeddingDoc.drop_collection()

    def test_retrieve(self):
        instance = ListEmbeddingDoc.objects.create(
            embedded_list=[DumbEmbedded(name="Foo"), DumbEmbedded(name="Bar")]
        )
        serializer = ListEmbeddingSerializer(instance)
        expected = {
            'id': str(instance.id),
            'embedded_list': [
                OrderedDict((('name', "Foo"), ('foo', None))),
                OrderedDict((('name', "Bar"), ('foo', None)))
            ],
        }
        assert serializer.data == expected

    def test_create(self):
        data = {
            'embedded_list': [
                {'name': "Foo"},
                {'foo': 123}
            ]
        }
        serializer = ListEmbeddingSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance, ListEmbeddingDoc)
        assert isinstance(instance.embedded_list[0], DumbEmbedded)
        assert instance.embedded_list[0].name == "Foo"
        assert instance.embedded_list[0].foo is None
        assert instance.embedded_list[1].name is None
        assert instance.embedded_list[1].foo == 123
        expected = {
            'id': str(instance.id),
            'embedded_list': [
                OrderedDict((('name', "Foo"), ('foo', None))),
                OrderedDict((('name', None), ('foo', 123)))
            ]
        }
        assert serializer.data == expected

    def test_update(self):
        # The whole list is replaced: two elements become one.
        instance = ListEmbeddingDoc.objects.create(
            embedded_list=[DumbEmbedded(name="Foo"), DumbEmbedded(name="Bar")]
        )
        data = {
            'embedded_list': [
                OrderedDict((('name', "Baz"), ('foo', 321)))
            ]
        }
        serializer = ListEmbeddingSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        instance = serializer.save()
        assert isinstance(instance, ListEmbeddingDoc)
        assert isinstance(instance.embedded_list[0], DumbEmbedded)
        assert len(instance.embedded_list) == 1
        assert instance.embedded_list[0].name == "Baz"
        assert instance.embedded_list[0].foo == 321
        expected = {
            'id': str(instance.id),
            'embedded_list': [OrderedDict((('name', "Baz"), ('foo', 321)))],
        }
        assert serializer.data == expected

    @pytest.mark.skipif(True, reason="TODO")
    def test_update_partial(self):
        pass
class ValidatingEmbeddedModel(EmbeddedDocument):
    # min_length triggers mongoengine-level validation on save/validate.
    text = fields.StringField(min_length=3)


class ValidatingEmbeddingModel(Document):
    embedded = fields.EmbeddedDocumentField(ValidatingEmbeddedModel)


class ValidatingSerializer(DocumentSerializer):
    class Meta:
        model = ValidatingEmbeddingModel
        fields = '__all__'
        depth = 1


class ValidatingListEmbeddingModel(Document):
    embedded_list = fields.EmbeddedDocumentListField(ValidatingEmbeddedModel)


class ValidatingListSerializer(DocumentSerializer):
    class Meta:
        model = ValidatingListEmbeddingModel
        fields = '__all__'
        depth = 1


class TestEmbeddedValidation(TestCase):
    """Embedded-document validation errors should surface on the serializer
    under the embedded field's key."""

    def test_validation_failing(self):
        serializer = ValidatingSerializer(data={'embedded': {'text': 'Fo'}})
        assert not serializer.is_valid()
        assert 'embedded' in serializer.errors
        assert 'text' in serializer.errors['embedded']

    def test_validation_passing(self):
        serializer = ValidatingSerializer(data={'embedded': {'text': 'Text'}})
        assert serializer.is_valid(), serializer.errors

    def test_nested_validation_failing(self):
        serializer = ValidatingListSerializer(data={'embedded_list': [{'text': 'Fo'}]})
        assert not serializer.is_valid()
        assert 'embedded_list' in serializer.errors
        assert 'text' in serializer.errors['embedded_list']

    def test_nested_validation_passing(self):
        serializer = ValidatingListSerializer(data={'embedded_list': [{'text': 'Text'}]})
        assert serializer.is_valid(), serializer.errors
|
unknown
|
codeparrot/codeparrot-clean
| ||
# How to publish JPS locally to consume it in IntelliJ IDEA
You can use `installJps` task or run:
```shell
cd ..
./gradlew installJps && \
echo Finished successfully
```
|
unknown
|
github
|
https://github.com/JetBrains/kotlin
|
jps/README.md
|
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
# -*- test-case-name: twisted.test.test_journal -*-
"""Logging that uses pickles.
TODO: add log that logs to a file.
"""
# twisted imports
from twisted.persisted import dirdbm
from twisted.internet import defer
from zope.interface import implements
# sibling imports
import base
class DirDBMLog:
    """Log pickles to DirDBM directory.

    Entries are stored under stringified, monotonically increasing integer
    keys so the log can be replayed in order after a restart.
    """

    implements(base.ICommandLog)

    def __init__(self, logPath):
        self.db = dirdbm.Shelf(logPath)
        # Recover the highest index already on disk so new entries continue
        # the sequence.  A list comprehension is used instead of map():
        # under Python 3, map() returns a lazy iterator that is always
        # truthy, which would break the emptiness check below.
        indexes = [int(key) for key in self.db.keys()]
        if indexes:
            self.currentIndex = max(indexes)
        else:
            self.currentIndex = 0

    def logCommand(self, command, time):
        """Log a command.

        Stores a (time, command) tuple under the next index and returns an
        already-fired Deferred, matching the asynchronous ICommandLog shape.
        """
        self.currentIndex += 1
        self.db[str(self.currentIndex)] = (time, command)
        return defer.succeed(1)

    def getCurrentIndex(self):
        """Return index of last command logged."""
        return self.currentIndex

    def getCommandsSince(self, index):
        """Return the list of (time, command) tuples logged at positions
        `index` through the current index, inclusive."""
        result = []
        for i in range(index, self.currentIndex + 1):
            result.append(self.db[str(i)])
        return result
|
unknown
|
codeparrot/codeparrot-clean
| ||
from flask import request
from flask_restful import Resource
from flask_restful.reqparse import RequestParser
from blueprints.api import require_api_key, register_api_access_token
from blueprints.base import rest_api
from blueprints.auth.util import check_authenticated_ip, add_authenticated_ip
add_api_username_verification_token = 'api.auth.add_ip_username_verification'
class IpUsernameVerification(Resource):
    """REST resource for checking and recording which IPs are authenticated
    for a given username/uuid pair."""

    get_parser = RequestParser()
    get_parser.add_argument("username", type=str)
    get_parser.add_argument("uuid", type=str)

    def get(self):
        # Checks the *requesting* IP (request.remote_addr) against the
        # supplied username and/or uuid; both query args are optional.
        args = self.get_parser.parse_args()
        result = check_authenticated_ip(request.remote_addr, username=args.get("username"), uuid=args.get("uuid"))
        return {'verified': result is not None}

    put_parser = RequestParser()
    put_parser.add_argument("username", type=str, required=True)
    put_parser.add_argument("ip", type=str, required=True)
    put_parser.add_argument("uuid", type=str, required=True)

    @require_api_key(required_access_tokens=[add_api_username_verification_token])
    def put(self):
        # Records an authenticated IP for the username/uuid; only callable
        # with an API key carrying the access token registered below.
        args = self.put_parser.parse_args()
        add_authenticated_ip(args.get("username"), args.get("uuid"), args.get("ip"))
        return {'success': True}


rest_api.add_resource(IpUsernameVerification, '/auth/ip_username_verification')
register_api_access_token(add_api_username_verification_token,
                          """Authenticates a username with an ip address. This would enable the user to register from that IP.""", permission="api.auth.add_ip_username_verification")
|
unknown
|
codeparrot/codeparrot-clean
| ||
import { test } from '../../test';

// Runtime test config: initial render shows `x` in a span; after mutating
// `x` the DOM must update accordingly (`y` appears unused by this sample).
export default test({
	get props() {
		return { x: 1, y: false };
	},

	html: `
		<span>1</span>
	`,

	test({ assert, component, target }) {
		component.x = 2;
		assert.htmlEqual(
			target.innerHTML,
			`
				<span>2</span>
			`
		);
	}
});
|
javascript
|
github
|
https://github.com/sveltejs/svelte
|
packages/svelte/tests/runtime-legacy/samples/if-block-else-partial-outro/_config.js
|
% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
**Example**
```esql
ROW a=1.8
| EVAL a=FLOOR(a)
```
| a:double |
| --- |
| 1 |
|
unknown
|
github
|
https://github.com/elastic/elasticsearch
|
docs/reference/query-languages/esql/_snippets/functions/examples/floor.md
|
#!/usr/bin/env python
#
# $Id: _psosx.py 794 2010-11-12 13:29:52Z g.rodola $
#
import errno
import os
try:
from collections import namedtuple
except ImportError:
from psutil.compat import namedtuple # python < 2.6
import _psutil_osx
import _psposix
from psutil.error import AccessDenied, NoSuchProcess
# --- constants

NUM_CPUS = _psutil_osx.get_num_cpus()
# Sampled once at import time; physical memory size does not change.
TOTAL_PHYMEM = _psutil_osx.get_total_phymem()

# --- functions

def avail_phymem():
    "Return the amount of physical memory available on the system, in bytes."
    return _psutil_osx.get_avail_phymem()


def used_phymem():
    "Return the amount of physical memory currently in use on the system, in bytes."
    return TOTAL_PHYMEM - _psutil_osx.get_avail_phymem()


def total_virtmem():
    "Return the amount of total virtual memory available on the system, in bytes."
    return _psutil_osx.get_total_virtmem()


def avail_virtmem():
    "Return the amount of virtual memory currently available on the system, in bytes."
    return _psutil_osx.get_avail_virtmem()


def used_virtmem():
    """Return the amount of used memory currently in use on the system, in bytes."""
    return _psutil_osx.get_total_virtmem() - _psutil_osx.get_avail_virtmem()


def get_system_cpu_times():
    """Return a dict representing the following CPU times:
    user, nice, system, idle."""
    values = _psutil_osx.get_system_cpu_times()
    return dict(user=values[0], nice=values[1], system=values[2], idle=values[3])


def get_pid_list():
    """Return a list of PIDs currently running on the system."""
    return _psutil_osx.get_pid_list()


def pid_exists(pid):
    """Check for the existence of a unix pid."""
    return _psposix.pid_exists(pid)
# --- decorator

def wrap_exceptions(callable):
    """Call callable into a try/except clause so that if an
    OSError EPERM exception is raised we translate it into
    psutil.AccessDenied.

    ESRCH (process vanished between lookup and syscall) is translated into
    NoSuchProcess; any other OSError is re-raised unchanged.
    """
    def wrapper(self, *args, **kwargs):
        try:
            return callable(self, *args, **kwargs)
        # Python 2 "except E, err" syntax is kept deliberately: the module
        # still supports python < 2.6 (see the namedtuple fallback above).
        except OSError, err:
            if err.errno == errno.ESRCH:
                raise NoSuchProcess(self.pid, self._process_name)
            if err.errno in (errno.EPERM, errno.EACCES):
                raise AccessDenied(self.pid, self._process_name)
            raise
    return wrapper
class OSXProcess(object):
    """Wrapper class around underlying C implementation."""

    # Return types for get_memory_info() / get_cpu_times().
    _meminfo_ntuple = namedtuple('meminfo', 'rss vms')
    _cputimes_ntuple = namedtuple('cputimes', 'user system')
    # Keep instances lightweight; only these attributes are allowed.
    __slots__ = ["pid", "_process_name"]

    def __init__(self, pid):
        self.pid = pid
        self._process_name = None

    @wrap_exceptions
    def get_process_name(self):
        """Return process name as a string of limited len (15)."""
        return _psutil_osx.get_process_name(self.pid)

    def get_process_exe(self):
        # no such thing as "exe" on OS X; it will maybe be determined
        # later from cmdline[0]
        if not pid_exists(self.pid):
            raise NoSuchProcess(self.pid, self._process_name)
        return ""

    @wrap_exceptions
    def get_process_cmdline(self):
        """Return process cmdline as a list of arguments."""
        if not pid_exists(self.pid):
            raise NoSuchProcess(self.pid, self._process_name)
        return _psutil_osx.get_process_cmdline(self.pid)

    @wrap_exceptions
    def get_process_ppid(self):
        """Return process parent pid."""
        return _psutil_osx.get_process_ppid(self.pid)

    @wrap_exceptions
    def get_process_uid(self):
        """Return process real user id."""
        return _psutil_osx.get_process_uid(self.pid)

    @wrap_exceptions
    def get_process_gid(self):
        """Return process real group id."""
        return _psutil_osx.get_process_gid(self.pid)

    @wrap_exceptions
    def get_memory_info(self):
        """Return a tuple with the process' RSS and VMS size."""
        rss, vms = _psutil_osx.get_memory_info(self.pid)
        return self._meminfo_ntuple(rss, vms)

    @wrap_exceptions
    def get_cpu_times(self):
        """Return a (user, system) namedtuple of CPU times for the process."""
        user, system = _psutil_osx.get_cpu_times(self.pid)
        return self._cputimes_ntuple(user, system)

    @wrap_exceptions
    def get_process_create_time(self):
        """Return the start time of the process as a number of seconds since
        the epoch."""
        return _psutil_osx.get_process_create_time(self.pid)

    @wrap_exceptions
    def get_process_num_threads(self):
        """Return the number of threads belonging to the process."""
        return _psutil_osx.get_process_num_threads(self.pid)

    def get_open_files(self):
        """Return files opened by process by parsing lsof output."""
        lsof = _psposix.LsofParser(self.pid, self._process_name)
        return lsof.get_process_open_files()

    def get_connections(self):
        """Return network connections opened by a process as a list of
        namedtuples."""
        lsof = _psposix.LsofParser(self.pid, self._process_name)
        return lsof.get_process_connections()


# Platform-specific process class exported to the psutil package.
PlatformProcess = OSXProcess
|
unknown
|
codeparrot/codeparrot-clean
| ||
# 🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
# This file was automatically generated from src/transformers/models/falcon_h1/modular_falcon_h1.py.
# Do NOT edit this file manually as any edits will be overwritten by the generation of
# the file from the modular. If any change should be done, please apply the change to the
# modular_falcon_h1.py file directly. One of our CI enforces this.
# 🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
# Copyright 2025 Technology Innovation Institute and the HuggingFace Inc. team. All rights reserved.
#
# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX
# and OPT implementations in this library. It has been modified from its
# original forms to accommodate minor architectural differences compared
# to GPT-NeoX and OPT used by the Meta AI team that trained the model.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections.abc import Callable
from typing import Any, Optional
import torch
import torch.nn.functional as F
from torch import nn
from transformers.activations import ACT2FN
from ... import initialization as init
from ...cache_utils import Cache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub, use_kernel_func_from_hub, use_kernelized_func
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, is_torchdynamo_compiling, logging
from ...utils.generic import maybe_autocast
from ...utils.import_utils import is_causal_conv1d_available, is_mamba_2_ssm_available
from .configuration_falcon_h1 import FalconH1Config
if is_mamba_2_ssm_available():
from mamba_ssm.ops.triton.selective_state_update import selective_state_update
from mamba_ssm.ops.triton.ssd_combined import mamba_chunk_scan_combined, mamba_split_conv1d_scan_combined
else:
selective_state_update = None
if is_causal_conv1d_available():
from causal_conv1d import causal_conv1d_fn, causal_conv1d_update
else:
causal_conv1d_update, causal_conv1d_fn = None, None
logger = logging.get_logger(__name__)
class FalconHybridMambaAttentionDynamicCache:
"""
A dynamic cache that can handle both the attention cache (which has a seq_len dimension) and the mamba cache
(which has a constant shape regardless of seq_len).
This cache has two sets of lists of tensors: `key_cache` and `value_cache` for attention cache and `conv_states`
and `ssm_states` for mamba cache. Each of these lists has `num_layers` tensors. The expected shape for each tensor
For attention layers, `key_cache` and `value_cache` have a shape of `(batch_size, num_heads, seq_len, head_dim)`,
while `conv_states` and `ssm_states` have a shape of `(batch_size, 0)` (empty tensors).
For mamba layers, `key_cache` and `value_cache` have a shape of `(batch_size, 0)` (empty tensors),
while `conv_states` represents the convolution state and has a shape of `(batch_size, d_inner, d_conv)`,
and `ssm_states` represents the ssm state and has a shape of `(batch_size, d_inner, d_state)`.
"""
    # Presumably read by generation utilities: this cache mixes per-layer
    # attention and mamba states and is not compile-friendly.
    is_compileable = False

    def __init__(
        self,
        config: FalconH1Config,
        batch_size: int,
        dtype: torch.dtype = torch.float16,
        devices: list[str] | None = None,
    ):
        # Number of tokens already consumed by previous forward passes.
        self.seqlen_offset = 0
        self.dtype = dtype
        # Becomes True once a prefill pass has populated the mamba states.
        self.has_previous_state = False
        self.conv_kernel_size = config.mamba_d_conv
        # Mamba inner width: explicit `mamba_d_ssm` wins; otherwise derived
        # from the expansion factor times the hidden size.
        self.intermediate_size = (
            config.mamba_d_ssm if config.mamba_d_ssm is not None else int(config.mamba_expand * config.hidden_size)
        )
        # Per-layer rolling convolution state:
        # (batch, intermediate + 2 * n_groups * d_state, conv_kernel).
        # NOTE(review): `devices` defaults to None but is indexed per layer
        # below (`devices[i]`) -- callers appear to always pass a per-layer
        # device list; confirm.
        self.conv_states = {
            i: torch.zeros(
                batch_size,
                self.intermediate_size + 2 * config.mamba_n_groups * config.mamba_d_state,
                self.conv_kernel_size,
                device=devices[i],
                dtype=dtype,
            )
            for i in range(config.num_hidden_layers)
        }
        # Per-layer SSM recurrent state: (batch, n_heads, d_head, d_state).
        self.ssm_states = {
            i: torch.zeros(
                batch_size,
                config.mamba_n_heads,
                config.mamba_d_head,
                config.mamba_d_state,
                device=devices[i],
                dtype=dtype,
            )
            for i in range(config.num_hidden_layers)
        }
        # Every layer is treated as a transformer (attention) layer here.
        self.transformer_layers = []
        for i in range(config.num_hidden_layers):
            self.transformer_layers.append(i)
        # Attention KV caches are appended lazily, one tensor per layer.
        self.key_cache: list[torch.Tensor] = []
        self.value_cache: list[torch.Tensor] = []
def __len__(self):
return len(self.key_cache)
def __getitem__(self, layer_idx):
return self.key_cache[layer_idx], self.value_cache[layer_idx]
def update(
self,
key_states: torch.Tensor,
value_states: torch.Tensor,
layer_idx: int,
cache_kwargs: dict[str, Any] | None = None,
) -> tuple[torch.Tensor, torch.Tensor]:
"""
Updates the cache with the new `key_states` and `value_states` for the layer `layer_idx`.
Parameters:
key_states (`torch.Tensor`):
The new key states to cache.
value_states (`torch.Tensor`):
The new value states to cache.
layer_idx (`int`):
The index of the layer to cache the states for.
cache_kwargs (`dict[str, Any]`, `optional`):
Additional arguments for the cache subclass. No additional arguments are used in `DynamicCache`.
Return:
A tuple containing the updated key and value states.
"""
# Update the cache
if len(self.key_cache) <= layer_idx:
# There may be skipped layers, fill them with empty lists
for _ in range(len(self.key_cache), layer_idx):
self.key_cache.append([])
self.value_cache.append([])
self.key_cache.append(key_states)
self.value_cache.append(value_states)
elif len(self.key_cache[layer_idx]) == 0: # fills previously skipped layers; checking for tensor causes errors
self.key_cache[layer_idx] = key_states
self.value_cache[layer_idx] = value_states
else:
self.key_cache[layer_idx] = torch.cat([self.key_cache[layer_idx], key_states], dim=-2)
self.value_cache[layer_idx] = torch.cat([self.value_cache[layer_idx], value_states], dim=-2)
return self.key_cache[layer_idx], self.value_cache[layer_idx]
def reorder_cache(self, beam_idx: torch.LongTensor):
"""Reorders the cache for beam search, given the selected beam indices."""
if self.get_seq_length() > 0:
for layer_idx in range(len(self.key_cache)):
device = self.key_cache[layer_idx].device
self.key_cache[layer_idx] = self.key_cache[layer_idx].index_select(0, beam_idx.to(device))
device = self.value_cache[layer_idx].device
self.value_cache[layer_idx] = self.value_cache[layer_idx].index_select(0, beam_idx.to(device))
device = self.conv_states[layer_idx].device
self.conv_states[layer_idx] = self.conv_states[layer_idx].index_select(0, beam_idx.to(device))
device = self.ssm_states[layer_idx].device
self.ssm_states[layer_idx] = self.ssm_states[layer_idx].index_select(0, beam_idx.to(device))
def get_mask_sizes(self, cache_position: torch.Tensor, layer_idx: int) -> tuple[int, int]:
"""Return the length and offset of the cache, used to generate the mask"""
kv_offset = 0
query_length = cache_position.shape[0]
kv_length = self.get_seq_length(layer_idx) + query_length
return kv_length, kv_offset
def get_seq_length(self, layer_idx: int | None = 0) -> int:
"""Returns the sequence length of the cached states. A layer index can be optionally passed."""
# take any layer that contains cache and not empty tensor
layer_idx = self.transformer_layers[0] if layer_idx not in self.transformer_layers else layer_idx
if len(self.key_cache) <= layer_idx or self.key_cache[layer_idx].shape[-1] == 0:
return 0
return self.key_cache[layer_idx].shape[-2]
def update_conv_state(
self,
layer_idx: int,
new_conv_state: torch.Tensor,
cache_position: torch.LongTensor,
) -> torch.Tensor:
conv_state = self.conv_states[layer_idx]
cache_position = cache_position.clamp(0, self.conv_kernel_size - 1)
conv_state = conv_state.roll(shifts=-1, dims=-1)
if len(cache_position) > 1:
conv_state[:, :, :] = new_conv_state.to(conv_state.device)
else:
conv_state[:, :, -1] = new_conv_state[:, :, -1].to(conv_state.device)
self.conv_states[layer_idx].zero_()
self.conv_states[layer_idx] += conv_state
return self.conv_states[layer_idx]
def reset(self):
self.conv_states.zero_()
self.ssm_states.zero_()
class FalconH1RotaryEmbedding(nn.Module):
    """Rotary position embedding for FalconH1; emits per-position cos/sin tables for `apply_rotary_pos_emb`."""

    inv_freq: torch.Tensor  # fix linting for `register_buffer`

    def __init__(self, config: FalconH1Config, device=None):
        super().__init__()
        self.config = config
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings
        self.rope_type = self.config.rope_parameters["rope_type"]

        # Select the frequency initializer: the local default, or a registered variant for scaled RoPE types.
        if self.rope_type == "default":
            init_fn: Callable = self.compute_default_rope_parameters
        else:
            init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
        inv_freq, self.attention_scaling = init_fn(self.config, device)

        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.register_buffer("original_inv_freq", inv_freq.clone(), persistent=False)

    @staticmethod
    def compute_default_rope_parameters(
        config: FalconH1Config | None = None,
        device: Optional["torch.device"] = None,
        seq_len: int | None = None,
    ) -> tuple["torch.Tensor", float]:
        """
        Computes the inverse frequencies according to the original RoPE implementation

        Args:
            config ([`~transformers.PreTrainedConfig`]):
                The model configuration.
            device (`torch.device`):
                The device to use for initialization of the inverse frequencies.
            seq_len (`int`, *optional*):
                The current sequence length. Unused for this type of RoPE.

        Returns:
            Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
            post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
        """
        base = config.rope_parameters["rope_theta"]
        dim = getattr(config, "head_dim", None) or config.hidden_size // config.num_attention_heads
        attention_factor = 1.0  # Unused in this type of RoPE

        # inv_freq[j] = 1 / base^(2j / dim) for the even channel indices.
        exponents = torch.arange(0, dim, 2, dtype=torch.int64).to(device=device, dtype=torch.float) / dim
        inv_freq = 1.0 / (base**exponents)
        return inv_freq, attention_factor

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        batch = position_ids.shape[0]
        freq_col = self.inv_freq[None, :, None].float().expand(batch, -1, 1).to(x.device)
        pos_row = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with maybe_autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (freq_col.float() @ pos_row.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)
def rotate_half(x):
    """Rotates half the hidden dims of the input: (x1, x2) -> (-x2, x1) along the last dimension."""
    half = x.shape[-1] // 2
    first, second = x[..., :half], x[..., half:]
    return torch.cat((-second, first), dim=-1)
@use_kernel_func_from_hub("rotary_pos_emb")
def apply_rotary_pos_emb(q, k, cos, sin, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            Dimension along which `cos` and `sin` are unsqueezed so they broadcast against `q` and `k`.
            Use 1 when q/k are [batch, heads, seq_len, head_dim]; use 2 when they are
            [batch, seq_len, heads, head_dim].

    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos_b = cos.unsqueeze(unsqueeze_dim)
    sin_b = sin.unsqueeze(unsqueeze_dim)
    rotated_q = q * cos_b + rotate_half(q) * sin_b
    rotated_k = k * cos_b + rotate_half(k) * sin_b
    return rotated_q, rotated_k
def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    bsz, n_kv_heads, seq_len, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    expanded = hidden_states.unsqueeze(2).expand(bsz, n_kv_heads, n_rep, seq_len, head_dim)
    return expanded.reshape(bsz, n_kv_heads * n_rep, seq_len, head_dim)
def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: torch.Tensor | None,
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    """Reference (non-fused) scaled dot-product attention with GQA key/value head repetition."""
    # Expand grouped KV heads so they line up with the query heads.
    keys = repeat_kv(key, module.num_key_value_groups)
    values = repeat_kv(value, module.num_key_value_groups)

    scores = torch.matmul(query, keys.transpose(2, 3)) * scaling
    if attention_mask is not None:
        scores = scores + attention_mask

    # Softmax in float32 for numerical stability, then cast back to the query dtype.
    probs = nn.functional.softmax(scores, dim=-1, dtype=torch.float32).to(query.dtype)
    probs = nn.functional.dropout(probs, p=dropout, training=module.training)

    context = torch.matmul(probs, values)
    context = context.transpose(1, 2).contiguous()
    return context, probs
@use_kernelized_func(apply_rotary_pos_emb)
class FalconH1Attention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: FalconH1Config, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        # Falls back to hidden_size // num_attention_heads when the config has no explicit head_dim.
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        # GQA: number of query heads sharing each key/value head.
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True
        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        # FalconH1-specific scalar multiplied into the key projections in forward().
        self.key_multiplier = config.key_multiplier

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: torch.Tensor | None,
        past_key_values: Cache | None = None,
        cache_position: torch.LongTensor | None = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, torch.Tensor | None]:
        # Project to (batch, heads, seq_len, head_dim); keys are scaled by key_multiplier before RoPE.
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)
        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2) * self.key_multiplier
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)
        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)
        # Dispatch to the configured attention backend (eager, sdpa, flash...), defaulting to eager.
        attention_interface: Callable = ALL_ATTENTION_FUNCTIONS.get_interface(
            self.config._attn_implementation, eager_attention_forward
        )
        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )
        # Merge heads back and project to the model dimension.
        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights
class FalconH1RMSNormGated(torch.nn.Module):
    """Grouped RMSNorm with an optional SiLU gate, applied before or after normalization.

    Accepts 2D `(batch, dim)` or 3D `(batch, seq_len, dim)` inputs; normalization is computed per
    group of `dim // n_groups` channels in float32 and cast back to the input dtype.
    """

    def __init__(self, hidden_size, eps=1e-6, n_groups=1, norm_before_gate=True):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps
        self.n_groups = n_groups
        self.norm_before_gate = norm_before_gate

    def forward(self, hidden_states, gate=None):
        input_dtype = hidden_states.dtype

        # Gate first when norm_before_gate is disabled.
        if gate is not None and not self.norm_before_gate:
            hidden_states = hidden_states * F.silu(gate.to(torch.float32))

        if len(hidden_states.shape) == 3:
            batch_size, seq_len, dim = hidden_states.shape
        else:
            batch_size, dim = hidden_states.shape
            seq_len = 1

        group_dim = int(dim // self.n_groups)
        grouped = hidden_states.to(torch.float32).view(batch_size, seq_len, self.n_groups, group_dim)
        variance = grouped.pow(2).mean(-1, keepdim=True)
        normed = grouped * torch.rsqrt(variance + self.variance_epsilon)
        normed = self.weight.view(self.n_groups, group_dim) * normed
        hidden_states = normed.view(batch_size, seq_len, dim)

        # Restore a 2D output for 2D inputs.
        if seq_len == 1:
            hidden_states = hidden_states.squeeze(1)

        # Gate last in the default configuration.
        if gate is not None and self.norm_before_gate:
            hidden_states = hidden_states * F.silu(gate.to(torch.float32))

        return hidden_states.to(input_dtype)
# Helper methods for segment sum computation


def pad_tensor_by_size(input_tensor: torch.Tensor, pad_size: int):
    """
    Padding x tensor with `pad_size` on the seq_len dim (dim=1)

    Assumes that we only have tensors of either size 4 or 3
    """
    # F.pad pairs run from the last dimension backwards; only dim=1 gets trailing padding.
    if len(input_tensor.shape) == 4:
        pad_shape = (0, 0, 0, 0, 0, pad_size, 0, 0)
    else:
        pad_shape = (0, 0, 0, pad_size, 0, 0)
    return torch.nn.functional.pad(input_tensor, pad_shape, mode="constant", value=0)
def reshape_into_chunks(input_tensor, pad_size, chunk_size):
    """
    Padding input_tensor with `pad_size` on the seq_len dim (dim=1) and
    simultaneously splitting it into chunk sequences.

    Assumes that we only have tensors of either size 4 or 3
    """
    # [bsz, seq_len, ...] -> [bsz, seq_len multiple of chunk_size, ...]
    padded = pad_tensor_by_size(input_tensor, pad_size)

    if len(padded.shape) == 3:
        # [bsz, padded_len, num_heads] -> [bsz, -1, chunk_size, num_heads]
        return padded.reshape(padded.shape[0], -1, chunk_size, padded.shape[2])

    # [bsz, padded_len, num_heads, head_dim or state_size]
    #   -> [bsz, -1, chunk_size, num_heads, head_dim or state_size]
    return padded.reshape(padded.shape[0], -1, chunk_size, padded.shape[2], padded.shape[3])
def segment_sum(input_tensor):
    """
    More stable segment sum calculation. Uses cumulative sums and masking instead of direct subtractions.

    Output[..., i, j] holds sum(x[j+1..i]) for i >= j, and -inf above the diagonal.
    """
    chunk_size = input_tensor.size(-1)

    # Broadcast the last dimension into a (chunk_size, chunk_size) grid: element [i, j] = x[i].
    repeated = input_tensor[..., None].expand(*input_tensor.size(), chunk_size)

    # Strictly-lower-triangular mask zeroes out entries on or above the diagonal before the cumsum.
    strict_lower = torch.tril(
        torch.ones(chunk_size, chunk_size, device=input_tensor.device, dtype=torch.bool), diagonal=-1
    )
    masked = repeated.masked_fill(~strict_lower, 0)

    # Cumulative sum over the row index produces the segment sums.
    cumulative = torch.cumsum(masked, dim=-2)

    # Keep the lower triangle (including the diagonal); everything else becomes -inf.
    keep = torch.tril(torch.ones(chunk_size, chunk_size, device=input_tensor.device, dtype=torch.bool), diagonal=0)
    return cumulative.masked_fill(~keep, -torch.inf)
def apply_mask_to_padding_states(hidden_states, attention_mask):
    """
    Tunes out the hidden states for padding tokens, see https://github.com/state-spaces/mamba/issues/66
    """
    # NOTE: attention mask is a 2D boolean tensor; skip masking for single-row/single-column masks.
    if attention_mask is None:
        return hidden_states
    if attention_mask.shape[0] > 1 and attention_mask.shape[1] > 1:
        dtype = hidden_states.dtype
        hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype)
    return hidden_states
is_fast_path_available = all((selective_state_update, causal_conv1d_fn, causal_conv1d_update))
# Adapted from transformers.models.mamba2.modeling_mamba2.Mamba2Mixer
class FalconH1Mixer(nn.Module):
"""
FalconH1Mixer is identical to classic Mamba2 mixer classes but differs on two different things
- Users can pass custom intermediate_size through `config.mamba_d_ssm`
- The use of gated RMS normalization layer is optional
"""
def __init__(self, config: FalconH1Config, layer_idx: int):
super().__init__()
self.num_heads = config.mamba_n_heads
self.hidden_size = config.hidden_size
self.ssm_state_size = config.mamba_d_state
self.conv_kernel_size = config.mamba_d_conv
self.intermediate_size = (
int(config.mamba_expand * self.hidden_size) if config.mamba_d_ssm is None else config.mamba_d_ssm
)
self.layer_idx = layer_idx
self.use_conv_bias = config.mamba_conv_bias
self.activation = config.hidden_act
self.act = ACT2FN[config.hidden_act]
self.use_bias = config.mamba_proj_bias
self.layer_norm_epsilon = config.rms_norm_eps
self.groups_time_state_size = config.mamba_n_groups * self.ssm_state_size
self.n_groups = config.mamba_n_groups
self.head_dim = config.mamba_d_head
self.chunk_size = config.mamba_chunk_size
self.time_step_limit = config.time_step_limit
self.time_step_min = config.time_step_min
self.time_step_max = config.time_step_max
self.conv_dim = self.intermediate_size + 2 * self.n_groups * self.ssm_state_size
self.conv1d = nn.Conv1d(
in_channels=self.conv_dim,
out_channels=self.conv_dim,
bias=config.mamba_conv_bias,
kernel_size=self.conv_kernel_size,
groups=self.conv_dim,
padding=self.conv_kernel_size - 1,
)
# projection of the input hidden states
projection_size = self.intermediate_size + self.conv_dim + self.num_heads
self.in_proj = nn.Linear(
self.hidden_size,
projection_size,
bias=self.use_bias,
)
# selective projection used to make dt, B and C input dependant
# time step projection (discretization)
# instantiate once and copy inv_dt in init_weights of PretrainedModel
self.dt_bias = nn.Parameter(torch.ones(self.num_heads))
# S4D real initialization. These are not discretized!
# The core is to load them, compute the discrete states, then write the updated state. Keeps the memory bounded
A = torch.arange(1, self.num_heads + 1)
self.A_log = nn.Parameter(torch.log(A))
self.mamba_rms_norm = config.mamba_rms_norm
if self.mamba_rms_norm:
self.norm = FalconH1RMSNormGated(
self.intermediate_size,
eps=self.layer_norm_epsilon,
n_groups=self.n_groups,
norm_before_gate=config.mamba_norm_before_gate,
)
self.D = nn.Parameter(torch.ones(self.num_heads))
self.out_proj = nn.Linear(self.intermediate_size, config.hidden_size, bias=config.projectors_bias)
if not is_fast_path_available:
logger.warning_once(
"The fast path is not available because one of `(selective_state_update, causal_conv1d_fn, causal_conv1d_update)`"
" is None. Falling back to the naive implementation. To install follow https://github.com/state-spaces/mamba/#installation and"
" https://github.com/Dao-AILab/causal-conv1d"
)
else:
logger.warning_once("The fast path for FalconH1 will be used when running the model on a GPU")
self.zxbcdt_multipliers = config.ssm_multipliers
self.ssm_in_multiplier = config.ssm_in_multiplier
def cuda_kernels_forward(
self,
hidden_states: torch.Tensor,
cache_params: FalconHybridMambaAttentionDynamicCache | None = None,
cache_position: torch.LongTensor | None = None,
attention_mask: torch.Tensor | None = None,
):
# 1. Gated MLP's linear projection
hidden_states = apply_mask_to_padding_states(hidden_states, attention_mask)
# Add Multipliers
hidden_states = hidden_states * self.ssm_in_multiplier
projected_states = self.in_proj(hidden_states)
projected_states = projected_states * self.mup_vector # ADD Mup Multipliers
d_to_remove = 2 * self.intermediate_size + 2 * self.n_groups * self.ssm_state_size + self.num_heads
# Set up dimensions for reshapes later
batch_size, seq_len, _ = hidden_states.shape
groups_time_state_size = self.n_groups * self.ssm_state_size
use_precomputed_states = (
cache_params is not None
and cache_params.has_previous_state
and seq_len == 1
and cache_params.conv_states[self.layer_idx].shape[0]
== cache_params.ssm_states[self.layer_idx].shape[0]
== batch_size
and cache_position is not None
and cache_position[0] > 0
)
# getting projected states from cache if it exists
if use_precomputed_states:
d_mlp = (projected_states.squeeze(1).shape[-1] - d_to_remove) // 2
z0, x0, gate, hidden_states_B_C, dt = projected_states.squeeze(1).split(
[d_mlp, d_mlp, self.intermediate_size, self.conv_dim, self.num_heads], dim=-1
)
# 2. Convolution sequence transformation
hidden_states_B_C = causal_conv1d_update(
hidden_states_B_C,
cache_params.conv_states[self.layer_idx],
self.conv1d.weight.squeeze(1),
self.conv1d.bias,
self.activation,
)
hidden_states, B, C = torch.split(
hidden_states_B_C,
[self.intermediate_size, groups_time_state_size, groups_time_state_size],
dim=-1,
)
# 3. SSM transformation
A = -torch.exp(self.A_log.float()) # (nheads,)
A = A[:, None, ...][:, :, None].expand(-1, self.head_dim, self.ssm_state_size).to(dtype=torch.float32)
dt = dt[:, :, None].expand(-1, -1, self.head_dim)
dt_bias = self.dt_bias[:, None, ...].expand(-1, self.head_dim)
D = self.D[:, None, ...].expand(-1, self.head_dim)
B = B.view(batch_size, self.n_groups, B.shape[1] // self.n_groups)
C = C.view(batch_size, self.n_groups, C.shape[1] // self.n_groups)
hidden_states_reshaped = hidden_states.view(batch_size, self.num_heads, self.head_dim)
hidden_states = selective_state_update(
cache_params.ssm_states[self.layer_idx],
hidden_states_reshaped,
dt,
A,
B,
C,
D,
z=gate.view(batch_size, self.num_heads, self.head_dim) if not self.mamba_rms_norm else None,
dt_bias=dt_bias,
dt_softplus=True,
)
hidden_states = hidden_states.view(batch_size, self.num_heads * self.head_dim)
if self.mamba_rms_norm:
hidden_states = self.norm(hidden_states, gate)
if d_mlp > 0:
hidden_states = torch.cat([F.silu(z0) * x0, hidden_states], dim=-1)
# 4. Final linear projection
out = self.out_proj(hidden_states[:, None, ...])
# Fused calculations or step by step if no initialized cache is found
else:
A = -torch.exp(self.A_log.float()) # (num_heads) or (intermediate_size, state_size)
dt_limit_kwargs = {} if self.time_step_limit == (0.0, float("inf")) else {"dt_limit": self.time_step_limit}
# 2-4. Fused kernel for conv1d, SSM, and the final projection
if self.training and cache_params is None:
out = mamba_split_conv1d_scan_combined(
projected_states,
self.conv1d.weight.squeeze(1),
self.conv1d.bias,
self.dt_bias,
A,
D=self.D,
chunk_size=self.chunk_size,
seq_idx=None, # was seq_idx
activation=self.activation,
rmsnorm_weight=self.norm.weight if self.mamba_rms_norm else None,
rmsnorm_eps=self.norm.variance_epsilon if self.mamba_rms_norm else None,
outproj_weight=self.out_proj.weight,
outproj_bias=self.out_proj.bias,
headdim=self.head_dim,
ngroups=self.n_groups,
norm_before_gate=False,
return_final_states=False,
**dt_limit_kwargs,
)
else:
d_mlp = (
projected_states.shape[-1]
- 2 * self.intermediate_size
- 2 * self.n_groups * self.ssm_state_size
- self.num_heads
) // 2
if attention_mask is not None:
projected_states = projected_states * attention_mask[..., None]
_, gate, hidden_states_B_C, dt = projected_states.split(
[
2 * d_mlp,
self.intermediate_size,
self.conv_dim,
self.num_heads,
],
dim=-1,
)
if cache_params is not None:
conv_states = F.pad(
hidden_states_B_C.permute(0, 2, 1),
(self.conv_kernel_size - hidden_states_B_C.shape[-2], 0),
)
cache_params.update_conv_state(self.layer_idx, conv_states, cache_position)
time_step = nn.functional.softplus(dt + self.dt_bias)
# 1D Convolution
if causal_conv1d_fn is None or self.activation not in ["silu", "swish"]:
hidden_states_B_C = self.act(
self.conv1d(hidden_states_B_C.transpose(1, 2)).transpose(1, 2)[:, :seq_len]
) # (B, L, self.d_inner + 2 * ngroups * d_state)
else:
hidden_states_B_C = causal_conv1d_fn(
x=hidden_states_B_C.transpose(1, 2),
weight=self.conv1d.weight.squeeze(1),
bias=self.conv1d.bias,
activation=self.activation,
).transpose(1, 2)[:, :seq_len]
hidden_states, B, C = torch.split(
hidden_states_B_C,
[
self.intermediate_size,
groups_time_state_size,
groups_time_state_size,
],
dim=-1,
)
if attention_mask is not None and attention_mask.shape[1] > 1 and attention_mask.shape[0] > 1:
# tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66
dtype = hidden_states.dtype
hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype)
# This is a hack to make sure multi-GPU inference works with HF accelerate
# see: https://github.com/Dao-AILab/flash-attention/issues/523 for more details
with torch.cuda.device(hidden_states.device):
scan_output, ssm_state = mamba_chunk_scan_combined(
hidden_states.view(batch_size, seq_len, -1, self.head_dim),
time_step,
A,
B.view(batch_size, seq_len, self.n_groups, -1),
C.view(batch_size, seq_len, self.n_groups, -1),
chunk_size=self.chunk_size,
D=self.D,
z=None,
seq_idx=None,
return_final_states=True,
**dt_limit_kwargs,
)
if ssm_state is not None and cache_params is not None:
cache_params.ssm_states[self.layer_idx].copy_(ssm_state)
scan_output = scan_output.view(batch_size, seq_len, -1)
# Multiply "gate" branch and apply extra normalization layer
if self.mamba_rms_norm:
out = self.norm(scan_output, gate)
else:
out = scan_output * torch.nn.functional.silu(gate)
out = self.out_proj(out)
return out
# fmt: off
def torch_forward(
self,
input_states,
cache_params: FalconHybridMambaAttentionDynamicCache | None = None,
cache_position: torch.LongTensor | None = None,
attention_mask: torch.Tensor | None = None,
):
batch_size, seq_len, _ = input_states.shape
dtype = input_states.dtype
# 1. Gated MLP's linear projection
input_states = apply_mask_to_padding_states(input_states, attention_mask)
# Add Multipliers
input_states = input_states * self.ssm_in_multiplier
projected_states = self.in_proj(input_states)
projected_states = projected_states * self.mup_vector # ADD Mup Multipliers
gate, hidden_states_B_C, dt = projected_states.split([
self.intermediate_size, self.conv_dim, self.num_heads
], dim=-1)
use_precomputed_states = (
cache_params is not None
and cache_params.has_previous_state
and seq_len == 1
and cache_params.conv_states[self.layer_idx].shape[0]
== cache_params.ssm_states[self.layer_idx].shape[0]
== batch_size
and cache_position is not None
and cache_position[0] > 0
)
# 2. Convolution sequence transformation
if use_precomputed_states:
cache_params.conv_states[self.layer_idx] = cache_params.conv_states[self.layer_idx].roll(shifts=-1, dims=-1)
cache_params.conv_states[self.layer_idx][:, :, -1] = hidden_states_B_C[:, 0, :].to(cache_params.conv_states[self.layer_idx].device)
# We need to guarantee that anything regarding the cache is on the same device
conv_states = cache_params.conv_states[self.layer_idx].to(device=self.conv1d.weight.device)
hidden_states_B_C = torch.sum(
conv_states * self.conv1d.weight.squeeze(1), dim=-1
)
if self.use_conv_bias:
hidden_states_B_C = hidden_states_B_C + self.conv1d.bias
hidden_states_B_C = self.act(hidden_states_B_C)
else:
# Init cache
if cache_params is not None:
hidden_states_B_C_transposed = hidden_states_B_C.transpose(1, 2)
conv_states = nn.functional.pad(
hidden_states_B_C_transposed, (self.conv_kernel_size - hidden_states_B_C_transposed.shape[-1], 0)
)
cache_params.conv_states[self.layer_idx].copy_(conv_states)
hidden_states_B_C = self.act(self.conv1d(hidden_states_B_C.transpose(1, 2))[..., :seq_len].transpose(1, 2))
hidden_states_B_C = apply_mask_to_padding_states(hidden_states_B_C, attention_mask)
hidden_states, B, C = torch.split(
hidden_states_B_C,
[self.intermediate_size, self.n_groups * self.ssm_state_size, self.n_groups * self.ssm_state_size],
dim=-1
)
# 3. SSM transformation
A = -torch.exp(self.A_log.float()) # [num_heads]
if use_precomputed_states:
# We need to guarantee that anything regarding the cache is on the same device
cache_device = cache_params.ssm_states[self.layer_idx].device
# Note: there is no need to pad parameter matrices here, as there is just one new token
# for batched generation
dt = dt[:, 0, :][:, None, ...]
dt = dt.transpose(1, 2).expand(batch_size, dt.shape[-1], self.head_dim)
# [num_heads] -> [num_heads, head_dim]
dt_bias = self.dt_bias[..., None].expand(self.dt_bias.shape[0], self.head_dim)
dt = torch.nn.functional.softplus(dt + dt_bias.to(dt.dtype))
dt = torch.clamp(dt, self.time_step_limit[0], self.time_step_limit[1])
A = A[..., None, None].expand(self.num_heads, self.head_dim, self.ssm_state_size).to(dtype=torch.float32)
# [bsz, num_heads, head_dim, state_size]
dA = (torch.exp(dt[..., None] * A)).to(device=cache_device)
# Discretize B
# [bsz, n_groups * state_size] -> [bsz, n_groups, 1, state_size] ->
# -> [bsz, n_groups, group to head repetition factor, state_size] -> [bsz, num_heads, state_size]
B = B.reshape(batch_size, self.n_groups, -1)[..., None, :]
B = B.expand(batch_size, self.n_groups, self.num_heads // self.n_groups, B.shape[-1]).contiguous()
B = B.reshape(batch_size, -1, B.shape[-1])
# [bsz, num_heads, head_dim, state_size]
dB = dt[..., None] * B[..., None, :]
# Discretize x into dB
# [bsz, intermediate_size] -> [bsz, num_heads, head_dim]
hidden_states = hidden_states.reshape(batch_size, -1, self.head_dim)
dBx = (dB * hidden_states[..., None]).to(device=cache_device)
# State calculation
cache_params.ssm_states[self.layer_idx].copy_(
cache_params.ssm_states[self.layer_idx] * dA + dBx
)
# Subsequent output
# [bsz, n_groups * state_size] -> [bsz, num_heads, state_size]
C = C.reshape(batch_size, self.n_groups, -1)[..., None, :]
C = C.expand(batch_size, self.n_groups, self.num_heads // self.n_groups, C.shape[-1]).contiguous()
C = C.reshape(batch_size, -1, C.shape[-1])
# [bsz, num_heads, head_dim]
ssm_states = cache_params.ssm_states[self.layer_idx].to(device=C.device, dtype=C.dtype) # Shape: [b, h, d, n]
# Reshape ssm_states to merge the first two dimensions
ssm_states_reshaped = ssm_states.view(batch_size * self.num_heads, self.head_dim, self.ssm_state_size) # Shape: [b*h, d, n]
C_reshaped = C.view(batch_size * self.num_heads, self.ssm_state_size, 1) # Shape: [b*h, n, 1]
y = torch.bmm(ssm_states_reshaped, C_reshaped)
y = y.view(batch_size, self.num_heads, self.head_dim)
# D skip connection
# [num_heads] -> [num_heads, head_dim]
D = self.D[..., None].expand(self.D.shape[0], self.head_dim)
y = (y + hidden_states * D).to(y.dtype)
# [bsz, num_heads, head_dim] -> [bsz, 1, intermediate_size]
y = y.reshape(batch_size, -1)[:, None, ...]
else:
# begin ssd naive implementation without einsums
dt = nn.functional.softplus(dt + self.dt_bias)
dt = torch.clamp(dt, self.time_step_limit[0], self.time_step_limit[1])
hidden_states = hidden_states.reshape(batch_size, seq_len, -1, self.head_dim).float()
B = B.reshape(batch_size, seq_len, -1, self.ssm_state_size).float()
C = C.reshape(batch_size, seq_len, -1, self.ssm_state_size).float()
B = B.repeat_interleave(self.num_heads // self.n_groups, dim=2, output_size=self.num_heads)
C = C.repeat_interleave(self.num_heads // self.n_groups, dim=2, output_size=self.num_heads)
pad_size = (self.chunk_size - seq_len % self.chunk_size) % self.chunk_size
D_residual = self.D[..., None] * pad_tensor_by_size(hidden_states, pad_size)
# Discretize x and A
hidden_states = hidden_states * dt[..., None]
A = A.to(hidden_states.dtype) * dt
# Rearrange into blocks/chunks
hidden_states, A, B, C = [reshape_into_chunks(t, pad_size, self.chunk_size) for t in (hidden_states, A, B, C)]
# [bsz, -1, chunk_size, num_heads] -> [bsz, num_heads, -1, chunk_size]
A = A.permute(0, 3, 1, 2)
A_cumsum = torch.cumsum(A, dim=-1)
# 1. Compute the output for each intra-chunk (diagonal blocks)
# This is the analog of a causal mask
L = torch.exp(segment_sum(A))
# Contraction of C and B to get G (attention-weights like)
G_intermediate = C[:, :, :, None, :, :] * B[:, :, None, :, :, :] # shape: (b, c, l, s, h, n)
G = G_intermediate.sum(dim=-1) # shape: (b, c, l, s, h)
# Compute M, equivalent to applying attention mask to weights
M_intermediate = G[..., None] * L.permute(0, 2, 3, 4, 1)[..., None]
M = M_intermediate.sum(dim=-1)
# Compute Y_diag (apply to values)
Y_diag = (M[..., None] * hidden_states[:, :, None]).sum(dim=3)
# 2. Compute the state for each intra-chunk
# (right term of low-rank factorization of off-diagonal blocks; B terms)
decay_states = torch.exp(A_cumsum[:, :, :, -1:] - A_cumsum)
B_decay = B * decay_states.permute(0, -2, -1, 1)[..., None]
states = (B_decay[..., None, :] * hidden_states[..., None]).sum(dim=2)
# 3. Compute the inter-chunk SSM recurrence; produces correct SSM states at chunk boundaries
# (middle term of factorization of off-diag blocks; A terms)
if use_precomputed_states:
previous_states = cache_params.ssm_states[self.layer_idx][:, None, ...].to(device=states.device)
else:
previous_states = torch.zeros_like(states[:, :1])
states = torch.cat([previous_states, states], dim=1)
decay_chunk = torch.exp(segment_sum(nn.functional.pad(A_cumsum[:, :, :, -1], (1, 0))))
decay_chunk = decay_chunk.transpose(1, 3)
new_states = (decay_chunk[..., None, None] * states[:, :, None, ...]).sum(dim=1)
states, ssm_state = new_states[:, :-1], new_states[:, -1]
# 4. Compute state -> output conversion per chunk
# (left term of low-rank factorization of off-diagonal blocks; C terms)
state_decay_out = torch.exp(A_cumsum)
C_times_states = (C[..., None, :] * states[:, :, None, ...])
state_decay_out_permuted = state_decay_out.permute(0, 2, 3, 1)
Y_off = (C_times_states.sum(-1) * state_decay_out_permuted[..., None])
# Add output of intra-chunk and inter-chunk terms (diagonal and off-diagonal blocks)
y = Y_diag + Y_off
# [bsz, -1, self.chunk_size, num_heads, head_dim] -> [bsz, (padded) seq_len, num_heads, head_dim]
y = y.reshape(batch_size, -1, self.num_heads, self.head_dim)
y = y + D_residual
# Cutting off padded chunks
if pad_size > 0:
y = y[:, :seq_len, :, :]
y = y.reshape(batch_size, seq_len, -1)
# Init cache
if ssm_state is not None and cache_params is not None:
cache_params.ssm_states[self.layer_idx].copy_(ssm_state)
if self.mamba_rms_norm:
scan_output = self.norm(y, gate)
else:
scan_output = y * torch.nn.functional.silu(gate)
# end ssd naive
# 4. Final linear projection
contextualized_states = self.out_proj(scan_output.to(dtype)) # [batch, seq_len, hidden_size]
return contextualized_states
# fmt: on
    def forward(
        self,
        hidden_states,
        cache_params: FalconHybridMambaAttentionDynamicCache | None = None,
        cache_position: torch.LongTensor | None = None,
        attention_mask: torch.Tensor | None = None,
    ):
        """Dispatch to the fused CUDA-kernel path when available, otherwise run
        the pure-PyTorch reference implementation (``torch_forward``)."""
        # Fast path requires: mamba CUDA kernels installed, weights already on a
        # CUDA device, and no torch.compile/torchdynamo tracing in progress.
        if is_fast_path_available and "cuda" in self.in_proj.weight.device.type and not is_torchdynamo_compiling():
            return self.cuda_kernels_forward(hidden_states, cache_params, cache_position, attention_mask)
        dtype = hidden_states.dtype
        # Zero out pad-token positions before the SSM scan; only needed when
        # real padding can exist (batch > 1 and seq_len > 1).
        if attention_mask is not None and attention_mask.shape[1] > 1 and attention_mask.shape[0] > 1:
            # tune out hidden states for pad tokens, see https://github.com/state-spaces/mamba/issues/66
            hidden_states = (hidden_states * attention_mask[:, :, None]).to(dtype)
        return self.torch_forward(hidden_states, cache_params, cache_position, attention_mask)
class FalconH1MLP(nn.Module):
    """Gated MLP block with FalconH1's MuP gate/down multipliers applied."""

    def __init__(self, config: FalconH1Config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        use_bias = config.mlp_bias
        # Two parallel up projections (gate + value) and one down projection.
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=use_bias)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=use_bias)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=use_bias)
        self.act_fn = ACT2FN[config.hidden_act]
        self.gate_multiplier, self.down_multiplier = config.mlp_multipliers

    def forward(self, x):
        # gate path is scaled before the activation; output after projection.
        gate = self.act_fn(self.gate_proj(x) * self.gate_multiplier)
        projected = self.up_proj(x) * gate
        return self.down_proj(projected) * self.down_multiplier
@use_kernel_forward_from_hub("RMSNorm")
class FalconH1RMSNorm(nn.Module):
    """Root-mean-square LayerNorm (T5-style): scale-only, no mean subtraction."""

    def __init__(self, hidden_size, eps: float = 1e-6) -> None:
        super().__init__()
        # Learnable per-channel scale; RMSNorm has no bias term.
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        original_dtype = hidden_states.dtype
        # Normalize in float32 for numerical stability, then cast back.
        upcast = hidden_states.to(torch.float32)
        mean_square = upcast.pow(2).mean(-1, keepdim=True)
        normalized = upcast * torch.rsqrt(mean_square + self.variance_epsilon)
        return self.weight * normalized.to(original_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"
class FalconH1DecoderLayer(GradientCheckpointingLayer):
    """Parallel-hybrid decoder layer: the FalconH1Mixer (SSM) branch and the
    attention branch both consume the same normalized input, and their scaled
    outputs are summed before the residual connection and the MLP."""
    def __init__(self, config: FalconH1Config, layer_idx: int):
        super().__init__()
        self.feed_forward = FalconH1MLP(config)
        head_dim = config.hidden_size // config.num_attention_heads
        # Total attention projection width: q (heads) plus k and v (kv heads).
        self.channels_attn = config.num_attention_heads * head_dim + 2 * config.num_key_value_heads * head_dim
        self.mamba = FalconH1Mixer(config=config, layer_idx=layer_idx)
        self.self_attn = FalconH1Attention(config, layer_idx)
        # MuP multipliers applied around the two branches.
        self.attention_in_multiplier = config.attention_in_multiplier
        self.ssm_out_multiplier = config.ssm_out_multiplier
        self.attn_out_multiplier = config.attention_out_multiplier
        self.input_layernorm = FalconH1RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.pre_ff_layernorm = FalconH1RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: torch.Tensor | None = None,
        mamba_attention_mask: torch.Tensor | None = None,
        position_ids: torch.LongTensor | None = None,
        past_key_values: FalconHybridMambaAttentionDynamicCache | None = None,
        output_attentions: bool | None = False,
        use_cache: bool | None = False,
        cache_position: torch.LongTensor | None = None,
        position_embeddings: tuple[torch.Tensor, torch.Tensor] | None = None,
        **kwargs,
    ) -> tuple[torch.FloatTensor, tuple[torch.FloatTensor, torch.FloatTensor] | None]:
        """
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*): attention mask of size
                `(batch, sequence_length)` where padding elements are indicated by 0.
            past_key_values (`FalconHybridMambaAttentionDynamicCache`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            cache_position (`torch.LongTensor` of shape `(sequence_length)`, *optional*):
                Indices depicting the position of the input sequence tokens in the sequence.
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
            kwargs (`dict`, *optional*):
                Arbitrary kwargs to be ignored, used for FSDP and other methods that injects code
                into the model
        """
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)
        # SSM branch: uses its own (mamba-specific) padding mask.
        mamba_hidden_states = self.mamba(
            hidden_states=hidden_states,
            cache_params=past_key_values,
            cache_position=cache_position,
            attention_mask=mamba_attention_mask,
        )
        mamba_hidden_states = mamba_hidden_states * self.ssm_out_multiplier
        # Attention branch: runs on the *same* normalized input (parallel hybrid),
        # scaled on the way in by the MuP input multiplier.
        attention_hidden_states, self_attn_weights = self.self_attn(
            hidden_states=hidden_states * self.attention_in_multiplier,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            output_attentions=output_attentions,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        attention_hidden_states = attention_hidden_states * self.attn_out_multiplier
        # Sum the two branches before the residual connection.
        hidden_states = mamba_hidden_states + attention_hidden_states
        # residual connection after attention
        hidden_states = residual + hidden_states
        # feed-forward
        residual = hidden_states
        hidden_states = self.pre_ff_layernorm(hidden_states)
        hidden_states = self.feed_forward(hidden_states)
        hidden_states = residual + hidden_states
        outputs = (hidden_states,)
        if output_attentions:
            outputs += (self_attn_weights,)
        return outputs
def compute_mup_vector(config):
    """
    Computes the MuP vector based on model configuration.

    FalconH1 applies a different MuP multiplier to each section of the mixer's
    fused in-projection (z, x, B, C, dt); the returned vector is partitioned
    accordingly, one multiplier per section.

    Args:
        config: FalconH1Config object

    Returns:
        torch.Tensor: the MuP vector, shape ``(1, 1, 2*d_ssm + 2*groups*d_state + n_heads)``
    """
    # Derive the section widths from the config.
    if config.mamba_d_ssm is not None:
        intermediate_size = config.mamba_d_ssm
    else:
        intermediate_size = int(config.mamba_expand * config.hidden_size)
    groups_time_state_size = config.mamba_n_groups * config.mamba_d_state
    num_heads = config.mamba_n_heads
    multipliers = config.ssm_multipliers

    # Section widths in the order they appear in the fused projection (z, x, B, C, dt).
    section_sizes = [
        intermediate_size,
        intermediate_size,
        groups_time_state_size,
        groups_time_state_size,
        num_heads,
    ]
    mup_vector = torch.ones(1, 1, sum(section_sizes))
    offset = 0
    for size, multiplier in zip(section_sizes, multipliers):
        mup_vector[:, :, offset : offset + size] *= multiplier
        offset += size
    return mup_vector
@auto_docstring
class FalconH1PreTrainedModel(PreTrainedModel):
    config: FalconH1Config
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["FalconH1DecoderLayer"]
    _skip_keys_device_placement = "past_key_values"
    _supports_flash_attn = True
    _supports_sdpa = True
    # Hybrid SSM cache makes the model stateful across generation steps.
    _is_stateful = True
    @torch.no_grad()
    def _init_weights(self, module):
        """Base-class init first, then FalconH1-specific parameter/buffer init."""
        super()._init_weights(module)
        if isinstance(module, FalconH1Mixer):
            # SSM parameters: dt bias and skip connection D start at 1,
            # A_log is log(1..num_heads).
            init.ones_(module.dt_bias)
            init.copy_(module.A_log, torch.log(torch.arange(1, module.num_heads + 1)))
            init.ones_(module.D)
        elif isinstance(module, FalconH1Model):
            # Refill every layer's non-persistent MuP buffer from the config.
            mup_vector = compute_mup_vector(module.config)
            for layer in module.layers:
                init.copy_(layer.mamba.mup_vector, mup_vector)
@auto_docstring
# Adapted from transformers.models.jamba.modeling_jamba.JambaModel
class FalconH1Model(FalconH1PreTrainedModel):
    """Decoder-only FalconH1 backbone: embeddings, a stack of parallel-hybrid
    (SSM + attention) decoder layers, and a final RMSNorm."""
    def __init__(self, config: FalconH1Config):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size
        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        decoder_layers = []
        for i in range(config.num_hidden_layers):
            decoder_layers.append(FalconH1DecoderLayer(config, layer_idx=i))
        self.layers = nn.ModuleList(decoder_layers)
        self._attn_implementation = config._attn_implementation
        self.final_layernorm = FalconH1RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = FalconH1RotaryEmbedding(config=config)
        # MuP multipliers for the embeddings and (read by the CausalLM head) logits.
        self.embedding_multiplier = config.embedding_multiplier
        self.lm_head_multiplier = config.lm_head_multiplier
        self.gradient_checkpointing = False
        # Compute the MuP vector once and register it for all layers
        mup_vector = compute_mup_vector(config)
        for layer in self.layers:
            layer.mamba.register_buffer("mup_vector", mup_vector.clone(), persistent=False)
        # Initialize weights and apply final processing
        self.post_init()
    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: torch.LongTensor | None = None,
        attention_mask: torch.Tensor | None = None,
        position_ids: torch.LongTensor | None = None,
        past_key_values: FalconHybridMambaAttentionDynamicCache | None = None,
        inputs_embeds: torch.FloatTensor | None = None,
        use_cache: bool | None = None,
        output_attentions: bool | None = None,
        output_hidden_states: bool | None = None,
        cache_position: torch.LongTensor | None = None,
        **kwargs,  # NOOP kwargs, for now
    ) -> tuple | BaseModelOutputWithPast:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
        if self.gradient_checkpointing and self.training and use_cache:
            logger.warning_once(
                "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`."
            )
            use_cache = False
        if inputs_embeds is None:
            # MuP: scale embeddings by the configured multiplier.
            inputs_embeds = self.embed_tokens(input_ids) * self.embedding_multiplier
        hidden_states = inputs_embeds
        # The hybrid cache must be built by the caller (generation loop);
        # this model never creates one on its own.
        if use_cache and past_key_values is None:
            logger.warning_once(
                "FalconH1 requires an initialized `FalconHybridMambaAttentionDynamicCache` to return a cache. None was "
                "provided, so no cache will be returned."
            )
        if cache_position is None:
            cache_position = torch.arange(hidden_states.shape[1], device=hidden_states.device)
        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)
        causal_mask = create_causal_mask(
            config=self.config,
            inputs_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )
        # SSM branch gets its own mask (dropped entirely when unnecessary).
        mamba_mask = self._update_mamba_mask(attention_mask, cache_position)
        position_embeddings = self.rotary_emb(hidden_states, position_ids=position_ids)
        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None
        for decoder_layer in self.layers:
            if output_hidden_states:
                all_hidden_states += (hidden_states,)
            layer_outputs = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                mamba_attention_mask=mamba_mask,
                position_ids=position_ids,
                past_key_values=past_key_values,
                output_attentions=output_attentions,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
            )
            hidden_states = layer_outputs[0]
            if output_attentions:
                if layer_outputs[1] is not None:
                    # append attentions only of attention layers. Mamba layers return `None` as the attention weights
                    all_self_attns += (layer_outputs[1],)
        hidden_states = self.final_layernorm(hidden_states)
        # add hidden states from the last decoder layer
        if output_hidden_states:
            all_hidden_states += (hidden_states,)
        # Mark the cache as primed so later steps take the cached-decode path.
        if past_key_values and not past_key_values.has_previous_state:
            past_key_values.has_previous_state = True
        next_cache = None if not use_cache else past_key_values
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=next_cache,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
        )
    def _update_mamba_mask(self, attention_mask, cache_position):
        """
        No need for zeroing states when
            1. Cached forward
            2. Attending to all inputs
        """
        mamba_mask = attention_mask
        if cache_position[0] > 0 or (attention_mask is not None and torch.all(attention_mask == 1)):
            mamba_mask = None
        return mamba_mask
@auto_docstring
class FalconH1ForCausalLM(FalconH1PreTrainedModel, GenerationMixin):
    """FalconH1 backbone plus a (weight-tied) language-modeling head."""
    _tied_weights_keys = {"lm_head.weight": "model.embed_tokens.weight"}
    _tp_plan = {"lm_head": "colwise_gather_output"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}
    def __init__(self, config):
        super().__init__(config)
        self.model = FalconH1Model(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        # Initialize weights and apply final processing
        self.post_init()
    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: torch.LongTensor | None = None,
        attention_mask: torch.Tensor | None = None,
        position_ids: torch.LongTensor | None = None,
        past_key_values: FalconHybridMambaAttentionDynamicCache | None = None,
        inputs_embeds: torch.FloatTensor | None = None,
        labels: torch.LongTensor | None = None,
        use_cache: bool | None = None,
        output_attentions: bool | None = None,
        output_hidden_states: bool | None = None,
        cache_position: torch.LongTensor | None = None,
        logits_to_keep: int | torch.Tensor = 0,
        **kwargs,
    ) -> tuple | CausalLMOutputWithPast:
        r"""
        Example:
        ```python
        >>> from transformers import AutoTokenizer, FalconH1ForCausalLM
        >>> model = FalconH1ForCausalLM.from_pretrained("...")
        >>> tokenizer = AutoTokenizer.from_pretrained("...")
        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")
        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)
        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            **kwargs,
        )
        hidden_states = outputs[0]
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        # MuP: logits are scaled by the model-level lm_head multiplier.
        logits = self.lm_head(hidden_states[:, slice_indices, :]) * self.model.lm_head_multiplier
        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)
        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
    def prepare_inputs_for_generation(
        self,
        input_ids,
        past_key_values=None,
        attention_mask=None,
        inputs_embeds=None,
        cache_position=None,
        position_ids=None,
        use_cache=True,
        is_first_iteration=False,
        **kwargs,
    ):
        # Overwritten -- has a unique cache type, `FalconHybridMambaAttentionDynamicCache`
        if past_key_values is None:
            # Build the hybrid cache up front, placing each layer's SSM/conv
            # state on the same device as that layer's conv weights.
            past_key_values = FalconHybridMambaAttentionDynamicCache(
                self.config,
                input_ids.shape[0],
                self.dtype,
                devices=[
                    self.model.layers[i].mamba.conv1d.weight.device for i in range(self.config.num_hidden_layers)
                ],
            )
            kwargs["logits_to_keep"] = self.config.num_logits_to_keep
        model_inputs = super().prepare_inputs_for_generation(
            input_ids,
            past_key_values=past_key_values,
            attention_mask=attention_mask,
            inputs_embeds=inputs_embeds,
            cache_position=cache_position,
            position_ids=position_ids,
            use_cache=use_cache,
            is_first_iteration=is_first_iteration,
            **kwargs,
        )
        return model_inputs
# Public symbols re-exported by the package's auto-generated module wrappers.
__all__ = ["FalconH1Model", "FalconH1ForCausalLM", "FalconH1PreTrainedModel"]
|
python
|
github
|
https://github.com/huggingface/transformers
|
src/transformers/models/falcon_h1/modeling_falcon_h1.py
|
#!/usr/bin/python
from __future__ import print_function
from bcc import BPF
import sys
import socket
import os
import argparse
import time
import netifaces as ni
from sys import argv
from kafka import KafkaProducer
from kafka.errors import KafkaError
from datetime import datetime
#args
def usage():
    """Print a short usage summary and terminate the process."""
    prog = argv[0]
    print("USAGE: %s [-i <if_name>]" % prog)
    print("")
    print("Try '%s -h' for more options." % prog)
    exit()
#help
def help():
    """Print the full option reference (all flags plus examples) and terminate."""
    prog = argv[0]
    print("USAGE: %s [-i <if_name>][-k <kafka_server_name:kafka_port>]" % prog)
    print("")
    print("optional arguments:")
    print(" -h print this help")
    print(" -i if_name select interface if_name. Default is eth0")
    print(" -k kafka_server_name select kafka server name. Default is save to file")
    print(" If -k option is not specified data will be saved to file.")
    print("")
    print("examples:")
    print(" data-plane-tracing # bind socket to eth0")
    print(" data-plane-tracing -i eno2 -k vc.manage.overcloud:9092 # bind socket to eno2 and send data to kafka server in iovisor-topic.")
    exit()
#arguments
# Defaults: capture on eth0; an empty kafkaserver means "write records to file".
interface="eth0"
kafkaserver=''
#check provided arguments
# Hand-rolled argv handling: -h (help), -i <iface>, -k <server>, or both flags.
if len(argv) == 2:
    if str(argv[1]) == '-h':
        help()
    else:
        usage()
if len(argv) == 3:
    if str(argv[1]) == '-i':
        interface = argv[2]
    elif str(argv[1]) == '-k':
        kafkaserver = argv[2]
    else:
        usage()
if len(argv) == 5:
    # NOTE(review): argv[3] is assumed to be the "other" flag and is never
    # validated -- e.g. `-i eth0 -x foo` is silently accepted. TODO confirm intent.
    if str(argv[1]) == '-i':
        interface = argv[2]
        kafkaserver = argv[4]
    elif str(argv[1]) == '-k':
        kafkaserver = argv[2]
        interface = argv[4]
    else:
        usage()
if len(argv) > 5:
    usage()
print ("binding socket to '%s'" % interface)
#initialize BPF - load source code from http-parse-simple.c
bpf = BPF(src_file = "data-plane-tracing.c", debug = 0)
#load eBPF program http_filter of type SOCKET_FILTER into the kernel eBPF vm
#more info about eBPF program types http://man7.org/linux/man-pages/man2/bpf.2.html
function_vlan_filter = bpf.load_func("vlan_filter", BPF.SOCKET_FILTER)
#create raw socket, bind it to eth0
#attach bpf program to socket created
BPF.attach_raw_socket(function_vlan_filter, interface)
#get file descriptor of the socket previously created inside BPF.attach_raw_socket
socket_fd = function_vlan_filter.sock
#create python socket object, from the file descriptor
sock = socket.fromfd(socket_fd,socket.PF_PACKET,socket.SOCK_RAW,socket.IPPROTO_IP)
#set it as blocking socket
sock.setblocking(True)
#get interface ip address. In case ip is not set then just add 127.0.0.1.
ni.ifaddresses(interface)
try:
    ip = ni.ifaddresses(interface)[ni.AF_INET][0]['addr']
except:
    # Best-effort: interface has no IPv4 address (or netifaces lookup failed),
    # so fall back to loopback to keep the record format intact.
    ip = '127.0.0.1'
print("| Timestamp | Host Name | Host IP | IP Version | Source Host IP | Dest Host IP | Source Host Port | Dest Host Port | VNI | Source VM MAC | Dest VM MAC | VLAN ID | Source VM IP | Dest VM IP | Protocol | Source VM Port | Dest VM Port | Packet Length |")
# ---- loop-invariant setup (hoisted out of the per-packet loop) ----
#ethernet header length
ETH_HLEN = 14
#VXLAN header length
VXLAN_HLEN = 8
#VLAN header length
VLAN_HLEN = 4
#Inner TCP/UDP header length
TCP_HLEN = 20
UDP_HLEN = 8
# FIX: create the Kafka producer once; the original constructed a new
# KafkaProducer (and a new broker connection) for every single packet.
producer = KafkaProducer(bootstrap_servers=[kafkaserver]) if kafkaserver else None
while 1:
    #retrieve raw packet from socket
    packet_str = os.read(socket_fd, 2048)
    #convert packet into bytearray
    packet_bytearray = bytearray(packet_str)
    #calculate packet total length from the outer IP header (big endian)
    total_length = packet_bytearray[ETH_HLEN + 2]  #load MSB
    total_length = total_length << 8  #shift MSB
    total_length = total_length + packet_bytearray[ETH_HLEN + 3]  #add LSB
    #calculate ip header length
    ip_header_length = packet_bytearray[ETH_HLEN]  #load Byte
    ip_header_length = ip_header_length & 0x0F  #mask bits 0..3
    ip_header_length = ip_header_length << 2  #shift to obtain length
    #calculate payload offset (kept for parity with the original; not used below)
    payload_offset = ETH_HLEN + ip_header_length + UDP_HLEN + VXLAN_HLEN
    #parsing ip version from ip packet header
    ipversion = str(bin(packet_bytearray[14])[2:5])
    #parsing source ip address, destination ip address from the outer ip header
    src_host_ip = str(packet_bytearray[26]) + "." + str(packet_bytearray[27]) + "." + str(packet_bytearray[28]) + "." + str(packet_bytearray[29])
    dest_host_ip = str(packet_bytearray[30]) + "." + str(packet_bytearray[31]) + "." + str(packet_bytearray[32]) + "." + str(packet_bytearray[33])
    #parsing outer source port and destination port
    src_host_port = packet_bytearray[34] << 8 | packet_bytearray[35]
    dest_host_port = packet_bytearray[36] << 8 | packet_bytearray[37]
    #parsing VNI from VXLAN header
    #FIX: the VNI is a 24-bit big-endian field; bytes must be shifted into
    #place, not summed as the original code did.
    VNI = str(packet_bytearray[46] << 16 | packet_bytearray[47] << 8 | packet_bytearray[48])
    #parsing inner source mac address and destination mac address
    mac_add = [packet_bytearray[50], packet_bytearray[51], packet_bytearray[52], packet_bytearray[53], packet_bytearray[54], packet_bytearray[55]]
    src_vm_mac = ":".join(map(lambda b: format(b, "02x"), mac_add))
    mac_add = [packet_bytearray[56], packet_bytearray[57], packet_bytearray[58], packet_bytearray[59], packet_bytearray[60], packet_bytearray[61]]
    dest_vm_mac = ":".join(map(lambda b: format(b, "02x"), mac_add))
    #parsing VLANID from VLAN header
    #FIX: the VLAN ID is the low 12 bits of the TCI (the top 4 bits are
    #PCP/DEI); the original summed the two raw bytes instead.
    VLANID = str((packet_bytearray[64] & 0x0F) << 8 | packet_bytearray[65])
    #parsing source vm ip address, destination vm ip address from encapsulated ip packet header
    src_vm_ip = str(packet_bytearray[80]) + "." + str(packet_bytearray[81]) + "." + str(packet_bytearray[82]) + "." + str(packet_bytearray[83])
    dest_vm_ip = str(packet_bytearray[84]) + "." + str(packet_bytearray[85]) + "." + str(packet_bytearray[86]) + "." + str(packet_bytearray[87])
    #parsing inner source port and destination port (TCP=6 / UDP=17 only)
    if (packet_bytearray[77]==6 or packet_bytearray[77]==17):
        #FIX: the source-port LSB is byte 89; the original OR-ed byte 88 with itself.
        src_vm_port = packet_bytearray[88] << 8 | packet_bytearray[89]
        dest_vm_port = packet_bytearray[90] << 8 | packet_bytearray[91]
    elif (packet_bytearray[77]==1):
        #ICMP carries no ports; record -1 and remember the ICMP type byte.
        src_vm_port = -1
        dest_vm_port = -1
        icmp_type = str(packet_bytearray[88])
    else:
        #unsupported inner protocol: skip this packet
        continue
    timestamp = str(datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S.%f'))
    #send data to remote server via Kafka Messaging Bus
    if kafkaserver:
        MESSAGE = (timestamp, socket.gethostname(),ip, str(int(ipversion, 2)), str(src_host_ip), str(dest_host_ip), str(src_host_port), str(dest_host_port), str(int(VNI)), str(src_vm_mac), str(dest_vm_mac), str(int(VLANID)), src_vm_ip, dest_vm_ip, str(packet_bytearray[77]), str(src_vm_port), str(dest_vm_port), str(total_length))
        print (MESSAGE)
        MESSAGE = ','.join(MESSAGE)
        MESSAGE = MESSAGE.encode()
        producer.send('iovisor-topic', key=b'iovisor', value=MESSAGE)
    #save data to files
    else:
        MESSAGE = timestamp+","+socket.gethostname()+","+ip+","+str(int(ipversion, 2))+","+src_host_ip+","+dest_host_ip+","+str(src_host_port)+","+str(dest_host_port)+","+str(int(VNI))+","+str(src_vm_mac)+","+str(dest_vm_mac)+","+str(int(VLANID))+","+src_vm_ip+","+dest_vm_ip+","+str(packet_bytearray[77])+","+str(src_vm_port)+","+str(dest_vm_port)+","+str(total_length)
        print (MESSAGE)
        #save data to a file on hour basis
        filename = "./vlan-data-"+time.strftime("%Y-%m-%d-%H")+"-00"
        with open(filename, "a") as f:
            f.write("%s\n" % MESSAGE)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
import os
import csv
import re
from collections import Counter
# Roots of the private (never-published) and public (anonymized) data trees.
PRIVATE_PATH = "../../data/private/"
PUBLIC_PATH = "../../data/public/"
def buildAttendance():
    """Build the anonymized 2014 attendance CSV.

    Reads the private withheld roster to map (first, last) names to numeric
    IDs, then rewrites the private attendance sheet with names replaced by
    those IDs. Rows whose names are missing from the roster (or too short)
    are reported and skipped.
    """
    # (first, last) -> anonymized uid, from the private roster.
    id_map = dict()
    with open(PRIVATE_PATH + "withheld.csv", "rb") as inp:
        cr = csv.reader(inp)
        next(cr)  # skip the header row
        for row in cr:
            uid, first, last = row[0], row[1], row[2]
            id_map[(first, last)] = uid
    with open(PUBLIC_PATH + "csv/attendance-2014.csv", "wb") as out:
        cw = csv.writer(out)
        cw.writerow(['ID', '6/24/2014', '6/25/2014', '6/26/2014', '6/27/2014', '7/1/2014', '7/2/2014', '7/3/2014', '7/8/2014', '7/9/2014', '7/10/2014', '7/11/2014', '7/15/2014', '7/16/2014', '7/17/2014', '7/18/2014', '7/22/2014', '7/23/2014', '7/24/2014', '7/25/2014', '7/29/2014', '7/30/2014', '7/31/2014', '8/1/2014', '8/5/2014', '8/6/2014', '8/7/2014', '8/8/2014', '8/12/2014', '8/13/2014', '8/14/2014', '8/15/2014'])
        with open(PRIVATE_PATH + "attendance-2014.csv", "rb") as inp:
            cr = csv.reader(inp)
            for row in cr:
                first, last = row[0].lower(), row[1].lower()
                if first and last:
                    try:
                        attendance = [id_map[(first, last)]]
                        for col in range(2, 33):
                            attendance.append(row[col])
                        cw.writerow(attendance)
                    # FIX: catch only the expected failures (name not in the
                    # roster, short row) instead of a bare `except` that hid
                    # every error, including programming mistakes.
                    except (KeyError, IndexError):
                        print("Key Error: " + first + " " + last)
def buildVolunteers():
    """Split the raw volunteer sheets into a private identity file
    (withheld.csv: id, name, email, phone) and a public anonymized file
    (volunteers.csv: id plus demographic columns), deduplicating by name."""
    # Names already emitted; later duplicates (e.g. in the 2013 file) are skipped.
    SEEN = list()
    FILES = ["volunteers-2014.csv", "volunteers-2013.csv"]
    # uid is the shared join key between the private and public outputs.
    uid = 0
    with open(PRIVATE_PATH + "withheld.csv", "wb") as out_private:
        cw_private = csv.writer(out_private)
        cw_private.writerow(["ID", "FIRST", "LAST", "EMAIL", "PHONE"])
        with open(PUBLIC_PATH + "csv/volunteers.csv", "w") as out_public:
            cw_public = csv.writer(out_public)
            cw_public.writerow(["ID", "YEAR", "DOB", "GENDER", "ADDRESS", "CITY", "STATE", "ZIPCODE", "ETHNICITY", "RELIGION", "HIGHSCHOOL", "GRAD_YEAR", "COLLEGE", "CARPOOL", "VOLUNTEERED", "VOLUNTEER_YEARS"])
            for f in FILES:
                # Year comes from the file name ("volunteers-<year>.csv").
                year = f.split("-")[1].split(".")[0]
                with open(PRIVATE_PATH + f, "rU") as inp:
                    cr = csv.reader(inp)
                    cr.next()
                    for row in cr:
                        first, last = row[0].lower(), row[1].lower()
                        email, phone = row[2].lower(), row[5]
                        dob, gender = row[3], row[4].lower()
                        address, city, state, zipcode = row[6].lower(), row[7].lower(), row[8].lower(), row[9]
                        # Strip stray italics markup seen in the ethnicity column.
                        ethnicity, religion = row[10].replace("<i>", "").replace("</i>", ""), row[11]
                        highschool, gradyear, college = row[13].lower(), row[14], row[15].lower()
                        carpool = row[16]
                        # Column 12 is free text like "yes, 2 years"; extract the count.
                        if re.match("^yes", row[12].lower()):
                            past = "Yes"
                            results = re.search("(\d)", row[12].lower())
                            pastYears = results.group(0)
                        else:
                            past, pastYears = "No", 0
                        if (first, last) not in SEEN and first and last:
                            # Drop the house number (start) and apt number (end)
                            # so only the street name is published.
                            address = re.sub('^[0-9]*\S*', '', re.sub('#*[0-9]*$', '', address))
                            SEEN.append((first,last))
                            cw_private.writerow([uid, first, last, email, phone])
                            cw_public.writerow([uid, year, dob, gender, address, city, state, zipcode, ethnicity, religion, highschool, gradyear, college, carpool, past, pastYears,])
                            uid += 1
if __name__ == "__main__":
    # Order matters: buildVolunteers() writes withheld.csv, which
    # buildAttendance() then reads to map names to IDs.
    buildVolunteers()
    buildAttendance()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#~/usr/bin/env python
#-*- coding: utf-8 -*-
import os
import numpy as np
import matplotlib.pyplot as plt
# set global settings
def init_plotting():
    """Apply the project's small-figure matplotlib style.

    Sets global rcParams (figure size, fonts, tick geometry, legend) and
    configures the *current* axes: top/right spines hidden, ticks only on
    the bottom/left. Safe to call more than once.
    """
    plt.rcParams['figure.figsize'] = (4, 3)
    plt.rcParams['font.size'] = 8
    plt.rcParams['font.family'] = 'Helvetica'
    plt.rcParams['axes.labelsize'] = plt.rcParams['font.size']
    plt.rcParams['axes.titlesize'] = 1.5*plt.rcParams['font.size']
    plt.rcParams['legend.fontsize'] = plt.rcParams['font.size']
    plt.rcParams['xtick.labelsize'] = plt.rcParams['font.size']
    plt.rcParams['ytick.labelsize'] = plt.rcParams['font.size']
    # FIX: double the *default* dpi rather than the current value, so repeated
    # calls no longer keep doubling savefig.dpi. Newer matplotlib defaults
    # savefig.dpi to the string 'figure'; in that case leave it untouched.
    default_dpi = plt.rcParamsDefault['savefig.dpi']
    if isinstance(default_dpi, (int, float)):
        plt.rcParams['savefig.dpi'] = 2*default_dpi
    plt.rcParams['xtick.major.size'] = 3
    plt.rcParams['xtick.minor.size'] = 3
    plt.rcParams['xtick.major.width'] = 1
    plt.rcParams['xtick.minor.width'] = 1
    plt.rcParams['ytick.major.size'] = 3
    plt.rcParams['ytick.minor.size'] = 3
    plt.rcParams['ytick.major.width'] = 1
    plt.rcParams['ytick.minor.width'] = 1
    plt.rcParams['legend.frameon'] = True
    plt.rcParams['legend.loc'] = 'best'
    plt.rcParams['axes.linewidth'] = 1
    plt.gca().spines['right'].set_color('none')
    plt.gca().spines['top'].set_color('none')
    plt.gca().xaxis.set_ticks_position('bottom')
    plt.gca().yaxis.set_ticks_position('left')
def _read_conv_csv(path):
    """Read one two-row, space-separated reactant-conversion file as a 2xN array.

    Row 0 is time (seconds), row 1 is conversion, matching how callers index
    the result. Raises AssertionError if the file does not have exactly 2 rows.
    """
    data = []
    with open(path, 'r') as read_in:
        for line in read_in:
            tokens = line.split(' ')
            data.append([float(token) for token in tokens])
    assert len(data) == 2
    return np.array(data)

def load_comparison_data(detailed_model, frag_model1, frag_model2=None,
                         base_dir=os.path.join('..', 'data', 'pdd_chemistry')):
    """Load reactant-conversion curves for a detailed model and 1-2 fragment models.

    Args:
        detailed_model: directory name of the detailed mechanism
            (located under ``base_dir/detailed``).
        frag_model1: directory name of the first fragment model (under base_dir).
        frag_model2: optional second fragment model; when falsy the third
            return value is None (unchanged behavior).
        base_dir: root of the pdd_chemistry data tree. Defaults to the
            original hard-coded '../data/pdd_chemistry'; parameterized so
            tests and alternate layouts can redirect it.

    Returns:
        (detailed, frag1, frag2) as 2xN numpy arrays; frag2 is None when
        frag_model2 is not given.
    """
    # The three files share one format, so one helper replaces the original
    # three copy-pasted read loops.
    v0_data = _read_conv_csv(os.path.join(base_dir, 'detailed', detailed_model,
                                          'results', 'reactant_conv.csv'))
    v1_data = _read_conv_csv(os.path.join(base_dir, frag_model1,
                                          'results', 'reactant_conv.csv'))
    if frag_model2:
        v2_data = _read_conv_csv(os.path.join(base_dir, frag_model2,
                                              'results', 'reactant_conv.csv'))
        return v0_data, v1_data, v2_data
    else:
        return v0_data, v1_data, None
def plot_comparison(v0_data, v1_data, v2_data=None,
                    detailed_model=None,
                    frag_model1=None,
                    frag_model2=None,
                    xlabel='',
                    ylabel='',
                    figure_name='',
                    xlim=10, ylim=1.0):
    """Plot detailed vs. fragment-model conversion curves and save the figure.

    Each v*_data is a 2xN array: row 0 is time in seconds (converted to hours
    for plotting), row 1 is conversion. The x axis is log-scaled from 0.1 hr
    to ``xlim``; the figure is written to ``figure_name``.
    """
    init_plotting()
    plt.figure()
    plt.plot(v0_data[0]/3600.0, v0_data[1], label='Detailed: {0}'.format(detailed_model))
    plt.plot(v1_data[0]/3600,v1_data[1], label='Fragment: {0}'.format(frag_model1))
    # FIX: `if v2_data:` raises "truth value of an array is ambiguous" for any
    # multi-element numpy array; test explicitly against None instead.
    if v2_data is not None:
        plt.plot(v2_data[0]/3600,v2_data[1], label='Fragment: {0}'.format(frag_model2))
    plt.gca().set_xscale('log')
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.xlim((.1, xlim))
    plt.ylim((0, ylim))
    plt.gca().legend(scatterpoints=1)
    plt.tight_layout()
    plt.savefig(figure_name)
# Models to compare; leave frag_model2 = None to plot a single fragment model.
detailed_model = 'pdd_2014_pruning4_s4_a3ene_c11'
frag_model1 = 'two-sided'
frag_model2 = None
# Output file name encodes which fragment model(s) were plotted.
if frag_model2:
    figure_name = 'reactant_conversion_{0}_vs_{1}'.format(frag_model1, frag_model2)
else:
    figure_name = 'reactant_conversion_{0}'.format(frag_model1)
# plot reactant conversion
xlabel = 'Time / hr'
ylabel = 'Conversion'
detailed, frag1, frag2 = load_comparison_data(detailed_model, frag_model1, frag_model2)
plot_comparison(detailed, frag1, frag2,
                detailed_model,
                frag_model1,
                frag_model2,
                xlabel, ylabel,
                '{0}.pdf'.format(figure_name),
                xlim=14)
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Models can have a ``managed`` attribute, which specifies whether the SQL code
is generated for the table on various manage.py operations.
"""
from django.db import models
# All of these models are created in the database by Django.
class A01(models.Model):
    """Managed model: Django creates and owns table 'a01'."""
    f_a = models.CharField(max_length=10, db_index=True)
    f_b = models.IntegerField()
    class Meta:
        db_table = 'a01'
    def __unicode__(self):
        return self.f_a
class B01(models.Model):
    """Managed model with an FK to A01, stored in table 'b01'."""
    fk_a = models.ForeignKey(A01)
    f_a = models.CharField(max_length=10, db_index=True)
    f_b = models.IntegerField()
    class Meta:
        db_table = 'b01'
        # 'managed' is True by default. This tests we can set it explicitly.
        managed = True
    def __unicode__(self):
        return self.f_a
class C01(models.Model):
    """Managed model whose m2m join table is explicitly named 'd01'
    (the same table the unmanaged Intermediate model re-uses below)."""
    mm_a = models.ManyToManyField(A01, db_table='d01')
    f_a = models.CharField(max_length=10, db_index=True)
    f_b = models.IntegerField()
    class Meta:
        db_table = 'c01'
    def __unicode__(self):
        return self.f_a
# All of these models use the same tables as the previous set (they are shadows
# of possibly a subset of the columns). There should be no creation errors,
# since we have told Django they aren't managed by Django.
class A02(models.Model):
    """Unmanaged shadow of table 'a01', exposing only a subset of its columns."""
    f_a = models.CharField(max_length=10, db_index=True)
    class Meta:
        db_table = 'a01'
        managed = False
    def __unicode__(self):
        return self.f_a
class B02(models.Model):
    """Unmanaged shadow of table 'b01' (Meta declared before the fields)."""
    class Meta:
        db_table = 'b01'
        managed = False
    fk_a = models.ForeignKey(A02)
    f_a = models.CharField(max_length=10, db_index=True)
    f_b = models.IntegerField()
    def __unicode__(self):
        return self.f_a
# To re-use the many-to-many intermediate table, we need to manually set up
# things up.
class C02(models.Model):
    """Unmanaged shadow of table 'c01'; re-uses the existing m2m join table
    via the explicit `through="Intermediate"` model instead of creating one."""
    mm_a = models.ManyToManyField(A02, through="Intermediate")
    f_a = models.CharField(max_length=10, db_index=True)
    f_b = models.IntegerField()
    class Meta:
        db_table = 'c01'
        managed = False
    def __unicode__(self):
        return self.f_a
class Intermediate(models.Model):
    # Unmanaged through-model mapping onto the existing join table 'd01'
    # created for C01.mm_a; db_column pins the legacy column names.
    a02 = models.ForeignKey(A02, db_column="a01_id")
    c02 = models.ForeignKey(C02, db_column="c01_id")

    class Meta:
        db_table = 'd01'
        managed = False
#
# These next models test the creation (or not) of many to many join tables
# between managed and unmanaged models. A join table between two unmanaged
# models shouldn't be automatically created (see #10647).
#
# Firstly, we need some models that will create the tables, purely so that the
# tables are created. This is a test setup, not a requirement for unmanaged
# models.
class Proxy1(models.Model):
    # Managed model that exists only so its table is created for the
    # unmanaged models below to map onto.
    class Meta:
        db_table = "unmanaged_models_proxy1"
class Proxy2(models.Model):
    # Companion table-creating model for Unmanaged2 below.
    class Meta:
        db_table = "unmanaged_models_proxy2"
class Unmanaged1(models.Model):
    # Unmanaged view over the table created via Proxy1.
    class Meta:
        managed = False
        db_table = "unmanaged_models_proxy1"
# Unmanaged with an m2m to unmanaged: the intermediary table won't be created.
class Unmanaged2(models.Model):
    # Unmanaged model with an m2m to another unmanaged model: Django must
    # not auto-create the intermediary join table (regression for #10647).
    mm = models.ManyToManyField(Unmanaged1)

    class Meta:
        managed = False
        db_table = "unmanaged_models_proxy2"
# Here's an unmanaged model with an m2m to a managed one; the intermediary
# table *will* be created (unless given a custom `through` as for C02 above).
class Managed1(models.Model):
    # Managed model with an m2m to an unmanaged one: here the intermediary
    # join table *is* auto-created.
    mm = models.ManyToManyField(Unmanaged1)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.internet import defer
from twisted.internet import task
from twisted.trial import unittest
from zope.interface import implementer
from buildbot import interfaces
from buildbot.data import buildsets
from buildbot.data import resultspec
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.test.fake import fakedb
from buildbot.test.fake import fakemaster
from buildbot.test.util import endpoint
from buildbot.test.util import interfaces as util_interfaces
from buildbot.util import epoch2datetime
# Fixed timestamps (epoch seconds) used throughout these tests, plus their
# datetime equivalents as produced by the data API.
A_TIMESTAMP = 1341700729
A_TIMESTAMP_EPOCH = epoch2datetime(A_TIMESTAMP)
EARLIER = 1248529376
EARLIER_EPOCH = epoch2datetime(EARLIER)
class BuildsetEndpoint(endpoint.EndpointMixin, unittest.TestCase):
    """Tests for the single-buildset data API endpoint (/buildsets/:bsid)."""

    endpointClass = buildsets.BuildsetEndpoint
    resourceTypeClass = buildsets.Buildset

    def setUp(self):
        self.setUpEndpoint()
        # Buildset 13 has two sourcestamps; buildset 14 has none.
        self.db.insertTestData([
            fakedb.Buildset(id=13, reason='because I said so'),
            fakedb.SourceStamp(id=92),
            fakedb.SourceStamp(id=93),
            fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92),
            fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=93),
            fakedb.Buildset(id=14, reason='no sourcestamps'),
        ])

    def tearDown(self):
        self.tearDownEndpoint()

    def test_get_existing(self):
        # A known bsid yields validated data carrying the stored reason.
        d = self.callGet(('buildsets', 13))

        @d.addCallback
        def check(buildset):
            self.validateData(buildset)
            self.assertEqual(buildset['reason'], 'because I said so')
        return d

    def test_get_existing_no_sourcestamps(self):
        # A buildset without sourcestamps reports an empty list, not None.
        d = self.callGet(('buildsets', 14))

        @d.addCallback
        def check(buildset):
            self.validateData(buildset)
            self.assertEqual(buildset['sourcestamps'], [])
        return d

    def test_get_missing(self):
        # An unknown bsid yields None.
        d = self.callGet(('buildsets', 99))

        @d.addCallback
        def check(buildset):
            self.assertEqual(buildset, None)
        return d
class BuildsetsEndpoint(endpoint.EndpointMixin, unittest.TestCase):
    """Tests for the buildset collection endpoint (/buildsets), including
    filtering on the 'complete' field."""

    endpointClass = buildsets.BuildsetsEndpoint
    resourceTypeClass = buildsets.Buildset

    def setUp(self):
        self.setUpEndpoint()
        # One complete buildset (13) and one incomplete (14).
        self.db.insertTestData([
            fakedb.SourceStamp(id=92),
            fakedb.Buildset(id=13, complete=True),
            fakedb.Buildset(id=14, complete=False),
            fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=92),
            fakedb.BuildsetSourceStamp(buildsetid=14, sourcestampid=92),
        ])

    def tearDown(self):
        self.tearDownEndpoint()

    def test_get(self):
        # Unfiltered get returns both buildsets.
        d = self.callGet(('buildsets',))

        @d.addCallback
        def check(buildsets):
            self.validateData(buildsets[0])
            self.assertEqual(buildsets[0]['bsid'], 13)
            self.validateData(buildsets[1])
            self.assertEqual(buildsets[1]['bsid'], 14)
        return d

    def test_get_complete(self):
        # Filtering on complete==True returns only buildset 13.
        f = resultspec.Filter('complete', 'eq', [True])
        d = self.callGet(('buildsets',),
                         resultSpec=resultspec.ResultSpec(filters=[f]))

        @d.addCallback
        def check(buildsets):
            self.assertEqual(len(buildsets), 1)
            self.validateData(buildsets[0])
            self.assertEqual(buildsets[0]['bsid'], 13)
        return d

    def test_get_incomplete(self):
        # Filtering on complete==False returns only buildset 14.
        f = resultspec.Filter('complete', 'eq', [False])
        d = self.callGet(('buildsets',),
                         resultSpec=resultspec.ResultSpec(filters=[f]))

        @d.addCallback
        def check(buildsets):
            self.assertEqual(len(buildsets), 1)
            self.validateData(buildsets[0])
            self.assertEqual(buildsets[0]['bsid'], 14)
        return d
class Buildset(util_interfaces.InterfaceTests, unittest.TestCase):
    """Tests for the Buildset resource type's update methods
    (addBuildset and maybeBuildsetComplete), run against a fake master."""

    def setUp(self):
        self.master = fakemaster.make_master(testcase=self,
                                             wantMq=True, wantDb=True, wantData=True)
        self.rtype = buildsets.Buildset(self.master)
        return self.master.db.insertTestData([
            fakedb.SourceStamp(id=234, branch='br', codebase='cb',
                               project='pr', repository='rep', revision='rev',
                               created_at=89834834),
            fakedb.Builder(id=42, name='bldr1'),
            fakedb.Builder(id=43, name='bldr2'),
        ])

    # Expected data-API representation of sourcestamp 234 inserted above.
    SS234_DATA = {'branch': u'br', 'codebase': u'cb', 'patch': None,
                  'project': u'pr', 'repository': u'rep', 'revision': u'rev',
                  'created_at': epoch2datetime(89834834), 'ssid': 234}

    def test_signature_addBuildset(self):
        # The fake updates method must expose the same argument spec as the
        # real implementation.
        @self.assertArgSpecMatches(
            self.master.data.updates.addBuildset,  # fake
            self.rtype.addBuildset)  # real
        def addBuildset(self, waited_for, scheduler=None, sourcestamps=None, reason='',
                        properties=None, builderids=None, external_idstring=None,
                        parent_buildid=None, parent_relationship=None):
            pass

    def do_test_addBuildset(self, kwargs, expectedReturn,
                            expectedMessages, expectedBuildset):
        """Run a test of addBuildset.

        @param kwargs: kwargs to addBuildset
        @param expectedReturn: expected return value - tuple of (bsid, brids)
        @param expectedMessages: expected mq messages transmitted
        @param expectedBuildset: expected buildset inserted into the db

        The buildset is added at time A_TIMESTAMP.
        Note that addBuildset does not add sourcestamps, so this method assumes
        there are none in the db.
        """
        clock = task.Clock()
        clock.advance(A_TIMESTAMP)
        d = self.rtype.addBuildset(_reactor=clock, **kwargs)

        def check(xxx_todo_changeme):
            (bsid, brids) = xxx_todo_changeme
            self.assertEqual((bsid, brids), expectedReturn)
            # check the correct message was received
            self.master.mq.assertProductions(
                expectedMessages, orderMatters=False)
            # and that the correct data was inserted into the db
            self.master.db.buildsets.assertBuildset(bsid, expectedBuildset)
        d.addCallback(check)
        return d

    def _buildRequestMessageDict(self, brid, bsid, builderid):
        # Expected data-API body for a freshly added build request.
        return {'builderid': builderid,
                'buildrequestid': brid,
                'buildsetid': bsid,
                'claimed': False,
                'claimed_at': None,
                'claimed_by_masterid': None,
                'complete': False,
                'complete_at': None,
                'priority': 0,
                'results': -1,
                'submitted_at': epoch2datetime(A_TIMESTAMP),
                'waited_for': True,
                'properties': None}

    # The three _buildRequestMessageN helpers build the same body routed to
    # the three mq topics a new buildrequest is announced on.
    def _buildRequestMessage1(self, brid, bsid, builderid):
        return (
            ('buildsets', str(bsid),
             'builders', str(builderid),
             'buildrequests', str(brid), 'new'),
            self._buildRequestMessageDict(brid, bsid, builderid))

    def _buildRequestMessage2(self, brid, bsid, builderid):
        return (
            ('buildrequests', str(brid), 'new'),
            self._buildRequestMessageDict(brid, bsid, builderid))

    def _buildRequestMessage3(self, brid, bsid, builderid):
        return (
            ('builders', str(builderid),
             'buildrequests', str(brid), 'new'),
            self._buildRequestMessageDict(brid, bsid, builderid))

    def _buildsetMessage(self, bsid, external_idstring=u'extid',
                         reason=u'because', scheduler=u'fakesched', sourcestampids=None,
                         submitted_at=A_TIMESTAMP):
        # Expected 'new' message for a buildset.
        if sourcestampids is None:
            sourcestampids = [234]
        ssmap = {234: self.SS234_DATA}
        return (
            ('buildsets', str(bsid), 'new'),
            dict(bsid=bsid, complete=False, complete_at=None,
                 external_idstring=external_idstring, reason=reason,
                 results=None, scheduler=scheduler,
                 sourcestamps=[ssmap[ssid] for ssid in sourcestampids],
                 submitted_at=submitted_at))

    def _buildsetCompleteMessage(self, bsid, complete_at=A_TIMESTAMP_EPOCH,
                                 submitted_at=A_TIMESTAMP_EPOCH, external_idstring=u'extid',
                                 reason=u'because', results=0, sourcestampids=None):
        # Expected 'complete' message for a buildset.
        if sourcestampids is None:
            sourcestampids = [234]
        ssmap = {234: self.SS234_DATA}
        return (
            ('buildsets', str(bsid), 'complete'),
            dict(bsid=bsid, complete=True, complete_at=complete_at,
                 external_idstring=external_idstring, reason=reason,
                 results=results, submitted_at=submitted_at,
                 sourcestamps=[ssmap[ssid] for ssid in sourcestampids]))

    def test_addBuildset_two_builderNames(self):
        @implementer(interfaces.IScheduler)
        class FakeSched(object):
            name = 'fakesched'

        kwargs = dict(scheduler=u'fakesched', reason=u'because',
                      sourcestamps=[234], external_idstring=u'extid',
                      builderids=[42, 43], waited_for=True)
        expectedReturn = (200, {42: 1000, 43: 1001})
        # One request per builder, each announced on all three topics.
        expectedMessages = [
            self._buildRequestMessage1(1000, 200, 42),
            self._buildRequestMessage2(1000, 200, 42),
            self._buildRequestMessage3(1000, 200, 42),
            self._buildRequestMessage1(1001, 200, 43),
            self._buildRequestMessage2(1001, 200, 43),
            self._buildRequestMessage3(1001, 200, 43),
            self._buildsetMessage(200),
        ]
        expectedBuildset = dict(reason=u'because',
                                properties={},
                                external_idstring=u'extid')
        return self.do_test_addBuildset(kwargs,
                                        expectedReturn, expectedMessages, expectedBuildset)

    def test_addBuildset_no_builderNames(self):
        @implementer(interfaces.IScheduler)
        class FakeSched(object):
            name = 'fakesched'

        kwargs = dict(scheduler=u'fakesched', reason=u'because',
                      sourcestamps=[234], external_idstring=u'extid', waited_for=False)
        expectedReturn = (200, {})
        expectedMessages = [
            self._buildsetMessage(200),
            # with no builderNames, this is done already
            self._buildsetCompleteMessage(200),
        ]
        expectedBuildset = dict(reason=u'because',
                                properties={},
                                external_idstring=u'extid')
        return self.do_test_addBuildset(kwargs,
                                        expectedReturn, expectedMessages, expectedBuildset)

    def test_signature_maybeBuildsetComplete(self):
        @self.assertArgSpecMatches(
            self.master.data.updates.maybeBuildsetComplete,  # fake
            self.rtype.maybeBuildsetComplete)  # real
        def maybeBuildsetComplete(self, bsid):
            pass

    @defer.inlineCallbacks
    def do_test_maybeBuildsetComplete(self,
                                      buildRequestCompletions=None,
                                      buildRequestResults=None,
                                      buildsetComplete=False,
                                      expectComplete=False,
                                      expectMessage=False,
                                      expectSuccess=True):
        """Test maybeBuildsetComplete.

        @param buildRequestCompletions: dict mapping brid to True if complete,
            else False (and defaulting to False)
        @param buildRequestResults: dict mapping brid to result (defaulting
            to SUCCESS)
        @param buildsetComplete: true if the buildset is already complete
        @param expectComplete: true if the buildset should be complete at exit
        @param expectMessage: true if a buildset completion message is expected
        @param expectSuccess: if expectComplete, whether to expect the buildset
            to be complete

        This first adds two buildsets to the database - 72 and 73. Buildset 72
        is already complete if buildsetComplete is true; 73 is not complete.

        It adds four buildrequests - 42, 43, and 44 for buildset 72, and 45 for
        buildset 73. The completion and results are based on
        buildRequestCompletions and buildRequestResults.

        Then, maybeBuildsetComplete is called for buildset 72, and the
        expectations are checked.
        """
        if buildRequestCompletions is None:
            buildRequestCompletions = {}
        if buildRequestResults is None:
            buildRequestResults = {}
        clock = task.Clock()
        clock.advance(A_TIMESTAMP)

        def mkbr(brid, bsid=72):
            # Build a fake request row with this brid's configured
            # completion flag and result.
            return fakedb.BuildRequest(id=brid, buildsetid=bsid, builderid=42,
                                       complete=buildRequestCompletions.get(
                                           brid),
                                       results=buildRequestResults.get(brid, SUCCESS))
        yield self.master.db.insertTestData([
            fakedb.Builder(id=42, name='bldr1'),
            fakedb.Buildset(id=72,
                            submitted_at=EARLIER,
                            complete=buildsetComplete,
                            complete_at=A_TIMESTAMP if buildsetComplete else None),
            mkbr(42), mkbr(43), mkbr(44),
            fakedb.BuildsetSourceStamp(buildsetid=72, sourcestampid=234),
            fakedb.Buildset(id=73,
                            complete=False),
            mkbr(45, bsid=73),
            fakedb.BuildsetSourceStamp(buildsetid=73, sourcestampid=234),
        ])
        yield self.rtype.maybeBuildsetComplete(72, _reactor=clock)
        self.master.db.buildsets.assertBuildsetCompletion(72, expectComplete)
        if expectMessage:
            self.assertEqual(self.master.mq.productions, [
                self._buildsetCompleteMessage(72,
                                              results=SUCCESS if expectSuccess else FAILURE,
                                              submitted_at=EARLIER_EPOCH),
            ])
        else:
            self.assertEqual(self.master.mq.productions, [])

    def test_maybeBuildsetComplete_not_yet(self):
        # only brid 42 is complete, so the buildset is not complete
        return self.do_test_maybeBuildsetComplete(
            buildRequestCompletions={42: True})

    def test_maybeBuildsetComplete_complete(self):
        return self.do_test_maybeBuildsetComplete(
            buildRequestCompletions={42: True, 43: True, 44: True},
            expectComplete=True,
            expectMessage=True)

    def test_maybeBuildsetComplete_complete_failure(self):
        # One failed request makes the whole buildset's result FAILURE.
        return self.do_test_maybeBuildsetComplete(
            buildRequestCompletions={42: True, 43: True, 44: True},
            buildRequestResults={43: FAILURE},
            expectComplete=True,
            expectMessage=True,
            expectSuccess=False)

    def test_maybeBuildsetComplete_already_complete(self):
        # An already-complete buildset must not produce a second message.
        return self.do_test_maybeBuildsetComplete(
            buildRequestCompletions={42: True, 43: True, 44: True},
            buildsetComplete=True,
            expectComplete=True,
            expectMessage=False)
|
unknown
|
codeparrot/codeparrot-clean
| ||
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from ZODB.DB import DB
from ZODB.tests import (
BasicStorage,
HistoryStorage,
IteratorStorage,
MTStorage,
PackableStorage,
RevisionStorage,
StorageTestBase,
Synchronization,
)
import os
if os.environ.get('USE_ZOPE_TESTING_DOCTEST'):
from zope.testing import doctest
else:
import doctest
import random
import re
import transaction
import unittest
import ZODB.DemoStorage
import ZODB.tests.hexstorage
import ZODB.tests.util
import ZODB.utils
from zope.testing import renormalizing
class DemoStorageTests(
    StorageTestBase.StorageTestBase,
    BasicStorage.BasicStorage,
    HistoryStorage.HistoryStorage,
    IteratorStorage.ExtendedIteratorStorage,
    IteratorStorage.IteratorStorage,
    MTStorage.MTStorage,
    PackableStorage.PackableStorage,
    RevisionStorage.RevisionStorage,
    Synchronization.SynchronizedStorage,
):
    # Runs the generic ZODB storage conformance mixins against a plain
    # in-memory DemoStorage.

    def setUp(self):
        StorageTestBase.StorageTestBase.setUp(self)
        self._storage = ZODB.DemoStorage.DemoStorage()

    def checkOversizeNote(self):
        # This base class test checks for the common case where a storage
        # doesn't support huge transaction metadata. This storage doesn't
        # have this limit, so we inhibit this test here.
        pass

    def checkLoadDelegation(self):
        # Minimal test of loadEX w/o version -- ironically
        db = DB(self._storage)  # creates object 0. :)
        s2 = ZODB.DemoStorage.DemoStorage(base=self._storage)
        self.assertEqual(s2.load(ZODB.utils.z64, ''),
                         self._storage.load(ZODB.utils.z64, ''))

    def checkLengthAndBool(self):
        self.assertEqual(len(self._storage), 0)
        self.assertTrue(not self._storage)
        db = DB(self._storage)  # creates object 0. :)
        self.assertEqual(len(self._storage), 1)
        self.assertTrue(self._storage)
        conn = db.open()
        for i in range(10):
            conn.root()[i] = conn.root().__class__()
        transaction.commit()
        # root + 10 children
        self.assertEqual(len(self._storage), 11)
        self.assertTrue(self._storage)

    def checkLoadBeforeUndo(self):
        pass  # we don't support undo yet
    checkUndoZombie = checkLoadBeforeUndo
class DemoStorageHexTests(DemoStorageTests):
    # Same conformance suite with DemoStorage wrapped in a hex-encoding
    # storage layer.

    def setUp(self):
        StorageTestBase.StorageTestBase.setUp(self)
        self._storage = ZODB.tests.hexstorage.HexStorage(
            ZODB.DemoStorage.DemoStorage())
class DemoStorageWrappedBase(DemoStorageTests):
    # Abstract base: runs the suite with DemoStorage layered over a real
    # base storage supplied by _makeBaseStorage().

    def setUp(self):
        StorageTestBase.StorageTestBase.setUp(self)
        self._base = self._makeBaseStorage()
        self._storage = ZODB.DemoStorage.DemoStorage(base=self._base)

    def tearDown(self):
        self._base.close()
        StorageTestBase.StorageTestBase.tearDown(self)

    def _makeBaseStorage(self):
        # Subclasses must return the storage to wrap.
        raise NotImplementedError

    def checkPackOnlyOneObject(self):
        pass  # Wrapping demo storages don't do gc

    def checkPackWithMultiDatabaseReferences(self):
        pass  # we never do gc
    checkPackAllRevisions = checkPackWithMultiDatabaseReferences
class DemoStorageWrappedAroundMappingStorage(DemoStorageWrappedBase):
    # DemoStorage over an in-memory MappingStorage base.

    def _makeBaseStorage(self):
        from ZODB.MappingStorage import MappingStorage
        return MappingStorage()
class DemoStorageWrappedAroundFileStorage(DemoStorageWrappedBase):
    # DemoStorage over an on-disk FileStorage base.

    def _makeBaseStorage(self):
        from ZODB.FileStorage import FileStorage
        return FileStorage('FileStorageTests.fs')
class DemoStorageWrappedAroundHexMappingStorage(DemoStorageWrappedBase):
    # DemoStorage over a hex-encoded MappingStorage base.

    def _makeBaseStorage(self):
        from ZODB.MappingStorage import MappingStorage
        return ZODB.tests.hexstorage.HexStorage(MappingStorage())
def setUp(test):
    # Shared doctest setUp: seed the RNG for reproducible runs, then apply
    # the standard ZODB test fixture.
    random.seed(0)
    ZODB.tests.util.setUp(test)
# Doctest: DemoStorage forwards registerDB, close, and tpc_* calls to its
# base/changes layers as expected.  (The docstring IS the test; its content
# must not change.)
def testSomeDelegation():
    r"""
    >>> import six
    >>> class S:
    ...     def __init__(self, name):
    ...         self.name = name
    ...     def registerDB(self, db):
    ...         six.print_(self.name, db)
    ...     def close(self):
    ...         six.print_(self.name, 'closed')
    ...     sortKey = getSize = __len__ = history = getTid = None
    ...     tpc_finish = tpc_vote = tpc_transaction = None
    ...     _lock_acquire = _lock_release = lambda self: None
    ...     getName = lambda self: 'S'
    ...     isReadOnly = tpc_transaction = None
    ...     supportsUndo = undo = undoLog = undoInfo = None
    ...     supportsTransactionalUndo = None
    ...     def new_oid(self):
    ...         return '\0' * 8
    ...     def tpc_begin(self, t, tid, status):
    ...         six.print_('begin', tid, status)
    ...     def tpc_abort(self, t):
    ...         pass
    >>> from ZODB.DemoStorage import DemoStorage
    >>> storage = DemoStorage(base=S(1), changes=S(2))
    >>> storage.registerDB(1)
    2 1
    >>> storage.close()
    1 closed
    2 closed
    >>> storage.tpc_begin(1, 2, 3)
    begin 2 3
    >>> storage.tpc_abort(1)
    """
# Doctest: blob accessors on a DemoStorage with a non-blob base raise
# POSKeyError rather than some other failure.
def blob_pos_key_error_with_non_blob_base():
    """
    >>> storage = ZODB.DemoStorage.DemoStorage()
    >>> storage.loadBlob(ZODB.utils.p64(1), ZODB.utils.p64(1))
    Traceback (most recent call last):
    ...
    POSKeyError: 0x01
    >>> storage.openCommittedBlobFile(ZODB.utils.p64(1), ZODB.utils.p64(1))
    Traceback (most recent call last):
    ...
    POSKeyError: 0x01
    """
# Doctest: loadBefore returns base-storage data/tid with the next-tid taken
# from the changes layer when the record is current in the base.
def load_before_base_storage_current():
    """
    Here we'll exercise that DemoStorage's loadBefore method works
    properly when deferring to a record that is current in the
    base storage.

    >>> import time
    >>> import transaction
    >>> import ZODB.DB
    >>> import ZODB.DemoStorage
    >>> import ZODB.MappingStorage
    >>> import ZODB.utils
    >>> base = ZODB.MappingStorage.MappingStorage()
    >>> basedb = ZODB.DB(base)
    >>> conn = basedb.open()
    >>> conn.root()['foo'] = 'bar'
    >>> transaction.commit()
    >>> conn.close()
    >>> storage = ZODB.DemoStorage.DemoStorage(base=base)
    >>> db = ZODB.DB(storage)
    >>> conn = db.open()
    >>> conn.root()['foo'] = 'baz'
    >>> time.sleep(.1) # Windows has a low-resolution clock
    >>> transaction.commit()
    >>> oid = ZODB.utils.z64
    >>> base_current = storage.base.load(oid)
    >>> tid = ZODB.utils.p64(ZODB.utils.u64(base_current[1]) + 1)
    >>> base_record = storage.base.loadBefore(oid, tid)
    >>> base_record[-1] is None
    True
    >>> base_current == base_record[:2]
    True

    >>> t = storage.loadBefore(oid, tid)

    The data and tid are the values from the base storage, but the
    next tid is from changes.

    >>> t[:2] == base_record[:2]
    True
    >>> t[-1] == storage.changes.load(oid)[1]
    True

    >>> conn.close()
    >>> db.close()
    >>> base.close()
    """
def test_suite():
    """Assemble the full suite: module/file doctests plus every
    DemoStorage unittest class (methods prefixed 'check')."""
    suite = unittest.TestSuite([
        doctest.DocTestSuite(
            setUp=setUp, tearDown=ZODB.tests.util.tearDown,
            checker=ZODB.tests.util.checker
        ),
        doctest.DocFileSuite(
            '../DemoStorage.test',
            setUp=setUp,
            tearDown=ZODB.tests.util.tearDown,
            checker=ZODB.tests.util.checker,
        ),
    ])
    # Order matters only for reporting; keep the historical order.
    for case in (DemoStorageTests,
                 DemoStorageHexTests,
                 DemoStorageWrappedAroundFileStorage,
                 DemoStorageWrappedAroundMappingStorage,
                 DemoStorageWrappedAroundHexMappingStorage):
        suite.addTest(unittest.makeSuite(case, 'check'))
    return suite
|
unknown
|
codeparrot/codeparrot-clean
| ||
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ratio = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# Threshold ratio used by the distribution analyser (see table above).
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75

# Char to FreqOrder table; number of entries in Big5CharToFreqOrder below.
BIG5_TABLE_SIZE = 5376
Big5CharToFreqOrder = ( \
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512
#Everything below is of no interest for detection purpose
2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
13968,13969,13970,13971,13972) #13973
|
unknown
|
codeparrot/codeparrot-clean
| ||
# frozen_string_literal: true
module Jekyll
  # Jekyll's logger. A thin ::Logger subclass that prints bare messages
  # (its formatter drops timestamps and severity tags) and routes output
  # by severity: anything above INFO goes to $stderr, the rest to $stdout.
  class Stevenson < ::Logger
    # Set up the logger with a pass-through formatter that emits only the
    # message text. The $stdout device handed to super is a placeholder;
    # #add swaps in the real stream per call via #logdevice.
    def initialize
      formatter = proc do |_, _, _, msg|
        msg.to_s
      end
      super($stdout, :formatter => formatter)
    end

    # Log +message+ (or the block's result, or +progname+) at +severity+,
    # mirroring ::Logger#add but writing with #puts to the stream chosen
    # by #logdevice rather than to a LogDevice.
    #
    # Returns true, including when the entry is filtered out by @level.
    def add(severity, message = nil, progname = nil)
      severity ||= UNKNOWN
      # Pick $stdout or $stderr for this call based on the severity.
      @logdev = logdevice(severity)
      return true if @logdev.nil? || severity < @level
      progname ||= @progname
      # Same message-resolution order as ::Logger#add: explicit message
      # first, then the block, then progname (restoring the default
      # progname when progname was consumed as the message).
      if message.nil?
        if block_given?
          message = yield
        else
          message = progname
          progname = @progname
        end
      end
      @logdev.puts(
        format_message(format_severity(severity), Time.now, progname, message)
      )
      true
    end

    # Log a +WARN+ message; progname is rendered with +#yellow+.
    def warn(progname = nil, &block)
      add(WARN, nil, progname.yellow, &block)
    end

    # Log an +ERROR+ message; progname is rendered with +#red+.
    def error(progname = nil, &block)
      add(ERROR, nil, progname.red, &block)
    end

    def close
      # No LogDevice in use
    end

    private

    # Choose the output stream for +severity+: values greater than INFO
    # (WARN and above) go to $stderr, everything else to $stdout.
    def logdevice(severity)
      if severity > INFO
        $stderr
      else
        $stdout
      end
    end
  end
end
|
ruby
|
github
|
https://github.com/jekyll/jekyll
|
lib/jekyll/stevenson.rb
|
# Patch (bugfix) release process
Patch releases can be cut straight from the `master` branch if it contains no
changes that would warrant a non-patch release.
However, if the `master` branch has changes that are not suitable for a patch
release, a release branch should be created.
See [BRANCHES-AND-TAGS.md](BRANCHES-AND-TAGS.md) for more information on the release branches.
## Backporting changes
If a release branch exists (because `master` has changes that are not suitable for
a patch release), then bug fixes and patches need to be backported to that release
branch. If patches can be shipped directly from `master`, no backporting is needed.
A patch must:
- Not be a major/new feature
- Not break existing functionality
- Be a bugfix or a small improvement
To indicate that a pull request made against the `master` branch should be
included in the next patch release, the author or maintainer should apply a
`process/cherry-pick/<BRANCH_NAME>` label corresponding to the release branch.
|
unknown
|
github
|
https://github.com/moby/moby
|
project/PATCH-RELEASES.md
|
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.configurationsample.source;
import org.springframework.boot.configurationsample.TestConfigurationProperties;
import org.springframework.boot.configurationsample.TestNestedConfigurationProperty;
/**
 * Sample used by the annotation-processor tests: a properties class whose
 * {@code ConventionSource} field carries {@code @TestNestedConfigurationProperty}
 * so it can be detected as a nested configuration group under the
 * {@code "example"} prefix.
 */
@TestConfigurationProperties(prefix = "example")
public class ConventionSourceAnnotated {

	/**
	 * Nested configuration source, exposed read-only via {@link #getNested()}.
	 */
	@TestNestedConfigurationProperty
	private final ConventionSource nested = new ConventionSource();

	public ConventionSource getNested() {
		return this.nested;
	}

}
|
java
|
github
|
https://github.com/spring-projects/spring-boot
|
configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/source/ConventionSourceAnnotated.java
|
"""
E-commerce Tab Instructor Dashboard Query Registration Code Status.
"""
from django.core.urlresolvers import reverse
from django.views.decorators.http import require_GET, require_POST
from instructor.enrollment import get_email_params, send_mail_to_student
from django.utils.translation import ugettext as _
from courseware.courses import get_course_by_id
from instructor.views.api import require_level
from student.models import CourseEnrollment
from util.json_request import JsonResponse
from shoppingcart.models import CourseRegistrationCode, RegistrationCodeRedemption
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django.views.decorators.cache import cache_control
import logging
log = logging.getLogger(__name__)
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_GET
def look_up_registration_code(request, course_id):  # pylint: disable=unused-argument
    """
    Look up an enrollment (registration) code for a course.

    Reads the code from the ``registration_code`` GET parameter and
    reports whether it exists, whether it is still valid, and whether
    it has already been redeemed. Responds with HTTP 400 when the code
    is unknown; otherwise HTTP 200 with the code's status flags and a
    URL for the code-details action endpoint.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    requested_code = request.GET.get('registration_code')
    course = get_course_by_id(course_key, depth=0)
    try:
        registration_code = CourseRegistrationCode.objects.get(code=requested_code)
    except CourseRegistrationCode.DoesNotExist:
        # Unknown code: every flag is False, plus a user-facing message.
        payload = {
            'is_registration_code_exists': False,
            'is_registration_code_valid': False,
            'is_registration_code_redeemed': False,
            'message': _('The enrollment code ({code}) was not found for the {course_name} course.').format(
                code=requested_code, course_name=course.display_name
            )
        }
        return JsonResponse(payload, status=400)
    already_redeemed = RegistrationCodeRedemption.is_registration_code_redeemed(requested_code)
    detail_url = reverse('registration_code_details', kwargs={'course_id': unicode(course_id)})
    return JsonResponse({
        'is_registration_code_exists': True,
        'is_registration_code_valid': registration_code.is_valid,
        'is_registration_code_redeemed': already_redeemed,
        'registration_code_detail_url': detail_url
    })
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def registration_code_details(request, course_id):
    """
    Post handler to mark the registration code as

    1) valid
    2) invalid
    3) Unredeem.

    POST parameters:
    * ``registration_code``: the enrollment code to act on.
    * ``action_type``: one of ``invalidate_registration_code``,
      ``validate_registration_code`` or ``unredeem_registration_code``.

    Returns JSON with a user-facing ``message``. Responds with HTTP 400
    for an unknown code, an unrecognized action type, or an unredeem
    request when no redemption exists.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    code = request.POST.get('registration_code')
    action_type = request.POST.get('action_type')
    course = get_course_by_id(course_key, depth=0)
    action_type_messages = {
        'invalidate_registration_code': _('This enrollment code has been canceled. It can no longer be used.'),
        'unredeem_registration_code': _('This enrollment code has been marked as unused.'),
        'validate_registration_code': _('The enrollment code has been restored.')
    }
    # Reject unrecognized actions up front; previously an unknown
    # action_type raised a KeyError (HTTP 500) on the final
    # action_type_messages lookup below.
    if action_type not in action_type_messages:
        return JsonResponse({
            'message': _('The action type ({action_type}) is not valid.').format(action_type=action_type)
        }, status=400)
    try:
        registration_code = CourseRegistrationCode.objects.get(code=code)
    except CourseRegistrationCode.DoesNotExist:
        return JsonResponse({
            'message': _('The enrollment code ({code}) was not found for the {course_name} course.').format(
                code=code, course_name=course.display_name
            )}, status=400)
    if action_type == 'invalidate_registration_code':
        registration_code.is_valid = False
        registration_code.save()
        # Canceling a redeemed code also revokes the redemption, which
        # unenrolls the student and emails them (see delete_redemption_entry).
        if RegistrationCodeRedemption.is_registration_code_redeemed(code):
            code_redemption = RegistrationCodeRedemption.get_registration_code_redemption(code, course_key)
            delete_redemption_entry(request, code_redemption, course_key)
    if action_type == 'validate_registration_code':
        registration_code.is_valid = True
        registration_code.save()
    if action_type == 'unredeem_registration_code':
        code_redemption = RegistrationCodeRedemption.get_registration_code_redemption(code, course_key)
        if code_redemption is None:
            return JsonResponse({
                'message': _('The redemption does not exist against enrollment code ({code}).').format(
                    code=code)}, status=400)
        delete_redemption_entry(request, code_redemption, course_key)
    return JsonResponse({'message': action_type_messages[action_type]})
def delete_redemption_entry(request, code_redemption, course_key):
    """
    Revoke a registration-code redemption: unenroll the user who
    redeemed the code (skipping any refund), send them an unenrollment
    email, and delete the redemption row itself.

    Arguments:
        request: current request; only used to detect HTTPS when
            building the email parameters.
        code_redemption: the RegistrationCodeRedemption row to remove.
        course_key: key of the course the user was enrolled in.
    """
    user = code_redemption.redeemed_by
    email_address = code_redemption.redeemed_by.email
    full_name = code_redemption.redeemed_by.profile.name
    CourseEnrollment.unenroll(user, course_key, skip_refund=True)
    course = get_course_by_id(course_key, depth=0)
    email_params = get_email_params(course, True, secure=request.is_secure())
    # 'enrolled_unenroll' presumably selects the unenrollment email
    # template -- TODO confirm against the enrollment helpers.
    email_params['message'] = 'enrolled_unenroll'
    email_params['email_address'] = email_address
    email_params['full_name'] = full_name
    send_mail_to_student(email_address, email_params)
    # remove the redemption entry from the database.
    log.info('deleting redemption entry (%s) from the database.', code_redemption.id)
    code_redemption.delete()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Django settings for DjangoApplication1 project.

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

from os import environ, path

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        # NOTE(review): environ['localappdata'] exists only on Windows; this
        # raises KeyError on other platforms -- confirm Windows-only use.
        'NAME': path.join(environ['localappdata'], 'DjangoProjectDatabase.db'),  # Or path to database file if using sqlite3.
        'USER': '',  # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://example.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): a SECRET_KEY committed to source control should be rotated
# and loaded from the environment for any non-development deployment.
SECRET_KEY = '^28avlv8e$sky_08pu926q^+b5&4&5&+ob7ma%v(tn$bg#=&k4'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

# NOTE(review): the header comment says "DjangoApplication1" but this points
# at the 'DjangoProject' package -- confirm which package name is correct.
ROOT_URLCONF = 'DjangoProject.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    path.join(path.dirname(__file__), 'Templates').replace('\\', '/'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'Oar',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
|
unknown
|
codeparrot/codeparrot-clean
| ||
# orm/exc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQLAlchemy ORM exceptions."""
from .. import exc as sa_exc, util
NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
class StaleDataError(sa_exc.SQLAlchemyError):
"""An operation encountered database state that is unaccounted for.
Conditions which cause this to happen include:
* A flush may have attempted to update or delete rows
and an unexpected number of rows were matched during
the UPDATE or DELETE statement. Note that when
version_id_col is used, rows in UPDATE or DELETE statements
are also matched against the current known version
identifier.
* A mapped object with version_id_col was refreshed,
and the version number coming back from the database does
not match that of the object itself.
* A object is detached from its parent object, however
the object was previously attached to a different parent
identity which was garbage collected, and a decision
cannot be made if the new parent was really the most
recent "parent".
.. versionadded:: 0.7.4
"""
ConcurrentModificationError = StaleDataError
class FlushError(sa_exc.SQLAlchemyError):
"""A invalid condition was detected during flush()."""
class UnmappedError(sa_exc.InvalidRequestError):
"""Base for exceptions that involve expected mappings not present."""
class ObjectDereferencedError(sa_exc.SQLAlchemyError):
"""An operation cannot complete due to an object being garbage
collected.
"""
class DetachedInstanceError(sa_exc.SQLAlchemyError):
"""An attempt to access unloaded attributes on a
mapped instance that is detached."""
class UnmappedInstanceError(UnmappedError):
"""An mapping operation was requested for an unknown instance."""
@util.dependencies("sqlalchemy.orm.base")
def __init__(self, base, obj, msg=None):
if not msg:
try:
base.class_mapper(type(obj))
name = _safe_cls_name(type(obj))
msg = ("Class %r is mapped, but this instance lacks "
"instrumentation. This occurs when the instance "
"is created before sqlalchemy.orm.mapper(%s) "
"was called." % (name, name))
except UnmappedClassError:
msg = _default_unmapped(type(obj))
if isinstance(obj, type):
msg += (
'; was a class (%s) supplied where an instance was '
'required?' % _safe_cls_name(obj))
UnmappedError.__init__(self, msg)
def __reduce__(self):
return self.__class__, (None, self.args[0])
class UnmappedClassError(UnmappedError):
"""An mapping operation was requested for an unknown class."""
def __init__(self, cls, msg=None):
if not msg:
msg = _default_unmapped(cls)
UnmappedError.__init__(self, msg)
def __reduce__(self):
return self.__class__, (None, self.args[0])
class ObjectDeletedError(sa_exc.InvalidRequestError):
"""A refresh operation failed to retrieve the database
row corresponding to an object's known primary key identity.
A refresh operation proceeds when an expired attribute is
accessed on an object, or when :meth:`.Query.get` is
used to retrieve an object which is, upon retrieval, detected
as expired. A SELECT is emitted for the target row
based on primary key; if no row is returned, this
exception is raised.
The true meaning of this exception is simply that
no row exists for the primary key identifier associated
with a persistent object. The row may have been
deleted, or in some cases the primary key updated
to a new value, outside of the ORM's management of the target
object.
"""
@util.dependencies("sqlalchemy.orm.base")
def __init__(self, base, state, msg=None):
if not msg:
msg = "Instance '%s' has been deleted, or its "\
"row is otherwise not present." % base.state_str(state)
sa_exc.InvalidRequestError.__init__(self, msg)
def __reduce__(self):
return self.__class__, (None, self.args[0])
class UnmappedColumnError(sa_exc.InvalidRequestError):
"""Mapping operation was requested on an unknown column."""
class NoResultFound(sa_exc.InvalidRequestError):
"""A database result was required but none was found."""
class MultipleResultsFound(sa_exc.InvalidRequestError):
"""A single database result was required but more than one were found."""
def _safe_cls_name(cls):
try:
cls_name = '.'.join((cls.__module__, cls.__name__))
except AttributeError:
cls_name = getattr(cls, '__name__', None)
if cls_name is None:
cls_name = repr(cls)
return cls_name
@util.dependencies("sqlalchemy.orm.base")
def _default_unmapped(base, cls):
try:
mappers = base.manager_of_class(cls).mappers
except NO_STATE:
mappers = {}
except TypeError:
mappers = {}
name = _safe_cls_name(cls)
if not mappers:
return "Class '%s' is not mapped" % name
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""Functions for prompting the user for project info."""
import json
from collections import OrderedDict
import click
from jinja2.exceptions import UndefinedError
from cookiecutter.environment import StrictEnvironment
from cookiecutter.exceptions import UndefinedVariableInTemplate
def read_user_variable(var_name, default_value):
"""Prompt user for variable and return the entered value or given default.
:param str var_name: Variable of the context to query the user
:param default_value: Value that will be returned if no input happens
"""
# Please see https://click.palletsprojects.com/en/7.x/api/#click.prompt
return click.prompt(var_name, default=default_value)
def read_user_yes_no(question, default_value):
"""Prompt the user to reply with 'yes' or 'no' (or equivalent values).
Note:
Possible choices are 'true', '1', 'yes', 'y' or 'false', '0', 'no', 'n'
:param str question: Question to the user
:param default_value: Value that will be returned if no input happens
"""
# Please see https://click.palletsprojects.com/en/7.x/api/#click.prompt
return click.prompt(question, default=default_value, type=click.BOOL)
def read_repo_password(question):
"""Prompt the user to enter a password.
:param str question: Question to the user
"""
# Please see https://click.palletsprojects.com/en/7.x/api/#click.prompt
return click.prompt(question, hide_input=True)
def read_user_choice(var_name, options):
"""Prompt the user to choose from several options for the given variable.
The first item will be returned if no input happens.
:param str var_name: Variable as specified in the context
:param list options: Sequence of options that are available to select from
:return: Exactly one item of ``options`` that has been chosen by the user
"""
# Please see https://click.palletsprojects.com/en/7.x/api/#click.prompt
if not isinstance(options, list):
raise TypeError
if not options:
raise ValueError
choice_map = OrderedDict(
('{}'.format(i), value) for i, value in enumerate(options, 1)
)
choices = choice_map.keys()
default = '1'
choice_lines = ['{} - {}'.format(*c) for c in choice_map.items()]
prompt = '\n'.join(
(
'Select {}:'.format(var_name),
'\n'.join(choice_lines),
'Choose from {}'.format(', '.join(choices)),
)
)
user_choice = click.prompt(
prompt, type=click.Choice(choices), default=default, show_choices=False
)
return choice_map[user_choice]
def process_json(user_value):
"""Load user-supplied value as a JSON dict.
:param str user_value: User-supplied value to load as a JSON dict
"""
try:
user_dict = json.loads(user_value, object_pairs_hook=OrderedDict)
except Exception:
# Leave it up to click to ask the user again
raise click.UsageError('Unable to decode to JSON.')
if not isinstance(user_dict, dict):
# Leave it up to click to ask the user again
raise click.UsageError('Requires JSON dict.')
return user_dict
def read_user_dict(var_name, default_value):
"""Prompt the user to provide a dictionary of data.
:param str var_name: Variable as specified in the context
:param default_value: Value that will be returned if no input is provided
:return: A Python dictionary to use in the context.
"""
# Please see https://click.palletsprojects.com/en/7.x/api/#click.prompt
if not isinstance(default_value, dict):
raise TypeError
default_display = 'default'
user_value = click.prompt(
var_name, default=default_display, type=click.STRING, value_proc=process_json
)
if user_value == default_display:
# Return the given default w/o any processing
return default_value
return user_value
def render_variable(env, raw, cookiecutter_dict):
"""Render the next variable to be displayed in the user prompt.
Inside the prompting taken from the cookiecutter.json file, this renders
the next variable. For example, if a project_name is "Peanut Butter
Cookie", the repo_name could be be rendered with:
`{{ cookiecutter.project_name.replace(" ", "_") }}`.
This is then presented to the user as the default.
:param Environment env: A Jinja2 Environment object.
:param raw: The next value to be prompted for by the user.
:param dict cookiecutter_dict: The current context as it's gradually
being populated with variables.
:return: The rendered value for the default variable.
"""
if raw is None:
return None
elif isinstance(raw, dict):
return {
render_variable(env, k, cookiecutter_dict): render_variable(
env, v, cookiecutter_dict
)
for k, v in raw.items()
}
elif isinstance(raw, list):
return [render_variable(env, v, cookiecutter_dict) for v in raw]
elif not isinstance(raw, str):
raw = str(raw)
template = env.from_string(raw)
rendered_template = template.render(cookiecutter=cookiecutter_dict)
return rendered_template
def prompt_choice_for_config(cookiecutter_dict, env, key, options, no_input):
"""Prompt user with a set of options to choose from.
Each of the possible choices is rendered beforehand.
"""
rendered_options = [render_variable(env, raw, cookiecutter_dict) for raw in options]
if no_input:
return rendered_options[0]
return read_user_choice(key, rendered_options)
def prompt_for_config(context, no_input=False):
"""Prompt user to enter a new config.
:param dict context: Source for field names and sample values.
:param no_input: Prompt the user at command line for manual configuration?
"""
cookiecutter_dict = OrderedDict([])
env = StrictEnvironment(context=context)
# First pass: Handle simple and raw variables, plus choices.
# These must be done first because the dictionaries keys and
# values might refer to them.
for key, raw in context['cookiecutter'].items():
if key.startswith('_') and not key.startswith('__'):
cookiecutter_dict[key] = raw
continue
elif key.startswith('__'):
cookiecutter_dict[key] = render_variable(env, raw, cookiecutter_dict)
continue
try:
if isinstance(raw, list):
# We are dealing with a choice variable
val = prompt_choice_for_config(
cookiecutter_dict, env, key, raw, no_input
)
cookiecutter_dict[key] = val
elif not isinstance(raw, dict):
# We are dealing with a regular variable
val = render_variable(env, raw, cookiecutter_dict)
if not no_input:
val = read_user_variable(key, val)
cookiecutter_dict[key] = val
except UndefinedError as err:
msg = "Unable to render variable '{}'".format(key)
raise UndefinedVariableInTemplate(msg, err, context)
# Second pass; handle the dictionaries.
for key, raw in context['cookiecutter'].items():
# Skip private type dicts
if key.startswith('_') and not key.startswith('__'):
continue
try:
if isinstance(raw, dict):
# We are dealing with a dict variable
val = render_variable(env, raw, cookiecutter_dict)
if not no_input:
val = read_user_dict(key, val)
cookiecutter_dict[key] = val
except UndefinedError as err:
msg = "Unable to render variable '{}'".format(key)
raise UndefinedVariableInTemplate(msg, err, context)
return cookiecutter_dict
|
unknown
|
codeparrot/codeparrot-clean
| ||
//// [tests/cases/compiler/callWithWrongNumberOfTypeArguments.ts] ////
//// [callWithWrongNumberOfTypeArguments.ts]
function f<T, U>() { }
f<number>();
f<number, string>();
f<number, string, number>();
//// [callWithWrongNumberOfTypeArguments.js]
"use strict";
function f() { }
f();
f();
f();
|
javascript
|
github
|
https://github.com/microsoft/TypeScript
|
tests/baselines/reference/callWithWrongNumberOfTypeArguments.js
|
from collections import namedtuple
from MySQLdb.constants import FIELD_TYPE
from django.db.backends.base.introspection import (
BaseDatabaseIntrospection, FieldInfo, TableInfo,
)
from django.utils.datastructures import OrderedSet
from django.utils.encoding import force_text
FieldInfo = namedtuple('FieldInfo', FieldInfo._fields + ('extra', 'default'))
InfoLine = namedtuple('InfoLine', 'col_name data_type max_len num_prec num_scale extra column_default')
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = {
FIELD_TYPE.BLOB: 'TextField',
FIELD_TYPE.CHAR: 'CharField',
FIELD_TYPE.DECIMAL: 'DecimalField',
FIELD_TYPE.NEWDECIMAL: 'DecimalField',
FIELD_TYPE.DATE: 'DateField',
FIELD_TYPE.DATETIME: 'DateTimeField',
FIELD_TYPE.DOUBLE: 'FloatField',
FIELD_TYPE.FLOAT: 'FloatField',
FIELD_TYPE.INT24: 'IntegerField',
FIELD_TYPE.LONG: 'IntegerField',
FIELD_TYPE.LONGLONG: 'BigIntegerField',
FIELD_TYPE.SHORT: 'SmallIntegerField',
FIELD_TYPE.STRING: 'CharField',
FIELD_TYPE.TIME: 'TimeField',
FIELD_TYPE.TIMESTAMP: 'DateTimeField',
FIELD_TYPE.TINY: 'IntegerField',
FIELD_TYPE.TINY_BLOB: 'TextField',
FIELD_TYPE.MEDIUM_BLOB: 'TextField',
FIELD_TYPE.LONG_BLOB: 'TextField',
FIELD_TYPE.VAR_STRING: 'CharField',
}
def get_field_type(self, data_type, description):
field_type = super(DatabaseIntrospection, self).get_field_type(data_type, description)
if field_type == 'IntegerField' and 'auto_increment' in description.extra:
return 'AutoField'
return field_type
def get_table_list(self, cursor):
"""
Returns a list of table and view names in the current database.
"""
cursor.execute("SHOW FULL TABLES")
return [TableInfo(row[0], {'BASE TABLE': 't', 'VIEW': 'v'}.get(row[1]))
for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"""
Returns a description of the table, with the DB-API cursor.description interface."
"""
# information_schema database gives more accurate results for some figures:
# - varchar length returned by cursor.description is an internal length,
# not visible length (#5725)
# - precision and scale (for decimal fields) (#5014)
# - auto_increment is not available in cursor.description
cursor.execute("""
SELECT column_name, data_type, character_maximum_length, numeric_precision,
numeric_scale, extra, column_default
FROM information_schema.columns
WHERE table_name = %s AND table_schema = DATABASE()""", [table_name])
field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
to_int = lambda i: int(i) if i is not None else i
fields = []
for line in cursor.description:
col_name = force_text(line[0])
fields.append(
FieldInfo(*((col_name,)
+ line[1:3]
+ (to_int(field_info[col_name].max_len) or line[3],
to_int(field_info[col_name].num_prec) or line[4],
to_int(field_info[col_name].num_scale) or line[5])
+ (line[6],)
+ (field_info[col_name].extra,)
+ (field_info[col_name].column_default,)))
)
return fields
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_name: (field_name_other_table, other_table)}
representing all relationships to the given table.
"""
constraints = self.get_key_columns(cursor, table_name)
relations = {}
for my_fieldname, other_table, other_field in constraints:
relations[my_fieldname] = (other_field, other_table)
return relations
def get_key_columns(self, cursor, table_name):
"""
Returns a list of (column_name, referenced_table_name, referenced_column_name) for all
key columns in given table.
"""
key_columns = []
cursor.execute("""
SELECT column_name, referenced_table_name, referenced_column_name
FROM information_schema.key_column_usage
WHERE table_name = %s
AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL""", [table_name])
key_columns.extend(cursor.fetchall())
return key_columns
def get_indexes(self, cursor, table_name):
cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
# Do a two-pass search for indexes: on first pass check which indexes
# are multicolumn, on second pass check which single-column indexes
# are present.
rows = list(cursor.fetchall())
multicol_indexes = set()
for row in rows:
if row[3] > 1:
multicol_indexes.add(row[2])
indexes = {}
for row in rows:
if row[2] in multicol_indexes:
continue
if row[4] not in indexes:
indexes[row[4]] = {'primary_key': False, 'unique': False}
# It's possible to have the unique and PK constraints in separate indexes.
if row[2] == 'PRIMARY':
indexes[row[4]]['primary_key'] = True
if not row[1]:
indexes[row[4]]['unique'] = True
return indexes
def get_storage_engine(self, cursor, table_name):
"""
Retrieves the storage engine for a given table. Returns the default
storage engine if the table doesn't exist.
"""
cursor.execute(
"SELECT engine "
"FROM information_schema.tables "
"WHERE table_name = %s", [table_name])
result = cursor.fetchone()
if not result:
return self.connection.features._mysql_storage_engine
return result[0]
def get_constraints(self, cursor, table_name):
"""
Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
"""
constraints = {}
# Get the actual constraint names and columns
name_query = """
SELECT kc.`constraint_name`, kc.`column_name`,
kc.`referenced_table_name`, kc.`referenced_column_name`
FROM information_schema.key_column_usage AS kc
WHERE
kc.table_schema = %s AND
kc.table_name = %s
"""
cursor.execute(name_query, [self.connection.settings_dict['NAME'], table_name])
for constraint, column, ref_table, ref_column in cursor.fetchall():
if constraint not in constraints:
constraints[constraint] = {
'columns': OrderedSet(),
'primary_key': False,
'unique': False,
'index': False,
'check': False,
'foreign_key': (ref_table, ref_column) if ref_column else None,
}
constraints[constraint]['columns'].add(column)
# Now get the constraint types
type_query = """
SELECT c.constraint_name, c.constraint_type
FROM information_schema.table_constraints AS c
WHERE
c.table_schema = %s AND
c.table_name = %s
"""
cursor.execute(type_query, [self.connection.settings_dict['NAME'], table_name])
for constraint, kind in cursor.fetchall():
if kind.lower() == "primary key":
constraints[constraint]['primary_key'] = True
constraints[constraint]['unique'] = True
elif kind.lower() == "unique":
constraints[constraint]['unique'] = True
# Now add in the indexes
cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]:
if index not in constraints:
constraints[index] = {
'columns': OrderedSet(),
'primary_key': False,
'unique': False,
'index': True,
'check': False,
'foreign_key': None,
}
constraints[index]['index'] = True
constraints[index]['columns'].add(column)
# Convert the sorted sets to lists
for constraint in constraints.values():
constraint['columns'] = list(constraint['columns'])
return constraints
|
unknown
|
codeparrot/codeparrot-clean
| ||
from nose.tools import * # flake8: noqa
from framework.auth.core import Auth
from website.models import Node
from website.util import permissions
from api.base.settings.defaults import API_BASE
from tests.base import ApiTestCase
from tests.factories import (
NodeFactory,
ProjectFactory,
RegistrationFactory,
AuthUserFactory,
WithdrawnRegistrationFactory,
ForkFactory
)
class TestRegistrationForksList(ApiTestCase):
def setUp(self):
super(TestRegistrationForksList, self).setUp()
self.user = AuthUserFactory()
self.private_project = ProjectFactory()
self.private_project.add_contributor(self.user, permissions=[permissions.READ, permissions.WRITE])
self.private_project.save()
self.component = NodeFactory(parent=self.private_project, creator=self.user)
self.pointer = ProjectFactory(creator=self.user)
self.private_project.add_pointer(self.pointer, auth=Auth(self.user), save=True)
self.private_registration = RegistrationFactory(project = self.private_project, creator=self.user)
self.private_fork = ForkFactory(project=self.private_registration, user=self.user)
self.private_registration_url = '/{}registrations/{}/forks/'.format(API_BASE, self.private_registration._id)
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_project.save()
self.public_component = NodeFactory(parent=self.public_project, creator=self.user, is_public=True)
self.public_registration = RegistrationFactory(project = self.public_project, creator=self.user, is_public=True)
self.public_registration_url = '/{}registrations/{}/forks/'.format(API_BASE, self.public_registration._id)
self.public_fork = ForkFactory(project=self.public_registration, user=self.user)
self.user_two = AuthUserFactory()
def test_can_access_public_registration_forks_list_when_unauthenticated(self):
res = self.app.get(self.public_registration_url)
assert_equal(len(res.json['data']), 0)
# Fork defaults to private
assert_equal(self.public_fork.is_public, False)
self.public_fork.is_public = True
self.public_fork.save()
res = self.app.get(self.public_registration_url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']), 1)
assert_equal(self.public_fork.is_public, True)
data = res.json['data'][0]
assert_equal(data['attributes']['title'], 'Fork of ' + self.public_registration.title)
assert_equal(data['id'], self.public_fork._id)
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
def test_can_access_public_registration_forks_list_authenticated_contributor(self):
res = self.app.get(self.public_registration_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(self.public_fork.is_public, False)
assert_equal(len(res.json['data']), 1)
data = res.json['data'][0]
assert_equal(data['attributes']['title'], 'Fork of ' + self.public_project.title)
assert_equal(data['id'], self.public_fork._id)
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
def test_can_access_public_registration_forks_list_authenticated_non_contributor(self):
res = self.app.get(self.public_registration_url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']), 0)
# Fork defaults to private
assert_equal(self.public_fork.is_public, False)
self.public_fork.is_public = True
self.public_fork.save()
res = self.app.get(self.public_registration_url)
assert_equal(len(res.json['data']), 1)
assert_equal(self.public_fork.is_public, True)
data = res.json['data'][0]
assert_equal(data['attributes']['title'], 'Fork of ' + self.public_project.title)
assert_equal(data['id'], self.public_fork._id)
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
def test_cannot_access_private_registration_forks_list_unauthenticated(self):
res = self.app.get(self.private_registration_url, expect_errors=True)
assert_equal(res.status_code, 401)
assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')
def test_authenticated_contributor_can_access_private_registration_forks_list(self):
res = self.app.get(self.private_registration_url + '?embed=children&embed=node_links&embed=logs&embed=contributors&embed=forked_from', auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']), 1)
data = res.json['data'][0]
assert_equal(data['attributes']['title'], 'Fork of ' + self.private_project.title)
assert_equal(data['id'], self.private_fork._id)
fork_contributors = data['embeds']['contributors']['data'][0]['embeds']['users']['data']
assert_equal(fork_contributors['attributes']['family_name'], self.user.family_name)
assert_equal(fork_contributors['id'], self.user._id)
forked_children = data['embeds']['children']['data'][0]
assert_equal(forked_children['id'], self.private_registration.nodes[0].forks[0]._id)
assert_equal(forked_children['attributes']['title'], self.component.title)
forked_node_links = data['embeds']['node_links']['data'][0]['embeds']['target_node']['data']
assert_equal(forked_node_links['id'], self.pointer._id)
assert_equal(forked_node_links['attributes']['title'], self.pointer.title)
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
expected_logs = [log.action for log in self.private_registration.logs]
expected_logs.append(self.private_registration.nodes[0].logs[0].action)
expected_logs.append('node_forked')
expected_logs.append('node_forked')
forked_logs = data['embeds']['logs']['data']
assert_equal(set(expected_logs), set(log['attributes']['action'] for log in forked_logs))
assert_equal(len(forked_logs), 6)
forked_from = data['embeds']['forked_from']['data']
assert_equal(forked_from['id'], self.private_registration._id)
def test_authenticated_non_contributor_cannot_access_private_registration_forks_list(self):
res = self.app.get(self.private_registration_url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')
class TestRegistrationForkCreate(ApiTestCase):
def setUp(self):
super(TestRegistrationForkCreate, self).setUp()
self.user = AuthUserFactory()
self.user_two = AuthUserFactory()
self.user_three = AuthUserFactory()
self.private_project = ProjectFactory(creator=self.user)
private_pointer = ProjectFactory(creator=self.user_two)
actual_pointer = self.private_project.add_pointer(private_pointer, auth=Auth(self.user_two), save=True)
self.private_registration = RegistrationFactory(creator=self.user, project=self.private_project)
self.fork_data = {
'data': {
'type': 'nodes'
}
}
self.fork_data_with_title = {
'data': {
'type': 'nodes',
'attributes':
{'title': 'My Forked Project'}
}
}
self.private_registration_url = '/{}registrations/{}/forks/'.format(API_BASE, self.private_registration._id)
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_registration = RegistrationFactory(creator=self.user, project=self.public_project, is_public=True)
self.public_registration_url = '/{}registrations/{}/forks/'.format(API_BASE, self.public_registration._id)
def tearDown(self):
super(TestRegistrationForkCreate, self).tearDown()
Node.remove()
def test_create_fork_from_public_registration_with_new_title(self):
res = self.app.post_json_api(self.public_registration_url, self.fork_data_with_title, auth=self.user.auth)
assert_equal(res.status_code, 201)
data = res.json['data']
assert_equal(data['id'], self.public_registration.forks[0]._id)
assert_equal(data['attributes']['title'], self.fork_data_with_title['data']['attributes']['title'])
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
def test_create_fork_from_private_registration_with_new_title(self):
res = self.app.post_json_api(self.private_registration_url, self.fork_data_with_title, auth=self.user.auth)
assert_equal(res.status_code, 201)
data = res.json['data']
assert_equal(data['id'], self.private_registration.forks[0]._id)
assert_equal(data['attributes']['title'], self.fork_data_with_title['data']['attributes']['title'])
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
def test_can_fork_public_registration_logged_in(self):
res = self.app.post_json_api(self.public_registration_url, self.fork_data, auth=self.user_two.auth)
assert_equal(res.status_code, 201)
data = res.json['data']
assert_equal(data['id'], self.public_registration.forks[0]._id)
assert_equal(data['attributes']['title'], 'Fork of ' + self.public_registration.title)
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
def test_cannot_fork_public_registration_logged_out(self):
res = self.app.post_json_api(self.public_registration_url, self.fork_data, expect_errors=True)
assert_equal(res.status_code, 401)
assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')
def test_can_fork_public_registration_logged_in_contributor(self):
res = self.app.post_json_api(self.public_registration_url, self.fork_data, auth=self.user.auth)
assert_equal(res.status_code, 201)
data = res.json['data']
assert_equal(data['id'], self.public_registration.forks[0]._id)
assert_equal(data['attributes']['title'], 'Fork of ' + self.public_registration.title)
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
def test_cannot_fork_private_registration_logged_out(self):
res = self.app.post_json_api(self.private_registration_url, self.fork_data, expect_errors=True)
assert_equal(res.status_code, 401)
assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')
def test_cannot_fork_private_registration_logged_in_non_contributor(self):
res = self.app.post_json_api(self.private_registration_url, self.fork_data, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')
def test_can_fork_private_registration_logged_in_contributor(self):
res = self.app.post_json_api(self.private_registration_url + '?embed=children&embed=node_links&embed=logs&embed=contributors&embed=forked_from', self.fork_data, auth=self.user.auth)
assert_equal(res.status_code, 201)
data = res.json['data']
assert_equal(data['attributes']['title'], 'Fork of ' + self.private_registration.title)
assert_equal(data['attributes']['registration'], False)
assert_equal(data['attributes']['fork'], True)
fork_contributors = data['embeds']['contributors']['data'][0]['embeds']['users']['data']
assert_equal(fork_contributors['attributes']['family_name'], self.user.family_name)
assert_equal(fork_contributors['id'], self.user._id)
forked_from = data['embeds']['forked_from']['data']
assert_equal(forked_from['id'], self.private_registration._id)
def test_fork_private_components_no_access(self):
url = self.public_registration_url + '?embed=children'
private_component = NodeFactory(parent=self.public_registration, creator=self.user_two, is_public=False)
res = self.app.post_json_api(url, self.fork_data, auth=self.user_three.auth)
assert_equal(res.status_code, 201)
# Private components that you do not have access to are not forked
assert_equal(res.json['data']['embeds']['children']['links']['meta']['total'], 0)
def test_fork_components_you_can_access(self):
url = self.private_registration_url + '?embed=children'
new_component = NodeFactory(parent=self.private_registration, creator=self.user)
res = self.app.post_json_api(url, self.fork_data, auth=self.user.auth)
assert_equal(res.status_code, 201)
assert_equal(res.json['data']['embeds']['children']['links']['meta']['total'], 1)
assert_equal(res.json['data']['embeds']['children']['data'][0]['id'], new_component.forks[0]._id)
def test_fork_private_node_links(self):
url = self.private_registration_url + '?embed=node_links'
# Node link is forked, but shows up as a private node link
res = self.app.post_json_api(url, self.fork_data, auth=self.user.auth)
assert_equal(res.json['data']['embeds']['node_links']['data'][0]['embeds']['target_node']['errors'][0]['detail'],
'You do not have permission to perform this action.')
assert_equal(res.json['data']['embeds']['node_links']['links']['meta']['total'], 1)
def test_fork_node_links_you_can_access(self):
pointer = ProjectFactory(creator=self.user)
self.private_project.add_pointer(pointer, auth=Auth(self.user), save=True)
new_registration = RegistrationFactory(project = self.private_project, creator=self.user)
url = '/{}registrations/{}/forks/'.format(API_BASE, new_registration._id) + '?embed=node_links'
res = self.app.post_json_api(url, self.fork_data, auth=self.user.auth)
assert_equal(res.json['data']['embeds']['node_links']['data'][1]['embeds']['target_node']['data']['id'], pointer._id)
assert_equal(res.json['data']['embeds']['node_links']['links']['meta']['total'], 2)
def test_cannot_fork_retractions(self):
retraction = WithdrawnRegistrationFactory(registration=self.private_registration, user=self.user)
url = '/{}registrations/{}/forks/'.format(API_BASE, self.private_registration._id) + '?embed=forked_from'
res = self.app.post_json_api(url, self.fork_data, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
|
unknown
|
codeparrot/codeparrot-clean
| ||
## Input
```javascript
function Component({value}) {
const object = {
get value() {
return value;
},
};
return <div>{object.value}</div>;
}
export const FIXTURE_ENTRYPOINT = {
fn: foo,
params: [{value: 0}],
sequentialRenders: [{value: 1}, {value: 2}],
};
```
## Error
```
Found 1 error:
Todo: (BuildHIR::lowerExpression) Handle get functions in ObjectExpression
error.todo-object-expression-get-syntax.ts:3:4
1 | function Component({value}) {
2 | const object = {
> 3 | get value() {
| ^^^^^^^^^^^^^
> 4 | return value;
| ^^^^^^^^^^^^^^^^^^^
> 5 | },
| ^^^^^^ (BuildHIR::lowerExpression) Handle get functions in ObjectExpression
6 | };
7 | return <div>{object.value}</div>;
8 | }
```
|
unknown
|
github
|
https://github.com/facebook/react
|
compiler/packages/babel-plugin-react-compiler/src/__tests__/fixtures/compiler/error.todo-object-expression-get-syntax.expect.md
|
#!/usr/bin/python3
import requests, json
import sys
import netifaces
import os
import os.path
import settings
from datetime import datetime
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), './config')))
from config_func import add_to_config, read_config, get_device_info_and_config
output = ""
# Test if config exists - create it if it doesnt
# (this way we don't have to create the config file manually)
if(os.path.isfile('config.txt')):
config = read_config()
output = output + os.linesep + 'WARNING: config.txt already exists'
else:
open('config.txt', 'a')
output = output + os.linesep + 'CREATION of config.txt'
# Get info to Register new device with AMS
eth0_mac = netifaces.ifaddresses('eth0')[netifaces.AF_LINK][0]['addr']
wlan0_mac = netifaces.ifaddresses('wlan0')[netifaces.AF_LINK][0]['addr']
try:
eth0_ip = netifaces.ifaddresses('eth0')[netifaces.AF_INET][0]['addr']
except:
eth0_ip = "0.0.0.0"
try:
wlan0_ip= netifaces.ifaddresses('wlan0')[netifaces.AF_INET][0]['addr']
except:
wlan0_ip = "0.0.0.0"
output = output + os.linesep + "Device/Network Info"
output = output + os.linesep + "ETH0 MAC: " + eth0_mac
output = output + os.linesep + "WLAN MAC: " + wlan0_mac
output = output + os.linesep + "ETH0 IP (LAN IP): " + eth0_ip
output = output + os.linesep + "WLAN IP: " + wlan0_ip
try:
r = requests.get(settings.API_SERVER + 'members/api/cam_api/mkdevice?format=json&LAN_MAC=' + eth0_mac + '&WLAN_MAC=' + wlan0_mac + '&lan_ip=' + eth0_ip + 'wlan_ip=' + wlan0_ip)
fp = open("register.txt", "w+")
fp.write(r.text)
fp.close()
except:
output = output + os.linesep + "mknewdevice failed"
data = json.loads(r.text)
try:
if data['errors']['Invalid_data'] == 'LAN_MAC WLAN_MAC combination must be unique.':
output = output + os.linesep + "Device already exist!"
else:
output = output + os.linesep + "Device created."
except:
output = output + os.linesep + "Device Created."
#LOG IP OF DEVICE.
msg = "lan_ip=" + eth0_ip + ":wlan_ip=" + wlan0_ip
r = requests.post(settings.API_SERVER + 'members/api/cam_api/addLog', data={'LAN_MAC': eth0_mac, 'WLAN_MAC': wlan0_mac, 'msg': msg})
res = r.text
x, id = res.split("device_id: ")
hostname = "ams" + id.rstrip("\n")
out = open("/home/pi/fireball_camera/host", "w+")
out.write(hostname)
out.close()
os.system("sudo cp /home/pi/fireball_camera/host /etc/hostname")
output = output + os.linesep + 'Hostname updated:'+ " ams" + id.rstrip("\n")
# Here we populate the config file with the info we got (and we need...)
add_to_config('lan_ip',eth0_ip)
add_to_config('device_id',id.rstrip("\n"))
add_to_config('hd',0)
add_to_config('wlan_ip',wlan0_ip)
add_to_config('wlan_mac',wlan0_mac)
add_to_config('lan_mac',eth0_mac)
i = datetime.now()
add_to_config('reg_date',i.strftime('%Y-%m-%d %H:%M:%S'))
p = read_config();
output = output + os.linesep + "Config file updated."
# Get Info from the API in case the cam already has info in the database
# update the config file accordingly
get_device_info_and_config()
print(output)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pooler
import time
from report import report_sxw
class lot_overview_all(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(lot_overview_all, self).__init__(cr, uid, name, context=context)
self.price_total = 0.0
self.grand_total = 0.0
self.localcontext.update({
'time': time,
'process':self.process,
'price_total': self._price_total,
'grand_total_price':self._grand_total,
})
def process(self,location_id):
location_obj = pooler.get_pool(self.cr.dbname).get('stock.location')
data = location_obj._product_get_all_report(self.cr,self.uid, [location_id])
data['location_name'] = location_obj.read(self.cr, self.uid, [location_id],['complete_name'])[0]['complete_name']
self.price_total = 0.0
self.price_total += data['total_price']
self.grand_total += data['total_price']
return [data]
def _price_total(self):
return self.price_total
def _grand_total(self):
return self.grand_total
report_sxw.report_sxw('report.lot.stock.overview_all', 'stock.location', 'addons/stock/report/lot_overview_all.rml', parser=lot_overview_all,header='internal')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
from __future__ import print_function
import sys
from time import sleep, time
from argparse import ArgumentParser
import os
import sqlite3
from datetime import datetime
import errno
import socket
from select import select
import traceback
from collections import deque, defaultdict, namedtuple
try:
from queue import Queue, Empty
except ImportError:
from Queue import Queue, Empty
from spidev import SpiDev
import RPi.GPIO as GPIO
from nrf24 import NRF24
import requests
import json
PIPES = ([0xe7, 0xe7, 0xe7, 0xe7, 0xe7], [0xc2, 0xc2, 0xc2, 0xc2, 0xc2])
CHANNEL = 0x20
class Button(object):
def __init__(self, pins):
self.pins = pins
self.states = {}
self.events = Queue()
for i, pin in enumerate(self.pins):
GPIO.setup(pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.add_event_detect(pin, GPIO.FALLING, callback=self.add_event,
bouncetime=500)
self.states[pin] = i
def add_event(self, channel):
self.events.put(self.states[channel])
class Relay(object):
def __init__(self, pins):
self.pins = pins
self.states = []
for pin in self.pins:
GPIO.setup(pin, GPIO.OUT, initial=GPIO.HIGH)
self.states.append(0)
def output(self, pin, state):
print("setting pin", pin, state and "on" or "off")
self.states[pin] = state
GPIO.output(self.pins[pin], not state) # These devices are active-low.
def state(self, pin):
return self.states[pin]
def cleanup(self):
pass # this will be done later: GPIO.cleanup()
class Temperature(object):
def __init__(self, major=0, minor=0):
self.spi = SpiDev()
self.spi.open(major, minor)
def rawread(self):
return self.spi.xfer2([0, 0])
def read(self):
return self.calc_temp(self.rawread())
@staticmethod
def calc_temp(buf):
return (((buf[0] << 8) | buf[1]) >> 3) * 0.0625
def cleanup(self):
self.spi.close()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.cleanup()
class Boiler(object):
def __init__(self, major, minor, ce_pin, irq_pin, temperature, relay, button):
self.relay = relay
self.temperature = temperature
self.button = button
self.radio = NRF24()
self.radio.begin(major, minor, ce_pin, irq_pin)
self.radio.setDataRate(self.radio.BR_250KBPS)
self.radio.setChannel(CHANNEL)
self.radio.setAutoAck(1)
self.radio.enableDynamicPayloads()
self.radio.printDetails()
self.radio.openWritingPipe(PIPES[0])
self.radio.openReadingPipe(1, PIPES[1])
def run(self):
while True:
try:
recv_buffer = self.recv(10)
print("recv_buffer", recv_buffer, "temp", self.temperature.read())
while True:
try:
event = self.button.events.get_nowait()
except Empty:
break
else:
recv_buffer.append(event) # pin = 0, query = 0, state = event
for byte in recv_buffer:
pin = byte >> 2
query = byte >> 1 & 1
state = byte & 1
print("pin", pin, "query", query, "state", state)
if query:
self.radio.write([self.relay.state(pin)])
else:
self.relay.output(pin, state)
start = time()
result = self.radio.write(self.temperature.rawread())
if not result:
print(datetime.now(), "Did not receive ACK from controller after", time() - start, "seconds:", self.radio.last_error)
arc = self.radio.read_register(self.radio.OBSERVE_TX)
if result and arc & 0xf != 0:
print("Last TX succeeded in", arc & 0xf, "retransmissions.")
sys.stdout.flush()
except Exception as exc:
print(exc)
def recv(self, timeout=None):
end = time() + timeout
pipe = [0]
self.radio.startListening()
try:
while not self.radio.available(pipe) and (timeout is None or time() < end):
sleep(10000 / 1e6)
if self.radio.available(pipe):
recv_buffer = []
self.radio.read(recv_buffer)
return recv_buffer
return []
finally:
self.radio.stopListening()
def cleanup(self):
self.radio.end()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.cleanup()
action = namedtuple('action', 'metric value pin state')
class Controller(object):
def __init__(self, major, minor, ce_pin, irq_pin, temperature, db, sock, relay):
self.temperature = temperature
self.db = db
self.sock = sock
self.relay = relay
self.actions = []
self.radio = NRF24()
self.radio.begin(major, minor, ce_pin, irq_pin)
self.radio.setDataRate(self.radio.BR_250KBPS)
self.radio.setChannel(CHANNEL)
self.radio.setAutoAck(1)
self.radio.enableDynamicPayloads()
self.radio.printDetails()
self.radio.openWritingPipe(PIPES[0])
self.radio.openReadingPipe(1, PIPES[1])
def run(self):
try:
tick = time()
while True:
recv_buffer = self.recv(10, rfds=[self.sock])
if recv_buffer and len(recv_buffer) == 2:
self.db.write(1, self.temperature.calc_temp(recv_buffer))
if tick < time():
tick = time() + 10
temp = self.temperature.read()
self.db.write(0, temp)
for i, (metric, value, pin, state) in enumerate(sorted(self.actions)):
if metric == 'temp' and temp >= value or \
metric == 'time' and time() >= value:
del self.actions[i]
result = self.control(pin, state)
print('\n', datetime.now(), "action matched:", metric, value, pin, state, "=>", result)
if not result:
print('action failed, will retry in 10s.')
self.actions.append(action(metric, value, pin, state))
break
try:
conn, _ = self.sock.accept()
except socket.error as exc:
if exc.errno != errno.EAGAIN:
raise
else:
try:
conn.settimeout(10)
recv_line = conn.recv(1024)
args = recv_line[:-1].split(None, 2)
if len(args) > 2:
state, pin, arg = args
pin = int(pin)
if state == 'boost':
args = arg.split()
if len(args) == 2:
metric, value = args
value = float(value)
if metric == 'temp' and temp >= value:
conn.sendall('temperature already above target!\n')
continue
if metric == 'time' and value <= 0:
conn.sendall('time delta must be positive!\n')
continue
if metric == 'time':
value += time()
self.actions.append(action(metric, value, pin, 'off'))
print('\n', datetime.now(), "added action", self.actions)
state = 'on' # continue to turn the boiler on
else:
state, pin = args
pin = int(pin)
if state.lower() in ('on', 'off'):
result = self.control(pin, state)
recv_buffer = '' # Need to clear buffer each time through the loop.
if state.lower() == 'query':
result, recv_buffer = self.state(pin)
elif state.lower() == 'queryactions':
result = True
recv_buffer = str(self.actions)
if isinstance(recv_buffer, list):
if not recv_buffer:
recv_buffer = ''
elif len(recv_buffer) == 1:
recv_buffer = recv_buffer[0]
conn.sendall('%s %s\n' % ('OK' if result else 'timed out', recv_buffer))
except Exception as exc:
print()
print('\n', datetime.now(), "Exception while processing:", repr(recv_line))
traceback.print_exc()
if self.radio.last_error:
print("Last radio error: %r" % self.radio.last_error)
try:
conn.sendall('invalid request: {!s}\n'.format(exc))
except socket.error:
pass
finally:
conn.close()
except KeyboardInterrupt:
print()
def state(self, pin):
if pin < 0:
return True, self.relay.state(-pin - 1)
else:
if self.control(pin, 'query'):
recv_buffer = self.recv(1)
return len(recv_buffer) > 0, recv_buffer
print("control returned not True: %r" % self.radio.last_error)
return False, []
def control(self, pin, state):
if pin < 0:
self.relay.output(-pin - 1, state.lower() == 'on')
return True
else:
cmd = pin << 2 | (state.lower() == 'query') << 1 | (state.lower() == 'on')
return self.radio.write(chr(cmd))
def recv(self, timeout=None, rfds=None):
if rfds is None:
rfds = []
end = time() + (timeout or 0.0)
pipe = [0]
self.radio.startListening()
try:
while not self.radio.available(pipe) and (timeout is None or time() < end):
#sleep(10000 / 1e6)
r, _, _ = select(rfds, [], [], 10000 / 1e6)
if r:
return []
if self.radio.available(pipe):
recv_buffer = []
self.radio.read(recv_buffer)
return recv_buffer
return []
finally:
self.radio.stopListening()
def cleanup(self):
self.radio.end()
self.db.close()
self.temperature.cleanup()
self.sock.close()
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.cleanup()
def tridian(mylist, sum=sum, sorted=sorted):
"""Optimised median function. Assumes delta is 21."""
return sum(sorted(mylist)[7:14]) / 7.
def tridian_slow(mylist):
"""Unoptimised median function."""
sorts = sorted(mylist)
tri = len(sorts) / 3
return sum(sorts[tri:2 * tri]) / float(tri)
class DBWriter(object):
def __init__(self):
self.buf = defaultdict(deque)
self.con = sqlite3.connect('/var/lib/autoboiler/autoboiler.sqlite3')
self.con.isolation_level = None
self.cur = self.con.cursor()
self.cur.execute('''CREATE TABLE IF NOT EXISTS temperature
(date datetime, sensor integer, temperature real)''')
self.cur.execute('''CREATE TABLE IF NOT EXISTS temperature_raw
(date datetime, sensor integer, temperature real)''')
self.cur.execute('''CREATE INDEX IF NOT EXISTS temperature_raw_sensor_date
ON temperature_raw(sensor, date)''')
self.cur.execute('''CREATE INDEX IF NOT EXISTS temperature_sensor_date
ON temperature(sensor, date)''')
def write(self, idx, value):
data = (datetime.now(), idx, value)
line = "%s %d %f" % data
if idx > 0:
print('\033[%dC' % len(line) * idx, end='')
print(line, '\r', end='')
sys.stdout.flush()
self.buf[idx].append(data)
try:
self.cur.execute('insert into temperature_raw values (?, ?, ?)',
data)
res = requests.post('http://emonpi/emoncms/input/post.json?node=1&apikey=74f0ab98df349fdfd17559978fb1d4b9',
data={'data': json.dumps({'T{}raw'.format(idx): value})})
if len(self.buf[idx]) >= 21:
# Take the middle-ish value to use for the time.
data = (self.buf[idx][10][0], idx, tridian([x[2] for x in self.buf[idx]]))
self.buf[idx].popleft()
self.cur.execute('insert into temperature values (?, ?, ?)',
data)
requests.post('http://emonpi/emoncms/input/post.json?node=1&apikey=74f0ab98df349fdfd17559978fb1d4b9',
data={'data': json.dumps({'T{}'.format(idx): value})})
except (requests.exceptions.ConnectionError, sqlite3.OperationalError) as exc:
print('\n', exc)
def close(self):
self.con.commit()
self.cur.close()
self.con.close()
def main():
GPIO.setmode(GPIO.BCM)
parser = ArgumentParser()
parser.add_argument('--mode', required=True, choices=['boiler', 'controller'])
parser.add_argument('--pidfile', '-p', default='/var/run/autoboiler.pid')
parser.add_argument('--sock', '-s', default='/var/lib/autoboiler/autoboiler.socket')
parser.add_argument('--output', '-o')
args = parser.parse_args()
if args.output:
f = open(args.output, 'a+')
if f:
sys.stdout = f
if args.pidfile:
with open(args.pidfile, 'w') as f:
print(os.getpid(), file=f)
try:
if args.mode == 'boiler':
with Boiler(0, 0, 25, 24, Temperature(0, 1), Relay([17, 18]), Button([23, 24])) as radio:
radio.run()
elif args.mode == 'controller':
try:
os.unlink(args.sock)
except OSError as exc:
if exc.errno != errno.ENOENT and os.path.exists(args.sock):
raise
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(args.sock)
os.chmod(args.sock, 0o777)
sock.setblocking(0)
sock.listen(1)
with Controller(0, 1, 25, 24, Temperature(0, 0), DBWriter(), sock, Relay([15, 14])) as radio:
radio.run()
finally:
GPIO.cleanup()
if args.pidfile:
os.unlink(args.pidfile)
if args.sock and args.mode == 'controller':
try:
os.unlink(args.sock)
except OSError as exc:
if exc.errno != errno.ENOENT and os.path.exists(args.sock):
raise
return 0
if __name__ == '__main__':
sys.exit(main())
# vim: set et sw=4 ts=4 sts=4 ai:
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe, os
from frappe.core.page.data_import_tool.data_import_tool import import_doc, export_json
def sync_fixtures(app=None):
"""Import, overwrite fixtures from `[app]/fixtures`"""
if app:
apps = [app]
else:
apps = frappe.get_installed_apps()
frappe.flags.in_fixtures = True
for app in apps:
if os.path.exists(frappe.get_app_path(app, "fixtures")):
fixture_files = sorted(os.listdir(frappe.get_app_path(app, "fixtures")))
for fname in fixture_files:
if fname.endswith(".json") or fname.endswith(".csv"):
import_doc(frappe.get_app_path(app, "fixtures", fname),
ignore_links=True, overwrite=True)
import_custom_scripts(app)
frappe.flags.in_fixtures = False
frappe.db.commit()
def import_custom_scripts(app):
"""Import custom scripts from `[app]/fixtures/custom_scripts`"""
if os.path.exists(frappe.get_app_path(app, "fixtures", "custom_scripts")):
for fname in os.listdir(frappe.get_app_path(app, "fixtures", "custom_scripts")):
if fname.endswith(".js"):
with open(frappe.get_app_path(app, "fixtures",
"custom_scripts") + os.path.sep + fname) as f:
doctype = fname.rsplit(".", 1)[0]
script = f.read()
if frappe.db.exists("Custom Script", {"dt": doctype}):
custom_script = frappe.get_doc("Custom Script", {"dt": doctype})
custom_script.script = script
custom_script.save()
else:
frappe.get_doc({
"doctype":"Custom Script",
"dt": doctype,
"script_type": "Client",
"script": script
}).insert()
def export_fixtures():
"""Export fixtures as JSON to `[app]/fixtures`"""
for app in frappe.get_installed_apps():
for fixture in frappe.get_hooks("fixtures", app_name=app):
filters = None
or_filters = None
if isinstance(fixture, dict):
filters = fixture.get("filters")
or_filters = fixture.get("or_filters")
fixture = fixture.get("doctype") or fixture.get("dt")
print("Exporting {0} app {1} filters {2}".format(fixture, app, (filters if filters else or_filters)))
if not os.path.exists(frappe.get_app_path(app, "fixtures")):
os.mkdir(frappe.get_app_path(app, "fixtures"))
export_json(fixture, frappe.get_app_path(app, "fixtures", frappe.scrub(fixture) + ".json"), filters=filters, or_filters=or_filters)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
#
# This file is part of Flask-SSO
# Copyright (C) 2014, 2015 CERN.
#
# Flask-SSO is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Flask-SSO is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Flask-SSO; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Test *Flask-SSO* integration."""
from __future__ import absolute_import
import sys
pyv = sys.version_info
if pyv[0] == 2 and pyv[1] < 7:
import unittest2 as unittest
else:
import unittest
from contextlib import contextmanager
from flask import request_started, request
from flask_login import current_user
from invenio.testsuite import make_test_suite, run_test_suite, InvenioTestCase
try:
from invenio.ext.sso import setup_app
has_sso = True
except ImportError:
has_sso = False
class TestSSO(InvenioTestCase):
"""Test extension itegration."""
@unittest.skipUnless(has_sso, 'Flask-SSO is not installed')
def test_login_handler(self):
"""Test login handler."""
self.app = setup_app(self.app)
@contextmanager
def request_environ_set(app, data):
def handler(sender, **kwargs):
for (k, v) in data.items():
request.environ[k] = v
with request_started.connected_to(handler, app):
yield
def run(data, expected_data):
with request_environ_set(self.app, data):
with self.app.test_client() as c:
c.get(self.app.config['SSO_LOGIN_URL'])
current_user['email'] == expected_data['email']
current_user['group'] == expected_data['group']
data = {
'ADFS_GROUP': ('CERN Registered;'
'project-invenio-devel;'
'cern-personnel'),
'ADFS_LOGIN': 'admin',
'ADFS_EMAIL': self.app.config['CFG_SITE_ADMIN_EMAIL'],
}
expected_data = {
'email': self.app.config['CFG_SITE_ADMIN_EMAIL'],
'group': [
'project-invenio-devel (Group)', 'CERN Registered (Group)',
'cern-personnel (Group)'
],
}
# FIXME mock user table
from invenio.ext.sqlalchemy import db
from invenio.modules.accounts.models import User
admin = User.query.get(1)
old_settings = admin.settings
admin.settings = {'login_method': 'SSO'}
db.session.merge(admin)
db.session.commit()
run(data, expected_data)
admin.settings = old_settings
db.session.merge(admin)
db.session.commit()
TEST_SUITE = make_test_suite(TestSSO)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.aop.aspectj;
import org.aspectj.lang.JoinPoint;
/**
* Aspect used as part of before advice binding tests and
* serves as base class for a number of more specialized test aspects.
*
* @author Adrian Colyer
* @author Chris Beams
*/
class AdviceBindingTestAspect {
protected AdviceBindingCollaborator collaborator;
public void setCollaborator(AdviceBindingCollaborator aCollaborator) {
this.collaborator = aCollaborator;
}
// "advice" methods
public void oneIntArg(int age) {
this.collaborator.oneIntArg(age);
}
public void oneObjectArg(Object bean) {
this.collaborator.oneObjectArg(bean);
}
public void oneIntAndOneObject(int x, Object o) {
this.collaborator.oneIntAndOneObject(x,o);
}
public void needsJoinPoint(JoinPoint tjp) {
this.collaborator.needsJoinPoint(tjp.getSignature().getName());
}
public void needsJoinPointStaticPart(JoinPoint.StaticPart tjpsp) {
this.collaborator.needsJoinPointStaticPart(tjpsp.getSignature().getName());
}
/**
* Collaborator interface that makes it easy to test this aspect is
* working as expected through mocking.
*/
public interface AdviceBindingCollaborator {
void oneIntArg(int x);
void oneObjectArg(Object o);
void oneIntAndOneObject(int x, Object o);
void needsJoinPoint(String s);
void needsJoinPointStaticPart(String s);
}
}
|
java
|
github
|
https://github.com/spring-projects/spring-framework
|
spring-context/src/test/java/org/springframework/aop/aspectj/AdviceBindingTestAspect.java
|
from __future__ import unicode_literals
import base64
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,
sanitized_Request,
urlencode_postdata,
)
class SharedIE(InfoExtractor):
IE_DESC = 'shared.sx and vivo.sx'
_VALID_URL = r'https?://(?:shared|vivo)\.sx/(?P<id>[\da-z]{10})'
_TESTS = [{
'url': 'http://shared.sx/0060718775',
'md5': '106fefed92a8a2adb8c98e6a0652f49b',
'info_dict': {
'id': '0060718775',
'ext': 'mp4',
'title': 'Bmp4',
'filesize': 1720110,
},
}, {
'url': 'http://vivo.sx/d7ddda0e78',
'md5': '15b3af41be0b4fe01f4df075c2678b2c',
'info_dict': {
'id': 'd7ddda0e78',
'ext': 'mp4',
'title': 'Chicken',
'filesize': 528031,
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
if '>File does not exist<' in webpage:
raise ExtractorError(
'Video %s does not exist' % video_id, expected=True)
download_form = self._hidden_inputs(webpage)
request = sanitized_Request(
url, urlencode_postdata(download_form))
request.add_header('Content-Type', 'application/x-www-form-urlencoded')
video_page = self._download_webpage(
request, video_id, 'Downloading video page')
video_url = self._html_search_regex(
r'data-url="([^"]+)"', video_page, 'video URL')
title = base64.b64decode(self._html_search_meta(
'full:title', webpage, 'title').encode('utf-8')).decode('utf-8')
filesize = int_or_none(self._html_search_meta(
'full:size', webpage, 'file size', fatal=False))
thumbnail = self._html_search_regex(
r'data-poster="([^"]+)"', video_page, 'thumbnail', default=None)
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'filesize': filesize,
'title': title,
'thumbnail': thumbnail,
}
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8; -*-
#
# @file translation.py
# @brief
# @author Medhi BOULNEMOUR (INRA UMR1095)
# @date 2017-01-03
# @copyright Copyright (c) 2016 INRA/CIRAD
# @license MIT (see LICENSE file)
# @details
"""
Install Country
"""
from django.core.management import BaseCommand
from geonames.appsettings import TRANSLATION_SOURCES, TRANSLATION_LANGUAGES, IAlternate, DATA_DIR
from geonames.models import AlternateName, Country, City, ContentType
from geonames.geonames import Geonames
from django.db import transaction
import progressbar
import resource
import sys
import os
from django.utils import timezone
from colorama import Fore, Style
class MemoryUsageWidget(progressbar.widgets.WidgetBase):
def __call__(self, progress, data):
if sys.platform != 'win32':
return '%s kB' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
return '?? kB'
class Command(BaseCommand):
help = """Download all files in GEONAMES_TRANSLATION_SOURCES if they were updated or if
--force option was used.
And Import translation data if they were downloaded."""
def __init__(self):
super(Command, self).__init__()
self.no_color = None
self.verbosity = None
self.export_file = None
self.progress_enabled = False
self.progress_widgets = None
self.progress = 0
self.force = False
self.export = False
self.delete = False
self.city_content_type_id = ContentType.objects.get_by_natural_key('geonames', 'city').id
self.country_content_type_id = ContentType.objects.get_by_natural_key('geonames', 'country').id
def add_arguments(self, parser):
# Named (optional) arguments
parser.add_argument(
'-f', '--force',
action='store_true',
dest='force',
default=False,
help='Download and import even if matching files are up-to-date',
)
parser.add_argument(
'-np', '--no-progress',
action='store_true',
dest='no-progress',
default=False,
help='Hide progress bar'
)
parser.add_argument(
'-e', '--export',
dest='export',
action='store',
default=False,
nargs='?',
help='Export files with matching data only. Absolute path to export file'
)
parser.add_argument(
'-d', '--delete',
dest='delete',
action='store_true',
default=False,
help='Delete local source files after importation'
)
def progress_init(self):
"""Initialize progress bar."""
if self.progress_enabled:
self.progress = 0
self.progress_widgets = [
Fore.LIGHTCYAN_EX,
'RAM used: ',
MemoryUsageWidget(),
' ',
progressbar.ETA(),
' Done: ',
progressbar.Percentage(),
' ',
progressbar.Bar(
marker='▓',
fill='░'
),
' ',
progressbar.AnimatedMarker(markers='⎺⎻⎼⎽⎼⎻'),
' ',
Style.RESET_ALL,
]
def progress_start(self, max_value):
"""Start progress bar."""
if self.progress_enabled:
self.progress = progressbar.ProgressBar(
max_value=max_value,
widgets=self.progress_widgets
).start()
def progress_update(self, value):
"""Update progress bar."""
if self.progress_enabled:
self.progress.update(value)
def progress_finish(self):
"""Finalize progress bar."""
if self.progress_enabled:
self.progress.finish()
@transaction.atomic
def handle(self, *args, **options):
self.translation_manager(args, options)
def translation_manager(self, args, options):
self.progress_enabled = not options.get('no-progress')
self.export = options.get('export')
self.force = options.get('force')
self.verbosity = options.get('verbosity')
self.no_color = options.get('no_color')
if self.export is None:
self.export = '%s/alt_name_light_%s.txt' % (DATA_DIR,
timezone.now().isoformat('_')
.replace(':', '-')
.replace('.', '-'))
self.delete = options.get('delete')
self.progress_init()
if self.export:
file_path = self.export
if os.path.exists(file_path):
os.remove(file_path)
else:
print('Creating %s' % file_path)
self.export_file = open(file_path, 'a')
for source in TRANSLATION_SOURCES:
geonames = Geonames(source, force=self.force)
if not geonames.need_run:
continue
i = 0
nb_lines = geonames.num_lines()
refresh_tx = int(nb_lines / 100) if (nb_lines / 100) >= 1 else 1
self.progress_start(nb_lines)
if not self.progress_enabled:
print('Importing...')
alt_names_to_check = []
for items in geonames.parse():
current_alt_name = self.translation_check(items)
i += 1
if i % refresh_tx == 0:
self.progress_update(i)
if current_alt_name:
alt_names_to_check.append(current_alt_name)
if len(alt_names_to_check) >= 500:
self.translation_bulk(alt_names_to_check)
alt_names_to_check = []
if alt_names_to_check:
self.translation_bulk(alt_names_to_check)
self.progress_finish()
if self.export:
self.export_file.close()
geonames.finish(delete=self.delete)
@staticmethod
def translation_check(items):
if items[IAlternate.language] not in TRANSLATION_LANGUAGES:
return False
size = len(items)
if size > IAlternate.isHistoric and items[IAlternate.isHistoric] == "1":
return False
if size > IAlternate.isColloquial and items[IAlternate.isColloquial] == "1":
return False
if size > IAlternate.isPreferred and items[IAlternate.isPreferred] == "1":
is_preferred = True
else:
is_preferred = False
if size > IAlternate.isShort and items[IAlternate.isShort] == "1":
is_short = True
else:
is_short = False
return {
'name_id': items[IAlternate.nameid],
'geoname_id': int(items[IAlternate.geonameid]),
'language': items[IAlternate.language],
'alternate_name': items[IAlternate.name],
'is_preferred_name': is_preferred,
'is_short_name': is_short
}
def translation_bulk(self, alt_names_to_check):
    """Persist a batch of validated alternate-name entries.

    Prefetches which geoname ids in the batch belong to known City or
    Country rows, then updates existing AlternateName records in place
    and collects brand-new ones for a single bulk_create at the end.
    Entries whose geoname id matches neither a City nor a Country are
    silently skipped.

    Args:
        alt_names_to_check: list of dicts produced by translation_check().
    """
    bulk = []
    # All geoname ids in this batch, used to prefetch candidate rows.
    geonameid_list = set(x.get('geoname_id') for x in alt_names_to_check)
    # geoname_id -> primary key maps for candidate cities / countries.
    suspect_cities = {x[1]: x[0] for x in City.objects.filter(geoname_id__in=geonameid_list).values_list('id', 'geoname_id')}
    suspect_countries = {x[1]: x[0] for x in Country.objects.filter(geoname_id__in=geonameid_list).values_list('id', 'geoname_id')}
    for alt_name in alt_names_to_check:
        # True -> the translation targets a City, False -> a Country.
        if alt_name.get('geoname_id') in suspect_cities:
            is_city_or_country = True
        elif alt_name.get('geoname_id') in suspect_countries:
            is_city_or_country = False
        else:
            continue
        result = AlternateName.objects.filter(alt_name_id=alt_name.get('name_id'))
        if result:
            # Existing record: overwrite every field and save immediately.
            result[0].content_type_id = self.city_content_type_id if is_city_or_country else self.country_content_type_id
            result[0].object_id = suspect_cities[alt_name.get('geoname_id')] if is_city_or_country else suspect_countries[alt_name.get('geoname_id')]
            result[0].language = alt_name.get('language')
            result[0].alternate_name = alt_name.get('alternate_name')
            result[0].is_preferred_name = alt_name.get('is_preferred_name')
            result[0].is_short_name = alt_name.get('is_short_name')
            result[0].save()
            translation = result[0]
        else:
            # New record: defer the insert to the bulk_create below.
            translation = AlternateName(
                alt_name_id=alt_name.get('name_id'),
                content_type_id=self.city_content_type_id if is_city_or_country else self.country_content_type_id,
                object_id=suspect_cities[alt_name.get('geoname_id')] if is_city_or_country else suspect_countries[alt_name.get('geoname_id')],
                language=alt_name.get('language'),
                alternate_name=alt_name.get('alternate_name'),
                is_preferred_name=alt_name.get('is_preferred_name'),
                is_short_name=alt_name.get('is_short_name')
            )
            bulk.append(translation)
        if self.export:
            # Re-serialize the entry in geonames column order for the export
            # file.  NOTE(review): list(range(6)) leaves integer placeholders;
            # this assumes IAlternate indexes 0-5 are all overwritten below,
            # otherwise '\t'.join(r) would raise TypeError -- confirm.
            r = list(range(6))
            r[IAlternate.nameid] = alt_name.get('name_id')
            r[IAlternate.geonameid] = str(alt_name.get('geoname_id'))
            r[IAlternate.language] = alt_name.get('language')
            r[IAlternate.name] = alt_name.get('alternate_name')
            r[IAlternate.isPreferred] = '1' if alt_name.get('is_preferred_name') else '0'
            r[IAlternate.isShort] = '1' if alt_name.get('is_short_name') else '0'
            self.export_file.write('\t'.join(r) + '\n')
        self.display_entry_message(translation, True if result else False)
    if bulk:
        AlternateName.objects.bulk_create(bulk)
        self.display_bulk_message(len(bulk))
def display_bulk_message(self, bulk_size):
    """Log the size of a completed bulk insert (silent when a progress
    bar is active or verbosity is off)."""
    if self.progress_enabled or not self.verbosity:
        return
    print('BULK INSERT!\tNb_entries:%s' % bulk_size)
def display_entry_message(self, alt_name, state):
    """Print one processed AlternateName, tagged UPDATED (state=True) or
    ADD (state=False); silent when a progress bar is active or verbosity
    is off. Colors the tag unless --no-color was requested."""
    if self.progress_enabled or not self.verbosity:
        return
    label = "UPDATED" if state else "ADD"
    if not self.no_color:
        color = Fore.BLUE if state else Fore.GREEN
        label = color + label + Style.RESET_ALL
    print('[%s] %s' % (label, alt_name))
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Functions for creating an Android.mk from already created dictionaries.
"""
import os
def write_group(f, name, items, append):
    """Write one makefile variable assignment listing all items.

    Emits nothing when items is empty. The variable name and operator go
    on the first line; every item follows on its own backslash-continued
    line, and the group ends with a blank line.

    Args:
      f: File open for writing (Android.mk)
      name: Name of the makefile variable (e.g. LOCAL_CFLAGS)
      items: list of strings to be passed to the variable.
      append: Whether to append to the variable (+=) or overwrite it (:=).
    """
    if not items:
        return
    op = '+=' if append else ':='
    lines = ['%s %s' % (name, op)] + list(items)
    f.write(' \\\n\t'.join(lines))
    f.write('\n\n')
def write_local_vars(f, var_dict, append, name):
    """Write every member of var_dict to the makefile as a variable group.

    Args:
      f: File open for writing (Android.mk)
      var_dict: VarsDict holding the unique values for one configuration.
      append: Whether to append to each makefile variable or overwrite it.
      name: If not None, a string to be appended to each key.
    """
    for key in var_dict.keys():
        items = var_dict[key]
        if key == 'KNOWN_TARGETS':
            # Bookkeeping only; not needed in the final makefile.
            continue
        if key == 'DEFINES':
            # DEFINES become -D flags appended to LOCAL_CFLAGS.
            target = 'LOCAL_CFLAGS'
            items = ['-D' + define for define in items]
            do_append = True
        elif key == 'LOCAL_CFLAGS':
            # Always append so flags defined earlier in the file survive.
            target = key
            do_append = True
        else:
            target = key
            do_append = append
        if name:
            target += '_' + name
        write_group(f, target, items, do_append)
AUTOGEN_WARNING = (
"""
###############################################################################
#
# THIS FILE IS AUTOGENERATED BY GYP_TO_ANDROID.PY. DO NOT EDIT.
#
###############################################################################
"""
)
DEBUGGING_HELP = (
"""
###############################################################################
#
# PROBLEMS WITH SKIA DEBUGGING?? READ THIS...
#
# The debug build results in changes to the Skia headers. This means that those
# using libskia must also be built with the debug version of the Skia headers.
# There are a few scenarios where this comes into play:
#
# (1) You're building debug code that depends on libskia.
# (a) If libskia is built in release, then define SK_RELEASE when building
# your sources.
# (b) If libskia is built with debugging (see step 2), then no changes are
# needed since your sources and libskia have been built with SK_DEBUG.
# (2) You're building libskia in debug mode.
# (a) RECOMMENDED: You can build the entire system in debug mode. Do this by
# updating your build/core/config.mk to include -DSK_DEBUG on the line
# that defines COMMON_GLOBAL_CFLAGS
# (b) You can update all the users of libskia to define SK_DEBUG when they are
# building their sources.
#
# NOTE: If neither SK_DEBUG or SK_RELEASE are defined then Skia checks NDEBUG to
# determine which build type to use.
###############################################################################
"""
)
SKIA_TOOLS = (
"""
#############################################################
# Build the skia tools
#
# benchmark (timings)
include $(BASE_PATH)/bench/Android.mk
# golden-master (fidelity / regression test)
include $(BASE_PATH)/gm/Android.mk
# unit-tests
include $(BASE_PATH)/tests/Android.mk
# diamond-master (one test to rule them all)
include $(BASE_PATH)/dm/Android.mk
"""
)
class VarsDictData(object):
    """A VarsDict bundled with a name and an optional makefile condition."""

    def __init__(self, vars_dict, name, condition=None):
        """Create a new VarsDictData.

        Args:
          vars_dict: A VarsDict. Can be accessed via self.vars_dict.
          name: Name associated with the VarsDict. Can be accessed via
            self.name.
          condition: Optional string representing a condition. If not None,
            used to create a conditional inside the makefile.
        """
        self.vars_dict = vars_dict
        self.name = name
        self.condition = condition
def write_local_path(f):
    """Write the LOCAL_PATH line to the makefile.

    Args:
      f: File open for writing.
    """
    line = 'LOCAL_PATH:= $(call my-dir)\n'
    f.write(line)
def write_clear_vars(f):
    """Write the CLEAR_VARS include to the makefile.

    Args:
      f: File open for writing.
    """
    line = 'include $(CLEAR_VARS)\n'
    f.write(line)
def write_include_stlport(f):
    """Write the stlport include line to the makefile.

    Args:
      f: File open for writing.
    """
    line = 'include external/stlport/libstlport.mk\n'
    f.write(line)
def write_android_mk(target_dir, common, deviations_from_common):
    """Given all the variables, write the final make file.

    Args:
      target_dir: The full path to the directory to write Android.mk, or None
        to use the current working directory.
      common: VarsDict holding variables definitions common to all
        configurations.
      deviations_from_common: List of VarsDictData, one for each possible
        configuration. VarsDictData.name will be appended to each key before
        writing it to the makefile. VarsDictData.condition, if not None, will
        be written to the makefile as a condition to determine whether to
        include VarsDictData.vars_dict.
    """
    target_file = 'Android.mk'
    if target_dir:
        target_file = os.path.join(target_dir, target_file)
    with open(target_file, 'w') as f:
        f.write(AUTOGEN_WARNING)
        f.write('BASE_PATH := $(call my-dir)\n')
        write_local_path(f)
        f.write(DEBUGGING_HELP)
        write_clear_vars(f)
        f.write('LOCAL_ARM_MODE := thumb\n')

        # need a flag to tell the C side when we're on devices with large memory
        # budgets (i.e. larger than the low-end devices that initially shipped)
        # On arm, only define the flag if it has VFP. For all other architectures,
        # always define the flag.
        f.write('ifeq ($(TARGET_ARCH),arm)\n')
        f.write('\tifeq ($(ARCH_ARM_HAVE_VFP),true)\n')
        f.write('\t\tLOCAL_CFLAGS += -DANDROID_LARGE_MEMORY_DEVICE\n')
        f.write('\tendif\n')
        f.write('else\n')
        f.write('\tLOCAL_CFLAGS += -DANDROID_LARGE_MEMORY_DEVICE\n')
        f.write('endif\n\n')

        f.write('# used for testing\n')
        f.write('#LOCAL_CFLAGS += -g -O0\n\n')

        f.write('ifeq ($(NO_FALLBACK_FONT),true)\n')
        f.write('\tLOCAL_CFLAGS += -DNO_FALLBACK_FONT\n')
        f.write('endif\n\n')

        # Variables common to every configuration are written first (overwrite).
        write_local_vars(f, common, False, None)

        # Per-configuration variables append, optionally wrapped in an ifeq
        # guard when the configuration declares a condition.
        for data in deviations_from_common:
            if data.condition:
                f.write('ifeq ($(%s), true)\n' % data.condition)
            write_local_vars(f, data.vars_dict, True, data.name)
            if data.condition:
                f.write('endif\n\n')

        write_include_stlport(f)
        f.write('include $(BUILD_SHARED_LIBRARY)\n')
        f.write(SKIA_TOOLS)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.util.Map;
/**
 * Formats the pieces of an outbound HTTP request: the header map and the
 * request body.
 */
public interface HttpRequestFormatter {

    /** Returns the HTTP headers to send, as a header-name to value map. */
    Map<String, String> formatHeaders();

    /** Returns the formatted request body. */
    String formatBody();
}
|
java
|
github
|
https://github.com/apache/kafka
|
clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/secured/HttpRequestFormatter.java
|
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Alexander Akait @alexander-akait
*/
"use strict";
const { CSS_TYPE } = require("../ModuleSourceTypeConstants");
const makeSerializable = require("../util/makeSerializable");
const CssIcssExportDependency = require("./CssIcssExportDependency");
const NullDependency = require("./NullDependency");
/** @typedef {import("webpack-sources").ReplaceSource} ReplaceSource */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../Dependency").ExportsSpec} ExportsSpec */
/** @typedef {import("../Dependency").ReferencedExports} ReferencedExports */
/** @typedef {import("../Dependency").UpdateHashContext} UpdateHashContext */
/** @typedef {import("../DependencyTemplate").CssDependencyTemplateContext} DependencyTemplateContext */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @typedef {import("../css/CssParser").Range} Range */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectDeserializerContext} ObjectDeserializerContext */
/** @typedef {import("../serialization/ObjectMiddleware").ObjectSerializerContext} ObjectSerializerContext */
/** @typedef {import("../util/Hash")} Hash */
/** @typedef {import("../util/runtime").RuntimeSpec} RuntimeSpec */
// Dependency representing one ICSS symbol occurrence in a CSS module; the
// template below rewrites the source range with the resolved symbol value.
class CssIcssSymbolDependency extends NullDependency {
	/**
	 * @param {string} name name
	 * @param {string} symbol symbol
	 * @param {Range} range range
	 * @param {boolean=} isReference true when is reference, otherwise false
	 */
	constructor(name, symbol, range, isReference) {
		super();
		this.name = name;
		this.symbol = symbol;
		this.range = range;
		this.isReference = isReference;
		// Lazily computed cache for updateHash.
		/** @type {undefined | string} */
		this._hashUpdate = undefined;
	}

	get type() {
		return "css symbol identifier";
	}

	/**
	 * Update the hash
	 * @param {Hash} hash hash to be updated
	 * @param {UpdateHashContext} context context
	 * @returns {void}
	 */
	updateHash(hash, context) {
		// NOTE(review): `this.value` is never assigned anywhere in this class,
		// so it interpolates as "undefined" below -- `this.symbol` may have
		// been intended. Confirm before changing: this string feeds module
		// hashing and a change would invalidate caches.
		if (this._hashUpdate === undefined) {
			this._hashUpdate = `${this.range}${this.name}${this.value}`;
		}
		hash.update(this._hashUpdate);
	}

	/**
	 * Returns list of exports referenced by this dependency
	 * @param {ModuleGraph} moduleGraph module graph
	 * @param {RuntimeSpec} runtime the runtime for which the module is analysed
	 * @returns {ReferencedExports} referenced exports
	 */
	getReferencedExports(moduleGraph, runtime) {
		// Exactly one export is referenced: the symbol itself.
		return [[this.symbol]];
	}

	/**
	 * @param {ObjectSerializerContext} context context
	 */
	serialize(context) {
		const { write } = context;
		write(this.name);
		write(this.symbol);
		// NOTE(review): `this.value` is undefined (see updateHash); it is
		// written and read back symmetrically, so round-tripping is safe.
		write(this.value);
		write(this.range);
		write(this.isReference);
		super.serialize(context);
	}

	/**
	 * @param {ObjectDeserializerContext} context context
	 */
	deserialize(context) {
		const { read } = context;
		// Must mirror serialize() field-for-field, in the same order.
		this.name = read();
		this.symbol = read();
		this.value = read();
		this.range = read();
		this.isReference = read();
		super.deserialize(context);
	}
}
// Code-generation template: replaces the dependency's source range with the
// resolved symbol value when emitting CSS.
CssIcssSymbolDependency.Template = class CssIcssSymbolDependencyTemplate extends (
	NullDependency.Template
) {
	/**
	 * @param {Dependency} dependency the dependency for which the template should be applied
	 * @param {ReplaceSource} source the current replace source which can be modified
	 * @param {DependencyTemplateContext} templateContext the context object
	 * @returns {void}
	 */
	apply(dependency, source, templateContext) {
		// Only rewrite when generating actual CSS output.
		if (templateContext.type === CSS_TYPE) {
			const dep = /** @type {CssIcssSymbolDependency} */ (dependency);
			// References are resolved through the ICSS export table; plain
			// occurrences use the symbol as-is.
			/** @type {string | undefined} */
			const value = dep.isReference
				? CssIcssExportDependency.Template.findReference(
						dep.symbol,
						templateContext
					)
				: dep.symbol;
			// An unresolved reference leaves the source untouched.
			if (!value) {
				return;
			}
			// NOTE(review): the -1 suggests dep.range's end is exclusive while
			// ReplaceSource.replace takes an inclusive end -- confirm.
			source.replace(dep.range[0], dep.range[1] - 1, value);
		}
	}
};
// Register the class for webpack's object serialization (persistent caching).
makeSerializable(
	CssIcssSymbolDependency,
	"webpack/lib/dependencies/CssIcssSymbolDependency"
);

module.exports = CssIcssSymbolDependency;
|
javascript
|
github
|
https://github.com/webpack/webpack
|
lib/dependencies/CssIcssSymbolDependency.js
|
from coalib.parsing.StringProcessing import unescaped_split
from coalib.tests.parsing.StringProcessing.StringProcessingTestBase import (
StringProcessingTestBase)
class UnescapedSplitTest(StringProcessingTestBase):
    """Tests for unescaped_split(), which splits a string on a pattern only
    at occurrences that are not backslash-escaped."""

    bs = StringProcessingTestBase.bs

    # Split on a single-quote pattern; one expected result list per entry in
    # self.test_strings.
    test_basic_pattern = r"'"
    test_basic_expected_results = [
        [r"out1 ", r"escaped-escape: \\ ", r" out2"],
        [r"out1 ", r"escaped-quote: \' ", r" out2"],
        [r"out1 ", r"escaped-anything: \X ", r" out2"],
        [r"out1 ", r"two escaped escapes: \\\\ ", r" out2"],
        [r"out1 ", r"escaped-quote at end: \'", r" out2"],
        [r"out1 ", r"escaped-escape at end: " + 2 * bs, r" out2"],
        [r"out1 ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \' ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \\\' ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \\ ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 \\\\ ", r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 " + 2 * bs, r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 " + 4 * bs, r"str1", r" out2 ", r"str2", r" out2"],
        [r"out1 ", r"str1", r"", r"str2", r"", r"str3", r" out2"],
        [r""],
        [r"out1 out2 out3"],
        [bs],
        [2 * bs]]

    # Test the basic unescaped_split() functionality.
    def test_basic(self):
        split_pattern = self.test_basic_pattern
        expected_results = self.test_basic_expected_results

        self.assertResultsEqual(
            unescaped_split,
            {(split_pattern, test_string, 0, False, use_regex): result
             for test_string, result in zip(self.test_strings,
                                            expected_results)
             for use_regex in [True, False]},
            list)

    # Test the unescaped_split() function while varying the max_split
    # parameter.
    def test_max_split(self):
        split_pattern = self.test_basic_pattern
        expected_master_results = self.test_basic_expected_results

        for max_split in [1, 2, 3, 4, 5, 6, 7, 8, 9, 112]:
            # Truncate the master results at max_split entries.
            expected_results = [
                elem[0: max_split] for elem in expected_master_results]
            for res, master in zip(expected_results, expected_master_results):
                if max_split < len(master):
                    # max_split is less the length of our master result list,
                    # need to append the rest as a joined string.
                    res.append(str.join(split_pattern, master[max_split:]))

            self.assertResultsEqual(
                unescaped_split,
                {(split_pattern,
                  test_string,
                  max_split,
                  False,
                  use_regex): result
                 for test_string, result in zip(self.test_strings,
                                                expected_results)
                 for use_regex in [True, False]},
                list)

    # Test the unescaped_split() function with different regex patterns.
    def test_regex_pattern(self):
        expected_results = [
            [r"", r"", r"cba###\\13q4ujsabbc\+'**'ac###.#.####-ba"],
            [r"", r"c", r"ccba###\\13q4ujs", r"bc\+'**'ac###.#.####-ba"],
            [r"", r"c", r"ccba###\\13q4ujs", r"bc\+'**'", r"###.#.####-ba"],
            [r"abcabccba###", r"\13q4ujsabbc", r"+'**'ac###.#.####-ba"],
            [r"abcabccba", r"\\13q4ujsabbc\+'**'ac", r".", r".", r"-ba"],
            [r"", r"", r"c", r"", r"cc", r"", r"", r"", r"\13q4ujs", r"", r"",
             r"c\+'**'", r"c", r"", r"", r"", r"", r"-", r"", r""],
            [r"", r"cba###\\13q4ujs", r"\+'**'", r"###.#.####-ba"],
            [r"abcabccba###" + 2 * self.bs,
             r"3q4ujsabbc\+'**'ac###.#.####-ba"]]

        self.assertResultsEqual(
            unescaped_split,
            {(pattern, self.multi_pattern_test_string, 0, False, True): result
             for pattern, result in zip(self.multi_patterns,
                                        expected_results)},
            list)

    # Test the unescaped_split() function for its remove_empty_matches feature.
    def test_auto_trim(self):
        expected_results = [
            [],
            [2 * self.bs, r"\\\\\;\\#", r"\\\'", r"\;\\\\", r"+ios"],
            [r"1", r"2", r"3", r"4", r"5", r"6"],
            [r"1", r"2", r"3", r"4", r"5", r"6", r"7"],
            [],
            [r"Hello world"],
            [r"\;"],
            [2 * self.bs],
            [r"abc", r"a", r"asc"]]

        self.assertResultsEqual(
            unescaped_split,
            {(self.auto_trim_test_pattern,
              test_string,
              0,
              True,
              use_regex): result
             for test_string, result in zip(self.auto_trim_test_strings,
                                            expected_results)
             for use_regex in [True, False]},
            list)

    # Test the unescaped_split() function with regexes disabled.
    def test_disabled_regex(self):
        # With regex disabled the pattern is literal text that never occurs,
        # so every string survives unsplit.
        expected_results = [[x] for x in self.test_strings]

        self.assertResultsEqual(
            unescaped_split,
            {(r"'()", test_string, 0, False, False): result
             for test_string, result in zip(self.test_strings,
                                            expected_results)},
            list)
|
unknown
|
codeparrot/codeparrot-clean
| ||
import {useHook} from 'shared-runtime';
// Compiler test fixture: `o` is mutated after the JSX for `x` is created and
// a hook call is interleaved, so scopes whose dependencies would invalidate
// are presumably expected to be pruned -- verify against the fixture snapshot.
function Component(props) {
  const o = {};
  const x = <div>{props.value}</div>; // create within the range of x to group with x
  useHook(); // intersperse a hook call to prevent memoization of x
  o.value = props.value;
  const y = <div>{x}</div>;
  return <div>{y}</div>;
}

export const FIXTURE_ENTRYPOINT = {
  fn: Component,
  params: [{value: 'sathya'}],
};
|
javascript
|
github
|
https://github.com/facebook/react
|
compiler/packages/babel-plugin-react-compiler/src/__tests__/fixtures/compiler/prune-scopes-whose-deps-invalidate-jsx.js
|
#Copyright 2007-2009 WebDriver committers
#Copyright 2007-2009 Google Inc.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""
The Alert implementation.
"""
from selenium.webdriver.remote.command import Command
class Alert(object):
    """
    Allows interaction with alert prompts.

    Use this class to interact with alert prompts. It contains methods for
    dismissing, accepting, inputting, and getting text from alert prompts.

    Accepting / Dismissing alert prompts::

        Alert(driver).accept()
        Alert(driver).dismiss()

    Inputting a value into an alert prompt::

        name_prompt = Alert(driver)
        name_prompt.send_keys("William Shakespeare")
        name_prompt.accept()

    Reading the text of a prompt for verification::

        alert_text = Alert(driver).text
        self.assertEqual("Do you wish to quit?", alert_text)
    """

    def __init__(self, driver):
        """
        Creates a new Alert.

        :Args:
         - driver: The WebDriver instance which performs user actions.
        """
        self.driver = driver

    @property
    def text(self):
        """
        Gets the text of the Alert.
        """
        return self.driver.execute(Command.GET_ALERT_TEXT)["value"]

    def dismiss(self):
        """
        Dismisses the alert available.
        """
        self.driver.execute(Command.DISMISS_ALERT)

    def accept(self):
        """
        Accepts the alert available.

        Usage::

            Alert(driver).accept()  # Confirm an alert dialog.
        """
        self.driver.execute(Command.ACCEPT_ALERT)

    def send_keys(self, keysToSend):
        """
        Send Keys to the Alert.

        :Args:
         - keysToSend: The text to be sent to the Alert.
        """
        self.driver.execute(Command.SET_ALERT_VALUE, {'text': keysToSend})
|
unknown
|
codeparrot/codeparrot-clean
| ||
{
"kind": "Dashboard",
"apiVersion": "dashboard.grafana.app/v1beta1",
"metadata": {
"name": "v0alpha1.rows-to-fields.v42"
},
"spec": {
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"links": [],
"panels": [
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "left",
"cellOptions": {
"type": "auto"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 5,
"w": 12,
"x": 0,
"y": 0
},
"id": 8,
"options": {
"showHeader": true
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"panelId": 2,
"refId": "A"
}
],
"title": "Raw data",
"type": "table"
},
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "left",
"cellOptions": {
"type": "auto"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "Value"
},
"properties": [
{
"id": "custom.width",
"value": 82
}
]
},
{
"matcher": {
"id": "byName",
"options": "Unit"
},
"properties": [
{
"id": "custom.width",
"value": 108
}
]
}
]
},
"gridPos": {
"h": 5,
"w": 12,
"x": 12,
"y": 0
},
"id": 7,
"options": {
"showHeader": true,
"sortBy": []
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"panelId": 3,
"refId": "A"
}
],
"title": "Raw data",
"type": "table"
},
{
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 12,
"x": 0,
"y": 5
},
"id": 2,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"csvContent": "Name,Value,Unit,Color\nTemperature,10,degree,green\nPressure,100,bar,blue\nSpeed,30,km/h,red",
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"scenarioId": "csv_content"
}
],
"title": "Unit and color from data",
"transformations": [
{
"id": "rowsToFields",
"options": {}
}
],
"type": "stat"
},
{
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 12,
"x": 12,
"y": 5
},
"id": 3,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": true,
"showThresholdMarkers": true,
"text": {}
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"csvContent": "Name,Value,Unit,min,max, threshold1\nTemperature,10,degree,0,50,30\nPressure,100,Pa,0,300,200\nSpeed,30,km/h,0,150,110",
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"scenarioId": "csv_content"
}
],
"title": "Min, Max \u0026 Thresholds from data",
"transformations": [
{
"id": "rowsToFields",
"options": {}
}
],
"type": "gauge"
},
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "left",
"cellOptions": {
"type": "auto"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 5,
"w": 12,
"x": 0,
"y": 12
},
"id": 10,
"options": {
"showHeader": true
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"panelId": 9,
"refId": "A"
}
],
"title": "Raw data",
"type": "table"
},
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "left",
"cellOptions": {
"type": "auto"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "Value"
},
"properties": [
{
"id": "custom.width",
"value": 82
}
]
},
{
"matcher": {
"id": "byName",
"options": "Unit"
},
"properties": [
{
"id": "custom.width",
"value": 108
}
]
}
]
},
"gridPos": {
"h": 5,
"w": 12,
"x": 12,
"y": 12
},
"id": 12,
"options": {
"showHeader": true,
"sortBy": []
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"datasource": {
"type": "datasource",
"uid": "-- Dashboard --"
},
"panelId": 11,
"refId": "A"
}
],
"title": "Raw data (Custom mapping)",
"type": "table"
},
{
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "continuous-GrYlRd"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 12,
"x": 0,
"y": 17
},
"id": 9,
"options": {
"displayMode": "gradient",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showUnfilled": true,
"text": {}
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"csvContent": "Name,Value,Unit,Min,Max\nTemperature,20,degree,0,50\nPressure,150,Pa,0,300\nSpeed,100,km/h,0,110",
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"scenarioId": "csv_content"
}
],
"title": "Min max from data",
"transformations": [
{
"id": "rowsToFields",
"options": {}
}
],
"type": "bargauge"
},
{
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"min": 0,
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 12,
"x": 12,
"y": 17
},
"id": 11,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": true,
"showThresholdMarkers": true,
"text": {}
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"csvContent": "Name,Value,Type,Quota, Warning\nTemperature,25,degree,50,30\nPressure,100,Pa,300,200\nSpeed,30,km/h,150,130",
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"scenarioId": "csv_content"
}
],
"title": "Custom mapping",
"transformations": [
{
"id": "rowsToFields",
"options": {
"mappings": [
{
"configProperty": "unit",
"fieldName": "Type",
"handlerKey": "unit"
},
{
"configProperty": "max",
"fieldName": "Quota",
"handlerKey": "max"
},
{
"configProperty": "threshold1",
"fieldName": "Warning",
"handlerKey": "threshold1"
}
]
}
}
],
"type": "gauge"
},
{
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 7,
"w": 12,
"x": 0,
"y": 24
},
"id": 13,
"options": {
"colorMode": "value",
"graphMode": "none",
"justifyMode": "auto",
"orientation": "horizontal",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "8.1.0-pre",
"targets": [
{
"csvContent": "Name, City, Country, Value\nSensorA, Stockholm, Sweden, 20\nSensorB, London, England, 50\nSensorC, New York, USA,100",
"datasource": {
"apiVersion": "v1",
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"scenarioId": "csv_content"
}
],
"title": "Extra string fields to labels",
"transformations": [
{
"id": "rowsToFields",
"options": {}
}
],
"type": "stat"
}
],
"refresh": "",
"schemaVersion": 42,
"tags": [
"gdev",
"transform"
],
"templating": {
"list": []
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Transforms - Rows to fields",
"uid": "PMtIInink",
"weekStart": ""
},
"status": {
"conversion": {
"failed": false,
"storedVersion": "v0alpha1"
}
}
}
|
json
|
github
|
https://github.com/grafana/grafana
|
apps/dashboard/pkg/migration/conversion/testdata/output/migrated_dev_dashboards/transforms/v0alpha1.rows-to-fields.v42.v1beta1.json
|
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#ifndef OPENCV_HAL_INTRIN_LSX_HPP
#define OPENCV_HAL_INTRIN_LSX_HPP
#include <lsxintrin.h>
#define CV_SIMD128 1
#define CV_SIMD128_64F 1
#define CV_SIMD128_FP16 0
namespace cv
{
//! @cond IGNORED
CV_CPU_OPTIMIZATION_HAL_NAMESPACE_BEGIN
/////////// Utils ////////
// Lane-construction helpers.  The "setr" variants take lanes in memory
// (low-to-high) order, the "set" variants in reversed (high-to-low) order,
// and the "setall" variants broadcast one scalar to every lane.

inline __m128i _v128_setr_b(char v0, char v1, char v2, char v3, char v4, char v5, char v6,
        char v7, char v8, char v9, char v10, char v11, char v12, char v13, char v14, char v15)
{
    return (__m128i)v16i8{ v0, v1, v2, v3, v4, v5, v6, v7,
                           v8, v9, v10, v11, v12, v13, v14, v15 };
}

inline __m128i _v128_set_b(char v0, char v1, char v2, char v3, char v4, char v5, char v6,
        char v7, char v8, char v9, char v10, char v11, char v12, char v13, char v14, char v15)
{
    return (__m128i)v16i8{ v15, v14, v13, v12, v11, v10, v9, v8,
                           v7, v6, v5, v4, v3, v2, v1, v0 };
}

inline __m128i _v128_setr_h(short v0, short v1, short v2, short v3, short v4, short v5,
        short v6, short v7)
{
    return (__m128i)v8i16{ v0, v1, v2, v3, v4, v5, v6, v7 };
}

inline __m128i _v128_setr_w(int v0, int v1, int v2, int v3)
{
    return (__m128i)v4i32{ v0, v1, v2, v3 };
}

inline __m128i _v128_set_w(int v0, int v1, int v2, int v3)
{
    return (__m128i)v4i32{ v3, v2, v1, v0 };
}

// Broadcast one 32-bit integer to all four lanes via the replicate intrinsic.
inline __m128i _v128_setall_w(int v0)
{
    return __lsx_vreplgr2vr_w(v0);
}

inline __m128i _v128_setr_d(int64 v0, int64 v1)
{
    return (__m128i)v2i64{ v0, v1 };
}

inline __m128i _v128_set_d(int64 v0, int64 v1)
{
    return (__m128i)v2i64{ v1, v0 };
}

inline __m128 _v128_setr_ps(float v0, float v1, float v2, float v3)
{
    return (__m128)v4f32{ v0, v1, v2, v3 };
}

inline __m128 _v128_setall_ps(float v0)
{
    return (__m128)v4f32{ v0, v0, v0, v0 };
}

inline __m128d _v128_setr_pd(double v0, double v1)
{
    return (__m128d)v2f64{ v0, v1 };
}

inline __m128d _v128_setall_pd(double v0)
{
    return (__m128d)v2f64{ v0, v0 };
}

// Saturating narrowing packs built on __lsx_vssrarni_* with a shift of 0.
// NOTE(review): the (b, a) operand order suggests `a` fills the low half of
// the narrowed result -- confirm against the LoongArch LSX intrinsics
// reference before relying on lane placement.
inline __m128i _lsx_packus_h(const __m128i& a, const __m128i& b)
{
    return __lsx_vssrarni_bu_h(b, a, 0);
}

inline __m128i _lsx_packs_h(const __m128i& a, const __m128i& b)
{
    return __lsx_vssrarni_b_h(b, a, 0);
}

inline __m128i _lsx_packus_w(const __m128i& a, const __m128i& b)
{
    return __lsx_vssrarni_hu_w(b, a, 0);
}
/////// Types ///////
// 16 lanes of unsigned 8-bit integers in one 128-bit LSX register.
struct v_uint8x16
{
    typedef uchar lane_type;
    enum { nlanes = 16 };
    v_uint8x16() {}
    explicit v_uint8x16(__m128i v): val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_uint8x16(uchar v0, uchar v1, uchar v2, uchar v3, uchar v4, uchar v5, uchar v6, uchar v7,
               uchar v8, uchar v9, uchar v10, uchar v11, uchar v12, uchar v13, uchar v14, uchar v15)
    {
        val = _v128_setr_b(v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15);
    }
    // Extract lane 0 (zero-extending pick).
    uchar get0() const
    {
        return (uchar)__lsx_vpickve2gr_bu(val, 0);
    }
    __m128i val;
};
// 16 lanes of signed 8-bit integers in one 128-bit LSX register.
struct v_int8x16
{
    typedef schar lane_type;
    enum { nlanes = 16 };
    v_int8x16() {}
    explicit v_int8x16(__m128i v) : val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_int8x16(schar v0, schar v1, schar v2, schar v3, schar v4, schar v5, schar v6, schar v7,
              schar v8, schar v9, schar v10, schar v11, schar v12, schar v13, schar v14, schar v15)
    {
        val = _v128_setr_b(v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15);
    }
    // Extract lane 0 (sign-extending pick).
    schar get0() const
    {
        return (schar)__lsx_vpickve2gr_b(val, 0);
    }
    __m128i val;
};
// 8 lanes of unsigned 16-bit integers in one 128-bit LSX register.
struct v_uint16x8
{
    typedef ushort lane_type;
    enum { nlanes = 8 };
    v_uint16x8() {}
    explicit v_uint16x8(__m128i v) : val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_uint16x8(ushort v0, ushort v1, ushort v2, ushort v3, ushort v4, ushort v5, ushort v6, ushort v7)
    {
        val = _v128_setr_h(v0, v1, v2, v3, v4, v5, v6, v7);
    }
    // Extract lane 0 (zero-extending pick).
    ushort get0() const
    {
        return (ushort)__lsx_vpickve2gr_hu(val, 0);
    }
    __m128i val;
};
// 8 lanes of signed 16-bit integers in one 128-bit LSX register.
struct v_int16x8
{
    typedef short lane_type;
    enum { nlanes = 8 };
    v_int16x8() {}
    explicit v_int16x8(__m128i v) : val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_int16x8(short v0, short v1, short v2, short v3, short v4, short v5, short v6, short v7)
    {
        val = _v128_setr_h(v0, v1, v2, v3, v4, v5, v6, v7);
    }
    // Extract lane 0 (sign-extending pick).
    short get0() const
    {
        return (short)__lsx_vpickve2gr_h(val, 0);
    }
    __m128i val;
};
// 4 lanes of unsigned 32-bit integers in one 128-bit LSX register.
struct v_uint32x4
{
    typedef unsigned lane_type;
    enum { nlanes = 4 };
    v_uint32x4() {}
    explicit v_uint32x4(__m128i v) : val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_uint32x4(unsigned v0, unsigned v1, unsigned v2, unsigned v3)
    {
        val = _v128_setr_w(v0, v1, v2, v3);
    }
    // Extract lane 0 (zero-extending pick).
    unsigned get0() const
    {
        return (unsigned)__lsx_vpickve2gr_wu(val, 0);
    }
    __m128i val;
};
// 4 lanes of signed 32-bit integers in one 128-bit LSX register.
struct v_int32x4
{
    typedef int lane_type;
    enum { nlanes = 4 };
    v_int32x4() {}
    explicit v_int32x4(__m128i v) : val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_int32x4(int v0, int v1, int v2, int v3)
    {
        val = _v128_setr_w(v0, v1, v2, v3);
    }
    // Extract lane 0.
    int get0() const
    {
        return (int)__lsx_vpickve2gr_w(val, 0);
    }
    __m128i val;
};
// 4 lanes of single-precision floats in one 128-bit LSX register.
struct v_float32x4
{
    typedef float lane_type;
    enum { nlanes = 4 };
    v_float32x4() {}
    explicit v_float32x4(__m128 v) : val(v) {}
    // Bit-pattern reinterpretation of an integer register.
    explicit v_float32x4(__m128i v) { val = *((__m128*)&v); }
    // Lanes are given in memory (low-to-high) order.
    v_float32x4(float v0, float v1, float v2, float v3)
    {
        val = _v128_setr_ps(v0, v1, v2, v3);
    }
    // Extract lane 0; the union avoids type-punning through pointer casts.
    float get0() const
    {
        union { int iv; float fv; } d;
        d.iv = __lsx_vpickve2gr_w(val, 0);
        return d.fv;
    }
    // Extract lane 0 converted to int (truncation toward zero).
    int get0toint() const
    {
        __m128i result = __lsx_vftintrz_w_s(val);
        return (int)__lsx_vpickve2gr_w(result, 0);
    }
    __m128 val;
};
// 2 lanes of unsigned 64-bit integers in one 128-bit LSX register.
struct v_uint64x2
{
    typedef uint64 lane_type;
    enum { nlanes = 2 };
    v_uint64x2() {}
    explicit v_uint64x2(__m128i v) : val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_uint64x2(uint64 v0, uint64 v1)
    {
        val = _v128_setr_d(v0, v1);
    }
    // Extract lane 0 (unsigned pick).
    uint64 get0() const
    {
        return __lsx_vpickve2gr_du(val, 0);
    }
    __m128i val;
};
// 2 lanes of signed 64-bit integers in one 128-bit LSX register.
struct v_int64x2
{
    typedef int64 lane_type;
    enum { nlanes = 2 };
    v_int64x2() {}
    explicit v_int64x2(__m128i v) : val(v) {}
    // Lanes are given in memory (low-to-high) order.
    v_int64x2(int64 v0, int64 v1)
    {
        val = _v128_setr_d(v0, v1);
    }
    // Extract lane 0.
    // Fix: the return type was uint64, silently converting the signed lane
    // value; it now matches lane_type (int64), as in the other vector types.
    int64 get0() const
    {
        return __lsx_vpickve2gr_d(val, 0);
    }
    __m128i val;
};
// 2 lanes of double-precision floats in one 128-bit LSX register.
struct v_float64x2
{
    typedef double lane_type;
    enum { nlanes = 2 };
    v_float64x2() {}
    explicit v_float64x2(__m128d v) : val(v) {}
    // Bit-pattern reinterpretation of an integer register.
    explicit v_float64x2(__m128i v) { val = *((__m128d*)&v); }
    // Lanes are given in memory (low-to-high) order.
    v_float64x2(double v0, double v1)
    {
        val = _v128_setr_pd(v0, v1);
    }
    // Extract lane 0; the union avoids type-punning through pointer casts.
    double get0() const
    {
        union { int64 iv; double fv; } d;
        d.iv = __lsx_vpickve2gr_d(val, 0);
        return d.fv;
    }
    // Extract lane 0 converted to int64 (truncation toward zero).
    int64 get0toint64() const
    {
        __m128i result = __lsx_vftintrz_l_d(val);
        return (int64)__lsx_vpickve2gr_d(result, 0);
    }
    __m128d val;
};
////////////// Load and store operations /////////

// Generates the v_load / v_store family for one integer vector type.
// Notes: LSX vld/vst have no alignment requirement and no non-temporal
// variant, so all load/store flavors map to the same instruction.
// v_load_low loads 64 bits and replicates them into both halves (the
// high half's content is unspecified by the universal-intrinsics contract).
#define OPENCV_HAL_IMPL_LSX_LOADSTORE(_Tpvec, _Tp)                    \
    inline _Tpvec v_load(const _Tp* ptr)                              \
    { return _Tpvec(__lsx_vld(ptr, 0)); }                             \
    inline _Tpvec v_load_aligned(const _Tp* ptr)                      \
    { return _Tpvec(__lsx_vld(ptr, 0)); }                             \
    inline _Tpvec v_load_low(const _Tp* ptr)                          \
    { return _Tpvec(__lsx_vldrepl_d(ptr, 0)); }                       \
    inline _Tpvec v_load_halves(const _Tp* ptr0, const _Tp* ptr1)     \
    {                                                                 \
        __m128i vl = __lsx_vldrepl_d(ptr0, 0);                        \
        __m128i vh = __lsx_vldrepl_d(ptr1, 0);                        \
        return _Tpvec(__lsx_vilvl_d(vh, vl));                         \
    }                                                                 \
    inline void v_store(_Tp* ptr, const _Tpvec& a)                    \
    { __lsx_vst(a.val, ptr, 0); }                                     \
    inline void v_store_aligned(_Tp* ptr, const _Tpvec& a)            \
    { __lsx_vst(a.val, ptr, 0); }                                     \
    inline void v_store_aligned_nocache(_Tp* ptr, const _Tpvec& a)    \
    { __lsx_vst(a.val, ptr, 0); }                                     \
    inline void v_store(_Tp* ptr, const _Tpvec& a, hal::StoreMode mode)\
    {                                                                 \
        if ( mode == hal::STORE_UNALIGNED)                            \
            __lsx_vst(a.val, ptr, 0);                                 \
        else if ( mode == hal::STORE_ALIGNED_NOCACHE)                 \
            __lsx_vst(a.val, ptr, 0);                                 \
        else                                                          \
            __lsx_vst(a.val, ptr, 0);                                 \
    }                                                                 \
    inline void v_store_low(_Tp* ptr, const _Tpvec& a)                \
    { __lsx_vstelm_d(a.val, ptr, 0, 0); }                             \
    inline void v_store_high(_Tp* ptr, const _Tpvec& a)               \
    { __lsx_vstelm_d(a.val, ptr, 0, 1); }                             \
// Instantiations for every integer lane type.
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_uint8x16, uchar)
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_int8x16, schar)
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_uint16x8, ushort)
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_int16x8, short)
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_uint32x4, unsigned)
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_int32x4, int)
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_uint64x2, uint64)
OPENCV_HAL_IMPL_LSX_LOADSTORE(v_int64x2, int64)
// Same load/store family for float/double vectors; halfreg is the native
// FP register type (__m128 / __m128d) that integer loads are cast to.
#define OPENCV_HAL_IMPL_LSX_LOADSTORE_FLT(_Tpvec, _Tp, halfreg)       \
    inline _Tpvec v_load(const _Tp* ptr)                              \
    { return _Tpvec((halfreg)__lsx_vld(ptr, 0)); }                    \
    inline _Tpvec v_load_aligned(const _Tp* ptr)                      \
    { return _Tpvec((halfreg)__lsx_vld(ptr, 0)); }                    \
    inline _Tpvec v_load_low(const _Tp* ptr)                          \
    { return _Tpvec((halfreg)__lsx_vldrepl_d(ptr, 0)); }              \
    inline _Tpvec v_load_halves(const _Tp* ptr0, const _Tp* ptr1)     \
    {                                                                 \
        __m128i vl = __lsx_vldrepl_d(ptr0, 0);                        \
        __m128i vh = __lsx_vldrepl_d(ptr1, 0);                        \
        return _Tpvec((halfreg)__lsx_vilvl_d(vh, vl));                \
    }                                                                 \
    inline void v_store(_Tp* ptr, const _Tpvec& a)                    \
    { __lsx_vst((__m128i)a.val, ptr, 0); }                            \
    inline void v_store_aligned(_Tp* ptr, const _Tpvec& a)            \
    { __lsx_vst((__m128i)a.val, ptr, 0); }                            \
    inline void v_store_aligned_nocache(_Tp* ptr, const _Tpvec& a)    \
    { __lsx_vst((__m128i)a.val, ptr, 0); }                            \
    inline void v_store(_Tp* ptr, const _Tpvec& a, hal::StoreMode mode)\
    {                                                                 \
        if( mode == hal::STORE_UNALIGNED)                             \
            __lsx_vst((__m128i)a.val, ptr, 0);                        \
        else if( mode == hal::STORE_ALIGNED_NOCACHE)                  \
            __lsx_vst((__m128i)a.val, ptr, 0);                        \
        else                                                          \
            __lsx_vst((__m128i)a.val, ptr, 0);                        \
    }                                                                 \
    inline void v_store_low(_Tp* ptr, const _Tpvec& a)                \
    { __lsx_vstelm_d((__m128i)a.val, ptr, 0, 0); }                    \
    inline void v_store_high(_Tp* ptr, const _Tpvec& a)               \
    { __lsx_vstelm_d((__m128i)a.val, ptr, 0, 1); }                    \
// Instantiations for the floating-point vector types.
OPENCV_HAL_IMPL_LSX_LOADSTORE_FLT(v_float32x4, float, __m128)
OPENCV_HAL_IMPL_LSX_LOADSTORE_FLT(v_float64x2, double, __m128d)
// Bit-pattern casts from FP registers to __m128i (no value conversion).
inline __m128i _lsx_128_castps_si128(const __m128& v)
{ return __m128i(v); }
inline __m128i _lsx_128_castpd_si128(const __m128d& v)
{ return __m128i(v); }
// v_reinterpret_as_<suffix>: reinterpret one vector type's bits as another.
#define OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, _Tpvecf, suffix, cast) \
    inline _Tpvec v_reinterpret_as_##suffix(const _Tpvecf& a)   \
    { return _Tpvec(cast(a.val)); }
// Generates v_setzero_/v_setall_ (plus the templated forms) and reinterpret
// casts from every other vector type, for one integer vector type.
#define OPENCV_HAL_IMPL_LSX_INIT(_Tpvec, _Tp, suffix, ssuffix, ctype_s)       \
    inline _Tpvec v_setzero_##suffix()                                        \
    { return _Tpvec(__lsx_vldi(0)); }                                         \
    inline _Tpvec v_setall_##suffix(_Tp v)                                    \
    { return _Tpvec(__lsx_vreplgr2vr_##ssuffix((ctype_s)v)); }                \
    template <> inline _Tpvec v_setzero_()                                    \
    { return v_setzero_##suffix(); }                                          \
    template <> inline _Tpvec v_setall_(_Tp v)                                \
    { return v_setall_##suffix(v); }                                          \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint8x16, suffix, OPENCV_HAL_NOP)      \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int8x16, suffix, OPENCV_HAL_NOP)       \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint16x8, suffix, OPENCV_HAL_NOP)      \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int16x8, suffix, OPENCV_HAL_NOP)       \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint32x4, suffix, OPENCV_HAL_NOP)      \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int32x4, suffix, OPENCV_HAL_NOP)       \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint64x2, suffix, OPENCV_HAL_NOP)      \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int64x2, suffix, OPENCV_HAL_NOP)       \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_float32x4, suffix, _lsx_128_castps_si128) \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_float64x2, suffix, _lsx_128_castpd_si128) \
// Instantiations (ssuffix selects replgr2vr lane width; ctype_s its arg type).
OPENCV_HAL_IMPL_LSX_INIT(v_uint8x16, uchar, u8, b, int)
OPENCV_HAL_IMPL_LSX_INIT(v_int8x16, schar, s8, b, int)
OPENCV_HAL_IMPL_LSX_INIT(v_uint16x8, ushort, u16, h, int)
OPENCV_HAL_IMPL_LSX_INIT(v_int16x8, short, s16, h, int)
OPENCV_HAL_IMPL_LSX_INIT(v_uint32x4, unsigned, u32, w, int)
OPENCV_HAL_IMPL_LSX_INIT(v_int32x4, int, s32, w, int)
OPENCV_HAL_IMPL_LSX_INIT(v_uint64x2, uint64, u64, d, long int)
OPENCV_HAL_IMPL_LSX_INIT(v_int64x2, int64, s64, d, long int)
// Bit-pattern casts from __m128i to FP registers (no value conversion).
inline __m128 _lsx_128_castsi128_ps(const __m128i &v)
{ return __m128(v); }
inline __m128d _lsx_128_castsi128_pd(const __m128i &v)
{ return __m128d(v); }
// Same init/reinterpret generator for the floating-point vector types.
#define OPENCV_HAL_IMPL_LSX_INIT_FLT(_Tpvec, _Tp, suffix, zsuffix, cast)  \
    inline _Tpvec v_setzero_##suffix()                                    \
    { return _Tpvec(__lsx_vldi(0)); }                                     \
    inline _Tpvec v_setall_##suffix(_Tp v)                                \
    { return _Tpvec(_v128_setall_##zsuffix(v)); }                         \
    template <> inline _Tpvec v_setzero_()                                \
    { return v_setzero_##suffix(); }                                      \
    template <> inline _Tpvec v_setall_(_Tp v)                            \
    { return v_setall_##suffix(v); }                                      \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint8x16, suffix, cast)            \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int8x16, suffix, cast)             \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint16x8, suffix, cast)            \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int16x8, suffix, cast)             \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint32x4, suffix, cast)            \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int32x4, suffix, cast)             \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_uint64x2, suffix, cast)            \
    OPENCV_HAL_IMPL_LSX_CAST(_Tpvec, v_int64x2, suffix, cast)             \
// Instantiations for f32/f64.
OPENCV_HAL_IMPL_LSX_INIT_FLT(v_float32x4, float, f32, ps, _lsx_128_castsi128_ps)
OPENCV_HAL_IMPL_LSX_INIT_FLT(v_float64x2, double, f64, pd, _lsx_128_castsi128_pd)
// Cross-FP reinterpret casts (identity for same type, bit cast otherwise).
inline v_float32x4 v_reinterpret_as_f32(const v_float32x4& a)
{ return a; }
inline v_float32x4 v_reinterpret_as_f32(const v_float64x2& a)
{ return v_float32x4(_lsx_128_castps_si128(__m128(a.val))); }
inline v_float64x2 v_reinterpret_as_f64(const v_float64x2& a)
{ return a; }
inline v_float64x2 v_reinterpret_as_f64(const v_float32x4& a)
{ return v_float64x2(_lsx_128_castpd_si128(__m128d(a.val))); }
//////////////// Variant Value reordering ///////////////
// unpacks
// v128_unpacklo/hi: lane-wise interleave of the low / high halves of a and b.
#define OPENCV_HAL_IMPL_LSX_UNPACK(_Tpvec, suffix)                              \
    inline _Tpvec v128_unpacklo(const _Tpvec& a, const _Tpvec& b)               \
    { return _Tpvec(__lsx_vilvl_##suffix(__m128i(b.val), __m128i(a.val))); }    \
    inline _Tpvec v128_unpackhi(const _Tpvec& a, const _Tpvec& b)               \
    { return _Tpvec(__lsx_vilvh_##suffix(__m128i(b.val), __m128i(a.val))); }    \
// Instantiations (suffix = lane width: b/h/w/d).
OPENCV_HAL_IMPL_LSX_UNPACK(v_uint8x16, b)
OPENCV_HAL_IMPL_LSX_UNPACK(v_int8x16, b)
OPENCV_HAL_IMPL_LSX_UNPACK(v_uint16x8, h)
OPENCV_HAL_IMPL_LSX_UNPACK(v_int16x8, h)
OPENCV_HAL_IMPL_LSX_UNPACK(v_uint32x4, w)
OPENCV_HAL_IMPL_LSX_UNPACK(v_int32x4, w)
OPENCV_HAL_IMPL_LSX_UNPACK(v_uint64x2, d)
OPENCV_HAL_IMPL_LSX_UNPACK(v_int64x2, d)
OPENCV_HAL_IMPL_LSX_UNPACK(v_float32x4, w)
OPENCV_HAL_IMPL_LSX_UNPACK(v_float64x2, d)
//ZIP
// v_combine_low/high join the 64-bit halves of two vectors; v_recombine does
// both at once; v_zip interleaves lanes via the unpack helpers above.
#define OPENCV_HAL_IMPL_LSX_ZIP(_Tpvec)                                 \
    inline _Tpvec v_combine_low(const _Tpvec& a, const _Tpvec& b)       \
    { return (_Tpvec)__lsx_vilvl_d((__m128i)b.val, (__m128i)a.val); }   \
    inline _Tpvec v_combine_high(const _Tpvec& a, const _Tpvec& b)      \
    { return (_Tpvec)__lsx_vilvh_d((__m128i)b.val, (__m128i)a.val); }   \
    inline void v_recombine(const _Tpvec& a, const _Tpvec& b,           \
                            _Tpvec& c, _Tpvec& d)                       \
    {                                                                   \
        __m128i a1 = (__m128i)a.val, b1 = (__m128i)b.val;               \
        c = _Tpvec(__lsx_vilvl_d(b1, a1));                              \
        d = _Tpvec(__lsx_vilvh_d(b1, a1));                              \
    }                                                                   \
    inline void v_zip(const _Tpvec& a, const _Tpvec& b,                 \
                      _Tpvec& ab0, _Tpvec& ab1)                         \
    {                                                                   \
        ab0 = v128_unpacklo(a, b);                                      \
        ab1 = v128_unpackhi(a, b);                                      \
    }
OPENCV_HAL_IMPL_LSX_ZIP(v_uint8x16)
OPENCV_HAL_IMPL_LSX_ZIP(v_int8x16)
OPENCV_HAL_IMPL_LSX_ZIP(v_uint16x8)
OPENCV_HAL_IMPL_LSX_ZIP(v_int16x8)
OPENCV_HAL_IMPL_LSX_ZIP(v_uint32x4)
OPENCV_HAL_IMPL_LSX_ZIP(v_int32x4)
OPENCV_HAL_IMPL_LSX_ZIP(v_uint64x2)
OPENCV_HAL_IMPL_LSX_ZIP(v_int64x2)
OPENCV_HAL_IMPL_LSX_ZIP(v_float32x4)
OPENCV_HAL_IMPL_LSX_ZIP(v_float64x2)
////////// Arithmetic, bitwise and comparison operations /////////
/** Arithmetics **/
// Binary-operation wrapper: maps one named op to one LSX intrinsic.
#define OPENCV_HAL_IMPL_LSX_BIN_OP(bin_op, _Tpvec, intrin)  \
    inline _Tpvec bin_op(const _Tpvec& a, const _Tpvec& b)  \
    { return _Tpvec(intrin(a.val, b.val)); }
// 8/16-bit add/sub saturate (vsadd/vssub); 32/64-bit integer ops wrap.
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_uint8x16, __lsx_vsadd_bu)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_uint8x16, __lsx_vssub_bu)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_int8x16, __lsx_vsadd_b)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_int8x16, __lsx_vssub_b)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_uint16x8, __lsx_vsadd_hu)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_uint16x8, __lsx_vssub_hu)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_int16x8, __lsx_vsadd_h)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_int16x8, __lsx_vssub_h)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_uint32x4, __lsx_vadd_w)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_uint32x4, __lsx_vsub_w)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_mul, v_uint32x4, __lsx_vmul_w)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_int32x4, __lsx_vadd_w)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_int32x4, __lsx_vsub_w)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_mul, v_int32x4, __lsx_vmul_w)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_uint64x2, __lsx_vadd_d)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_uint64x2, __lsx_vsub_d)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_int64x2, __lsx_vadd_d)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_int64x2, __lsx_vsub_d)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_float32x4, __lsx_vfadd_s)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_float32x4, __lsx_vfsub_s)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_mul, v_float32x4, __lsx_vfmul_s)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_div, v_float32x4, __lsx_vfdiv_s)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_add, v_float64x2, __lsx_vfadd_d)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_sub, v_float64x2, __lsx_vfsub_d)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_mul, v_float64x2, __lsx_vfmul_d)
OPENCV_HAL_IMPL_LSX_BIN_OP(v_div, v_float64x2, __lsx_vfdiv_d)
// saturating multiply 8-bit, 16-bit
// NOTE(review): the 8-bit overloads call v_mul_expand / v_pack, which appear
// later in this chunk -- confirm they are declared earlier in the full header.
inline v_uint8x16 v_mul(const v_uint8x16& a, const v_uint8x16& b)
{
    v_uint16x8 c, d;
    v_mul_expand(a, b, c, d);   // widen products to 16 bits
    return v_pack(c, d);        // saturating narrow back to u8
}
inline v_int8x16 v_mul(const v_int8x16& a, const v_int8x16& b)
{
    v_int16x8 c, d;
    v_mul_expand(a, b, c, d);
    return v_pack(c, d);
}
// 16-bit: even/odd widening multiplies to 32 bits, re-interleave into lane
// order, then saturating narrow (vssr*ni with shift 0) back to 16 bits.
inline v_uint16x8 v_mul(const v_uint16x8& a, const v_uint16x8& b)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i pev = __lsx_vmulwev_w_hu(a0, b0);
    __m128i pod = __lsx_vmulwod_w_hu(a0, b0);
    __m128i pl = __lsx_vilvl_w(pod, pev);
    __m128i ph = __lsx_vilvh_w(pod, pev);
    return (v_uint16x8)__lsx_vssrlrni_hu_w(ph, pl, 0);
}
inline v_int16x8 v_mul(const v_int16x8& a, const v_int16x8& b)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i pev = __lsx_vmulwev_w_h(a0, b0);
    __m128i pod = __lsx_vmulwod_w_h(a0, b0);
    __m128i pl = __lsx_vilvl_w(pod, pev);
    __m128i ph = __lsx_vilvh_w(pod, pev);
    return (v_int16x8)__lsx_vssrarni_h_w(ph, pl, 0);
}
/** Non-saturating arithmetics **/
// Wrapping (modulo-2^n) add/sub/mul.
#define OPENCV_HAL_IMPL_LSX_BIN_FUNC(func, _Tpvec, intrin)  \
    inline _Tpvec func(const _Tpvec& a, const _Tpvec& b)    \
    { return _Tpvec(intrin(a.val, b.val)); }                \
// Instantiations for the wrap variants.
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_add_wrap, v_uint8x16, __lsx_vadd_b)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_add_wrap, v_int8x16, __lsx_vadd_b)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_add_wrap, v_uint16x8, __lsx_vadd_h)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_add_wrap, v_int16x8, __lsx_vadd_h)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_sub_wrap, v_uint8x16, __lsx_vsub_b)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_sub_wrap, v_int8x16, __lsx_vsub_b)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_sub_wrap, v_uint16x8, __lsx_vsub_h)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_sub_wrap, v_int16x8, __lsx_vsub_h)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_mul_wrap, v_uint16x8, __lsx_vmul_h)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_mul_wrap, v_int16x8, __lsx_vmul_h)
// 8-bit wrapping multiply: even/odd widening multiplies, then pack the low
// bytes of the 16-bit products back into lane order.
inline v_uint8x16 v_mul_wrap(const v_uint8x16& a, const v_uint8x16& b)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i p0 = __lsx_vmulwev_h_bu(a0, b0);
    __m128i p1 = __lsx_vmulwod_h_bu(a0, b0);
    return v_uint8x16(__lsx_vpackev_b(p1, p0));
}
// Signed variant: low 8 bits of the product are sign-agnostic, so reuse the
// unsigned implementation via bit reinterpretation.
inline v_int8x16 v_mul_wrap(const v_int8x16& a, const v_int8x16& b)
{
    return v_reinterpret_as_s8(v_mul_wrap(v_reinterpret_as_u8(a), v_reinterpret_as_u8(b)));
}
// Multiply and expand
// Each overload multiplies lanes into double-width products: even/odd
// widening multiplies followed by an interleave restore lane order, so
// c holds the products of the low half of a/b and d those of the high half.
inline void v_mul_expand(const v_uint8x16& a, const v_uint8x16& b,
                         v_uint16x8& c, v_uint16x8& d)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i p0 = __lsx_vmulwev_h_bu(a0, b0);
    __m128i p1 = __lsx_vmulwod_h_bu(a0, b0);
    c.val = __lsx_vilvl_h(p1, p0);
    d.val = __lsx_vilvh_h(p1, p0);
}
inline void v_mul_expand(const v_int8x16& a, const v_int8x16& b,
                         v_int16x8& c, v_int16x8& d)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i p0 = __lsx_vmulwev_h_b(a0, b0);
    __m128i p1 = __lsx_vmulwod_h_b(a0, b0);
    c.val = __lsx_vilvl_h(p1, p0);
    d.val = __lsx_vilvh_h(p1, p0);
}
inline void v_mul_expand(const v_int16x8& a, const v_int16x8& b,
                         v_int32x4& c, v_int32x4& d)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i p0 = __lsx_vmulwev_w_h(a0, b0);
    __m128i p1 = __lsx_vmulwod_w_h(a0, b0);
    c.val = __lsx_vilvl_w(p1, p0);
    d.val = __lsx_vilvh_w(p1, p0);
}
inline void v_mul_expand(const v_uint16x8& a, const v_uint16x8& b,
                         v_uint32x4& c, v_uint32x4& d)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i p0 = __lsx_vmulwev_w_hu(a0, b0);
    __m128i p1 = __lsx_vmulwod_w_hu(a0, b0);
    c.val = __lsx_vilvl_w(p1, p0);
    d.val = __lsx_vilvh_w(p1, p0);
}
inline void v_mul_expand(const v_uint32x4& a, const v_uint32x4& b,
                         v_uint64x2& c, v_uint64x2& d)
{
    __m128i a0 = a.val, b0 = b.val;
    __m128i p0 = __lsx_vmulwev_d_wu(a0, b0);
    __m128i p1 = __lsx_vmulwod_d_wu(a0, b0);
    c.val = __lsx_vilvl_d(p1, p0);
    d.val = __lsx_vilvh_d(p1, p0);
}
// High half of the 16x16 -> 32-bit product, per lane.
inline v_int16x8 v_mul_hi(const v_int16x8& a, const v_int16x8& b)
{ return v_int16x8(__lsx_vmuh_h(a.val, b.val)); }
inline v_uint16x8 v_mul_hi(const v_uint16x8& a, const v_uint16x8& b)
{ return v_uint16x8(__lsx_vmuh_hu(a.val, b.val)); }
/** Bitwise shifts **/
// Runtime-count and immediate (template) shift variants; right shift of the
// signed type is arithmetic (srai intrinsic passed in as 'srai').
#define OPENCV_HAL_IMPL_LSX_SHIFT_OP(_Tpuvec, _Tpsvec, suffix, srai)                    \
    inline _Tpuvec v_shl(const _Tpuvec& a, int imm)                                     \
    { return _Tpuvec(__lsx_vsll_##suffix(a.val, __lsx_vreplgr2vr_##suffix(imm))); }     \
    inline _Tpsvec v_shl(const _Tpsvec& a, int imm)                                     \
    { return _Tpsvec(__lsx_vsll_##suffix(a.val, __lsx_vreplgr2vr_##suffix(imm))); }     \
    inline _Tpuvec v_shr(const _Tpuvec& a, int imm)                                     \
    { return _Tpuvec(__lsx_vsrl_##suffix(a.val, __lsx_vreplgr2vr_##suffix(imm))); }     \
    inline _Tpsvec v_shr(const _Tpsvec& a, int imm)                                     \
    { return _Tpsvec(srai(a.val, __lsx_vreplgr2vr_##suffix(imm))); }                    \
    template<int imm>                                                                   \
    inline _Tpuvec v_shl(const _Tpuvec& a)                                              \
    { return _Tpuvec(__lsx_vslli_##suffix(a.val, imm)); }                               \
    template<int imm>                                                                   \
    inline _Tpsvec v_shl(const _Tpsvec& a)                                              \
    { return _Tpsvec(__lsx_vslli_##suffix(a.val, imm)); }                               \
    template<int imm>                                                                   \
    inline _Tpuvec v_shr(const _Tpuvec& a)                                              \
    { return _Tpuvec(__lsx_vsrli_##suffix(a.val, imm)); }                               \
    template<int imm>                                                                   \
    inline _Tpsvec v_shr(const _Tpsvec& a)                                              \
    { return _Tpsvec(__lsx_vsrai_##suffix(a.val, imm)); }                               \
// Instantiations for 16/32/64-bit lanes (shifts are not defined for 8-bit).
OPENCV_HAL_IMPL_LSX_SHIFT_OP(v_uint16x8, v_int16x8, h, __lsx_vsra_h)
OPENCV_HAL_IMPL_LSX_SHIFT_OP(v_uint32x4, v_int32x4, w, __lsx_vsra_w)
OPENCV_HAL_IMPL_LSX_SHIFT_OP(v_uint64x2, v_int64x2, d, __lsx_vsra_d)
/** Bitwise logic **/
// and/or/xor/not for integer vectors; all map to the byte-wise 'v' variants,
// which is correct for any lane width.  v_not uses vnori with immediate 0
// (NOR with zero == bitwise NOT).
#define OPENCV_HAL_IMPL_LSX_LOGIC_OP(_Tpvec, suffix)                \
    OPENCV_HAL_IMPL_LSX_BIN_OP(v_and, _Tpvec, __lsx_vand_##suffix)  \
    OPENCV_HAL_IMPL_LSX_BIN_OP(v_or, _Tpvec, __lsx_vor_##suffix)    \
    OPENCV_HAL_IMPL_LSX_BIN_OP(v_xor, _Tpvec, __lsx_vxor_##suffix)  \
    inline _Tpvec v_not(const _Tpvec& a)                            \
    { return _Tpvec(__lsx_vnori_b(a.val, 0)); }                     \
// Instantiations for every integer vector type.
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_uint8x16, v)
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_int8x16, v)
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_uint16x8, v)
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_int16x8, v)
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_uint32x4, v)
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_int32x4, v)
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_uint64x2, v)
OPENCV_HAL_IMPL_LSX_LOGIC_OP(v_int64x2, v)
// Same bitwise ops for FP vectors, operating on the bit pattern.
#define OPENCV_HAL_IMPL_LSX_FLOAT_BIN_OP(bin_op, _Tpvec, intrin, cast)      \
    inline _Tpvec bin_op(const _Tpvec& a, const _Tpvec& b)                  \
    { return _Tpvec(intrin((__m128i)(a.val), (__m128i)(b.val))); }
#define OPENCV_HAL_IMPL_LSX_FLOAT_LOGIC_OP(_Tpvec, cast)                    \
    OPENCV_HAL_IMPL_LSX_FLOAT_BIN_OP(v_and, _Tpvec, __lsx_vand_v, cast)     \
    OPENCV_HAL_IMPL_LSX_FLOAT_BIN_OP(v_or, _Tpvec, __lsx_vor_v, cast)       \
    OPENCV_HAL_IMPL_LSX_FLOAT_BIN_OP(v_xor, _Tpvec, __lsx_vxor_v, cast)     \
    inline _Tpvec v_not(const _Tpvec& a)                                    \
    { return _Tpvec(__lsx_vnori_b((__m128i)(a.val), 0)); }                  \
// Instantiations for f32/f64.
OPENCV_HAL_IMPL_LSX_FLOAT_LOGIC_OP(v_float32x4, _lsx_128_castsi128_ps)
OPENCV_HAL_IMPL_LSX_FLOAT_LOGIC_OP(v_float64x2, _lsx_128_castsi128_pd)
/** Select **/
// Bitwise select: result bit = a where the mask bit is set, else b.
// Masks produced by the comparison ops are all-ones/all-zeros per lane.
#define OPENCV_HAL_IMPL_LSX_SELECT(_Tpvec)                                          \
    inline _Tpvec v_select(const _Tpvec& mask, const _Tpvec& a, const _Tpvec& b)    \
    { return _Tpvec(__lsx_vbitsel_v(b.val, a.val, mask.val)); }                     \
// Instantiations for 8/16/32-bit integer types.
OPENCV_HAL_IMPL_LSX_SELECT(v_uint8x16)
OPENCV_HAL_IMPL_LSX_SELECT(v_int8x16)
OPENCV_HAL_IMPL_LSX_SELECT(v_uint16x8)
OPENCV_HAL_IMPL_LSX_SELECT(v_int16x8)
OPENCV_HAL_IMPL_LSX_SELECT(v_uint32x4)
OPENCV_HAL_IMPL_LSX_SELECT(v_int32x4)
// FP variants go through __m128i bit casts.
inline v_float32x4 v_select(const v_float32x4 &mask, const v_float32x4 &a, const v_float32x4 &b)
{ return v_float32x4(__lsx_vbitsel_v((__m128i)b.val, (__m128i)a.val, (__m128i)mask.val)); }
inline v_float64x2 v_select(const v_float64x2 &mask, const v_float64x2 &a, const v_float64x2 &b)
{ return v_float64x2(__lsx_vbitsel_v((__m128i)b.val, (__m128i)a.val, (__m128i)mask.val)); }
/** Comparison **/
// Derive !=, <, >=, <= from == and > for the integer types.
#define OPENCV_HAL_IMPL_LSX_CMP_OP_OV(_Tpvec)               \
    inline _Tpvec v_ne(const _Tpvec& a, const _Tpvec& b)    \
    { return v_not(v_eq(a, b)); }                           \
    inline _Tpvec v_lt(const _Tpvec& a, const _Tpvec& b)    \
    { return v_gt(b, a); }                                  \
    inline _Tpvec v_ge(const _Tpvec& a, const _Tpvec& b)    \
    { return v_not(v_lt(a, b)); }                           \
    inline _Tpvec v_le(const _Tpvec& a, const _Tpvec& b)    \
    { return v_ge(b, a); }                                  \
// == via vseq; a > b via vslt(b, a), with signed/unsigned compare variants.
#define OPENCV_HAL_IMPL_LSX_CMP_OP_INT(_Tpuvec, _Tpsvec, suffix, usuffix)   \
    inline _Tpuvec v_eq(const _Tpuvec& a, const _Tpuvec& b)                 \
    { return _Tpuvec(__lsx_vseq_##suffix(a.val, b.val)); }                  \
    inline _Tpuvec v_gt(const _Tpuvec& a, const _Tpuvec& b)                 \
    { return _Tpuvec(__lsx_vslt_##usuffix(b.val, a.val)); }                 \
    inline _Tpsvec v_eq(const _Tpsvec& a, const _Tpsvec& b)                 \
    { return _Tpsvec(__lsx_vseq_##suffix(a.val, b.val)); }                  \
    inline _Tpsvec v_gt(const _Tpsvec& a, const _Tpsvec& b)                 \
    { return _Tpsvec(__lsx_vslt_##suffix(b.val, a.val)); }                  \
    OPENCV_HAL_IMPL_LSX_CMP_OP_OV(_Tpuvec)                                  \
    OPENCV_HAL_IMPL_LSX_CMP_OP_OV(_Tpsvec)
OPENCV_HAL_IMPL_LSX_CMP_OP_INT(v_uint8x16, v_int8x16, b, bu)
OPENCV_HAL_IMPL_LSX_CMP_OP_INT(v_uint16x8, v_int16x8, h, hu)
OPENCV_HAL_IMPL_LSX_CMP_OP_INT(v_uint32x4, v_int32x4, w, wu)
// 64-bit integers: only == and != are provided by the universal API.
#define OPENCV_HAL_IMPL_LSX_CMP_OP_64BIT(_Tpvec, suffix)    \
    inline _Tpvec v_eq(const _Tpvec& a, const _Tpvec& b)    \
    { return _Tpvec(__lsx_vseq_##suffix(a.val, b.val)); }   \
    inline _Tpvec v_ne(const _Tpvec& a, const _Tpvec& b)    \
    { return v_not(v_eq(a, b)); }
OPENCV_HAL_IMPL_LSX_CMP_OP_64BIT(v_uint64x2, d)
OPENCV_HAL_IMPL_LSX_CMP_OP_64BIT(v_int64x2, d)
// FP comparisons map to the vfcmp predicates (quiet, "c"-prefixed).
#define OPENCV_HAL_IMPL_LSX_CMP_FLT(bin_op, suffix, _Tpvec, ssuffix)    \
    inline _Tpvec bin_op(const _Tpvec& a, const _Tpvec& b)              \
    { return _Tpvec(__lsx_##suffix##_##ssuffix(a.val, b.val)); }        \
// ==, !=, <, <= generated here; > and >= are defined below by swapping args.
#define OPENCV_HAL_IMPL_LSX_CMP_OP_FLT(_Tpvec, ssuffix)             \
    OPENCV_HAL_IMPL_LSX_CMP_FLT(v_eq, vfcmp_ceq, _Tpvec, ssuffix)   \
    OPENCV_HAL_IMPL_LSX_CMP_FLT(v_ne, vfcmp_cne, _Tpvec, ssuffix)   \
    OPENCV_HAL_IMPL_LSX_CMP_FLT(v_lt, vfcmp_clt, _Tpvec, ssuffix)   \
    OPENCV_HAL_IMPL_LSX_CMP_FLT(v_le, vfcmp_cle, _Tpvec, ssuffix)   \
// Instantiate the FP comparison sets.
OPENCV_HAL_IMPL_LSX_CMP_OP_FLT(v_float32x4, s)
OPENCV_HAL_IMPL_LSX_CMP_OP_FLT(v_float64x2, d)
inline v_float32x4 v_gt(const v_float32x4 &a, const v_float32x4 &b)
{ return v_float32x4(__lsx_vfcmp_clt_s(b.val, a.val)); }
inline v_float32x4 v_ge(const v_float32x4 &a, const v_float32x4 &b)
{ return v_float32x4(__lsx_vfcmp_cle_s(b.val, a.val)); }
inline v_float64x2 v_gt(const v_float64x2 &a, const v_float64x2 &b)
{ return v_float64x2(__lsx_vfcmp_clt_d(b.val, a.val)); }
inline v_float64x2 v_ge(const v_float64x2 &a, const v_float64x2 &b)
{ return v_float64x2(__lsx_vfcmp_cle_d(b.val, a.val)); }
// All-ones lane where the value is not NaN ("ordered" compare with itself).
inline v_float32x4 v_not_nan(const v_float32x4& a)
{ return v_float32x4(__lsx_vfcmp_cor_s(a.val, a.val)); }
inline v_float64x2 v_not_nan(const v_float64x2& a)
{ return v_float64x2(__lsx_vfcmp_cor_d(a.val, a.val)); }
/** min/max **/
// Per-lane minimum/maximum; FP variants follow LSX vfmin/vfmax semantics.
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_uint8x16, __lsx_vmin_bu)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_uint8x16, __lsx_vmax_bu)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_int8x16, __lsx_vmin_b)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_int8x16, __lsx_vmax_b)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_uint16x8, __lsx_vmin_hu)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_uint16x8, __lsx_vmax_hu)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_int16x8, __lsx_vmin_h)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_int16x8, __lsx_vmax_h)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_uint32x4, __lsx_vmin_wu)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_uint32x4, __lsx_vmax_wu)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_int32x4, __lsx_vmin_w)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_int32x4, __lsx_vmax_w)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_float32x4, __lsx_vfmin_s)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_float32x4, __lsx_vfmax_s)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_min, v_float64x2, __lsx_vfmin_d)
OPENCV_HAL_IMPL_LSX_BIN_FUNC(v_max, v_float64x2, __lsx_vfmax_d)
// Compile-time dispatcher implementing an SSE-palignr-style byte extract
// from the 256-bit pair [a : b]; imm is the starting byte offset (0..16).
// Specializations cover imm == 0 (a), imm == 8 (middle), imm == 16 (b),
// and the general case (OR of shifted halves).
template <int imm,
    bool is_invalid = ((imm < 0) || (imm > 16)),
    bool is_first = (imm == 0),
    bool is_half = (imm == 8),
    bool is_second = (imm == 16),
    bool is_other = (((imm > 0) && (imm < 8)) || ((imm > 8) && (imm < 16)))>
class v_lsx_palignr_u8_class;
// Invalid imm: declared but never defined, so any use fails to compile.
template <int imm>
class v_lsx_palignr_u8_class<imm, true, false, false, false, false>;
template <int imm>
class v_lsx_palignr_u8_class<imm, false, true, false, false, false>
{
public:
    // imm == 0: result is a unchanged.
    inline __m128i operator()(const __m128i& a, const __m128i& b) const
    {
        CV_UNUSED(b);
        return a;
    }
};
template <int imm>
class v_lsx_palignr_u8_class<imm, false, false, true, false, false>
{
public:
    // imm == 8: high 64 bits of a joined with low 64 bits of b
    // (vshuf4i_d control 0x9 -- presumably; confirm against the LSX manual).
    inline __m128i operator()(const __m128i& a, const __m128i& b) const
    {
        return __lsx_vshuf4i_d(a, b, 0x9);
    }
};
template <int imm>
class v_lsx_palignr_u8_class<imm, false, false, false, true, false>
{
public:
    // imm == 16: result is b unchanged.
    inline __m128i operator()(const __m128i& a, const __m128i& b) const
    {
        CV_UNUSED(a);
        return b;
    }
};
template <int imm>
class v_lsx_palignr_u8_class<imm, false, false, false, false, true>
{
public:
    // General case: shift a right by imm bytes, b left by (16 - imm), OR.
    inline __m128i operator()(const __m128i& a, const __m128i& b) const
    {
        enum { imm2 = (sizeof(__m128i) - imm) };
        return __lsx_vor_v(__lsx_vbsrl_v(a, imm), __lsx_vbsll_v(b, imm2));
    }
};
// Public entry point; rejects out-of-range imm at compile time.
template <int imm>
inline __m128i v_lsx_palignr_u8(const __m128i& a, const __m128i& b)
{
    CV_StaticAssert((imm >= 0) && (imm <= 16), "Invalid imm for v_lsx_palignr_u8");
    return v_lsx_palignr_u8_class<imm>()(a, b);
}
/** Rotate **/
// Lane rotation: the single-vector forms shift whole lanes out (filling with
// zeros) via byte shifts; the two-vector forms shift lanes in from the second
// operand using v_lsx_palignr_u8.  'cast' converts __m128i back to the
// vector's native register type.
#define OPENCV_HAL_IMPL_LSX_ROTATE_CAST(_Tpvec, cast)                               \
    template<int imm>                                                               \
    inline _Tpvec v_rotate_right(const _Tpvec &a)                                   \
    {                                                                               \
        enum { imm2 = (imm * sizeof(typename _Tpvec::lane_type))};                  \
        __m128i ret = __lsx_vbsrl_v((__m128i)a.val, imm2);                          \
        return _Tpvec(cast(ret));                                                   \
    }                                                                               \
    template<int imm>                                                               \
    inline _Tpvec v_rotate_left(const _Tpvec &a)                                    \
    {                                                                               \
        enum { imm2 = (imm * sizeof(typename _Tpvec::lane_type))};                  \
        __m128i ret = __lsx_vbsll_v((__m128i)a.val, imm2);                          \
        return _Tpvec(cast(ret));                                                   \
    }                                                                               \
    template<int imm>                                                               \
    inline _Tpvec v_rotate_right(const _Tpvec& a, const _Tpvec& b)                  \
    {                                                                               \
        enum { imm2 = (imm * sizeof(typename _Tpvec::lane_type))};                  \
        return _Tpvec(cast(v_lsx_palignr_u8<imm2>((__m128i)a.val, (__m128i)b.val))); \
    }                                                                               \
    template<int imm>                                                               \
    inline _Tpvec v_rotate_left(const _Tpvec& a, const _Tpvec& b)                   \
    {                                                                               \
        enum { imm2 = ((_Tpvec::nlanes - imm) * sizeof(typename _Tpvec::lane_type))}; \
        return _Tpvec(cast(v_lsx_palignr_u8<imm2>((__m128i)b.val, (__m128i)a.val))); \
    }
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_uint8x16, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_int8x16, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_uint16x8, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_int16x8, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_uint32x4, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_int32x4, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_uint64x2, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_int64x2, OPENCV_HAL_NOP) \
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_float32x4, _lsx_128_castsi128_ps)
OPENCV_HAL_IMPL_LSX_ROTATE_CAST(v_float64x2, _lsx_128_castsi128_pd)
/** Reverse **/
// Reverse lane order.  Shuffle controls: 0x1B = [3,2,1,0] reverses each group
// of four elements; 0x4E = [2,3,0,1] swaps the two 64-bit halves.
inline v_uint8x16 v_reverse(const v_uint8x16 &a)
{
    __m128i vec = __lsx_vshuf4i_b(a.val, 0x1B);     // reverse bytes within each word
    return v_uint8x16(__lsx_vshuf4i_w(vec, 0x1B));  // then reverse the words
}
inline v_int8x16 v_reverse(const v_int8x16 &a)
{ return v_reinterpret_as_s8(v_reverse(v_reinterpret_as_u8(a))); }
inline v_uint16x8 v_reverse(const v_uint16x8 &a)
{
    __m128i vec = __lsx_vshuf4i_h(a.val, 0x1B);     // reverse halfwords per 64 bits
    return v_uint16x8(__lsx_vshuf4i_w(vec, 0x4E));  // then swap the halves
}
inline v_int16x8 v_reverse(const v_int16x8 &a)
{ return v_reinterpret_as_s16(v_reverse(v_reinterpret_as_u16(a))); }
inline v_uint32x4 v_reverse(const v_uint32x4 &a)
{ return v_uint32x4(__lsx_vshuf4i_w(a.val, 0x1B)); }
inline v_int32x4 v_reverse(const v_int32x4 &a)
{ return v_int32x4(__lsx_vshuf4i_w(a.val, 0x1B)); }
inline v_uint64x2 v_reverse(const v_uint64x2 &a)
{ return v_uint64x2(__lsx_vshuf4i_w(a.val, 0x4E)); }
inline v_int64x2 v_reverse(const v_int64x2 &a)
{ return v_int64x2(__lsx_vshuf4i_w(a.val, 0x4E)); }
inline v_float32x4 v_reverse(const v_float32x4 &a)
{ return v_reinterpret_as_f32(v_reverse(v_reinterpret_as_u32(a))); }
inline v_float64x2 v_reverse(const v_float64x2 &a)
{ return v_reinterpret_as_f64(v_reverse(v_reinterpret_as_u64(a))); }
////////////// Reduce and mask ////////////
/** Reduce **/
// Returns a[0]+a[1]+...+a[15], i.e. the sum of all 16 byte lanes.
// Sum of all 16 unsigned byte lanes via pairwise widening horizontal adds
// (b->h->w->d->q) so no intermediate sum can overflow.
inline unsigned v_reduce_sum(const v_uint8x16& a)
{
    __m128i t1 = __lsx_vhaddw_hu_bu(a.val, a.val);
    __m128i t2 = __lsx_vhaddw_wu_hu(t1, t1);
    __m128i t3 = __lsx_vhaddw_du_wu(t2, t2);
    __m128i t4 = __lsx_vhaddw_qu_du(t3, t3);
    return (unsigned)__lsx_vpickve2gr_w(t4, 0);
}
// Signed variant of the same reduction.
inline int v_reduce_sum(const v_int8x16 &a)
{
    __m128i t1 = __lsx_vhaddw_h_b(a.val, a.val);
    __m128i t2 = __lsx_vhaddw_w_h(t1, t1);
    __m128i t3 = __lsx_vhaddw_d_w(t2, t2);
    __m128i t4 = __lsx_vhaddw_q_d(t3, t3);
    return (int)__lsx_vpickve2gr_w(t4, 0);
}
// Logarithmic min/max reductions: repeatedly fold the vector onto itself with
// byte shifts (vbsrl shifts by BYTES), halving the active lanes each step,
// then extract lane 0.
#define OPENCV_HAL_IMPL_LSX_REDUCE_16(_Tpvec, sctype, func, intrin)     \
    inline sctype v_reduce_##func(const _Tpvec& a)                      \
    {                                                                   \
        __m128i val = intrin(a.val, __lsx_vbsrl_v(a.val, 8));           \
        val = intrin(val, __lsx_vbsrl_v(val, 4));                       \
        val = intrin(val, __lsx_vbsrl_v(val, 2));                       \
        val = intrin(val, __lsx_vbsrl_v(val, 1));                       \
        return (sctype)__lsx_vpickve2gr_b(val, 0);                      \
    }
OPENCV_HAL_IMPL_LSX_REDUCE_16(v_uint8x16, uchar, min, __lsx_vmin_bu)
OPENCV_HAL_IMPL_LSX_REDUCE_16(v_uint8x16, uchar, max, __lsx_vmax_bu)
OPENCV_HAL_IMPL_LSX_REDUCE_16(v_int8x16, schar, min, __lsx_vmin_b)
OPENCV_HAL_IMPL_LSX_REDUCE_16(v_int8x16, schar, max, __lsx_vmax_b)
// 8-lane (16-bit) version: three folds suffice.
#define OPENCV_HAL_IMPL_LSX_REDUCE_8(_Tpvec, sctype, func, intrin)      \
    inline sctype v_reduce_##func(const _Tpvec &a)                      \
    {                                                                   \
        __m128i val = intrin(a.val, __lsx_vbsrl_v(a.val, 8));           \
        val = intrin(val, __lsx_vbsrl_v(val, 4));                       \
        val = intrin(val, __lsx_vbsrl_v(val, 2));                       \
        return (sctype)__lsx_vpickve2gr_h(val, 0);                      \
    }
OPENCV_HAL_IMPL_LSX_REDUCE_8(v_uint16x8, ushort, min, __lsx_vmin_hu)
OPENCV_HAL_IMPL_LSX_REDUCE_8(v_uint16x8, ushort, max, __lsx_vmax_hu)
OPENCV_HAL_IMPL_LSX_REDUCE_8(v_int16x8, short, min, __lsx_vmin_h)
OPENCV_HAL_IMPL_LSX_REDUCE_8(v_int16x8, short, max, __lsx_vmax_h)
// 4-lane (32-bit) version: two folds suffice.
#define OPENCV_HAL_IMPL_LSX_REDUCE_4(_Tpvec, sctype, func, intrin)      \
    inline sctype v_reduce_##func(const _Tpvec &a)                      \
    {                                                                   \
        __m128i val = intrin(a.val, __lsx_vbsrl_v(a.val, 8));           \
        val = intrin(val, __lsx_vbsrl_v(val, 4));                       \
        return (sctype)__lsx_vpickve2gr_w(val, 0);                      \
    }
OPENCV_HAL_IMPL_LSX_REDUCE_4(v_uint32x4, unsigned, min, __lsx_vmin_wu)
OPENCV_HAL_IMPL_LSX_REDUCE_4(v_uint32x4, unsigned, max, __lsx_vmax_wu)
OPENCV_HAL_IMPL_LSX_REDUCE_4(v_int32x4, int, min, __lsx_vmin_w)
OPENCV_HAL_IMPL_LSX_REDUCE_4(v_int32x4, int, max, __lsx_vmax_w)
// Float min/max reduction: two folds (4 lanes), result read from lane 0.
#define OPENCV_HAL_IMPL_LSX_REDUCE_FLT(func, intrin)                    \
    inline float v_reduce_##func(const v_float32x4 &a)                  \
    {                                                                   \
        __m128 val = a.val;                                             \
        val = intrin(val, (__m128)__lsx_vbsrl_v((__m128i)val, 8));      \
        val = intrin(val, (__m128)__lsx_vbsrl_v((__m128i)val, 4));      \
        float *fval = (float*)&val;                                     \
        return fval[0];                                                 \
    }
OPENCV_HAL_IMPL_LSX_REDUCE_FLT(min, __lsx_vfmin_s)
OPENCV_HAL_IMPL_LSX_REDUCE_FLT(max, __lsx_vfmax_s)
// Sum of all four int32 lanes via pairwise widening adds
// (4 x i32 -> 2 x i64 -> 1 x i128); the low 32-bit word of the final
// 128-bit value equals the scalar sum modulo 2^32.
inline int v_reduce_sum(const v_int32x4 &a)
{
    __m128i t1 = __lsx_vhaddw_d_w(a.val, a.val);
    __m128i t2 = __lsx_vhaddw_q_d(t1, t1);
    return (int)__lsx_vpickve2gr_w(t2, 0);
}
// Sum of all four uint32 lanes via pairwise widening adds
// (4 x u32 -> 2 x u64 -> 1 x u128); the low 32-bit word of the final
// 128-bit value equals the scalar sum modulo 2^32.
inline unsigned v_reduce_sum(const v_uint32x4 &a)
{
    __m128i t1 = __lsx_vhaddw_du_wu(a.val, a.val);
    __m128i t2 = __lsx_vhaddw_qu_du(t1, t1);
    // Cast directly to the declared return type. The previous '(int)'
    // cast relied on an extra implicit int -> unsigned conversion; the
    // returned value is bit-identical, this is purely for clarity and
    // consistency with the signed overloads.
    return (unsigned)__lsx_vpickve2gr_w(t2, 0);
}
// Sum of all eight int16 lanes, widened through three pairwise
// widening-add steps (i16 -> i32 -> i64 -> i128) so no intermediate
// overflow occurs before the final 32-bit extraction.
inline int v_reduce_sum(const v_int16x8 &a)
{
    __m128i t1 = __lsx_vhaddw_w_h(a.val, a.val);
    __m128i t2 = __lsx_vhaddw_d_w(t1, t1);
    __m128i t3 = __lsx_vhaddw_q_d(t2, t2);
    return (int)__lsx_vpickve2gr_w(t3, 0);
}
// Sum of all eight uint16 lanes, widened through three pairwise
// widening-add steps (u16 -> u32 -> u64 -> u128) so no intermediate
// overflow occurs before the final 32-bit extraction.
inline unsigned v_reduce_sum(const v_uint16x8 &a)
{
    __m128i t1 = __lsx_vhaddw_wu_hu(a.val, a.val);
    __m128i t2 = __lsx_vhaddw_du_wu(t1, t1);
    __m128i t3 = __lsx_vhaddw_qu_du(t2, t2);
    // Cast directly to the declared return type. The previous '(int)'
    // cast relied on an extra implicit int -> unsigned conversion; the
    // returned value is bit-identical, this is purely for clarity and
    // consistency with the signed overloads.
    return (unsigned)__lsx_vpickve2gr_w(t3, 0);
}
// Sum of four float lanes: add the upper half (shifted down 8 bytes) to
// the lower half, then add the two remaining partial sums in scalar code.
inline float v_reduce_sum(const v_float32x4 &a)
{
    __m128i val = (__m128i)a.val;
    val = __lsx_vbsrl_v(val, 8);
    __m128 result = __lsx_vfadd_s(a.val, (__m128)val);
    float *pa = (float*)&result;
    return (float)(pa[0] + pa[1]);
}
// Sum of the two uint64 lanes via a single widening pair-add into a
// 128-bit lane; the low 64 bits hold the sum modulo 2^64.
inline uint64 v_reduce_sum(const v_uint64x2 &a)
{
    __m128i t0 = __lsx_vhaddw_qu_du(a.val, a.val);
    return (uint64)__lsx_vpickve2gr_du(t0, 0);
}
// Sum of the two int64 lanes, same scheme as the unsigned variant.
inline int64 v_reduce_sum(const v_int64x2 &a)
{
    __m128i t0 = __lsx_vhaddw_q_d(a.val, a.val);
    return (int64)__lsx_vpickve2gr_d(t0, 0);
}
// Sum of the two double lanes, done in scalar code.
// NOTE(review): this casts &a rather than &a.val, so it assumes 'val' is
// the first (and only data) member of v_float64x2 -- appears to hold for
// this wrapper type, but worth confirming if the layout ever changes.
inline double v_reduce_sum(const v_float64x2 &a)
{
    double *pa = (double*)&a;
    return pa[0] + pa[1];
}
// Returns {sum(a), sum(b), sum(c), sum(d)}: each output lane is the
// horizontal sum of one input vector. Implemented by interleaving the
// inputs so corresponding partial sums line up, then adding pairs of
// interleaved halves -- no scalar extraction needed.
inline v_float32x4 v_reduce_sum4(const v_float32x4& a, const v_float32x4& b,
    const v_float32x4& c, const v_float32x4& d)
{
    __m128i a0 = (__m128i)a.val;
    __m128i b0 = (__m128i)b.val;
    __m128i c0 = (__m128i)c.val;
    __m128i d0 = (__m128i)d.val;
    // Interleave a with c and b with d (low and high word halves).
    __m128i ac_l = __lsx_vilvl_w(c0, a0);
    __m128i ac_h = __lsx_vilvh_w(c0, a0);
    __m128i bd_l = __lsx_vilvl_w(d0, b0);
    __m128i bd_h = __lsx_vilvh_w(d0, b0);
    // Partial sums: ac holds interleaved a/c pair sums, bd likewise.
    __m128 ac = __lsx_vfadd_s((__m128)ac_l, (__m128)ac_h);
    __m128 bd = __lsx_vfadd_s((__m128)bd_l, (__m128)bd_h);
    // Final interleave + add collapses each vector's partials into one lane.
    return v_float32x4(__lsx_vfadd_s((__m128)__lsx_vilvl_w((__m128i)bd, (__m128i)ac),
                       (__m128)__lsx_vilvh_w((__m128i)bd, (__m128i)ac)));
}
// v_reduce_sad: sum of absolute differences of corresponding lanes,
// returned as a scalar. Each integer variant computes the per-lane
// absolute difference (__lsx_vabsd_*) and then collapses it with a chain
// of pairwise widening adds so no intermediate sum can overflow.
inline unsigned v_reduce_sad(const v_int8x16& a, const v_int8x16& b)
{
    __m128i t0 = __lsx_vabsd_b(a.val, b.val);
    __m128i t1 = __lsx_vhaddw_hu_bu(t0, t0);
    __m128i t2 = __lsx_vhaddw_wu_hu(t1, t1);
    __m128i t3 = __lsx_vhaddw_du_wu(t2, t2);
    __m128i t4 = __lsx_vhaddw_qu_du(t3, t3);
    return (unsigned)__lsx_vpickve2gr_w(t4, 0);
}
inline unsigned v_reduce_sad(const v_uint8x16& a, const v_uint8x16& b)
{
    __m128i t0 = __lsx_vabsd_bu(a.val, b.val);
    __m128i t1 = __lsx_vhaddw_hu_bu(t0, t0);
    __m128i t2 = __lsx_vhaddw_wu_hu(t1, t1);
    __m128i t3 = __lsx_vhaddw_du_wu(t2, t2);
    __m128i t4 = __lsx_vhaddw_qu_du(t3, t3);
    return (unsigned)__lsx_vpickve2gr_w(t4, 0);
}
inline unsigned v_reduce_sad(const v_uint16x8& a, const v_uint16x8& b)
{
    __m128i t0 = __lsx_vabsd_hu(a.val, b.val);
    __m128i t1 = __lsx_vhaddw_wu_hu(t0, t0);
    __m128i t2 = __lsx_vhaddw_du_wu(t1, t1);
    __m128i t3 = __lsx_vhaddw_qu_du(t2, t2);
    return (unsigned)__lsx_vpickve2gr_w(t3, 0);
}
inline unsigned v_reduce_sad(const v_int16x8& a, const v_int16x8& b)
{
    // Signed absd can reach 65535, which still fits a 16-bit lane when
    // the subsequent widening adds treat the lanes as unsigned.
    __m128i t0 = __lsx_vabsd_h(a.val, b.val);
    __m128i t1 = __lsx_vhaddw_wu_hu(t0, t0);
    __m128i t2 = __lsx_vhaddw_du_wu(t1, t1);
    __m128i t3 = __lsx_vhaddw_qu_du(t2, t2);
    return (unsigned)__lsx_vpickve2gr_w(t3, 0);
}
inline unsigned v_reduce_sad(const v_uint32x4& a, const v_uint32x4& b)
{
    __m128i t0 = __lsx_vabsd_wu(a.val, b.val);
    __m128i t1 = __lsx_vhaddw_du_wu(t0, t0);
    __m128i t2 = __lsx_vhaddw_qu_du(t1, t1);
    return (unsigned)__lsx_vpickve2gr_w(t2, 0);
}
inline unsigned v_reduce_sad(const v_int32x4& a, const v_int32x4& b)
{
    __m128i t0 = __lsx_vabsd_w(a.val, b.val);
    __m128i t1 = __lsx_vhaddw_du_wu(t0, t0);
    __m128i t2 = __lsx_vhaddw_qu_du(t1, t1);
    return (unsigned)__lsx_vpickve2gr_w(t2, 0);
}
inline float v_reduce_sad(const v_float32x4& a, const v_float32x4& b)
{
    v_float32x4 a_b = v_sub(a, b);
    // Masking out the sign bits (0x7fffffff) of each lane is fabs();
    // '&' on __m128i is the GCC vector-extension bitwise AND.
    return v_reduce_sum(v_float32x4((__m128i)a_b.val & __lsx_vreplgr2vr_w(0x7fffffff)));
}
/** Popcount **/
// Per-lane population count (number of set bits). The same unsigned
// result type serves both the signed and unsigned input of each width,
// since popcount is a pure bit operation.
#define OPENCV_HAL_IMPL_LSX_POPCOUNT(_Tpvec, _Tp, suffix) \
inline _Tpvec v_popcount(const _Tp& a) \
{ return _Tpvec(__lsx_vpcnt_##suffix(a.val)); }
// Stray trailing ';' removed from the invocations below: the macro body
// already ends in '}', so each ';' was a redundant empty declaration,
// inconsistent with the other macro instantiations in this file.
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint8x16, v_uint8x16, b)
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint8x16, v_int8x16, b)
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint16x8, v_uint16x8, h)
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint16x8, v_int16x8, h)
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint32x4, v_uint32x4, w)
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint32x4, v_int32x4, w)
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint64x2, v_uint64x2, d)
OPENCV_HAL_IMPL_LSX_POPCOUNT(v_uint64x2, v_int64x2, d)
/** Mask **/
// Scalar type-punning helper used by generic HAL code: reinterprets the
// bits of a value as the signed integer of the same width via a union.
#define OPENCV_HAL_IMPL_REINTERPRET_INT(ft, tt) \
inline tt reinterpret_int(ft x) { union {ft l; tt i;} v; v.l = x; return v.i; }
OPENCV_HAL_IMPL_REINTERPRET_INT(uchar, schar)
OPENCV_HAL_IMPL_REINTERPRET_INT(schar, schar)
OPENCV_HAL_IMPL_REINTERPRET_INT(ushort, short)
OPENCV_HAL_IMPL_REINTERPRET_INT(short, short)
OPENCV_HAL_IMPL_REINTERPRET_INT(unsigned, int)
OPENCV_HAL_IMPL_REINTERPRET_INT(int, int)
OPENCV_HAL_IMPL_REINTERPRET_INT(float, int)
OPENCV_HAL_IMPL_REINTERPRET_INT(uint64, int64)
OPENCV_HAL_IMPL_REINTERPRET_INT(int64, int64)
OPENCV_HAL_IMPL_REINTERPRET_INT(double, int64)
// v_signmask: packs the sign bit (lane < 0) of every lane into the low
// bits of an int, lane 0 at bit 0. __lsx_vmskltz_* produces that mask in
// lane 0 of the result. Unsigned/float overloads reinterpret and defer.
inline int v_signmask(const v_int8x16& a)
{
    __m128i result = __lsx_vmskltz_b(a.val);
    return __lsx_vpickve2gr_w(result, 0);
}
inline int v_signmask(const v_uint8x16& a)
{ return v_signmask(v_reinterpret_as_s8(a)) ;}
inline int v_signmask(const v_int16x8 &a)
{
    __m128i result = __lsx_vmskltz_h(a.val);
    return __lsx_vpickve2gr_w(result, 0);
}
inline int v_signmask(const v_uint16x8 &a)
{ return v_signmask(v_reinterpret_as_s16(a)); }
inline int v_signmask(const v_uint32x4& a)
{
    __m128i result = __lsx_vmskltz_w(a.val);
    return __lsx_vpickve2gr_w(result, 0);
}
inline int v_signmask(const v_int32x4& a)
{ return v_signmask(v_reinterpret_as_u32(a)); }
inline int v_signmask(const v_uint64x2& a)
{
    __m128i result = __lsx_vmskltz_d(a.val);
    return __lsx_vpickve2gr_w(result, 0);
}
inline int v_signmask(const v_int64x2& a)
{ return v_signmask(v_reinterpret_as_u64(a)); }
inline int v_signmask(const v_float32x4& a)
{ return v_signmask(*(v_int32x4*)(&a)); }
inline int v_signmask(const v_float64x2& a)
{ return v_signmask(*(v_int64x2*)(&a)); }
// v_scan_forward: index of the first lane whose sign bit is set.
// All overloads take the byte-level (s8) sign mask and divide the bit
// position by the lane size in bytes to get the lane index.
inline int v_scan_forward(const v_int8x16& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))); }
inline int v_scan_forward(const v_uint8x16& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))); }
inline int v_scan_forward(const v_int16x8& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 2; }
inline int v_scan_forward(const v_uint16x8& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 2; }
inline int v_scan_forward(const v_int32x4& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 4; }
inline int v_scan_forward(const v_uint32x4& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 4; }
inline int v_scan_forward(const v_float32x4& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 4; }
inline int v_scan_forward(const v_int64x2& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 8; }
inline int v_scan_forward(const v_uint64x2& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 8; }
inline int v_scan_forward(const v_float64x2& a) { return trailingZeros32(v_signmask(v_reinterpret_as_s8(a))) / 8; }
/** Checks **/
// v_check_all: true iff every lane has its sign/mask bit set (signmask
// equals the all-lanes mask for that element count).
// v_check_any: true iff at least one lane has its bit set.
#define OPENCV_HAL_IMPL_LSX_CHECK(_Tpvec, allmask) \
inline bool v_check_all(const _Tpvec& a) { return v_signmask(a) == allmask; } \
inline bool v_check_any(const _Tpvec& a) { return v_signmask(a) != 0; }
// Stray ';' removed from the 16x8 invocations for consistency with the
// other eight (they were redundant empty declarations).
OPENCV_HAL_IMPL_LSX_CHECK(v_uint8x16, 65535)
OPENCV_HAL_IMPL_LSX_CHECK(v_int8x16, 65535)
OPENCV_HAL_IMPL_LSX_CHECK(v_uint16x8, 255)
OPENCV_HAL_IMPL_LSX_CHECK(v_int16x8, 255)
OPENCV_HAL_IMPL_LSX_CHECK(v_uint32x4, 15)
OPENCV_HAL_IMPL_LSX_CHECK(v_int32x4, 15)
OPENCV_HAL_IMPL_LSX_CHECK(v_uint64x2, 3)
OPENCV_HAL_IMPL_LSX_CHECK(v_int64x2, 3)
OPENCV_HAL_IMPL_LSX_CHECK(v_float32x4, 15)
OPENCV_HAL_IMPL_LSX_CHECK(v_float64x2, 3)
///////////// Other math /////////////
/** Some frequent operations **/
// Fused multiply-add family for float vectors:
//   v_fma/v_muladd(a,b,c) = a*b + c   (single fused instruction)
//   v_sqrt(x)             = elementwise square root
//   v_sqr_magnitude(a,b)  = a*a + b*b
//   v_magnitude(a,b)      = sqrt(a*a + b*b)
#define OPENCV_HAL_IMPL_LSX_MULADD(_Tpvec, suffix) \
inline _Tpvec v_fma(const _Tpvec& a, const _Tpvec& b, const _Tpvec& c) \
{ return _Tpvec(__lsx_vfmadd_##suffix(a.val, b.val, c.val)); } \
inline _Tpvec v_muladd(const _Tpvec& a, const _Tpvec &b, const _Tpvec& c) \
{ return _Tpvec(__lsx_vfmadd_##suffix(a.val, b.val, c.val)); } \
inline _Tpvec v_sqrt(const _Tpvec& x) \
{ return _Tpvec(__lsx_vfsqrt_##suffix(x.val)); } \
inline _Tpvec v_sqr_magnitude(const _Tpvec& a, const _Tpvec& b) \
{ return v_fma(a, a, v_mul(b, b)); } \
inline _Tpvec v_magnitude(const _Tpvec& a, const _Tpvec& b) \
{ return v_sqrt(v_fma(a, a, v_mul(b, b))); }
OPENCV_HAL_IMPL_LSX_MULADD(v_float32x4, s)
OPENCV_HAL_IMPL_LSX_MULADD(v_float64x2, d)
// Integer multiply-accumulate: vmadd takes the accumulator first,
// so c is passed as the first operand.
inline v_int32x4 v_fma(const v_int32x4& a, const v_int32x4& b, const v_int32x4& c)
{ return v_int32x4(__lsx_vmadd_w(c.val, a.val, b.val)); }
inline v_int32x4 v_muladd(const v_int32x4& a, const v_int32x4& b, const v_int32x4& c)
{ return v_fma(a, b, c); }
// Reciprocal square root, 1/sqrt(x), per lane.
inline v_float32x4 v_invsqrt(const v_float32x4& x)
{
    return v_float32x4(__lsx_vfrsqrt_s(x.val));
}
inline v_float64x2 v_invsqrt(const v_float64x2& x)
{
    return v_float64x2(__lsx_vfrsqrt_d(x.val));
}
/** Absolute values **/
// Integer |x|, computed as the absolute difference against zero;
// the result type is the unsigned vector of the same width.
#define OPENCV_HAL_IMPL_LSX_ABS(_Tpvec, suffix) \
inline v_u##_Tpvec v_abs(const v_##_Tpvec& x) \
{ return v_u##_Tpvec(__lsx_vabsd_##suffix(x.val, __lsx_vldi(0))); }
OPENCV_HAL_IMPL_LSX_ABS(int8x16, b)
OPENCV_HAL_IMPL_LSX_ABS(int16x8, h)
OPENCV_HAL_IMPL_LSX_ABS(int32x4, w)
// Float |x|: clear the sign bit of each lane with a bitwise AND
// ('&' on __m128i is the GCC vector-extension AND).
inline v_float32x4 v_abs(const v_float32x4& x)
{ return v_float32x4(*((__m128i*)&x) & __lsx_vreplgr2vr_w(0x7fffffff)); }
inline v_float64x2 v_abs(const v_float64x2& x)
{ return v_float64x2(*((__m128i*)&x) & __lsx_vreplgr2vr_d(0x7fffffffffffffff)); }
/** Absolute difference **/
// |a - b| per lane; integer results go to the unsigned vector type so the
// full range of the difference is representable.
inline v_uint8x16 v_absdiff(const v_uint8x16& a, const v_uint8x16& b)
{ return (v_uint8x16)__lsx_vabsd_bu(a.val, b.val); }
inline v_uint16x8 v_absdiff(const v_uint16x8& a, const v_uint16x8& b)
{ return (v_uint16x8)__lsx_vabsd_hu(a.val, b.val); }
inline v_uint32x4 v_absdiff(const v_uint32x4& a, const v_uint32x4& b)
{ return (v_uint32x4)__lsx_vabsd_wu(a.val, b.val); }
inline v_uint8x16 v_absdiff(const v_int8x16& a, const v_int8x16& b)
{ return (v_uint8x16)__lsx_vabsd_b(a.val, b.val); }
inline v_uint16x8 v_absdiff(const v_int16x8& a, const v_int16x8& b)
{ return (v_uint16x8)__lsx_vabsd_h(a.val, b.val); }
inline v_uint32x4 v_absdiff(const v_int32x4& a, const v_int32x4& b)
{ return (v_uint32x4)__lsx_vabsd_w(a.val, b.val); }
inline v_float32x4 v_absdiff(const v_float32x4& a, const v_float32x4& b)
{ return v_abs(v_sub(a, b)); }
inline v_float64x2 v_absdiff(const v_float64x2& a, const v_float64x2& b)
{ return v_abs(v_sub(a, b)); }
/** Saturating absolute difference **/
// Signed |a - b| kept in the signed type (saturating semantics).
inline v_int8x16 v_absdiffs(const v_int8x16& a, const v_int8x16& b)
{
    // Conditional-negate trick: m is all-ones where a < b, so
    // (d ^ m) - m == -d exactly in those lanes.
    v_int8x16 d = v_sub(a, b);
    v_int8x16 m = v_lt(a, b);
    return v_sub(v_xor(d, m), m);
}
inline v_int16x8 v_absdiffs(const v_int16x8& a, const v_int16x8& b)
{ return v_sub(v_max(a, b), v_min(a, b)); }
///////// Conversions /////////
/** Rounding **/
// Float -> int32 conversions: v_round uses the current (round-to-nearest)
// mode, v_trunc rounds toward zero, v_floor/v_ceil round the float first
// (vfrintrm/vfrintrp) and then truncate.
inline v_int32x4 v_round(const v_float32x4& a)
{ return v_int32x4(__lsx_vftint_w_s(a.val)); }
// NOTE(review): vftint_w_d(a, a) narrows both operands, so the two
// rounded doubles are duplicated into lanes 2-3 rather than zeroed --
// confirm that callers only consume the low two lanes.
inline v_int32x4 v_round(const v_float64x2& a)
{ return v_int32x4(__lsx_vftint_w_d(a.val, a.val)); }
// Two-vector form: a fills lanes 0-1, b fills lanes 2-3.
inline v_int32x4 v_round(const v_float64x2& a, const v_float64x2& b)
{ return v_int32x4(__lsx_vftint_w_d(b.val, a.val)); }
inline v_int32x4 v_trunc(const v_float32x4& a)
{ return v_int32x4(__lsx_vftintrz_w_s(a.val)); }
inline v_int32x4 v_trunc(const v_float64x2& a)
{ return v_int32x4(__lsx_vftintrz_w_d(a.val, a.val)); }
inline v_int32x4 v_floor(const v_float32x4& a)
{ return v_int32x4(__lsx_vftintrz_w_s(__m128(__lsx_vfrintrm_s(a.val)))); }
inline v_int32x4 v_floor(const v_float64x2& a)
{ return v_trunc(v_float64x2(__lsx_vfrintrm_d(a.val))); }
inline v_int32x4 v_ceil(const v_float32x4& a)
{ return v_int32x4(__lsx_vftintrz_w_s(__m128(__lsx_vfrintrp_s(a.val)))); }
inline v_int32x4 v_ceil(const v_float64x2& a)
{ return v_trunc(v_float64x2(__lsx_vfrintrp_d(a.val))); }
/** To float **/
// int32 / float64 -> float32, and int32/float32/int64 -> float64.
// The *_high variants convert the upper half of the source lanes;
// the two-argument f32 form packs a into lanes 0-1 and b into 2-3.
inline v_float32x4 v_cvt_f32(const v_int32x4& a)
{ return v_float32x4(__lsx_vffint_s_w(a.val)); }
inline v_float32x4 v_cvt_f32(const v_float64x2& a)
{ return v_float32x4(__lsx_vfcvt_s_d(a.val, a.val)); }
inline v_float32x4 v_cvt_f32(const v_float64x2& a, const v_float64x2& b)
{ return v_float32x4(__lsx_vfcvt_s_d(b.val, a.val)); }
inline v_float64x2 v_cvt_f64(const v_int32x4& a)
{ return v_float64x2(__lsx_vffintl_d_w(a.val)); }
inline v_float64x2 v_cvt_f64_high(const v_int32x4& a)
{ return v_float64x2(__lsx_vffinth_d_w(a.val)); }
inline v_float64x2 v_cvt_f64(const v_float32x4& a)
{ return v_float64x2(__lsx_vfcvtl_d_s(a.val)); }
inline v_float64x2 v_cvt_f64_high(const v_float32x4& a)
{ return v_float64x2(__lsx_vfcvth_d_s(a.val)); }
inline v_float64x2 v_cvt_f64(const v_int64x2& v)
{ return v_float64x2(__lsx_vffint_d_l(v.val)); }
//////////////// Lookup table access ////////////////
// v_lut family: gather from a scalar table 'tab' at the positions in
// 'idx'. v_lut reads one element per lane; v_lut_pairs reads two
// consecutive elements per index; v_lut_quads reads four. There is no
// hardware gather here -- elements are loaded by scalar code and packed
// with _v128_setr_* / __lsx_vld. Unsigned overloads reinterpret the
// signed result. The wider (pairs/quads) loads read through casted
// pointers; the tables are assumed to permit such access.
inline v_int8x16 v_lut(const schar* tab, const int* idx)
{
    return v_int8x16(_v128_setr_b(tab[idx[0]], tab[idx[1]], tab[idx[2]], tab[idx[3]],
           tab[idx[4]], tab[idx[5]], tab[idx[6]], tab[idx[7]], tab[idx[8]],
           tab[idx[9]], tab[idx[10]], tab[idx[11]], tab[idx[12]], tab[idx[13]],
           tab[idx[14]], tab[idx[15]]));
}
inline v_int8x16 v_lut_pairs(const schar* tab, const int* idx)
{
    return v_int8x16(_v128_setr_h(*(const short*)(tab + idx[0]), *(const short*)(tab + idx[1]),
           *(const short*)(tab + idx[2]), *(const short*)(tab + idx[3]), *(const short*)(tab + idx[4]),
           *(const short*)(tab + idx[5]), *(const short*)(tab + idx[6]), *(const short*)(tab + idx[7])));
}
inline v_int8x16 v_lut_quads(const schar* tab, const int* idx)
{
    return v_int8x16(_v128_setr_w(*(const int*)(tab + idx[0]), *(const int*)(tab + idx[1]),
           *(const int*)(tab + idx[2]), *(const int*)(tab + idx[3])));
}
inline v_uint8x16 v_lut(const uchar* tab, const int* idx)
{ return v_reinterpret_as_u8(v_lut((const schar*)tab, idx)); }
inline v_uint8x16 v_lut_pairs(const uchar* tab, const int* idx)
{ return v_reinterpret_as_u8(v_lut_pairs((const schar*)tab, idx)); }
inline v_uint8x16 v_lut_quads(const uchar* tab, const int* idx)
{ return v_reinterpret_as_u8(v_lut_quads((const schar*)tab, idx)); }
inline v_int16x8 v_lut(const short* tab, const int* idx)
{
    return v_int16x8(_v128_setr_h(tab[idx[0]], tab[idx[1]], tab[idx[2]], tab[idx[3]],
           tab[idx[4]], tab[idx[5]], tab[idx[6]], tab[idx[7]]));
}
inline v_int16x8 v_lut_pairs(const short* tab, const int* idx)
{
    return v_int16x8(_v128_setr_w(*(const int*)(tab + idx[0]), *(const int*)(tab + idx[1]),
           *(const int*)(tab + idx[2]), *(const int*)(tab + idx[3])));
}
inline v_int16x8 v_lut_quads(const short* tab, const int* idx)
{
    return v_int16x8(_v128_setr_d(*(const int64_t*)(tab + idx[0]), *(const int64_t*)(tab + idx[1])));
}
inline v_uint16x8 v_lut(const ushort* tab, const int* idx)
{ return v_reinterpret_as_u16(v_lut((const short *)tab, idx)); }
inline v_uint16x8 v_lut_pairs(const ushort* tab, const int* idx)
{ return v_reinterpret_as_u16(v_lut_pairs((const short *)tab, idx)); }
inline v_uint16x8 v_lut_quads(const ushort* tab, const int* idx)
{ return v_reinterpret_as_u16(v_lut_quads((const short *)tab, idx)); }
inline v_int32x4 v_lut(const int* tab, const int* idx)
{
    return v_int32x4(_v128_setr_w(tab[idx[0]], tab[idx[1]], tab[idx[2]], tab[idx[3]]));
}
inline v_int32x4 v_lut_pairs(const int *tab, const int* idx)
{
    return v_int32x4(_v128_setr_d(*(const int64_t*)(tab + idx[0]), *(const int64_t*)(tab + idx[1])));
}
// For 32-bit quads the whole vector comes from one contiguous load.
inline v_int32x4 v_lut_quads(const int* tab, const int* idx)
{
    return v_int32x4(__lsx_vld(tab + idx[0], 0));
}
inline v_uint32x4 v_lut(const unsigned* tab, const int* idx) { return v_reinterpret_as_u32(v_lut((const int *)tab, idx)); }
inline v_uint32x4 v_lut_pairs(const unsigned* tab, const int* idx) { return v_reinterpret_as_u32(v_lut_pairs((const int *)tab, idx)); }
inline v_uint32x4 v_lut_quads(const unsigned* tab, const int* idx) { return v_reinterpret_as_u32(v_lut_quads((const int *)tab, idx)); }
inline v_int64x2 v_lut(const int64_t* tab, const int *idx)
{
    return v_int64x2(_v128_setr_d(tab[idx[0]], tab[idx[1]]));
}
inline v_int64x2 v_lut_pairs(const int64_t* tab, const int* idx)
{
    return v_int64x2(__lsx_vld(tab + idx[0], 0));
}
inline v_uint64x2 v_lut(const uint64_t* tab, const int* idx) { return v_reinterpret_as_u64(v_lut((const int64_t *)tab, idx)); }
inline v_uint64x2 v_lut_pairs(const uint64_t* tab, const int* idx) { return v_reinterpret_as_u64(v_lut_pairs((const int64_t *)tab, idx)); }
inline v_float32x4 v_lut(const float* tab, const int* idx)
{
    return v_float32x4(_v128_setr_ps(tab[idx[0]], tab[idx[1]], tab[idx[2]], tab[idx[3]]));
}
inline v_float32x4 v_lut_pairs(const float* tab, const int* idx)
{
    return v_float32x4((__m128)_v128_setr_pd(*(const double*)(tab + idx[0]), *(const double*)(tab + idx[1])));
}
inline v_float32x4 v_lut_quads(const float* tab, const int* idx)
{
    return v_float32x4((__m128)__lsx_vld(tab + idx[0], 0));
}
inline v_float64x2 v_lut(const double* tab, const int* idx)
{
    return v_float64x2(_v128_setr_pd(tab[idx[0]], tab[idx[1]]));
}
inline v_float64x2 v_lut_pairs(const double* tab, const int* idx)
{
    return v_float64x2((__m128d)__lsx_vld(tab + idx[0], 0));
}
// v_lut with a vector of indices: the indices are read back to scalar
// memory and the pointer-based overloads above do the work.
inline v_int32x4 v_lut(const int* tab, const v_int32x4& idxvec)
{
    int *idx = (int*)&idxvec.val;
    return v_lut(tab, idx);
}
inline v_uint32x4 v_lut(const unsigned* tab, const v_int32x4& idxvec)
{
    return v_reinterpret_as_u32(v_lut((const int *)tab, idxvec));
}
inline v_float32x4 v_lut(const float* tab, const v_int32x4& idxvec)
{
    const int *idx = (const int*)&idxvec.val;
    return v_lut(tab, idx);
}
inline v_float64x2 v_lut(const double* tab, const v_int32x4& idxvec)
{
    const int *idx = (const int*)&idxvec.val;
    return v_lut(tab, idx);
}
// Gather four (x, y) float pairs at tab[idx[i]] and split them into an
// x-vector and a y-vector via word interleaves (a 4x2 transpose).
inline void v_lut_deinterleave(const float* tab, const v_int32x4& idxvec, v_float32x4& x, v_float32x4& y)
{
    const int *idx = (const int*)&idxvec.val;
    __m128i xy0 = __lsx_vld(tab + idx[0], 0);
    __m128i xy1 = __lsx_vld(tab + idx[1], 0);
    __m128i xy2 = __lsx_vld(tab + idx[2], 0);
    __m128i xy3 = __lsx_vld(tab + idx[3], 0);
    __m128i xy01 = __lsx_vilvl_d(xy1, xy0);
    __m128i xy23 = __lsx_vilvl_d(xy3, xy2);
    __m128i xxyy02 = __lsx_vilvl_w(xy23, xy01);
    __m128i xxyy13 = __lsx_vilvh_w(xy23, xy01);
    x = v_float32x4((__m128)__lsx_vilvl_w(xxyy13, xxyy02));
    y = v_float32x4((__m128)__lsx_vilvh_w(xxyy13, xxyy02));
}
// Gather two (x, y) double pairs and split into x/y via 64-bit interleave.
inline void v_lut_deinterleave(const double* tab, const v_int32x4& idxvec, v_float64x2& x, v_float64x2& y)
{
    const int* idx = (const int*)&idxvec.val;
    __m128i xy0 = __lsx_vld(tab + idx[0], 0);
    __m128i xy1 = __lsx_vld(tab + idx[1], 0);
    x = v_float64x2((__m128d)__lsx_vilvl_d(xy1, xy0));
    y = v_float64x2((__m128d)__lsx_vilvh_d(xy1, xy0));
}
// v_interleave_pairs / v_interleave_quads: fixed in-register lane
// permutations, implemented with byte shuffles (__lsx_vshuf_b) whose
// control constants encode the destination byte order, or with a word
// shuffle immediate for the 32-bit case.
inline v_int8x16 v_interleave_pairs(const v_int8x16& vec)
{
    return v_int8x16(__lsx_vshuf_b(vec.val, vec.val,
           _v128_setr_d(0x0705060403010200, 0x0f0d0e0c0b090a08)));
}
inline v_uint8x16 v_interleave_pairs(const v_uint8x16& vec)
{ return v_reinterpret_as_u8(v_interleave_pairs(v_reinterpret_as_s8(vec))); }
inline v_int8x16 v_interleave_quads(const v_int8x16& vec)
{
    return v_int8x16(__lsx_vshuf_b(vec.val, vec.val,
           _v128_setr_d(0x0703060205010400, 0x0f0b0e0a0d090c08)));
}
inline v_uint8x16 v_interleave_quads(const v_uint8x16& vec)
{ return v_reinterpret_as_u8(v_interleave_quads(v_reinterpret_as_s8(vec))); }
inline v_int16x8 v_interleave_pairs(const v_int16x8& vec)
{
    return v_int16x8(__lsx_vshuf_b(vec.val, vec.val,
           _v128_setr_d(0x0706030205040100, 0x0f0e0b0a0d0c0908)));
}
inline v_uint16x8 v_interleave_pairs(const v_uint16x8& vec)
{ return v_reinterpret_as_u16(v_interleave_pairs(v_reinterpret_as_s16(vec))); }
inline v_int16x8 v_interleave_quads(const v_int16x8& vec)
{
    return v_int16x8(__lsx_vshuf_b(vec.val, vec.val,
           _v128_setr_d(0x0b0a030209080100, 0x0f0e07060d0c0504)));
}
inline v_uint16x8 v_interleave_quads(const v_uint16x8& vec)
{ return v_reinterpret_as_u16(v_interleave_quads(v_reinterpret_as_s16(vec))); }
inline v_int32x4 v_interleave_pairs(const v_int32x4& vec)
{
    // 0xd8 = lane order {0, 2, 1, 3}.
    return v_int32x4(__lsx_vshuf4i_w(vec.val, 0xd8));
}
inline v_uint32x4 v_interleave_pairs(const v_uint32x4& vec)
{ return v_reinterpret_as_u32(v_interleave_pairs(v_reinterpret_as_s32(vec))); }
inline v_float32x4 v_interleave_pairs(const v_float32x4& vec)
{ return v_reinterpret_as_f32(v_interleave_pairs(v_reinterpret_as_s32(vec))); }
// v_pack_triplets: compact 4-element groups down to their first three
// elements (drop every 4th lane); the byte shuffle pulls zeros from the
// second source for the vacated tail. 32-bit variants are identity since
// the 128-bit register holds exactly one 4-lane group.
inline v_int8x16 v_pack_triplets(const v_int8x16& vec)
{
    __m128i zero = __lsx_vldi(0);
    return v_int8x16(__lsx_vshuf_b(zero, vec.val,
           _v128_set_d(0x1211100f0e0d0c0a, 0x0908060504020100)));
}
inline v_uint8x16 v_pack_triplets(const v_uint8x16& vec)
{ return v_reinterpret_as_u8(v_pack_triplets(v_reinterpret_as_s8(vec))); }
inline v_int16x8 v_pack_triplets(const v_int16x8& vec)
{
    __m128i zero = __lsx_vldi(0);
    return v_int16x8(__lsx_vshuf_b(zero, vec.val,
           _v128_set_d(0x11100f0e0d0c0b0a, 0x0908050403020100)));
}
inline v_uint16x8 v_pack_triplets(const v_uint16x8& vec)
{ return v_reinterpret_as_u16(v_pack_triplets(v_reinterpret_as_s16(vec))); }
inline v_int32x4 v_pack_triplets(const v_int32x4& vec) { return vec; }
inline v_uint32x4 v_pack_triplets(const v_uint32x4& vec) { return vec; }
inline v_float32x4 v_pack_triplets(const v_float32x4& vec) { return vec; }
//////////// Matrix operations /////////
/////////// Dot Product /////////
// v_dotprod: multiply corresponding lanes and add adjacent products into
// lanes of double the width. Implemented as widening multiply of the
// even lanes followed by a widening multiply-accumulate of the odd lanes.
// 16 >> 32
inline v_int32x4 v_dotprod(const v_int16x8& a, const v_int16x8& b)
{
    __m128i x = a.val, y = b.val;
    return v_int32x4(__lsx_vmaddwod_w_h(__lsx_vmulwev_w_h(x, y), x, y));
}
// Accumulating form: c + dotprod(a, b).
inline v_int32x4 v_dotprod(const v_int16x8& a, const v_int16x8& b, const v_int32x4& c)
{
    __m128i x = a.val, y = b.val, z = c.val;
    __m128i t = __lsx_vmaddwev_w_h(z, x, y);
    return v_int32x4(__lsx_vmaddwod_w_h(t, x, y));
}
// 32 >> 64
inline v_int64x2 v_dotprod(const v_int32x4& a, const v_int32x4& b)
{
    __m128i x = a.val, y = b.val;
    return v_int64x2(__lsx_vmaddwod_d_w(__lsx_vmulwev_d_w(x, y), x, y));
}
inline v_int64x2 v_dotprod(const v_int32x4& a, const v_int32x4& b, const v_int64x2& c)
{
    __m128i x = a.val, y = b.val, z = c.val;
    __m128i t = __lsx_vmaddwev_d_w(z, x, y);
    return v_int64x2(__lsx_vmaddwod_d_w(t, x, y));
}
// v_dotprod_expand: multiply lanes and sum groups of four products into
// lanes four times as wide, preserving in-order lane grouping (even/odd
// product vectors are pair-summed separately and then added).
// 8 >> 32
inline v_uint32x4 v_dotprod_expand(const v_uint8x16& a, const v_uint8x16& b)
{
    __m128i x = a.val, y = b.val;
    __m128i even = __lsx_vmulwev_h_bu(x, y);
    __m128i odd = __lsx_vmulwod_h_bu(x, y);
    __m128i prod0 = __lsx_vhaddw_wu_hu(even, even);
    __m128i prod1 = __lsx_vhaddw_wu_hu(odd, odd);
    return v_uint32x4(__lsx_vadd_w(prod0, prod1));
}
inline v_uint32x4 v_dotprod_expand(const v_uint8x16& a, const v_uint8x16& b, const v_uint32x4& c)
{ return v_add(v_dotprod_expand(a, b), c) ;}
inline v_int32x4 v_dotprod_expand(const v_int8x16& a, const v_int8x16& b)
{
    __m128i x = a.val, y = b.val;
    __m128i even = __lsx_vmulwev_h_b(x, y);
    __m128i odd = __lsx_vmulwod_h_b(x, y);
    __m128i prod0 = __lsx_vhaddw_w_h(even, even);
    __m128i prod1 = __lsx_vhaddw_w_h(odd, odd);
    return v_int32x4(__lsx_vadd_w(prod0, prod1));
}
inline v_int32x4 v_dotprod_expand(const v_int8x16& a, const v_int8x16& b, const v_int32x4& c)
{ return v_add(v_dotprod_expand(a, b), c); }
// 16 >> 64
inline v_uint64x2 v_dotprod_expand(const v_uint16x8& a, const v_uint16x8& b)
{
    __m128i x = a.val, y = b.val;
    __m128i even = __lsx_vmulwev_w_hu(x, y);
    __m128i odd = __lsx_vmulwod_w_hu(x, y);
    __m128i prod0 = __lsx_vhaddw_du_wu(even, even);
    __m128i prod1 = __lsx_vhaddw_du_wu(odd, odd);
    return v_uint64x2(__lsx_vadd_d(prod0, prod1));
}
inline v_uint64x2 v_dotprod_expand(const v_uint16x8& a, const v_uint16x8& b, const v_uint64x2& c)
{ return v_add(v_dotprod_expand(a, b), c); }
inline v_int64x2 v_dotprod_expand(const v_int16x8& a, const v_int16x8& b)
{
    __m128i x = a.val, y = b.val;
    __m128i even = __lsx_vmulwev_w_h(x, y);
    __m128i odd = __lsx_vmulwod_w_h(x, y);
    __m128i prod0 = __lsx_vhaddw_d_w(even, even);
    __m128i prod1 = __lsx_vhaddw_d_w(odd, odd);
    return v_int64x2(__lsx_vadd_d(prod0, prod1));
}
inline v_int64x2 v_dotprod_expand(const v_int16x8& a, const v_int16x8& b, const v_int64x2& c)
{ return v_add(v_dotprod_expand(a, b), c); }
//32 >> 64f
// 32-bit dot product converted to double lanes.
inline v_float64x2 v_dotprod_expand(const v_int32x4& a, const v_int32x4& b)
{ return v_cvt_f64(v_dotprod(a, b)); }
inline v_float64x2 v_dotprod_expand(const v_int32x4& a, const v_int32x4& b, const v_float64x2& c)
{ return v_add(v_dotprod_expand(a, b), c); }
///////// Fast Dot Product //////
// "Fast" dot products may return lane sums in any order; here most
// variants are already optimal and simply delegate to the exact forms.
// 16 >> 32
inline v_int32x4 v_dotprod_fast(const v_int16x8& a, const v_int16x8& b)
{ return v_dotprod(a, b); }
inline v_int32x4 v_dotprod_fast(const v_int16x8& a, const v_int16x8& b, const v_int32x4& c)
{ return v_dotprod(a, b, c); }
// 32 >> 64
inline v_int64x2 v_dotprod_fast(const v_int32x4& a, const v_int32x4& b)
{ return v_dotprod(a, b); }
inline v_int64x2 v_dotprod_fast(const v_int32x4& a, const v_int32x4& b, const v_int64x2& c)
{ return v_dotprod(a, b, c); }
// 8 >> 32
inline v_uint32x4 v_dotprod_expand_fast(const v_uint8x16& a, const v_uint8x16& b)
{ return v_dotprod_expand(a, b); }
inline v_uint32x4 v_dotprod_expand_fast(const v_uint8x16& a, const v_uint8x16& b, const v_uint32x4& c)
{ return v_dotprod_expand(a, b, c); }
inline v_int32x4 v_dotprod_expand_fast(const v_int8x16& a, const v_int8x16& b)
{ return v_dotprod_expand(a, b); }
inline v_int32x4 v_dotprod_expand_fast(const v_int8x16& a, const v_int8x16& b, const v_int32x4& c)
{ return v_dotprod_expand(a, b, c); }
// 16 >> 64
// Unlike the exact form, the final pair-sums land in a different lane
// arrangement (relaxed ordering is allowed for the fast variant).
inline v_uint64x2 v_dotprod_expand_fast(const v_uint16x8& a, const v_uint16x8& b)
{
    __m128i x = a.val, y = b.val;
    __m128i even = __lsx_vmulwev_w_hu(x, y);
    __m128i odd = __lsx_vmulwod_w_hu(x, y);
    __m128i prod0 = __lsx_vhaddw_du_wu(even, even);
    __m128i prod1 = __lsx_vhaddw_du_wu(odd, odd);
    return v_uint64x2(__lsx_vilvl_d(__lsx_vhaddw_qu_du(prod0, prod0), __lsx_vhaddw_qu_du(prod1, prod1)));
}
inline v_uint64x2 v_dotprod_expand_fast(const v_uint16x8& a, const v_uint16x8& b, const v_uint64x2& c)
{ return v_add(v_dotprod_expand_fast(a, b), c); }
inline v_int64x2 v_dotprod_expand_fast(const v_int16x8& a, const v_int16x8& b)
{
    __m128i x = a.val, y = b.val;
    // 32-bit pair products, then manual sign extension to 64 bits
    // (interleave with the arithmetic-shift sign word) before adding.
    __m128i prod = __lsx_vmaddwod_w_h(__lsx_vmulwev_w_h(x, y), x, y);
    __m128i sign = __lsx_vsrai_w(prod, 31);
    __m128i lo = __lsx_vilvl_w(sign, prod);
    __m128i hi = __lsx_vilvh_w(sign, prod);
    return v_int64x2(__lsx_vadd_d(lo, hi));
}
inline v_int64x2 v_dotprod_expand_fast(const v_int16x8& a, const v_int16x8& b, const v_int64x2& c)
{ return v_add(v_dotprod_expand_fast(a, b), c); }
// 32 >> 64f
inline v_float64x2 v_dotprod_expand_fast(const v_int32x4& a, const v_int32x4& b)
{ return v_dotprod_expand(a, b); }
inline v_float64x2 v_dotprod_expand_fast(const v_int32x4& a, const v_int32x4& b, const v_float64x2& c)
{ return v_dotprod_expand(a, b, c); }
// v_matmul: 4x4 matrix (columns m0..m3) times vector v.
// Each v lane is broadcast with a shuffle immediate (0x00/0x55/0xAA/0xFF
// select lane 0/1/2/3 into all positions), multiplied by its column, and
// the four partial vectors are summed.
inline v_float32x4 v_matmul(const v_float32x4& v, const v_float32x4& m0,
                            const v_float32x4& m1, const v_float32x4& m2, const v_float32x4& m3)
{
    __m128i x = (__m128i)v.val;
    __m128 v0 = __lsx_vfmul_s((__m128)__lsx_vshuf4i_w(x, 0x0), m0.val);
    __m128 v1 = __lsx_vfmul_s((__m128)__lsx_vshuf4i_w(x, 0x55), m1.val);
    __m128 v2 = __lsx_vfmul_s((__m128)__lsx_vshuf4i_w(x, 0xAA), m2.val);
    __m128 v3 = __lsx_vfmul_s((__m128)__lsx_vshuf4i_w(x, 0xFF), m3.val);
    return v_float32x4(__lsx_vfadd_s(__lsx_vfadd_s(v0, v1), __lsx_vfadd_s(v2, v3)));
}
// v_matmuladd: like v_matmul but with the fourth column replaced by an
// additive vector 'a' (affine transform); 'a' is folded into the third
// term via fused multiply-add.
inline v_float32x4 v_matmuladd(const v_float32x4& v, const v_float32x4& m0,
                               const v_float32x4& m1, const v_float32x4& m2, const v_float32x4& a)
{
    __m128i x = (__m128i)v.val;
    __m128 v0 = __lsx_vfmul_s((__m128)__lsx_vshuf4i_w(x, 0x0), m0.val);
    __m128 v1 = __lsx_vfmul_s((__m128)__lsx_vshuf4i_w(x, 0x55), m1.val);
    __m128 v2 = __lsx_vfmadd_s((__m128)__lsx_vshuf4i_w(x, 0xAA), m2.val, a.val);
    return v_float32x4(__lsx_vfadd_s(__lsx_vfadd_s(v0, v1), v2));
}
// 4x4 transpose of 32-bit lanes: two rounds of interleaves -- words
// first (pairing rows 0/1 and 2/3), then doublewords to finish. The
// cast parameters exist for float support but both instantiations below
// use OPENCV_HAL_NOP; the float overload is written out explicitly.
#define OPENCV_HAL_IMPL_LSX_TRANSPOSE4X4(_Tpvec, cast_from, cast_to) \
inline void v_transpose4x4(const _Tpvec& a0, const _Tpvec& a1, \
                           const _Tpvec& a2, const _Tpvec& a3, \
                           _Tpvec& b0, _Tpvec& b1, _Tpvec& b2, _Tpvec& b3) \
{ \
__m128i t0 = cast_from(__lsx_vilvl_w(a1.val, a0.val)); \
__m128i t1 = cast_from(__lsx_vilvl_w(a3.val, a2.val)); \
__m128i t2 = cast_from(__lsx_vilvh_w(a1.val, a0.val)); \
__m128i t3 = cast_from(__lsx_vilvh_w(a3.val, a2.val)); \
b0.val = cast_to(__lsx_vilvl_d(t1, t0)); \
b1.val = cast_to(__lsx_vilvh_d(t1, t0)); \
b2.val = cast_to(__lsx_vilvl_d(t3, t2)); \
b3.val = cast_to(__lsx_vilvh_d(t3, t2)); \
}
OPENCV_HAL_IMPL_LSX_TRANSPOSE4X4(v_uint32x4, OPENCV_HAL_NOP, OPENCV_HAL_NOP)
OPENCV_HAL_IMPL_LSX_TRANSPOSE4X4(v_int32x4, OPENCV_HAL_NOP, OPENCV_HAL_NOP)
// Float overload: same interleave sequence on the integer views of the
// registers, with explicit casts in and out of __m128.
inline void v_transpose4x4(const v_float32x4& a0, const v_float32x4& a1,
                           const v_float32x4& a2, const v_float32x4& a3,
                           v_float32x4& b0, v_float32x4& b1, v_float32x4& b2, v_float32x4& b3)
{
    __m128i vec0 = (__m128i)a0.val, vec1 = (__m128i)a1.val;
    __m128i vec2 = (__m128i)a2.val, vec3 = (__m128i)a3.val;
    __m128i t0 = __lsx_vilvl_w(vec1, vec0);
    __m128i t1 = __lsx_vilvl_w(vec3, vec2);
    __m128i t2 = __lsx_vilvh_w(vec1, vec0);
    __m128i t3 = __lsx_vilvh_w(vec3, vec2);
    b0.val = __m128(__lsx_vilvl_d(t1, t0));
    b1.val = __m128(__lsx_vilvh_d(t1, t0));
    b2.val = __m128(__lsx_vilvl_d(t3, t2));
    b3.val = __m128(__lsx_vilvh_d(t3, t2));
}
////////////////// Value reordering ////////////////
/* Expand */
// Widen lanes to double width: vsllwil_* (shift 0) widens the low half,
// vexth_* widens the high half. v_load_expand reads a full vector but
// only the low half contributes to the widened result.
#define OPENCV_HAL_IMPL_LSX_EXPAND(_Tpvec, _Tpwvec, _Tp, intrin_lo, intrin_hi) \
inline void v_expand(const _Tpvec& a, _Tpwvec& b0, _Tpwvec& b1) \
{ \
b0.val = intrin_lo(a.val, 0); \
b1.val = intrin_hi(a.val); \
} \
inline _Tpwvec v_expand_low(const _Tpvec& a) \
{ return _Tpwvec(intrin_lo(a.val, 0)); } \
inline _Tpwvec v_expand_high(const _Tpvec& a) \
{ return _Tpwvec(intrin_hi(a.val)); } \
inline _Tpwvec v_load_expand(const _Tp* ptr) \
{ \
__m128i a = __lsx_vld(ptr, 0); \
return _Tpwvec(intrin_lo(a, 0)); \
}
OPENCV_HAL_IMPL_LSX_EXPAND(v_uint8x16, v_uint16x8, uchar, __lsx_vsllwil_hu_bu, __lsx_vexth_hu_bu)
OPENCV_HAL_IMPL_LSX_EXPAND(v_int8x16, v_int16x8, schar, __lsx_vsllwil_h_b, __lsx_vexth_h_b)
OPENCV_HAL_IMPL_LSX_EXPAND(v_uint16x8, v_uint32x4, ushort, __lsx_vsllwil_wu_hu, __lsx_vexth_wu_hu)
OPENCV_HAL_IMPL_LSX_EXPAND(v_int16x8, v_int32x4, short, __lsx_vsllwil_w_h, __lsx_vexth_w_h)
OPENCV_HAL_IMPL_LSX_EXPAND(v_uint32x4, v_uint64x2, unsigned, __lsx_vsllwil_du_wu, __lsx_vexth_du_wu)
OPENCV_HAL_IMPL_LSX_EXPAND(v_int32x4, v_int64x2, int, __lsx_vsllwil_d_w, __lsx_vexth_d_w)
// Quad expand: widen 8-bit elements straight to 32-bit lanes by chaining
// two low-half widening steps.
#define OPENCV_HAL_IMPL_LSX_EXPAND_Q(_Tpvec, _Tp, intrin_lo, intrin_hi) \
inline _Tpvec v_load_expand_q(const _Tp* ptr) \
{ \
__m128i a = __lsx_vld(ptr, 0); \
__m128i b = intrin_lo(a, 0); \
return _Tpvec(intrin_hi(b, 0)); \
}
OPENCV_HAL_IMPL_LSX_EXPAND_Q(v_uint32x4, uchar, __lsx_vsllwil_hu_bu, __lsx_vsllwil_wu_hu)
OPENCV_HAL_IMPL_LSX_EXPAND_Q(v_int32x4, schar, __lsx_vsllwil_h_b, __lsx_vsllwil_w_h)
/* pack */
// 16
// Saturating packs: narrow two 16-lane-wide vectors into one vector of
// half-width lanes (a fills the low half of the result, b the high half).
// _lsx_packs_h / _lsx_packus_h are helper macros defined earlier in this
// header.
inline v_int8x16 v_pack(const v_int16x8& a, const v_int16x8& b)
{ return v_int8x16(_lsx_packs_h(a.val, b.val)); }
inline v_uint8x16 v_pack(const v_uint16x8& a, const v_uint16x8& b)
{ return v_uint8x16(__lsx_vssrlrni_bu_h(b.val, a.val, 0)); }
inline v_uint8x16 v_pack_u(const v_int16x8& a, const v_int16x8& b)
{ return v_uint8x16(_lsx_packus_h(a.val, b.val)); }
// Pack a single vector and store only the low (narrowed) half.
inline void v_pack_store(schar* ptr, const v_int16x8& a)
{ v_store_low(ptr, v_pack(a, a)); }
inline void v_pack_store(uchar* ptr, const v_uint16x8& a)
{ v_store_low(ptr, v_pack(a, a)); }
inline void v_pack_u_store(uchar* ptr, const v_int16x8& a)
{ v_store_low(ptr, v_pack_u(a, a)); }
// Rounding right shift by compile-time n, then saturating pack.  The
// *_store variants keep only the narrowed low 64 bits.
template<int n> inline
v_uint8x16 v_rshr_pack(const v_uint16x8& a, const v_uint16x8& b)
{ return v_uint8x16(__lsx_vssrlrni_bu_h(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_store(uchar* ptr, const v_uint16x8& a)
{ __lsx_vstelm_d(__lsx_vssrlrni_bu_h(a.val, a.val, n), ptr, 0, 0); }
template<int n> inline
v_uint8x16 v_rshr_pack_u(const v_int16x8& a, const v_int16x8& b)
{ return v_uint8x16(__lsx_vssrarni_bu_h(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_u_store(uchar* ptr, const v_int16x8& a)
{ __lsx_vstelm_d(__lsx_vssrarni_bu_h(a.val, a.val, n), ptr, 0, 0); }
template<int n> inline
v_int8x16 v_rshr_pack(const v_int16x8& a, const v_int16x8& b)
{ return v_int8x16(__lsx_vssrarni_b_h(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_store(schar* ptr, const v_int16x8& a)
{ __lsx_vstelm_d(__lsx_vssrarni_b_h(a.val, a.val, n), ptr, 0, 0); }
//32
// Same pattern for 32-bit -> 16-bit narrowing.
inline v_int16x8 v_pack(const v_int32x4& a, const v_int32x4& b)
{ return v_int16x8(__lsx_vssrarni_h_w(b.val, a.val, 0)); }
inline v_uint16x8 v_pack(const v_uint32x4& a, const v_uint32x4& b)
{ return v_uint16x8(__lsx_vssrlrni_hu_w(b.val, a.val, 0)); }
inline v_uint16x8 v_pack_u(const v_int32x4& a, const v_int32x4& b)
{ return v_uint16x8(__lsx_vssrarni_hu_w(b.val, a.val, 0)); }
inline void v_pack_store(short* ptr, const v_int32x4& a)
{ v_store_low(ptr, v_pack(a, a)); }
inline void v_pack_store(ushort *ptr, const v_uint32x4& a)
{ __lsx_vstelm_d(__lsx_vssrlrni_hu_w(a.val, a.val, 0), ptr, 0, 0); }
inline void v_pack_u_store(ushort* ptr, const v_int32x4& a)
{ __lsx_vstelm_d(__lsx_vssrarni_hu_w(a.val, a.val, 0), ptr, 0, 0); }
template<int n> inline
v_uint16x8 v_rshr_pack(const v_uint32x4& a, const v_uint32x4& b)
{ return v_uint16x8(__lsx_vssrlrni_hu_w(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_store(ushort* ptr, const v_uint32x4& a)
{ __lsx_vstelm_d(__lsx_vssrlrni_hu_w(a.val, a.val, n), ptr, 0, 0); }
template<int n> inline
v_uint16x8 v_rshr_pack_u(const v_int32x4& a, const v_int32x4& b)
{ return v_uint16x8(__lsx_vssrarni_hu_w(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_u_store(ushort* ptr, const v_int32x4& a)
{ __lsx_vstelm_d(__lsx_vssrarni_hu_w(a.val, a.val, n), ptr, 0, 0); }
template<int n> inline
v_int16x8 v_rshr_pack(const v_int32x4& a, const v_int32x4& b)
{ return v_int16x8(__lsx_vssrarni_h_w(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_store(short* ptr, const v_int32x4& a)
{ __lsx_vstelm_d(__lsx_vssrarni_h_w(a.val, a.val, n), ptr, 0, 0); }
// 64
// Non-saturating pack: 64-bit lanes are truncated to 32 bits by keeping
// the even 32-bit words.
inline v_uint32x4 v_pack(const v_uint64x2& a, const v_uint64x2& b)
{ return v_uint32x4(__lsx_vpickev_w(b.val, a.val)); }
inline v_int32x4 v_pack(const v_int64x2& a, const v_int64x2& b)
{ return v_reinterpret_as_s32(v_pack(v_reinterpret_as_u64(a), v_reinterpret_as_u64(b))); }
inline void v_pack_store(unsigned* ptr, const v_uint64x2& a)
{ __lsx_vstelm_d(__lsx_vshuf4i_w(a.val, 0x08), ptr, 0, 0); }
inline void v_pack_store(int *ptr, const v_int64x2& a)
{ v_pack_store((unsigned*)ptr, v_reinterpret_as_u64(a)); }
template<int n> inline
v_uint32x4 v_rshr_pack(const v_uint64x2& a, const v_uint64x2& b)
{ return v_uint32x4(__lsx_vsrlrni_w_d(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_store(unsigned* ptr, const v_uint64x2& a)
{ __lsx_vstelm_d(__lsx_vsrlrni_w_d(a.val, a.val, n), ptr, 0, 0); }
template<int n> inline
v_int32x4 v_rshr_pack(const v_int64x2& a, const v_int64x2& b)
{ return v_int32x4(__lsx_vsrarni_w_d(b.val, a.val, n)); }
template<int n> inline
void v_rshr_pack_store(int* ptr, const v_int64x2& a)
{ __lsx_vstelm_d(__lsx_vsrarni_w_d(a.val, a.val, n), ptr, 0, 0); }
// pack boolean
// v_pack_b: pack per-lane boolean masks from wider lanes down to 8-bit
// lanes, cascading saturating narrowing steps for the 4- and 8-input forms.
inline v_uint8x16 v_pack_b(const v_uint16x8& a, const v_uint16x8& b)
{ return v_uint8x16(__lsx_vssrarni_b_h(b.val, a.val, 0)); }
inline v_uint8x16 v_pack_b(const v_uint32x4& a, const v_uint32x4& b,
                           const v_uint32x4& c, const v_uint32x4& d)
{
    __m128i ab = __lsx_vssrarni_h_w(b.val, a.val, 0);
    __m128i cd = __lsx_vssrarni_h_w(d.val, c.val, 0);
    return v_uint8x16(__lsx_vssrarni_b_h(cd, ab, 0));
}
inline v_uint8x16 v_pack_b(const v_uint64x2& a, const v_uint64x2& b, const v_uint64x2& c,
                           const v_uint64x2& d, const v_uint64x2& e, const v_uint64x2& f,
                           const v_uint64x2& g, const v_uint64x2& h)
{
    __m128i ab = __lsx_vssrarni_w_d(b.val, a.val, 0);
    __m128i cd = __lsx_vssrarni_w_d(d.val, c.val, 0);
    __m128i ef = __lsx_vssrarni_w_d(f.val, e.val, 0);
    __m128i gh = __lsx_vssrarni_w_d(h.val, g.val, 0);
    __m128i abcd = __lsx_vssrarni_h_w(cd, ab, 0);
    __m128i efgh = __lsx_vssrarni_h_w(gh, ef, 0);
    return v_uint8x16(__lsx_vssrarni_b_h(efgh, abcd, 0));
}
/* Recombine */
// implemented next to the load and store operations above
/* Extract */
// v_extract<s>: concatenate a and b and extract 128 bits starting at lane s
// (implemented via v_rotate_right on the pair).
#define OPENCV_HAL_IMPL_LSX_EXTRACT(_Tpvec) \
template<int s> \
inline _Tpvec v_extract(const _Tpvec& a, const _Tpvec& b) \
{ return v_rotate_right<s>(a, b); }
OPENCV_HAL_IMPL_LSX_EXTRACT(v_uint8x16)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_int8x16)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_uint16x8)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_int16x8)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_uint32x4)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_int32x4)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_uint64x2)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_int64x2)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_float32x4)
OPENCV_HAL_IMPL_LSX_EXTRACT(v_float64x2)
// v_extract_n<i>: read scalar lane i via the element-to-GR move intrinsic.
#define OPENCV_HAL_IMPL_LSX_EXTRACT_N(_Tpvec, _Twvec, intrin) \
template<int i> \
inline _Twvec v_extract_n(const _Tpvec& a) \
{ return (_Twvec)intrin(a.val, i); }
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_uint8x16, uchar, __lsx_vpickve2gr_b)
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_int8x16, schar, __lsx_vpickve2gr_b)
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_uint16x8, ushort, __lsx_vpickve2gr_h)
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_int16x8, short, __lsx_vpickve2gr_h)
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_uint32x4, uint, __lsx_vpickve2gr_w)
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_int32x4, int, __lsx_vpickve2gr_w)
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_uint64x2, uint64, __lsx_vpickve2gr_d)
OPENCV_HAL_IMPL_LSX_EXTRACT_N(v_int64x2, int64, __lsx_vpickve2gr_d)
// Float lanes are extracted as raw bits and reinterpreted through a union
// (the pickve2gr intrinsics move integer bit patterns only).
template<int i>
inline float v_extract_n(const v_float32x4& v)
{
    union { uint iv; float fv; } d;
    d.iv = __lsx_vpickve2gr_w(v.val, i);
    return d.fv;
}
template<int i>
inline double v_extract_n(const v_float64x2& v)
{
    union { uint64 iv; double dv; } d;
    d.iv = __lsx_vpickve2gr_d(v.val, i);
    return d.dv;
}
// Broadcast lane i of a to all lanes of the result.
template<int i>
inline v_uint32x4 v_broadcast_element(const v_uint32x4& a)
{ return v_uint32x4(__lsx_vreplvei_w(a.val, i)); }
template<int i>
inline v_int32x4 v_broadcast_element(const v_int32x4& a)
{ return v_int32x4(__lsx_vreplvei_w(a.val, i)); }
template<int i>
inline v_float32x4 v_broadcast_element(const v_float32x4& a)
{ return v_float32x4((__m128)__lsx_vreplvei_w((__m128i)a.val, i)); }
/////////////////// load deinterleave //////////////////////////////
// 2-channel: split interleaved pairs into even lanes (a) and odd lanes (b)
// with pick-even / pick-odd.
inline void v_load_deinterleave(const uchar* ptr, v_uint8x16& a, v_uint8x16& b)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    a.val = __lsx_vpickev_b(t1, t0);
    b.val = __lsx_vpickod_b(t1, t0);
}
inline void v_load_deinterleave(const ushort* ptr, v_uint16x8& a, v_uint16x8& b)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    a.val = __lsx_vpickev_h(t1, t0);
    b.val = __lsx_vpickod_h(t1, t0);
}
inline void v_load_deinterleave(const unsigned* ptr, v_uint32x4& a, v_uint32x4& b)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    a.val = __lsx_vpickev_w(t1, t0);
    b.val = __lsx_vpickod_w(t1, t0);
}
// 64-bit lanes use interleave-low/high instead of pick even/odd.
inline void v_load_deinterleave(const uint64* ptr, v_uint64x2& a, v_uint64x2& b)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    a.val = __lsx_vilvl_d(t1, t0);
    b.val = __lsx_vilvh_d(t1, t0);
}
// 3-channel: first blend the three loaded vectors with byte-select masks so
// each intermediate holds one channel's elements, then gather each channel
// with precomputed vshuf_b lookup tables (indices >= 16 select from t2).
inline void v_load_deinterleave(const uchar* ptr, v_uint8x16& a, v_uint8x16& b, v_uint8x16& c)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    __m128i t2 = __lsx_vld(ptr, 32);
    const __m128i shuff0 = _v128_setr_b(0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0);
    const __m128i shuff1 = _v128_setr_b(0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0);
    __m128i a0 = __lsx_vbitsel_v(t0, t1, shuff0);
    __m128i b0 = __lsx_vbitsel_v(t1, t0, shuff1);
    __m128i c0 = __lsx_vbitsel_v(t1, t0, shuff0);
    const __m128i shuff_a = _v128_setr_b(0, 3, 6, 9, 12, 15, 2, 5, 8, 11, 14, 17, 20, 23, 26, 29);
    const __m128i shuff_b = _v128_setr_b(1, 4, 7, 10, 13, 0, 3, 6, 9, 12, 15, 18, 21, 24, 27, 30);
    const __m128i shuff_c = _v128_setr_b(2, 5, 8, 11, 14, 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 31);
    a.val = __lsx_vshuf_b(t2, a0, shuff_a);
    b.val = __lsx_vshuf_b(t2, b0, shuff_b);
    c.val = __lsx_vshuf_b(t2, c0, shuff_c);
}
inline void v_load_deinterleave(const ushort* ptr, v_uint16x8& a, v_uint16x8& b, v_uint16x8& c)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    __m128i t2 = __lsx_vld(ptr, 32);
    const __m128i shuff0 = _v128_setr_h(0, 0, -1, 0, 0, -1, 0, 0);
    const __m128i shuff1 = _v128_setr_h(0, -1, 0, 0, -1, 0, 0, -1);
    __m128i a0 = __lsx_vbitsel_v(t0, t1, shuff1);
    __m128i b0 = __lsx_vbitsel_v(t0, t1, shuff0);
    __m128i c0 = __lsx_vbitsel_v(t1, t0, shuff0);
    const __m128i shuff_a = _v128_setr_b(0, 1, 6, 7, 12, 13, 2, 3, 8, 9, 14, 15, 20, 21, 26, 27);
    const __m128i shuff_b = _v128_setr_b(2, 3, 8, 9, 14, 15, 4, 5, 10, 11, 16, 17, 22, 23, 28, 29);
    const __m128i shuff_c = _v128_setr_b(4, 5, 10, 11, 0, 1, 6, 7, 12, 13, 18, 19, 24, 25, 30, 31);
    a.val = __lsx_vshuf_b(t2, a0, shuff_a);
    b.val = __lsx_vshuf_b(t2, b0, shuff_b);
    c.val = __lsx_vshuf_b(t2, c0, shuff_c);
}
// 3-channel 32-bit: word permutes with immediate control masks, then patch
// the lanes that come from the third vector with vextrins_w.
inline void v_load_deinterleave(const unsigned* ptr, v_uint32x4& a, v_uint32x4& b, v_uint32x4& c)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    __m128i t2 = __lsx_vld(ptr, 32);
    __m128i a0 = __lsx_vpermi_w(t1, t0, 0xAC);
    __m128i b0 = __lsx_vpermi_w(t1, t0, 0xC5);
    __m128i c0 = __lsx_vpermi_w(t1, t0, 0x5A);
    a.val = __lsx_vextrins_w(a0, t2, 0x31);
    b0 = __lsx_vshuf4i_w(b0, 0x38);
    c0 = __lsx_vshuf4i_w(c0, 0x8);
    b.val = __lsx_vextrins_w(b0, t2, 0x32);
    c.val = __lsx_vpermi_w(t2, c0, 0xC4);
}
inline void v_load_deinterleave(const uint64* ptr, v_uint64x2& a, v_uint64x2& b, v_uint64x2& c)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    __m128i t2 = __lsx_vld(ptr, 32);
    a.val = __lsx_vshuf4i_d(t0, t1, 0xC);
    b.val = __lsx_vshuf4i_d(t0, t2, 0x9);
    c.val = __lsx_vshuf4i_d(t1, t2, 0xC);
}
// 4-channel: two-stage even/odd split -- first separates (a,c) from (b,d),
// the second pass separates the individual channels.
inline void v_load_deinterleave(const uchar* ptr, v_uint8x16& a, v_uint8x16& b, v_uint8x16& c, v_uint8x16& d)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    __m128i t2 = __lsx_vld(ptr, 32);
    __m128i t3 = __lsx_vld(ptr, 48);
    __m128i ac_lo = __lsx_vpickev_b(t1, t0);
    __m128i bd_lo = __lsx_vpickod_b(t1, t0);
    __m128i ac_hi = __lsx_vpickev_b(t3, t2);
    __m128i bd_hi = __lsx_vpickod_b(t3, t2);
    a.val = __lsx_vpickev_b(ac_hi, ac_lo);
    c.val = __lsx_vpickod_b(ac_hi, ac_lo);
    b.val = __lsx_vpickev_b(bd_hi, bd_lo);
    d.val = __lsx_vpickod_b(bd_hi, bd_lo);
}
inline void v_load_deinterleave(const ushort* ptr, v_uint16x8& a, v_uint16x8& b, v_uint16x8& c, v_uint16x8& d)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    __m128i t2 = __lsx_vld(ptr, 32);
    __m128i t3 = __lsx_vld(ptr, 48);
    __m128i ac_lo = __lsx_vpickev_h(t1, t0);
    __m128i bd_lo = __lsx_vpickod_h(t1, t0);
    __m128i ac_hi = __lsx_vpickev_h(t3, t2);
    __m128i bd_hi = __lsx_vpickod_h(t3, t2);
    a.val = __lsx_vpickev_h(ac_hi, ac_lo);
    c.val = __lsx_vpickod_h(ac_hi, ac_lo);
    b.val = __lsx_vpickev_h(bd_hi, bd_lo);
    d.val = __lsx_vpickod_h(bd_hi, bd_lo);
}
// 4-channel 32-bit: classic transpose via interleave low/high.
inline void v_load_deinterleave(const unsigned* ptr, v_uint32x4& a, v_uint32x4& b, v_uint32x4& c, v_uint32x4& d)
{
    __m128i p0 = __lsx_vld(ptr, 0);
    __m128i p1 = __lsx_vld(ptr, 16);
    __m128i p2 = __lsx_vld(ptr, 32);
    __m128i p3 = __lsx_vld(ptr, 48);
    __m128i t0 = __lsx_vilvl_w(p1, p0);
    __m128i t1 = __lsx_vilvl_w(p3, p2);
    __m128i t2 = __lsx_vilvh_w(p1, p0);
    __m128i t3 = __lsx_vilvh_w(p3, p2);
    a.val = __lsx_vilvl_d(t1, t0);
    b.val = __lsx_vilvh_d(t1, t0);
    c.val = __lsx_vilvl_d(t3, t2);
    d.val = __lsx_vilvh_d(t3, t2);
}
inline void v_load_deinterleave(const uint64* ptr, v_uint64x2& a, v_uint64x2& b, v_uint64x2& c, v_uint64x2& d)
{
    __m128i t0 = __lsx_vld(ptr, 0);
    __m128i t1 = __lsx_vld(ptr, 16);
    __m128i t2 = __lsx_vld(ptr, 32);
    __m128i t3 = __lsx_vld(ptr, 48);
    a.val = __lsx_vilvl_d(t2, t0);
    b.val = __lsx_vilvh_d(t2, t0);
    c.val = __lsx_vilvl_d(t3, t1);
    d.val = __lsx_vilvh_d(t3, t1);
}
////////////////////////// store interleave ////////////////////////////////
// 2-channel: interleave a/b lane-wise (low then high half) and store both
// halves.  The mode argument is accepted for HAL API compatibility only.
inline void v_store_interleave(uchar* ptr, const v_uint8x16& a, const v_uint8x16& b,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i v0 = __lsx_vilvl_b(b.val, a.val);
    __m128i v1 = __lsx_vilvh_b(b.val, a.val);
    __lsx_vst(v0, ptr, 0);
    __lsx_vst(v1, ptr, 16);
}
inline void v_store_interleave(ushort* ptr, const v_uint16x8& a, const v_uint16x8& b,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i v0 = __lsx_vilvl_h(b.val, a.val);
    __m128i v1 = __lsx_vilvh_h(b.val, a.val);
    __lsx_vst(v0, ptr, 0);
    __lsx_vst(v1, ptr, 16);
}
inline void v_store_interleave(unsigned* ptr, const v_uint32x4& a, const v_uint32x4& b,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i v0 = __lsx_vilvl_w(b.val, a.val);
    __m128i v1 = __lsx_vilvh_w(b.val, a.val);
    __lsx_vst(v0, ptr, 0);
    __lsx_vst(v1, ptr, 16);
}
inline void v_store_interleave(uint64* ptr, const v_uint64x2& a, const v_uint64x2& b,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i v0 = __lsx_vilvl_d(b.val, a.val);
    __m128i v1 = __lsx_vilvh_d(b.val, a.val);
    __lsx_vst(v0, ptr, 0);
    __lsx_vst(v1, ptr, 16);
}
// 3-channel: interleave a/b, then weave in c using precomputed vshuf_b
// tables (indices >= 16 select from the second source operand).
inline void v_store_interleave(uchar* ptr, const v_uint8x16& a, const v_uint8x16& b, const v_uint8x16& c,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i ab_lo = __lsx_vilvl_b(b.val, a.val);
    __m128i ab_hi = __lsx_vilvh_b(b.val, a.val);
    __m128i v_c = c.val;
    const __m128i shuff0 = _v128_setr_b(0, 1, 16, 2, 3, 17, 4, 5, 18, 6, 7, 19, 8, 9, 20, 10);
    const __m128i shuff1 = _v128_setr_b(11, 21, 12, 13, 22, 14, 15, 23, 0, 0, 0, 0, 0, 0, 0, 0);
    const __m128i shuff2 = _v128_setr_b(0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 24, 18, 19, 25, 20, 21);
    const __m128i shuff3 = _v128_setr_b(26, 6, 7, 27, 8, 9, 28, 10, 11, 29, 12, 13, 30, 14, 15, 31);
    __m128i abc = __lsx_vpermi_w(v_c, ab_hi, 0xE4);
    __m128i dst0 = __lsx_vshuf_b(v_c, ab_lo, shuff0);
    __m128i dst1 = __lsx_vshuf_b(v_c, ab_lo, shuff1);
    __m128i dst2 = __lsx_vshuf_b(v_c, ab_hi, shuff3);
    dst1 = __lsx_vshuf_b(abc, dst1, shuff2);
    __lsx_vst(dst0, ptr, 0);
    __lsx_vst(dst1, ptr, 16);
    __lsx_vst(dst2, ptr, 32);
}
inline void v_store_interleave(ushort* ptr, const v_uint16x8& a, const v_uint16x8& b, const v_uint16x8& c,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i ab_lo = __lsx_vilvl_h(b.val, a.val);
    __m128i ab_hi = __lsx_vilvh_h(b.val, a.val);
    __m128i v_c = c.val;
    const __m128i shuff0 = _v128_setr_b(0, 1, 2, 3, 16, 17, 4, 5, 6, 7, 18, 19, 8, 9, 10, 11);
    const __m128i shuff1 = _v128_setr_b(20, 21, 12, 13, 14, 15, 22, 23, 0, 0, 0, 0, 0, 0, 0, 0);
    const __m128i shuff2 = _v128_setr_b(0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 24, 25, 20, 21);
    const __m128i shuff3 = _v128_setr_b(6, 7, 26, 27, 8, 9, 10, 11, 28, 29, 12, 13, 14, 15, 30, 31);
    __m128i abc = __lsx_vpermi_w(v_c, ab_hi, 0xE4);
    __m128i dst0 = __lsx_vshuf_b(v_c, ab_lo, shuff0);
    __m128i dst1 = __lsx_vshuf_b(v_c, ab_lo, shuff1);
    __m128i dst2 = __lsx_vshuf_b(v_c, ab_hi, shuff3);
    dst1 = __lsx_vshuf_b(abc, dst1, shuff2);
    __lsx_vst(dst0, ptr, 0);
    __lsx_vst(dst1, ptr, 16);
    __lsx_vst(dst2, ptr, 32);
}
// 3-channel 32-bit: word-level permutes; inline comments track lane layout.
inline void v_store_interleave(unsigned* ptr, const v_uint32x4& a, const v_uint32x4& b, const v_uint32x4& c,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i v_c = c.val;
    __m128i ab_lo = __lsx_vilvl_w(b.val, a.val); //a0 b0 a1 b1
    __m128i ab_hi = __lsx_vilvh_w(b.val, a.val); //a2 b2 a3 b3
    __m128i bc_od = __lsx_vpackod_w(v_c, b.val); // b1 c1 b3 c3
    __m128i dst0 = __lsx_vshuf4i_w(ab_lo, 0xB4); //a0 b0 b1 a1
    __m128i dst1 = __lsx_vilvl_d(ab_hi, bc_od); //b1 c1 a2 b2
    __m128i dst2 = __lsx_vpermi_w(bc_od, ab_hi, 0xE8); //a2, a3, b3, c3
    dst0 = __lsx_vextrins_w(dst0, v_c, 0x20);
    dst2 = __lsx_vextrins_w(dst2, v_c, 0x2);
    __lsx_vst(dst0, ptr, 0); //a0 b0 c0 a1
    __lsx_vst(dst1, ptr, 16); //b1 c1 a2 b2
    __lsx_vst(dst2, ptr, 32); //c2 a3 b3 c3
}
inline void v_store_interleave(uint64* ptr, const v_uint64x2& a, const v_uint64x2& b, const v_uint64x2& c,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i dst0 = __lsx_vilvl_d(b.val, a.val);
    __m128i dst1 = __lsx_vpermi_w(a.val, c.val, 0xE4);
    __m128i dst2 = __lsx_vilvh_d(c.val, b.val);
    __lsx_vst(dst0, ptr, 0);
    __lsx_vst(dst1, ptr, 16);
    __lsx_vst(dst2, ptr, 32);
}
// 4-channel: pairwise interleave (a,b) and (c,d), then interleave the pairs
// at double lane width to produce the a,b,c,d,... sequence.
inline void v_store_interleave(uchar* ptr, const v_uint8x16& a, const v_uint8x16& b,
                               const v_uint8x16& c, const v_uint8x16& d,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i ab_lo = __lsx_vilvl_b(b.val, a.val);
    __m128i ab_hi = __lsx_vilvh_b(b.val, a.val);
    __m128i cd_lo = __lsx_vilvl_b(d.val, c.val);
    __m128i cd_hi = __lsx_vilvh_b(d.val, c.val);
    __m128i dst0 = __lsx_vilvl_h(cd_lo, ab_lo);
    __m128i dst1 = __lsx_vilvh_h(cd_lo, ab_lo);
    __m128i dst2 = __lsx_vilvl_h(cd_hi, ab_hi);
    __m128i dst3 = __lsx_vilvh_h(cd_hi, ab_hi);
    __lsx_vst(dst0, ptr, 0);
    __lsx_vst(dst1, ptr, 16);
    __lsx_vst(dst2, ptr, 32);
    __lsx_vst(dst3, ptr, 48);
}
inline void v_store_interleave(ushort* ptr, const v_uint16x8& a, const v_uint16x8& b,
                               const v_uint16x8& c, const v_uint16x8& d,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i ab_lo = __lsx_vilvl_h(b.val, a.val);
    __m128i ab_hi = __lsx_vilvh_h(b.val, a.val);
    __m128i cd_lo = __lsx_vilvl_h(d.val, c.val);
    __m128i cd_hi = __lsx_vilvh_h(d.val, c.val);
    __m128i dst0 = __lsx_vilvl_w(cd_lo, ab_lo);
    __m128i dst1 = __lsx_vilvh_w(cd_lo, ab_lo);
    __m128i dst2 = __lsx_vilvl_w(cd_hi, ab_hi);
    __m128i dst3 = __lsx_vilvh_w(cd_hi, ab_hi);
    __lsx_vst(dst0, ptr, 0);
    __lsx_vst(dst1, ptr, 16);
    __lsx_vst(dst2, ptr, 32);
    __lsx_vst(dst3, ptr, 48);
}
inline void v_store_interleave(unsigned* ptr, const v_uint32x4& a, const v_uint32x4& b,
                               const v_uint32x4& c, const v_uint32x4& d,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i ab_lo = __lsx_vilvl_w(b.val, a.val);
    __m128i ab_hi = __lsx_vilvh_w(b.val, a.val);
    __m128i cd_lo = __lsx_vilvl_w(d.val, c.val);
    __m128i cd_hi = __lsx_vilvh_w(d.val, c.val);
    __m128i dst0 = __lsx_vilvl_d(cd_lo, ab_lo);
    __m128i dst1 = __lsx_vilvh_d(cd_lo, ab_lo);
    __m128i dst2 = __lsx_vilvl_d(cd_hi, ab_hi);
    __m128i dst3 = __lsx_vilvh_d(cd_hi, ab_hi);
    __lsx_vst(dst0, ptr, 0);
    __lsx_vst(dst1, ptr, 16);
    __lsx_vst(dst2, ptr, 32);
    __lsx_vst(dst3, ptr, 48);
}
inline void v_store_interleave(uint64* ptr, const v_uint64x2& a, const v_uint64x2& b,
                               const v_uint64x2& c, const v_uint64x2& d,
                               hal::StoreMode /*mode*/ = hal::STORE_UNALIGNED)
{
    __m128i dst0 = __lsx_vilvl_d(b.val, a.val);
    __m128i dst2 = __lsx_vilvh_d(b.val, a.val);
    __m128i dst1 = __lsx_vilvl_d(d.val, c.val);
    __m128i dst3 = __lsx_vilvh_d(d.val, c.val);
    __lsx_vst(dst0, ptr, 0);
    __lsx_vst(dst1, ptr, 16);
    __lsx_vst(dst2, ptr, 32);
    __lsx_vst(dst3, ptr, 48);
}
// Derive signed / floating-point (de)interleave overloads from the unsigned
// implementations above by reinterpreting the lane type: _Tpvec0/_Tp0 is the
// derived type and _Tpvec1/_Tp1 the bit-compatible unsigned base type.
#define OPENCV_HAL_IMPL_LSX_LOADSTORE_INTERLEAVE(_Tpvec0, _Tp0, suffix0, _Tpvec1, _Tp1, suffix1) \
inline void v_load_deinterleave(const _Tp0* ptr, _Tpvec0& a0, _Tpvec0& b0) \
{ \
    _Tpvec1 a1, b1; \
    v_load_deinterleave((const _Tp1*)ptr, a1, b1); \
    a0 = v_reinterpret_as_##suffix0(a1); \
    b0 = v_reinterpret_as_##suffix0(b1); \
} \
inline void v_load_deinterleave(const _Tp0* ptr, _Tpvec0& a0, _Tpvec0& b0, _Tpvec0& c0) \
{ \
    _Tpvec1 a1, b1, c1; \
    v_load_deinterleave((const _Tp1*)ptr, a1, b1, c1); \
    a0 = v_reinterpret_as_##suffix0(a1); \
    b0 = v_reinterpret_as_##suffix0(b1); \
    c0 = v_reinterpret_as_##suffix0(c1); \
} \
inline void v_load_deinterleave(const _Tp0* ptr, _Tpvec0& a0, _Tpvec0& b0, \
                                _Tpvec0& c0, _Tpvec0& d0) \
{ \
    _Tpvec1 a1, b1, c1, d1; \
    v_load_deinterleave((const _Tp1*)ptr, a1, b1, c1, d1); \
    a0 = v_reinterpret_as_##suffix0(a1); \
    b0 = v_reinterpret_as_##suffix0(b1); \
    c0 = v_reinterpret_as_##suffix0(c1); \
    d0 = v_reinterpret_as_##suffix0(d1); \
} \
inline void v_store_interleave(_Tp0* ptr, const _Tpvec0& a0, const _Tpvec0& b0, \
                               hal::StoreMode /*mode*/=hal::STORE_UNALIGNED) \
{ \
    _Tpvec1 a1 = v_reinterpret_as_##suffix1(a0); \
    _Tpvec1 b1 = v_reinterpret_as_##suffix1(b0); \
    v_store_interleave((_Tp1*)ptr, a1, b1); \
} \
inline void v_store_interleave(_Tp0* ptr, const _Tpvec0& a0, const _Tpvec0& b0, const _Tpvec0& c0,\
                               hal::StoreMode /*mode*/=hal::STORE_UNALIGNED) \
{ \
    _Tpvec1 a1 = v_reinterpret_as_##suffix1(a0); \
    _Tpvec1 b1 = v_reinterpret_as_##suffix1(b0); \
    _Tpvec1 c1 = v_reinterpret_as_##suffix1(c0); \
    v_store_interleave((_Tp1*)ptr, a1, b1, c1); \
} \
inline void v_store_interleave(_Tp0* ptr, const _Tpvec0& a0, const _Tpvec0& b0, \
                               const _Tpvec0& c0, const _Tpvec0& d0, \
                               hal::StoreMode /*mode*/=hal::STORE_UNALIGNED) \
{ \
    _Tpvec1 a1 = v_reinterpret_as_##suffix1(a0); \
    _Tpvec1 b1 = v_reinterpret_as_##suffix1(b0); \
    _Tpvec1 c1 = v_reinterpret_as_##suffix1(c0); \
    _Tpvec1 d1 = v_reinterpret_as_##suffix1(d0); \
    v_store_interleave((_Tp1*)ptr, a1, b1, c1, d1); \
}
OPENCV_HAL_IMPL_LSX_LOADSTORE_INTERLEAVE(v_int8x16, schar, s8, v_uint8x16, uchar, u8)
OPENCV_HAL_IMPL_LSX_LOADSTORE_INTERLEAVE(v_int16x8, short, s16, v_uint16x8, ushort, u16)
OPENCV_HAL_IMPL_LSX_LOADSTORE_INTERLEAVE(v_int32x4, int, s32, v_uint32x4, unsigned, u32)
OPENCV_HAL_IMPL_LSX_LOADSTORE_INTERLEAVE(v_float32x4, float, f32, v_uint32x4, unsigned, u32)
OPENCV_HAL_IMPL_LSX_LOADSTORE_INTERLEAVE(v_int64x2, int64, s64, v_uint64x2, uint64, u64)
OPENCV_HAL_IMPL_LSX_LOADSTORE_INTERLEAVE(v_float64x2, double, f64, v_uint64x2, uint64, u64)
//
// FP16
//

// Load four half-precision (16-bit) floats from ptr and widen them to a
// v_float32x4.  With CV_FP16 support this is a single vector convert of the
// loaded halves; otherwise the conversion falls back to scalar code through
// a small aligned staging buffer.
inline v_float32x4 v_load_expand(const hfloat* ptr)
{
#if CV_FP16
    return v_float32x4(__lsx_vfcvtl_s_h((__m128)__lsx_vld(ptr, 0)));
#else
    float CV_DECL_ALIGNED(32) buf[4];
    for (int i = 0; i < 4; i++)
        buf[i] = (float)ptr[i];
    return v_float32x4((__m128)__lsx_vld(buf, 0));
#endif
}

// Narrow the four single-precision lanes of `a` to half precision and store
// the resulting 64 bits at ptr.
inline void v_pack_store(hfloat* ptr, const v_float32x4& a)
{
#if CV_FP16
    // Fix: this cast was previously written as (__m218i) -- a typo for
    // (__m128i) that cannot compile when CV_FP16 is enabled.
    __m128i res = (__m128i)__lsx_vfcvt_h_s(a.val, a.val);
    __lsx_vstelm_d(res, ptr, 0, 0);
#else
    float CV_DECL_ALIGNED(32) buf[4];
    v_store_aligned(buf, a);
    for (int i = 0; i < 4; i++)
        ptr[i] = hfloat(buf[i]);
#endif
}
//
// end of FP16
//
// Nothing to release for LSX; provided for universal-intrinsics API parity.
inline void v_cleanup() {}
// Shared reference implementations of the transcendental functions.
#include "intrin_math.hpp"
// Thin wrappers dispatching to the generic default implementations from
// intrin_math.hpp (note: v_erf is only provided for float32 here).
inline v_float32x4 v_exp(const v_float32x4& x) { return v_exp_default_32f<v_float32x4, v_int32x4>(x); }
inline v_float32x4 v_log(const v_float32x4& x) { return v_log_default_32f<v_float32x4, v_int32x4>(x); }
inline void v_sincos(const v_float32x4& x, v_float32x4& s, v_float32x4& c) { v_sincos_default_32f<v_float32x4, v_int32x4>(x, s, c); }
inline v_float32x4 v_sin(const v_float32x4& x) { return v_sin_default_32f<v_float32x4, v_int32x4>(x); }
inline v_float32x4 v_cos(const v_float32x4& x) { return v_cos_default_32f<v_float32x4, v_int32x4>(x); }
inline v_float32x4 v_erf(const v_float32x4& x) { return v_erf_default_32f<v_float32x4, v_int32x4>(x); }
inline v_float64x2 v_exp(const v_float64x2& x) { return v_exp_default_64f<v_float64x2, v_int64x2>(x); }
inline v_float64x2 v_log(const v_float64x2& x) { return v_log_default_64f<v_float64x2, v_int64x2>(x); }
inline void v_sincos(const v_float64x2& x, v_float64x2& s, v_float64x2& c) { v_sincos_default_64f<v_float64x2, v_int64x2>(x, s, c); }
inline v_float64x2 v_sin(const v_float64x2& x) { return v_sin_default_64f<v_float64x2, v_int64x2>(x); }
inline v_float64x2 v_cos(const v_float64x2& x) { return v_cos_default_64f<v_float64x2, v_int64x2>(x); }
CV_CPU_OPTIMIZATION_HAL_NAMESPACE_END
//! @endcond
} // cv::
#endif // OPENCV_HAL_INTRIN_LSX_HPP
|
unknown
|
github
|
https://github.com/opencv/opencv
|
modules/core/include/opencv2/core/hal/intrin_lsx.hpp
|
# -*- coding: utf-8 -*-
"""
werkzeug.useragents
~~~~~~~~~~~~~~~~~~~
This module provides a helper to inspect user agent strings. This module
is far from complete but should work for most of the currently available
browsers.
:copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
class UserAgentParser(object):
    """A simple user agent parser.  Used by the `UserAgent`."""

    # (regex, canonical name) pairs, checked in order; first match wins.
    platforms = (
        ('iphone|ios', 'iphone'),
        (r'darwin|mac|os\s*x', 'macos'),
        ('win', 'windows'),
        (r'android', 'android'),
        (r'x11|lin(\b|ux)?', 'linux'),
        ('(sun|i86)os', 'solaris'),
        (r'nintendo\s+wii', 'wii'),
        ('irix', 'irix'),
        ('hp-?ux', 'hpux'),
        ('aix', 'aix'),
        ('sco|unix_sv', 'sco'),
        ('bsd', 'bsd'),
        ('amiga', 'amiga')
    )
    browsers = (
        ('googlebot', 'google'),
        ('msnbot', 'msn'),
        ('yahoo', 'yahoo'),
        ('ask jeeves', 'ask'),
        (r'aol|america\s+online\s+browser', 'aol'),
        ('opera', 'opera'),
        ('chrome', 'chrome'),
        ('firefox|firebird|phoenix|iceweasel', 'firefox'),
        ('galeon', 'galeon'),
        ('safari', 'safari'),
        ('webkit', 'webkit'),
        ('camino', 'camino'),
        ('konqueror', 'konqueror'),
        ('k-meleon', 'kmeleon'),
        ('netscape', 'netscape'),
        (r'msie|microsoft\s+internet\s+explorer', 'msie'),
        ('lynx', 'lynx'),
        ('links', 'links'),
        ('seamonkey|mozilla', 'seamonkey')
    )

    # Fix: this template used to end with an inline "(?i)" flag.  A global
    # flag that is not at the start of the pattern has been an error since
    # Python 3.11 (and a DeprecationWarning before that).  The equivalent
    # case-insensitivity is now applied via re.I at compile time instead.
    _browser_version_re = r'(?:%s)[/\sa-z(]*(\d+[.\da-z]+)?'
    _language_re = re.compile(
        r'(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|'
        r'(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)'
    )

    def __init__(self):
        self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platforms]
        self.browsers = [(b, re.compile(self._browser_version_re % a, re.I))
                         for a, b in self.browsers]

    def __call__(self, user_agent):
        """Parse the raw `user_agent` string.

        Returns a ``(platform, browser, version, language)`` tuple; each
        element is ``None`` when it could not be detected.
        """
        for platform, regex in self.platforms:
            match = regex.search(user_agent)
            if match is not None:
                break
        else:
            platform = None
        for browser, regex in self.browsers:
            match = regex.search(user_agent)
            if match is not None:
                version = match.group(1)
                break
        else:
            browser = version = None
        match = self._language_re.search(user_agent)
        if match is not None:
            language = match.group(1) or match.group(2)
        else:
            language = None
        return platform, browser, version, language
class UserAgent(object):
    """Represents a user agent.  Pass it a WSGI environment or a user agent
    string and you can inspect some of the details from the user agent
    string via the attributes.  The following attributes exist:
    .. attribute:: string
       the raw user agent string
    .. attribute:: platform
       the browser platform.  The following platforms are currently
       recognized:
       -   `aix`
       -   `amiga`
       -   `android`
       -   `bsd`
       -   `hpux`
       -   `iphone`
       -   `irix`
       -   `linux`
       -   `macos`
       -   `sco`
       -   `solaris`
       -   `wii`
       -   `windows`
    .. attribute:: browser
        the name of the browser.  The following browsers are currently
        recognized:
        -   `aol` *
        -   `ask` *
        -   `camino`
        -   `chrome`
        -   `firefox`
        -   `galeon`
        -   `google` *
        -   `kmeleon`
        -   `konqueror`
        -   `links`
        -   `lynx`
        -   `msie`
        -   `msn`
        -   `netscape`
        -   `opera`
        -   `safari`
        -   `seamonkey`
        -   `webkit`
        -   `yahoo` *
        (Browsers marked with a star (``*``) are crawlers.)
    .. attribute:: version
        the version of the browser
    .. attribute:: language
        the language of the browser
    """
    # One shared parser instance; compiled regexes are reused by all
    # UserAgent objects.
    _parser = UserAgentParser()
    def __init__(self, environ_or_string):
        # Accept either a WSGI environ dict (use its HTTP_USER_AGENT key,
        # defaulting to '') or a raw user agent string.
        if isinstance(environ_or_string, dict):
            environ_or_string = environ_or_string.get('HTTP_USER_AGENT', '')
        self.string = environ_or_string
        self.platform, self.browser, self.version, self.language = \
            self._parser(environ_or_string)
    def to_header(self):
        # The header value is simply the raw string.
        return self.string
    def __str__(self):
        return self.string
    def __nonzero__(self):
        # Python 2 truth hook: a UserAgent is truthy when a browser was
        # detected.  NOTE(review): Python 3 looks for __bool__ instead.
        return bool(self.browser)
    def __repr__(self):
        return '<%s %r/%s>' % (
            self.__class__.__name__,
            self.browser,
            self.version
        )
# conceptionally this belongs in this module but because we want to lazily
# load the user agent module (which happens in wrappers.py) we have to import
# it afterwards. The class itself has the module set to this module so
# pickle, inspect and similar modules treat the object as if it was really
# implemented here.
from werkzeug.wrappers import UserAgentMixin
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Expected-output fixture for a Next.js SSG transform test
// (getStaticProps dependents removal).
import keep_me from 'hello';
import { keep_me2 } from 'hello2';
import * as keep_me3 from 'hello3';
import { but_not_me } from 'bar';
var leave_me_alone = 1;
function dont_bug_me_either() {}
// Marker emitted by the transform: flags this page as statically generated.
export var __N_SSG = true;
export default function Test() {
    return __jsx("div", null);
}
|
javascript
|
github
|
https://github.com/vercel/next.js
|
crates/next-custom-transforms/tests/fixture/ssg/getStaticProps/should-remove-re-exported-function-declarations-dependents-variables-functions-imports/output.js
|
import pickle
import random
import os
import sys
import time
import CORE_DATA
def merge(d1, d2, merger=lambda x, y: x + y):
    """Return a new dict combining ``d1`` and ``d2``.

    Keys present in only one input are copied through; keys present in both
    are combined with ``merger(d1_value, d2_value)`` (defaults to ``+``,
    which concatenates the word lists used by this script).
    Neither input dict is modified.
    """
    #http://stackoverflow.com/questions/38987/how-can-i-merge-two-python-dictionaries-as-a-single-expression
    result = dict(d1)
    # Fix: use items() instead of the Python-2-only iteritems() so the
    # helper also runs under Python 3; behavior is unchanged.
    for k, v in d2.items():
        if k in result:
            result[k] = merger(result[k], v)
        else:
            result[k] = v
    return result
# Writable Markov transition table: word -> list of observed next words.
full_data = {}
# Read-only transitions merged from extra "Marakov.*" files (never saved).
imported_data = {}
# Make sure the data directory exists before listing it.
try:
    tiedostot = os.listdir("Marakov")
except:
    os.mkdir("Marakov")
    tiedostot = os.listdir("Marakov")
else:
    pass
# Keep only files whose name contains "marakov." (case-insensitive).
listaus = []
for i in tiedostot:
    if "marakov." not in i.lower():
        pass
    else:
        listaus.append(i)
# Load every data file: "Marakov.Cache" feeds the writable full_data,
# anything else feeds the read-only imported_data.
# NOTE(review): file handles opened here are never closed.
for i in listaus:
    tiedosto = open("Marakov/"+i,"r")
    old_size = len(full_data.keys())
    if i != "Marakov.Cache":
        imported_data = merge(imported_data,pickle.load(tiedosto))
        print "Added contents of "+i+" (Import)"
        print "Entries: "+str(len(imported_data))
    else:
        full_data = merge(full_data,pickle.load(tiedosto))
        new_size = len(full_data.keys())
        print "Added contents of "+i
        print "Entries: "+str(new_size-old_size)
    time.sleep(0.1)
def give_data(data):
    # Learn from one sentence: record every (word, following-word) pair in
    # the global full_data table, skipping bot-command words and URLs, and
    # persist the cache when anything new was seen.
    state = False
    for a,b in zip(data.split(" "),data.split(" ")[1:]):
        # Normalize both words: lowercase, strip punctuation and quotes.
        a = a.lower().replace(",","").replace(".","").replace("?","").replace("!","").replace("(","").replace(")","").replace("[","").replace("]","").replace('"',"").replace("'","")
        b = b.lower().replace(",","").replace(".","").replace("?","").replace("!","").replace("(","").replace(")","").replace("[","").replace("]","").replace('"',"").replace("'","")
        # Skip the bot's own trigger words (CORE_DATA is a project module).
        if a not in [CORE_DATA.prefix+"marakov"]+CORE_DATA.SName:
            state = True
            if a[:7] == "http://" or a[:7] == "http:\\\\" or a[:4] == "www.":
                pass
            else:
                try:
                    if b not in full_data[a]:
                        full_data[a].append(b)
                except:
                    # 'a' is unknown to full_data; only add it when it is
                    # also absent from the imported data.
                    # NOTE(review): a transition already in imported_data[a]
                    # is deliberately dropped here -- confirm intended.
                    try:
                        if b not in imported_data[a]:
                            pass
                    except:
                        full_data[a] = []
                        full_data[a].append(b)
    # Persist the cache only if something outside the skip list was seen.
    if state == True:
        tiedosto = open("Marakov/Marakov.Cache","w")
        pickle.dump(full_data,tiedosto)
        tiedosto.close()
def form_sentence(argument=None):
    # Generate a sentence by walking the Markov chain, starting from
    # `argument` (or a random known word).  Up to 20 attempts are made to
    # produce a sentence longer than 5 words; returns the word list.
    length = 0
    attempts = 0
    while attempts < 20:
        sentence = []
        if argument != None:
            a = argument
        else:
            try:
                a = random.choice(full_data.keys())
            except IndexError:
                try:
                    b = random.choice(imported_data.keys())
                except IndexError:
                    attempts = 999
                    return "No sentences formable at all"
        sentence.append(a)
        length = 0
        attempts += 1
        # Keep extending while shorter than 12 words, or while the last word
        # is a conjunction/preposition and we are under 24 words.
        # NOTE(review): precedence is `A or (B and C)` -- confirm intended.
        while length < 12 or sentence[-1].lower() in ["but","who","gets","im","most","is","it","if","then","after","over","every","of","on","or","as","the","wheather","whether","a","to","and","for"] and length < 24:
            try:
                b = random.choice(full_data[a])
            except:
                try:
                    b = random.choice(imported_data[a])
                except IndexError:
                    break
                except KeyError:
                    break
                else:
                    sentence.append(b)
                    length += 1
                    a = b
            else:
                sentence.append(b)
                length += 1
                a = b
        if len(sentence) > 5:
            argument = None
            return sentence
        else:
            pass
    # Fell through after 20 attempts: return whatever was built last.
    argument = None
    return sentence
def remdata(arg):
    # Delete a word (with all its transitions) from full_data and persist
    # the cache; prints a message when the word is unknown.
    try:
        del(full_data[arg])
    except:
        print "There is no such data"
    else:
        tiedosto = open("Marakov/Marakov.Cache","w")
        pickle.dump(full_data,tiedosto)
        tiedosto.close()
def remobject(arg1,arg2):
    # Delete a single transition arg1 -> arg2 from full_data and persist
    # the cache; prints a message when the word or transition is unknown.
    try:
        del(full_data[arg1][full_data[arg1].index(arg2)])
    except ValueError:
        print "No such object"
    except KeyError:
        print "No such data"
    else:
        tiedosto = open("Marakov/Marakov.Cache","w")
        pickle.dump(full_data,tiedosto)
        tiedosto.close()
def convert(filename_from,filename_to):
    # Normalize a pickled transition table: lowercase all keys and values,
    # strip one trailing punctuation character, drop URLs and empty strings,
    # merging entries that collide after normalization.  Writes the result
    # to filename_to.
    try:
        tiedosto = open(filename_from,"r")
        data = pickle.load(tiedosto)
        tiedosto.close()
    except:
        try:
            tiedosto.close()
        except:
            pass
        print "Error!"
    else:
        # Pass 1: normalize the keys.
        for lista in data.keys():
            try:
                a = lista[-1]
            except IndexError:
                pass
            else:
                if lista[-1] in """",.?!'()[]{}""" and not lista.islower():
                    if lista[:-1].lower() in data.keys():
                        data[lista[:-1].lower()] += data[lista]
                        print "Added "+str(len(data[lista]))+" Objects from "+lista+" To "+lista[:-1].lower()
                        del(data[lista])
                    else:
                        data[lista[:-1].lower()] = data[lista]
                        print lista+" Is now "+lista[:-1].lower()
                        del(data[lista])
                elif lista[-1] in """",.?!'()[]{}""" and lista.islower():
                    if lista[:-1] in data.keys():
                        data[lista[:-1]] += data[lista]
                        print "Added "+str(len(data[lista]))+" Objects from "+lista+" To "+lista[:-1]
                        del(data[lista])
                    else:
                        data[lista[:-1]] = data[lista]
                        print lista+" Is now "+lista[:-1]
                        del(data[lista])
                elif not lista.islower():
                    if lista.lower() in data.keys():
                        data[lista.lower()] += data[lista]
                        print "Added "+str(len(data[lista]))+" Objects from "+lista+" To "+lista.lower()
                        del(data[lista])
                    else:
                        data[lista.lower()] = data[lista]
                        print lista+" Is now "+lista.lower()
                        del(data[lista])
        # Pass 2: normalize the values inside each transition list.
        # NOTE(review): mutating data[a] while iterating over it can skip
        # elements -- confirm intended.
        for a in data.keys():
            for b in data[a]:
                if b.lower()[:7] == "http://" or b.lower()[:7] == "http:\\\\" or b.lower()[:4] == "www.":
                    # NOTE(review): list.pop() takes an index, but b is the
                    # element -- this line would raise TypeError; probably
                    # data[a].remove(b) was intended.
                    data[a].pop(b)
                else:
                    try:
                        if b[-1] in """",.?!'()[]{}""" and not b.islower() and not b.isdigit():
                            data[a].pop(data[a].index(b))
                            data[a].append(b[:-1].lower())
                            print a+" | "+b +" -> "+b[:-1].lower()
                        elif b[-1] in """",.?!'()[]{}""" and b.islower():
                            data[a].pop(data[a].index(b))
                            data[a].append(b[:-1].lower())
                            print a+" | "+b +" -> "+b[:-1]
                        elif not b.islower() and not b.isdigit():
                            data[a].pop(data[a].index(b))
                            data[a].append(b.lower())
                            print a+" | "+b +" -> "+b.lower()
                    except IndexError: #If it has no letters.. well.. yeah.
                        data[a].pop(data[a].index(b))
                        print "Removed a NULL object"
        # NOTE(review): output handle is never closed/flushed explicitly.
        tiedosto = open(filename_to,"w")
        pickle.dump(data,tiedosto)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.cosn.contract;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
/**
* CosN contract tests for renaming a file.
*/
public class TestCosNContractRename extends AbstractContractRenameTest {
  /**
   * Create the CosN filesystem contract that the inherited rename
   * tests run against.
   *
   * @param configuration configuration used to construct the contract.
   * @return a new {@link CosNContract} bound to the given configuration.
   */
  @Override
  protected AbstractFSContract createContract(Configuration configuration) {
    return new CosNContract(configuration);
  }
}
|
java
|
github
|
https://github.com/apache/hadoop
|
hadoop-cloud-storage-project/hadoop-cos/src/test/java/org/apache/hadoop/fs/cosn/contract/TestCosNContractRename.java
|
"""
Title: test_findpeaks.py
Author: Tina Yang, tina.yang@ga.gov.au
CreationDate: 2014-05-01
Description: Unit testing module for findpeaks function in findpeaks.py
Version: $Rev$
$Id$
"""
import sys
import os.path
import unittest
import numpy as np
from numpy.testing import assert_almost_equal
from matplotlib import pyplot
import logging as log
from test_all_topo_engineered_data import test_line, expect_results
from inspect import getfile, currentframe
def escarpment_factor(profile, ridge, valley, data_spacing):
    """
    Calculate the escarpment factor for one ridge of a terrain profile.

    :param profile: 1-D array of terrain heights along the profile line.
    :param ridge: index of the ridge (peak) within ``profile``.
    :param valley: index of the upwind valley within ``profile``.
    :param data_spacing: horizontal distance between adjacent profile points.

    :returns: tuple ``(H, slope, slope_r2mL2, escarp_factor)`` where ``H``
        is the ridge-to-valley height, ``slope`` the upwind slope,
        ``slope_r2mL2`` the downwind slope (999 when the ridge sits at the
        profile end), and ``escarp_factor`` clamped to [0.5, 3].
    """
    max_escarp = 3
    min_escarp = 0.5
    nrow = np.size(profile)

    # Height and half-length of the upwind face of the hill
    H = profile[ridge] - profile[valley]
    Lu = abs(ridge - valley) * data_spacing / 2
    slope = H / (2 * Lu)

    # Index located 2*Lu downwind of the ridge, clipped to the profile end.
    # Bug fix: np.floor returns a float, which cannot be used to index an
    # array on modern NumPy -- cast to int explicitly.
    beta_ind = int(np.minimum(nrow - 1,
                              np.floor(ridge + (2 * Lu / data_spacing))))
    H_r2beta = profile[ridge] - profile[beta_ind]
    D_r2beta = (beta_ind - ridge) * data_spacing

    if D_r2beta > 0:  # D_r2beta can be 0, 25, 50, ...
        slope_r2mL2 = H_r2beta / D_r2beta
        # when a symmetrical ridge slope_r2mL2=slope so escarp_factor=1
        # If slope_r2mL2=0, escarp_factor=2.5
        escarp_factor = 2.5 - 1.5 * slope_r2mL2 / slope
        # Clamp to the physically meaningful range.
        if escarp_factor < min_escarp:
            escarp_factor = min_escarp
        elif escarp_factor > max_escarp:
            escarp_factor = max_escarp
    else:  # the ridge is on the end
        slope_r2mL2 = 999
        escarp_factor = 1

    return H, slope, slope_r2mL2, escarp_factor
class TestFindpeaks(unittest.TestCase):
    """Unit tests for the findpeaks/findvalleys helpers, driven by the
    engineered profiles in test_all_topo_engineered_data."""
    def setUp(self):
        # Horizontal distance between adjacent profile points, shared by
        # every scenario.
        self.data_spacing = 25
    def test_findpeaks(self):
        """Run each engineered profile through findpeaks/findvalleys,
        derive per-hill parameters, and compare with expected results."""
        # Make the package root importable so `topographic` resolves when
        # this module is run directly from the command line.
        cmd_folder = os.path.realpath(
            os.path.abspath(os.path.split(
                getfile(currentframe()))[0]))
        parent = os.path.abspath(os.path.join(cmd_folder, os.pardir))
        grandparent = os.path.abspath(os.path.join(parent, os.pardir))
        if grandparent not in sys.path:
            sys.path.insert(0, grandparent)
        from topographic.findpeaks import findpeaks, findvalleys
        # test for each scenerio
        for p in range(1, len(test_line)+1):
        #for p in range(3, 4):
            print '\ntest ' + str(p) + ' ...'
            nrow = np.size(test_line[p])
            # take the largest integer of each element of the data line
            fwd_line = np.floor(test_line[p])
            # Get the indices of the ridges & valleys
            ridge_ind = findpeaks(fwd_line)  # relative ind
            valley_ind = findvalleys(fwd_line)  # relative ind
            print ridge_ind
            print valley_ind
            # One row per detected ridge; defaults of 1.0 are kept for
            # hills that no branch below overwrites.
            nrow = np.size(ridge_ind)
            H = np.ones((nrow, 1), dtype=float)
            slope = np.ones((nrow, 1), dtype=float)
            downwind_slope = np.ones((nrow, 1), dtype=float)
            escarp_factor = np.ones((nrow, 1), dtype=float)
            if np.size(ridge_ind) == 0:  # the DEM is completely flat
                log.debug( "Flat line" )
            # the DEM is downward slope all the time
            elif np.size(ridge_ind) == 1 and ridge_ind[0] == 0:
                log.debug( "Downward slope" )
            else:  # 2 general cases, calculate m, works as Mh.m
                if ridge_ind[0] == 0:  # (1) down up down up ....
                    # First "ridge" is the profile start: pair ridge i with
                    # the valley preceding it.
                    for i in range(1, np.size(ridge_ind)):
                        H[i], slope[i], downwind_slope[i], escarp_factor[i] = \
                            escarpment_factor(fwd_line, ridge_ind[i],
                                              valley_ind[i-1],
                                              self.data_spacing)
                else:  # (2) up dowm up dowm ....
                    for i in range(0, np.size(ridge_ind)):
                        H[i], slope[i], downwind_slope[i], escarp_factor[i] = \
                            escarpment_factor(fwd_line, ridge_ind[i],
                                              valley_ind[i], self.data_spacing)
            hill_no = np.size(ridge_ind)
            # import pdb
            # pdb.set_trace()
            # Flatten results into the same layout as expect_results:
            # [hill count, H..., slope..., downwind slope..., escarpment...]
            scripts_result = np.concatenate([[hill_no], H.flatten(),
                                             slope.flatten(),
                                             downwind_slope.flatten(),
                                             escarp_factor.flatten()])
            print scripts_result
            print expect_results[p]
            #plot the line profile
            # point_no = len(test_line[p])
            # x = np.arange(point_no)
            # y = test_line[p]
            # pyplot.plot(x, y, 'g')
            # pyplot.show()
            assert_almost_equal(scripts_result, expect_results[p], decimal=2,
                                err_msg='',verbose=True)
# Allow the tests to be executed directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
{
"kind": "Dashboard",
"apiVersion": "dashboard.grafana.app/v2alpha1",
"metadata": {
"name": "v0alpha1.panel_tests_bar_gauge2.v42"
},
"spec": {
"annotations": [
{
"kind": "AnnotationQuery",
"spec": {
"query": {
"kind": "",
"spec": {}
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"builtIn": true,
"legacyOptions": {
"type": "dashboard"
}
}
}
],
"cursorSync": "Off",
"editable": true,
"elements": {
"panel-12": {
"kind": "Panel",
"spec": {
"id": 12,
"title": "Gradient ",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Inside",
"scenarioId": "csv_metric_values",
"stringInput": "100,100,100"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "H",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Outhouse",
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "F",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "B",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "C",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "bargauge",
"spec": {
"pluginVersion": "6.5.0-pre",
"options": {
"displayMode": "gradient",
"fieldOptions": {
"calcs": [
"mean"
],
"defaults": {
"mappings": [],
"max": 100,
"min": 0,
"thresholds": [
{
"color": "blue"
},
{
"color": "green",
"value": 20
},
{
"color": "orange",
"value": 40
},
{
"color": "red",
"value": 80
}
],
"unit": "celsius"
},
"overrides": [],
"values": false
},
"orientation": "horizontal",
"showUnfilled": false
},
"fieldConfig": {
"defaults": {},
"overrides": []
}
}
}
}
},
"panel-21": {
"kind": "Panel",
"spec": {
"id": 21,
"title": "Title to left of bar",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Inside",
"scenarioId": "csv_metric_values",
"stringInput": "100,100,100"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "H",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Outhouse",
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "F",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "B",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "C",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "bargauge",
"spec": {
"pluginVersion": "6.5.0-pre",
"options": {
"displayMode": "basic",
"fieldOptions": {
"calcs": [
"mean"
],
"defaults": {
"mappings": [],
"max": 100,
"min": 0,
"thresholds": [
{
"color": "blue"
},
{
"color": "green",
"value": 20
},
{
"color": "orange",
"value": 40
},
{
"color": "red",
"value": 80
}
],
"unit": "celsius"
},
"overrides": [],
"values": false
},
"orientation": "horizontal",
"showUnfilled": false
},
"fieldConfig": {
"defaults": {},
"overrides": []
}
}
}
}
},
"panel-22": {
"kind": "Panel",
"spec": {
"id": 22,
"title": "Basic vertical (Unfilled)",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "csv_metric_values",
"stringInput": "100,100,100"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "H",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "J",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "K",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "L",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "M",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "N",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "O",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "P",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "Q",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "bargauge",
"spec": {
"pluginVersion": "6.5.0-pre",
"options": {
"displayMode": "basic",
"fieldOptions": {
"calcs": [
"mean"
],
"defaults": {
"mappings": [],
"max": 100,
"min": 0,
"thresholds": [
{
"color": "green"
},
{
"color": "blue",
"value": 25
},
{
"color": "orange",
"value": 37.5
},
{
"color": "purple",
"value": 43.75
},
{
"color": "red",
"value": 50
}
],
"unit": "watt"
},
"overrides": [],
"values": false
},
"orientation": "vertical",
"showUnfilled": true
},
"fieldConfig": {
"defaults": {},
"overrides": []
}
}
}
}
},
"panel-23": {
"kind": "Panel",
"spec": {
"id": 23,
"title": "Gradient (Unfilled)",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Inside",
"scenarioId": "csv_metric_values",
"stringInput": "100,100,100"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "H",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Outhouse",
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "F",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "B",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "C",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "bargauge",
"spec": {
"pluginVersion": "6.5.0-pre",
"options": {
"displayMode": "gradient",
"fieldOptions": {
"calcs": [
"mean"
],
"defaults": {
"mappings": [],
"max": 100,
"min": 0,
"thresholds": [
{
"color": "blue"
},
{
"color": "green",
"value": 20
},
{
"color": "orange",
"value": 40
},
{
"color": "red",
"value": 80
}
],
"unit": "celsius"
},
"overrides": [],
"values": false
},
"orientation": "horizontal",
"showUnfilled": true
},
"fieldConfig": {
"defaults": {},
"overrides": []
}
}
}
}
},
"panel-24": {
"kind": "Panel",
"spec": {
"id": 24,
"title": "Title to left of bar (Filled)",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Inside",
"scenarioId": "csv_metric_values",
"stringInput": "100,100,100"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "H",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"alias": "Outhouse",
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "A",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "F",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "B",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "C",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "bargauge",
"spec": {
"pluginVersion": "6.5.0-pre",
"options": {
"displayMode": "basic",
"fieldOptions": {
"calcs": [
"mean"
],
"defaults": {
"mappings": [],
"max": 100,
"min": 0,
"thresholds": [
{
"color": "blue"
},
{
"color": "green",
"value": 20
},
{
"color": "orange",
"value": 40
},
{
"color": "red",
"value": 80
}
],
"unit": "celsius"
},
"overrides": [],
"values": false
},
"orientation": "horizontal",
"showUnfilled": true
},
"fieldConfig": {
"defaults": {},
"overrides": []
}
}
}
}
},
"panel-8": {
"kind": "Panel",
"spec": {
"id": 8,
"title": "Basic vertical ",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "C",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "D",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "I",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "J",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "K",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "L",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "M",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "N",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "O",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "P",
"hidden": false
}
},
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "grafana-testdata-datasource",
"spec": {
"scenarioId": "random_walk"
}
},
"datasource": {
"type": "grafana-testdata-datasource",
"uid": "testdata-type-uid"
},
"refId": "Q",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "bargauge",
"spec": {
"pluginVersion": "6.5.0-pre",
"options": {
"displayMode": "basic",
"fieldOptions": {
"calcs": [
"mean"
],
"defaults": {
"mappings": [],
"max": 100,
"min": 0,
"thresholds": [
{
"color": "green"
},
{
"color": "purple",
"value": 50
},
{
"color": "blue",
"value": 70
}
],
"unit": "watt"
},
"overrides": [],
"values": false
},
"orientation": "vertical",
"showUnfilled": false
},
"fieldConfig": {
"defaults": {},
"overrides": []
}
}
}
}
}
},
"layout": {
"kind": "GridLayout",
"spec": {
"items": [
{
"kind": "GridLayoutItem",
"spec": {
"x": 0,
"y": 0,
"width": 12,
"height": 10,
"element": {
"kind": "ElementReference",
"name": "panel-8"
}
}
},
{
"kind": "GridLayoutItem",
"spec": {
"x": 12,
"y": 0,
"width": 12,
"height": 10,
"element": {
"kind": "ElementReference",
"name": "panel-22"
}
}
},
{
"kind": "GridLayoutItem",
"spec": {
"x": 0,
"y": 10,
"width": 12,
"height": 9,
"element": {
"kind": "ElementReference",
"name": "panel-12"
}
}
},
{
"kind": "GridLayoutItem",
"spec": {
"x": 12,
"y": 10,
"width": 12,
"height": 9,
"element": {
"kind": "ElementReference",
"name": "panel-23"
}
}
},
{
"kind": "GridLayoutItem",
"spec": {
"x": 0,
"y": 19,
"width": 12,
"height": 6,
"element": {
"kind": "ElementReference",
"name": "panel-21"
}
}
},
{
"kind": "GridLayoutItem",
"spec": {
"x": 12,
"y": 19,
"width": 12,
"height": 6,
"element": {
"kind": "ElementReference",
"name": "panel-24"
}
}
}
]
}
},
"links": [],
"liveNow": false,
"preload": false,
"tags": [
"gdev",
"panel-tests"
],
"timeSettings": {
"timezone": "",
"from": "now-6h",
"to": "now",
"autoRefresh": "",
"autoRefreshIntervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"hideTimepicker": false,
"fiscalYearStartMonth": 0
},
"title": "Panel Tests - Bar Gauge 2",
"variables": []
},
"status": {
"conversion": {
"failed": false,
"storedVersion": "v0alpha1"
}
}
}
|
json
|
github
|
https://github.com/grafana/grafana
|
apps/dashboard/pkg/migration/conversion/testdata/output/migrated_dev_dashboards/panel-bargauge/v0alpha1.panel_tests_bar_gauge2.v42.v2alpha1.json
|
# Copyright (c) 2014-2015 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Hansson
import optparse
import m5
from m5.objects import *
from m5.util import addToPath
from m5.internal.stats import periodicStatDump
addToPath('../')
from common import MemConfig
# this script is helpful to sweep the efficiency of a specific memory
# controller configuration, by varying the number of banks accessed,
# and the sequential stride size (how many bytes per activate), and
# observe what bus utilisation (bandwidth) is achieved
parser = optparse.OptionParser()
# Use a single-channel DDR3-1600 x64 by default
parser.add_option("--mem-type", type="choice", default="DDR3_1600_x64",
choices=MemConfig.mem_names(),
help = "type of memory to use")
parser.add_option("--mem-ranks", "-r", type="int", default=1,
help = "Number of ranks to iterate across")
parser.add_option("--rd_perc", type="int", default=100,
help = "Percentage of read commands")
parser.add_option("--mode", type="choice", default="DRAM",
choices=["DRAM", "DRAM_ROTATE"],
help = "DRAM: Random traffic; \
DRAM_ROTATE: Traffic rotating across banks and ranks")
parser.add_option("--addr_map", type="int", default=1,
help = "0: RoCoRaBaCh; 1: RoRaBaCoCh/RoRaBaChCo")
(options, args) = parser.parse_args()
if args:
print "Error: script doesn't take any positional arguments"
sys.exit(1)
# at the moment we stay with the default open-adaptive page policy,
# and address mapping
# start with the system itself, using a multi-layer 2.0 GHz
# crossbar, delivering 64 bytes / 3 cycles (one header cycle)
# which amounts to 42.7 GByte/s per layer and thus per port
system = System(membus = IOXBar(width = 32))
system.clk_domain = SrcClockDomain(clock = '2.0GHz',
                                   voltage_domain =
                                   VoltageDomain(voltage = '1V'))
# we are fine with 256 MB memory for now
mem_range = AddrRange('256MB')
system.mem_ranges = [mem_range]
# do not worry about reserving space for the backing store
system.mmap_using_noreserve = True
# force a single channel to match the assumptions in the DRAM traffic
# generator
options.mem_channels = 1
options.external_memory_system = 0
options.tlm_memory = 0
options.elastic_trace_en = 0
MemConfig.config_mem(options, system)
# the following assumes that we are using the native DRAM
# controller, check to be sure
if not isinstance(system.mem_ctrls[0], m5.objects.DRAMCtrl):
    fatal("This script assumes the memory is a DRAMCtrl subclass")
# there is no point slowing things down by saving any data
system.mem_ctrls[0].null = True
# Set the address mapping based on input argument
# Default to RoRaBaCoCh
if options.addr_map == 0:
    system.mem_ctrls[0].addr_mapping = "RoCoRaBaCh"
elif options.addr_map == 1:
    system.mem_ctrls[0].addr_mapping = "RoRaBaCoCh"
else:
    fatal("Did not specify a valid address map argument")
# stay in each state for 0.25 ms, long enough to warm things up, and
# short enough to avoid hitting a refresh
period = 250000000
# this is where we go off piste, and print the traffic generator
# configuration that we will later use, crazy but it works
cfg_file_name = "configs/dram/sweep.cfg"
cfg_file = open(cfg_file_name, 'w')
# stay in each state as long as the dump/reset period, use the entire
# range, issue transactions of the right DRAM burst size, and match
# the DRAM maximum bandwidth to ensure that it is saturated
# get the number of banks
nbr_banks = system.mem_ctrls[0].banks_per_rank.value
# determine the burst length in bytes
burst_size = int((system.mem_ctrls[0].devices_per_rank.value *
                  system.mem_ctrls[0].device_bus_width.value *
                  system.mem_ctrls[0].burst_length.value) / 8)
# next, get the page size in bytes
page_size = system.mem_ctrls[0].devices_per_rank.value * \
    system.mem_ctrls[0].device_rowbuffer_size.value
# match the maximum bandwidth of the memory, the parameter is in seconds
# and we need it in ticks (ps)
itt = system.mem_ctrls[0].tBURST.value * 1000000000000
# assume we start at 0
max_addr = mem_range.end
# use min of the page size and 512 bytes as that should be more than
# enough
max_stride = min(512, page_size)
# now we create the state by iterating over the stride size from burst
# size to the max stride, and from using only a single bank up to the
# number of banks available
nxt_state = 0
for bank in range(1, nbr_banks + 1):
    for stride_size in range(burst_size, max_stride + 1, burst_size):
        # NOTE(review): the STATE field order below is assumed to match
        # the TrafficGen config-file parser; confirm against the gem5
        # traffic generator documentation before changing it.
        cfg_file.write("STATE %d %d %s %d 0 %d %d "
                       "%d %d %d %d %d %d %d %d %d\n" %
                       (nxt_state, period, options.mode, options.rd_perc,
                        max_addr, burst_size, itt, itt, 0, stride_size,
                        page_size, nbr_banks, bank, options.addr_map,
                        options.mem_ranks))
        nxt_state = nxt_state + 1
cfg_file.write("INIT 0\n")
# go through the states one by one
for state in range(1, nxt_state):
    cfg_file.write("TRANSITION %d %d 1\n" % (state - 1, state))
# the final state transitions back onto itself so the generator idles there
cfg_file.write("TRANSITION %d %d 1\n" % (nxt_state - 1, nxt_state - 1))
cfg_file.close()
# create a traffic generator, and point it to the file we just created
system.tgen = TrafficGen(config_file = cfg_file_name)
# add a communication monitor
system.monitor = CommMonitor()
# connect the traffic generator to the bus via a communication monitor
system.tgen.port = system.monitor.slave
system.monitor.master = system.membus.slave
# connect the system port even if it is not used in this example
system.system_port = system.membus.slave
# every period, dump and reset all stats
periodicStatDump(period)
# run Forrest, run!
root = Root(full_system = False, system = system)
root.system.mem_mode = 'timing'
m5.instantiate()
# run long enough for every generated state to be visited once
m5.simulate(nxt_state * period)
print "DRAM sweep with burst: %d, banks: %d, max stride: %d" % \
    (burst_size, nbr_banks, max_stride)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#![allow(
clippy::cast_lossless,
clippy::decimal_literal_representation,
clippy::derive_partial_eq_without_eq,
clippy::empty_enums,
clippy::manual_assert,
clippy::needless_pass_by_value,
clippy::uninlined_format_args,
clippy::unreadable_literal
)]
#![cfg_attr(feature = "unstable", feature(never_type))]
use serde::de::value::{F32Deserializer, F64Deserializer};
use serde::de::{Deserialize, DeserializeOwned, Deserializer, IntoDeserializer};
use serde_derive::Deserialize;
use serde_test::{assert_de_tokens, Configure, Token};
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::default::Default;
use std::ffi::{CStr, CString, OsString};
use std::fmt::Debug;
use std::iter;
use std::net;
use std::num::{
NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize, Saturating, Wrapping,
};
use std::ops::Bound;
use std::path::{Path, PathBuf};
use std::rc::{Rc, Weak as RcWeak};
use std::sync::atomic::{
AtomicBool, AtomicI16, AtomicI32, AtomicI8, AtomicIsize, AtomicU16, AtomicU32, AtomicU8,
AtomicUsize, Ordering,
};
#[cfg(target_arch = "x86_64")]
use std::sync::atomic::{AtomicI64, AtomicU64};
use std::sync::{Arc, Weak as ArcWeak};
use std::time::{Duration, UNIX_EPOCH};
#[macro_use]
mod macros;
//////////////////////////////////////////////////////////////////////////
/// Fieldless unit struct fixture.
#[derive(Copy, Clone, PartialEq, Debug, Deserialize)]
struct UnitStruct;
/// Unit struct with a const generic parameter, checking that the derive
/// handles const generics.
#[derive(Copy, Clone, PartialEq, Debug, Deserialize)]
struct GenericUnitStruct<const N: u8>;
/// Single-field newtype wrapper fixture.
#[derive(PartialEq, Debug, Deserialize)]
struct NewtypeStruct(i32);
/// Three-field tuple struct fixture.
#[derive(PartialEq, Debug, Deserialize)]
struct TupleStruct(i32, i32, i32);
/// Plain struct fixture; `c` is skipped during deserialization and is
/// therefore always left at its default value.
#[derive(PartialEq, Debug, Deserialize)]
struct Struct {
    a: i32,
    b: i32,
    #[serde(skip_deserializing)]
    c: i32,
}
/// Struct whose missing fields fall back to `Default::default()` (see the
/// impl below) thanks to the container-level `#[serde(default)]`.
#[derive(PartialEq, Debug, Deserialize)]
#[serde(default)]
struct StructDefault<T> {
    a: i32,
    b: T,
}
impl Default for StructDefault<String> {
    // Values used when fields are absent from the input.
    fn default() -> Self {
        StructDefault {
            a: 100,
            b: "default".to_string(),
        }
    }
}
/// Struct whose only field is skipped; deserializing always yields `a == 0`.
#[derive(PartialEq, Debug, Deserialize)]
struct StructSkipAll {
    #[serde(skip_deserializing)]
    a: i32,
}
/// Skipped field combined with a container-level default: `a` should come
/// from `Default::default()` (16, see the impl below), not i32's zero.
#[derive(PartialEq, Debug, Deserialize)]
#[serde(default)]
struct StructSkipDefault {
    #[serde(skip_deserializing)]
    a: i32,
}
/// Generic variant of the skipped-field + container-default combination.
#[derive(PartialEq, Debug, Deserialize)]
#[serde(default)]
pub struct StructSkipDefaultGeneric<T> {
    #[serde(skip_deserializing)]
    t: T,
}
impl Default for StructSkipDefault {
    fn default() -> Self {
        StructSkipDefault { a: 16 }
    }
}
/// All fields skipped while unknown fields are denied: any named field in
/// the input must be rejected rather than ignored.
#[derive(PartialEq, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct StructSkipAllDenyUnknown {
    #[serde(skip_deserializing)]
    a: i32,
}
/// Type that deliberately does not implement `Deserialize`; it may only
/// appear in skipped positions.
#[derive(Default, PartialEq, Debug)]
struct NotDeserializable;
/// Enum covering every variant shape: skipped, unit, newtype, tuple,
/// struct, and a variant whose payload is skipped.
#[derive(PartialEq, Debug, Deserialize)]
enum Enum {
    #[allow(dead_code)]
    #[serde(skip_deserializing)]
    Skipped,
    Unit,
    Simple(i32),
    Seq(i32, i32, i32),
    Map {
        a: i32,
        b: i32,
        c: i32,
    },
    SimpleWithSkipped(#[serde(skip_deserializing)] NotDeserializable),
}
/// Enum with a `#[serde(other)]` catch-all: unrecognised variant names map
/// to `Other` instead of producing an error.
#[derive(PartialEq, Debug, Deserialize)]
enum EnumOther {
    Unit,
    #[serde(other)]
    Other,
}
/// Wrapper that consumes and discards any input via `serde::de::IgnoredAny`.
#[derive(PartialEq, Debug)]
struct IgnoredAny;
impl<'de> Deserialize<'de> for IgnoredAny {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Drain whatever the deserializer produces, then succeed.
        serde::de::IgnoredAny::deserialize(deserializer)?;
        Ok(IgnoredAny)
    }
}
//////////////////////////////////////////////////////////////////////////
/// Assert that `value` deserializes from `tokens`, and that the same token
/// stream can also be skipped over as an ignored map value.
#[track_caller]
fn test<'de, T>(value: T, tokens: &'de [Token])
where
    T: Deserialize<'de> + PartialEq + Debug,
{
    // Check the tokens deserialize to exactly `value`
    assert_de_tokens(&value, tokens);
    // Test that the tokens are ignorable
    assert_de_tokens_ignore(tokens);
}
// Wrapper whose PartialEq always returns true (see impl below) — lets the
// harness "compare" values like Weak that have no meaningful equality.
#[derive(Debug)]
struct SkipPartialEq<T>(T);
// Transparent deserialization: delegate to the inner type and wrap.
impl<'de, T> Deserialize<'de> for SkipPartialEq<T>
where
    T: Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        T::deserialize(deserializer).map(SkipPartialEq)
    }
}
// Intentionally vacuous equality: the tests only care that deserialization
// succeeds, not about the inner value.
impl<T> PartialEq for SkipPartialEq<T> {
    fn eq(&self, _other: &Self) -> bool {
        true
    }
}
// Verifies that `ignorable_tokens` can be skipped: the tokens are spliced
// into a map entry ("ignored") that `IgnoreBase` does not declare, and
// deserialization must succeed by discarding them.
#[track_caller]
fn assert_de_tokens_ignore(ignorable_tokens: &[Token]) {
    #[derive(PartialEq, Debug, Deserialize)]
    struct IgnoreBase {
        a: i32,
    }
    // Build: { "a": 1, "ignored": <tokens under test> }
    let mut combined: Vec<Token> = vec![
        Token::Map { len: Some(2) },
        Token::Str("a"),
        Token::I32(1),
        Token::Str("ignored"),
    ];
    combined.extend(ignorable_tokens.iter().copied());
    combined.push(Token::MapEnd);
    assert_de_tokens(&IgnoreBase { a: 1 }, &combined);
}
//////////////////////////////////////////////////////////////////////////
// bool deserializes only from Bool tokens.
#[test]
fn test_bool() {
    test(true, &[Token::Bool(true)]);
    test(false, &[Token::Bool(false)]);
}
// i8 accepts any integer token whose value fits; extremes -128/127 checked.
#[test]
fn test_i8() {
    let test = test::<i8>;
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-128, &[Token::I16(-128)]);
    test(-128, &[Token::I32(-128)]);
    test(-128, &[Token::I64(-128)]);
    test(127, &[Token::I8(127)]);
    test(127, &[Token::I16(127)]);
    test(127, &[Token::I32(127)]);
    test(127, &[Token::I64(127)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(127, &[Token::U8(127)]);
    test(127, &[Token::U16(127)]);
    test(127, &[Token::U32(127)]);
    test(127, &[Token::U64(127)]);
}
// i16 from every integer token, at each source type's in-range extremes.
#[test]
fn test_i16() {
    let test = test::<i16>;
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-32768, &[Token::I32(-32768)]);
    test(-32768, &[Token::I64(-32768)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(32767, &[Token::I32(32767)]);
    test(32767, &[Token::I64(32767)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(32767, &[Token::U16(32767)]);
    test(32767, &[Token::U32(32767)]);
    test(32767, &[Token::U64(32767)]);
}
// i32 from every integer token, at each source type's in-range extremes.
#[test]
fn test_i32() {
    let test = test::<i32>;
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-2147483648, &[Token::I32(-2147483648)]);
    test(-2147483648, &[Token::I64(-2147483648)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(2147483647, &[Token::I64(2147483647)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(2147483647, &[Token::U32(2147483647)]);
    test(2147483647, &[Token::U64(2147483647)]);
}
// i64 from every integer token, at each source type's in-range extremes.
#[test]
fn test_i64() {
    let test = test::<i64>;
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-2147483648, &[Token::I32(-2147483648)]);
    test(-9223372036854775808, &[Token::I64(-9223372036854775808)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(9223372036854775807, &[Token::U64(9223372036854775807)]);
}
// i128 accepts the full range of every narrower integer token.
#[test]
fn test_i128() {
    let test = test::<i128>;
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-2147483648, &[Token::I32(-2147483648)]);
    test(-9223372036854775808, &[Token::I64(-9223372036854775808)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(18446744073709551615, &[Token::U64(18446744073709551615)]);
}
// isize: small values only, since its width is platform-dependent.
#[test]
fn test_isize() {
    let test = test::<isize>;
    // from signed
    test(-10, &[Token::I8(-10)]);
    test(-10, &[Token::I16(-10)]);
    test(-10, &[Token::I32(-10)]);
    test(-10, &[Token::I64(-10)]);
    test(10, &[Token::I8(10)]);
    test(10, &[Token::I16(10)]);
    test(10, &[Token::I32(10)]);
    test(10, &[Token::I64(10)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(10, &[Token::U8(10)]);
    test(10, &[Token::U16(10)]);
    test(10, &[Token::U32(10)]);
    test(10, &[Token::U64(10)]);
}
// u8 from signed and unsigned tokens; 0 and 255 (and in-range positives).
#[test]
fn test_u8() {
    let test = test::<u8>;
    // from signed
    test(0, &[Token::I8(0)]);
    test(0, &[Token::I16(0)]);
    test(0, &[Token::I32(0)]);
    test(0, &[Token::I64(0)]);
    test(127, &[Token::I8(127)]);
    test(255, &[Token::I16(255)]);
    test(255, &[Token::I32(255)]);
    test(255, &[Token::I64(255)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(255, &[Token::U16(255)]);
    test(255, &[Token::U32(255)]);
    test(255, &[Token::U64(255)]);
}
// u16 from signed and unsigned tokens at in-range extremes.
#[test]
fn test_u16() {
    let test = test::<u16>;
    // from signed
    test(0, &[Token::I8(0)]);
    test(0, &[Token::I16(0)]);
    test(0, &[Token::I32(0)]);
    test(0, &[Token::I64(0)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(65535, &[Token::I32(65535)]);
    test(65535, &[Token::I64(65535)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(65535, &[Token::U32(65535)]);
    test(65535, &[Token::U64(65535)]);
}
// u32 from signed and unsigned tokens at in-range extremes.
#[test]
fn test_u32() {
    let test = test::<u32>;
    // from signed
    test(0, &[Token::I8(0)]);
    test(0, &[Token::I16(0)]);
    test(0, &[Token::I32(0)]);
    test(0, &[Token::I64(0)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(4294967295, &[Token::I64(4294967295)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(4294967295, &[Token::U64(4294967295)]);
}
// u64 from signed and unsigned tokens at in-range extremes.
#[test]
fn test_u64() {
    let test = test::<u64>;
    // from signed
    test(0, &[Token::I8(0)]);
    test(0, &[Token::I16(0)]);
    test(0, &[Token::I32(0)]);
    test(0, &[Token::I64(0)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(18446744073709551615, &[Token::U64(18446744073709551615)]);
}
// u128 accepts the full non-negative range of every narrower token.
#[test]
fn test_u128() {
    let test = test::<u128>;
    // from signed
    test(0, &[Token::I8(0)]);
    test(0, &[Token::I16(0)]);
    test(0, &[Token::I32(0)]);
    test(0, &[Token::I64(0)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(18446744073709551615, &[Token::U64(18446744073709551615)]);
}
// usize: small values only, since its width is platform-dependent.
#[test]
fn test_usize() {
    let test = test::<usize>;
    // from signed
    test(0, &[Token::I8(0)]);
    test(0, &[Token::I16(0)]);
    test(0, &[Token::I32(0)]);
    test(0, &[Token::I64(0)]);
    test(10, &[Token::I8(10)]);
    test(10, &[Token::I16(10)]);
    test(10, &[Token::I32(10)]);
    test(10, &[Token::I64(10)]);
    // from unsigned
    test(0, &[Token::U8(0)]);
    test(0, &[Token::U16(0)]);
    test(0, &[Token::U32(0)]);
    test(0, &[Token::U64(0)]);
    test(10, &[Token::U8(10)]);
    test(10, &[Token::U16(10)]);
    test(10, &[Token::U32(10)]);
    test(10, &[Token::U64(10)]);
}
// NonZeroI8 mirrors the i8 tests; unsigned sources start at 1 (zero is invalid).
#[test]
fn test_nonzero_i8() {
    let test = |value, tokens| test(NonZeroI8::new(value).unwrap(), tokens);
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-128, &[Token::I16(-128)]);
    test(-128, &[Token::I32(-128)]);
    test(-128, &[Token::I64(-128)]);
    test(127, &[Token::I8(127)]);
    test(127, &[Token::I16(127)]);
    test(127, &[Token::I32(127)]);
    test(127, &[Token::I64(127)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(127, &[Token::U8(127)]);
    test(127, &[Token::U16(127)]);
    test(127, &[Token::U32(127)]);
    test(127, &[Token::U64(127)]);
}
// NonZeroI16 mirrors the i16 tests; zero excluded from unsigned sources.
#[test]
fn test_nonzero_i16() {
    let test = |value, tokens| test(NonZeroI16::new(value).unwrap(), tokens);
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-32768, &[Token::I32(-32768)]);
    test(-32768, &[Token::I64(-32768)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(32767, &[Token::I32(32767)]);
    test(32767, &[Token::I64(32767)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(32767, &[Token::U16(32767)]);
    test(32767, &[Token::U32(32767)]);
    test(32767, &[Token::U64(32767)]);
}
// NonZeroI32 mirrors the i32 tests; zero excluded from unsigned sources.
#[test]
fn test_nonzero_i32() {
    let test = |value, tokens| test(NonZeroI32::new(value).unwrap(), tokens);
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-2147483648, &[Token::I32(-2147483648)]);
    test(-2147483648, &[Token::I64(-2147483648)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(2147483647, &[Token::I64(2147483647)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(2147483647, &[Token::U32(2147483647)]);
    test(2147483647, &[Token::U64(2147483647)]);
}
// NonZeroI64 mirrors the i64 tests; zero excluded from unsigned sources.
#[test]
fn test_nonzero_i64() {
    let test = |value, tokens| test(NonZeroI64::new(value).unwrap(), tokens);
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-2147483648, &[Token::I32(-2147483648)]);
    test(-9223372036854775808, &[Token::I64(-9223372036854775808)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(9223372036854775807, &[Token::U64(9223372036854775807)]);
}
// NonZeroI128 mirrors the i128 tests; zero excluded from unsigned sources.
#[test]
fn test_nonzero_i128() {
    let test = |value, tokens| test(NonZeroI128::new(value).unwrap(), tokens);
    // from signed
    test(-128, &[Token::I8(-128)]);
    test(-32768, &[Token::I16(-32768)]);
    test(-2147483648, &[Token::I32(-2147483648)]);
    test(-9223372036854775808, &[Token::I64(-9223372036854775808)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(18446744073709551615, &[Token::U64(18446744073709551615)]);
}
// NonZeroIsize: small values only (platform-dependent width), zero excluded.
#[test]
fn test_nonzero_isize() {
    let test = |value, tokens| test(NonZeroIsize::new(value).unwrap(), tokens);
    // from signed
    test(-10, &[Token::I8(-10)]);
    test(-10, &[Token::I16(-10)]);
    test(-10, &[Token::I32(-10)]);
    test(-10, &[Token::I64(-10)]);
    test(10, &[Token::I8(10)]);
    test(10, &[Token::I16(10)]);
    test(10, &[Token::I32(10)]);
    test(10, &[Token::I64(10)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(10, &[Token::U8(10)]);
    test(10, &[Token::U16(10)]);
    test(10, &[Token::U32(10)]);
    test(10, &[Token::U64(10)]);
}
// NonZeroU8 mirrors the u8 tests with zero excluded everywhere.
#[test]
fn test_nonzero_u8() {
    let test = |value, tokens| test(NonZeroU8::new(value).unwrap(), tokens);
    // from signed
    test(1, &[Token::I8(1)]);
    test(1, &[Token::I16(1)]);
    test(1, &[Token::I32(1)]);
    test(1, &[Token::I64(1)]);
    test(127, &[Token::I8(127)]);
    test(255, &[Token::I16(255)]);
    test(255, &[Token::I32(255)]);
    test(255, &[Token::I64(255)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(255, &[Token::U16(255)]);
    test(255, &[Token::U32(255)]);
    test(255, &[Token::U64(255)]);
}
// NonZeroU16 mirrors the u16 tests with zero excluded everywhere.
#[test]
fn test_nonzero_u16() {
    let test = |value, tokens| test(NonZeroU16::new(value).unwrap(), tokens);
    // from signed
    test(1, &[Token::I8(1)]);
    test(1, &[Token::I16(1)]);
    test(1, &[Token::I32(1)]);
    test(1, &[Token::I64(1)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(65535, &[Token::I32(65535)]);
    test(65535, &[Token::I64(65535)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(65535, &[Token::U32(65535)]);
    test(65535, &[Token::U64(65535)]);
}
// NonZeroU32 mirrors the u32 tests with zero excluded everywhere.
#[test]
fn test_nonzero_u32() {
    let test = |value, tokens| test(NonZeroU32::new(value).unwrap(), tokens);
    // from signed
    test(1, &[Token::I8(1)]);
    test(1, &[Token::I16(1)]);
    test(1, &[Token::I32(1)]);
    test(1, &[Token::I64(1)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(4294967295, &[Token::I64(4294967295)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(4294967295, &[Token::U64(4294967295)]);
}
// NonZeroU64 mirrors the u64 tests with zero excluded everywhere.
#[test]
fn test_nonzero_u64() {
    let test = |value, tokens| test(NonZeroU64::new(value).unwrap(), tokens);
    // from signed
    test(1, &[Token::I8(1)]);
    test(1, &[Token::I16(1)]);
    test(1, &[Token::I32(1)]);
    test(1, &[Token::I64(1)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(18446744073709551615, &[Token::U64(18446744073709551615)]);
}
// NonZeroU128 mirrors the u128 tests with zero excluded everywhere.
#[test]
fn test_nonzero_u128() {
    let test = |value, tokens| test(NonZeroU128::new(value).unwrap(), tokens);
    // from signed
    test(1, &[Token::I8(1)]);
    test(1, &[Token::I16(1)]);
    test(1, &[Token::I32(1)]);
    test(1, &[Token::I64(1)]);
    test(127, &[Token::I8(127)]);
    test(32767, &[Token::I16(32767)]);
    test(2147483647, &[Token::I32(2147483647)]);
    test(9223372036854775807, &[Token::I64(9223372036854775807)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(255, &[Token::U8(255)]);
    test(65535, &[Token::U16(65535)]);
    test(4294967295, &[Token::U32(4294967295)]);
    test(18446744073709551615, &[Token::U64(18446744073709551615)]);
}
// NonZeroUsize: small values only (platform-dependent width), zero excluded.
#[test]
fn test_nonzero_usize() {
    let test = |value, tokens| test(NonZeroUsize::new(value).unwrap(), tokens);
    // from signed
    test(1, &[Token::I8(1)]);
    test(1, &[Token::I16(1)]);
    test(1, &[Token::I32(1)]);
    test(1, &[Token::I64(1)]);
    test(10, &[Token::I8(10)]);
    test(10, &[Token::I16(10)]);
    test(10, &[Token::I32(10)]);
    test(10, &[Token::I64(10)]);
    // from unsigned
    test(1, &[Token::U8(1)]);
    test(1, &[Token::U16(1)]);
    test(1, &[Token::U32(1)]);
    test(1, &[Token::U64(1)]);
    test(10, &[Token::U8(10)]);
    test(10, &[Token::U16(10)]);
    test(10, &[Token::U32(10)]);
    test(10, &[Token::U64(10)]);
}
// f32 deserializes from both F32 and F64 tokens.
#[test]
fn test_f32() {
    let test = test::<f32>;
    test(1.11, &[Token::F32(1.11)]);
    test(1.11, &[Token::F64(1.11)]);
}
// f64 from F32 and F64; the F32 case compares against the widened f32 value.
#[test]
fn test_f64() {
    let test = test::<f64>;
    test(1.11f32 as f64, &[Token::F32(1.11)]);
    test(1.11, &[Token::F64(1.11)]);
}
// NaN sign bit must survive deserialization in both directions (f32<->f64).
#[test]
fn test_nan() {
    let f32_deserializer = F32Deserializer::<serde::de::value::Error>::new;
    let f64_deserializer = F64Deserializer::<serde::de::value::Error>::new;
    let pos_f32_nan = f32_deserializer(f32::NAN.copysign(1.0));
    let pos_f64_nan = f64_deserializer(f64::NAN.copysign(1.0));
    assert!(f32::deserialize(pos_f32_nan).unwrap().is_sign_positive());
    assert!(f32::deserialize(pos_f64_nan).unwrap().is_sign_positive());
    assert!(f64::deserialize(pos_f32_nan).unwrap().is_sign_positive());
    assert!(f64::deserialize(pos_f64_nan).unwrap().is_sign_positive());
    let neg_f32_nan = f32_deserializer(f32::NAN.copysign(-1.0));
    let neg_f64_nan = f64_deserializer(f64::NAN.copysign(-1.0));
    assert!(f32::deserialize(neg_f32_nan).unwrap().is_sign_negative());
    assert!(f32::deserialize(neg_f64_nan).unwrap().is_sign_negative());
    assert!(f64::deserialize(neg_f32_nan).unwrap().is_sign_negative());
    assert!(f64::deserialize(neg_f64_nan).unwrap().is_sign_negative());
}
// char from a Char token or from a one-character string (borrowed or owned).
#[test]
fn test_char() {
    test('a', &[Token::Char('a')]);
    test('a', &[Token::Str("a")]);
    test('a', &[Token::String("a")]);
}
// String from borrowed str, owned string, or a single char.
#[test]
fn test_string() {
    test("abc".to_owned(), &[Token::Str("abc")]);
    test("abc".to_owned(), &[Token::String("abc")]);
    test("a".to_owned(), &[Token::Char('a')]);
}
// Option: None from Unit or None tokens, Some from Some + inner value.
#[test]
fn test_option() {
    test(None::<i32>, &[Token::Unit]);
    test(None::<i32>, &[Token::None]);
    test(Some(1), &[Token::Some, Token::I32(1)]);
}
// Result is represented as an externally tagged enum with Ok/Err variants.
#[test]
fn test_result() {
    test(
        Ok::<i32, i32>(0),
        &[
            Token::Enum { name: "Result" },
            Token::Str("Ok"),
            Token::I32(0),
        ],
    );
    test(
        Err::<i32, i32>(1),
        &[
            Token::Enum { name: "Result" },
            Token::Str("Err"),
            Token::I32(1),
        ],
    );
}
// The unit type deserializes from a Unit token.
#[test]
fn test_unit() {
    test((), &[Token::Unit]);
}
// Unit struct accepts both plain Unit and the named UnitStruct token.
#[test]
fn test_unit_struct() {
    test(UnitStruct, &[Token::Unit]);
    test(UnitStruct, &[Token::UnitStruct { name: "UnitStruct" }]);
}
// Const-generic unit struct behaves like a plain unit struct.
#[test]
fn test_generic_unit_struct() {
    test(GenericUnitStruct::<8>, &[Token::Unit]);
    test(
        GenericUnitStruct::<8>,
        &[Token::UnitStruct {
            name: "GenericUnitStruct",
        }],
    );
}
// Newtype struct: NewtypeStruct token followed by the inner value.
#[test]
fn test_newtype_struct() {
    test(
        NewtypeStruct(1),
        &[
            Token::NewtypeStruct {
                name: "NewtypeStruct",
            },
            Token::I32(1),
        ],
    );
}
// Tuple struct deserializes from three shapes: a sized Seq, an unsized Seq,
// and the dedicated TupleStruct token form. (The original contained two
// byte-identical copies of the TupleStruct case; the duplicate is removed.)
#[test]
fn test_tuple_struct() {
    test(
        TupleStruct(1, 2, 3),
        &[
            Token::Seq { len: Some(3) },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
        ],
    );
    test(
        TupleStruct(1, 2, 3),
        &[
            Token::Seq { len: None },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
        ],
    );
    test(
        TupleStruct(1, 2, 3),
        &[
            Token::TupleStruct {
                name: "TupleStruct",
                len: 3,
            },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::TupleStructEnd,
        ],
    );
}
// BTreeSet from an empty seq, nested sets, and an empty tuple-struct form.
#[test]
fn test_btreeset() {
    test(
        BTreeSet::<isize>::new(),
        &[Token::Seq { len: Some(0) }, Token::SeqEnd],
    );
    test(
        btreeset![btreeset![], btreeset![1], btreeset![2, 3]],
        &[
            Token::Seq { len: Some(3) },
            Token::Seq { len: Some(0) },
            Token::SeqEnd,
            Token::Seq { len: Some(1) },
            Token::I32(1),
            Token::SeqEnd,
            Token::Seq { len: Some(2) },
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
            Token::SeqEnd,
        ],
    );
    test(
        BTreeSet::<isize>::new(),
        &[
            Token::TupleStruct {
                name: "Anything",
                len: 0,
            },
            Token::TupleStructEnd,
        ],
    );
}
// HashSet from seqs (including with a custom hasher state via foldhash).
#[test]
fn test_hashset() {
    test(
        HashSet::<isize>::new(),
        &[Token::Seq { len: Some(0) }, Token::SeqEnd],
    );
    test(
        hashset![1, 2, 3],
        &[
            Token::Seq { len: Some(3) },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
        ],
    );
    test(
        HashSet::<isize>::new(),
        &[
            Token::TupleStruct {
                name: "Anything",
                len: 0,
            },
            Token::TupleStructEnd,
        ],
    );
    test(
        hashset![foldhash::fast::FixedState; 1, 2, 3],
        &[
            Token::Seq { len: Some(3) },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
        ],
    );
}
// Vec from an empty seq, nested vecs, and an empty tuple-struct form.
#[test]
fn test_vec() {
    test(
        Vec::<isize>::new(),
        &[Token::Seq { len: Some(0) }, Token::SeqEnd],
    );
    test(
        vec![vec![], vec![1], vec![2, 3]],
        &[
            Token::Seq { len: Some(3) },
            Token::Seq { len: Some(0) },
            Token::SeqEnd,
            Token::Seq { len: Some(1) },
            Token::I32(1),
            Token::SeqEnd,
            Token::Seq { len: Some(2) },
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
            Token::SeqEnd,
        ],
    );
    test(
        Vec::<isize>::new(),
        &[
            Token::TupleStruct {
                name: "Anything",
                len: 0,
            },
            Token::TupleStructEnd,
        ],
    );
}
// Fixed-size arrays from Seq and Tuple token forms, including zero-length.
#[test]
fn test_array() {
    test([0; 0], &[Token::Seq { len: Some(0) }, Token::SeqEnd]);
    test([0; 0], &[Token::Tuple { len: 0 }, Token::TupleEnd]);
    test(
        ([0; 0], [1], [2, 3]),
        &[
            Token::Seq { len: Some(3) },
            Token::Seq { len: Some(0) },
            Token::SeqEnd,
            Token::Seq { len: Some(1) },
            Token::I32(1),
            Token::SeqEnd,
            Token::Seq { len: Some(2) },
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
            Token::SeqEnd,
        ],
    );
    test(
        ([0; 0], [1], [2, 3]),
        &[
            Token::Tuple { len: 3 },
            Token::Tuple { len: 0 },
            Token::TupleEnd,
            Token::Tuple { len: 1 },
            Token::I32(1),
            Token::TupleEnd,
            Token::Tuple { len: 2 },
            Token::I32(2),
            Token::I32(3),
            Token::TupleEnd,
            Token::TupleEnd,
        ],
    );
    test(
        [0; 0],
        &[
            Token::TupleStruct {
                name: "Anything",
                len: 0,
            },
            Token::TupleStructEnd,
        ],
    );
}
// Tuples from both Seq and Tuple token forms.
#[test]
fn test_tuple() {
    test(
        (1,),
        &[Token::Seq { len: Some(1) }, Token::I32(1), Token::SeqEnd],
    );
    test(
        (1, 2, 3),
        &[
            Token::Seq { len: Some(3) },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::SeqEnd,
        ],
    );
    test(
        (1,),
        &[Token::Tuple { len: 1 }, Token::I32(1), Token::TupleEnd],
    );
    test(
        (1, 2, 3),
        &[
            Token::Tuple { len: 3 },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::TupleEnd,
        ],
    );
}
// BTreeMap from maps of various sizes, nested maps, and an empty struct form.
#[test]
fn test_btreemap() {
    test(
        BTreeMap::<isize, isize>::new(),
        &[Token::Map { len: Some(0) }, Token::MapEnd],
    );
    test(
        btreemap![1 => 2],
        &[
            Token::Map { len: Some(1) },
            Token::I32(1),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        btreemap![1 => 2, 3 => 4],
        &[
            Token::Map { len: Some(2) },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::I32(4),
            Token::MapEnd,
        ],
    );
    test(
        btreemap![1 => btreemap![], 2 => btreemap![3 => 4, 5 => 6]],
        &[
            Token::Map { len: Some(2) },
            Token::I32(1),
            Token::Map { len: Some(0) },
            Token::MapEnd,
            Token::I32(2),
            Token::Map { len: Some(2) },
            Token::I32(3),
            Token::I32(4),
            Token::I32(5),
            Token::I32(6),
            Token::MapEnd,
            Token::MapEnd,
        ],
    );
    test(
        BTreeMap::<isize, isize>::new(),
        &[
            Token::Struct {
                name: "Anything",
                len: 0,
            },
            Token::StructEnd,
        ],
    );
}
// HashMap: same shapes as BTreeMap, plus a custom hasher state (foldhash).
#[test]
fn test_hashmap() {
    test(
        HashMap::<isize, isize>::new(),
        &[Token::Map { len: Some(0) }, Token::MapEnd],
    );
    test(
        hashmap![1 => 2],
        &[
            Token::Map { len: Some(1) },
            Token::I32(1),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        hashmap![1 => 2, 3 => 4],
        &[
            Token::Map { len: Some(2) },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::I32(4),
            Token::MapEnd,
        ],
    );
    test(
        hashmap![1 => hashmap![], 2 => hashmap![3 => 4, 5 => 6]],
        &[
            Token::Map { len: Some(2) },
            Token::I32(1),
            Token::Map { len: Some(0) },
            Token::MapEnd,
            Token::I32(2),
            Token::Map { len: Some(2) },
            Token::I32(3),
            Token::I32(4),
            Token::I32(5),
            Token::I32(6),
            Token::MapEnd,
            Token::MapEnd,
        ],
    );
    test(
        HashMap::<isize, isize>::new(),
        &[
            Token::Struct {
                name: "Anything",
                len: 0,
            },
            Token::StructEnd,
        ],
    );
    test(
        hashmap![foldhash::fast::FixedState; 1 => 2, 3 => 4],
        &[
            Token::Map { len: Some(2) },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::I32(4),
            Token::MapEnd,
        ],
    );
}
// Struct from map form (string keys, every unsigned-integer key width, and
// mixed key types), from the dedicated Struct token form, and from a seq.
// The skipped field `c` always ends up 0.
#[test]
fn test_struct() {
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::Str("a"),
            Token::I32(1),
            Token::Str("b"),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::U8(0),
            Token::I32(1),
            Token::U8(1),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::U16(0),
            Token::I32(1),
            Token::U16(1),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::U32(0),
            Token::I32(1),
            Token::U32(1),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::U64(0),
            Token::I32(1),
            Token::U64(1),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    // Mixed key types
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::U8(0),
            Token::I32(1),
            Token::U64(1),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::U8(0),
            Token::I32(1),
            Token::Str("b"),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Struct {
                name: "Struct",
                len: 2,
            },
            Token::Str("a"),
            Token::I32(1),
            Token::Str("b"),
            Token::I32(2),
            Token::StructEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Seq { len: Some(3) },
            Token::I32(1),
            Token::I32(2),
            Token::SeqEnd,
        ],
    );
}
// Struct with zero-copy (borrowed) string keys in map and struct forms.
#[test]
fn test_struct_borrowed_keys() {
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::BorrowedStr("a"),
            Token::I32(1),
            Token::BorrowedStr("b"),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Struct {
                name: "Struct",
                len: 2,
            },
            Token::BorrowedStr("a"),
            Token::I32(1),
            Token::BorrowedStr("b"),
            Token::I32(2),
            Token::StructEnd,
        ],
    );
}
// Struct with owned String keys in map and struct forms.
#[test]
fn test_struct_owned_keys() {
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::String("a"),
            Token::I32(1),
            Token::String("b"),
            Token::I32(2),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Struct {
                name: "Struct",
                len: 2,
            },
            Token::String("a"),
            Token::I32(1),
            Token::String("b"),
            Token::I32(2),
            Token::StructEnd,
        ],
    );
}
// Input containing the skipped field "c" plus an unknown "d": both are
// discarded, so `c` stays 0 despite an explicit value in the stream.
#[test]
fn test_struct_with_skip() {
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::Str("a"),
            Token::I32(1),
            Token::Str("b"),
            Token::I32(2),
            Token::Str("c"),
            Token::I32(3),
            Token::Str("d"),
            Token::I32(4),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Map { len: Some(3) },
            Token::U8(0),
            Token::I32(1),
            Token::U16(1),
            Token::I32(2),
            Token::U32(2),
            Token::I32(3),
            Token::U64(3),
            Token::I32(4),
            Token::MapEnd,
        ],
    );
    test(
        Struct { a: 1, b: 2, c: 0 },
        &[
            Token::Struct {
                name: "Struct",
                len: 2,
            },
            Token::Str("a"),
            Token::I32(1),
            Token::Str("b"),
            Token::I32(2),
            Token::Str("c"),
            Token::I32(3),
            Token::Str("d"),
            Token::I32(4),
            Token::StructEnd,
        ],
    );
}
// All fields skipped: empty input works, and any supplied fields are ignored.
#[test]
fn test_struct_skip_all() {
    test(
        StructSkipAll { a: 0 },
        &[
            Token::Struct {
                name: "StructSkipAll",
                len: 0,
            },
            Token::StructEnd,
        ],
    );
    test(
        StructSkipAll { a: 0 },
        &[
            Token::Struct {
                name: "StructSkipAll",
                len: 0,
            },
            Token::Str("a"),
            Token::I32(1),
            Token::Str("b"),
            Token::I32(2),
            Token::StructEnd,
        ],
    );
}
// Skipped field takes the container-level default (16), not i32's 0.
#[test]
fn test_struct_skip_default() {
    test(
        StructSkipDefault { a: 16 },
        &[
            Token::Struct {
                name: "StructSkipDefault",
                len: 0,
            },
            Token::StructEnd,
        ],
    );
}
// deny_unknown_fields with all fields skipped: empty input still succeeds.
#[test]
fn test_struct_skip_all_deny_unknown() {
    test(
        StructSkipAllDenyUnknown { a: 0 },
        &[
            Token::Struct {
                name: "StructSkipAllDenyUnknown",
                len: 0,
            },
            Token::StructEnd,
        ],
    );
}
// Container default: present fields override it; absent fields fall back
// to Default::default() (a=100, b="default").
#[test]
fn test_struct_default() {
    test(
        StructDefault {
            a: 50,
            b: "overwritten".to_string(),
        },
        &[
            Token::Struct {
                name: "StructDefault",
                len: 2,
            },
            Token::Str("a"),
            Token::I32(50),
            Token::Str("b"),
            Token::String("overwritten"),
            Token::StructEnd,
        ],
    );
    test(
        StructDefault {
            a: 100,
            b: "default".to_string(),
        },
        &[
            Token::Struct {
                name: "StructDefault",
                len: 2,
            },
            Token::StructEnd,
        ],
    );
}
// Unit enum variant via the UnitVariant token.
#[test]
fn test_enum_unit() {
    test(
        Enum::Unit,
        &[Token::UnitVariant {
            name: "Enum",
            variant: "Unit",
        }],
    );
}
// Newtype enum variant via NewtypeVariant + payload.
#[test]
fn test_enum_simple() {
    test(
        Enum::Simple(1),
        &[
            Token::NewtypeVariant {
                name: "Enum",
                variant: "Simple",
            },
            Token::I32(1),
        ],
    );
}
// A newtype variant whose sole field is skipped deserializes like a unit variant.
#[test]
fn test_enum_simple_with_skipped() {
    test(
        Enum::SimpleWithSkipped(NotDeserializable),
        &[Token::UnitVariant {
            name: "Enum",
            variant: "SimpleWithSkipped",
        }],
    );
}
// Tuple enum variant via TupleVariant tokens.
#[test]
fn test_enum_seq() {
    test(
        Enum::Seq(1, 2, 3),
        &[
            Token::TupleVariant {
                name: "Enum",
                variant: "Seq",
                len: 3,
            },
            Token::I32(1),
            Token::I32(2),
            Token::I32(3),
            Token::TupleVariantEnd,
        ],
    );
}
// Struct enum variant via StructVariant tokens with named fields.
#[test]
fn test_enum_map() {
    test(
        Enum::Map { a: 1, b: 2, c: 3 },
        &[
            Token::StructVariant {
                name: "Enum",
                variant: "Map",
                len: 3,
            },
            Token::Str("a"),
            Token::I32(1),
            Token::Str("b"),
            Token::I32(2),
            Token::Str("c"),
            Token::I32(3),
            Token::StructVariantEnd,
        ],
    );
}
// Variant identified by numeric index (0 => first non-skipped variant, Unit).
// NOTE(review): the name says "usize" but the token used is U32 — presumably
// intentional (serde_test has no Usize token); confirm against serde_test docs.
#[test]
fn test_enum_unit_usize() {
    test(
        Enum::Unit,
        &[Token::Enum { name: "Enum" }, Token::U32(0), Token::Unit],
    );
}
// Variant identified by its name given as a byte string.
#[test]
fn test_enum_unit_bytes() {
    test(
        Enum::Unit,
        &[
            Token::Enum { name: "Enum" },
            Token::Bytes(b"Unit"),
            Token::Unit,
        ],
    );
}
// The known variant of an `#[serde(other)]` enum matches by name and by
// index 0 in every unsigned width.
#[test]
fn test_enum_other_unit() {
    test(
        EnumOther::Unit,
        &[
            Token::Enum { name: "EnumOther" },
            Token::Str("Unit"),
            Token::Unit,
        ],
    );
    test(
        EnumOther::Unit,
        &[Token::Enum { name: "EnumOther" }, Token::U8(0), Token::Unit],
    );
    test(
        EnumOther::Unit,
        &[
            Token::Enum { name: "EnumOther" },
            Token::U16(0),
            Token::Unit,
        ],
    );
    test(
        EnumOther::Unit,
        &[
            Token::Enum { name: "EnumOther" },
            Token::U32(0),
            Token::Unit,
        ],
    );
    test(
        EnumOther::Unit,
        &[
            Token::Enum { name: "EnumOther" },
            Token::U64(0),
            Token::Unit,
        ],
    );
}
// Unknown variant names ("Foo") and out-of-range indices (42) all fall
// through to the `#[serde(other)]` variant.
#[test]
fn test_enum_other() {
    test(
        EnumOther::Other,
        &[
            Token::Enum { name: "EnumOther" },
            Token::Str("Foo"),
            Token::Unit,
        ],
    );
    test(
        EnumOther::Other,
        &[
            Token::Enum { name: "EnumOther" },
            Token::U8(42),
            Token::Unit,
        ],
    );
    test(
        EnumOther::Other,
        &[
            Token::Enum { name: "EnumOther" },
            Token::U16(42),
            Token::Unit,
        ],
    );
    test(
        EnumOther::Other,
        &[
            Token::Enum { name: "EnumOther" },
            Token::U32(42),
            Token::Unit,
        ],
    );
    test(
        EnumOther::Other,
        &[
            Token::Enum { name: "EnumOther" },
            Token::U64(42),
            Token::Unit,
        ],
    );
}
// Box<T> is transparent: deserializes exactly like T.
#[test]
fn test_box() {
    test(Box::new(0i32), &[Token::I32(0)]);
}
// Box<[T]> (a true boxed slice) deserializes from a seq. The original used
// `Box::new([0, 1, 2])`, which is `Box<[i32; 3]>` — a boxed *array*, already
// covered by test_array/test_box — so this now exercises the slice impl.
#[test]
fn test_boxed_slice() {
    test(
        vec![0, 1, 2].into_boxed_slice(),
        &[
            Token::Seq { len: Some(3) },
            Token::I32(0),
            Token::I32(1),
            Token::I32(2),
            Token::SeqEnd,
        ],
    );
}
// Duration from its struct form {secs, nanos} and from a 2-element seq.
#[test]
fn test_duration() {
    test(
        Duration::new(1, 2),
        &[
            Token::Struct {
                name: "Duration",
                len: 2,
            },
            Token::Str("secs"),
            Token::U64(1),
            Token::Str("nanos"),
            Token::U32(2),
            Token::StructEnd,
        ],
    );
    test(
        Duration::new(1, 2),
        &[
            Token::Seq { len: Some(2) },
            Token::I64(1),
            Token::I64(2),
            Token::SeqEnd,
        ],
    );
}
// SystemTime from {secs_since_epoch, nanos_since_epoch} and from a seq.
#[test]
fn test_system_time() {
    test(
        UNIX_EPOCH + Duration::new(1, 2),
        &[
            Token::Struct {
                name: "SystemTime",
                len: 2,
            },
            Token::Str("secs_since_epoch"),
            Token::U64(1),
            Token::Str("nanos_since_epoch"),
            Token::U32(2),
            Token::StructEnd,
        ],
    );
    test(
        UNIX_EPOCH + Duration::new(1, 2),
        &[
            Token::Seq { len: Some(2) },
            Token::I64(1),
            Token::I64(2),
            Token::SeqEnd,
        ],
    );
}
// Range from its struct form {start, end} and from a 2-element seq.
#[test]
fn test_range() {
    test(
        1u32..2u32,
        &[
            Token::Struct {
                name: "Range",
                len: 2,
            },
            Token::Str("start"),
            Token::U32(1),
            Token::Str("end"),
            Token::U32(2),
            Token::StructEnd,
        ],
    );
    test(
        1u32..2u32,
        &[
            Token::Seq { len: Some(2) },
            Token::U64(1),
            Token::U64(2),
            Token::SeqEnd,
        ],
    );
}
// RangeInclusive from its struct form {start, end} and from a seq.
#[test]
fn test_range_inclusive() {
    test(
        1u32..=2u32,
        &[
            Token::Struct {
                name: "RangeInclusive",
                len: 2,
            },
            Token::Str("start"),
            Token::U32(1),
            Token::Str("end"),
            Token::U32(2),
            Token::StructEnd,
        ],
    );
    test(
        1u32..=2u32,
        &[
            Token::Seq { len: Some(2) },
            Token::U64(1),
            Token::U64(2),
            Token::SeqEnd,
        ],
    );
}
// RangeFrom from its struct form {start} and from a 1-element seq.
#[test]
fn test_range_from() {
    test(
        1u32..,
        &[
            Token::Struct {
                name: "RangeFrom",
                len: 1,
            },
            Token::Str("start"),
            Token::U32(1),
            Token::StructEnd,
        ],
    );
    test(
        1u32..,
        &[Token::Seq { len: Some(1) }, Token::U32(1), Token::SeqEnd],
    );
}
// RangeTo from its struct form {end} and from a 1-element seq.
#[test]
fn test_range_to() {
    test(
        ..2u32,
        &[
            Token::Struct {
                name: "RangeTo",
                len: 1,
            },
            Token::Str("end"),
            Token::U32(2),
            Token::StructEnd,
        ],
    );
    test(
        ..2u32,
        &[Token::Seq { len: Some(1) }, Token::U32(2), Token::SeqEnd],
    );
}
// Bound as an externally tagged enum: Unbounded / Included / Excluded.
#[test]
fn test_bound() {
    test(
        Bound::Unbounded::<()>,
        &[
            Token::Enum { name: "Bound" },
            Token::Str("Unbounded"),
            Token::Unit,
        ],
    );
    test(
        Bound::Included(0),
        &[
            Token::Enum { name: "Bound" },
            Token::Str("Included"),
            Token::U8(0),
        ],
    );
    test(
        Bound::Excluded(0),
        &[
            Token::Enum { name: "Bound" },
            Token::Str("Excluded"),
            Token::U8(0),
        ],
    );
}
#[test]
fn test_path() {
test(
Path::new("/usr/local/lib"),
&[Token::BorrowedStr("/usr/local/lib")],
);
test(
Path::new("/usr/local/lib"),
&[Token::BorrowedBytes(b"/usr/local/lib")],
);
}
#[test]
fn test_path_buf() {
test(
PathBuf::from("/usr/local/lib"),
&[Token::Str("/usr/local/lib")],
);
test(
PathBuf::from("/usr/local/lib"),
&[Token::String("/usr/local/lib")],
);
test(
PathBuf::from("/usr/local/lib"),
&[Token::Bytes(b"/usr/local/lib")],
);
test(
PathBuf::from("/usr/local/lib"),
&[Token::ByteBuf(b"/usr/local/lib")],
);
}
#[test]
fn test_boxed_path() {
test(
PathBuf::from("/usr/local/lib").into_boxed_path(),
&[Token::Str("/usr/local/lib")],
);
test(
PathBuf::from("/usr/local/lib").into_boxed_path(),
&[Token::String("/usr/local/lib")],
);
test(
PathBuf::from("/usr/local/lib").into_boxed_path(),
&[Token::Bytes(b"/usr/local/lib")],
);
test(
PathBuf::from("/usr/local/lib").into_boxed_path(),
&[Token::ByteBuf(b"/usr/local/lib")],
);
}
#[test]
fn test_cstring() {
test(CString::new("abc").unwrap(), &[Token::Bytes(b"abc")]);
}
#[test]
fn test_rc() {
test(Rc::new(true), &[Token::Bool(true)]);
}
#[test]
fn test_rc_weak_some() {
test(
SkipPartialEq(RcWeak::<bool>::new()),
&[Token::Some, Token::Bool(true)],
);
}
#[test]
fn test_rc_weak_none() {
test(SkipPartialEq(RcWeak::<bool>::new()), &[Token::None]);
}
#[test]
fn test_arc() {
test(Arc::new(true), &[Token::Bool(true)]);
}
#[test]
fn test_arc_weak_some() {
test(
SkipPartialEq(ArcWeak::<bool>::new()),
&[Token::Some, Token::Bool(true)],
);
}
#[test]
fn test_arc_weak_none() {
test(SkipPartialEq(ArcWeak::<bool>::new()), &[Token::None]);
}
#[test]
fn test_wrapping() {
test(Wrapping(1usize), &[Token::U32(1)]);
test(Wrapping(1usize), &[Token::U64(1)]);
}
#[test]
fn test_saturating() {
test(Saturating(1usize), &[Token::U32(1)]);
test(Saturating(1usize), &[Token::U64(1)]);
test(Saturating(0u8), &[Token::I8(0)]);
test(Saturating(0u16), &[Token::I16(0)]);
// saturate input values at the minimum or maximum value
test(Saturating(u8::MAX), &[Token::U16(u16::MAX)]);
test(Saturating(u8::MAX), &[Token::U16(u8::MAX as u16 + 1)]);
test(Saturating(u16::MAX), &[Token::U32(u32::MAX)]);
test(Saturating(u32::MAX), &[Token::U64(u64::MAX)]);
test(Saturating(u8::MIN), &[Token::I8(i8::MIN)]);
test(Saturating(u16::MIN), &[Token::I16(i16::MIN)]);
test(Saturating(u32::MIN), &[Token::I32(i32::MIN)]);
test(Saturating(i8::MIN), &[Token::I16(i16::MIN)]);
test(Saturating(i16::MIN), &[Token::I32(i32::MIN)]);
test(Saturating(i32::MIN), &[Token::I64(i64::MIN)]);
test(Saturating(u8::MIN), &[Token::I8(-1)]);
test(Saturating(u16::MIN), &[Token::I16(-1)]);
#[cfg(target_pointer_width = "64")]
{
test(Saturating(usize::MIN), &[Token::U64(u64::MIN)]);
test(Saturating(usize::MAX), &[Token::U64(u64::MAX)]);
test(Saturating(isize::MIN), &[Token::I64(i64::MIN)]);
test(Saturating(isize::MAX), &[Token::I64(i64::MAX)]);
test(Saturating(0usize), &[Token::I64(i64::MIN)]);
test(
Saturating(9_223_372_036_854_775_807usize),
&[Token::I64(i64::MAX)],
);
}
}
#[test]
fn test_rc_dst() {
test(Rc::<str>::from("s"), &[Token::Str("s")]);
test(
Rc::<[bool]>::from(&[true][..]),
&[
Token::Seq { len: Some(1) },
Token::Bool(true),
Token::SeqEnd,
],
);
}
#[test]
fn test_arc_dst() {
test(Arc::<str>::from("s"), &[Token::Str("s")]);
test(
Arc::<[bool]>::from(&[true][..]),
&[
Token::Seq { len: Some(1) },
Token::Bool(true),
Token::SeqEnd,
],
);
}
#[test]
fn test_ignored_any() {
test(IgnoredAny, &[Token::Str("s")]);
test(
IgnoredAny,
&[
Token::Seq { len: Some(1) },
Token::Bool(true),
Token::SeqEnd,
],
);
test(
IgnoredAny,
&[Token::Enum { name: "E" }, Token::Str("Rust"), Token::Unit],
);
}
#[test]
fn test_net_ipv4addr_readable() {
test(
"1.2.3.4".parse::<net::Ipv4Addr>().unwrap().readable(),
&[Token::Str("1.2.3.4")],
);
}
#[test]
fn test_net_ipv6addr_readable() {
test(
"::1".parse::<net::Ipv6Addr>().unwrap().readable(),
&[Token::Str("::1")],
);
}
#[test]
fn test_net_ipaddr_readable() {
test(
"1.2.3.4".parse::<net::IpAddr>().unwrap().readable(),
&[Token::Str("1.2.3.4")],
);
}
#[test]
fn test_net_socketaddr_readable() {
test(
"1.2.3.4:1234"
.parse::<net::SocketAddr>()
.unwrap()
.readable(),
&[Token::Str("1.2.3.4:1234")],
);
test(
"1.2.3.4:1234"
.parse::<net::SocketAddrV4>()
.unwrap()
.readable(),
&[Token::Str("1.2.3.4:1234")],
);
test(
"[::1]:1234"
.parse::<net::SocketAddrV6>()
.unwrap()
.readable(),
&[Token::Str("[::1]:1234")],
);
}
#[test]
fn test_net_ipv4addr_compact() {
test(
net::Ipv4Addr::from(*b"1234").compact(),
&seq![
Token::Tuple { len: 4 },
b"1234".iter().copied().map(Token::U8),
Token::TupleEnd
],
);
}
#[test]
fn test_net_ipv6addr_compact() {
    // In compact form an Ipv6Addr is a tuple of its 16 octets, so the tuple
    // length hint must be 16. It was `len: 4`, apparently copy-pasted from
    // the Ipv4 test; compare test_net_socketaddr_compact, which uses
    // `Token::Tuple { len: 16 }` for the same address type.
    test(
        net::Ipv6Addr::from(*b"1234567890123456").compact(),
        &seq![
            Token::Tuple { len: 16 },
            b"1234567890123456".iter().copied().map(Token::U8),
            Token::TupleEnd
        ],
    );
}
#[test]
fn test_net_ipaddr_compact() {
test(
net::IpAddr::from(*b"1234").compact(),
&seq![
Token::NewtypeVariant {
name: "IpAddr",
variant: "V4"
},
Token::Tuple { len: 4 },
b"1234".iter().copied().map(Token::U8),
Token::TupleEnd
],
);
}
#[test]
fn test_net_socketaddr_compact() {
test(
net::SocketAddr::from((*b"1234567890123456", 1234)).compact(),
&seq![
Token::NewtypeVariant {
name: "SocketAddr",
variant: "V6"
},
Token::Tuple { len: 2 },
Token::Tuple { len: 16 },
b"1234567890123456".iter().copied().map(Token::U8),
Token::TupleEnd,
Token::U16(1234),
Token::TupleEnd
],
);
test(
net::SocketAddr::from((*b"1234", 1234)).compact(),
&seq![
Token::NewtypeVariant {
name: "SocketAddr",
variant: "V4"
},
Token::Tuple { len: 2 },
Token::Tuple { len: 4 },
b"1234".iter().copied().map(Token::U8),
Token::TupleEnd,
Token::U16(1234),
Token::TupleEnd
],
);
test(
net::SocketAddrV4::new(net::Ipv4Addr::from(*b"1234"), 1234).compact(),
&seq![
Token::Tuple { len: 2 },
Token::Tuple { len: 4 },
b"1234".iter().copied().map(Token::U8),
Token::TupleEnd,
Token::U16(1234),
Token::TupleEnd
],
);
test(
net::SocketAddrV6::new(net::Ipv6Addr::from(*b"1234567890123456"), 1234, 0, 0).compact(),
&seq![
Token::Tuple { len: 2 },
Token::Tuple { len: 16 },
b"1234567890123456".iter().copied().map(Token::U8),
Token::TupleEnd,
Token::U16(1234),
Token::TupleEnd
],
);
}
#[cfg(feature = "unstable")]
#[test]
fn test_never_result() {
test(
Ok::<u8, !>(0),
&[
Token::NewtypeVariant {
name: "Result",
variant: "Ok",
},
Token::U8(0),
],
);
}
#[cfg(unix)]
#[test]
fn test_osstring() {
use std::os::unix::ffi::OsStringExt;
let value = OsString::from_vec(vec![1, 2, 3]);
let tokens = [
Token::Enum { name: "OsString" },
Token::Str("Unix"),
Token::Seq { len: Some(2) },
Token::U8(1),
Token::U8(2),
Token::U8(3),
Token::SeqEnd,
];
assert_de_tokens(&value, &tokens);
assert_de_tokens_ignore(&tokens);
}
#[cfg(windows)]
#[test]
fn test_osstring() {
use std::os::windows::ffi::OsStringExt;
let value = OsString::from_wide(&[1, 2, 3]);
let tokens = [
Token::Enum { name: "OsString" },
Token::Str("Windows"),
Token::Seq { len: Some(2) },
Token::U16(1),
Token::U16(2),
Token::U16(3),
Token::SeqEnd,
];
assert_de_tokens(&value, &tokens);
assert_de_tokens_ignore(&tokens);
}
#[test]
fn test_cstr() {
assert_de_tokens::<Box<CStr>>(
&CString::new("abc").unwrap().into_boxed_c_str(),
&[Token::Bytes(b"abc")],
);
}
#[test]
fn test_atomics() {
    // Deserialize a plain value into the corresponding atomic type, then
    // `load` it back and compare. This local `test` helper shadows the
    // file-level one: atomic types do not implement PartialEq, so equality
    // has to be checked on the loaded inner value instead.
    fn test<L, A, T>(load: L, val: T)
    where
        L: Fn(&A, Ordering) -> T,
        A: DeserializeOwned,
        T: PartialEq + Debug + Copy + for<'de> IntoDeserializer<'de>,
    {
        match A::deserialize(val.into_deserializer()) {
            Ok(v) => {
                let loaded = load(&v, Ordering::Relaxed);
                assert_eq!(val, loaded);
            }
            Err(e) => panic!("tokens failed to deserialize: {}", e),
        }
    }
    test(AtomicBool::load, true);
    test(AtomicI8::load, -127i8);
    test(AtomicI16::load, -510i16);
    test(AtomicI32::load, -131072i32);
    test(AtomicIsize::load, -131072isize);
    test(AtomicU8::load, 127u8);
    test(AtomicU16::load, 510u16);
    test(AtomicU32::load, 131072u32);
    test(AtomicUsize::load, 131072usize);
    // 64-bit atomics are not available on every target, so gate them on an
    // architecture where they are guaranteed to exist.
    #[cfg(target_arch = "x86_64")]
    {
        test(AtomicI64::load, -8589934592i64);
        test(AtomicU64::load, 8589934592u64);
    }
}
|
rust
|
github
|
https://github.com/serde-rs/serde
|
test_suite/tests/test_de.rs
|
#ifndef MetalNeuronType_h
#define MetalNeuronType_h
#import <ATen/native/metal/mpscnn/MPSCNNNeuronOp.h>
#import <MetalPerformanceShaders/MetalPerformanceShaders.h>
#include <ATen/ATen.h>
namespace at::native::metal {
// Activation ("neuron") functions the Metal backend can fuse into an op.
// `None` means no activation is fused; `Clamp` is a min/max clamp
// (hardtanh-style), used when both output bounds are finite.
enum class NeuronType {
  None,
  Clamp,
  Relu,
  Sigmoid,
  HardSigmoid,
  Tanh,
};
// Infer which fused activation the given output clamp bounds imply:
// [0, +inf) -> Relu, finite [min, max] -> Clamp, anything else -> None.
// Sigmoid/HardSigmoid/Tanh are never inferred here; callers select those
// explicitly.
static inline NeuronType neuronType(
    std::optional<c10::Scalar> output_min,
    std::optional<c10::Scalar> output_max) {
  float inf_max = std::numeric_limits<float>::infinity();
  float inf_min = -std::numeric_limits<float>::infinity();
  // A missing bound is treated as unbounded in that direction.
  float output_max_ =
      output_max.has_value() ? output_max.value().toFloat() : inf_max;
  float output_min_ =
      output_min.has_value() ? output_min.value().toFloat() : inf_min;
  if (output_max_ == inf_max && output_min_ == 0) {
    return NeuronType::Relu;
  } else if (output_max_ < inf_max && output_min_ > inf_min) {
    return NeuronType::Clamp;
  } else {
    // NOTE(review): a one-sided bound (e.g. only a finite min with an
    // unbounded max) falls through to None, i.e. the bound is silently
    // dropped — confirm callers never pass that combination.
    return NeuronType::None;
  }
}
// Map a NeuronType to its cached MPSCNNNeuron op instance. Returns nil for
// None, Clamp, and any other value with no dedicated neuron op.
static inline MPSCNNNeuron* neuron(NeuronType type) {
  switch (type) {
    case NeuronType::Relu:
      return [MPSCNNNeuronOp relu];
    case NeuronType::Sigmoid:
      return [MPSCNNNeuronOp sigmoid];
    case NeuronType::Tanh:
      return [MPSCNNNeuronOp tanh];
    case NeuronType::HardSigmoid:
      return [MPSCNNNeuronOp hardSigmoid];
    default:
      return nil;
  }
}
API_AVAILABLE(ios(11.3), macos(10.13), macCatalyst(13.0))
// Descriptor-based twin of neuron(), for the newer MPS descriptor APIs.
// Unlike neuron(), unmatched types map to an explicit "no neuron"
// descriptor rather than nil.
static inline MPSNNNeuronDescriptor* neuronDescriptor(NeuronType type) {
  switch (type) {
    case NeuronType::Relu:
      return [MPSCNNNeuronOpDescriptor reluDescriptor];
    case NeuronType::Sigmoid:
      return [MPSCNNNeuronOpDescriptor sigmoidDescriptor];
    case NeuronType::Tanh:
      return [MPSCNNNeuronOpDescriptor tanhDescriptor];
    case NeuronType::HardSigmoid:
      return [MPSCNNNeuronOpDescriptor hardSigmoidDescriptor];
    default:
      return [MPSNNNeuronDescriptor cnnNeuronDescriptorWithType:MPSCNNNeuronTypeNone];
  }
}
} // namespace at::native::metal
#endif /* MetalNeuronType_h */
|
c
|
github
|
https://github.com/pytorch/pytorch
|
aten/src/ATen/native/metal/MetalNeuronType.h
|
"""
EnOcean Component.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/EnOcean/
"""
DOMAIN = "enocean"
REQUIREMENTS = ['enocean==0.31']
CONF_DEVICE = "device"
ENOCEAN_DONGLE = None
def setup(hass, config):
    """Set up the EnOcean component: open the serial dongle and store it
    in the module-level ENOCEAN_DONGLE so platform devices can register."""
    global ENOCEAN_DONGLE
    # Fall back to the common USB300 device path when "device" is absent.
    serial_dev = config[DOMAIN].get(CONF_DEVICE, "/dev/ttyUSB0")
    ENOCEAN_DONGLE = EnOceanDongle(hass, serial_dev)
    return True
class EnOceanDongle:
    """Representation of an EnOcean dongle.

    Owns the serial connection to the transceiver, keeps the list of
    registered devices, and dispatches incoming radio packets to them.
    """

    def __init__(self, hass, ser):
        """Open the serial communicator on `ser` (e.g. /dev/ttyUSB0).

        The communicator starts its own reader thread and invokes
        self.callback for every received packet. `hass` is accepted but
        not stored.
        """
        from enocean.communicators.serialcommunicator import SerialCommunicator
        self.__communicator = SerialCommunicator(port=ser,
                                                 callback=self.callback)
        self.__communicator.start()
        self.__devices = []

    def register_device(self, dev):
        """Register another device to receive dispatched packet events."""
        self.__devices.append(dev)

    def send_command(self, command):
        """Send a command (an enocean Packet) out through the dongle."""
        self.__communicator.send(command)

    def _combine_hex(self, data):  # pylint: disable=no-self-use
        """Combine a list of byte values into one integer, big-endian.

        E.g. [0x01, 0x02] -> 0x0102.
        """
        output = 0x00
        for i, j in enumerate(reversed(data)):
            output |= (j << i * 8)
        return output

    # pylint: disable=too-many-branches
    def callback(self, temp):
        """Callback function for an incoming EnOcean packet.

        Called by python-enocean whenever a packet arrives; classifies the
        telegram and fans the decoded value out to matching devices.

        NOTE(review): the data[] byte offsets below appear to follow
        specific EnOcean Equipment Profiles (EEP) — verify against the
        profiles of the supported hardware.
        """
        from enocean.protocol.packet import RadioPacket
        if isinstance(temp, RadioPacket):
            rxtype = None
            value = None
            if temp.data[6] == 0x30:
                rxtype = "wallswitch"
                value = 1
            elif temp.data[6] == 0x20:
                rxtype = "wallswitch"
                value = 0
            elif temp.data[4] == 0x0c:
                rxtype = "power"
                # 16-bit power reading: data[2] is the high byte.
                value = temp.data[3] + (temp.data[2] << 8)
            elif temp.data[2] == 0x60:
                rxtype = "switch_status"
                if temp.data[3] == 0xe4:
                    value = 1
                elif temp.data[3] == 0x80:
                    value = 0
            elif temp.data[0] == 0xa5 and temp.data[1] == 0x02:
                rxtype = "dimmerstatus"
                value = temp.data[2]
            # Dispatch to every registered device whose type and sender id
            # match the decoded event.
            for device in self.__devices:
                if rxtype == "wallswitch" and device.stype == "listener":
                    if temp.sender == self._combine_hex(device.dev_id):
                        device.value_changed(value, temp.data[1])
                if rxtype == "power" and device.stype == "powersensor":
                    if temp.sender == self._combine_hex(device.dev_id):
                        device.value_changed(value)
                if rxtype == "power" and device.stype == "switch":
                    if temp.sender == self._combine_hex(device.dev_id):
                        # A switch drawing more than 10 W is considered on.
                        if value > 10:
                            device.value_changed(1)
                if rxtype == "switch_status" and device.stype == "switch":
                    if temp.sender == self._combine_hex(device.dev_id):
                        device.value_changed(value)
                if rxtype == "dimmerstatus" and device.stype == "dimmer":
                    if temp.sender == self._combine_hex(device.dev_id):
                        device.value_changed(value)
# pylint: disable=too-few-public-methods
class EnOceanDevice():
    """Parent class for all devices associated with the EnOcean component."""

    def __init__(self):
        """Register this device with the module-level dongle.

        Subclasses are expected to set `stype` to their device type string.
        NOTE(review): EnOceanDongle.callback matches devices on `dev_id`,
        but this base class only defines `sensorid` — confirm subclasses
        provide `dev_id`.
        """
        ENOCEAN_DONGLE.register_device(self)
        self.stype = ""
        # 4-byte device/sender id, most significant byte first.
        self.sensorid = [0x00, 0x00, 0x00, 0x00]

    # pylint: disable=no-self-use
    def send_command(self, data, optional, packet_type):
        """Build an enocean Packet and send it via the dongle."""
        from enocean.protocol.packet import Packet
        packet = Packet(packet_type, data=data, optional=optional)
        ENOCEAN_DONGLE.send_command(packet)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import time
from resource_management.core.logger import Logger
from resource_management.core.resources.system import Execute, File
from resource_management.libraries.resources.hdfs_resource import HdfsResource
import metron_service
# Wrap major operations and functionality in this class
class EnrichmentCommands:
    """Operational commands for the Metron enrichment topology, invoked by
    the Ambari service scripts (repo setup, Kafka/HDFS/HBase provisioning,
    and Storm topology start/stop/restart)."""

    __params = None
    __enrichment_topology = None
    __enrichment_topic = None
    __configured = False

    def __init__(self, params):
        """Store the Ambari params object and derive topology/topic names.

        :param params: Ambari-generated params object; required.
        :raises ValueError: if params is None.
        """
        if params is None:
            raise ValueError("params argument is required for initialization")
        self.__params = params
        self.__enrichment_topology = params.metron_enrichment_topology
        self.__enrichment_topic = params.metron_enrichment_topic
        # A flag file on disk records that one-time configuration completed.
        self.__configured = os.path.isfile(self.__params.enrichment_configured_flag_file)

    def is_configured(self):
        """Return True if the configuration flag file existed at init time."""
        return self.__configured

    def set_configured(self):
        """Create the (empty) flag file marking configuration as complete."""
        # 0o775 (was 0775): the bare-0 octal form is Python-2-only syntax;
        # 0o775 is accepted by Python 2.6+ and Python 3.
        File(self.__params.enrichment_configured_flag_file,
             content="",
             owner=self.__params.metron_user,
             mode=0o775)

    def setup_repo(self):
        """Configure the yum repository (local or remote) providing Metron.

        :raises ValueError: on an unsupported yum_repo_type.
        """
        def local_repo():
            # Turn /localrepo into a yum repository readable by all users.
            Logger.info("Setting up local repo")
            Execute("yum -y install createrepo")
            Execute("createrepo /localrepo")
            Execute("chmod -R o-w+r /localrepo")

        def remote_repo():
            Logger.info('Using remote repo')

        yum_repo_types = {
            'local': local_repo,
            'remote': remote_repo
        }
        repo_type = self.__params.yum_repo_type
        if repo_type in yum_repo_types:
            yum_repo_types[repo_type]()
            # Both repo types get a metron.repo file pointing at repo_url.
            Logger.info("Writing out repo file")
            repo_template = ("echo \"[METRON-0.3.0]\n"
                             "name=Metron 0.3.0 packages\n"
                             "baseurl={0}\n"
                             "gpgcheck=0\n"
                             "enabled=1\n\""
                             " > /etc/yum.repos.d/metron.repo")
            Execute(repo_template.format(self.__params.repo_url))
        else:
            raise ValueError("Unsupported repo type '{0}'".format(repo_type))

    def init_kafka_topics(self):
        """Create the enrichment Kafka topic with size-based retention."""
        Logger.info('Creating Kafka topics')
        command_template = """{0}/kafka-topics.sh \
            --zookeeper {1} \
            --create \
            --topic {2} \
            --partitions {3} \
            --replication-factor {4} \
            --config retention.bytes={5}"""
        num_partitions = 1
        replication_factor = 1
        # Retention is configured in gigabytes; Kafka expects bytes.
        retention_gigabytes = int(self.__params.metron_topic_retention)
        retention_bytes = retention_gigabytes * 1024 * 1024 * 1024
        Logger.info("Creating topics for enrichment")
        # Fixed log message: was "Creating topic'{0}'" (missing space).
        Logger.info("Creating topic '{0}'".format(self.__enrichment_topic))
        Execute(command_template.format(self.__params.kafka_bin_dir,
                                        self.__params.zookeeper_quorum,
                                        self.__enrichment_topic,
                                        num_partitions,
                                        replication_factor,
                                        retention_bytes))
        Logger.info("Done creating Kafka topics")

    def init_hdfs_dir(self):
        """Create the enrichment HDFS directory owned by the Metron user."""
        self.__params.HdfsResource(self.__params.metron_apps_enrichment_dir,
                                   type="directory",
                                   action="create_on_execute",
                                   owner=self.__params.metron_user,
                                   group=self.__params.user_group,
                                   mode=0o775,  # was 0775 (Python-2-only octal)
                                   )

    def start_enrichment_topology(self):
        """Submit the enrichment topology to Storm."""
        Logger.info("Starting Metron enrichment topology: {0}".format(self.__enrichment_topology))
        start_cmd_template = """{0}/bin/start_enrichment_topology.sh \
            -s {1} \
            -z {2}"""
        Logger.info('Starting ' + self.__enrichment_topology)
        Execute(start_cmd_template.format(self.__params.metron_home, self.__enrichment_topology, self.__params.zookeeper_quorum))
        Logger.info('Finished starting enrichment topology')

    def stop_enrichment_topology(self):
        """Kill the running enrichment topology via `storm kill`."""
        Logger.info('Stopping ' + self.__enrichment_topology)
        stop_cmd = 'storm kill ' + self.__enrichment_topology
        Execute(stop_cmd)
        Logger.info('Done stopping enrichment topologies')

    def restart_enrichment_topology(self, env):
        """Stop the topology, wait for Storm to clean it up, then restart.

        Storm keeps a killed topology around briefly, so poll up to 3 times
        (40s apart) before giving up and skipping the start.
        """
        Logger.info('Restarting the enrichment topologies')
        self.stop_enrichment_topology()
        # Wait for old topology to be cleaned up by Storm, before starting again.
        retries = 0
        topology_active = self.is_topology_active(env)
        while topology_active and retries < 3:
            Logger.info('Existing topology still active. Will wait and retry')
            time.sleep(40)
            topology_active = self.is_topology_active(env)
            retries += 1
        if not topology_active:
            self.start_enrichment_topology()
            Logger.info('Done restarting the enrichment topology')
        else:
            Logger.warning('Retries exhausted. Existing topology not cleaned up. Aborting topology start.')

    def is_topology_active(self, env):
        """Return True if the enrichment topology is ACTIVE or REBALANCING."""
        env.set_params(self.__params)
        active = True
        topologies = metron_service.get_running_topologies()
        is_running = False
        if self.__enrichment_topology in topologies:
            is_running = topologies[self.__enrichment_topology] in ['ACTIVE', 'REBALANCING']
        active &= is_running
        return active

    def create_hbase_tables(self):
        """Create the enrichment and threat-intel HBase tables.

        Relies on `hbase shell` tolerating an already-existing table, and
        retries to ride out transient HBase unavailability.
        """
        add_enrichment_cmd = "echo \"create '{0}','{1}'\" | hbase shell -n".format(self.__params.enrichment_table, self.__params.enrichment_cf)
        Execute(add_enrichment_cmd,
                tries=3,
                try_sleep=5,
                logoutput=False,
                path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
                )
        add_threatintel_cmd = "echo \"create '{0}','{1}'\" | hbase shell -n".format(self.__params.threatintel_table, self.__params.threatintel_cf)
        Execute(add_threatintel_cmd,
                tries=3,
                try_sleep=5,
                logoutput=False,
                path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
                )
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package tensorflow
import (
"bytes"
"fmt"
"io"
"reflect"
"runtime"
"testing"
)
// TestNewTensor checks that NewTensor round-trips supported Go values
// (scalars, strings, nested slices/arrays) with the expected shape, and
// rejects unsupported types (structs, pointers, native ints).
func TestNewTensor(t *testing.T) {
	var tests = []struct {
		shape []int64
		value interface{}
	}{
		{nil, bool(true)},
		{nil, int8(5)},
		{nil, int16(5)},
		{nil, int32(5)},
		{nil, int64(5)},
		{nil, uint8(5)},
		{nil, uint16(5)},
		{nil, uint32(5)},
		{nil, uint64(5)},
		{nil, float32(5)},
		{nil, float64(5)},
		{nil, complex(float32(5), float32(6))},
		{nil, complex(float64(5), float64(6))},
		{nil, "a string"},
		{[]int64{1}, []uint32{1}},
		{[]int64{1}, []uint64{1}},
		{[]int64{2}, []bool{true, false}},
		{[]int64{1}, []float64{1}},
		{[]int64{1}, [1]float64{1}},
		{[]int64{1, 1}, [1][1]float64{{1}}},
		{[]int64{1, 1, 1}, [1][1][]float64{{{1}}}},
		{[]int64{1, 1, 2}, [1][][2]float64{{{1, 2}}}},
		{[]int64{1, 1, 1, 1}, [1][][1][]float64{{{{1}}}}},
		{[]int64{2}, []string{"string", "slice"}},
		{[]int64{2}, [2]string{"string", "array"}},
		{[]int64{3, 2}, [][]float64{{1, 2}, {3, 4}, {5, 6}}},
		{[]int64{2, 3}, [2][3]float64{{1, 2, 3}, {3, 4, 6}}},
		{[]int64{4, 3, 2}, [][][]float64{
			{{1, 2}, {3, 4}, {5, 6}},
			{{7, 8}, {9, 10}, {11, 12}},
			{{0, -1}, {-2, -3}, {-4, -5}},
			{{-6, -7}, {-8, -9}, {-10, -11}},
		}},
		{[]int64{2, 0}, [][]int64{{}, {}}},
		{[]int64{2, 2}, [][]string{{"row0col0", "row0,col1"}, {"row1col0", "row1,col1"}}},
		{[]int64{2, 3}, [2][3]string{
			{"row0col0", "row0,col1", "row0,col2"},
			{"row1col0", "row1,col1", "row1,col2"},
		}},
	}
	var errorTests = []interface{}{
		struct{ a int }{5},
		new(int32),
		new([]int32),
		// native ints not supported
		int(5),
		[]int{5},
	}
	for _, test := range tests {
		tensor, err := NewTensor(test.value)
		if err != nil {
			t.Errorf("NewTensor(%v): %v", test.value, err)
			continue
		}
		if !reflect.DeepEqual(test.shape, tensor.Shape()) {
			t.Errorf("Tensor.Shape(): got %v, want %v", tensor.Shape(), test.shape)
		}
		// Test that encode and decode gives the same value. We skip arrays because
		// they're returned as slices.
		if reflect.TypeOf(test.value).Kind() != reflect.Array {
			if !reflect.DeepEqual(test.value, tensor.Value()) {
				t.Errorf("encode/decode: got %v, want %v", tensor.Value(), test.value)
			}
		}
	}
	for _, test := range errorTests {
		tensor, err := NewTensor(test)
		if err == nil {
			// Previously this printed the (nil) error, which made the
			// failure message read like a success.
			t.Errorf("NewTensor(%v): expected error, got nil", test)
		}
		if tensor != nil {
			t.Errorf("NewTensor(%v) = %v, want nil", test, tensor)
		}
	}
}
// TestNewTensorValidateDimensions checks that NewTensor rejects ragged
// (mismatched-dimension) nested slices before allocating anything in cgo.
func TestNewTensorValidateDimensions(t *testing.T) {
	var errorTests = []interface{}{
		// Mismatched dimensions
		[][]float32{{1, 2, 3}, {4}},
		// Mismatched dimensions. Should return "mismatched slice lengths" error instead of "BUG"
		[][][]float32{{{1, 2}, {3, 4}}, {{1}, {3}}},
		// Mismatched dimensions. Should return error instead of valid tensor
		[][][]float32{{{1, 2}, {3, 4}}, {{1}, {3}}, {{1, 2, 3}, {2, 3, 4}}},
		// Mismatched dimensions for strings
		[][]string{{"abc"}, {"abcd", "abcd"}},
	}
	// Test that an error is returned in response to mismatched dimensions
	// and that no tensor is returned. Dimensions should be checked and a
	// mismatch caught in NewTensor prior to actually allocating a new
	// tensor in cgo. Given how string tensors are encoded and how tensors
	// are freed, a mismatch caught partway through encoding a string
	// tensor may result in a segfault, once the finalizer is called. A
	// single run of this test is not reliable at producing a segfault,
	// hence iteration. See github.com/tensorflow/tensorflow/pull/52257
	// for some detail on the issue.
	for i := 0; i < 1e5; i++ {
		for _, test := range errorTests {
			tensor, err := NewTensor(test)
			if err == nil {
				// Previously printed the (nil) error; say what was expected.
				t.Errorf("NewTensor(%v): expected error, got nil", test)
			}
			if tensor != nil {
				t.Errorf("NewTensor(%v) = %v, want nil", test, tensor)
			}
		}
	}
	// Execute any finalizers (blocking).
	runtime.GC()
}
func TestTensorSerialization(t *testing.T) {
var tests = []interface{}{
bool(true),
int8(5),
int16(5),
int32(5),
int64(5),
uint8(5),
uint16(5),
float32(5),
float64(5),
complex(float32(5), float32(6)),
complex(float64(5), float64(6)),
[]float64{1},
[][]float32{{1, 2}, {3, 4}, {5, 6}},
[][][]int8{
{{1, 2}, {3, 4}, {5, 6}},
{{7, 8}, {9, 10}, {11, 12}},
{{0, -1}, {-2, -3}, {-4, -5}},
{{-6, -7}, {-8, -9}, {-10, -11}},
},
[]bool{true, false, true},
}
for _, v := range tests {
t1, err := NewTensor(v)
if err != nil {
t.Errorf("(%v): %v", v, err)
continue
}
buf := new(bytes.Buffer)
n, err := t1.WriteContentsTo(buf)
if err != nil {
t.Errorf("(%v): %v", v, err)
continue
}
if n != int64(buf.Len()) {
t.Errorf("(%v): WriteContentsTo said it wrote %v bytes, but wrote %v", v, n, buf.Len())
}
t2, err := ReadTensor(t1.DataType(), t1.Shape(), buf)
if err != nil {
t.Errorf("(%v): %v", v, err)
continue
}
if buf.Len() != 0 {
t.Errorf("(%v): %v bytes written by WriteContentsTo not read by ReadTensor", v, buf.Len())
}
if got, want := t2.DataType(), t1.DataType(); got != want {
t.Errorf("(%v): Got %v, want %v", v, got, want)
}
if got, want := t2.Shape(), t1.Shape(); !reflect.DeepEqual(got, want) {
t.Errorf("(%v): Got %v, want %v", v, got, want)
}
if got, want := t2.Value(), v; !reflect.DeepEqual(got, want) {
t.Errorf("(%v): Got %v, want %v", v, got, want)
}
}
}
func TestReadTensorDoesNotReadBeyondContent(t *testing.T) {
t1, _ := NewTensor(int8(7))
t2, _ := NewTensor(float32(2.718))
buf := new(bytes.Buffer)
if _, err := t1.WriteContentsTo(buf); err != nil {
t.Fatal(err)
}
if _, err := t2.WriteContentsTo(buf); err != nil {
t.Fatal(err)
}
t3, err := ReadTensor(t1.DataType(), t1.Shape(), buf)
if err != nil {
t.Fatal(err)
}
t4, err := ReadTensor(t2.DataType(), t2.Shape(), buf)
if err != nil {
t.Fatal(err)
}
if v, ok := t3.Value().(int8); !ok || v != 7 {
t.Errorf("Got (%v (%T), %v), want (7 (int8), true)", v, v, ok)
}
if v, ok := t4.Value().(float32); !ok || v != 2.718 {
t.Errorf("Got (%v (%T), %v), want (2.718 (float32), true)", v, v, ok)
}
}
func TestTensorSerializationErrors(t *testing.T) {
// String tensors cannot be serialized
t1, err := NewTensor("abcd")
if err != nil {
t.Fatal(err)
}
buf := new(bytes.Buffer)
if n, err := t1.WriteContentsTo(buf); n != 0 || err == nil || buf.Len() != 0 {
t.Errorf("Got (%v, %v, %v) want (0, <non-nil>, 0)", n, err, buf.Len())
}
// Should fail to read a truncated value.
if t1, err = NewTensor(int8(8)); err != nil {
t.Fatal(err)
}
n, err := t1.WriteContentsTo(buf)
if err != nil {
t.Fatal(err)
}
r := bytes.NewReader(buf.Bytes()[:n-1])
if _, err = ReadTensor(t1.DataType(), t1.Shape(), r); err == nil {
t.Error("ReadTensor should have failed if the tensor content was truncated")
}
}
func TestReadTensorReadAll(t *testing.T) {
// Get the bytes of a tensor.
a := []float32{1.1, 1.2, 1.3}
ats, err := NewTensor(a)
if err != nil {
t.Fatal(err)
}
abuf := new(bytes.Buffer)
if _, err := ats.WriteContentsTo(abuf); err != nil {
t.Fatal(err)
}
// Get the bytes of another tensor.
b := []float32{1.1, 1.2, 1.3}
bts, err := NewTensor(b)
if err != nil {
t.Fatal(err)
}
bbuf := new(bytes.Buffer)
if _, err := bts.WriteContentsTo(bbuf); err != nil {
t.Fatal(err)
}
// Check that ReadTensor reads all bytes of both tensors, when the situation
// requires one than reads.
abbuf := io.MultiReader(abuf, bbuf)
abts, err := ReadTensor(Float, []int64{2, 3}, abbuf)
if err != nil {
t.Fatal(err)
}
abtsf32 := abts.Value().([][]float32)
expected := [][]float32{a, b}
if len(abtsf32) != 2 {
t.Fatalf("first dimension %d is not 2", len(abtsf32))
}
for i := 0; i < 2; i++ {
if len(abtsf32[i]) != 3 {
t.Fatalf("second dimension %d is not 3", len(abtsf32[i]))
}
for j := 0; j < 3; j++ {
if abtsf32[i][j] != expected[i][j] {
t.Errorf("value at %d %d not equal %f %f", i, j, abtsf32[i][j], expected[i][j])
}
}
}
}
// TestReadTensorNegativeDimention verifies that ReadTensor rejects shapes
// containing negative dimensions instead of attempting an allocation.
// (The name's "Dimention" spelling is kept to preserve the test identity.)
func TestReadTensorNegativeDimention(t *testing.T) {
	buf := new(bytes.Buffer)
	_, err := ReadTensor(Int32, []int64{-1, 1}, buf)
	if err == nil {
		// Fixed message grammar: was "should failed ... negative dimention".
		t.Fatal("ReadTensor should have failed when the shape contains a negative dimension")
	}
}
// benchmarkNewTensor measures the time and allocation cost of converting a
// Go value v into a Tensor via NewTensor.
func benchmarkNewTensor(b *testing.B, v interface{}) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		if t, err := NewTensor(v); err != nil || t == nil {
			b.Fatalf("(%v, %v)", t, err)
		}
	}
}
// benchmarkValueTensor measures the cost of decoding a Tensor back into a
// Go value via Value(). The one-time NewTensor conversion is excluded from
// the timing by ResetTimer.
func benchmarkValueTensor(b *testing.B, v interface{}) {
	t, err := NewTensor(v)
	if err != nil {
		b.Fatalf("(%v, %v)", t, err)
	}
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_ = t.Value()
	}
}
func BenchmarkTensor(b *testing.B) {
// Some sample sizes from the Inception image labeling model.
// Where input tensors correspond to a 224x224 RGB image
// flattened into a vector.
var vector [224 * 224 * 3]int32
var arrays [100][100][100]int32
l3 := make([][][]float32, 100)
l2 := make([][]float32, 100*100)
l1 := make([]float32, 100*100*100)
for i := range l2 {
l2[i] = l1[i*100 : (i+1)*100]
}
for i := range l3 {
l3[i] = l2[i*100 : (i+1)*100]
}
s1 := make([]string, 100*100*100)
s2 := make([][]string, 100*100)
s3 := make([][][]string, 100)
for i := range s1 {
s1[i] = "cheesit"
}
for i := range s2 {
s2[i] = s1[i*100 : (i+1)*100]
}
for i := range s3 {
s3[i] = s2[i*100 : (i+1)*100]
}
tests := []interface{}{
vector,
arrays,
l1,
l2,
l3,
s1,
s2,
s3,
}
b.Run("New", func(b *testing.B) {
for _, test := range tests {
b.Run(fmt.Sprintf("%T", test), func(b *testing.B) { benchmarkNewTensor(b, test) })
}
})
b.Run("Value", func(b *testing.B) {
for _, test := range tests {
b.Run(fmt.Sprintf("%T", test), func(b *testing.B) { benchmarkValueTensor(b, test) })
}
})
}
// TestReshape verifies that Reshape changes a tensor's shape in place:
// a length-2 vector becomes a 1x2 matrix (same element count).
func TestReshape(t *testing.T) {
	tensor, err := NewTensor([]int64{1, 2})
	if err != nil {
		t.Fatalf("Unable to create new tensor: %v", err)
	}
	// Before the reshape: rank 1, shape [2].
	if got, want := len(tensor.Shape()), 1; got != want {
		t.Fatalf("len(tensor.Shape()): got %d, want %d", got, want)
	}
	if got, want := tensor.Shape()[0], int64(2); got != want {
		t.Errorf("tensor.Shape()[0]: got %d, want %d", got, want)
	}
	if err := tensor.Reshape([]int64{1, 2}); err != nil {
		t.Fatalf("tensor.Reshape([1, 2]) failed: %v", err)
	}
	// After the reshape: rank 2, shape [1 2].
	if got, want := len(tensor.Shape()), 2; got != want {
		t.Fatalf("After reshape, len(tensor.Shape()): got %d, want %d", got, want)
	}
	if got, want := tensor.Shape()[0], int64(1); got != want {
		t.Errorf("After reshape, tensor.Shape()[0]: got %d, want %d", got, want)
	}
	if got, want := tensor.Shape()[1], int64(2); got != want {
		t.Errorf("After reshape, tensor.Shape()[1]: got %d, want %d", got, want)
	}
}
|
go
|
github
|
https://github.com/tensorflow/tensorflow
|
tensorflow/go/tensor_test.go
|
// Compiler fixture: `x` is only ever assigned constants, but WHICH constant
// depends on the loop counter `i`, whose final value is controlled by the
// reactive `props.test` through the do..while condition. The do..while shape
// is deliberate and must not be rewritten — it is what this fixture tests.
function Component(props) {
  let x;
  let i = 0;
  do {
    if (i > 10) {
      x = 10;
    } else {
      x = 1;
    }
    i++;
  } while (i < props.test);
  // The values assigned to `x` are non-reactive, but the value of `x`
  // depends on the "control" variable `i`, whose value is affected by
  // `props.test` which is reactive.
  // Therefore x should be treated as reactive too.
  return [x];
}
// Alternate prop values across renders so the harness can catch incorrect
// memoization: if `x` were wrongly treated as non-reactive, changing `test`
// would not yield a fresh `[x]` result.
export const FIXTURE_ENTRYPOINT = {
  fn: Component,
  params: [],
  sequentialRenders: [
    {test: 12},
    {test: 12},
    {test: 1},
    {test: 1},
    {test: 12},
    {test: 1},
    {test: 12},
    {test: 1},
  ],
};
|
javascript
|
github
|
https://github.com/facebook/react
|
compiler/packages/babel-plugin-react-compiler/src/__tests__/fixtures/compiler/reactive-control-dependency-do-while-test.js
|
#!/usr/bin/env python
"""
Install.py tool to download, unpack, build, and link to the MS-CG library
used to automate the steps described in the README file in this dir
"""
from __future__ import print_function
import sys, os, subprocess, shutil, tarfile
from argparse import ArgumentParser
# the shared download/path helpers live one directory up, in lib/
sys.path.append('..')
from install_helpers import fullpath, geturl
parser = ArgumentParser(prog='Install.py',
                        description="LAMMPS library build wrapper script")
# settings: default MS-CG version and Makefile machine suffix
version = "1.7.3.1"
machine = "g++_simple"
# help message
HELP = """
Syntax from src dir: make lib-mscg args="-p [path] -m [suffix] -v [version]"
                 or: make lib-mscg args="-b -m [suffix]"
Syntax from lib dir: python Install.py -p [path] -m [suffix] -v [version]
Syntax from lib dir: python Install.py -b -m [suffix]
Example:
make lib-mscg args="-b -m serial " # download/build in lib/mscg/MSCG-release with settings compatible with "make serial"
make lib-mscg args="-b -m mpi " # download/build in lib/mscg/MSCG-release with settings compatible with "make mpi"
make lib-mscg args="-p /usr/local/mscg-release " # use existing MS-CG installation in /usr/local/mscg-release
"""
# known checksums for different MSCG versions. used to validate the download.
# NOTE(review): the checksum is declared but never verified below — confirm
# whether validation was intentionally dropped.
checksums = { \
  '1.7.3.1' : '8c45e269ee13f60b303edd7823866a91', \
  }
# parse and process arguments
# -b (build from download) and -p (use existing install) are mutually exclusive
pgroup = parser.add_mutually_exclusive_group()
pgroup.add_argument("-b", "--build", action="store_true",
                    help="download and build the MSCG library")
pgroup.add_argument("-p", "--path",
                    help="specify folder of existing MSCG installation")
parser.add_argument("-v", "--version", default=version, choices=checksums.keys(),
                    help="set version of MSCG to download and build (default: %s)" % version)
parser.add_argument("-m", "--machine", default=machine, choices=['mpi', 'serial', 'g++_simple', 'intel_simple', 'lapack', 'mac'],
                    help="set machine suffix specifies which src/Make/Makefile.suffix to use. (default: %s)" % machine)
args = parser.parse_args()
# print help message and exit, if neither build nor path options are given
if not args.build and not args.path:
  parser.print_help()
  sys.exit(HELP)
buildflag = args.build
pathflag = args.path is not None
mscgpath = args.path
msuffix = args.machine
mscgver = args.version
# settings: derived download URL and local directory names
url = "https://github.com/uchicago-voth/MSCG-release/archive/%s.tar.gz" % mscgver
tarname = "MS-CG-%s.tar.gz" % mscgver
tardir = "MSCG-release-%s" % mscgver
homepath = fullpath('.')
homedir = os.path.join(homepath, tardir)
if pathflag:
  # -p mode: reuse an existing MS-CG tree instead of downloading
  if not os.path.isdir(mscgpath):
    sys.exit("MS-CG path %s does not exist" % mscgpath)
  homedir = fullpath(mscgpath)
# download and unpack MS-CG tarfile
if buildflag:
  print("Downloading MS-CG ...")
  # store the archive inside the lib dir so the relative paths below work
  tarname = os.path.join(homepath, tarname)
  geturl(url, tarname)
  print("Unpacking MS-CG tarfile ...")
  # remove any stale unpacked tree left over from a previous run
  if os.path.exists(os.path.join(homepath, tardir)):
    shutil.rmtree(os.path.join(homepath, tardir))
  if tarfile.is_tarfile(tarname):
    tgz = tarfile.open(tarname)
    try:
      tgz.extractall(path=homepath)
    finally:
      # always release the archive handle, even if extraction fails
      tgz.close()
    os.remove(tarname)
  else:
    # bug fix: sys.exit() takes a single message argument, so the file name
    # must be interpolated with %-formatting, not passed as a second argument
    sys.exit("File %s is not a supported archive" % tarname)
  # if the archive unpacked under a different name, move it into place
  if os.path.basename(homedir) != tardir:
    if os.path.exists(homedir):
      shutil.rmtree(homedir)
    os.rename(os.path.join(homepath, tardir), homedir)
# build MS-CG
if buildflag:
  print("Building MS-CG ...")
  mkf = "Makefile.%s" % msuffix
  mkp = os.path.join(homedir, 'src', 'Make', mkf)
  # prefer the machine Makefile shipped with MS-CG, fall back to a local one
  if os.path.exists(mkp):
    shutil.copyfile(mkp, os.path.join(homedir, 'src', mkf))
  elif os.path.exists("Makefile.%s" % msuffix):
    shutil.copyfile("Makefile.%s" % msuffix, os.path.join(homedir, 'src', mkf))
  else:
    sys.exit("Cannot find Makefile.%s" % msuffix)
  try:
    cmd = 'make -C %s -f Makefile.%s' % (os.path.join(homedir, 'src'), msuffix)
    txt = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
    print(txt.decode('UTF-8'))
  except subprocess.CalledProcessError as e:
    # surface the compiler output so build failures are diagnosable
    print("Make failed with:\n %s" % e.output.decode('UTF-8'))
    sys.exit(1)
  # install a Makefile.lammps unless the user already provides one
  if not os.path.exists("Makefile.lammps"):
    print("Creating Makefile.lammps")
    if os.path.exists("Makefile.lammps.%s" % msuffix):
      shutil.copyfile('Makefile.lammps.%s' % msuffix, 'Makefile.lammps')
    else:
      shutil.copyfile('Makefile.lammps.default', 'Makefile.lammps')
  else: print("Makefile.lammps exists. Please check its settings")
# create 2 links in lib/mscg to MS-CG src dir
# (headers and the built library both live in src/, hence two links to it)
print("Creating links to MS-CG include and lib files")
if os.path.isfile("includelink") or os.path.islink("includelink"):
  os.remove("includelink")
if os.path.isfile("liblink") or os.path.islink("liblink"):
  os.remove("liblink")
os.symlink(os.path.join(homedir, 'src'), 'includelink')
os.symlink(os.path.join(homedir, 'src'), 'liblink')
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* jutils.c
*
* Copyright (C) 1991-1996, Thomas G. Lane.
* Modified 2009-2020 by Guido Vollbeding.
* This file is part of the Independent JPEG Group's software.
* For conditions of distribution and use, see the accompanying README file.
*
* This file contains tables and miscellaneous utility routines needed
* for both compression and decompression.
* Note we prefix all global names with "j" to minimize conflicts with
* a surrounding application.
*/
#define JPEG_INTERNALS
#include "jinclude.h"
#include "jpeglib.h"
/*
* jpeg_zigzag_order[i] is the zigzag-order position of the i'th element
* of a DCT block read in natural order (left to right, top to bottom).
*/
#if 0 /* This table is not actually needed in v6a */
const int jpeg_zigzag_order[DCTSIZE2] = {
0, 1, 5, 6, 14, 15, 27, 28,
2, 4, 7, 13, 16, 26, 29, 42,
3, 8, 12, 17, 25, 30, 41, 43,
9, 11, 18, 24, 31, 40, 44, 53,
10, 19, 23, 32, 39, 45, 52, 54,
20, 22, 33, 38, 46, 51, 55, 60,
21, 34, 37, 47, 50, 56, 59, 61,
35, 36, 48, 49, 57, 58, 62, 63
};
#endif
/*
* jpeg_natural_order[i] is the natural-order position of the i'th element
* of zigzag order.
*
* When reading corrupted data, the Huffman decoders could attempt
* to reference an entry beyond the end of this array (if the decoded
* zero run length reaches past the end of the block). To prevent
* wild stores without adding an inner-loop test, we put some extra
* "63"s after the real entries. This will cause the extra coefficient
* to be stored in location 63 of the block, not somewhere random.
* The worst case would be a run-length of 15, which means we need 16
* fake entries.
*/
const int jpeg_natural_order[DCTSIZE2+16] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
27, 20, 13, 6, 7, 14, 21, 28,
35, 42, 49, 56, 57, 50, 43, 36,
29, 22, 15, 23, 30, 37, 44, 51,
58, 59, 52, 45, 38, 31, 39, 46,
53, 60, 61, 54, 47, 55, 62, 63,
63, 63, 63, 63, 63, 63, 63, 63, /* extra entries for safety in decoder */
63, 63, 63, 63, 63, 63, 63, 63
};
const int jpeg_natural_order7[7*7+16] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
27, 20, 13, 6, 14, 21, 28, 35,
42, 49, 50, 43, 36, 29, 22, 30,
37, 44, 51, 52, 45, 38, 46, 53,
54,
63, 63, 63, 63, 63, 63, 63, 63, /* extra entries for safety in decoder */
63, 63, 63, 63, 63, 63, 63, 63
};
const int jpeg_natural_order6[6*6+16] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 41, 34, 27,
20, 13, 21, 28, 35, 42, 43, 36,
29, 37, 44, 45,
63, 63, 63, 63, 63, 63, 63, 63, /* extra entries for safety in decoder */
63, 63, 63, 63, 63, 63, 63, 63
};
const int jpeg_natural_order5[5*5+16] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 12,
19, 26, 33, 34, 27, 20, 28, 35,
36,
63, 63, 63, 63, 63, 63, 63, 63, /* extra entries for safety in decoder */
63, 63, 63, 63, 63, 63, 63, 63
};
const int jpeg_natural_order4[4*4+16] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 25, 18, 11, 19, 26, 27,
63, 63, 63, 63, 63, 63, 63, 63, /* extra entries for safety in decoder */
63, 63, 63, 63, 63, 63, 63, 63
};
const int jpeg_natural_order3[3*3+16] = {
0, 1, 8, 16, 9, 2, 10, 17,
18,
63, 63, 63, 63, 63, 63, 63, 63, /* extra entries for safety in decoder */
63, 63, 63, 63, 63, 63, 63, 63
};
const int jpeg_natural_order2[2*2+16] = {
0, 1, 8, 9,
63, 63, 63, 63, 63, 63, 63, 63, /* extra entries for safety in decoder */
63, 63, 63, 63, 63, 63, 63, 63
};
/*
* Arithmetic utilities
*/
GLOBAL(long)
jdiv_round_up (long a, long b)
/* Return the ceiling of a/b, i.e. a/b rounded up to the next integer. */
/* Preconditions: a >= 0, b > 0. */
{
  /* Adding b-1 before the truncating division rounds the quotient up. */
  long rounded = (a + b - 1L) / b;
  return rounded;
}
GLOBAL(long)
jround_up (long a, long b)
/* Round a up to the next multiple of b, i.e. ceil(a/b)*b. */
/* Preconditions: a >= 0, b > 0. */
{
  /* Overshoot by b-1, then strip the remainder to land on a multiple. */
  long overshoot = a + b - 1L;
  return overshoot - (overshoot % b);
}
/* On normal machines we can apply MEMCOPY() and MEMZERO() to sample arrays
* and coefficient-block arrays. This won't work on 80x86 because the arrays
* are FAR and we're assuming a small-pointer memory model. However, some
* DOS compilers provide far-pointer versions of memcpy() and memset() even
* in the small-model libraries. These will be used if USE_FMEM is defined.
* Otherwise, the routines below do it the hard way. (The performance cost
* is not all that great, because these routines aren't very heavily used.)
*/
#ifndef NEED_FAR_POINTERS /* normal case, same as regular macro */
#define FMEMCOPY(dest,src,size) MEMCOPY(dest,src,size)
#else /* 80x86 case, define if we can */
#ifdef USE_FMEM
#define FMEMCOPY(dest,src,size) _fmemcpy((void FAR *)(dest), (const void FAR *)(src), (size_t)(size))
#else
/* This function is for use by the FMEMZERO macro defined in jpegint.h.
 * Do not call this function directly, use the FMEMZERO macro instead.
 * It exists only for 80x86 small-memory-model builds (NEED_FAR_POINTERS)
 * that lack a far-pointer memset (no USE_FMEM).
 */
GLOBAL(void)
jzero_far (void FAR * target, size_t bytestozero)
/* Zero out a chunk of FAR memory. */
/* This might be sample-array data, block-array data, or alloc_large data. */
{
  register char FAR * ptr = (char FAR *) target;
  register size_t count;
  /* Byte-at-a-time clear; memset cannot be used on FAR pointers here. */
  for (count = bytestozero; count > 0; count--) {
    *ptr++ = 0;
  }
}
#endif
#endif
GLOBAL(void)
jcopy_sample_rows (JSAMPARRAY input_array,
                   JSAMPARRAY output_array,
                   int num_rows, JDIMENSION num_cols)
/* Copy some rows of samples from one place to another.
 * num_rows rows are copied from *input_array++ to *output_array++;
 * these areas may overlap for duplication.
 * The source and destination arrays must be at least as wide as num_cols.
 */
{
  register JSAMPROW inptr, outptr;
#ifdef FMEMCOPY
  /* fast path: one memcpy-sized count, computed once outside the loop */
  register size_t count = (size_t) num_cols * SIZEOF(JSAMPLE);
#else
  register JDIMENSION count;
#endif
  register int row;
  for (row = num_rows; row > 0; row--) {
    inptr = *input_array++;
    outptr = *output_array++;
#ifdef FMEMCOPY
    FMEMCOPY(outptr, inptr, count);
#else
    /* portable fallback: copy sample by sample */
    for (count = num_cols; count > 0; count--)
      *outptr++ = *inptr++;	/* needn't bother with GETJSAMPLE() here */
#endif
  }
}
GLOBAL(void)
jcopy_block_row (JBLOCKROW input_row, JBLOCKROW output_row,
                 JDIMENSION num_blocks)
/* Copy a row of coefficient blocks from one place to another.
 * Each block holds DCTSIZE2 JCOEF values; num_blocks blocks are copied.
 */
{
#ifdef FMEMCOPY
  /* fast path: copy the whole row with a single memcpy */
  FMEMCOPY(output_row, input_row, (size_t) num_blocks * (DCTSIZE2 * SIZEOF(JCOEF)));
#else
  /* portable fallback: treat the row as a flat coefficient array */
  register JCOEFPTR inptr, outptr;
  register long count;
  inptr = (JCOEFPTR) input_row;
  outptr = (JCOEFPTR) output_row;
  for (count = (long) num_blocks * DCTSIZE2; count > 0; count--) {
    *outptr++ = *inptr++;
  }
#endif
}
|
c
|
github
|
https://github.com/opencv/opencv
|
3rdparty/libjpeg/jutils.c
|
# -*- coding: utf-8 -*-
import array
import os
# import pycurl
import random
import re
from base64 import standard_b64decode
from Crypto.Cipher import AES
from Crypto.Util import Counter
from module.common.json_layer import json_loads, json_dumps
from module.plugins.internal.Hoster import Hoster
from module.utils import decode, fs_decode, fs_encode
############################ General errors ###################################
# EINTERNAL (-1): An internal error has occurred. Please submit a bug report, detailing the exact circumstances in which this error occurred
# EARGS (-2): You have passed invalid arguments to this command
# EAGAIN (-3): (always at the request level) A temporary congestion or server malfunction prevented your request from being processed. No data was altered. Retry. Retries must be spaced with exponential backoff
# ERATELIMIT (-4): You have exceeded your command weight per time quota. Please wait a few seconds, then try again (this should never happen in sane real-life applications)
#
############################ Upload errors ####################################
# EFAILED (-5): The upload failed. Please restart it from scratch
# ETOOMANY (-6): Too many concurrent IP addresses are accessing this upload target URL
# ERANGE (-7): The upload file packet is out of range or not starting and ending on a chunk boundary
# EEXPIRED (-8): The upload target URL you are trying to access has expired. Please request a fresh one
#
############################ Stream/System errors #############################
# ENOENT (-9): Object (typically, node or user) not found
# ECIRCULAR (-10): Circular linkage attempted
# EACCESS (-11): Access violation (e.g., trying to write to a read-only share)
# EEXIST (-12): Trying to create an object that already exists
# EINCOMPLETE (-13): Trying to access an incomplete resource
# EKEY (-14): A decryption operation failed (never returned by the API)
# ESID (-15): Invalid or expired user session, please relogin
# EBLOCKED (-16): User blocked
# EOVERQUOTA (-17): Request over quota
# ETEMPUNAVAIL (-18): Resource temporarily not available, please try again later
# ETOOMANYCONNECTIONS (-19): Too many connections on this resource
# EWRITE (-20): Write failed
# EREAD (-21): Read failed
# EAPPKEY (-22): Invalid application key; request not processed
class MegaCoNz(Hoster):
    # NOTE: this is Python 2 code (old except syntax, `long`, `xrange`,
    # str/bytes conflation) for the pyload plugin framework.
    __name__ = "MegaCoNz"
    __type__ = "hoster"
    __version__ = "0.31"
    __status__ = "testing"
    __pattern__ = r'(https?://(?:www\.)?mega(\.co)?\.nz/|mega:|chrome:.+?)#(?P<TYPE>N|)!(?P<ID>[\w^_]+)!(?P<KEY>[\w,-]+)'
    __description__ = """Mega.co.nz hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("RaNaN", "ranan@pyload.org"),
                   ("Walter Purcaro", "vuolter@gmail.com")]
    API_URL = "https://eu.api.mega.co.nz/cs"
    FILE_SUFFIX = ".crypted"
    def b64_decode(self, data):
        """
        Decode Mega's URL-safe base64 variant, re-adding stripped padding.
        """
        data = data.replace("-", "+").replace("_", "/")
        return standard_b64decode(data + '=' * (-len(data) % 4))
    def get_cipher_key(self, key):
        """
        Construct the cipher key from the given data
        """
        # the 256-bit node key packs the 128-bit AES key XORed into the
        # upper half; unpack as 32-bit words and un-XOR
        a = array.array("I", self.b64_decode(key))
        k = array.array("I", (a[0] ^ a[4], a[1] ^ a[5], a[2] ^ a[6], a[3] ^ a[7]))
        iv = a[4:6] + array.array("I", (0, 0))
        meta_mac = a[6:8]
        return k, iv, meta_mac
    def api_response(self, **kwargs):
        """
        Dispatch a call to the api, see https://mega.co.nz/#developers
        """
        #: Generate a session id, no idea where to obtain elsewhere
        uid = random.randint(10 << 9, 10 ** 10)
        # the API expects a JSON list of command dicts in the POST body
        res = self.load(self.API_URL, get={'id': uid}, post=json_dumps([kwargs]))
        self.log_debug("Api Response: " + res)
        return json_loads(res)
    def decrypt_attr(self, data, key):
        """
        Decrypt a node attribute block (AES-CBC) and return it as a dict.
        Fails the download if the plaintext lacks the "MEGA" magic prefix.
        """
        k, iv, meta_mac = self.get_cipher_key(key)
        cbc = AES.new(k, AES.MODE_CBC, "\0" * 16)
        attr = decode(cbc.decrypt(self.b64_decode(data)))
        self.log_debug("Decrypted Attr: %s" % attr)
        if not attr.startswith("MEGA"):
            self.fail(_("Decryption failed"))
        #: Data is padded, 0-bytes must be stripped
        return json_loads(re.search(r'{.+?}', attr).group(0))
    def decrypt_file(self, key):
        """
        Decrypt (AES-CTR) the file at self.last_download in place,
        replacing the .crypted download with the plaintext file.
        """
        #: Upper 64 bit of counter start
        n = self.b64_decode(key)[16:24]
        #: Convert counter to long and shift bytes
        k, iv, meta_mac = self.get_cipher_key(key)
        ctr = Counter.new(128, initial_value=long(n.encode("hex"), 16) << 64)
        cipher = AES.new(k, AES.MODE_CTR, counter=ctr)
        self.pyfile.setStatus("decrypting")
        self.pyfile.setProgress(0)
        file_crypted = fs_encode(self.last_download)
        file_decrypted = file_crypted.rsplit(self.FILE_SUFFIX)[0]
        try:
            f = open(file_crypted, "rb")
            df = open(file_decrypted, "wb")
        except IOError, e:
            self.fail(e)
        chunk_size = 2 ** 15 #: Buffer size, 32k
        # file_mac = [0, 0, 0, 0] # calculate CBC-MAC for checksum
        chunks = os.path.getsize(file_crypted) / chunk_size + 1
        for i in xrange(chunks):
            buf = f.read(chunk_size)
            if not buf:
                break
            chunk = cipher.decrypt(buf)
            df.write(chunk)
            self.pyfile.setProgress(int((100.0 / chunks) * i))
            # chunk_mac = [iv[0], iv[1], iv[0], iv[1]]
            # for i in xrange(0, chunk_size, 16):
            #     block = chunk[i:i+16]
            #     if len(block) % 16:
            #         block += '=' * (16 - (len(block) % 16))
            #     block = array.array("I", block)
            #     chunk_mac = [chunk_mac[0] ^ a_[0], chunk_mac[1] ^ block[1], chunk_mac[2] ^ block[2], chunk_mac[3] ^ block[3]]
            #     chunk_mac = aes_cbc_encrypt_a32(chunk_mac, k)
            # file_mac = [file_mac[0] ^ chunk_mac[0], file_mac[1] ^ chunk_mac[1], file_mac[2] ^ chunk_mac[2], file_mac[3] ^ chunk_mac[3]]
            # file_mac = aes_cbc_encrypt_a32(file_mac, k)
        self.pyfile.setProgress(100)
        f.close()
        df.close()
        # NOTE(review): the meta-MAC integrity check below is disabled, so
        # corrupted downloads are not detected here.
        # if file_mac[0] ^ file_mac[1], file_mac[2] ^ file_mac[3] is not meta_mac:
        #     os.remove(file_decrypted)
        #     self.fail(_("Checksum mismatch"))
        os.remove(file_crypted)
        self.last_download = fs_decode(file_decrypted)
    def check_error(self, code):
        """
        Map a (negative) Mega API error code to the matching plugin action:
        offline, temporarily offline, retry, or hard failure.
        """
        ecode = abs(code)
        if ecode in (9, 16, 21):
            self.offline()
        elif ecode in (3, 13, 17, 18, 19):
            self.temp_offline()
        elif ecode in (1, 4, 6, 10, 15, 21):
            # NOTE(review): 21 also appears in the offline tuple above, so
            # this branch can never see it — confirm which list is intended.
            self.retry(5, 30, _("Error code: [%s]") % -ecode)
        else:
            self.fail(_("Error code: [%s]") % -ecode)
    def process(self, pyfile):
        """
        Main entry point: resolve the download URL via the API, download
        the encrypted file, then decrypt it and restore the real name.
        """
        pattern = re.match(self.__pattern__, pyfile.url).groupdict()
        id = pattern['ID']
        key = pattern['KEY']
        # empty TYPE group means a public file link; "N" means a node link
        public = pattern['TYPE'] == ""
        self.log_debug("ID: %s" % id, "Key: %s" % key, "Type: %s" % ("public" if public else "node"))
        #: G is for requesting a download url
        #: This is similar to the calls in the mega js app, documentation is very bad
        if public:
            mega = self.api_response(a="g", g=1, p=id, ssl=1)[0]
        else:
            mega = self.api_response(a="g", g=1, n=id, ssl=1)[0]
        # a bare integer response is an error code
        if isinstance(mega, int):
            self.check_error(mega)
        elif "e" in mega:
            self.check_error(mega['e'])
        attr = self.decrypt_attr(mega['at'], key)
        pyfile.name = attr['n'] + self.FILE_SUFFIX
        pyfile.size = mega['s']
        # self.req.http.c.setopt(pycurl.SSL_CIPHER_LIST, "RC4-MD5:DEFAULT")
        self.download(mega['g'])
        self.decrypt_file(key)
        #: Everything is finished and final name can be set
        pyfile.name = attr['n']
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutIteration(Koan):
    # Koan exercise file: the `__` placeholders are meant to be filled in
    # by the student; they are runtime names supplied by runner.koan and
    # must not be replaced here.
    def test_iterators_are_a_type(self):
        it = iter(range(1,6))
        fib = 0
        for num in it:
            fib += num
        self.assertEqual(__ , fib)
    def test_iterating_with_next(self):
        stages = iter(['alpha','beta','gamma'])
        try:
            self.assertEqual(__, next(stages))
            next(stages)
            self.assertEqual(__, next(stages))
            next(stages)
        except StopIteration as ex:
            err_msg = 'Ran out of iterations'
            self.assertRegexpMatches(err_msg, __)
    # ------------------------------------------------------------------
    def add_ten(self, item):
        # helper used by the map() koans below
        return item + 10
    def test_map_transforms_elements_of_a_list(self):
        seq = [1, 2, 3]
        mapped_seq = list()
        mapping = map(self.add_ten, seq)
        self.assertNotEqual(list, mapping.__class__)
        self.assertEqual(__, mapping.__class__)
        # In Python 3 built in iterator funcs return iterable view objects
        # instead of lists
        for item in mapping:
            mapped_seq.append(item)
        self.assertEqual(__, mapped_seq)
        # Note, iterator methods actually return objects of iter type in
        # python 3. In python 2 map() would give you a list.
    def test_filter_selects_certain_items_from_a_list(self):
        def is_even(item):
            return (item % 2) == 0
        seq = [1, 2, 3, 4, 5, 6]
        even_numbers = list()
        for item in filter(is_even, seq):
            even_numbers.append(item)
        self.assertEqual(__, even_numbers)
    def test_just_return_first_item_found(self):
        def is_big_name(item):
            return len(item) > 4
        names = ["Jim", "Bill", "Clarence", "Doug", "Eli"]
        name = None
        iterator = filter(is_big_name, names)
        try:
            name = next(iterator)
        except StopIteration:
            msg = 'Ran out of big names'
        self.assertEqual(__, name)
    # ------------------------------------------------------------------
    def add(self,accum,item):
        # reduction helpers used by the functools.reduce koan below
        return accum + item
    def multiply(self,accum,item):
        return accum * item
    def test_reduce_will_blow_your_mind(self):
        import functools
        # As of Python 3 reduce() has been demoted from a builtin function
        # to the functools module.
        result = functools.reduce(self.add, [2, 3, 4])
        self.assertEqual(__, result.__class__)
        # Reduce() syntax is same as Python 2
        self.assertEqual(__, result)
        result2 = functools.reduce(self.multiply, [2, 3, 4], 1)
        self.assertEqual(__, result2)
        # Extra Credit:
        # Describe in your own words what reduce does.
    # ------------------------------------------------------------------
    def test_use_pass_for_iterations_with_no_body(self):
        for num in range(1,5):
            pass
        self.assertEqual(__, num)
    # ------------------------------------------------------------------
    def test_all_iteration_methods_work_on_any_sequence_not_just_lists(self):
        # Ranges are an iterable sequence
        result = map(self.add_ten, range(1,4))
        self.assertEqual(__, list(result))
        try:
            file = open("example_file.txt")
            try:
                def make_upcase(line):
                    return line.strip().upper()
                upcase_lines = map(make_upcase, file.readlines())
                self.assertEqual(__, list(upcase_lines))
            finally:
                # Arg, this is ugly.
                # We will figure out how to fix this later.
                file.close()
        except IOError:
            # should never happen
            self.fail()
|
unknown
|
codeparrot/codeparrot-clean
| ||
{
"compilerOptions": {
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
"noEmit": true,
"esModuleInterop": true,
"module": "esnext",
"moduleResolution": "bundler",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "react-jsx",
"incremental": true,
"plugins": [
{
"name": "next"
}
],
"paths": {
"@/*": ["./*"]
},
"target": "ES2017"
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
}
|
json
|
github
|
https://github.com/vercel/next.js
|
examples/with-mux-video/tsconfig.json
|
"""
Support for monitoring the local system.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.systemmonitor/
"""
import logging
import os
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_RESOURCES, STATE_OFF, STATE_ON, STATE_UNKNOWN, CONF_TYPE)
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
REQUIREMENTS = ['psutil==5.1.3']
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
'disk_free': ['Disk Free', 'GiB', 'mdi:harddisk'],
'disk_use': ['Disk Use', 'GiB', 'mdi:harddisk'],
'disk_use_percent': ['Disk Use', '%', 'mdi:harddisk'],
'ipv4_address': ['IPv4 address', '', 'mdi:server-network'],
'ipv6_address': ['IPv6 address', '', 'mdi:server-network'],
'last_boot': ['Last Boot', '', 'mdi:clock'],
'load_15m': ['Average Load (15m)', '', 'mdi:memory'],
'load_1m': ['Average Load (1m)', '', 'mdi:memory'],
'load_5m': ['Average Load (5m)', '', 'mdi:memory'],
'memory_free': ['RAM Free', 'MiB', 'mdi:memory'],
'memory_use': ['RAM Use', 'MiB', 'mdi:memory'],
'memory_use_percent': ['RAM Use', '%', 'mdi:memory'],
'network_in': ['Received', 'MiB', 'mdi:server-network'],
'network_out': ['Sent', 'MiB', 'mdi:server-network'],
'packets_in': ['Packets received', ' ', 'mdi:server-network'],
'packets_out': ['Packets sent', ' ', 'mdi:server-network'],
'process': ['Process', ' ', 'mdi:memory'],
'processor_use': ['CPU Use', '%', 'mdi:memory'],
'since_last_boot': ['Since Last Boot', '', 'mdi:clock'],
'swap_free': ['Swap Free', 'GiB', 'mdi:harddisk'],
'swap_use': ['Swap Use', 'GiB', 'mdi:harddisk'],
'swap_use_percent': ['Swap Use', '%', 'mdi:harddisk'],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_RESOURCES, default=['disk_use']):
vol.All(cv.ensure_list, [vol.Schema({
vol.Required(CONF_TYPE): vol.In(SENSOR_TYPES),
vol.Optional('arg'): cv.string,
})])
})
IO_COUNTER = {
'network_out': 0,
'network_in': 1,
'packets_out': 2,
'packets_in': 3,
}
IF_ADDRS = {
'ipv4_address': 0,
'ipv6_address': 1,
}
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the system monitor sensors."""
    sensors = []
    for resource in config[CONF_RESOURCES]:
        # Default the optional 'arg' to an empty string when not supplied.
        resource.setdefault('arg', '')
        sensors.append(
            SystemMonitorSensor(resource[CONF_TYPE], resource['arg']))
    add_devices(sensors)
class SystemMonitorSensor(Entity):
    """Implementation of a system monitor sensor."""
    def __init__(self, sensor_type, argument=''):
        """Initialize the sensor.

        sensor_type: one of the SENSOR_TYPES keys.
        argument: type-specific extra (mount point, interface name,
        process name, ...); empty string when not needed.
        """
        self._name = '{} {}'.format(SENSOR_TYPES[sensor_type][0], argument)
        self.argument = argument
        self.type = sensor_type
        self._state = None
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
        # populate the state immediately so the entity is never empty
        self.update()
    @property
    def name(self):
        """Return the name of the sensor."""
        # rstrip drops the trailing space left when argument is empty
        return self._name.rstrip()
    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return SENSOR_TYPES[self.type][2]
    @property
    def state(self):
        """Return the state of the device."""
        return self._state
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement
    def update(self):
        """Get the latest system information."""
        # imported lazily so the dependency is only needed once the
        # platform is actually in use
        import psutil
        if self.type == 'disk_use_percent':
            self._state = psutil.disk_usage(self.argument).percent
        elif self.type == 'disk_use':
            # bytes -> GiB, one decimal
            self._state = round(psutil.disk_usage(self.argument).used /
                                1024**3, 1)
        elif self.type == 'disk_free':
            self._state = round(psutil.disk_usage(self.argument).free /
                                1024**3, 1)
        elif self.type == 'memory_use_percent':
            self._state = psutil.virtual_memory().percent
        elif self.type == 'memory_use':
            # bytes -> MiB; "use" is total minus available
            self._state = round((psutil.virtual_memory().total -
                                 psutil.virtual_memory().available) /
                                1024**2, 1)
        elif self.type == 'memory_free':
            self._state = round(psutil.virtual_memory().available / 1024**2, 1)
        elif self.type == 'swap_use_percent':
            self._state = psutil.swap_memory().percent
        elif self.type == 'swap_use':
            self._state = round(psutil.swap_memory().used / 1024**3, 1)
        elif self.type == 'swap_free':
            self._state = round(psutil.swap_memory().free / 1024**3, 1)
        elif self.type == 'processor_use':
            self._state = round(psutil.cpu_percent(interval=None))
        elif self.type == 'process':
            # ON if any running process name contains the argument
            if any(self.argument in l.name() for l in psutil.process_iter()):
                self._state = STATE_ON
            else:
                self._state = STATE_OFF
        elif self.type == 'network_out' or self.type == 'network_in':
            counters = psutil.net_io_counters(pernic=True)
            if self.argument in counters:
                # IO_COUNTER maps the sensor type to the tuple index
                counter = counters[self.argument][IO_COUNTER[self.type]]
                self._state = round(counter / 1024**2, 1)
            else:
                self._state = STATE_UNKNOWN
        elif self.type == 'packets_out' or self.type == 'packets_in':
            counters = psutil.net_io_counters(pernic=True)
            if self.argument in counters:
                self._state = counters[self.argument][IO_COUNTER[self.type]]
            else:
                self._state = STATE_UNKNOWN
        elif self.type == 'ipv4_address' or self.type == 'ipv6_address':
            addresses = psutil.net_if_addrs()
            if self.argument in addresses:
                # IF_ADDRS selects the address family entry; [1] is the address
                self._state = addresses[self.argument][IF_ADDRS[self.type]][1]
            else:
                self._state = STATE_UNKNOWN
        elif self.type == 'last_boot':
            self._state = dt_util.as_local(
                dt_util.utc_from_timestamp(psutil.boot_time())
            ).date().isoformat()
        elif self.type == 'since_last_boot':
            self._state = dt_util.utcnow() - dt_util.utc_from_timestamp(
                psutil.boot_time())
        elif self.type == 'load_1m':
            self._state = os.getloadavg()[0]
        elif self.type == 'load_5m':
            self._state = os.getloadavg()[1]
        elif self.type == 'load_15m':
            self._state = os.getloadavg()[2]
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api import extensions
from neutron_lib import constants
from neutron_lib.exceptions import agent as agent_exc
from neutron_lib.plugins import constants as plugin_constants
from neutron_lib.plugins import directory
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
import oslo_messaging
from neutron.agent.common import utils as agent_utils
from neutron.conf.db import l3_agentschedulers_db
from neutron.db import agentschedulers_db
from neutron.db.models import l3agent as rb_model
from neutron.extensions import l3agentscheduler
from neutron.extensions import router_availability_zone as router_az
from neutron.objects import agent as ag_obj
from neutron.objects import base as base_obj
from neutron.objects import l3agent as rb_obj
from neutron.objects import router as l3_objs
LOG = logging.getLogger(__name__)
l3_agentschedulers_db.register_db_l3agentschedulers_opts()
class L3AgentSchedulerDbMixin(l3agentscheduler.L3AgentSchedulerPluginBase,
agentschedulers_db.AgentSchedulerDbMixin):
"""Mixin class to add l3 agent scheduler extension to plugins
using the l3 agent for routing.
"""
router_scheduler = None
    def add_periodic_l3_agent_status_check(self):
        """Register the periodic worker that reschedules routers away from
        dead L3 agents, unless automatic failover is disabled in config.
        """
        if not cfg.CONF.allow_automatic_l3agent_failover:
            LOG.info("Skipping period L3 agent status check because "
                     "automatic router rescheduling is disabled.")
            return
        self.add_agent_status_check_worker(
            self.reschedule_routers_from_down_agents)
    def reschedule_routers_from_down_agents(self):
        """Reschedule routers from down l3 agents if admin state is up."""
        # Delegate to the generic agent rescheduling helper, telling it how
        # router/L3-agent bindings are named and which exception to raise
        # when a router cannot be moved.
        self.reschedule_resources_from_down_agents(
            agent_type='L3',
            get_down_bindings=self.get_down_router_bindings,
            agent_id_attr='l3_agent_id',
            resource_id_attr='router_id',
            resource_name='router',
            reschedule_resource=self.reschedule_router,
            rescheduling_failed=l3agentscheduler.RouterReschedulingFailed)
    def get_down_router_bindings(self, context, agent_dead_limit):
        """Return router/agent bindings whose agent heartbeat is older than
        the dead-agent cutoff derived from agent_dead_limit.
        """
        cutoff = self.get_cutoff_time(agent_dead_limit)
        return rb_obj.RouterL3AgentBinding.get_down_router_bindings(
            context, cutoff)
def _get_agent_mode(self, agent_db):
agent_conf = self.get_configuration_dict(agent_db)
return agent_conf.get(constants.L3_AGENT_MODE,
constants.L3_AGENT_MODE_LEGACY)
    def validate_agent_router_combination(self, context, agent, router):
        """Validate if the router can be correctly assigned to the agent.
        :raises: RouterL3AgentMismatch if attempting to assign DVR router
          to legacy agent.
        :raises: InvalidL3Agent if attempting to assign router to an
          unsuitable agent (disabled, type != L3, incompatible configuration)
        :raises: DVRL3CannotAssignToDvrAgent if attempting to assign a
          router to an agent in 'dvr' mode.
        """
        if agent['agent_type'] != constants.AGENT_TYPE_L3:
            raise l3agentscheduler.InvalidL3Agent(id=agent['id'])
        agent_mode = self._get_agent_mode(agent)
        # dvr/dvr_no_external agents only host per-compute router instances
        # and never take explicit assignments
        if agent_mode in [constants.L3_AGENT_MODE_DVR,
                          constants.L3_AGENT_MODE_DVR_NO_EXTERNAL]:
            raise l3agentscheduler.DVRL3CannotAssignToDvrAgent()
        if (agent_mode == constants.L3_AGENT_MODE_LEGACY and
                router.get('distributed')):
            raise l3agentscheduler.RouterL3AgentMismatch(
                router_id=router['id'], agent_id=agent['id'])
        # agent must be enabled (admin_state_up) and a viable candidate for
        # this specific router
        is_suitable_agent = (
            agentschedulers_db.services_available(agent['admin_state_up']) and
            self.get_l3_agent_candidates(context, router,
                                         [agent],
                                         ignore_admin_state=True))
        if not is_suitable_agent:
            raise l3agentscheduler.InvalidL3Agent(id=agent['id'])
    def check_agent_router_scheduling_needed(self, context, agent, router):
        """Check if the router scheduling is needed.
        :raises: RouterHostedByL3Agent if router is already assigned
          to a different agent.
        :returns: True if scheduling is needed, otherwise False
        """
        router_id = router['id']
        agent_id = agent['id']
        bindings = rb_obj.RouterL3AgentBinding.get_objects(context,
                                                           router_id=router_id)
        if not bindings:
            # never scheduled before
            return True
        for binding in bindings:
            if binding.l3_agent_id == agent_id:
                # router already bound to the agent we need
                return False
        if router.get('ha'):
            # HA routers may be bound to several agents at once, so an
            # existing binding to a different agent is not a conflict
            return True
        # legacy router case: router is already bound to some agent
        raise l3agentscheduler.RouterHostedByL3Agent(
            router_id=router_id,
            agent_id=bindings[0].l3_agent_id)
    def create_router_to_agent_binding(self, context, agent, router):
        """Create router to agent binding.
        For HA routers this also creates the HA port before binding;
        :raises: RouterSchedulingFailed when the DB write fails.
        """
        router_id = router['id']
        agent_id = agent['id']
        if self.router_scheduler:
            plugin = directory.get_plugin(plugin_constants.L3)
            try:
                if router.get('ha'):
                    # HA routers need a dedicated HA port on this agent's host
                    self.router_scheduler.create_ha_port_and_bind(
                        plugin, context, router['id'],
                        router['tenant_id'], agent,
                        is_manual_scheduling=True)
                else:
                    self.router_scheduler.bind_router(
                        plugin, context, router_id, agent.id)
            except db_exc.DBError:
                raise l3agentscheduler.RouterSchedulingFailed(
                    router_id=router_id, agent_id=agent_id)
def add_router_to_l3_agent(self, context, agent_id, router_id):
"""Add a l3 agent to host a router."""
if not self.router_supports_scheduling(context, router_id):
raise l3agentscheduler.RouterDoesntSupportScheduling(
router_id=router_id)
with context.session.begin(subtransactions=True):
router = self.get_router(context, router_id)
agent = self._get_agent(context, agent_id)
self.validate_agent_router_combination(context, agent, router)
if not self.check_agent_router_scheduling_needed(
context, agent, router):
return
self.create_router_to_agent_binding(context, agent, router)
l3_notifier = self.agent_notifiers.get(constants.AGENT_TYPE_L3)
if l3_notifier:
l3_notifier.router_added_to_agent(
context, [router_id], agent.host)
def _check_router_retain_needed(self, context, router, host):
"""Check whether a router needs to be retained on a host.
Check whether there are DVR serviceable ports owned by the host of
an l3 agent. If so, then the routers should be retained.
"""
if not host or not router.get('distributed'):
return False
plugin = directory.get_plugin(plugin_constants.L3)
subnet_ids = plugin.get_subnet_ids_on_router(context, router['id'])
return plugin._check_dvr_serviceable_ports_on_host(context, host,
subnet_ids)
def remove_router_from_l3_agent(self, context, agent_id, router_id):
"""Remove the router from l3 agent.
After removal, the router will be non-hosted until there is update
which leads to re-schedule or be added to another agent manually.
"""
agent = self._get_agent(context, agent_id)
agent_mode = self._get_agent_mode(agent)
if agent_mode in [constants.L3_AGENT_MODE_DVR,
constants.L3_AGENT_MODE_DVR_NO_EXTERNAL]:
raise l3agentscheduler.DVRL3CannotRemoveFromDvrAgent()
self._unbind_router(context, router_id, agent_id)
router = self.get_router(context, router_id)
if router.get('ha'):
plugin = directory.get_plugin(plugin_constants.L3)
plugin.delete_ha_interfaces_on_host(context, router_id, agent.host)
l3_notifier = self.agent_notifiers.get(constants.AGENT_TYPE_L3)
if not l3_notifier:
return
# NOTE(Swami): Need to verify if there are DVR serviceable
# ports owned by this agent. If owned by this agent, then
# the routers should be retained. This flag will be used
# to check if there are valid routers in this agent.
retain_router = self._check_router_retain_needed(context, router,
agent.host)
if retain_router:
l3_notifier.routers_updated_on_host(
context, [router_id], agent.host)
else:
l3_notifier.router_removed_from_agent(
context, router_id, agent.host)
def _unbind_router(self, context, router_id, agent_id):
rb_obj.RouterL3AgentBinding.delete_objects(
context, router_id=router_id, l3_agent_id=agent_id)
def _unschedule_router(self, context, router_id, agents_ids):
with context.session.begin(subtransactions=True):
for agent_id in agents_ids:
self._unbind_router(context, router_id, agent_id)
def reschedule_router(self, context, router_id, candidates=None):
"""Reschedule router to (a) new l3 agent(s)
Remove the router from the agent(s) currently hosting it and
schedule it again
"""
cur_agents = self.list_l3_agents_hosting_router(
context, router_id)['agents']
with context.session.begin(subtransactions=True):
cur_agents_ids = [agent['id'] for agent in cur_agents]
self._unschedule_router(context, router_id, cur_agents_ids)
self.schedule_router(context, router_id, candidates=candidates)
new_agents = self.list_l3_agents_hosting_router(
context, router_id)['agents']
if not new_agents:
raise l3agentscheduler.RouterReschedulingFailed(
router_id=router_id)
self._notify_agents_router_rescheduled(context, router_id,
cur_agents, new_agents)
def _notify_agents_router_rescheduled(self, context, router_id,
old_agents, new_agents):
l3_notifier = self.agent_notifiers.get(constants.AGENT_TYPE_L3)
if not l3_notifier:
return
old_hosts = [agent['host'] for agent in old_agents]
new_hosts = [agent['host'] for agent in new_agents]
router = self.get_router(context, router_id)
for host in set(old_hosts) - set(new_hosts):
retain_router = self._check_router_retain_needed(context,
router, host)
if retain_router:
l3_notifier.routers_updated_on_host(
context, [router_id], host)
else:
l3_notifier.router_removed_from_agent(
context, router_id, host)
for agent in new_agents:
try:
l3_notifier.router_added_to_agent(
context, [router_id], agent['host'])
except oslo_messaging.MessagingException:
self._unbind_router(context, router_id, agent['id'])
raise l3agentscheduler.RouterReschedulingFailed(
router_id=router_id)
def list_routers_on_l3_agent(self, context, agent_id):
binding_objs = rb_obj.RouterL3AgentBinding.get_objects(
context, l3_agent_id=agent_id)
router_ids = [item.router_id for item in binding_objs]
if router_ids:
return {'routers':
self.get_routers(context, filters={'id': router_ids})}
else:
# Exception will be thrown if the requested agent does not exist.
self._get_agent(context, agent_id)
return {'routers': []}
def _get_active_l3_agent_routers_sync_data(self, context, host, agent,
router_ids):
if extensions.is_extension_supported(
self, constants.L3_HA_MODE_EXT_ALIAS):
return self.get_ha_sync_data_for_host(context, host, agent,
router_ids=router_ids,
active=True)
return self.get_sync_data(context, router_ids=router_ids, active=True)
def list_router_ids_on_host(self, context, host, router_ids=None):
try:
agent = self._get_agent_by_type_and_host(
context, constants.AGENT_TYPE_L3, host)
except agent_exc.AgentNotFoundByTypeHost:
return []
if not agentschedulers_db.services_available(agent.admin_state_up):
return []
return self._get_router_ids_for_agent(context, agent, router_ids)
def _get_router_ids_for_agent(self, context, agent, router_ids):
"""Get IDs of routers that the agent should host
Overridden for DVR to handle agents in 'dvr' mode which have
no explicit bindings with routers
"""
filters = {'l3_agent_id': agent.id}
if router_ids:
filters['router_id'] = router_ids
bindings = rb_obj.RouterL3AgentBinding.get_objects(context, **filters)
return [item.router_id for item in bindings]
def list_active_sync_routers_on_active_l3_agent(
self, context, host, router_ids):
agent = self._get_agent_by_type_and_host(
context, constants.AGENT_TYPE_L3, host)
if not agentschedulers_db.services_available(agent.admin_state_up):
LOG.info("Agent has its services disabled. Returning "
"no active routers. Agent: %s", agent)
return []
scheduled_router_ids = self._get_router_ids_for_agent(
context, agent, router_ids)
diff = set(router_ids or []) - set(scheduled_router_ids or [])
if diff:
LOG.debug("Agent requested router IDs not scheduled to it. "
"Scheduled: %(sched)s. Unscheduled: %(diff)s. "
"Agent: %(agent)s.",
{'sched': scheduled_router_ids, 'diff': diff,
'agent': agent})
if scheduled_router_ids:
return self._get_active_l3_agent_routers_sync_data(
context, host, agent, scheduled_router_ids)
return []
def get_l3_agents_hosting_routers(self, context, router_ids,
admin_state_up=None,
active=None):
if not router_ids:
return []
record_objs = rb_obj.RouterL3AgentBinding.get_objects(
context, router_id=router_ids)
if admin_state_up is not None:
l3_agents = ag_obj.Agent.get_objects(context,
id=[obj.l3_agent_id for obj in record_objs],
admin_state_up=admin_state_up)
else:
l3_agents = [
ag_obj.Agent.get_object(context, id=obj.l3_agent_id)
for obj in record_objs
]
if active is not None:
l3_agents = [l3_agent for l3_agent in
l3_agents if not
agent_utils.is_agent_down(
l3_agent['heartbeat_timestamp'])]
return l3_agents
def _get_l3_agents_hosting_routers(self, context, router_ids):
if not router_ids:
return []
return (
rb_obj.RouterL3AgentBinding.get_l3_agents_by_router_ids(
context, router_ids))
def list_l3_agents_hosting_router(self, context, router_id):
with context.session.begin(subtransactions=True):
agents = self._get_l3_agents_hosting_routers(
context, [router_id])
return {'agents': [self._make_agent_dict(agent)
for agent in agents]}
def get_routers_l3_agents_count(self, context):
"""Return a map between routers and agent counts for all routers."""
# TODO(sshank): This portion needs Router OVO integration when it is
# merged.
l3_model_list = l3_objs.RouterExtraAttributes.get_router_agents_count(
context)
return [(self._make_router_dict(router_model),
agent_count if agent_count else 0)
for router_model, agent_count in l3_model_list]
def get_l3_agents(self, context, active=None, filters=None):
agent_filters = {'agent_type': constants.AGENT_TYPE_L3}
if active is not None:
agent_filters['admin_state_up'] = active
config_filters = []
if filters:
for key, value in filters.items():
column = ag_obj.Agent.fields.get(key, None)
if column:
if not value:
return []
agent_modes = filters.pop('agent_modes', [])
if agent_modes:
config_filters = set('\"agent_mode\": \"%s\"' % agent_mode
for agent_mode in agent_modes)
agent_filters.update(filters)
agent_objs = []
if config_filters:
for conf_filter in config_filters:
agent_objs.extend(ag_obj.Agent.get_objects_by_agent_mode(
context, conf_filter, **agent_filters))
else:
agent_objs = ag_obj.Agent.get_objects(context, **agent_filters)
return [l3_agent
for l3_agent in agent_objs
if agentschedulers_db.AgentSchedulerDbMixin.is_eligible_agent(
active, l3_agent)]
def get_l3_agent_candidates(self, context, sync_router, l3_agents,
ignore_admin_state=False):
"""Get the valid l3 agents for the router from a list of l3_agents.
It will not return agents in 'dvr' mode or in 'dvr_no_external' mode
for a dvr router as dvr routers are not explicitly scheduled to l3
agents on compute nodes
"""
candidates = []
is_router_distributed = sync_router.get('distributed', False)
for l3_agent in l3_agents:
if not ignore_admin_state and not l3_agent.admin_state_up:
# ignore_admin_state True comes from manual scheduling
# where admin_state_up judgement is already done.
continue
agent_conf = self.get_configuration_dict(l3_agent)
agent_mode = agent_conf.get(constants.L3_AGENT_MODE,
constants.L3_AGENT_MODE_LEGACY)
if (agent_mode == constants.L3_AGENT_MODE_DVR or
agent_mode == constants.L3_AGENT_MODE_DVR_NO_EXTERNAL or
(agent_mode == constants.L3_AGENT_MODE_LEGACY and
is_router_distributed)):
continue
router_id = agent_conf.get('router_id', None)
if router_id and router_id != sync_router['id']:
continue
handle_internal_only_routers = agent_conf.get(
'handle_internal_only_routers', True)
gateway_external_network_id = agent_conf.get(
'gateway_external_network_id', None)
ex_net_id = (sync_router['external_gateway_info'] or {}).get(
'network_id')
if ((not ex_net_id and not handle_internal_only_routers) or
(ex_net_id and gateway_external_network_id and
ex_net_id != gateway_external_network_id)):
continue
candidates.append(l3_agent)
return candidates
def auto_schedule_routers(self, context, host):
if self.router_scheduler:
self.router_scheduler.auto_schedule_routers(self, context, host)
def schedule_router(self, context, router, candidates=None):
if self.router_scheduler:
return self.router_scheduler.schedule(
self, context, router, candidates=candidates)
def schedule_routers(self, context, routers):
"""Schedule the routers to l3 agents."""
for router in routers:
self.schedule_router(context, router, candidates=None)
def get_l3_agent_with_min_routers(self, context, agent_ids):
if not agent_ids:
return None
agents = ag_obj.Agent.get_l3_agent_with_min_routers(
context, agent_ids)
return agents
def get_hosts_to_notify(self, context, router_id):
"""Returns all hosts to send notification about router update"""
state = agentschedulers_db.get_admin_state_up_filter()
agents = self.get_l3_agents_hosting_routers(
context, [router_id], admin_state_up=state, active=True)
return [a.host for a in agents]
def get_vacant_binding_index(self, context, router_id,
is_manual_scheduling=False):
"""Return a vacant binding_index to use and whether or not it exists.
Each RouterL3AgentBinding has a binding_index which is unique per
router_id, and when creating a single binding we require to find a
'vacant' binding_index which isn't yet used - for example if we have
bindings with indices 1 and 3, then clearly binding_index == 2 is free.
:returns: binding_index.
"""
num_agents = self.get_number_of_agents_for_scheduling(context)
pager = base_obj.Pager(sorts=[('binding_index', True)])
bindings = rb_obj.RouterL3AgentBinding.get_objects(
context, _pager=pager, router_id=router_id)
binding_indices = [b.binding_index for b in bindings]
all_indicies = set(range(rb_model.LOWEST_BINDING_INDEX,
num_agents + 1))
open_slots = sorted(list(all_indicies - set(binding_indices)))
if open_slots:
return open_slots[0]
# Last chance: if this is a manual scheduling, we're gonna allow
# creation of a binding_index even if it will exceed
# max_l3_agents_per_router.
if is_manual_scheduling:
return max(all_indicies) + 1
return -1
class AZL3AgentSchedulerDbMixin(L3AgentSchedulerDbMixin,
                                router_az.RouterAvailabilityZonePluginBase):
    """Mixin class to add availability_zone supported l3 agent scheduler."""

    def get_router_availability_zones(self, router):
        # De-duplicate the availability zones of every hosting agent.
        zones = {agent.availability_zone for agent in router.l3_agents}
        return list(zones)
|
unknown
|
codeparrot/codeparrot-clean
| ||
<?php

// Fixture: loads the "framework" extension configuration with an empty
// "request.formats" map (no custom request formats declared).
$container->loadFromExtension('framework', [
    'request' => [
        'formats' => [],
    ],
]);
|
php
|
github
|
https://github.com/symfony/symfony
|
src/Symfony/Bundle/FrameworkBundle/Tests/DependencyInjection/Fixtures/php/request.php
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all companies.
To create companies, run create_companies.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: CompanyService.getCompaniesByStatement
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
# Initialize appropriate service.
company_service = client.GetService('CompanyService', version='v201411')
# Create statement object to select all companies.
statement = dfp.FilterStatement()
# Get companies by statement.
while True:
response = company_service.getCompaniesByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for company in response['results']:
print ('Company with ID \'%s\', name \'%s\', and type \'%s\' was found.'
% (company['id'], company['name'], company['type']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from dateutil.relativedelta import relativedelta
import datetime
import logging
import time
from openerp.osv import osv, fields
import openerp.tools
from openerp.tools.translate import _
from openerp.addons.decimal_precision import decimal_precision as dp
_logger = logging.getLogger(__name__)
class account_analytic_invoice_line(osv.osv):
    """Recurring invoice line attached to a contract (analytic account)."""
    _name = "account.analytic.invoice.line"

    def _amount_line(self, cr, uid, ids, prop, unknow_none, unknow_dict, context=None):
        """Compute each line's subtotal (quantity * unit price), rounded to
        the precision of the contract pricelist currency when one is set."""
        res = {}
        for line in self.browse(cr, uid, ids, context=context):
            res[line.id] = line.quantity * line.price_unit
            if line.analytic_account_id.pricelist_id:
                cur = line.analytic_account_id.pricelist_id.currency_id
                res[line.id] = self.pool.get('res.currency').round(cr, uid, cur, res[line.id])
        return res

    _columns = {
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', ondelete='cascade'),
        'name': fields.text('Description', required=True),
        'quantity': fields.float('Quantity', required=True),
        'uom_id': fields.many2one('product.uom', 'Unit of Measure', required=True),
        'price_unit': fields.float('Unit Price', required=True),
        'price_subtotal': fields.function(_amount_line, string='Sub Total', type="float", digits_compute=dp.get_precision('Account')),
    }
    _defaults = {
        'quantity': 1,
    }

    def product_id_change(self, cr, uid, ids, product, uom_id, qty=0, name='', partner_id=False, price_unit=False, pricelist_id=False, company_id=None, context=None):
        """Onchange handler: derive description, UoM and unit price from the
        selected product, the partner language and the contract pricelist.

        :returns: {'value': {...}} dict in the usual onchange format.
        """
        context = context or {}
        uom_obj = self.pool.get('product.uom')
        company_id = company_id or False
        local_context = dict(context, company_id=company_id, force_company=company_id, pricelist=pricelist_id)
        if not product:
            return {'value': {'price_unit': 0.0}, 'domain': {'product_uom': []}}
        if partner_id:
            part = self.pool.get('res.partner').browse(cr, uid, partner_id, context=local_context)
            if part.lang:
                # Translate the product name into the partner's language.
                local_context.update({'lang': part.lang})
        result = {}
        res = self.pool.get('product.product').browse(cr, uid, product, context=local_context)
        price = False
        if price_unit is not False:
            price = price_unit
        elif pricelist_id:
            price = res.price
        if price is False:
            price = res.list_price
        if not name:
            name = self.pool.get('product.product').name_get(cr, uid, [res.id], context=local_context)[0][1]
            if res.description_sale:
                name += '\n' + res.description_sale
        result.update({'name': name or False, 'uom_id': uom_id or res.uom_id.id or False, 'price_unit': price})
        res_final = {'value': result}
        if result['uom_id'] != res.uom_id.id:
            # FIX: removed an unused `uom_obj.browse(...)` call here - it was
            # assigned to a variable that was never read, costing a pointless
            # database read per onchange.
            new_price = uom_obj._compute_price(cr, uid, res.uom_id.id, res_final['value']['price_unit'], result['uom_id'])
            res_final['value']['price_unit'] = new_price
        return res_final
class account_analytic_account(osv.osv):
_name = "account.analytic.account"
_inherit = "account.analytic.account"
def _analysis_all(self, cr, uid, ids, fields, arg, context=None):
dp = 2
res = dict([(i, {}) for i in ids])
parent_ids = tuple(ids) #We don't want consolidation for each of these fields because those complex computation is resource-greedy.
accounts = self.browse(cr, uid, ids, context=context)
for f in fields:
if f == 'user_ids':
cr.execute('SELECT MAX(id) FROM res_users')
max_user = cr.fetchone()[0]
if parent_ids:
cr.execute('SELECT DISTINCT("user") FROM account_analytic_analysis_summary_user ' \
'WHERE account_id IN %s AND unit_amount <> 0.0', (parent_ids,))
result = cr.fetchall()
else:
result = []
for id in ids:
res[id][f] = [int((id * max_user) + x[0]) for x in result]
elif f == 'month_ids':
if parent_ids:
cr.execute('SELECT DISTINCT(month_id) FROM account_analytic_analysis_summary_month ' \
'WHERE account_id IN %s AND unit_amount <> 0.0', (parent_ids,))
result = cr.fetchall()
else:
result = []
for id in ids:
res[id][f] = [int(id * 1000000 + int(x[0])) for x in result]
elif f == 'last_worked_invoiced_date':
for id in ids:
res[id][f] = False
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, MAX(date) \
FROM account_analytic_line \
WHERE account_id IN %s \
AND invoice_id IS NOT NULL \
GROUP BY account_analytic_line.account_id;", (parent_ids,))
for account_id, sum in cr.fetchall():
if account_id not in res:
res[account_id] = {}
res[account_id][f] = sum
elif f == 'ca_to_invoice':
for id in ids:
res[id][f] = 0.0
res2 = {}
for account in accounts:
cr.execute("""
SELECT product_id, sum(amount), user_id, to_invoice, sum(unit_amount), product_uom_id, line.name
FROM account_analytic_line line
LEFT JOIN account_analytic_journal journal ON (journal.id = line.journal_id)
WHERE account_id = %s
AND journal.type != 'purchase'
AND invoice_id IS NULL
AND to_invoice IS NOT NULL
GROUP BY product_id, user_id, to_invoice, product_uom_id, line.name""", (account.id,))
res[account.id][f] = 0.0
for product_id, price, user_id, factor_id, qty, uom, line_name in cr.fetchall():
price = -price
if product_id:
price = self.pool.get('account.analytic.line')._get_invoice_price(cr, uid, account, product_id, user_id, qty, context)
factor = self.pool.get('hr_timesheet_invoice.factor').browse(cr, uid, factor_id, context=context)
res[account.id][f] += price * qty * (100-factor.factor or 0.0) / 100.0
# sum both result on account_id
for id in ids:
res[id][f] = round(res.get(id, {}).get(f, 0.0), dp) + round(res2.get(id, 0.0), 2)
elif f == 'last_invoice_date':
for id in ids:
res[id][f] = False
if parent_ids:
cr.execute ("SELECT account_analytic_line.account_id, \
DATE(MAX(account_invoice.date_invoice)) \
FROM account_analytic_line \
JOIN account_invoice \
ON account_analytic_line.invoice_id = account_invoice.id \
WHERE account_analytic_line.account_id IN %s \
AND account_analytic_line.invoice_id IS NOT NULL \
GROUP BY account_analytic_line.account_id",(parent_ids,))
for account_id, lid in cr.fetchall():
res[account_id][f] = lid
elif f == 'last_worked_date':
for id in ids:
res[id][f] = False
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, MAX(date) \
FROM account_analytic_line \
WHERE account_id IN %s \
AND invoice_id IS NULL \
GROUP BY account_analytic_line.account_id",(parent_ids,))
for account_id, lwd in cr.fetchall():
if account_id not in res:
res[account_id] = {}
res[account_id][f] = lwd
elif f == 'hours_qtt_non_invoiced':
for id in ids:
res[id][f] = 0.0
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, COALESCE(SUM(unit_amount), 0.0) \
FROM account_analytic_line \
JOIN account_analytic_journal \
ON account_analytic_line.journal_id = account_analytic_journal.id \
WHERE account_analytic_line.account_id IN %s \
AND account_analytic_journal.type='general' \
AND invoice_id IS NULL \
AND to_invoice IS NOT NULL \
GROUP BY account_analytic_line.account_id;",(parent_ids,))
for account_id, sua in cr.fetchall():
if account_id not in res:
res[account_id] = {}
res[account_id][f] = round(sua, dp)
for id in ids:
res[id][f] = round(res[id][f], dp)
elif f == 'hours_quantity':
for id in ids:
res[id][f] = 0.0
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, COALESCE(SUM(unit_amount), 0.0) \
FROM account_analytic_line \
JOIN account_analytic_journal \
ON account_analytic_line.journal_id = account_analytic_journal.id \
WHERE account_analytic_line.account_id IN %s \
AND account_analytic_journal.type='general' \
GROUP BY account_analytic_line.account_id",(parent_ids,))
ff = cr.fetchall()
for account_id, hq in ff:
if account_id not in res:
res[account_id] = {}
res[account_id][f] = round(hq, dp)
for id in ids:
res[id][f] = round(res[id][f], dp)
elif f == 'ca_theorical':
# TODO Take care of pricelist and purchase !
for id in ids:
res[id][f] = 0.0
# Warning
# This computation doesn't take care of pricelist !
# Just consider list_price
if parent_ids:
cr.execute("""SELECT account_analytic_line.account_id AS account_id, \
COALESCE(SUM((account_analytic_line.unit_amount * pt.list_price) \
- (account_analytic_line.unit_amount * pt.list_price \
* hr.factor)), 0.0) AS somme
FROM account_analytic_line \
LEFT JOIN account_analytic_journal \
ON (account_analytic_line.journal_id = account_analytic_journal.id) \
JOIN product_product pp \
ON (account_analytic_line.product_id = pp.id) \
JOIN product_template pt \
ON (pp.product_tmpl_id = pt.id) \
JOIN account_analytic_account a \
ON (a.id=account_analytic_line.account_id) \
JOIN hr_timesheet_invoice_factor hr \
ON (hr.id=a.to_invoice) \
WHERE account_analytic_line.account_id IN %s \
AND a.to_invoice IS NOT NULL \
AND account_analytic_journal.type IN ('purchase', 'general')
GROUP BY account_analytic_line.account_id""",(parent_ids,))
for account_id, sum in cr.fetchall():
res[account_id][f] = round(sum, dp)
return res
def _ca_invoiced_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
res_final = {}
child_ids = tuple(ids) #We don't want consolidation for each of these fields because those complex computation is resource-greedy.
for i in child_ids:
res[i] = 0.0
if not child_ids:
return res
if child_ids:
#Search all invoice lines not in cancelled state that refer to this analytic account
inv_line_obj = self.pool.get("account.invoice.line")
inv_lines = inv_line_obj.search(cr, uid, ['&', ('account_analytic_id', 'in', child_ids), ('invoice_id.state', 'not in', ['draft', 'cancel']), ('invoice_id.type', 'in', ['out_invoice', 'out_refund'])], context=context)
for line in inv_line_obj.browse(cr, uid, inv_lines, context=context):
if line.invoice_id.type == 'out_refund':
res[line.account_analytic_id.id] -= line.price_subtotal
else:
res[line.account_analytic_id.id] += line.price_subtotal
for acc in self.browse(cr, uid, res.keys(), context=context):
res[acc.id] = res[acc.id] - (acc.timesheet_ca_invoiced or 0.0)
res_final = res
return res_final
def _total_cost_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
res_final = {}
child_ids = tuple(ids) #We don't want consolidation for each of these fields because those complex computation is resource-greedy.
for i in child_ids:
res[i] = 0.0
if not child_ids:
return res
if child_ids:
cr.execute("""SELECT account_analytic_line.account_id, COALESCE(SUM(amount), 0.0) \
FROM account_analytic_line \
JOIN account_analytic_journal \
ON account_analytic_line.journal_id = account_analytic_journal.id \
WHERE account_analytic_line.account_id IN %s \
AND amount<0 \
GROUP BY account_analytic_line.account_id""",(child_ids,))
for account_id, sum in cr.fetchall():
res[account_id] = round(sum,2)
res_final = res
return res_final
def _remaining_hours_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
if account.quantity_max != 0:
res[account.id] = account.quantity_max - account.hours_quantity
else:
res[account.id] = 0.0
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _remaining_hours_to_invoice_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = max(account.hours_qtt_est - account.timesheet_ca_invoiced, account.ca_to_invoice)
return res
def _hours_qtt_invoiced_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = account.hours_quantity - account.hours_qtt_non_invoiced
if res[account.id] < 0:
res[account.id] = 0.0
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _revenue_per_hour_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
if account.hours_qtt_invoiced == 0:
res[account.id]=0.0
else:
res[account.id] = account.ca_invoiced / account.hours_qtt_invoiced
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _real_margin_rate_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
if account.ca_invoiced == 0:
res[account.id]=0.0
elif account.total_cost != 0.0:
res[account.id] = -(account.real_margin / account.total_cost) * 100
else:
res[account.id] = 0.0
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _fix_price_to_invoice_calc(self, cr, uid, ids, name, arg, context=None):
sale_obj = self.pool.get('sale.order')
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = 0.0
sale_ids = sale_obj.search(cr, uid, [('project_id','=', account.id), ('state', '=', 'manual')], context=context)
for sale in sale_obj.browse(cr, uid, sale_ids, context=context):
res[account.id] += sale.amount_untaxed
for invoice in sale.invoice_ids:
if invoice.state != 'cancel':
res[account.id] -= invoice.amount_untaxed
return res
def _timesheet_ca_invoiced_calc(self, cr, uid, ids, name, arg, context=None):
lines_obj = self.pool.get('account.analytic.line')
res = {}
inv_ids = []
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = 0.0
line_ids = lines_obj.search(cr, uid, [('account_id','=', account.id), ('invoice_id','!=',False), ('invoice_id.state', 'not in', ['draft', 'cancel']), ('to_invoice','!=', False), ('journal_id.type', '=', 'general'), ('invoice_id.type', 'in', ['out_invoice', 'out_refund'])], context=context)
for line in lines_obj.browse(cr, uid, line_ids, context=context):
if line.invoice_id not in inv_ids:
inv_ids.append(line.invoice_id)
if line.invoice_id.type == 'out_refund':
res[account.id] -= line.invoice_id.amount_untaxed
else:
res[account.id] += line.invoice_id.amount_untaxed
return res
def _remaining_ca_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = max(account.amount_max - account.ca_invoiced, account.fix_price_to_invoice)
return res
def _real_margin_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = account.ca_invoiced + account.total_cost
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _theorical_margin_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = account.ca_theorical + account.total_cost
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _is_overdue_quantity(self, cr, uid, ids, fieldnames, args, context=None):
result = dict.fromkeys(ids, 0)
for record in self.browse(cr, uid, ids, context=context):
if record.quantity_max > 0.0:
result[record.id] = int(record.hours_quantity > record.quantity_max)
else:
result[record.id] = 0
return result
def _get_analytic_account(self, cr, uid, ids, context=None):
result = set()
for line in self.pool.get('account.analytic.line').browse(cr, uid, ids, context=context):
result.add(line.account_id.id)
return list(result)
def _get_total_estimation(self, account):
tot_est = 0.0
if account.fix_price_invoices:
tot_est += account.amount_max
if account.invoice_on_timesheets:
tot_est += account.hours_qtt_est
return tot_est
def _get_total_invoiced(self, account):
total_invoiced = 0.0
if account.fix_price_invoices:
total_invoiced += account.ca_invoiced
if account.invoice_on_timesheets:
total_invoiced += account.timesheet_ca_invoiced
return total_invoiced
def _get_total_remaining(self, account):
total_remaining = 0.0
if account.fix_price_invoices:
total_remaining += account.remaining_ca
if account.invoice_on_timesheets:
total_remaining += account.remaining_hours_to_invoice
return total_remaining
def _get_total_toinvoice(self, account):
total_toinvoice = 0.0
if account.fix_price_invoices:
total_toinvoice += account.fix_price_to_invoice
if account.invoice_on_timesheets:
total_toinvoice += account.ca_to_invoice
return total_toinvoice
def _sum_of_fields(self, cr, uid, ids, name, arg, context=None):
res = dict([(i, {}) for i in ids])
for account in self.browse(cr, uid, ids, context=context):
res[account.id]['est_total'] = self._get_total_estimation(account)
res[account.id]['invoiced_total'] = self._get_total_invoiced(account)
res[account.id]['remaining_total'] = self._get_total_remaining(account)
res[account.id]['toinvoice_total'] = self._get_total_toinvoice(account)
return res
_columns = {
'is_overdue_quantity' : fields.function(_is_overdue_quantity, method=True, type='boolean', string='Overdue Quantity',
store={
'account.analytic.line' : (_get_analytic_account, None, 20),
'account.analytic.account': (lambda self, cr, uid, ids, c=None: ids, ['quantity_max'], 10),
}),
'ca_invoiced': fields.function(_ca_invoiced_calc, type='float', string='Invoiced Amount',
help="Total customer invoiced amount for this account.",
digits_compute=dp.get_precision('Account')),
'total_cost': fields.function(_total_cost_calc, type='float', string='Total Costs',
help="Total of costs for this account. It includes real costs (from invoices) and indirect costs, like time spent on timesheets.",
digits_compute=dp.get_precision('Account')),
'ca_to_invoice': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Uninvoiced Amount',
help="If invoice from analytic account, the remaining amount you can invoice to the customer based on the total costs.",
digits_compute=dp.get_precision('Account')),
'ca_theorical': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Theoretical Revenue',
help="Based on the costs you had on the project, what would have been the revenue if all these costs have been invoiced at the normal sale price provided by the pricelist.",
digits_compute=dp.get_precision('Account')),
'hours_quantity': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Total Worked Time',
help="Number of time you spent on the analytic account (from timesheet). It computes quantities on all journal of type 'general'."),
'last_invoice_date': fields.function(_analysis_all, multi='analytic_analysis', type='date', string='Last Invoice Date',
help="If invoice from the costs, this is the date of the latest invoiced."),
'last_worked_invoiced_date': fields.function(_analysis_all, multi='analytic_analysis', type='date', string='Date of Last Invoiced Cost',
help="If invoice from the costs, this is the date of the latest work or cost that have been invoiced."),
'last_worked_date': fields.function(_analysis_all, multi='analytic_analysis', type='date', string='Date of Last Cost/Work',
help="Date of the latest work done on this account."),
'hours_qtt_non_invoiced': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Uninvoiced Time',
help="Number of time (hours/days) (from journal of type 'general') that can be invoiced if you invoice based on analytic account."),
'hours_qtt_invoiced': fields.function(_hours_qtt_invoiced_calc, type='float', string='Invoiced Time',
help="Number of time (hours/days) that can be invoiced plus those that already have been invoiced."),
'remaining_hours': fields.function(_remaining_hours_calc, type='float', string='Remaining Time',
help="Computed using the formula: Maximum Time - Total Worked Time"),
'remaining_hours_to_invoice': fields.function(_remaining_hours_to_invoice_calc, type='float', string='Remaining Time',
help="Computed using the formula: Expected on timesheets - Total invoiced on timesheets"),
'fix_price_to_invoice': fields.function(_fix_price_to_invoice_calc, type='float', string='Remaining Time',
help="Sum of quotations for this contract."),
'timesheet_ca_invoiced': fields.function(_timesheet_ca_invoiced_calc, type='float', string='Remaining Time',
help="Sum of timesheet lines invoiced for this contract."),
'remaining_ca': fields.function(_remaining_ca_calc, type='float', string='Remaining Revenue',
help="Computed using the formula: Max Invoice Price - Invoiced Amount.",
digits_compute=dp.get_precision('Account')),
'revenue_per_hour': fields.function(_revenue_per_hour_calc, type='float', string='Revenue per Time (real)',
help="Computed using the formula: Invoiced Amount / Total Time",
digits_compute=dp.get_precision('Account')),
'real_margin': fields.function(_real_margin_calc, type='float', string='Real Margin',
help="Computed using the formula: Invoiced Amount - Total Costs.",
digits_compute=dp.get_precision('Account')),
'theorical_margin': fields.function(_theorical_margin_calc, type='float', string='Theoretical Margin',
help="Computed using the formula: Theoretical Revenue - Total Costs",
digits_compute=dp.get_precision('Account')),
'real_margin_rate': fields.function(_real_margin_rate_calc, type='float', string='Real Margin Rate (%)',
help="Computes using the formula: (Real Margin / Total Costs) * 100.",
digits_compute=dp.get_precision('Account')),
'fix_price_invoices' : fields.boolean('Fixed Price'),
'invoice_on_timesheets' : fields.boolean("On Timesheets"),
'month_ids': fields.function(_analysis_all, multi='analytic_analysis', type='many2many', relation='account_analytic_analysis.summary.month', string='Month'),
'user_ids': fields.function(_analysis_all, multi='analytic_analysis', type="many2many", relation='account_analytic_analysis.summary.user', string='User'),
'hours_qtt_est': fields.float('Estimation of Hours to Invoice'),
'est_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Estimation"),
'invoiced_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Invoiced"),
'remaining_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Remaining", help="Expectation of remaining income for this contract. Computed as the sum of remaining subtotals which, in turn, are computed as the maximum between '(Estimation - Invoiced)' and 'To Invoice' amounts"),
'toinvoice_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total to Invoice", help=" Sum of everything that could be invoiced for this contract."),
'recurring_invoice_line_ids': fields.one2many('account.analytic.invoice.line', 'analytic_account_id', 'Invoice Lines', copy=True),
'recurring_invoices' : fields.boolean('Generate recurring invoices automatically'),
'recurring_rule_type': fields.selection([
('daily', 'Day(s)'),
('weekly', 'Week(s)'),
('monthly', 'Month(s)'),
('yearly', 'Year(s)'),
], 'Recurrency', help="Invoice automatically repeat at specified interval"),
'recurring_interval': fields.integer('Repeat Every', help="Repeat every (Days/Week/Month/Year)"),
'recurring_next_date': fields.date('Date of Next Invoice'),
}
_defaults = {
'recurring_interval': 1,
'recurring_next_date': lambda *a: time.strftime('%Y-%m-%d'),
'recurring_rule_type':'monthly'
}
    def open_sale_order_lines(self,cr,uid,ids,context=None):
        """Open a list view of the sale-order lines to invoice for the
        project/partner carried in the context's search defaults."""
        if context is None:
            context = {}
        # NOTE(review): 'partner_id' is compared with the 'in' operator
        # against a scalar (or False) taken from the context -- presumably
        # the callers always put a list there; verify against the callers.
        sale_ids = self.pool.get('sale.order').search(cr,uid,[('project_id','=',context.get('search_default_project_id',False)),('partner_id','in',context.get('search_default_partner_id',False))])
        names = [record.name for record in self.browse(cr, uid, ids, context=context)]
        name = _('Sales Order Lines to Invoice of %s') % ','.join(names)
        # Return an act_window so the client opens the filtered line list.
        return {
            'type': 'ir.actions.act_window',
            'name': name,
            'view_type': 'form',
            'view_mode': 'tree,form',
            'context': context,
            'domain' : [('order_id','in',sale_ids)],
            'res_model': 'sale.order.line',
            'nodestroy': True,
        }
    def on_change_template(self, cr, uid, ids, template_id, date_start=False, context=None):
        """Onchange: propagate invoicing settings from a contract template.

        Fields describing the invoicing configuration (fixed price,
        timesheet invoicing, recurring invoice lines) are only copied onto
        new records (``ids`` empty); the invoicing factor and pricelist are
        aligned with the template whenever the template defines them.
        """
        if not template_id:
            return {}
        res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, date_start=date_start, context=context)
        template = self.browse(cr, uid, template_id, context=context)
        if not ids:
            # New record: copy the fixed-price setup.
            res['value']['fix_price_invoices'] = template.fix_price_invoices
            res['value']['amount_max'] = template.amount_max
        if not ids:
            # New record: copy the invoice-on-timesheets setup.
            res['value']['invoice_on_timesheets'] = template.invoice_on_timesheets
            res['value']['hours_qtt_est'] = template.hours_qtt_est
        if template.to_invoice.id:
            res['value']['to_invoice'] = template.to_invoice.id
        if template.pricelist_id.id:
            res['value']['pricelist_id'] = template.pricelist_id.id
        if not ids:
            # New record: duplicate the template's recurring invoice lines
            # as one2many "create" (0, 0, vals) commands.
            invoice_line_ids = []
            for x in template.recurring_invoice_line_ids:
                invoice_line_ids.append((0, 0, {
                    'product_id': x.product_id.id,
                    'uom_id': x.uom_id.id,
                    'name': x.name,
                    'quantity': x.quantity,
                    'price_unit': x.price_unit,
                    'analytic_account_id': x.analytic_account_id and x.analytic_account_id.id or False,
                }))
            res['value']['recurring_invoices'] = template.recurring_invoices
            res['value']['recurring_interval'] = template.recurring_interval
            res['value']['recurring_rule_type'] = template.recurring_rule_type
            res['value']['recurring_invoice_line_ids'] = invoice_line_ids
        return res
def onchange_recurring_invoices(self, cr, uid, ids, recurring_invoices, date_start=False, context=None):
value = {}
if date_start and recurring_invoices:
value = {'value': {'recurring_next_date': date_start}}
return value
def cron_account_analytic_account(self, cr, uid, context=None):
context = dict(context or {})
remind = {}
def fill_remind(key, domain, write_pending=False):
base_domain = [
('type', '=', 'contract'),
('partner_id', '!=', False),
('manager_id', '!=', False),
('manager_id.email', '!=', False),
]
base_domain.extend(domain)
accounts_ids = self.search(cr, uid, base_domain, context=context, order='name asc')
accounts = self.browse(cr, uid, accounts_ids, context=context)
for account in accounts:
if write_pending:
account.write({'state' : 'pending'})
remind_user = remind.setdefault(account.manager_id.id, {})
remind_type = remind_user.setdefault(key, {})
remind_partner = remind_type.setdefault(account.partner_id, []).append(account)
# Already expired
fill_remind("old", [('state', 'in', ['pending'])])
# Expires now
fill_remind("new", [('state', 'in', ['draft', 'open']), '|', '&', ('date', '!=', False), ('date', '<=', time.strftime('%Y-%m-%d')), ('is_overdue_quantity', '=', True)], True)
# Expires in less than 30 days
fill_remind("future", [('state', 'in', ['draft', 'open']), ('date', '!=', False), ('date', '<', (datetime.datetime.now() + datetime.timedelta(30)).strftime("%Y-%m-%d"))])
context['base_url'] = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
context['action_id'] = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_analytic_analysis', 'action_account_analytic_overdue_all')[1]
template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_analytic_analysis', 'account_analytic_cron_email_template')[1]
for user_id, data in remind.items():
context["data"] = data
_logger.debug("Sending reminder to uid %s", user_id)
self.pool.get('email.template').send_mail(cr, uid, template_id, user_id, force_send=True, context=context)
return True
def onchange_invoice_on_timesheets(self, cr, uid, ids, invoice_on_timesheets, context=None):
if not invoice_on_timesheets:
return {'value': {'to_invoice': False}}
result = {'value': {'use_timesheets': True}}
try:
to_invoice = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'hr_timesheet_invoice', 'timesheet_invoice_factor1')
result['value']['to_invoice'] = to_invoice[1]
except ValueError:
pass
return result
def hr_to_invoice_timesheets(self, cr, uid, ids, context=None):
domain = [('invoice_id','=',False),('to_invoice','!=',False), ('journal_id.type', '=', 'general'), ('account_id', 'in', ids)]
names = [record.name for record in self.browse(cr, uid, ids, context=context)]
name = _('Timesheets to Invoice of %s') % ','.join(names)
return {
'type': 'ir.actions.act_window',
'name': name,
'view_type': 'form',
'view_mode': 'tree,form',
'domain' : domain,
'res_model': 'account.analytic.line',
'nodestroy': True,
}
    def _prepare_invoice_data(self, cr, uid, contract, context=None):
        """Build the account.invoice header values (no lines) for the next
        recurring invoice of `contract`.

        :raises osv.except_osv: if the contract has no customer, or no sale
            journal exists for the contract's company.
        """
        context = context or {}
        journal_obj = self.pool.get('account.journal')
        fpos_obj = self.pool['account.fiscal.position']
        partner = contract.partner_id
        if not partner:
            raise osv.except_osv(_('No Customer Defined!'),_("You must first select a Customer for Contract %s!") % contract.name )
        fpos_id = fpos_obj.get_fiscal_position(cr, uid, partner.company_id.id, partner.id, context=context)
        journal_ids = journal_obj.search(cr, uid, [('type', '=','sale'),('company_id', '=', contract.company_id.id or False)], limit=1)
        if not journal_ids:
            raise osv.except_osv(_('Error!'),
            _('Please define a sale journal for the company "%s".') % (contract.company_id.name or '', ))
        partner_payment_term = partner.property_payment_term and partner.property_payment_term.id or False
        # Currency fallback chain: contract pricelist, then the partner's
        # default pricelist, then the company currency.
        currency_id = False
        if contract.pricelist_id:
            currency_id = contract.pricelist_id.currency_id.id
        elif partner.property_product_pricelist:
            currency_id = partner.property_product_pricelist.currency_id.id
        elif contract.company_id:
            currency_id = contract.company_id.currency_id.id
        invoice = {
           'account_id': partner.property_account_receivable.id,
           'type': 'out_invoice',
           'partner_id': partner.id,
           'currency_id': currency_id,
           'journal_id': len(journal_ids) and journal_ids[0] or False,
           'date_invoice': contract.recurring_next_date,
           'origin': contract.code,
           'fiscal_position': fpos_id,
           'payment_term': partner_payment_term,
           'company_id': contract.company_id.id or False,
           'user_id': contract.manager_id.id or uid,
        }
        return invoice
    def _prepare_invoice_line(self, cr, uid, line, fiscal_position, context=None):
        """Convert one recurring contract line into account.invoice.line
        values, mapping the income account and taxes through the given
        fiscal position (which may be None)."""
        fpos_obj = self.pool.get('account.fiscal.position')
        res = line.product_id
        # Income account: the product's own account first, then the product
        # category's account as fallback.
        account_id = res.property_account_income.id
        if not account_id:
            account_id = res.categ_id.property_account_income_categ.id
        account_id = fpos_obj.map_account(cr, uid, fiscal_position, account_id)
        taxes = res.taxes_id or False
        tax_id = fpos_obj.map_tax(cr, uid, fiscal_position, taxes)
        values = {
            'name': line.name,
            'account_id': account_id,
            'account_analytic_id': line.analytic_account_id.id,
            'price_unit': line.price_unit or 0.0,
            'quantity': line.quantity,
            'uos_id': line.uom_id.id or False,
            'product_id': line.product_id.id or False,
            # (6, 0, ids) replaces the many2many with the mapped taxes.
            'invoice_line_tax_id': [(6, 0, tax_id)],
        }
        return values
def _prepare_invoice_lines(self, cr, uid, contract, fiscal_position_id, context=None):
fpos_obj = self.pool.get('account.fiscal.position')
fiscal_position = None
if fiscal_position_id:
fiscal_position = fpos_obj.browse(cr, uid, fiscal_position_id, context=context)
invoice_lines = []
for line in contract.recurring_invoice_line_ids:
values = self._prepare_invoice_line(cr, uid, line, fiscal_position, context=context)
invoice_lines.append((0, 0, values))
return invoice_lines
def _prepare_invoice(self, cr, uid, contract, context=None):
invoice = self._prepare_invoice_data(cr, uid, contract, context=context)
invoice['invoice_line'] = self._prepare_invoice_lines(cr, uid, contract, invoice['fiscal_position'], context=context)
return invoice
    def recurring_create_invoice(self, cr, uid, ids, context=None):
        # Button/action entry point: generate the next invoice(s) for the
        # given contracts immediately (no auto-commit per contract).
        return self._recurring_create_invoice(cr, uid, ids, context=context)

    def _cron_recurring_create_invoice(self, cr, uid, context=None):
        # Scheduler entry point: empty ids lets _recurring_create_invoice
        # select all eligible contracts; automatic=True commits/rolls back
        # per contract instead of aborting the whole batch.
        return self._recurring_create_invoice(cr, uid, [], automatic=True, context=context)
def _recurring_create_invoice(self, cr, uid, ids, automatic=False, context=None):
context = context or {}
invoice_ids = []
current_date = time.strftime('%Y-%m-%d')
if ids:
contract_ids = ids
else:
contract_ids = self.search(cr, uid, [('recurring_next_date','<=', current_date), ('state','=', 'open'), ('recurring_invoices','=', True), ('type', '=', 'contract')])
if contract_ids:
cr.execute('SELECT company_id, array_agg(id) as ids FROM account_analytic_account WHERE id IN %s GROUP BY company_id', (tuple(contract_ids),))
for company_id, ids in cr.fetchall():
for contract in self.browse(cr, uid, ids, context=dict(context, company_id=company_id, force_company=company_id)):
try:
invoice_values = self._prepare_invoice(cr, uid, contract, context=context)
invoice_ids.append(self.pool['account.invoice'].create(cr, uid, invoice_values, context=context))
next_date = datetime.datetime.strptime(contract.recurring_next_date or current_date, "%Y-%m-%d")
interval = contract.recurring_interval
if contract.recurring_rule_type == 'daily':
new_date = next_date+relativedelta(days=+interval)
elif contract.recurring_rule_type == 'weekly':
new_date = next_date+relativedelta(weeks=+interval)
elif contract.recurring_rule_type == 'monthly':
new_date = next_date+relativedelta(months=+interval)
else:
new_date = next_date+relativedelta(years=+interval)
self.write(cr, uid, [contract.id], {'recurring_next_date': new_date.strftime('%Y-%m-%d')}, context=context)
if automatic:
cr.commit()
except Exception:
if automatic:
cr.rollback()
_logger.exception('Fail to create recurring invoice for contract %s', contract.code)
else:
raise
return invoice_ids
class account_analytic_account_summary_user(osv.osv):
    """Read-only SQL view summarizing the time spent per analytic account
    and per user, restricted to analytic journals of type 'general'.

    Each row id encodes the (account_id, user_id) pair as
    account_id * max(res_users.id) + user_id (see the CREATE VIEW below).
    """
    _name = "account_analytic_analysis.summary.user"
    _description = "Hours Summary by User"
    _order='user'
    _auto = False
    _rec_name = 'user'

    def _unit_amount(self, cr, uid, ids, name, arg, context=None):
        # Decode each composite id back into its account and user parts,
        # then read the matching unit_amount rows in one query.
        # NOTE: the decoding relies on Python 2 integer division (x/max_user).
        res = {}
        account_obj = self.pool.get('account.analytic.account')
        cr.execute('SELECT MAX(id) FROM res_users')
        max_user = cr.fetchone()[0]
        account_ids = [int(str(x/max_user - (x%max_user == 0 and 1 or 0))) for x in ids]
        user_ids = [int(str(x-((x/max_user - (x%max_user == 0 and 1 or 0)) *max_user))) for x in ids]
        parent_ids = tuple(account_ids) #We don't want consolidation for each of these fields because those complex computation is resource-greedy.
        if parent_ids:
            cr.execute('SELECT id, unit_amount ' \
                    'FROM account_analytic_analysis_summary_user ' \
                    'WHERE account_id IN %s ' \
                    'AND "user" IN %s',(parent_ids, tuple(user_ids),))
            for sum_id, unit_amount in cr.fetchall():
                res[sum_id] = unit_amount
        # Ids with no matching row default to 0.0; everything rounded to 2dp.
        for id in ids:
            res[id] = round(res.get(id, 0.0), 2)
        return res

    _columns = {
        'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'unit_amount': fields.float('Total Time'),
        'user': fields.many2one('res.users', 'User'),
    }
    # Models whose changes must invalidate this view's ORM cache.
    _depends = {
        'res.users': ['id'],
        'account.analytic.line': ['account_id', 'journal_id', 'unit_amount', 'user_id'],
        'account.analytic.journal': ['type'],
    }

    def init(self, cr):
        # (Re)create the backing SQL view: sum of general-journal analytic
        # line quantities grouped by (account, user), with the composite id
        # described in the class docstring.
        openerp.tools.sql.drop_view_if_exists(cr, 'account_analytic_analysis_summary_user')
        cr.execute('''CREATE OR REPLACE VIEW account_analytic_analysis_summary_user AS (
            with mu as
                (select max(id) as max_user from res_users)
            , lu AS
                (SELECT
                 l.account_id AS account_id,
                 coalesce(l.user_id, 0) AS user_id,
                 SUM(l.unit_amount) AS unit_amount
             FROM account_analytic_line AS l,
                 account_analytic_journal AS j
             WHERE (j.type = 'general' ) and (j.id=l.journal_id)
             GROUP BY l.account_id, l.user_id
            )
            select (lu.account_id * mu.max_user) + lu.user_id as id,
                lu.account_id as account_id,
                lu.user_id as "user",
                unit_amount
            from lu, mu)''')
class account_analytic_account_summary_month(osv.osv):
    """Read-only SQL view summarizing the time spent per analytic account
    and per month (general journals only).  Row ids encode the
    (month, account) pair as YYYYMM + account_id * 1000000."""
    _name = "account_analytic_analysis.summary.month"
    _description = "Hours summary by month"
    _auto = False
    _rec_name = 'month'

    _columns = {
        'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'unit_amount': fields.float('Total Time'),
        'month': fields.char('Month', size=32, readonly=True),
    }
    # Models whose changes must invalidate this view's ORM cache.
    _depends = {
        'account.analytic.line': ['account_id', 'date', 'journal_id', 'unit_amount'],
        'account.analytic.journal': ['type'],
    }

    def init(self, cr):
        # (Re)create the backing SQL view.  The inner "d" subquery builds the
        # cross product of all accounts with every month that has general
        # analytic lines; the LEFT JOIN fills in the actual sums (0.0 when a
        # given account has no lines in that month).
        openerp.tools.sql.drop_view_if_exists(cr, 'account_analytic_analysis_summary_month')
        cr.execute('CREATE VIEW account_analytic_analysis_summary_month AS (' \
                'SELECT ' \
                    '(TO_NUMBER(TO_CHAR(d.month, \'YYYYMM\'), \'999999\') + (d.account_id  * 1000000::bigint))::bigint AS id, ' \
                    'd.account_id AS account_id, ' \
                    'TO_CHAR(d.month, \'Mon YYYY\') AS month, ' \
                    'TO_NUMBER(TO_CHAR(d.month, \'YYYYMM\'), \'999999\') AS month_id, ' \
                    'COALESCE(SUM(l.unit_amount), 0.0) AS unit_amount ' \
                'FROM ' \
                    '(SELECT ' \
                        'd2.account_id, ' \
                        'd2.month ' \
                    'FROM ' \
                        '(SELECT ' \
                            'a.id AS account_id, ' \
                            'l.month AS month ' \
                        'FROM ' \
                            '(SELECT ' \
                                'DATE_TRUNC(\'month\', l.date) AS month ' \
                            'FROM account_analytic_line AS l, ' \
                                'account_analytic_journal AS j ' \
                            'WHERE j.type = \'general\' ' \
                            'GROUP BY DATE_TRUNC(\'month\', l.date) ' \
                            ') AS l, ' \
                            'account_analytic_account AS a ' \
                        'GROUP BY l.month, a.id ' \
                        ') AS d2 ' \
                    'GROUP BY d2.account_id, d2.month ' \
                    ') AS d ' \
                'LEFT JOIN ' \
                    '(SELECT ' \
                        'l.account_id AS account_id, ' \
                        'DATE_TRUNC(\'month\', l.date) AS month, ' \
                        'SUM(l.unit_amount) AS unit_amount ' \
                    'FROM account_analytic_line AS l, ' \
                        'account_analytic_journal AS j ' \
                    'WHERE (j.type = \'general\') and (j.id=l.journal_id) ' \
                    'GROUP BY l.account_id, DATE_TRUNC(\'month\', l.date) ' \
                    ') AS l '
                'ON (' \
                    'd.account_id = l.account_id ' \
                    'AND d.month = l.month' \
                ') ' \
                'GROUP BY d.month, d.account_id ' \
                ')')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
unknown
|
codeparrot/codeparrot-clean
| ||
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.backends import ModelBackend
from django.db.models import Q
try:
    from django.contrib.auth import get_user_model
except ImportError:  # Django < 1.5: no custom-user-model support.
    import django.contrib.auth.models

    def get_user_model():
        """Fallback for old Django versions: always the builtin User model."""
        return django.contrib.auth.models.User

# Character separating the superuser's own username and password inside the
# password field; configurable via settings.AUTH_SUBSTUSER_SPLIT_CHAR.
SPLIT_CHAR = getattr(settings, 'AUTH_SUBSTUSER_SPLIT_CHAR', '?')
class SubstUserBackend(ModelBackend):
    """Let superusers log in as regular users.

    To impersonate a user, enter the target user's username (or e-mail) and
    put "<superuser_username><SPLIT_CHAR><superuser_password>" in the
    password field.  The superuser's own credentials are verified through
    the regular authentication machinery; on success the *target* user is
    returned.

    http://nedbatchelder.com/blog/201008/django_superuser_login_trapdoor.html
    """

    def authenticate(self, username=None, password=None, **kwargs):
        # The password must contain the split character.  Guard against
        # password=None: Django tries every backend with whatever credential
        # set it has, and a password may be missing entirely (the original
        # code raised TypeError on `SPLIT_CHAR not in None`).
        if not password or SPLIT_CHAR not in password:
            return None
        user_model = get_user_model()
        username_field = getattr(user_model, 'USERNAME_FIELD', 'username')
        if username is None:
            username = kwargs.get(username_field)
        if username is None:
            return None
        # Look up the user to impersonate by username or e-mail.
        try:
            user = user_model._default_manager.get(
                Q(username__iexact=username) | Q(email__iexact=username))
        except (user_model.DoesNotExist, user_model.MultipleObjectsReturned):
            return None
        # Authenticate the superuser with the credentials embedded in the
        # password field.
        username, password = password.split(SPLIT_CHAR, 1)
        credentials = {username_field: username, 'password': password}
        superuser = auth.authenticate(**credentials)
        if superuser and superuser.is_superuser and superuser.is_active:
            return user
        return None
|
unknown
|
codeparrot/codeparrot-clean
| ||
# stdlib
import threading
import time
from types import ListType
import unittest
# 3p
from nose.plugins.attrib import attr
# project
from aggregator import MetricsAggregator
from dogstatsd import Server
from jmxfetch import JMXFetch
from tests.checks.common import Fixtures
STATSD_PORT = 8121
class DummyReporter(threading.Thread):
    """Minimal stand-in for the agent reporter: periodically flushes the
    aggregator and keeps the last non-empty batch in ``self.metrics`` so
    tests can poll for it.  Tests stop the loop by setting
    ``reporter.finished = True``."""

    def __init__(self, metrics_aggregator):
        threading.Thread.__init__(self)
        self.metrics_aggregator = metrics_aggregator
        self.interval = 10  # seconds between flushes
        self.metrics = None  # last non-empty flush result
        # Plain boolean stop flag checked by run().  (The original code
        # first assigned a threading.Event() here and immediately
        # overwrote it with False -- the Event was dead code.)
        self.finished = False
        self.start()

    def run(self):
        # Flush on a fixed cadence until asked to stop.
        while not self.finished:
            time.sleep(self.interval)
            self.flush()

    def flush(self):
        # Keep only non-empty flushes so self.metrics never reverts to an
        # empty batch once data has been seen.
        metrics = self.metrics_aggregator.flush()
        if metrics:
            self.metrics = metrics
@attr(requires='cassandra')
class JMXTestCase(unittest.TestCase):
    """Integration test: starts a local dogstatsd server plus a JMXFetch
    daemon and checks that Cassandra JMX metrics reach the aggregator.
    Requires a running Cassandra instance (see the @attr marker)."""

    def setUp(self):
        # Local dogstatsd server feeding a fresh aggregator, with a dummy
        # reporter polling the aggregator for flushed metrics.
        aggregator = MetricsAggregator("test_host")
        self.server = Server(aggregator, "localhost", STATSD_PORT)
        self.reporter = DummyReporter(aggregator)

        self.t1 = threading.Thread(target=self.server.start)
        self.t1.start()

        # JMXFetch reads its check configs from the fixtures directory and
        # reports to the local dogstatsd port.
        confd_path = Fixtures.directory()
        self.jmx_daemon = JMXFetch(confd_path, {'dogstatsd_port': STATSD_PORT})
        self.t2 = threading.Thread(target=self.jmx_daemon.run)
        self.t2.start()

    def tearDown(self):
        # Stop the server, the reporter loop and the JMXFetch daemon.
        self.server.stop()
        self.reporter.finished = True
        self.jmx_daemon.terminate()

    def testCustomJMXMetric(self):
        # Poll for up to 25 seconds until the reporter captured a flush.
        count = 0
        while self.reporter.metrics is None:
            time.sleep(1)
            count += 1
            if count > 25:
                raise Exception("No metrics were received in 25 seconds")

        metrics = self.reporter.metrics
        # Expect a substantial number of tagged cassandra.db.* metrics.
        self.assertTrue(isinstance(metrics, ListType))
        self.assertTrue(len(metrics) > 0)
        self.assertTrue(len([t for t in metrics if "cassandra.db." in t['metric'] and "instance:cassandra_instance" in t['tags']]) > 40, metrics)
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Support for memory sanitizer. See runtime/cgo/mmap.go.
//go:build (linux && (amd64 || arm64 || loong64)) || (freebsd && amd64)
package runtime
import "unsafe"
// _cgo_mmap is filled in by runtime/cgo when it is linked into the
// program, so it is only non-nil when using cgo.
//
//go:linkname _cgo_mmap _cgo_mmap
var _cgo_mmap unsafe.Pointer
// _cgo_munmap is filled in by runtime/cgo when it is linked into the
// program, so it is only non-nil when using cgo.
//
//go:linkname _cgo_munmap _cgo_munmap
var _cgo_munmap unsafe.Pointer
// mmap is used to route the mmap system call through C code when using cgo, to
// support sanitizer interceptors. Don't allow stack splits, since this function
// (used by sysAlloc) is called in a lot of low-level parts of the runtime and
// callers often assume it won't acquire any locks.
//
// Returns the mapped address and 0 on success, or nil and the errno value
// on failure (in the cgo path, a return value below 4096 is the errno —
// valid mappings are page-aligned and can never fall in that range).
//
//go:nosplit
func mmap(addr unsafe.Pointer, n uintptr, prot, flags, fd int32, off uint32) (unsafe.Pointer, int) {
	if _cgo_mmap != nil {
		// Make ret a uintptr so that writing to it in the
		// function literal does not trigger a write barrier.
		// A write barrier here could break because of the way
		// that mmap uses the same value both as a pointer and
		// an errno value.
		var ret uintptr
		systemstack(func() {
			ret = callCgoMmap(addr, n, prot, flags, fd, off)
		})
		if ret < 4096 {
			return nil, int(ret)
		}
		return unsafe.Pointer(ret), 0
	}
	// Not built with cgo: perform the raw system call directly.
	return sysMmap(addr, n, prot, flags, fd, off)
}
// munmap routes the munmap system call through C code when cgo is in use
// (so sanitizer interceptors observe the unmapping, running on the system
// stack like mmap above); otherwise it performs the raw system call.
func munmap(addr unsafe.Pointer, n uintptr) {
	if _cgo_munmap != nil {
		systemstack(func() { callCgoMunmap(addr, n) })
		return
	}
	sysMunmap(addr, n)
}
// The four functions below are the assembly-implemented primitives used by
// mmap/munmap above: sys* issue the raw system calls, callCgo* jump into
// the C wrappers provided by runtime/cgo.

// sysMmap calls the mmap system call. It is implemented in assembly.
func sysMmap(addr unsafe.Pointer, n uintptr, prot, flags, fd int32, off uint32) (p unsafe.Pointer, err int)

// callCgoMmap calls the mmap function in the runtime/cgo package
// using the GCC calling convention. It is implemented in assembly.
func callCgoMmap(addr unsafe.Pointer, n uintptr, prot, flags, fd int32, off uint32) uintptr

// sysMunmap calls the munmap system call. It is implemented in assembly.
func sysMunmap(addr unsafe.Pointer, n uintptr)

// callCgoMunmap calls the munmap function in the runtime/cgo package
// using the GCC calling convention. It is implemented in assembly.
func callCgoMunmap(addr unsafe.Pointer, n uintptr)
|
go
|
github
|
https://github.com/golang/go
|
src/runtime/cgo_mmap.go
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the Bernoulli distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import distributions
from tensorflow.contrib.distributions.python.kernel_tests import distribution_test
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.platform import test
class ConditionalDistributionTest(distribution_test.DistributionTest):
  """Checks that ConditionalDistribution forwards extra keyword arguments
  (arg1/arg2) from its public methods to the private implementations."""

  def _GetFakeDistribution(self):
    # Build a ConditionalDistribution subclass whose private methods all
    # raise ValueError(arg1, arg2), so a test can observe which keyword
    # arguments were forwarded.
    class _FakeDistribution(distributions.ConditionalDistribution):
      """Fake Distribution for testing _set_sample_static_shape."""

      def __init__(self, batch_shape=None, event_shape=None):
        self._static_batch_shape = tensor_shape.TensorShape(batch_shape)
        self._static_event_shape = tensor_shape.TensorShape(event_shape)
        super(_FakeDistribution, self).__init__(
            dtype=dtypes.float32,
            reparameterization_type=distributions.NOT_REPARAMETERIZED,
            validate_args=True,
            allow_nan_stats=True,
            name="DummyDistribution")

      def _batch_shape(self):
        return self._static_batch_shape

      def _event_shape(self):
        return self._static_event_shape

      def _sample_n(self, unused_shape, unused_seed, arg1, arg2):
        raise ValueError(arg1, arg2)

      def _log_prob(self, _, arg1, arg2):
        raise ValueError(arg1, arg2)

      def _prob(self, _, arg1, arg2):
        raise ValueError(arg1, arg2)

      def _cdf(self, _, arg1, arg2):
        raise ValueError(arg1, arg2)

      def _log_cdf(self, _, arg1, arg2):
        raise ValueError(arg1, arg2)

      def _log_survival_function(self, _, arg1, arg2):
        raise ValueError(arg1, arg2)

      def _survival_function(self, _, arg1, arg2):
        raise ValueError(arg1, arg2)

    return _FakeDistribution

  def testNotImplemented(self):
    # Each public method must pass arg1/arg2 through to the private
    # implementation, which raises ValueError carrying both values.
    d = self._GetFakeDistribution()(batch_shape=[], event_shape=[])
    for name in ["sample", "log_prob", "prob", "log_cdf", "cdf",
                 "log_survival_function", "survival_function"]:
      method = getattr(d, name)
      with self.assertRaisesRegexp(ValueError, "b1.*b2"):
        method([] if name == "sample" else 1.0, arg1="b1", arg2="b2")
if __name__ == "__main__":
test.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# django-lb-workflow documentation build configuration file, created by
# sphinx-quickstart on Mon May 1 20:04:08 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# No Sphinx extensions are enabled for this project.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "django-lb-workflow"
copyright = "2017, vicalloy"
author = "vicalloy"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ""
# The full version, including alpha/beta/rc tags.
release = ""
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "django-lb-workflowdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "django-lb-workflow.tex",
        "django-lb-workflow Documentation",
        "vicalloy",
        "manual",
    ),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (
        master_doc,
        "django-lb-workflow",
        "django-lb-workflow Documentation",
        [author],
        1,
    )
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "django-lb-workflow",
        "django-lb-workflow Documentation",
        author,
        "django-lb-workflow",
        "One line description of project.",
        "Miscellaneous",
    ),
]
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_profile_persistence_src_addr import ApiParameters
from library.modules.bigip_profile_persistence_src_addr import ModuleParameters
from library.modules.bigip_profile_persistence_src_addr import ModuleManager
from library.modules.bigip_profile_persistence_src_addr import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ApiParameters
from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ModuleParameters
from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ModuleManager
from ansible.modules.network.f5.bigip_profile_persistence_src_addr import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load a fixture file from the fixtures directory, with caching.

    The file is parsed as JSON when possible; otherwise the raw text is
    used. Results are memoized in the module-level `fixture_data` dict,
    keyed by absolute path, so each fixture is read from disk only once.
    """
    path = os.path.join(fixture_path, name)
    if path not in fixture_data:
        with open(path) as f:
            raw = f.read()
        try:
            parsed = json.loads(raw)
        except Exception:
            # Not valid JSON; fall back to the raw file contents.
            parsed = raw
        fixture_data[path] = parsed
    return fixture_data[path]
class TestParameters(unittest.TestCase):
    """Checks that playbook/API values are normalized by the parameter adapters."""

    def test_module_parameters(self):
        # ModuleParameters should prefix the parent profile with the
        # partition and map Python booleans to 'yes'/'no' strings.
        args = dict(
            name='foo',
            parent='bar',
            match_across_services=False,
            match_across_virtuals=True,
            match_across_pools=False,
            hash_algorithm='carp',
            entry_timeout=100,
            override_connection_limit=True
        )
        p = ModuleParameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/bar'
        assert p.match_across_services == 'no'
        assert p.match_across_virtuals == 'yes'
        assert p.match_across_pools == 'no'
        assert p.hash_algorithm == 'carp'
        assert p.entry_timeout == 100
        assert p.override_connection_limit == 'yes'

    def test_api_parameters(self):
        # ApiParameters normalizes raw device JSON (loaded from a fixture)
        # the same way: booleans surface as 'yes'/'no' strings.
        args = load_fixture('load_ltm_profile_persistence_src_addr_1.json')
        p = ApiParameters(params=args)
        assert p.name == 'source_addr'
        assert p.match_across_pools == 'no'
        assert p.match_across_services == 'no'
        assert p.match_across_virtuals == 'no'
class TestManager(unittest.TestCase):
    """Exercises ModuleManager's create flow with the device calls mocked out."""
    # NOTE(review): test_create accepts *args as if a @patch decorator were
    # applied, but none is visible here -- confirm against upstream.

    def setUp(self):
        # Fresh argument spec per test.
        self.spec = ArgumentSpec()

    def test_create(self, *args):
        # Configure the arguments that would be sent to the Ansible module
        set_module_args(dict(
            name='foo',
            match_across_virtuals='yes',
            parent='bar',
            password='password',
            server='localhost',
            user='admin'
        ))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        mm = ModuleManager(module=module)
        # Override methods to force specific logic in the module to happen
        mm.exists = Mock(return_value=False)
        mm.create_on_device = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
        assert results['match_across_virtuals'] == 'yes'
|
unknown
|
codeparrot/codeparrot-clean
| ||
class Object(object):
    """Base game object that registers itself with a collection of groups.

    Attributes:
        z: draw-order key used by Group when sorting objects (default 0).
    """

    def __init__(self, groups):
        """Add this object to every group in `groups` and remember them."""
        for group in groups:
            group.add(self)
        self._groups = groups
        self.z = 0

    def alive(self):
        """Return True while the object still belongs to its groups."""
        return self._groups != []

    def kill(self):
        """Remove this object from all of its groups.

        Bug fix: the original iterated over ``self.groups``, an attribute
        that does not exist (``__init__`` stores ``self._groups``), so
        kill() always raised AttributeError.
        """
        for g in self._groups:
            g.remove(self)
        self._groups = []

    def update(self):
        """Per-frame update hook; subclasses override."""
        pass

    def draw(self, surface):
        """Draw hook; subclasses override."""
        pass
class Group(object):
    """Container of game objects, iterated in ascending z (draw) order."""

    def __init__(self):
        self._objects = []

    def __len__(self):
        return len(self._objects)

    def __iter__(self):
        return iter(self.objects())

    def __getitem__(self, index):
        # Indexing uses insertion order, not z order.
        return self._objects[index]

    def objects(self):
        """Return the members sorted by their z attribute."""
        return sorted(self._objects, key=lambda obj: obj.z)

    def add(self, object):
        """Append `object` unless it is already a member."""
        if object in self._objects:
            return
        self._objects.append(object)

    def remove(self, object):
        """Remove `object` if present; silently ignore otherwise."""
        if object in self._objects:
            self._objects.remove(object)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implement the "specs" DSL for describing deep networks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import operator
import re
from six import exec_
# Matches single- or double-quoted string literals (with escapes); quoted
# text is stripped from a spec before keyword scanning so that string
# contents cannot trigger false positives.
QUOTED = re.compile(r"""
"([^"\\]|\\.)*" |
'([^'\\]|\\.)*'
""", re.VERBOSE)
# Python keywords that are prohibited inside specs (see check_keywords).
KEYWORDS = re.compile(r"""\b(import|while|def|exec)\b""")
# Module-wide debug flag; toggled via debug() and read in Function.funcall.
debug_ = False
def check_keywords(spec):
  """Check for common Python keywords in spec.

  Complex constructs are discouraged in TensorFlow specs; this performs a
  lightweight textual check (with quoted strings stripped first) rather
  than a full AST analysis.

  Args:
    spec: spec string

  Raises:
    ValueError: raised if spec contains a prohibited keyword.
  """
  stripped = re.sub(QUOTED, "", spec)
  found = re.search(KEYWORDS, stripped)
  if found:
    raise ValueError("keyword '%s' found in spec" % found.group(1))
def get_positional(args, kw, kw_overrides=False):
  """Interpolate positional keywords ("_0", "_1", ...) into argument lists.

  Keywords of the form "_<index>" are removed from `kw` and placed at the
  corresponding position of the argument list, which is padded with None
  entries as needed.

  Args:
    args: argument list
    kw: keyword dictionary
    kw_overrides: if true, a "_<index>" value replaces an existing
      (non-None) positional argument; otherwise only None slots are filled.

  Returns:
    (new_args, new_kw), new argument lists and keyword dictionaries
    with values interpolated.
  """
  plain_kw = {k: v for k, v in kw.items() if k[0] != "_"}
  if len(plain_kw) == len(kw):
    # No positional keywords present; hand back the inputs untouched.
    return args, kw
  filled = list(args)
  for key, value in kw.items():
    if key[0] != "_":
      continue
    position = int(key[1:])
    while len(filled) <= position:
      filled.append(None)
    if kw_overrides or filled[position] is None:
      filled[position] = value
  return filled, plain_kw
class Composable(object):
  """A composable function.

  Defines the operators shared by all composable objects: composition
  (via "|"), repeated application (via "**"), and pointwise addition
  ("+") and multiplication ("*"), where "(f + g)(x) = f(x) + g(x)".
  """

  def __or__(self, f):
    return Composition(self, f)

  def __add__(self, g):
    return Operator(operator.add, self, g)

  def __mul__(self, g):
    return Operator(operator.mul, self, g)

  def __pow__(self, n):
    assert n >= 0
    if n == 0:
      # Zeroth power is the identity function.
      return Function(lambda x, *args, **kw: x)
    composed = self
    for _ in range(n - 1):
      composed = Composition(composed, self)
    return composed
class Callable(Composable):
  """A composable function that simply defers to a callable function.
  """
  def __init__(self, f):
    # The underlying callable; applied unchanged by funcall.
    self.f = f
  def funcall(self, x):
    # Apply the wrapped callable directly to the input.
    return self.f(x)
class Operator(Composable):
  """Wrap an n-ary operator over composable functions.

  funcall applies every wrapped function to the input and then combines
  the individual results with the operator (e.g. operator.add for "+").
  """

  def __init__(self, op, *args):
    self.op = op
    self.funs = args

  def funcall(self, x):
    return self.op(*[f.funcall(x) for f in self.funs])
class Function(Composable):
  """A composable wrapper for a regular Python function.

  Calling the wrapper curries: arguments passed to __call__ are
  remembered and supplied later, when the function is finally applied
  via funcall.
  """

  def __init__(self, f, *args, **kw):
    if not callable(f):
      raise ValueError("%s: is not callable" % f)
    self.f = f
    self.args = list(args)
    self.kw = kw

  def __call__(self, *args, **kw):
    # Currying: merge the new arguments with the remembered ones
    # (new positional args first, new keywords win).
    combined_args = list(args) + self.args
    combined_kw = self.kw.copy()
    combined_kw.update(kw)
    return Function(self.f, *combined_args, **combined_kw)

  # TODO(tmb) The `of` method may be renamed to `function`.
  def funcall(self, x):
    args, kw = get_positional(self.args, self.kw)
    if debug_:
      print("DEBUG:", self.f, x, args, kw)
    return self.f(x, *args, **kw)
class Composition(Composable):
  """Left-to-right composition of two composable functions.

  funcall applies f first and then feeds its result to g.
  """

  def __init__(self, f, g):
    self.f = f
    self.g = g

  def funcall(self, x):
    intermediate = self.f.funcall(x)
    return self.g.funcall(intermediate)
# These are DSL names, not Python names
# pylint: disable=invalid-name, exec-used
def External(module_name, function_name):
  """Import a function from an external module.

  Note that the `module_name` must be a module name
  that works with the usual import mechanisms. Shorthands
  like "tf.nn" will not work.

  Args:
    module_name: name of the module
    function_name: name of the function within the module

  Returns:
    Function-wrapped value of symbol.
  """
  module = importlib.import_module(module_name)
  symbol = vars(module)[function_name]
  return Function(symbol)
def Import(statements):
  """Import a function by exec.

  The statements are executed in a fresh namespace and must bind a value
  to the name "f", which is then wrapped as a composable Function.

  Args:
    statements: Python statements

  Returns:
    Function-wrapped value of `f`.

  Raises:
    ValueError: the statements didn't define a value for "f"
  """
  environ = {}
  exec_(statements, environ)
  if "f" not in environ:
    # Bug fix: interpolate the statements into the message; previously the
    # format string and its argument were passed as two separate ValueError
    # arguments, so "%s" was never substituted.
    raise ValueError("failed to define \"f\": %s" % (statements,))
  f = environ["f"]
  return Function(f)
# pylint: enable=invalid-name, exec-used
def debug(mode=True):
  """Turn debugging mode on or off.

  When enabled, more information is printed while a network is being
  constructed.

  Args:
    mode: True if turned on, False otherwise
  """
  global debug_
  debug_ = mode
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import jpype
from jpype import *
from jpype import java
from jpype import javax
J_STRING = "java.lang.String"
class VsdDataCollector:
    """Collects VSD call statistics over JMX for a single test run."""

    def __init__(self, name, host="localhost", port=1099, debug=False,
                 output_channel=None):
        self.jmx = ConnectionJmx(host, port, debug, output_channel)
        self.jmx.create_management_object(name=name)
        # Baselines captured by start_test() and consumed by end_test().
        self.saved_stats = None
        self.saved_vsd_calls = 0

    def start_test(self):
        """Snapshot the current VSD statistics as the test baseline."""
        self.saved_stats = self.jmx.get_attribute("VsdStatisticsReport")
        self.saved_vsd_calls = self.jmx.get_attribute("VSDStatistics")

    def end_test(self):
        """Print a report of the VSD activity observed since start_test()."""
        self.jmx.print_rapport(old_stats=self.saved_stats,
                               old_vsd_count=self.saved_vsd_calls)
class ConnectionJmx:
def __init__(self, host="localhost", port=1099, debug=False,
output_channel=None):
self.host = host
self.port = port
self.url = "service:jmx:rmi:///jndi/rmi://%s:%d/jmxrmi" % (host, port)
self.debug = debug
self.mbean = None
self.output_channel = output_channel
jpype.startJVM(jpype.get_default_jvm_path())
if debug:
self.__print_to_ouput_channel("JVM loaded")
self.__print_to_ouput_channel(jpype.get_default_jvm_path())
jmx_url = javax.management.remote.JMXServiceURL(self.url)
jmx_soc = javax.management.remote.JMXConnectorFactory.connect(
jmx_url, java.util.HashMap())
self.connection = jmx_soc.getMBeanServerConnection()
if self.debug:
self.__print_to_ouput_channel("Connection successful")
def __print_to_ouput_channel(self, text):
if self.output_channel:
self.output_channel.debug(text)
else:
print text
def create_management_object(self, domain="com.cloud",
type="NuageVspResource",
name="Nuage VSD - 0.0.0.0"):
if name is not None:
object_name = domain + ":type=" + type + ", name=" + name
else:
object_name = domain + ":" + type
if self.debug:
self.__print_to_ouput_channel(object_name)
self.mbean = javax.management.ObjectName(object_name)
return self.mbean
def get_vsd_statistics_by_request_and_entity_type(self,
entity_type,
request_type,
mbean=None):
self._get_stats_by_entity_or_request_type(
mbean, [[entity_type, request_type]],
"getVsdStatisticsByEntityType")
def _jStringArray(self, elements):
return jpype.JArray(java.lang.String)(elements)
def get_vsd_statistics_by_request_type(self, request_type, mbean=None):
return self._get_stats_by_entity_or_request_type(
mbean, [request_type], "getVsdStatisticsByRequestType")
def get_vsd_statistics_by_entity_type(self, entity_type, mbean=None):
return self._get_stats_by_entity_or_request_type(
mbean, [entity_type], "getVsdStatisticsByEntityType")
def _get_stats_by_entity_or_request_type(self, vars, method, mbean=None):
if not mbean:
mbean = self.mbean
jarray = self._jStringArray(vars)
signature = self._jStringArray([J_STRING for _ in vars])
result = self.connection.invoke(mbean, method, jarray, signature)
if self.debug:
self.__print_to_ouput_channel(vars + ": " + str(result))
return result
def get_attribute(self, attribute, mbean=None):
if not mbean:
mbean = self.mbean
result = self.connection.getAttribute(mbean, attribute)
if self.debug:
self.__print_to_ouput_channel("Attribute " + attribute + ": " +
str(result))
return result
def print_rapport(self, mbean=None, old_stats=None, old_vsd_count=0):
if not mbean:
mbean = self.mbean
stat = self.get_attribute("VsdStatisticsReport", mbean)
number_of_vsd_calls = int(str(self.get_attribute("VSDStatistics",
mbean)))
number_of_vsd_calls = number_of_vsd_calls - int(str(old_vsd_count))
self.__print_to_ouput_channel("\n================"
"RAPPORT:"
"================\n")
self.__print_to_ouput_channel("Total VSD calls: " +
str(number_of_vsd_calls))
self.__print_to_ouput_channel("For each Entity:\n")
self.__print_total_for_entity(stat, old_stats)
self.__print_to_ouput_channel("\nFor each Request:\n")
self.__print_total_per_request(stat, old_stats)
self.__print_to_ouput_channel("\nCombined:\n")
self.__print_total_per_entity_and_request(stat, old_stats)
self.__print_to_ouput_channel("\n============="
"END OF RAPPORT"
"=============")
def __print_total_per_request(self, stat, old_stat=None):
data = dict()
entries = ((entry.getKey(), entry.getValue().get())
for requestmap in stat.values()
for entry in requestmap.entrySet())
for request, value in entries:
if request in data:
data[request] += value
else:
data[request] = value
if old_stat:
old_entries = ((entry.getKey(), entry.getValue().get())
for requestmap in old_stat.values()
for entry in requestmap.entrySet())
for request, value in old_entries:
if request in data:
data[request] -= value
else:
data[request] = 0
for key, value in data.iteritems():
self.__print_to_ouput_channel(" " + str(key) + ": " + str(value))
def __print_total_per_entity_and_request(self, stat, old_stat=None):
for entity in stat:
self.__print_to_ouput_channel(entity + ":")
for request in stat[entity]:
previous = 0
if old_stat and old_stat[entity] and old_stat[entity][request]:
previous = int(str(old_stat[entity][request]))
current = int(str(stat[entity][request]))
self.__print_to_ouput_channel(" " + str(request) + ":" +
str(current - previous))
self.__print_to_ouput_channel("--------------------"
"--------------------")
def __print_total_for_entity(self, stat, old_stat=None):
for entity in stat:
total = 0
for val in stat[entity]:
minus = 0
if old_stat and old_stat[entity] and old_stat[entity][val]:
minus = int(str(old_stat[entity][val]))
total = str(stat[entity][val])
total = int(total) - minus
self.__print_to_ouput_channel(" " + str(entity) +
": " + str(total))
|
unknown
|
codeparrot/codeparrot-clean
| ||
# #
# Copyright 2014 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Unit tests for parallelbuild.py
@author: Kenneth Hoste (Ghent University)
"""
import os
import re
import stat
import sys
from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, init_config
from unittest import TextTestRunner
from vsc.utils.fancylogger import setLogLevelDebug, logToScreen
from easybuild.framework.easyconfig.tools import process_easyconfig
from easybuild.tools import config
from easybuild.tools.filetools import adjust_permissions, mkdir, which, write_file
from easybuild.tools.job import pbs_python
from easybuild.tools.job.pbs_python import PbsPython
from easybuild.tools.parallelbuild import build_easyconfigs_in_parallel, submit_jobs
from easybuild.tools.robot import resolve_dependencies
# test GC3Pie configuration with large resource specs
GC3PIE_LOCAL_CONFIGURATION = """[resource/ebtestlocalhost]
enabled = yes
type = shellcmd
frontend = localhost
transport = local
max_cores_per_job = 1
max_memory_per_core = 1000GiB
max_walltime = 1000 hours
# this doubles as "maximum concurrent jobs"
max_cores = 1000
architecture = x86_64
auth = none
override = no
resourcedir = %(resourcedir)s
time_cmd = %(time)s
"""
def mock(*args, **kwargs):
    """Stand-in for several functions imported in the parallelbuild module.

    Accepts any arguments and always returns 1.
    """
    return 1
class MockPbsJob(object):
    """Minimal stand-in for PbsJob that records its constructor input."""

    def __init__(self, *args, **kwargs):
        self.deps = []
        self.jobid = None
        self.clean_conn = None
        # args[0] is the (ignored) server connection; args[1] is the script.
        self.script = args[1]
        self.cores = kwargs['cores']

    def add_dependencies(self, jobs):
        """Record the given jobs as dependencies of this one."""
        self.deps.extend(jobs)

    def cleanup(self, *args, **kwargs):
        """No-op; the real implementation releases the PBS connection."""
        pass

    def has_holds(self, *args, **kwargs):
        """No-op; the real implementation reports whether the job is held."""
        pass

    def _submit(self, *args, **kwargs):
        """No-op; the real implementation submits the job to PBS."""
        pass
class ParallelBuildTest(EnhancedTestCase):
""" Testcase for run module """
def test_build_easyconfigs_in_parallel_pbs_python(self):
"""Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
# put mocked functions in place
PbsPython__init__ = PbsPython.__init__
PbsPython_check_version = PbsPython._check_version
PbsPython_complete = PbsPython.complete
PbsPython_connect_to_server = PbsPython.connect_to_server
PbsPython_ppn = PbsPython.ppn
pbs_python_PbsJob = pbs_python.PbsJob
PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
PbsPython._check_version = lambda _: True
PbsPython.complete = mock
PbsPython.connect_to_server = mock
PbsPython.ppn = mock
pbs_python.PbsJob = MockPbsJob
topdir = os.path.dirname(os.path.abspath(__file__))
build_options = {
'external_modules_metadata': {},
'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
'valid_module_classes': config.module_classes(),
'validate': False,
'job_cores': 3,
}
init_config(args=['--job-backend=PbsPython'], build_options=build_options)
ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.5-goolf-1.4.10.eb')
easyconfigs = process_easyconfig(ec_file)
ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
self.assertEqual(len(jobs), 8)
regex = re.compile("echo '.*/gzip-1.5-goolf-1.4.10.eb'")
self.assertTrue(regex.search(jobs[-1].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[-1].script))
ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb')
ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool, retain_all_deps=True)
jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)
# make sure command is correct, and that --hidden is there when it needs to be
for i, ec in enumerate(ordered_ecs):
if ec['hidden']:
regex = re.compile("eb %s.* --hidden" % ec['spec'])
else:
regex = re.compile("eb %s" % ec['spec'])
self.assertTrue(regex.search(jobs[i].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[i].script))
for job in jobs:
self.assertEqual(job.cores, build_options['job_cores'])
# no deps for GCC/4.6.3 (toolchain) and ictce/4.1.13 (test easyconfig with 'fake' deps)
self.assertEqual(len(jobs[0].deps), 0)
self.assertEqual(len(jobs[1].deps), 0)
# only dependency for toy/0.0-deps is ictce/4.1.13 (dep marked as external module is filtered out)
self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
self.assertEqual(len(jobs[2].deps), 1)
self.assertTrue('ictce-4.1.13.eb' in jobs[2].deps[0].script)
# dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps
self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
self.assertEqual(len(jobs[3].deps), 2)
regex = re.compile('toy-0.0-deps.eb\s* --hidden')
self.assertTrue(regex.search(jobs[3].deps[0].script))
self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)
# restore mocked stuff
PbsPython.__init__ = PbsPython__init__
PbsPython._check_version = PbsPython_check_version
PbsPython.complete = PbsPython_complete
PbsPython.connect_to_server = PbsPython_connect_to_server
PbsPython.ppn = PbsPython_ppn
pbs_python.PbsJob = pbs_python_PbsJob
def test_build_easyconfigs_in_parallel_gc3pie(self):
"""Test build_easyconfigs_in_parallel(), using GC3Pie with local config as backend for --job."""
try:
import gc3libs
except ImportError:
print "GC3Pie not available, skipping test"
return
# put GC3Pie config in place to use local host and fork/exec
resourcedir = os.path.join(self.test_prefix, 'gc3pie')
gc3pie_cfgfile = os.path.join(self.test_prefix, 'gc3pie_local.ini')
gc3pie_cfgtxt = GC3PIE_LOCAL_CONFIGURATION % {
'resourcedir': resourcedir,
'time': which('time'),
}
write_file(gc3pie_cfgfile, gc3pie_cfgtxt)
output_dir = os.path.join(self.test_prefix, 'subdir', 'gc3pie_output_dir')
# purposely pre-create output dir, and put a file in it (to check whether GC3Pie tries to rename the output dir)
mkdir(output_dir, parents=True)
write_file(os.path.join(output_dir, 'foo'), 'bar')
# remove write permissions on parent dir of specified output dir,
# to check that GC3Pie does not try to rename the (already existing) output directory...
adjust_permissions(os.path.dirname(output_dir), stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH,
add=False, recursive=False)
topdir = os.path.dirname(os.path.abspath(__file__))
build_options = {
'job_backend_config': gc3pie_cfgfile,
'job_max_walltime': 24,
'job_output_dir': output_dir,
'job_polling_interval': 0.2, # quick polling
'job_target_resource': 'ebtestlocalhost',
'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
'silent': True,
'valid_module_classes': config.module_classes(),
'validate': False,
}
options = init_config(args=['--job-backend=GC3Pie'], build_options=build_options)
ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
easyconfigs = process_easyconfig(ec_file)
ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
topdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
test_easyblocks_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox')
cmd = "PYTHONPATH=%s:%s:$PYTHONPATH eb %%(spec)s -df" % (topdir, test_easyblocks_path)
jobs = build_easyconfigs_in_parallel(cmd, ordered_ecs, prepare_first=False)
self.assertTrue(os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0'))
self.assertTrue(os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy'))
def suite():
    """Return all the testcases in this module as a filtered suite."""
    loader = TestLoaderFiltered()
    return loader.loadTestsFromTestCase(ParallelBuildTest, sys.argv[1:])
if __name__ == '__main__':
    # Uncomment the next two lines for verbose debug output of a test run:
    #logToScreen(enable=True)
    #setLogLevelDebug()
    TextTestRunner(verbosity=1).run(suite())
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""adjusting key length
Revision ID: 956a063c52b3
Revises: f0fbf6129e13
Create Date: 2016-05-11 17:28:32.407340
"""
# revision identifiers, used by Alembic.
revision = '956a063c52b3'  # identifier of this migration
down_revision = 'f0fbf6129e13'  # migration this one is applied on top of
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Shrink the listed VARCHAR(256) columns to String(255) ("adjusting key
    length", per this migration's message)."""
    # (table, columns) pairs, in the exact order the original migration
    # altered them.
    resized = [
        ('clusters', ['broker_endpoint', 'broker_host',
                      'coordinator_endpoint', 'coordinator_host']),
        ('columns', ['column_name']),
        ('datasources', ['datasource_name']),
        ('table_columns', ['column_name']),
        ('tables', ['schema']),
    ]
    for table, columns in resized:
        with op.batch_alter_table(table, schema=None) as batch_op:
            for column in columns:
                batch_op.alter_column(column,
                                      existing_type=sa.VARCHAR(length=256),
                                      type_=sa.String(length=255),
                                      existing_nullable=True)
def downgrade():
    """Restore the columns shrunk by upgrade() back to VARCHAR(256)."""
    # Reverse of upgrade(): tables and columns in the opposite order.
    restored = [
        ('tables', ['schema']),
        ('table_columns', ['column_name']),
        ('datasources', ['datasource_name']),
        ('columns', ['column_name']),
        ('clusters', ['coordinator_host', 'coordinator_endpoint',
                      'broker_host', 'broker_endpoint']),
    ]
    for table, columns in restored:
        with op.batch_alter_table(table, schema=None) as batch_op:
            for column in columns:
                batch_op.alter_column(column,
                                      existing_type=sa.String(length=255),
                                      type_=sa.VARCHAR(length=256),
                                      existing_nullable=True)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright (C) 2015 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect.testing.google;
import static junit.framework.Assert.fail;
import com.google.common.annotations.GwtCompatible;
import com.google.common.collect.Multimap;
/**
* Helper methods/assertions for use with {@code com.google.common.collect} types.
*
* @author Colin Decker
*/
@GwtCompatible
final class GoogleHelpers {
private GoogleHelpers() {}
static void assertEmpty(Multimap<?, ?> multimap) {
if (!multimap.isEmpty()) {
fail("Not true that " + multimap + " is empty");
}
}
}
|
java
|
github
|
https://github.com/google/guava
|
android/guava-testlib/src/com/google/common/collect/testing/google/GoogleHelpers.java
|
from __future__ import absolute_import
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError
class UninstallCommand(Command):
"""
Uninstall packages.
pip is able to uninstall most installed packages. Known exceptions are:
- Pure distutils packages installed with ``python setup.py install``, which
leave behind no metadata to determine what files were installed.
- Script wrappers installed by ``python setup.py develop``.
"""
name = 'uninstall'
usage = """
%prog [options] <package> ...
%prog [options] -r <requirements file> ..."""
summary = 'Uninstall packages.'
def __init__(self, *args, **kw):
super(UninstallCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Uninstall all the packages listed in the given requirements '
'file. This option can be used multiple times.',
)
self.cmd_opts.add_option(
'-y', '--yes',
dest='yes',
action='store_true',
help="Don't ask for confirmation of uninstall deletions.")
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
with self._build_session(options) as session:
requirement_set = RequirementSet(
build_dir=None,
src_dir=None,
download_dir=None,
isolated=options.isolated_mode,
session=session,
)
for name in args:
requirement_set.add_requirement(
InstallRequirement.from_line(
name, isolated=options.isolated_mode,
)
)
for filename in options.requirements:
for req in parse_requirements(
filename,
options=options,
session=session):
requirement_set.add_requirement(req)
if not requirement_set.has_requirements:
raise InstallationError(
'You must give at least one requirement to %(name)s (see '
'"pip help %(name)s")' % dict(name=self.name)
)
requirement_set.uninstall(auto_confirm=options.yes)
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
A module for convolutions with cudnn.
"""
__author__ = "Nicolas Ballas"
__license__ = "3-clause BSD"
__credits__ = "Nicolas Ballas and Francesco Visin"
__maintainer__ = "Lisa Lab"
import functools
import numpy as np
from theano.sandbox.cuda.dnn import GpuDnnConv, GpuDnnConvDesc
from theano.sandbox.cuda.basic_ops import gpu_contiguous, gpu_alloc_empty
from pylearn2.packaged_dependencies.theano_linear.conv2d \
import Conv2d as OrigConv2D
from pylearn2.linear.conv2D import make_normal_conv2D, make_random_conv2D, make_sparse_random_conv2D
from pylearn2.linear.linear_transform import LinearTransform as P2LT
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_np_rng
default_seed = [2012, 11, 6, 9]
default_sparse_seed = [2012, 11, 6]
class Cudnn2D(OrigConv2D):
"""
Wrapper on the Theano Cudnn op.
Parameters
----------
filters : Theano shared variable
4D-tensor of shape (out channels, in channels, rows, cols)
batch_size : int
The size of the input batches
input_space : Space
The Space of the input data
output_axes : tuple, optional
The requested output axes. If not specified `bc01` will be used.
subsample : tuple or list, optional
Factor by which to subsample the output. Default (1, 1)
border_mode : string, optional
`valid` or `full`. See scipy.signal.convolve2d
filters_shape : tuple of length 2 or 3, optional
([filter's number,] filter's height, filter's width)
message : string, optional
TODO
"""
def __init__(self,
filters,
batch_size,
input_space,
output_axes=('b', 'c', 0, 1),
subsample=(1, 1),
border_mode='valid',
filters_shape=None,
message=''):
assert batch_size is None or batch_size > 0
self._input_space = input_space
self._output_axes = output_axes
self._subsample = tuple(subsample)
self._border_mode = border_mode
super(Cudnn2D, self).__init__(
filters=filters,
img_shape=(batch_size, input_space.num_channels,
input_space.shape[0], input_space.shape[1]),
subsample=self._subsample,
border_mode=border_mode,
filters_shape=filters.get_value(borrow=True).shape,
message=message
)
# conv_op has to be changed
self._conv_op = GpuDnnConv()
self._desc = GpuDnnConvDesc(border_mode=border_mode,
subsample=self._subsample,
conv_mode='conv')
@functools.wraps(P2LT.get_params)
def get_params(self):
""" Return self._filters. """
return [self._filters]
@functools.wraps(P2LT.get_weights_topo)
def get_weights_topo(self, borrow):
"""
Parameters
----------
borrow : TODO
TODO
"""
return np.transpose(self._filters.get_value(borrow=borrow),
(0, 2, 3, 1))
def lmul(self, x):
"""
.. todo::
WRITEME properly
dot(x, A)
This method overrides the original Conv2D lmul to make it work
with arbitrary axis orders
Parameters
----------
x : TODO
TODO
"""
# x must be formatted as batch index, channel, topo dim 0, topo dim 1
# for use with conv2d, so check what the current input space format is
assert x.ndim == 4
axes = self._input_space.axes
assert len(axes) == 4
op_axes = ('b', 'c', 0, 1)
if tuple(axes) != op_axes:
x = x.dimshuffle(*[axes.index(ax) for ax in op_axes])
# The calling format has to be changed
img = gpu_contiguous(x)
kerns = gpu_contiguous(self._filters)
shape = GpuDnnConv.get_out_shape(
img.shape, kerns.shape, self._border_mode, self._subsample)
rval = gpu_alloc_empty(*shape)
desc = self._desc(img.shape, kerns.shape)
rval = self._conv_op(img, kerns, rval, desc)
# Format the output based on the output space
axes = self._output_axes
assert len(axes) == 4
if tuple(self._output_axes) != op_axes:
rval = rval.dimshuffle(*[op_axes.index(ax) for ax in
self._output_axes])
return rval
def set_batch_size(self, batch_size):
"""
.. todo::
WRITEME
Parameters
----------
batch_size : TODO
TODO
"""
self._img_shape = tuple([batch_size] + list(self._img_shape[1:]))
def make_random_conv2D(irange, rng=None, *args, **kwargs):
return make_random_conv2D(irange, rng, cls=Cudnn2D, *args, **kwargs)
def make_normal_conv2D(istd, rng=None, *args, **kwargs):
return make_normal_conv2D(istd, rng, cls=Cudnn2D, *args, **kwargs)
def make_sparse_random_conv2D(num_nonzero, rng=None, *args, **kwargs):
return make_sparse_random_conv2D(num_nonzero, rng, cls=Cudnn2D, *args, **kwargs)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2014-2025 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.tests.plugins
import io.ktor.client.call.*
import io.ktor.client.plugins.cache.*
import io.ktor.client.plugins.cache.storage.*
import io.ktor.client.plugins.logging.*
import io.ktor.client.request.*
import io.ktor.client.statement.*
import io.ktor.client.test.base.*
import io.ktor.client.utils.*
import io.ktor.http.*
import io.ktor.util.*
import io.ktor.util.date.*
import io.ktor.utils.io.*
import kotlinx.coroutines.delay
import kotlin.test.*
@Suppress("DEPRECATION", "DEPRECATION_ERROR")
class CacheLegacyStorageTest : ClientLoader() {
@Test
fun testNoStore() = clientTests {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/no-store")
val first = client.get(url).body<String>()
assertTrue(privateStorage.findByUrl(url).isEmpty())
assertTrue(publicStorage.findByUrl(url).isEmpty())
val second = client.get(url).body<String>()
assertTrue(privateStorage.findByUrl(url).isEmpty())
assertTrue(publicStorage.findByUrl(url).isEmpty())
assertNotEquals(first, second)
}
}
@Test
fun testNoCache() = clientTests {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/no-cache")
val first = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
assertEquals(0, privateStorage.findByUrl(url).size)
val second = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
assertEquals(0, privateStorage.findByUrl(url).size)
assertNotEquals(first, second)
}
}
@Test
fun testETagCache() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/etag")
val first = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
val second = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
assertEquals(first, second)
}
}
@Test
fun testLastModified() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/last-modified")
val first = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
val second = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
assertEquals(first, second)
}
}
@Test
fun testVary() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/vary")
// first header value from Vary
val first = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
val second = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
assertEquals(first, second)
// second header value from Vary
val third = client.get(url) {
header(HttpHeaders.ContentLanguage, "ru")
}.body<String>()
assertNotEquals(third, second)
val fourth = client.get(url) {
header(HttpHeaders.ContentLanguage, "ru")
}.body<String>()
assertEquals(third, fourth)
// first header value from Vary
val fifth = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
assertEquals(first, fifth)
// no header value from Vary
val sixth = client.get(url).body<String>()
assertNotEquals(sixth, second)
assertNotEquals(sixth, third)
val seventh = client.get(url).body<String>()
assertEquals(sixth, seventh)
}
}
@Test
fun testVaryStale() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/vary-stale")
// first header value from Vary
val first = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
val second = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
assertEquals(first, second)
// second header value from Vary
val third = client.get(url) {
header(HttpHeaders.ContentLanguage, "ru")
}.body<String>()
assertNotEquals(third, second)
val fourth = client.get(url) {
header(HttpHeaders.ContentLanguage, "ru")
}.body<String>()
assertEquals(third, fourth)
// first header value from Vary
val fifth = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
assertEquals(first, fifth)
// no header value from Vary
val sixth = client.get(url).body<String>()
assertNotEquals(sixth, second)
assertNotEquals(sixth, third)
val seventh = client.get(url).body<String>()
assertEquals(sixth, seventh)
}
}
@OptIn(InternalAPI::class)
@Test
fun testNoVaryIn304() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
client.receivePipeline.intercept(HttpReceivePipeline.Before) { response ->
if (response.status == HttpStatusCode.NotModified) {
val headers = buildHeaders {
response.headers
.filter { name, _ ->
!name.equals(HttpHeaders.Vary, ignoreCase = true)
}
.forEach(::appendAll)
}
proceedWith(
object : HttpResponse() {
override val call get() = response.call
override val rawContent get() = response.rawContent
override val coroutineContext get() = response.coroutineContext
override val headers = headers
override val requestTime get() = response.requestTime
override val responseTime get() = response.responseTime
override val status get() = response.status
override val version get() = response.version
}
)
}
}
val url = Url("$TEST_SERVER/cache/vary-stale")
// first header value from Vary
val first = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
val second = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
assertEquals(first, second)
// second header value from Vary
val third = client.get(url) {
header(HttpHeaders.ContentLanguage, "ru")
}.body<String>()
assertNotEquals(third, second)
val fourth = client.get(url) {
header(HttpHeaders.ContentLanguage, "ru")
}.body<String>()
assertEquals(third, fourth)
// first header value from Vary
val fifth = client.get(url) {
header(HttpHeaders.ContentLanguage, "en")
}.body<String>()
assertEquals(first, fifth)
// no header value from Vary
val sixth = client.get(url).body<String>()
assertNotEquals(sixth, second)
assertNotEquals(sixth, third)
val seventh = client.get(url).body<String>()
assertEquals(sixth, seventh)
}
}
@Test
fun testMaxAge() = clientTests(except("native:CIO")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/max-age")
val first = client.get(url).body<String>()
val cache = publicStorage.findByUrl(url)
assertEquals(1, cache.size)
val second = client.get(url).body<String>()
assertEquals(first, second)
delay(2500)
val third = client.get(url).body<String>()
assertNotEquals(first, third)
}
}
@Test
fun testOnlyIfCached() = clientTests {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/etag?max-age=10")
val responseNoCache = client.get(url) {
header(HttpHeaders.CacheControl, "only-if-cached")
}
assertEquals(HttpStatusCode.GatewayTimeout, responseNoCache.status)
val bodyOriginal = client.get(url).bodyAsText()
val responseCached = client.get(url) {
header(HttpHeaders.CacheControl, "only-if-cached")
}
val bodyCached = responseCached.bodyAsText()
assertEquals(HttpStatusCode.OK, responseCached.status)
assertEquals(bodyOriginal, bodyCached)
}
}
@Test
fun testMaxStale() = clientTests {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/max-age")
val original = client.get(url).body<String>()
val cache = publicStorage.findByUrl(url)
assertEquals(1, cache.size)
delay(2500)
val stale = client.get(url) {
header(HttpHeaders.CacheControl, "max-stale=4")
}
assertEquals("110", stale.headers[HttpHeaders.Warning])
val staleBody = stale.body<String>()
assertEquals(original, staleBody)
val staleMaxInt = client.get(url) {
header(HttpHeaders.CacheControl, "max-stale=${Int.MAX_VALUE}")
}
assertEquals("110", stale.headers[HttpHeaders.Warning])
val staleMaxIntBody = staleMaxInt.body<String>()
assertEquals(original, staleMaxIntBody)
val notStale = client.get(url)
val notStaleBody = notStale.body<String>()
assertNull(notStale.headers[HttpHeaders.Warning])
assertNotEquals(original, notStaleBody)
}
}
@Test
fun testNoStoreRequest() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val url = Url("$TEST_SERVER/cache/etag")
val first = client.get(url) {
header(HttpHeaders.CacheControl, "no-store")
}.body<String>()
assertEquals(0, publicStorage.findByUrl(url).size)
val second = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
assertNotEquals(first, second)
}
}
@Test
fun testNoCacheRequest() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
var requestsCount = 0
client.sendPipeline.intercept(HttpSendPipeline.Engine) {
requestsCount++
}
val url = Url("$TEST_SERVER/cache/etag?max-age=30")
val first = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
val second = client.get(url) {
header(HttpHeaders.CacheControl, "no-cache")
}.body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
assertEquals(2, requestsCount)
assertEquals(first, second)
}
}
@Test
fun testRequestWithMaxAge0() = clientTests(except("Js")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
var requestsCount = 0
client.sendPipeline.intercept(HttpSendPipeline.Engine) {
requestsCount++
}
val url = Url("$TEST_SERVER/cache/etag?max-age=30")
val first = client.get(url).body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
val second = client.get(url) {
header(HttpHeaders.CacheControl, "max-age=0")
}.body<String>()
assertEquals(1, publicStorage.findByUrl(url).size)
assertEquals(2, requestsCount)
assertEquals(first, second)
}
}
@Test
fun testExpires() = clientTests {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val now = GMTDate() + 2000L
val url = Url("$TEST_SERVER/cache/expires")
suspend fun getWithHeader(expires: String): String {
delayGMTDate(1)
return client.get(url) {
header("X-Expires", expires)
}.body()
}
val first = getWithHeader(now.toHttpDate())
val cache = publicStorage.findByUrl(url)
assertEquals(1, cache.size)
// this should be from the cache
val second = client.get(url).body<String>()
assertEquals(first, second)
delay(2500)
// now it should be already expired
val third = client.get(url).body<String>()
assertNotEquals(first, third)
// illegal values: broken, "0" and blank should be treated as already expired
// so shouldn't be cached
var previous = third
getWithHeader("broken-date").let { result ->
assertNotEquals(previous, result)
previous = result
}
getWithHeader("0").let { result ->
assertNotEquals(previous, result)
previous = result
}
getWithHeader(" ").let { result ->
assertNotEquals(previous, result)
previous = result
}
delayGMTDate(1)
val last = client.get(url).body<String>()
assertNotEquals(previous, last)
}
}
@Test
fun testPublicAndPrivateCache() = clientTests(except("native:*")) {
val publicStorage = HttpCacheStorage.Unlimited()
val privateStorage = HttpCacheStorage.Unlimited()
config {
install(HttpCache) {
this.publicStorage = publicStorage
this.privateStorage = privateStorage
}
}
test { client ->
val privateUrl = Url("$TEST_SERVER/cache/private")
val publicUrl = Url("$TEST_SERVER/cache/public")
fun publicCache() = publicStorage.findByUrl(publicUrl)
fun privateCache() = privateStorage.findByUrl(privateUrl)
val firstPrivate = client.get(privateUrl).body<String>()
assertEquals(firstPrivate, "private")
assertEquals(1, privateCache().size)
assertEquals(0, publicCache().size)
val privateCacheEntry = privateCache().first()
val firstPublic = client.get(publicUrl).body<String>()
assertEquals(firstPublic, "public")
assertEquals(1, publicCache().size)
assertEquals(1, privateCache().size)
val publicCacheEntry = publicCache().first()
val secondPrivate = client.get(privateUrl).body<String>()
assertEquals(secondPrivate, "private")
assertSame(privateCacheEntry, privateCache().first())
// Public from cache.
val secondPublic = client.get(publicUrl).body<String>()
assertEquals(secondPublic, "public")
assertEquals(1, privateCache().size)
assertEquals(1, publicCache().size)
assertSame(publicCacheEntry, publicCache().first())
}
}
@Test
fun testWithLogging() = clientTests {
config {
install(Logging) {
level = LogLevel.ALL
logger = Logger.EMPTY
}
install(HttpCache)
}
test { client ->
client.receivePipeline.intercept(HttpReceivePipeline.State) { response ->
val savedResponse = response.call.save().response
proceedWith(savedResponse)
}
val result = client.get("$TEST_SERVER/content/chunked-data?size=5000").bodyAsText()
assertEquals(5000, result.length)
}
}
/**
* Does delay and ensures that the [GMTDate] measurements report at least
* the specified number of [milliseconds].
* The reason why it's not the same is that on some platforms time granularity of GMTDate
* is not that high and could be even larger than a millisecond.
*/
private suspend fun delayGMTDate(milliseconds: Long) {
var delayValue = milliseconds
do {
val start = GMTDate()
delay(delayValue)
val end = GMTDate()
if (end > start + milliseconds) {
break
}
if (delayValue != 1L) {
delayValue = 1L
}
} while (true)
}
}
|
kotlin
|
github
|
https://github.com/ktorio/ktor
|
ktor-client/ktor-client-tests/common/test/io/ktor/client/tests/plugins/CacheLegacyStorageTest.kt
|
from helper import timestamp_to_datetime
class ApiModel(object):
@classmethod
def object_from_dictionary(cls, entry):
# make dict keys all strings
entry_str_dict = dict([(str(key), value) for key, value in entry.items()])
return cls(**entry_str_dict)
def __repr__(self):
return unicode(self).encode('utf8')
class Image(ApiModel):
def __init__(self, url, width, height):
self.url = url
self.height = height
self.width = width
def __unicode__(self):
return "Image: %s" % self.url
class Media(ApiModel):
def __init__(self, id=None, **kwargs):
self.id = id
for key, value in kwargs.iteritems():
setattr(self, key, value)
def get_standard_resolution_url(self):
return self.images['standard_resolution'].url
def __unicode__(self):
return "Media: %s" % self.id
@classmethod
def object_from_dictionary(cls, entry):
new_media = Media(id=entry['id'])
new_media.user = User.object_from_dictionary(entry['user'])
new_media.images = {}
for version, version_info in entry['images'].iteritems():
new_media.images[version] = Image.object_from_dictionary(version_info)
if 'user_has_liked' in entry:
new_media.user_has_liked = entry['user_has_liked']
new_media.like_count = entry['likes']['count']
new_media.likes = []
if 'data' in entry['likes']:
for like in entry['likes']['data']:
new_media.likes.append(User.object_from_dictionary(like))
new_media.comment_count = entry['comments']['count']
new_media.comments = []
for comment in entry['comments']['data']:
new_media.comments.append(Comment.object_from_dictionary(comment))
new_media.created_time = timestamp_to_datetime(entry['created_time'])
if entry['location'] and 'id' in entry:
new_media.location = Location.object_from_dictionary(entry['location'])
new_media.caption = None
if entry['caption']:
new_media.caption = Comment.object_from_dictionary(entry['caption'])
if entry['tags']:
new_media.tags = []
for tag in entry['tags']:
new_media.tags.append(Tag.object_from_dictionary({'name': tag}))
new_media.link = entry['link']
new_media.filter = entry.get('filter')
return new_media
class Tag(ApiModel):
def __init__(self, name, **kwargs):
self.name = name
for key, value in kwargs.iteritems():
setattr(self, key, value)
def __unicode__(self):
return "Tag: %s" % self.name
class Comment(ApiModel):
def __init__(self, *args, **kwargs):
for key, value in kwargs.iteritems():
setattr(self, key, value)
@classmethod
def object_from_dictionary(cls, entry):
user = User.object_from_dictionary(entry['from'])
text = entry['text']
created_at = timestamp_to_datetime(entry['created_time'])
id = entry['id']
return Comment(id=id, user=user, text=text, created_at=created_at)
def __unicode__(self):
return "Comment: %s said \"%s\"" % (self.user.username, self.text)
class Point(ApiModel):
def __init__(self, latitude, longitude):
self.latitude = latitude
self.longitude = longitude
def __unicode__(self):
return "Point: (%s, %s)" % (self.latitude, self.longitude)
class Location(ApiModel):
def __init__(self, id, *args, **kwargs):
self.id = id
for key, value in kwargs.iteritems():
setattr(self, key, value)
@classmethod
def object_from_dictionary(cls, entry):
point = None
if 'latitude' in entry:
point = Point(entry.get('latitude'),
entry.get('longitude'))
location = Location(entry.get('id', 0),
point=point,
name=entry.get('name', ''))
return location
def __unicode__(self):
return "Location: %s (%s)" % (self.id, self.point)
class User(ApiModel):
def __init__(self, id, *args, **kwargs):
self.id = id
for key, value in kwargs.iteritems():
setattr(self, key, value)
def __unicode__(self):
return "User: %s" % self.username
class Relationship(ApiModel):
def __init__(self, incoming_status="none", outgoing_status="none", target_user_is_private=False):
self.incoming_status = incoming_status
self.outgoing_status = outgoing_status
self.target_user_is_private = target_user_is_private
def __unicode__(self):
follows = False if self.outgoing_status == 'none' else True
followed = False if self.incoming_status == 'none' else True
return "Relationship: (Follows: %s, Followed by: %s)" % (follows, followed)
|
unknown
|
codeparrot/codeparrot-clean
| ||
- Feature Name: segmented_storage
- Status: rejected
- Start Date: 2015-07-29
- RFC PR: [#1866](https://github.com/cockroachdb/cockroach/pull/1866)
- Cockroach Issue: [#1644](https://github.com/cockroachdb/cockroach/issues/1644)
# Rejection notes
This proposal was deemed too complex and expensive for the problem it
solves. Instead, we will drop snapshots whose application would create
a conflict in the `replicasByKey` map. This avoids the race conditions
in issue #1644, but leaves the range in an uninitialized and unusable
state. In the common case, this state will resolve quickly, and in the
uncommon case when it persists, we simply rely on the usual repair and
recovery process to move the replica to a new node.
# Summary
Partition the RocksDB keyspace into segments so that replicas created
by raft replication do not share physical storage with replicas
created by splits.
# Motivation
Currently, keys in the distributed sorted map correspond more or less
directly to keys in RocksDB. This makes splits and merges cheap
(since the bulk of the data does not need to be moved to a new
location), but it introduces ambiguity since the same RocksDB key
may be owned by different ranges at different times.
For a concrete example of the problems this can cause (discussed more
fully in
[#1644](https://github.com/cockroachdb/cockroach/issues/1644)),
consider a node `N3` which is temporarily down while a range `R1` is
split (creating `R2`). When the range comes back up, the leaders of
both `R1` and `R2` (`N1` and `N2` respectively) will try to bring it
up to date. If `R2` acts first, it will see that `N3` doesn't have any
knowledge of `R2` and so it sends a snapshot. The snapshot will replace
data in `R2`'s keyspace, which `N3`'s replica of `R1` still covers.
`N3` cannot correctly process any messages relating to `R2` until `R1`
has caught up to the point of the split.
# Detailed design
## Segment IDs
Each replica is associated with a **segment ID**. When a replica is
created in response to a raft message, it gets a newly-generated
segment ID. When a replica is created as a part of `splitTrigger`, it
shares the parent replica's segment ID. Segment IDs are unique per
store and are generated from a store-local counter. They are generally
not sent over the wire (except perhaps for debugging info); all
awareness of segment IDs is contained in the storage package.
## Key encoding
We introduce a new level of key encoding at the storage level.
For clarity, the existing `roachpb.EncodedKey` type will be renamed to
`roachpb.MVCCKey`, and the three levels of encoding will be as follows:
* `Key`: a raw key in the monolithic sorted map.
* `StorageKey`: a `Key` prefixed with a segment ID.
* `MVCCKey`: a `StorageKey` suffixed with a timestamp.
The functions in `storage/engine` will take `StorageKeys` as input and
use `MVCCKeys` internally. All code outside the `storage` package will
continue to use raw `Keys`, and even inside the `storage` package
conversion to `StorageKey` will usually be done immediately before a
call to an MVCC function.
The actual encoding will use fixed-width big-endian integers, similar
to the encoding of the timestamp in MVCCKey. Thus a fully-encoded key
is:
```
+-----------------------------------------------+
| roachpb.MVCCKey |
+-----------------------+
| roachpb.StorageKey |
+------------+
| roachpb.Key |
Segment ID | Raw key | Wall time | Logical TS |
4 bytes | (variable) | 8 bytes | 4 bytes |
```
All keys not associated with a replica (including the counter used to
generate segment IDs) will use segment ID 0.
## Splitting and snapshotting
Ranges can be created in two ways (ignoring the initial bootstrapping
of the first range): an existing range splits into a new range on the
same store, or a raft leader sends a snapshot to a store that should
have a replica of the same range but doesn't.
Each replica-creation path will need to consider whether the replica
has already been created via the other path (comparing replica IDs,
not just range IDs). In `splitTrigger`, if the replica already exists
under a different segment, then a snapshot occurred before the split.
The left-hand range should delete all data that are outside the bounds
established by the split. In the `ApplySnapshot` path, a new segment
will need to be created only if the replica has not already been
assigned a segment.
TODO(bdarnell): `ApplySnapshot` happens in the `Store`'s raft
goroutine, but raft may call other (read-only) methods on its own
goroutine. I think this is safe (raft already has to handle the data
changing out from under it in other ways), but we should double-check
that raft behaves sanely in this case.
# Drawbacks
* Adding a segment ID to every key is a non-trivial storage cost.
* Merges will require copying the entire data of at least one range to
put them into the same segment.
# Alternatives
* An earlier version of this proposal did not reuse segment IDs on
splits, so splits required copying the new range's data to a new
segment (segments were also identified by a (range ID, replica ID)
tuple instead of a separate ID).
# Unresolved questions
* Whenever a split and snapshot race, we are wasting work, since the
snapshot will be ignored if the split completes while the snapshot
is in flight. It's probably worthwhile to prevent or delay sending
snapshots when an in-progress split should be able to accomplish the
same thing more cheaply. This is less of an issue currently as new
ranges are started in a leaderless state and so no snapshots will be
sent until a round of elections, but we intend to kick-start
elections in this case to minimize unavailability so we will need to
be mindful of the cost of premature snapshots.
|
unknown
|
github
|
https://github.com/cockroachdb/cockroach
|
docs/RFCS/20150729_segmented_storage.md
|
#!/usr/bin/python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import node
# Node IDs for the three-device test topology: LEADER <-> REED <-> ED.
LEADER = 1  # Thread Leader
REED = 2    # Router-Eligible End Device (starts as a child)
ED = 3      # End Device attaching through the REED
class Cert_6_1_2_REEDAttach(unittest.TestCase):
    """Thread certification test 6.1.2: ED attach through a REED.

    Topology: LEADER <-> REED <-> ED (whitelists restrict links).
    The REED's router-upgrade threshold is set to 0 so it stays a child
    until the ED attaches through it, which must force the REED to
    upgrade itself to a router.
    """

    def setUp(self):
        """Create the three simulated nodes and wire up their whitelists."""
        self.nodes = {}
        for i in range(1, 4):
            self.nodes[i] = node.Node(i)

        self.nodes[LEADER].set_panid(0xface)
        self.nodes[LEADER].set_mode('rsdn')
        self.nodes[LEADER].add_whitelist(self.nodes[REED].get_addr64())
        self.nodes[LEADER].enable_whitelist()

        self.nodes[REED].set_panid(0xface)
        self.nodes[REED].set_mode('rsdn')
        self.nodes[REED].add_whitelist(self.nodes[LEADER].get_addr64())
        self.nodes[REED].add_whitelist(self.nodes[ED].get_addr64())
        self.nodes[REED].enable_whitelist()
        # Threshold 0 keeps the REED from upgrading until it has a child.
        self.nodes[REED].set_router_upgrade_threshold(0)

        self.nodes[ED].set_panid(0xface)
        self.nodes[ED].set_mode('rsn')
        self.nodes[ED].add_whitelist(self.nodes[REED].get_addr64())
        self.nodes[ED].enable_whitelist()

    def tearDown(self):
        """Stop every simulated node and release references."""
        # Loop variable renamed from `node` to `n`: the original shadowed
        # the imported `node` module inside this method.
        for n in list(self.nodes.values()):
            n.stop()
        del self.nodes

    def test(self):
        """Leader forms the network; ED attach forces REED -> router."""
        self.nodes[LEADER].start()
        self.nodes[LEADER].set_state('leader')
        self.assertEqual(self.nodes[LEADER].get_state(), 'leader')

        self.nodes[REED].start()
        time.sleep(5)
        self.assertEqual(self.nodes[REED].get_state(), 'child')

        self.nodes[ED].start()
        time.sleep(5)
        self.assertEqual(self.nodes[ED].get_state(), 'child')
        # Gaining a child must have upgraded the REED to a router.
        self.assertEqual(self.nodes[REED].get_state(), 'router')


if __name__ == '__main__':
    unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
"""
This module provides an interface to the Elastic Compute Cloud (EC2)
load balancing service from AWS.
"""
from boto.connection import AWSQueryConnection
from boto.ec2.instanceinfo import InstanceInfo
from boto.ec2.elb.loadbalancer import LoadBalancer, LoadBalancerZones
from boto.ec2.elb.instancestate import InstanceState
from boto.ec2.elb.healthcheck import HealthCheck
from boto.ec2.elb.listelement import ListElement
from boto.regioninfo import RegionInfo
import boto
# Mapping of AWS region name -> Elastic Load Balancing API endpoint host.
RegionData = {
    'us-east-1': 'elasticloadbalancing.us-east-1.amazonaws.com',
    'us-gov-west-1': 'elasticloadbalancing.us-gov-west-1.amazonaws.com',
    'us-west-1': 'elasticloadbalancing.us-west-1.amazonaws.com',
    'us-west-2': 'elasticloadbalancing.us-west-2.amazonaws.com',
    'sa-east-1': 'elasticloadbalancing.sa-east-1.amazonaws.com',
    'eu-west-1': 'elasticloadbalancing.eu-west-1.amazonaws.com',
    'ap-northeast-1': 'elasticloadbalancing.ap-northeast-1.amazonaws.com',
    'ap-southeast-1': 'elasticloadbalancing.ap-southeast-1.amazonaws.com',
    'ap-southeast-2': 'elasticloadbalancing.ap-southeast-2.amazonaws.com',
}
def regions():
    """
    Get all available regions for the ELB service.

    :rtype: list
    :return: A list of :class:`boto.RegionInfo` instances
    """
    # Build one RegionInfo per known endpoint, all bound to ELBConnection.
    return [RegionInfo(name=region_name,
                       endpoint=endpoint,
                       connection_cls=ELBConnection)
            for region_name, endpoint in RegionData.items()]
def connect_to_region(region_name, **kw_params):
    """
    Given a valid region name, return a
    :class:`boto.ec2.elb.ELBConnection`.

    :param str region_name: The name of the region to connect to.

    :rtype: :class:`boto.ec2.ELBConnection` or ``None``
    :return: A connection to the given region, or None if an invalid region
             name is given
    """
    matches = [r for r in regions() if r.name == region_name]
    if matches:
        return matches[0].connect(**kw_params)
    return None
class ELBConnection(AWSQueryConnection):
    """Connection to the Elastic Load Balancing (ELB) query API.

    Wraps the ELB actions (create/delete load balancers and listeners,
    register instances, manage zones, subnets, security groups, health
    checks and policies) over boto's AWSQueryConnection.
    """

    APIVersion = boto.config.get('Boto', 'elb_version', '2012-06-01')
    DefaultRegionName = boto.config.get('Boto', 'elb_region_name', 'us-east-1')
    DefaultRegionEndpoint = boto.config.get('Boto', 'elb_region_endpoint',
                                            'elasticloadbalancing.us-east-1.amazonaws.com')

    def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
                 is_secure=True, port=None, proxy=None, proxy_port=None,
                 proxy_user=None, proxy_pass=None, debug=0,
                 https_connection_factory=None, region=None, path='/',
                 security_token=None, validate_certs=True):
        """
        Init method to create a new connection to EC2 Load Balancing Service.

        .. note:: The region argument is overridden by the region specified in
            the boto configuration file.
        """
        if not region:
            region = RegionInfo(self, self.DefaultRegionName,
                                self.DefaultRegionEndpoint)
        self.region = region
        AWSQueryConnection.__init__(self, aws_access_key_id,
                                    aws_secret_access_key,
                                    is_secure, port, proxy, proxy_port,
                                    proxy_user, proxy_pass,
                                    self.region.endpoint, debug,
                                    https_connection_factory, path,
                                    security_token,
                                    validate_certs=validate_certs)

    def _required_auth_capability(self):
        # ELB requests are signed with the standard EC2 signing scheme.
        return ['ec2']

    def build_list_params(self, params, items, label):
        """Expand ``items`` into numbered query parameters.

        ``label`` must contain a ``%d`` placeholder; members are numbered
        starting at 1, per the AWS query-API convention.
        """
        if isinstance(items, str):
            items = [items]
        for index, item in enumerate(items):
            params[label % (index + 1)] = item

    def get_all_load_balancers(self, load_balancer_names=None):
        """
        Retrieve all load balancers associated with your account.

        :type load_balancer_names: list
        :keyword load_balancer_names: An optional list of load balancer names.

        :rtype: :py:class:`boto.resultset.ResultSet`
        :return: A ResultSet containing instances of
            :class:`boto.ec2.elb.loadbalancer.LoadBalancer`
        """
        params = {}
        if load_balancer_names:
            self.build_list_params(params, load_balancer_names,
                                   'LoadBalancerNames.member.%d')
        return self.get_list('DescribeLoadBalancers', params,
                             [('member', LoadBalancer)])

    def create_load_balancer(self, name, zones, listeners=None, subnets=None,
                             security_groups=None, scheme='internet-facing',
                             complex_listeners=None):
        """
        Create a new load balancer for your account. By default the load
        balancer will be created in EC2. To create a load balancer inside a
        VPC, parameter zones must be set to None and subnets must not be None.
        The load balancer will be automatically created under the VPC that
        contains the subnet(s) specified.

        :type name: string
        :param name: The mnemonic name associated with the new load balancer

        :type zones: List of strings
        :param zones: The names of the availability zone(s) to add.

        :type listeners: List of tuples
        :param listeners: Each tuple contains three or four values,
            (LoadBalancerPortNumber, InstancePortNumber, Protocol,
            [SSLCertificateId]) where LoadBalancerPortNumber and
            InstancePortNumber are integer values between 1 and 65535,
            Protocol is a string containing either 'TCP', 'SSL', 'HTTP', or
            'HTTPS'; SSLCertificateID is the ARN of a AWS AIM
            certificate, and must be specified when doing HTTPS.

        :type subnets: list of strings
        :param subnets: A list of subnet IDs in your VPC to attach to
            your LoadBalancer.

        :type security_groups: list of strings
        :param security_groups: The security groups assigned to your
            LoadBalancer within your VPC.

        :type scheme: string
        :param scheme: The type of a LoadBalancer. By default, Elastic
            Load Balancing creates an internet-facing LoadBalancer with
            a publicly resolvable DNS name, which resolves to public IP
            addresses.

            Specify the value internal for this option to create an
            internal LoadBalancer with a DNS name that resolves to
            private IP addresses.

            This option is only available for LoadBalancers attached
            to an Amazon VPC.

        :type complex_listeners: List of tuples
        :param complex_listeners: Each tuple contains four or five values,
            (LoadBalancerPortNumber, InstancePortNumber, Protocol,
            InstanceProtocol, SSLCertificateId).

            Where;

            - LoadBalancerPortNumber and InstancePortNumber are integer
              values between 1 and 65535.
            - Protocol and InstanceProtocol is a string containing
              either 'TCP', 'SSL', 'HTTP', or 'HTTPS'
            - SSLCertificateId is the ARN of an SSL certificate loaded into
              AWS IAM

        :rtype: :class:`boto.ec2.elb.loadbalancer.LoadBalancer`
        :return: The newly created
            :class:`boto.ec2.elb.loadbalancer.LoadBalancer`
        """
        if not listeners and not complex_listeners:
            # Must specify one of the two options
            return None
        params = {'LoadBalancerName': name,
                  'Scheme': scheme}
        # Handle legacy listeners
        if listeners:
            for index, listener in enumerate(listeners):
                i = index + 1
                protocol = listener[2].upper()
                params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
                params['Listeners.member.%d.InstancePort' % i] = listener[1]
                params['Listeners.member.%d.Protocol' % i] = listener[2]
                # Secure listeners carry the certificate ARN as a 4th element.
                if protocol == 'HTTPS' or protocol == 'SSL':
                    params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]
        # Handle the full listeners
        if complex_listeners:
            for index, listener in enumerate(complex_listeners):
                i = index + 1
                protocol = listener[2].upper()
                params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
                params['Listeners.member.%d.InstancePort' % i] = listener[1]
                params['Listeners.member.%d.Protocol' % i] = listener[2]
                params['Listeners.member.%d.InstanceProtocol' % i] = listener[3]
                # Secure listeners carry the certificate ARN as a 5th element.
                if protocol == 'HTTPS' or protocol == 'SSL':
                    params['Listeners.member.%d.SSLCertificateId' % i] = listener[4]
        if zones:
            self.build_list_params(params, zones, 'AvailabilityZones.member.%d')
        if subnets:
            self.build_list_params(params, subnets, 'Subnets.member.%d')
        if security_groups:
            self.build_list_params(params, security_groups,
                                   'SecurityGroups.member.%d')
        load_balancer = self.get_object('CreateLoadBalancer',
                                        params, LoadBalancer)
        # The response only contains the DNS name; mirror the request
        # arguments onto the returned object for caller convenience.
        load_balancer.name = name
        load_balancer.listeners = listeners
        load_balancer.availability_zones = zones
        load_balancer.subnets = subnets
        load_balancer.security_groups = security_groups
        return load_balancer

    def create_load_balancer_listeners(self, name, listeners=None,
                                       complex_listeners=None):
        """
        Creates a Listener (or group of listeners) for an existing
        Load Balancer

        :type name: string
        :param name: The name of the load balancer to create the listeners for

        :type listeners: List of tuples
        :param listeners: Each tuple contains three or four values,
            (LoadBalancerPortNumber, InstancePortNumber, Protocol,
            [SSLCertificateId]) where LoadBalancerPortNumber and
            InstancePortNumber are integer values between 1 and 65535,
            Protocol is a string containing either 'TCP', 'SSL', 'HTTP', or
            'HTTPS'; SSLCertificateID is the ARN of a AWS AIM
            certificate, and must be specified when doing HTTPS.

        :type complex_listeners: List of tuples
        :param complex_listeners: Each tuple contains four or five values,
            (LoadBalancerPortNumber, InstancePortNumber, Protocol,
            InstanceProtocol, SSLCertificateId).

            Where;

            - LoadBalancerPortNumber and InstancePortNumber are integer
              values between 1 and 65535.
            - Protocol and InstanceProtocol is a string containing
              either 'TCP', 'SSL', 'HTTP', or 'HTTPS'
            - SSLCertificateId is the ARN of an SSL certificate loaded into
              AWS IAM

        :return: The status of the request
        """
        if not listeners and not complex_listeners:
            # Must specify one of the two options
            return None
        params = {'LoadBalancerName': name}
        # Handle the simple listeners
        if listeners:
            for index, listener in enumerate(listeners):
                i = index + 1
                protocol = listener[2].upper()
                params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
                params['Listeners.member.%d.InstancePort' % i] = listener[1]
                params['Listeners.member.%d.Protocol' % i] = listener[2]
                if protocol == 'HTTPS' or protocol == 'SSL':
                    params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]
        # Handle the full listeners
        if complex_listeners:
            for index, listener in enumerate(complex_listeners):
                i = index + 1
                protocol = listener[2].upper()
                params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
                params['Listeners.member.%d.InstancePort' % i] = listener[1]
                params['Listeners.member.%d.Protocol' % i] = listener[2]
                params['Listeners.member.%d.InstanceProtocol' % i] = listener[3]
                if protocol == 'HTTPS' or protocol == 'SSL':
                    params['Listeners.member.%d.SSLCertificateId' % i] = listener[4]
        return self.get_status('CreateLoadBalancerListeners', params)

    def delete_load_balancer(self, name):
        """
        Delete a Load Balancer from your account.

        :type name: string
        :param name: The name of the Load Balancer to delete
        """
        params = {'LoadBalancerName': name}
        return self.get_status('DeleteLoadBalancer', params)

    def delete_load_balancer_listeners(self, name, ports):
        """
        Deletes a load balancer listener (or group of listeners)

        :type name: string
        :param name: The name of the load balancer to create the listeners for

        :type ports: List int
        :param ports: Each int represents the port on the ELB to be removed

        :return: The status of the request
        """
        params = {'LoadBalancerName': name}
        for index, port in enumerate(ports):
            params['LoadBalancerPorts.member.%d' % (index + 1)] = port
        return self.get_status('DeleteLoadBalancerListeners', params)

    def enable_availability_zones(self, load_balancer_name, zones_to_add):
        """
        Add availability zones to an existing Load Balancer
        All zones must be in the same region as the Load Balancer
        Adding zones that are already registered with the Load Balancer
        has no effect.

        :type load_balancer_name: string
        :param load_balancer_name: The name of the Load Balancer

        :type zones: List of strings
        :param zones: The name of the zone(s) to add.

        :rtype: List of strings
        :return: An updated list of zones for this Load Balancer.
        """
        params = {'LoadBalancerName': load_balancer_name}
        self.build_list_params(params, zones_to_add,
                               'AvailabilityZones.member.%d')
        obj = self.get_object('EnableAvailabilityZonesForLoadBalancer',
                              params, LoadBalancerZones)
        return obj.zones

    def disable_availability_zones(self, load_balancer_name, zones_to_remove):
        """
        Remove availability zones from an existing Load Balancer.
        All zones must be in the same region as the Load Balancer.
        Removing zones that are not registered with the Load Balancer
        has no effect.
        You cannot remove all zones from an Load Balancer.

        :type load_balancer_name: string
        :param load_balancer_name: The name of the Load Balancer

        :type zones: List of strings
        :param zones: The name of the zone(s) to remove.

        :rtype: List of strings
        :return: An updated list of zones for this Load Balancer.
        """
        params = {'LoadBalancerName': load_balancer_name}
        self.build_list_params(params, zones_to_remove,
                               'AvailabilityZones.member.%d')
        obj = self.get_object('DisableAvailabilityZonesForLoadBalancer',
                              params, LoadBalancerZones)
        return obj.zones

    def register_instances(self, load_balancer_name, instances):
        """
        Add new Instances to an existing Load Balancer.

        :type load_balancer_name: string
        :param load_balancer_name: The name of the Load Balancer

        :type instances: List of strings
        :param instances: The instance ID's of the EC2 instances to add.

        :rtype: List of strings
        :return: An updated list of instances for this Load Balancer.
        """
        params = {'LoadBalancerName': load_balancer_name}
        self.build_list_params(params, instances,
                               'Instances.member.%d.InstanceId')
        return self.get_list('RegisterInstancesWithLoadBalancer',
                             params, [('member', InstanceInfo)])

    def deregister_instances(self, load_balancer_name, instances):
        """
        Remove Instances from an existing Load Balancer.

        :type load_balancer_name: string
        :param load_balancer_name: The name of the Load Balancer

        :type instances: List of strings
        :param instances: The instance ID's of the EC2 instances to remove.

        :rtype: List of strings
        :return: An updated list of instances for this Load Balancer.
        """
        params = {'LoadBalancerName': load_balancer_name}
        self.build_list_params(params, instances,
                               'Instances.member.%d.InstanceId')
        return self.get_list('DeregisterInstancesFromLoadBalancer',
                             params, [('member', InstanceInfo)])

    def describe_instance_health(self, load_balancer_name, instances=None):
        """
        Get current state of all Instances registered to an Load Balancer.

        :type load_balancer_name: string
        :param load_balancer_name: The name of the Load Balancer

        :type instances: List of strings
        :param instances: The instance ID's of the EC2 instances
            to return status for. If not provided,
            the state of all instances will be returned.

        :rtype: List of :class:`boto.ec2.elb.instancestate.InstanceState`
        :return: list of state info for instances in this Load Balancer.
        """
        params = {'LoadBalancerName': load_balancer_name}
        if instances:
            self.build_list_params(params, instances,
                                   'Instances.member.%d.InstanceId')
        return self.get_list('DescribeInstanceHealth', params,
                             [('member', InstanceState)])

    def configure_health_check(self, name, health_check):
        """
        Define a health check for the EndPoints.

        :type name: string
        :param name: The mnemonic name associated with the load balancer

        :type health_check: :class:`boto.ec2.elb.healthcheck.HealthCheck`
        :param health_check: A HealthCheck object populated with the desired
            values.

        :rtype: :class:`boto.ec2.elb.healthcheck.HealthCheck`
        :return: The updated :class:`boto.ec2.elb.healthcheck.HealthCheck`
        """
        params = {'LoadBalancerName': name,
                  'HealthCheck.Timeout': health_check.timeout,
                  'HealthCheck.Target': health_check.target,
                  'HealthCheck.Interval': health_check.interval,
                  'HealthCheck.UnhealthyThreshold': health_check.unhealthy_threshold,
                  'HealthCheck.HealthyThreshold': health_check.healthy_threshold}
        return self.get_object('ConfigureHealthCheck', params, HealthCheck)

    def set_lb_listener_SSL_certificate(self, lb_name, lb_port,
                                        ssl_certificate_id):
        """
        Sets the certificate that terminates the specified listener's SSL
        connections. The specified certificate replaces any prior certificate
        that was used on the same LoadBalancer and port.
        """
        params = {'LoadBalancerName': lb_name,
                  'LoadBalancerPort': lb_port,
                  'SSLCertificateId': ssl_certificate_id}
        return self.get_status('SetLoadBalancerListenerSSLCertificate', params)

    def create_app_cookie_stickiness_policy(self, name, lb_name, policy_name):
        """
        Generates a stickiness policy with sticky session lifetimes that follow
        that of an application-generated cookie. This policy can only be
        associated with HTTP listeners.

        This policy is similar to the policy created by
        CreateLBCookieStickinessPolicy, except that the lifetime of the special
        Elastic Load Balancing cookie follows the lifetime of the
        application-generated cookie specified in the policy configuration. The
        load balancer only inserts a new stickiness cookie when the application
        response includes a new application cookie.

        If the application cookie is explicitly removed or expires, the session
        stops being sticky until a new application cookie is issued.
        """
        params = {'CookieName': name,
                  'LoadBalancerName': lb_name,
                  'PolicyName': policy_name}
        return self.get_status('CreateAppCookieStickinessPolicy', params)

    def create_lb_cookie_stickiness_policy(self, cookie_expiration_period,
                                           lb_name, policy_name):
        """
        Generates a stickiness policy with sticky session lifetimes controlled
        by the lifetime of the browser (user-agent) or a specified expiration
        period. This policy can only be associated only with HTTP listeners.

        When a load balancer implements this policy, the load balancer uses a
        special cookie to track the backend server instance for each request.
        When the load balancer receives a request, it first checks to see if
        this cookie is present in the request. If so, the load balancer sends
        the request to the application server specified in the cookie. If not,
        the load balancer sends the request to a server that is chosen based on
        the existing load balancing algorithm.

        A cookie is inserted into the response for binding subsequent requests
        from the same user to that server. The validity of the cookie is based
        on the cookie expiration time, which is specified in the policy
        configuration.

        None may be passed for cookie_expiration_period.
        """
        params = {'LoadBalancerName': lb_name,
                  'PolicyName': policy_name}
        # None means "browser-session lifetime"; omit the parameter entirely.
        if cookie_expiration_period is not None:
            params['CookieExpirationPeriod'] = cookie_expiration_period
        return self.get_status('CreateLBCookieStickinessPolicy', params)

    def create_lb_policy(self, lb_name, policy_name, policy_type,
                         policy_attributes):
        """
        Creates a new policy that contains the necessary attributes depending
        on the policy type. Policies are settings that are saved for your load
        balancer and that can be applied to the front-end listener, or
        the back-end application server.
        """
        params = {'LoadBalancerName': lb_name,
                  'PolicyName': policy_name,
                  'PolicyTypeName': policy_type}
        # BUGFIX: the original attached this else to the for-loop, so the
        # (always-executed) for-else unconditionally added an empty
        # 'PolicyAttributes' parameter even when attributes were supplied.
        if policy_attributes:
            for index, (name, value) in enumerate(policy_attributes.items(), 1):
                params['PolicyAttributes.member.%d.AttributeName' % index] = name
                params['PolicyAttributes.member.%d.AttributeValue' % index] = value
        else:
            # The API still requires an (empty) PolicyAttributes element.
            params['PolicyAttributes'] = ''
        return self.get_status('CreateLoadBalancerPolicy', params)

    def delete_lb_policy(self, lb_name, policy_name):
        """
        Deletes a policy from the LoadBalancer. The specified policy must not
        be enabled for any listeners.
        """
        params = {'LoadBalancerName': lb_name,
                  'PolicyName': policy_name}
        return self.get_status('DeleteLoadBalancerPolicy', params)

    def set_lb_policies_of_listener(self, lb_name, lb_port, policies):
        """
        Associates, updates, or disables a policy with a listener on the load
        balancer. Currently only zero (0) or one (1) policy can be associated
        with a listener.
        """
        params = {'LoadBalancerName': lb_name,
                  'LoadBalancerPort': lb_port}
        self.build_list_params(params, policies, 'PolicyNames.member.%d')
        return self.get_status('SetLoadBalancerPoliciesOfListener', params)

    def set_lb_policies_of_backend_server(self, lb_name, instance_port,
                                          policies):
        """
        Replaces the current set of policies associated with a port on which
        the back-end server is listening with a new set of policies.
        """
        params = {'LoadBalancerName': lb_name,
                  'InstancePort': instance_port}
        if policies:
            self.build_list_params(params, policies, 'PolicyNames.member.%d')
        else:
            # An empty PolicyNames element disables all policies on the port.
            params['PolicyNames'] = ''
        return self.get_status('SetLoadBalancerPoliciesForBackendServer',
                               params)

    def apply_security_groups_to_lb(self, name, security_groups):
        """
        Applies security groups to the load balancer.
        Applying security groups that are already registered with the
        Load Balancer has no effect.

        :type name: string
        :param name: The name of the Load Balancer

        :type security_groups: List of strings
        :param security_groups: The name of the security group(s) to add.

        :rtype: List of strings
        :return: An updated list of security groups for this Load Balancer.
        """
        params = {'LoadBalancerName': name}
        self.build_list_params(params, security_groups,
                               'SecurityGroups.member.%d')
        return self.get_list('ApplySecurityGroupsToLoadBalancer',
                             params, None)

    def attach_lb_to_subnets(self, name, subnets):
        """
        Attaches load balancer to one or more subnets.
        Attaching subnets that are already registered with the
        Load Balancer has no effect.

        :type name: string
        :param name: The name of the Load Balancer

        :type subnets: List of strings
        :param subnets: The name of the subnet(s) to add.

        :rtype: List of strings
        :return: An updated list of subnets for this Load Balancer.
        """
        params = {'LoadBalancerName': name}
        self.build_list_params(params, subnets,
                               'Subnets.member.%d')
        return self.get_list('AttachLoadBalancerToSubnets',
                             params, None)

    def detach_lb_from_subnets(self, name, subnets):
        """
        Detaches load balancer from one or more subnets.

        :type name: string
        :param name: The name of the Load Balancer

        :type subnets: List of strings
        :param subnets: The name of the subnet(s) to detach.

        :rtype: List of strings
        :return: An updated list of subnets for this Load Balancer.
        """
        params = {'LoadBalancerName': name}
        self.build_list_params(params, subnets,
                               'Subnets.member.%d')
        return self.get_list('DetachLoadBalancerFromSubnets',
                             params, None)
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Lmfit provides a high-level interface to non-linear optimization and curve
fitting problems for Python. Lmfit builds on Levenberg-Marquardt algorithm of
scipy.optimize.leastsq(), but also supports most of the optimization methods
from scipy.optimize. It has a number of useful enhancements, including:
* Using Parameter objects instead of plain floats as variables. A Parameter
has a value that can be varied in the fit, fixed, have upper and/or lower
bounds. It can even have a value that is constrained by an algebraic
expression of other Parameter values.
* Ease of changing fitting algorithms. Once a fitting model is set up, one
can change the fitting algorithm without changing the objective function.
* Improved estimation of confidence intervals. While
scipy.optimize.leastsq() will automatically calculate uncertainties and
correlations from the covariance matrix, lmfit also has functions to
explicitly explore parameter space to determine confidence levels even for
the most difficult cases.
* Improved curve-fitting with the Model class. This which extends the
capabilities of scipy.optimize.curve_fit(), allowing you to turn a function
that models for your data into a python class that helps you parametrize
and fit data with that model.
* Many pre-built models for common lineshapes are included and ready to use.
version: 0.8.0
last update: 2014-Sep-21
License: MIT
Authors: Matthew Newville, The University of Chicago
Till Stensitzki, Freie Universitat Berlin
Daniel B. Allen, Johns Hopkins University
Antonino Ingargiola, University of California, Los Angeles
"""
from .minimizer import minimize, Minimizer, MinimizerException
from .parameter import Parameter, Parameters
from .confidence import conf_interval, conf_interval2d
from .printfuncs import (fit_report, ci_report,
report_fit, report_ci, report_errors)
from .model import Model, CompositeModel
from . import models
from . import uncertainties
from .uncertainties import ufloat, correlated_values
## versioneer code
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Locked file interface that should work on Unix and Windows pythons.
This module first tries to use fcntl locking to ensure serialized access
to a file, then falls back on a lock file if that is unavailable.
Usage:
f = LockedFile('filename', 'r+b', 'rb')
f.open_and_lock()
if f.is_locked():
print 'Acquired filename with r+b mode'
f.file_handle().write('locked data')
else:
    print 'Acquired filename with rb mode'
f.unlock_and_close()
"""
__author__ = 'cache@google.com (David T McWherter)'
import errno
import logging
import os
import time
from oauth2client import util
logger = logging.getLogger(__name__)
class CredentialsFileSymbolicLinkError(Exception):
  """Raised when a credentials file turns out to be a symbolic link.

  Symlinked credentials files are rejected because following the link
  could read or clobber data outside the expected location.
  """
class AlreadyLockedException(Exception):
  """Raised when locking a file this LockedFile has already locked."""
  pass
def validate_file(filename):
  # Credentials files must be regular files; refuse to touch symlinks.
  if not os.path.islink(filename):
    return
  raise CredentialsFileSymbolicLinkError(
      'File: %s is a symbolic link.' % filename)
class _Opener(object):
"""Base class for different locking primitives."""
def __init__(self, filename, mode, fallback_mode):
"""Create an Opener.
Args:
filename: string, The pathname of the file.
mode: string, The preferred mode to access the file with.
fallback_mode: string, The mode to use if locking fails.
"""
self._locked = False
self._filename = filename
self._mode = mode
self._fallback_mode = fallback_mode
self._fh = None
def is_locked(self):
"""Was the file locked."""
return self._locked
def file_handle(self):
"""The file handle to the file. Valid only after opened."""
return self._fh
def filename(self):
"""The filename that is being locked."""
return self._filename
def open_and_lock(self, timeout, delay):
"""Open the file and lock it.
Args:
timeout: float, How long to try to lock for.
delay: float, How long to wait between retries.
"""
pass
def unlock_and_close(self):
"""Unlock and close the file."""
pass
# NOTE(review): this class uses Python 2 `except X, e` syntax; the whole
# module predates Python 3.
class _PosixOpener(_Opener):
  """Lock files using Posix advisory lock files."""

  def open_and_lock(self, timeout, delay):
    """Open the file and lock it.

    Tries to create a .lock file next to the file we're trying to open.

    Args:
      timeout: float, How long to try to lock for.
      delay: float, How long to wait between retries.

    Raises:
      AlreadyLockedException: if the lock is already acquired.
      IOError: if the open fails.
      CredentialsFileSymbolicLinkError if the file is a symbolic link.
    """
    if self._locked:
      raise AlreadyLockedException('File %s is already locked' %
                                   self._filename)
    self._locked = False
    validate_file(self._filename)
    try:
      self._fh = open(self._filename, self._mode)
    except IOError, e:
      # If we can't access with _mode, try _fallback_mode and don't lock.
      if e.errno == errno.EACCES:
        self._fh = open(self._filename, self._fallback_mode)
        return
    lock_filename = self._posix_lockfile(self._filename)
    start_time = time.time()
    while True:
      try:
        # O_CREAT|O_EXCL makes the lock-file creation atomic: exactly one
        # process can succeed, which is what makes this a lock.
        self._lock_fd = os.open(lock_filename,
                                os.O_CREAT|os.O_EXCL|os.O_RDWR)
        self._locked = True
        break
      except OSError, e:
        # EEXIST means another process holds the lock; anything else is
        # a real error and is re-raised.
        if e.errno != errno.EEXIST:
          raise
        if (time.time() - start_time) >= timeout:
          logger.warn('Could not acquire lock %s in %s seconds' % (
              lock_filename, timeout))
          # Close the file and open in fallback_mode.
          if self._fh:
            self._fh.close()
          self._fh = open(self._filename, self._fallback_mode)
          return
        time.sleep(delay)

  def unlock_and_close(self):
    """Unlock a file by removing the .lock file, and close the handle."""
    if self._locked:
      lock_filename = self._posix_lockfile(self._filename)
      os.close(self._lock_fd)
      os.unlink(lock_filename)
      self._locked = False
      self._lock_fd = None
    if self._fh:
      self._fh.close()

  def _posix_lockfile(self, filename):
    """The name of the lock file to use for posix locking."""
    return '%s.lock' % filename
# fcntl only exists on POSIX platforms; when the import fails we publish
# _FcntlOpener = None so callers fall back to another opener.
try:
  import fcntl

  class _FcntlOpener(_Opener):
    """Open, lock, and unlock a file using fcntl.lockf."""

    def open_and_lock(self, timeout, delay):
      """Open the file and lock it.

      Args:
        timeout: float, How long to try to lock for.
        delay: float, How long to wait between retries

      Raises:
        AlreadyLockedException: if the lock is already acquired.
        IOError: if the open fails.
        CredentialsFileSymbolicLinkError if the file is a symbolic link.
      """
      if self._locked:
        raise AlreadyLockedException('File %s is already locked' %
                                     self._filename)
      start_time = time.time()
      validate_file(self._filename)
      try:
        self._fh = open(self._filename, self._mode)
      except IOError, e:
        # If we can't access with _mode, try _fallback_mode and don't lock.
        if e.errno == errno.EACCES:
          self._fh = open(self._filename, self._fallback_mode)
          return
      # We opened in _mode, try to lock the file.
      while True:
        try:
          # Blocking exclusive lock on the whole file.
          fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX)
          self._locked = True
          return
        except IOError, e:
          # If not retrying, then just pass on the error.
          if timeout == 0:
            raise e
          if e.errno != errno.EACCES:
            raise e
          # We could not acquire the lock. Try again.
          if (time.time() - start_time) >= timeout:
            logger.warn('Could not lock %s in %s seconds' % (
                self._filename, timeout))
            if self._fh:
              self._fh.close()
            self._fh = open(self._filename, self._fallback_mode)
            return
          time.sleep(delay)

    def unlock_and_close(self):
      """Close and unlock the file using the fcntl.lockf primitive."""
      if self._locked:
        fcntl.lockf(self._fh.fileno(), fcntl.LOCK_UN)
        self._locked = False
      if self._fh:
        self._fh.close()
except ImportError:
  _FcntlOpener = None
try:
  import pywintypes
  import win32con
  import win32file

  class _Win32Opener(_Opener):
    """Open, lock, and unlock a file using windows primitives."""

    # Error #33:
    #   'The process cannot access the file because another process'
    FILE_IN_USE_ERROR = 33

    # Error #158:
    #   'The segment is already unlocked.'
    FILE_ALREADY_UNLOCKED_ERROR = 158

    def open_and_lock(self, timeout, delay):
      """Open the file and lock it.

      Args:
        timeout: float, How long to try to lock for.
        delay: float, How long to wait between retries

      Raises:
        AlreadyLockedException: if the lock is already acquired.
        IOError: if the open fails.
        CredentialsFileSymbolicLinkError if the file is a symbolic link.
      """
      if self._locked:
        raise AlreadyLockedException('File %s is already locked' %
                                     self._filename)
      start_time = time.time()

      # Per the docstring, validate_file rejects symbolic links by raising
      # CredentialsFileSymbolicLinkError.
      validate_file(self._filename)
      try:
        self._fh = open(self._filename, self._mode)
      except IOError, e:
        # If we can't access with _mode, try _fallback_mode and don't lock.
        if e.errno == errno.EACCES:
          self._fh = open(self._filename, self._fallback_mode)
          return
        # NOTE(review): any other IOError is swallowed here and execution
        # falls through to the lock loop without an open handle; this looks
        # like it should re-raise -- confirm intended behaviour.

      # We opened in _mode, try to lock the file.
      while True:
        try:
          hfile = win32file._get_osfhandle(self._fh.fileno())
          # Exclusive lock that fails immediately instead of blocking when
          # another process holds it (see the LOCKFILE_* flags).
          win32file.LockFileEx(
              hfile,
              (win32con.LOCKFILE_FAIL_IMMEDIATELY|
               win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000,
              pywintypes.OVERLAPPED())
          self._locked = True
          return
        except pywintypes.error, e:
          if timeout == 0:
            raise e

          # If the error is not that the file is already in use, raise.
          if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
            raise

          # We could not acquire the lock. Try again.
          if (time.time() - start_time) >= timeout:
            # Give up: reopen unlocked in the fallback mode instead.
            logger.warn('Could not lock %s in %s seconds' % (
                self._filename, timeout))
            if self._fh:
              self._fh.close()
            self._fh = open(self._filename, self._fallback_mode)
            return
          time.sleep(delay)

    def unlock_and_close(self):
      """Close and unlock the file using the win32 primitive."""
      if self._locked:
        try:
          hfile = win32file._get_osfhandle(self._fh.fileno())
          win32file.UnlockFileEx(hfile, 0, -0x10000, pywintypes.OVERLAPPED())
        except pywintypes.error, e:
          # Ignore "already unlocked"; anything else is a real failure.
          if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
            raise
        self._locked = False
      if self._fh:
        self._fh.close()
except ImportError:
  # pywin32 is unavailable (e.g. not on Windows); callers fall back to
  # another opener implementation.
  _Win32Opener = None
class LockedFile(object):
  """Represent a file that has exclusive access."""

  @util.positional(4)
  def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
    """Construct a LockedFile.

    Args:
      filename: string, The path of the file to open.
      mode: string, The mode to try to open the file with.
      fallback_mode: string, The mode to use if locking fails.
      use_native_locking: bool, Whether or not fcntl/win32 locking is used.
    """
    chosen = None
    if use_native_locking:
      # Prefer a platform-native opener when one imported successfully.
      # Should both somehow be present, the fcntl opener wins (it is
      # assigned last), matching the historical behaviour.
      if _Win32Opener:
        chosen = _Win32Opener(filename, mode, fallback_mode)
      if _FcntlOpener:
        chosen = _FcntlOpener(filename, mode, fallback_mode)
    if chosen is None:
      # Portable fallback: lock via a companion '.lock' file.
      chosen = _PosixOpener(filename, mode, fallback_mode)
    self._opener = chosen

  def filename(self):
    """Return the filename we were constructed with."""
    return self._opener._filename

  def file_handle(self):
    """Return the file_handle to the opened file."""
    return self._opener.file_handle()

  def is_locked(self):
    """Return whether we successfully locked the file."""
    return self._opener.is_locked()

  def open_and_lock(self, timeout=0, delay=0.05):
    """Open the file, trying to lock it.

    Args:
      timeout: float, The number of seconds to try to acquire the lock.
      delay: float, The number of seconds to wait between retry attempts.

    Raises:
      AlreadyLockedException: if the lock is already acquired.
      IOError: if the open fails.
    """
    self._opener.open_and_lock(timeout, delay)

  def unlock_and_close(self):
    """Unlock and close a file."""
    self._opener.unlock_and_close()
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.buildpack.platform.json;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.junit.jupiter.api.Test;
import tools.jackson.databind.node.ObjectNode;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link JsonStream}.
*
* @author Phillip Webb
* @author Scott Frederick
*/
class JsonStreamTests extends AbstractJsonTests {

	private final JsonStream jsonStream;

	JsonStreamTests() {
		this.jsonStream = new JsonStream(getJsonMapper());
	}

	@Test
	void getWhenReadingObjectNodeReturnsNodes() throws Exception {
		// Collect every streamed node and spot-check the final status entry.
		List<ObjectNode> nodes = new ArrayList<>();
		this.jsonStream.get(getContent("stream.json"), nodes::add);
		assertThat(nodes).hasSize(595);
		assertThat(nodes.get(594).get("status").asString())
			.contains("Status: Downloaded newer image for paketo-buildpacks/cnb:base");
	}

	@Test
	void getWhenReadTypesReturnsTypes() throws Exception {
		// Same stream, but deserialized into the typed event below.
		List<TestEvent> events = new ArrayList<>();
		this.jsonStream.get(getContent("stream.json"), TestEvent.class, events::add);
		assertThat(events).hasSize(595);
		assertThat(events.get(1).getId()).isEqualTo("5667fdb72017");
		assertThat(events.get(594).getStatus())
			.isEqualTo("Status: Downloaded newer image for paketo-buildpacks/cnb:base");
	}

	/**
	 * Event used by the typed deserialization test.
	 */
	static class TestEvent {

		private final String id;

		private final String status;

		@JsonCreator
		TestEvent(String id, String status) {
			this.id = id;
			this.status = status;
		}

		String getId() {
			return this.id;
		}

		String getStatus() {
			return this.status;
		}

	}

}
|
java
|
github
|
https://github.com/spring-projects/spring-boot
|
buildpack/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/json/JsonStreamTests.java
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
High-level history functions. These wrap the basic his_read function to allow
some alternate representations of the historical data.
"""
import hszinc
import fysom
import pytz
from copy import deepcopy
from datetime import tzinfo
from six import string_types
from ...util import state
from ...util.asyncexc import AsynchronousException
try:
from pandas import Series, DataFrame
HAVE_PANDAS = True
except ImportError: # pragma: no cover
# Not covered, since we'll always have 'pandas' available during tests.
HAVE_PANDAS = False
def _resolve_tz(tz):
"""
Resolve a given timestamp.
"""
if (tz is None) or isinstance(tz, tzinfo):
return tz
if isinstance(tz, string_types):
if '/' in tz:
# Olson database name
return pytz.timezone(tz)
else:
return hszinc.zoneinfo.timezone(tz)
class HisReadSeriesOperation(state.HaystackOperation):
    """
    Read the series data from a 'point' entity and present it in a concise
    format.
    """

    FORMAT_LIST = 'list'  # [(ts1, value1), (ts2, value2), ...]
    FORMAT_DICT = 'dict'  # {ts1: value1, ts2: value2, ...}
    FORMAT_SERIES = 'series'  # pandas.Series

    def __init__(self, session, point, rng, tz, series_format):
        """
        Read the series data and return it.

        :param session: Haystack HTTP session object.
        :param point: ID of historical 'point' object to read.
        :param rng: Range to read from 'point'
        :param tz: Timezone to translate timezones to.  May be None.
        :param series_format: What format to present the series in.

        :raises ValueError: if series_format is not a FORMAT_* constant.
        :raises NotImplementedError: if FORMAT_SERIES is requested but
            pandas is not installed.
        """
        super(HisReadSeriesOperation, self).__init__()

        if series_format not in (self.FORMAT_LIST, self.FORMAT_DICT,
                                 self.FORMAT_SERIES):
            raise ValueError('Unrecognised series_format %s' % series_format)

        if (series_format == self.FORMAT_SERIES) and (not HAVE_PANDAS):
            raise NotImplementedError('pandas not available.')

        # A slice is shorthand for a (start, stop) range; encode both
        # endpoints as ZINC scalars, comma separated.
        if isinstance(rng, slice):
            rng = ','.join([
                hszinc.dump_scalar(p, mode=hszinc.MODE_ZINC)
                for p in (rng.start, rng.stop)
            ])

        self._session = session
        self._point = point
        self._range = hszinc.dump_scalar(rng, mode=hszinc.MODE_ZINC)
        self._tz = _resolve_tz(tz)
        self._series_format = series_format

        # Two-step state machine: issue the read, then report the result.
        self._state_machine = fysom.Fysom(
                initial='init', final='done',
                events=[
                    # Event             Current State       New State
                    ('go',              'init',             'read'),
                    ('read_done',       'read',             'done'),
                    ('exception',       '*',                'done'),
                ], callbacks={
                    'onenterread':      self._do_read,
                    'onenterdone':      self._do_done,
                })

    def go(self):
        """Start the read operation."""
        self._state_machine.go()

    def _do_read(self, event):
        """
        Request the data from the server.
        """
        self._session.his_read(point=self._point, rng=self._range,
                callback=self._on_read)

    def _on_read(self, operation, **kwargs):
        """
        Process the grid and deliver it in the requested format.
        """
        try:
            # See if the read succeeded; accessing .result re-raises on
            # failure and the bare except below forwards it to the caller.
            grid = operation.result

            if self._tz is None:
                conv_ts = lambda ts: ts
            else:
                conv_ts = lambda ts: ts.astimezone(self._tz)

            # Convert grid to a list of (timestamp, value) tuples.
            data = [(conv_ts(row['ts']), row['val']) for row in grid]

            if self._series_format == self.FORMAT_DICT:
                result = dict(data)
            elif self._series_format == self.FORMAT_SERIES:
                # Split into index and values.  zip(*[]) raises ValueError
                # when the range contained no samples.
                try:
                    (index, values) = zip(*data)
                    if isinstance(values[0], hszinc.Quantity):
                        units = values[0].unit
                        values = [each.value for each in values]
                    else:
                        units = ''
                except ValueError:
                    values = []
                    index = []
                    units = ''
                result = MetaSeries(data=values, index=index)
                result.add_meta('units', units)
                result.add_meta('point', self._point)
            else:
                # FORMAT_LIST: deliver the (ts, value) tuples as-is.
                result = data

            # Deliver the result for *every* format, not only FORMAT_SERIES.
            self._state_machine.read_done(result=result)
        except:  # Catch all exceptions to pass to caller.
            self._state_machine.exception(result=AsynchronousException())

    def _do_done(self, event):
        """
        Return the result from the state machine.
        """
        self._done(event.result)
class HisReadFrameOperation(state.HaystackOperation):
    """
    Read the series data from several 'point' entities and present them in a
    concise format.
    """

    FORMAT_LIST = 'list'  # [{'ts': ts1, 'col1': val1, ...}, {...}, ...]
    FORMAT_DICT = 'dict'  # {ts1: {'col1': val1, ...}, ts2: ...}
    FORMAT_FRAME = 'frame'  # pandas.DataFrame

    def __init__(self, session, columns, rng, tz, frame_format):
        """
        Read the series data and return it.

        :param session: Haystack HTTP session object.
        :param columns: IDs of historical point objects to read, either a
            dict mapping column name -> point id, or an iterable of ids.
        :param rng: Range to read from the points.
        :param tz: Timezone to translate timezones to.  May be None.
        :param frame_format: What format to present the frame in.

        :raises ValueError: if frame_format is not a FORMAT_* constant.
        :raises NotImplementedError: if FORMAT_FRAME is requested but
            pandas is not installed.
        """
        super(HisReadFrameOperation, self).__init__()
        self._log = session._log.getChild('his_read_frame')

        if frame_format not in (self.FORMAT_LIST, self.FORMAT_DICT,
                                self.FORMAT_FRAME):
            raise ValueError('Unrecognised frame_format %s' % frame_format)

        if (frame_format == self.FORMAT_FRAME) and (not HAVE_PANDAS):
            raise NotImplementedError('pandas not available.')

        # A slice is shorthand for a (start, stop) range; encode both
        # endpoints as ZINC scalars, comma separated.
        if isinstance(rng, slice):
            rng = ','.join([
                hszinc.dump_scalar(p, mode=hszinc.MODE_ZINC)
                for p in (rng.start, rng.stop)
            ])

        # Normalise columns to a list of (column name, point id) tuples.
        strip_ref = lambda r: r.name if isinstance(r, hszinc.Ref) else r
        if isinstance(columns, dict):
            # Ensure all column names are strings mapped to references.
            columns = [(str(c), strip_ref(r)) for c, r in columns.items()]
        else:
            # Column name doubles as the point id.
            columns = [(strip_ref(c), c) for c in columns]

        self._session = session
        self._columns = columns
        self._range = hszinc.dump_scalar(rng, mode=hszinc.MODE_ZINC)
        self._tz = _resolve_tz(tz)
        self._frame_format = frame_format
        self._data_by_ts = {}
        # Columns whose responses are still outstanding (single-read mode).
        self._todo = set([c[0] for c in columns])

        self._state_machine = fysom.Fysom(
                initial='init', final='done',
                events=[
                    # Event             Current State       New State
                    ('probe_multi',     'init',             'probing'),
                    ('do_multi_read',   'probing',          'multi_read'),
                    ('all_read_done',   'multi_read',       'postprocess'),
                    ('do_single_read',  'probing',          'single_read'),
                    ('all_read_done',   'single_read',      'postprocess'),
                    ('process_done',    'postprocess',      'done'),
                    ('exception',       '*',                'done'),
                ], callbacks={
                    'onenterprobing':       self._do_probe_multi,
                    'onentermulti_read':    self._do_multi_read,
                    'onentersingle_read':   self._do_single_read,
                    'onenterpostprocess':   self._do_postprocess,
                    'onenterdone':          self._do_done,
                })

    def go(self):
        """Start by probing whether the server supports multi-his-read."""
        self._state_machine.probe_multi()

    def _do_probe_multi(self, event):
        self._log.debug('Probing for multi-his-read support')
        self._session.has_features([self._session.FEATURE_HISREAD_MULTI],
                callback=self._on_probe_multi)

    def _on_probe_multi(self, operation, **kwargs):
        try:
            result = operation.result
        except:  # Catch all exceptions to pass to caller.
            self._state_machine.exception(result=AsynchronousException())
            return

        if result.get(self._session.FEATURE_HISREAD_MULTI):
            # Session object supports multi-his-read
            self._log.debug('Using multi-his-read support')
            self._state_machine.do_multi_read()
        else:
            # Emulate multi-his-read with separate single reads.
            self._log.debug('No multi-his-read support, emulating')
            self._state_machine.do_single_read()

    def _get_ts_rec(self, ts):
        """Return (creating if needed) the record for timestamp ``ts``."""
        try:
            return self._data_by_ts[ts]
        except KeyError:
            rec = {}
            self._data_by_ts[ts] = rec
            return rec

    def _do_multi_read(self, event):
        """
        Request the data from the server as a single multi-read request.
        """
        self._session.multi_his_read(points=[c[1] for c in self._columns],
                rng=self._range, callback=self._on_multi_read)

    def _on_multi_read(self, operation, **kwargs):
        """
        Handle the multi-valued grid: one 'v<N>' column per requested point.
        """
        try:
            grid = operation.result
            if self._tz is None:
                conv_ts = lambda ts: ts
            else:
                conv_ts = lambda ts: ts.astimezone(self._tz)

            for row in grid:
                ts = conv_ts(row['ts'])
                rec = self._get_ts_rec(ts)
                for (col_idx, (col, _)) in enumerate(self._columns):
                    val = row.get('v%d' % col_idx)
                    # For DataFrame output, missing samples are left absent
                    # (pandas fills NaN); other formats keep explicit None.
                    if (val is not None) or \
                            (self._frame_format != self.FORMAT_FRAME):
                        rec[col] = val
            self._state_machine.all_read_done()
        except:  # Catch all exceptions to pass to caller.
            self._log.debug('Hit exception', exc_info=1)
            self._state_machine.exception(result=AsynchronousException())

    def _do_single_read(self, event):
        """
        Request the data from the server as multiple single-read requests.
        """
        for col, point in self._columns:
            self._log.debug('Column %s point %s', col, point)
            # Bind 'col' as a default argument: a plain closure would see
            # the loop variable's *final* value by the time the async
            # callbacks fire, attributing every response to the last column.
            self._session.his_read(point, self._range,
                    lambda operation, col=col, **kw:
                        self._on_single_read(operation, col=col))

    def _on_single_read(self, operation, col, **kwargs):
        """
        Handle the grid returned for a single column.
        """
        self._log.debug('Response back for column %s', col)
        try:
            grid = operation.result
            if self._tz is None:
                conv_ts = lambda ts: ts
            else:
                conv_ts = lambda ts: ts.astimezone(self._tz)

            self._log.debug('%d records for %s: %s', len(grid), col, grid)
            for row in grid:
                ts = conv_ts(row['ts'])
                # Adopt the first timestamp's timezone if none was given.
                if self._tz is None:
                    self._tz = ts.tzinfo
                rec = self._get_ts_rec(ts)
                val = row.get('val')
                if (val is not None) or \
                        (self._frame_format != self.FORMAT_FRAME):
                    rec[col] = val
            self._todo.discard(col)
            self._log.debug('Still waiting for: %s', self._todo)
            if not self._todo:
                # No more responses outstanding.
                self._state_machine.all_read_done()
        except:  # Catch all exceptions to pass to caller.
            self._log.debug('Hit exception', exc_info=1)
            self._state_machine.exception(result=AsynchronousException())

    def _do_postprocess(self, event):
        """
        Convert the dict-of-dicts to the desired frame format.
        """
        self._log.debug('Post-processing')
        try:
            if self._frame_format == self.FORMAT_LIST:
                def _merge_ts(item):
                    rec = item[1].copy()
                    rec['ts'] = item[0]
                    return rec
                data = list(map(_merge_ts, list(self._data_by_ts.items())))
            elif self._frame_format == self.FORMAT_FRAME:
                # Build from dict
                data = MetaDataFrame.from_dict(self._data_by_ts, orient='index')

                def convert_quantity(val):
                    """If value is a Quantity, extract its bare value."""
                    if isinstance(val, hszinc.Quantity):
                        return val.value
                    else:
                        return val

                def get_units(serie):
                    try:
                        first_element = serie.dropna()[0]
                    except IndexError:  # needed for empty results
                        return ''
                    if isinstance(first_element, hszinc.Quantity):
                        return first_element.unit
                    else:
                        return ''

                # Strip Quantity wrappers, recording units as metadata.
                # NOTE(review): DataFrame.iteritems was removed in
                # pandas >= 2.0 (use .items); confirm supported versions.
                for name, serie in data.iteritems():
                    data.add_meta(name, get_units(serie))
                    data[name] = data[name].apply(convert_quantity)
            else:
                data = self._data_by_ts
            self._state_machine.process_done(result=data)
        except:  # Catch all exceptions to pass to caller.
            self._log.debug('Hit exception', exc_info=1)
            self._state_machine.exception(result=AsynchronousException())

    def _do_done(self, event):
        """
        Return the result from the state machine.
        """
        self._done(event.result)
class HisWriteSeriesOperation(state.HaystackOperation):
    """
    Write the series data to a 'point' entity.

    If no timezone is given, one is discovered by walking the entity
    hierarchy: point -> equip -> site, using the first 'tz' found.
    """

    def __init__(self, session, point, series, tz):
        """
        Write the series data to the point.

        :param session: Haystack HTTP session object.
        :param point: ID of historical 'point' object to write.
        :param series: Series data to be written to the point.
        :param tz: If not None, a datetime.tzinfo instance for this write.
        """
        super(HisWriteSeriesOperation, self).__init__()

        # We've either been given an Entity instance or a string/reference
        # giving the name of an entity.
        if isinstance(point, string_types) or isinstance(point, hszinc.Ref):
            # We have the name of an entity, we'll need to fetch it.
            self._entity_id = point
            self._point = None
        else:
            # We have an entity.
            self._point = point
            self._entity_id = point.id

        self._session = session
        self._series = series
        self._tz = _resolve_tz(tz)

        # States walk point -> equip -> site looking for a timezone, then
        # perform the write once one is found (or fail if none exists).
        self._state_machine = fysom.Fysom(
                initial='init', final='done',
                events=[
                    # Event             Current State       New State
                    ('have_tz',         'init',             'write'),
                    ('have_point',      'init',             'get_point_tz'),
                    ('need_point',      'init',             'get_point'),
                    ('have_point',      'get_point',        'get_point_tz'),
                    ('have_tz',         'get_point_tz',     'write'),
                    ('need_equip',      'get_point_tz',     'get_equip'),
                    ('have_equip',      'get_equip',        'get_equip_tz'),
                    ('have_tz',         'get_equip_tz',     'write'),
                    ('need_site',       'get_equip_tz',     'get_site'),
                    ('have_site',       'get_site',         'get_site_tz'),
                    ('have_tz',         'get_site_tz',      'write'),
                    ('write_done',      'write',            'done'),
                    ('exception',       '*',                'done'),
                ], callbacks={
                    'onenterget_point':     self._do_get_point,
                    'onenterget_point_tz':  self._do_get_point_tz,
                    'onenterget_equip':     self._do_get_equip,
                    'onenterget_equip_tz':  self._do_get_equip_tz,
                    'onenterget_site':      self._do_get_site,
                    'onenterget_site_tz':   self._do_get_site_tz,
                    'onenterwrite':         self._do_write,
                    'onenterdone':          self._do_done,
                })

    def go(self):
        # Pick the entry transition based on what we already know.
        if self._tz is not None:  # Do we have a timezone?
            # We do!
            self._state_machine.have_tz()
        elif self._point is not None:  # Nope, do we have the point?
            # We do!
            self._state_machine.have_point()
        else:
            # We need to fetch the point to get its timezone.
            self._state_machine.need_point()

    def _do_get_point(self, event):
        """
        Retrieve the point entity.
        """
        self._session.get_entity(self._entity_id, single=True,
                callback=self._got_point)

    def _got_point(self, operation, **kwargs):
        """
        Process the return value from get_entity
        """
        try:
            self._point = operation.result
            self._state_machine.have_point()
        except:
            # Forward any failure to the caller via the 'done' state.
            self._state_machine.exception(result=AsynchronousException())

    def _do_get_point_tz(self, event):
        """
        See if the point has a timezone?
        """
        if hasattr(self._point, 'tz') and isinstance(self._point.tz, tzinfo):
            # We have our timezone.
            self._tz = self._point.tz
            self._state_machine.have_tz()
        else:
            # Nope, look at the equip then.
            self._state_machine.need_equip()

    def _do_get_equip(self, event):
        """
        Retrieve the equip entity.
        """
        self._point.get_equip(callback=self._got_equip)

    def _got_equip(self, operation, **kwargs):
        """
        Process the return value from get_equip
        """
        try:
            equip = operation.result
            self._state_machine.have_equip(equip=equip)
        except:
            self._state_machine.exception(result=AsynchronousException())

    def _do_get_equip_tz(self, event):
        """
        See if the equip has a timezone?
        """
        equip = event.equip
        if hasattr(equip, 'tz') and isinstance(equip.tz, tzinfo):
            # We have our timezone.
            self._tz = equip.tz
            self._state_machine.have_tz()
        else:
            # Nope, look at the site then.
            self._state_machine.need_site()

    def _do_get_site(self, event):
        """
        Retrieve the site entity.
        """
        self._point.get_site(callback=self._got_site)

    def _got_site(self, operation, **kwargs):
        """
        Process the return value from get_site
        """
        try:
            site = operation.result
            self._state_machine.have_site(site=site)
        except:
            self._state_machine.exception(result=AsynchronousException())

    def _do_get_site_tz(self, event):
        """
        See if the site has a timezone?
        """
        site = event.site
        if hasattr(site, 'tz') and isinstance(site.tz, tzinfo):
            # We have our timezone.
            self._tz = site.tz
            self._state_machine.have_tz()
        else:
            try:
                # End of the hierarchy: raise inside try so the failure is
                # delivered as an AsynchronousException like other errors.
                raise ValueError('No timezone specified for operation, '\
                        'point, equip or site.')
            except:
                self._state_machine.exception(result=AsynchronousException())

    def _do_write(self, event):
        """
        Push the data to the server.
        """
        try:
            # Process the timestamp records into an appropriate format.
            if hasattr(self._series, 'to_dict'):
                # pandas Series (or anything dict-convertible).
                records = self._series.to_dict()
            elif not isinstance(self._series, dict):
                # Assume an iterable of (ts, value) pairs.
                records = dict(self._series)
            else:
                records = self._series

            if not bool(records):
                # No data, skip writing this series.
                self._state_machine.write_done(result=None)
                return

            # Time-shift the records.  pytz zones need .localize() for
            # naive timestamps; plain tzinfo instances use .replace().
            if hasattr(self._tz, 'localize'):
                localise = lambda ts : self._tz.localize(ts) \
                        if ts.tzinfo is None else ts.astimezone(self._tz)
            else:
                localise = lambda ts : ts.replace(tzinfo=self._tz) \
                        if ts.tzinfo is None else ts.astimezone(self._tz)

            records = dict([(localise(ts), val) \
                    for ts, val in records.items()])

            # Write the data
            self._session.his_write(point=self._entity_id,
                    timestamp_records=records, callback=self._on_write)
        except:
            self._state_machine.exception(result=AsynchronousException())

    def _on_write(self, operation, **kwargs):
        """
        Handle the write error, if any.
        """
        try:
            # See if the write succeeded.
            grid = operation.result
            if not isinstance(grid, hszinc.Grid):
                raise TypeError('Unexpected result: %r' % grid)

            # Move to the done state.
            self._state_machine.write_done(result=None)
        except: # Catch all exceptions to pass to caller.
            self._state_machine.exception(result=AsynchronousException())

    def _do_done(self, event):
        """
        Return the result from the state machine.
        """
        self._done(event.result)
class HisWriteFrameOperation(state.HaystackOperation):
    """
    Write the series data to several 'point' entities.
    """

    def __init__(self, session, columns, frame, tz):
        """
        Write the series data.

        :param session: Haystack HTTP session object.
        :param columns: mapping of column name -> point (or None to use the
            frame's own keys as point references).
        :param frame: data to write: a pandas DataFrame, a dict of dicts
            keyed by timestamp, or a list of records with a 'ts' key.
        :param tz: Timezone to translate timestamps to (UTC if None).
        """
        super(HisWriteFrameOperation, self).__init__()
        self._log = session._log.getChild('his_write_frame')

        tz = _resolve_tz(tz)
        if tz is None:
            tz = pytz.utc

        # pytz zones need .localize() for naive timestamps; plain tzinfo
        # instances can just be attached with .replace().
        if hasattr(tz, 'localize'):
            localise = lambda ts: tz.localize(ts) \
                    if ts.tzinfo is None else ts.astimezone(tz)
        else:
            localise = lambda ts: ts.replace(tzinfo=tz) \
                    if ts.tzinfo is None else ts.astimezone(tz)

        # Convert frame to list of records.
        if HAVE_PANDAS:
            # Convert Pandas frame to dict-of-dicts form first.
            if isinstance(frame, DataFrame):
                self._log.debug('Convert from Pandas DataFrame')
                raw_frame = frame.to_dict(orient='dict')
                frame = {}
                for col, col_data in raw_frame.items():
                    for ts, val in col_data.items():
                        try:
                            frame_rec = frame[ts]
                        except KeyError:
                            frame_rec = {}
                            frame[ts] = frame_rec
                        # Bug fix: store the value in this timestamp's
                        # record; the previous 'frame[col] = val' clobbered
                        # the dict-of-dicts with column-keyed entries.
                        frame_rec[col] = val

        # Convert dict of dicts to records, de-referencing column names.
        if isinstance(frame, dict):
            if columns is None:
                def _to_rec(item):
                    (ts, raw_record) = item
                    record = raw_record.copy()
                    record['ts'] = ts
                    return record
            else:
                def _to_rec(item):
                    (ts, raw_record) = item
                    record = {}
                    for col, val in raw_record.items():
                        # Resolve the column alias to a point name.
                        entity = columns[col]
                        if hasattr(entity, 'id'):
                            entity = entity.id
                        if isinstance(entity, hszinc.Ref):
                            entity = entity.name
                        record[entity] = val
                    record['ts'] = ts
                    return record
            frame = list(map(_to_rec, list(frame.items())))
        elif columns is not None:
            # Columns are aliased.  De-alias the column names in each row.
            frame = deepcopy(frame)
            for row in frame:
                ts = row.pop('ts')
                raw = row.copy()
                row.clear()
                row['ts'] = ts

                for column, point in columns.items():
                    try:
                        value = raw.pop(column)
                    except KeyError:
                        self._log.debug('At %s missing column %s (for %s): %s',
                                ts, column, point, raw)
                        continue
                    row[session._obj_to_ref(point).name] = value

        # Localise all timestamps and collect the set of column names.
        columns = set()
        def _localise_rec(r):
            r['ts'] = localise(r['ts'])
            columns.update(set(r.keys()) - set(['ts']))
            return r
        frame = list(map(_localise_rec, frame))

        self._session = session
        self._frame = frame
        self._columns = columns
        # Points whose write responses are still outstanding.
        self._todo = columns.copy()
        self._tz = _resolve_tz(tz)
        self._state_machine = fysom.Fysom(
                initial='init', final='done',
                events=[
                    # Event             Current State       New State
                    ('probe_multi',     'init',             'probing'),
                    ('no_data',         'init',             'done'),
                    ('do_multi_write',  'probing',          'multi_write'),
                    ('all_write_done',  'multi_write',      'done'),
                    ('do_single_write', 'probing',          'single_write'),
                    ('all_write_done',  'single_write',     'done'),
                    ('exception',       '*',                'done'),
                ], callbacks={
                    'onenterprobing':       self._do_probe_multi,
                    'onentermulti_write':   self._do_multi_write,
                    'onentersingle_write':  self._do_single_write,
                    'onenterdone':          self._do_done,
                })

    def go(self):
        """Start the write, short-circuiting when there is nothing to send."""
        if not bool(self._columns):
            self._log.debug('No data to write')
            self._state_machine.no_data(result=None)
        else:
            self._state_machine.probe_multi()

    def _do_probe_multi(self, event):
        self._log.debug('Probing for multi-his-write support')
        self._session.has_features([self._session.FEATURE_HISWRITE_MULTI],
                callback=self._on_probe_multi)

    def _on_probe_multi(self, operation, **kwargs):
        try:
            result = operation.result
        except:  # Catch all exceptions to pass to caller.
            self._log.warning('Unable to probe multi-his-write support',
                    exc_info=1)
            self._state_machine.exception(result=AsynchronousException())
            return

        self._log.debug('Got result: %s', result)
        if result.get(self._session.FEATURE_HISWRITE_MULTI):
            # Session object supports multi-his-write
            self._log.debug('Using multi-his-write support')
            self._state_machine.do_multi_write()
        else:
            # Emulate multi-his-write with separate single writes.
            self._log.debug('No multi-his-write support, emulating')
            self._state_machine.do_single_write()

    def _do_multi_write(self, event):
        """
        Write the data to the server as a single multi-write request.
        """
        self._session.multi_his_write(self._frame,
                callback=self._on_multi_write)

    def _on_multi_write(self, operation, **kwargs):
        """
        Handle the multi-write response.
        """
        try:
            grid = operation.result
            if not isinstance(grid, hszinc.Grid):
                raise ValueError('Unexpected result %r' % grid)
            self._state_machine.all_write_done(result=None)
        except:  # Catch all exceptions to pass to caller.
            self._log.debug('Hit exception', exc_info=1)
            self._state_machine.exception(result=AsynchronousException())

    def _do_single_write(self, event):
        """
        Submit the data in single write requests, one per point.
        """
        for point in self._columns:
            self._log.debug('Point %s', point)
            # Extract this point's series, skipping absent samples.
            series = dict([(r['ts'], r[point]) for r in
                    filter(lambda r: r.get(point) is not None, self._frame)])
            # Bind 'point' as a default argument: a plain closure would see
            # the loop variable's *final* value by the time the async
            # callbacks fire, attributing every response to the last point.
            self._session.his_write_series(point, series,
                    callback=lambda operation, point=point, **kw:
                        self._on_single_write(operation, point=point))

    def _on_single_write(self, operation, point, **kwargs):
        """
        Handle a single write response.
        """
        self._log.debug('Response back for point %s', point)
        try:
            res = operation.result
            if res is not None:
                raise ValueError('Unexpected result %r' % res)
            self._todo.discard(point)
            self._log.debug('Still waiting for: %s', self._todo)
            if not self._todo:
                # No more responses outstanding.
                self._state_machine.all_write_done(result=None)
        except:  # Catch all exceptions to pass to caller.
            self._log.debug('Hit exception', exc_info=1)
            self._state_machine.exception(result=AsynchronousException())

    def _do_done(self, event):
        """
        Return the result from the state machine.
        """
        self._done(event.result)
if HAVE_PANDAS:
    class MetaSeries(Series):
        """
        Pandas Series subclass carrying a ``meta`` dict of metadata
        (e.g. units, source point).
        """

        # Registering 'meta' lets pandas propagate it through operations
        # that build new objects via _constructor.
        _metadata = ['meta']

        # Class-level default kept so reading .meta before add_meta() still
        # works; writes always go to a per-instance dict (see add_meta).
        meta = {}

        @property
        def _constructor(self):
            return MetaSeries

        def add_meta(self, key, value):
            """Attach a metadata entry to this series."""
            # Bug fix: create a per-instance dict on first write.  Mutating
            # the class-level default would share (and leak) metadata
            # between every MetaSeries instance.
            if 'meta' not in self.__dict__:
                object.__setattr__(self, 'meta', {})
            self.meta[key] = value

    class MetaDataFrame(DataFrame):
        """
        Pandas DataFrame subclass carrying a ``meta`` dict of metadata.
        Slicing yields MetaSeries columns.
        """

        _metadata = ['meta']

        # Class-level default for reads; writes create an instance dict.
        meta = {}

        def __init__(self, *args, **kw):
            super(MetaDataFrame, self).__init__(*args, **kw)

        @property
        def _constructor(self):
            return MetaDataFrame

        _constructor_sliced = MetaSeries

        def add_meta(self, key, value):
            """Attach a metadata entry to this frame."""
            # Bug fix: per-instance dict, not the shared class default.
            if 'meta' not in self.__dict__:
                object.__setattr__(self, 'meta', {})
            self.meta[key] = value
|
unknown
|
codeparrot/codeparrot-clean
| ||
[
{
"pk": "4",
"model": "fixtures_regress.person",
"fields": {
"name": "Neal Stephenson"
}
},
{
"pk": "2",
"model": "fixtures_regress.store",
"fields": {
"main": null,
"name": "Amazon"
}
},
{
"pk": "3",
"model": "fixtures_regress.store",
"fields": {
"main": null,
"name": "Borders"
}
},
{
"pk": 1,
"model": "fixtures_regress.book",
"fields": {
"name": "Cryptonomicon",
"author": ["Neal Stephenson"],
"stores": [["Amazon"], ["Borders"]]
}
}
]
|
json
|
github
|
https://github.com/django/django
|
tests/fixtures_regress/fixtures/forward_ref_lookup.json
|
/*!
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { paramPlaceholder, useParamStore } from "src/queries/useParamStore";
import type { FlexibleFormElementProps } from ".";
import { Switch } from "../ui";
export const FieldBool = ({ name, namespace = "default" }: FlexibleFormElementProps) => {
  const { disabled, paramsDict, setParamsDict } = useParamStore(namespace);
  const param = paramsDict[name] ?? paramPlaceholder;

  // Flip the stored value in place, then push the dict back into the store.
  const handleCheckedChange = (checked: boolean) => {
    if (paramsDict[name]) {
      paramsDict[name].value = checked;
    }
    setParamsDict(paramsDict);
  };

  return (
    <Switch
      checked={Boolean(param.value)}
      colorPalette="brand"
      disabled={disabled}
      id={`element_${name}`}
      name={`element_${name}`}
      onCheckedChange={(event) => handleCheckedChange(event.checked)}
    />
  );
};
|
typescript
|
github
|
https://github.com/apache/airflow
|
airflow-core/src/airflow/ui/src/components/FlexibleForm/FieldBool.tsx
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
    """Register the 'squall_female' creature spawn template with the core
    spawn service."""
    mobileTemplate = MobileTemplate()

    mobileTemplate.setCreatureName('squall_female')
    mobileTemplate.setLevel(37)
    mobileTemplate.setDifficulty(Difficulty.NORMAL)
    # Spawn placement range relative to the spawn point.
    mobileTemplate.setMinSpawnDistance(4)
    mobileTemplate.setMaxSpawnDistance(8)
    mobileTemplate.setDeathblow(False)
    mobileTemplate.setScale(1)
    # Harvestable resources.
    mobileTemplate.setMeatType("Herbivore Meat")
    mobileTemplate.setMeatAmount(8)
    mobileTemplate.setHideType("Bristley Hide")
    # NOTE(review): setBoneAmount is called twice (12 here, 8 below) and no
    # hide amount is ever set -- this first call was probably meant to be
    # setHideAmount(12); confirm against the MobileTemplate API.
    mobileTemplate.setBoneAmount(12)
    mobileTemplate.setBoneType("Avian Bones")
    mobileTemplate.setBoneAmount(8)
    mobileTemplate.setSocialGroup("squall")
    mobileTemplate.setAssistRange(6)
    mobileTemplate.setStalker(False)
    mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)

    # Visual appearance template(s).
    templates = Vector()
    templates.add('object/mobile/shared_squall.iff')
    mobileTemplate.setTemplates(templates)

    # Unarmed melee "weapon" (args presumably: iff, type, speed, damage,
    # damage type -- confirm against WeaponTemplate).
    weaponTemplates = Vector()
    weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
    weaponTemplates.add(weapontemplate)
    mobileTemplate.setWeaponTemplateVector(weaponTemplates)

    # Special attack list plus the default auto-attack.
    attacks = Vector()
    attacks.add('bm_bite_3')
    mobileTemplate.setDefaultAttack('creatureMeleeAttack')
    mobileTemplate.setAttacks(attacks)

    core.spawnService.addMobileTemplate('squall_female', mobileTemplate)
    return
|
unknown
|
codeparrot/codeparrot-clean
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.