code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
# Copyright (c) 2010 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Chance (Random) Scheduler implementation
"""
import random
from oslo_config import cfg
from nova import exception
from nova.i18n import _
from nova.scheduler import driver
CONF = cfg.CONF
CONF.import_opt('compute_topic', 'nova.compute.rpcapi')
class ChanceScheduler(driver.Scheduler):
"""Implements Scheduler as a random node selector."""
def _filter_hosts(self, request_spec, hosts, filter_properties):
    """Return ``hosts`` with any 'ignore_hosts' entries removed.

    ``request_spec`` is not consulted here; it is part of the signature
    for interface compatibility with other scheduler filters.
    """
    excluded = filter_properties.get('ignore_hosts', [])
    return [candidate for candidate in hosts if candidate not in excluded]
def _schedule(self, context, topic, request_spec, filter_properties):
    """Pick one random host among the live, non-ignored hosts.

    Raises NoValidHost when no service for ``topic`` is up, or when
    filtering leaves nothing to choose from.
    """
    elevated = context.elevated()
    candidates = self.hosts_up(elevated, topic)
    if not candidates:
        raise exception.NoValidHost(
            reason=_("Is the appropriate service running?"))
    candidates = self._filter_hosts(request_spec, candidates,
                                    filter_properties)
    if not candidates:
        raise exception.NoValidHost(
            reason=_("Could not find another compute"))
    return random.choice(candidates)
def select_destinations(self, context, request_spec, filter_properties):
"""Selects random destinations.

Returns one dict per requested instance with 'host', 'nodename' and
'limits' keys; raises NoValidHost (from _schedule) when no host can
be picked.
"""
num_instances = request_spec['num_instances']
# NOTE(timello): Returns a list of dicts with 'host', 'nodename' and
# 'limits' as keys for compatibility with filter_scheduler.
dests = []
for i in range(num_instances):
# _schedule raises NoValidHost rather than returning a sentinel, so
# each iteration either appends a host or aborts the whole request.
host = self._schedule(context, CONF.compute_topic,
request_spec, filter_properties)
host_state = dict(host=host, nodename=None, limits=None)
dests.append(host_state)
# NOTE(review): this shortfall check looks unreachable because _schedule
# raises instead of returning short -- presumably a defensive guard;
# confirm before removing.
if len(dests) < num_instances:
reason = _('There are not enough hosts available.')
raise exception.NoValidHost(reason=reason)
return dests | unknown | codeparrot/codeparrot-clean | ||
1000000.times.map{|i| a={}; 2.times{|j| a[j]=j}; a} | ruby | github | https://github.com/ruby/ruby | benchmark/hash_small2.rb |
"Dialog to specify or edit the parameters for a user configured help source."
import os
import sys
from Tkinter import *
import tkMessageBox
import tkFileDialog
class GetHelpSourceDialog(Toplevel):
def __init__(self, parent, title, menuItem='', filePath=''):
"""Get menu entry and url/ local file location for Additional Help
User selects a name for the Help resource and provides a web url
or a local file as its source. The user can enter a url or browse
for the file.
"""
Toplevel.__init__(self, parent)
self.configure(borderwidth=5)
self.resizable(height=FALSE, width=FALSE)
self.title(title)
self.transient(parent)
self.grab_set()
# Route window-manager close to Cancel so self.result stays None.
self.protocol("WM_DELETE_WINDOW", self.Cancel)
self.parent = parent
# result is None until Ok() validates and stores (menu, path).
self.result = None
self.CreateWidgets()
self.menu.set(menuItem)
self.path.set(filePath)
self.withdraw() #hide while setting geometry
#needs to be done here so that the winfo_reqwidth is valid
self.update_idletasks()
#centre dialog over parent:
self.geometry("+%d+%d" %
((parent.winfo_rootx() + ((parent.winfo_width()/2)
-(self.winfo_reqwidth()/2)),
parent.winfo_rooty() + ((parent.winfo_height()/2)
-(self.winfo_reqheight()/2)))))
self.deiconify() #geometry set, unhide
self.bind('<Return>', self.Ok)
# Modal: block here until the dialog is destroyed; the caller then
# reads self.result.
self.wait_window()
def CreateWidgets(self):
"""Build and lay out the entry fields, Browse button and OK/Cancel."""
self.menu = StringVar(self)
self.path = StringVar(self)
# NOTE(review): self.fontSize is never referenced again in this dialog
# -- looks like dead state; confirm before removing.
self.fontSize = StringVar(self)
self.frameMain = Frame(self, borderwidth=2, relief=GROOVE)
self.frameMain.pack(side=TOP, expand=TRUE, fill=BOTH)
labelMenu = Label(self.frameMain, anchor=W, justify=LEFT,
text='Menu Item:')
self.entryMenu = Entry(self.frameMain, textvariable=self.menu,
width=30)
self.entryMenu.focus_set()
labelPath = Label(self.frameMain, anchor=W, justify=LEFT,
text='Help File Path: Enter URL or browse for file')
self.entryPath = Entry(self.frameMain, textvariable=self.path,
width=40)
# NOTE(review): second focus_set() on entryMenu looks like it was meant
# for entryPath -- confirm.
self.entryMenu.focus_set()
labelMenu.pack(anchor=W, padx=5, pady=3)
self.entryMenu.pack(anchor=W, padx=5, pady=3)
labelPath.pack(anchor=W, padx=5, pady=3)
self.entryPath.pack(anchor=W, padx=5, pady=3)
browseButton = Button(self.frameMain, text='Browse', width=8,
command=self.browseFile)
browseButton.pack(pady=3)
frameButtons = Frame(self)
frameButtons.pack(side=BOTTOM, fill=X)
self.buttonOk = Button(frameButtons, text='OK',
width=8, default=ACTIVE, command=self.Ok)
self.buttonOk.grid(row=0, column=0, padx=5,pady=5)
self.buttonCancel = Button(frameButtons, text='Cancel',
width=8, command=self.Cancel)
self.buttonCancel.grid(row=0, column=1, padx=5, pady=5)
def browseFile(self):
"""Open a file chooser seeded from the current path; store the pick."""
filetypes = [
("HTML Files", "*.htm *.html", "TEXT"),
("PDF Files", "*.pdf", "TEXT"),
("Windows Help Files", "*.chm"),
("Text Files", "*.txt", "TEXT"),
("All Files", "*")]
path = self.path.get()
if path:
dir, base = os.path.split(path)
else:
base = None
# Default start directory: the interpreter's bundled Doc folder on
# Windows (if it exists), otherwise the current working directory.
if sys.platform[:3] == 'win':
dir = os.path.join(os.path.dirname(sys.executable), 'Doc')
if not os.path.isdir(dir):
dir = os.getcwd()
else:
dir = os.getcwd()
opendialog = tkFileDialog.Open(parent=self, filetypes=filetypes)
file = opendialog.show(initialdir=dir, initialfile=base)
# Only overwrite the path when the user actually picked a file.
if file:
self.path.set(file)
def MenuOk(self):
"Simple validity check for a sensible menu item name"
menuOk = True
menu = self.menu.get()
menu.strip()
if not menu:
tkMessageBox.showerror(title='Menu Item Error',
message='No menu item specified',
parent=self)
self.entryMenu.focus_set()
menuOk = False
elif len(menu) > 30:
tkMessageBox.showerror(title='Menu Item Error',
message='Menu item too long:'
'\nLimit 30 characters.',
parent=self)
self.entryMenu.focus_set()
menuOk = False
return menuOk
def PathOk(self):
"Simple validity check for menu file path"
pathOk = True
path = self.path.get()
path.strip()
if not path: #no path specified
tkMessageBox.showerror(title='File Path Error',
message='No help file path specified.',
parent=self)
self.entryPath.focus_set()
pathOk = False
elif path.startswith(('www.', 'http')):
pass
else:
if path[:5] == 'file:':
path = path[5:]
if not os.path.exists(path):
tkMessageBox.showerror(title='File Path Error',
message='Help file path does not exist.',
parent=self)
self.entryPath.focus_set()
pathOk = False
return pathOk
def Ok(self, event=None):
"""Validate both fields; on success store (menu, path) and close.

Bound to <Return> as well as the OK button.
"""
if self.MenuOk() and self.PathOk():
self.result = (self.menu.get().strip(),
self.path.get().strip())
if sys.platform == 'darwin':
path = self.result[1]
if path.startswith(('www', 'file:', 'http:')):
pass
else:
# Mac Safari insists on using the URI form for local files
self.result = list(self.result)
self.result[1] = "file://" + path
self.destroy()
def Cancel(self, event=None):
"""Dismiss the dialog, discarding any entered values (result is None)."""
self.result = None
self.destroy()
# Manual smoke test: run this module directly to exercise the dialog.
if __name__ == '__main__':
#test the dialog
root = Tk()
def run():
keySeq = ''
dlg = GetHelpSourceDialog(root, 'Get Help Source')
# NOTE(review): Python 2 print statement -- this module predates Python 3.
print dlg.result
Button(root,text='Dialog', command=run).pack()
root.mainloop() | unknown | codeparrot/codeparrot-clean | ||
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
from metrics import Metric
from telemetry.value import scalar
_COUNTER_NAMES = [
'V8.OsMemoryAllocated',
'V8.MemoryNewSpaceBytesAvailable',
'V8.MemoryNewSpaceBytesCommitted',
'V8.MemoryNewSpaceBytesUsed',
'V8.MemoryOldPointerSpaceBytesAvailable',
'V8.MemoryOldPointerSpaceBytesCommitted',
'V8.MemoryOldPointerSpaceBytesUsed',
'V8.MemoryOldDataSpaceBytesAvailable',
'V8.MemoryOldDataSpaceBytesCommitted',
'V8.MemoryOldDataSpaceBytesUsed',
'V8.MemoryCodeSpaceBytesAvailable',
'V8.MemoryCodeSpaceBytesCommitted',
'V8.MemoryCodeSpaceBytesUsed',
'V8.MemoryMapSpaceBytesAvailable',
'V8.MemoryMapSpaceBytesCommitted',
'V8.MemoryMapSpaceBytesUsed',
'V8.MemoryCellSpaceBytesAvailable',
'V8.MemoryCellSpaceBytesCommitted',
'V8.MemoryCellSpaceBytesUsed',
'V8.MemoryPropertyCellSpaceBytesAvailable',
'V8.MemoryPropertyCellSpaceBytesCommitted',
'V8.MemoryPropertyCellSpaceBytesUsed',
'V8.MemoryLoSpaceBytesAvailable',
'V8.MemoryLoSpaceBytesCommitted',
'V8.MemoryLoSpaceBytesUsed',
'V8.SizeOf_ACCESSOR_PAIR_TYPE',
'V8.SizeOf_ACCESS_CHECK_INFO_TYPE',
'V8.SizeOf_ALIASED_ARGUMENTS_ENTRY_TYPE',
'V8.SizeOf_ALLOCATION_MEMENTO_TYPE',
'V8.SizeOf_ALLOCATION_SITE_TYPE',
'V8.SizeOf_ASCII_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_ASCII_STRING_TYPE',
'V8.SizeOf_BOX_TYPE',
'V8.SizeOf_BREAK_POINT_INFO_TYPE',
'V8.SizeOf_BYTE_ARRAY_TYPE',
'V8.SizeOf_CALL_HANDLER_INFO_TYPE',
'V8.SizeOf_CELL_TYPE',
'V8.SizeOf_CODE_AGE-NotExecuted',
'V8.SizeOf_CODE_AGE-ExecutedOnce',
'V8.SizeOf_CODE_AGE-NoAge',
'V8.SizeOf_CODE_AGE-Quadragenarian',
'V8.SizeOf_CODE_AGE-Quinquagenarian',
'V8.SizeOf_CODE_AGE-Sexagenarian',
'V8.SizeOf_CODE_AGE-Septuagenarian',
'V8.SizeOf_CODE_AGE-Octogenarian',
'V8.SizeOf_CODE_CACHE_TYPE',
'V8.SizeOf_CODE_TYPE',
'V8.SizeOf_CODE_TYPE-BINARY_OP_IC',
'V8.SizeOf_CODE_TYPE-BUILTIN',
'V8.SizeOf_CODE_TYPE-CALL_IC',
'V8.SizeOf_CODE_TYPE-COMPARE_IC',
'V8.SizeOf_CODE_TYPE-COMPARE_NIL_IC',
'V8.SizeOf_CODE_TYPE-FUNCTION',
'V8.SizeOf_CODE_TYPE-KEYED_CALL_IC',
'V8.SizeOf_CODE_TYPE-KEYED_LOAD_IC',
'V8.SizeOf_CODE_TYPE-KEYED_STORE_IC',
'V8.SizeOf_CODE_TYPE-LOAD_IC',
'V8.SizeOf_CODE_TYPE-OPTIMIZED_FUNCTION',
'V8.SizeOf_CODE_TYPE-REGEXP',
'V8.SizeOf_CODE_TYPE-STORE_IC',
'V8.SizeOf_CODE_TYPE-STUB',
'V8.SizeOf_CODE_TYPE-TO_BOOLEAN_IC',
'V8.SizeOf_CONS_ASCII_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_CONS_ASCII_STRING_TYPE',
'V8.SizeOf_CONS_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_CONS_STRING_TYPE',
'V8.SizeOf_DEBUG_INFO_TYPE',
'V8.SizeOf_DECLARED_ACCESSOR_DESCRIPTOR_TYPE',
'V8.SizeOf_DECLARED_ACCESSOR_INFO_TYPE',
'V8.SizeOf_EXECUTABLE_ACCESSOR_INFO_TYPE',
'V8.SizeOf_EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_EXTERNAL_ASCII_STRING_TYPE',
'V8.SizeOf_EXTERNAL_BYTE_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_DOUBLE_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_FLOAT_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE',
'V8.SizeOf_EXTERNAL_INT_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_PIXEL_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_SHORT_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_STRING_TYPE',
'V8.SizeOf_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE',
'V8.SizeOf_EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_UNSIGNED_INT_ARRAY_TYPE',
'V8.SizeOf_EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE',
'V8.SizeOf_FILLER_TYPE',
'V8.SizeOf_FIXED_ARRAY-DESCRIPTOR_ARRAY_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-DICTIONARY_ELEMENTS_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-DICTIONARY_PROPERTIES_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-FAST_ELEMENTS_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-FAST_PROPERTIES_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-MAP_CODE_CACHE_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-SCOPE_INFO_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-STRING_TABLE_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY-TRANSITION_ARRAY_SUB_TYPE',
'V8.SizeOf_FIXED_ARRAY_TYPE',
'V8.SizeOf_FIXED_DOUBLE_ARRAY_TYPE',
'V8.SizeOf_FOREIGN_TYPE',
'V8.SizeOf_FREE_SPACE_TYPE',
'V8.SizeOf_FUNCTION_TEMPLATE_INFO_TYPE',
'V8.SizeOf_HEAP_NUMBER_TYPE',
'V8.SizeOf_INTERCEPTOR_INFO_TYPE',
'V8.SizeOf_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_JS_ARRAY_BUFFER_TYPE',
'V8.SizeOf_JS_ARRAY_TYPE',
'V8.SizeOf_JS_BUILTINS_OBJECT_TYPE',
'V8.SizeOf_JS_CONTEXT_EXTENSION_OBJECT_TYPE',
'V8.SizeOf_JS_DATA_VIEW_TYPE',
'V8.SizeOf_JS_DATE_TYPE',
'V8.SizeOf_JS_FUNCTION_PROXY_TYPE',
'V8.SizeOf_JS_FUNCTION_TYPE',
'V8.SizeOf_JS_GENERATOR_OBJECT_TYPE',
'V8.SizeOf_JS_GLOBAL_OBJECT_TYPE',
'V8.SizeOf_JS_GLOBAL_PROXY_TYPE',
'V8.SizeOf_JS_MAP_TYPE',
'V8.SizeOf_JS_MESSAGE_OBJECT_TYPE',
'V8.SizeOf_JS_MODULE_TYPE',
'V8.SizeOf_JS_OBJECT_TYPE',
'V8.SizeOf_JS_PROXY_TYPE',
'V8.SizeOf_JS_REGEXP_TYPE',
'V8.SizeOf_JS_SET_TYPE',
'V8.SizeOf_JS_TYPED_ARRAY_TYPE',
'V8.SizeOf_JS_VALUE_TYPE',
'V8.SizeOf_JS_WEAK_MAP_TYPE',
'V8.SizeOf_JS_WEAK_SET_TYPE',
'V8.SizeOf_MAP_TYPE',
'V8.SizeOf_OBJECT_TEMPLATE_INFO_TYPE',
'V8.SizeOf_ODDBALL_TYPE',
'V8.SizeOf_POLYMORPHIC_CODE_CACHE_TYPE',
'V8.SizeOf_PROPERTY_CELL_TYPE',
'V8.SizeOf_SCRIPT_TYPE',
'V8.SizeOf_SHARED_FUNCTION_INFO_TYPE',
'V8.SizeOf_SHORT_EXTERNAL_ASCII_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_SHORT_EXTERNAL_ASCII_STRING_TYPE',
'V8.SizeOf_SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE',
'V8.SizeOf_SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE',
'V8.SizeOf_SHORT_EXTERNAL_STRING_TYPE',
'V8.SizeOf_SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE',
'V8.SizeOf_SIGNATURE_INFO_TYPE',
'V8.SizeOf_SLICED_ASCII_STRING_TYPE',
'V8.SizeOf_SLICED_STRING_TYPE',
'V8.SizeOf_STRING_TYPE',
'V8.SizeOf_SYMBOL_TYPE',
'V8.SizeOf_TYPE_FEEDBACK_INFO_TYPE',
'V8.SizeOf_TYPE_SWITCH_INFO_TYPE',
]
# Descriptions for what different counter names represent.
DESCRIPTIONS = {
'V8.MemoryExternalFragmentationTotal':
'Total external memory fragmentation after each GC in percent.',
'V8.MemoryHeapSampleTotalCommitted':
'The total size of committed memory used by V8 after each GC in KB.',
'V8.MemoryHeapSampleTotalUsed':
'The total size of live memory used by V8 after each GC in KB.',
}
class V8ObjectStatsMetric(Metric):
"""V8ObjectStatsMetric gathers statistics on the size of types in the V8 heap.
It does this by enabling the --track_gc_object_stats flag on V8 and reading
these statistics from the StatsTableMetric.
"""
def __init__(self, counters=None):
"""Track ``counters`` (defaults to _COUNTER_NAMES)."""
super(V8ObjectStatsMetric, self).__init__()
# _results: counter-name -> value mapping, populated by Stop().
self._results = None
self._counters = counters or _COUNTER_NAMES
@classmethod
def CustomizeBrowserOptions(cls, options):
# Browser flags required for the renderer to expose the stats table
# and for V8 to record per-type GC object statistics.
options.AppendExtraBrowserArgs([
'--enable-stats-table',
'--enable-benchmarking',
'--js-flags=--track_gc_object_stats --expose_gc',
# TODO(rmcilroy): This is needed for --enable-stats-table. Update once
# https://codereview.chromium.org/22911027/ lands.
'--no-sandbox'
])
@staticmethod
def GetV8StatsTable(tab, counters):
# Evaluates JS in the tab; returns {} when chrome.benchmarking is
# unavailable, otherwise a dict of counter name -> renderer value.
return tab.EvaluateJavaScript("""
(function(counters) {
var results = {};
if (!window.chrome || !window.chrome.benchmarking)
return results;
try {
window.gc(); // Trigger GC to ensure stats are checkpointed.
} catch(e) {
// window.gc() could have been mapped to something else,
// just continue.
}
for (var i = 0; i < counters.length; i++)
results[counters[i]] =
chrome.benchmarking.counterForRenderer(counters[i]);
return results;
})(%s);
""" % json.dumps(counters))
def Start(self, page, tab):
"""Do Nothing."""
pass
def Stop(self, page, tab):
"""Get the values in the stats table after the page is loaded."""
self._results = V8ObjectStatsMetric.GetV8StatsTable(tab, self._counters)
if not self._results:
logging.warning('No V8 object stats from website: ' + page.display_name)
def AddResults(self, tab, results):
"""Add results for this page to the results object."""
assert self._results != None, 'Must call Stop() first'
for counter_name in self._results:
description = DESCRIPTIONS.get(counter_name)
display_name = counter_name.replace('.', '_')
# Counter values are bytes; reported in KB per the 'kb' unit.
results.AddValue(scalar.ScalarValue(
results.current_page, display_name, 'kb',
self._results[counter_name] / 1024.0, description=description)) | unknown | codeparrot/codeparrot-clean | ||
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Bridge\Doctrine\Validator\Constraints;
use Symfony\Component\Validator\Attribute\HasNamedArguments;
use Symfony\Component\Validator\Constraint;
/**
* Constraint for the Unique Entity validator.
*
* @author Benjamin Eberlei <kontakt@beberlei.de>
*/
#[\Attribute(\Attribute::TARGET_CLASS | \Attribute::IS_REPEATABLE)]
class UniqueEntity extends Constraint
{
public const NOT_UNIQUE_ERROR = '23bd9dbf-6b9b-41cd-a99e-4844bcf3077f';
protected const ERROR_NAMES = [
self::NOT_UNIQUE_ERROR => 'NOT_UNIQUE_ERROR',
];
public string $message = 'This value is already used.';
public string $service = 'doctrine.orm.validator.unique';
public ?string $em = null;
public ?string $entityClass = null;
public string $repositoryMethod = 'findBy';
public array|string $fields = [];
public ?string $errorPath = null;
public bool|array|string $ignoreNull = true;
public array $identifierFieldNames = [];
/**
* @param array|string $fields The combination of fields that must contain unique values or a set of options
* @param bool|string[]|string $ignoreNull The combination of fields that ignore null values
* @param string|null $em The entity manager used to query for uniqueness instead of the manager of this class
* @param string|null $entityClass The entity class to enforce uniqueness on instead of the current class
* @param string|null $repositoryMethod The repository method to check uniqueness instead of findBy. The method will receive as its argument
* a fieldName => value associative array according to the fields option configuration
* @param string|null $errorPath Bind the constraint violation to this field instead of the first one in the fields option configuration
*/
#[HasNamedArguments]
public function __construct(
array|string $fields,
?string $message = null,
?string $service = null,
?string $em = null,
?string $entityClass = null,
?string $repositoryMethod = null,
?string $errorPath = null,
bool|string|array|null $ignoreNull = null,
?array $identifierFieldNames = null,
?array $groups = null,
$payload = null,
) {
// No options array is passed to the base Constraint; all properties
// are assigned explicitly below.
parent::__construct(null, $groups, $payload);
// Note: $fields is non-nullable, so its ?? fallback can never fire;
// it is kept for symmetry with the nullable options.
$this->fields = $fields ?? $this->fields;
$this->message = $message ?? $this->message;
$this->service = $service ?? $this->service;
$this->em = $em ?? $this->em;
$this->entityClass = $entityClass ?? $this->entityClass;
$this->repositoryMethod = $repositoryMethod ?? $this->repositoryMethod;
$this->errorPath = $errorPath ?? $this->errorPath;
$this->ignoreNull = $ignoreNull ?? $this->ignoreNull;
$this->identifierFieldNames = $identifierFieldNames ?? $this->identifierFieldNames;
}
/**
* The validator must be defined as a service with this name.
*/
public function validatedBy(): string
{
return $this->service;
}
public function getTargets(): string|array
{
return self::CLASS_CONSTRAINT;
}
} | php | github | https://github.com/symfony/symfony | src/Symfony/Bridge/Doctrine/Validator/Constraints/UniqueEntity.php |
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Optional
from typing_extensions import Literal
from ..._models import BaseModel
__all__ = ["PythonGrader"]
class PythonGrader(BaseModel):
"""A PythonGrader object that runs a python script on the input."""
name: str
"""The name of the grader."""
source: str
"""The source code of the python script."""
type: Literal["python"]
"""The object type, which is always `python`."""
image_tag: Optional[str] = None
"""The image tag to use for the python script.""" | python | github | https://github.com/openai/openai-python | src/openai/types/graders/python_grader.py |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
class SspaceLongread(Package):
"""SSPACE-LongRead is a stand-alone program for scaffolding pre-assembled
contigs using long reads
Note: A manual download is required for SSPACE-LongRead.
Spack will search your current directory for the download file.
Alternatively, add this file to a mirror so that Spack can find it.
For instructions on how to set up a mirror, see
http://spack.readthedocs.io/en/latest/mirrors.html"""
homepage = "https://www.baseclear.com/genomics/bioinformatics/basetools/SSPACE-longread"
# Second argument is the md5 checksum of the manually downloaded tarball.
version('1.1', '0bb5d8603d7ead4ff1596135a520cc26')
depends_on('perl', type=('build', 'run'))
def url_for_version(self, version):
# Expects the (manually downloaded) tarball in the current directory.
return "file://{0}/40SSPACE-LongRead_v{1}.tar.gz".format(
os.getcwd(), version.dashed)
def install(self, spec, prefix):
# Copy the two bundled executables straight into the install prefix.
mkdirp(prefix.bin)
install('blasr', prefix.bin)
install('SSPACE-LongRead.pl', prefix.bin) | unknown | codeparrot/codeparrot-clean | ||
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Return the git-archive substitution keywords as a dict."""
    # These strings are rewritten by git during `git archive` (export-subst).
    # setup.py/versioneer.py greps for the variable names, so each assignment
    # must stay on a line of its own; _version.py just calls get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    return {"refnames": git_refnames, "full": git_full, "date": git_date}
class VersioneerConfig:
"""Container for Versioneer configuration parameters.

Instances carry the attributes assigned by get_config(): VCS, style,
tag_prefix, parentdir_prefix, versionfile_source and verbose.
"""
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    cfg = VersioneerConfig()
    # These values were baked in when 'setup.py versioneer' generated
    # _version.py for this project.
    settings = {
        "VCS": "git",
        "style": "pep440",
        "tag_prefix": "v",
        "parentdir_prefix": "dmf_control_board_firmware-",
        "versionfile_source": "dmf_control_board_firmware/_version.py",
        "verbose": False,
    }
    for attr, value in settings.items():
        setattr(cfg, attr, value)
    return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario.

Each version-discovery strategy raises this so the caller can fall
through to the next strategy.
"""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method):  # decorator
    """Return a decorator that records the function in HANDLERS[vcs][method]."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method] and hand it back unchanged."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Run the first spawnable command from ``commands`` with ``args``.

Each entry in ``commands`` is an alternative spelling of the same
executable (e.g. "git" vs "git.cmd" on Windows); the first one that
can be spawned is used.  Returns (stdout, returncode); (None, None)
when no command could be spawned, and (None, returncode) on non-zero
exit.
"""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
# ENOENT: executable not found -- fall through to the next spelling.
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.  We also search up to two
    directory levels above ``root`` for an appropriately named parent.
    """
    tried = []
    for _ in range(3):
        leaf = os.path.basename(root)
        if leaf.startswith(parentdir_prefix):
            return {"version": leaf[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(root)
        root = os.path.dirname(root)  # walk up one level and retry
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file.

Scans ``versionfile_abs`` line by line for the git_refnames/git_full/
git_date assignments and returns whichever were found as a dict
(possibly empty if the file is unreadable).
"""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
# NOTE(review): if readlines() raises after open() succeeds, this
# close() is skipped and the handle leaks -- a with-block would fix it.
f.close()
except EnvironmentError:
# Unreadable/missing file: return whatever was collected (usually {}).
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords.

``keywords`` is the dict from get_keywords()/git_get_keywords().
Returns a version-info dict; raises NotThisMethod when the keywords
are missing or were never expanded by git-archive.
"""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.

Returns a "pieces" dict with keys: long, short, error, dirty, date,
and (unless parsing failed) closest-tag and distance.  Note that
closest-tag is None when the repo has no matching tag at all.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
    """Return "." if the closest tag already contains a "+", else "+".

    PEP 440 joins the local version segment with "+"; when the tag itself
    already carries a local segment, further parts are appended with ".".
    """
    # Robustness fix: "closest-tag" may be present with value None (that is
    # how git_pieces_from_vcs records "no tag found"), in which case the
    # original `"+" in pieces.get("closest-tag", "")` raised TypeError.
    # Coalesce None to "" before the containment test.
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]].  Note that if you get a tagged
    build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag anywhere in history
        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        # a tag that already has a local "+" segment joins with "."
        sep = "." if "+" in tag else "+"
        rendered += "%s%d.g%s" % (sep, pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return "%s.post.dev%d" % (tag, pieces["distance"])
    # Exactly on a tag: the tag itself is the version.
    return tag
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    dev = ".dev0" if pieces["dirty"] else ""
    if not tag:
        # exception #1
        return "0.post%d%s+g%s" % (pieces["distance"], dev, pieces["short"])
    if not (pieces["distance"] or pieces["dirty"]):
        return tag
    # Separator choice (same rule as plus_or_dot): reuse "." when the tag
    # already carries a PEP 440 local-version "+".
    sep = "." if "+" in tag else "+"
    return "%s.post%d%s%sg%s" % (tag, pieces["distance"], dev, sep,
                                 pieces["short"])
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: always carries .post, even at distance 0
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        return rendered
    if not (pieces["distance"] or pieces["dirty"]):
        # Exactly on a clean tag: the tag itself is the version.
        return tag
    rendered = "%s.post%d" % (tag, pieces["distance"])
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: bare short hash, no 'g' prefix
        rendered = pieces["short"]
    elif pieces["distance"]:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        rendered = tag
    return (rendered + "-dirty") if pieces["dirty"] else rendered
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        # Long form: distance and hash appear even at distance 0.
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # Upstream parsing failed; report the error instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    # Dispatch table instead of an if/elif chain.
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
def get_versions():
    """Get version information or return default if unable to do so.

    Tries three strategies in order, each signalling "not applicable" by
    raising NotThisMethod:
      1. expanded keyword substitutions baked into this file,
      2. asking git directly (requires a .git checkout and __file__),
      3. parsing the parent directory name.
    Returns a "0+unknown" stub dict (with an "error" entry) when every
    strategy fails.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    # Strategy 1: keywords expanded by git-archive / export-subst.
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    # Locate the project root by stripping one path component per element
    # of versionfile_source.  NameError fires when __file__ is undefined
    # (frozen interpreters), in which case no further strategy can work.
    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    # Strategy 2: interrogate git itself.
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    # Strategy 3: infer the version from the unpacked-tarball dirname.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Strategy to export custom proto formats."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
from tensorflow.contrib.boosted_trees.proto import tree_config_pb2
from tensorflow.contrib.boosted_trees.python.training.functions import gbdt_batch
from tensorflow.contrib.decision_trees.proto import generic_tree_model_extensions_pb2
from tensorflow.contrib.decision_trees.proto import generic_tree_model_pb2
from tensorflow.contrib.learn.python.learn import export_strategy
from tensorflow.contrib.learn.python.learn.utils import saved_model_export_utils
from tensorflow.python.client import session as tf_session
from tensorflow.python.framework import ops
from tensorflow.python.platform import gfile
from tensorflow.python.saved_model import loader as saved_model_loader
from tensorflow.python.saved_model import tag_constants
def make_custom_export_strategy(name,
                                convert_fn,
                                feature_columns,
                                export_input_fn):
  """Makes custom exporter of GTFlow tree format.

  Wraps the standard SavedModel export strategy: after the SavedModel is
  written, the exported graph is reloaded, the serialized tree ensemble is
  extracted, optionally converted via `convert_fn`, and per-feature
  importances are written to `assets.extra/feature_importances`.

  Args:
    name: A string, for the name of the export strategy.
    convert_fn: A function that converts the tree proto to desired format and
      saves it to the desired location. Can be None to skip conversion.
    feature_columns: A list of feature columns.
    export_input_fn: A function that takes no arguments and returns an
      `InputFnOps`.

  Returns:
    An `ExportStrategy`.
  """
  base_strategy = saved_model_export_utils.make_export_strategy(
      serving_input_fn=export_input_fn)
  input_fn = export_input_fn()
  # Recover the ordered feature names and the per-kind feature groups so the
  # conversion/importance code can map column indices back to names.
  (sorted_feature_names, dense_floats, sparse_float_indices, _, _,
   sparse_int_indices, _, _) = gbdt_batch.extract_features(
       input_fn.features, feature_columns)

  def export_fn(estimator, export_dir, checkpoint_path=None, eval_result=None):
    """A wrapper to export to SavedModel, and convert it to other formats."""
    result_dir = base_strategy.export(estimator, export_dir,
                                      checkpoint_path,
                                      eval_result)
    # Reload the just-exported SavedModel in a fresh graph/session so we can
    # pull the serialized ensemble proto out of the serving graph.
    with ops.Graph().as_default() as graph:
      with tf_session.Session(graph=graph) as sess:
        saved_model_loader.load(
            sess, [tag_constants.SERVING], result_dir)
        # Note: This is GTFlow internal API and might change.
        ensemble_model = graph.get_operation_by_name(
            "ensemble_model/TreeEnsembleSerialize")
        _, dfec_str = sess.run(ensemble_model.outputs)
        dtec = tree_config_pb2.DecisionTreeEnsembleConfig()
        dtec.ParseFromString(dfec_str)
        # Export the result in the same folder as the saved model.
        if convert_fn:
          convert_fn(dtec, sorted_feature_names,
                     len(dense_floats),
                     len(sparse_float_indices),
                     len(sparse_int_indices), result_dir, eval_result)
        # Write feature importances (descending) next to the SavedModel.
        feature_importances = _get_feature_importances(
            dtec, sorted_feature_names,
            len(dense_floats),
            len(sparse_float_indices), len(sparse_int_indices))
        sorted_by_importance = sorted(
            feature_importances.items(), key=lambda x: -x[1])
        assets_dir = os.path.join(result_dir, "assets.extra")
        gfile.MakeDirs(assets_dir)
        with gfile.GFile(os.path.join(assets_dir, "feature_importances"),
                         "w") as f:
          f.write("\n".join("%s, %f" % (k, v) for k, v in sorted_by_importance))
    return result_dir

  return export_strategy.ExportStrategy(name, export_fn)
def convert_to_universal_format(dtec, sorted_feature_names,
                                num_dense, num_sparse_float,
                                num_sparse_int,
                                feature_name_to_proto=None):
  """Convert GTFlow trees to universal format.

  Args:
    dtec: A tree_config_pb2.DecisionTreeEnsembleConfig proto.
    sorted_feature_names: Ordered list of feature names; split feature-column
      indices are offsets into this list (dense first, then sparse float,
      then categorical).
    num_dense: Number of dense float feature columns.
    num_sparse_float: Number of sparse float feature columns.
    num_sparse_int: Unused; kept for signature compatibility.
    feature_name_to_proto: Optional dict mapping feature name to a feature
      description proto to copy into the output; when falsy, features are
      listed without descriptions.

  Returns:
    A generic_tree_model_pb2.ModelAndFeatures proto.

  Raises:
    ValueError: If a tree node has an unrecognized split type.
  """
  del num_sparse_int  # unused.
  model_and_features = generic_tree_model_pb2.ModelAndFeatures()
  # TODO(jonasz): Feature descriptions should contain information about how
  # each feature is processed before it's fed to the model (e.g. bucketing
  # information). As of now, this serves as a list of features the model uses.
  for feature_name in sorted_feature_names:
    if not feature_name_to_proto:
      model_and_features.features[feature_name].SetInParent()
    else:
      model_and_features.features[feature_name].CopyFrom(
          feature_name_to_proto[feature_name])
  model = model_and_features.model
  model.ensemble.summation_combination_technique.SetInParent()
  for tree_idx in range(len(dtec.trees)):
    gtflow_tree = dtec.trees[tree_idx]
    # The per-tree weight is folded into the leaf values below, so the
    # output ensemble is a plain (unweighted) summation.
    tree_weight = dtec.tree_weights[tree_idx]
    member = model.ensemble.members.add()
    member.submodel_id.value = tree_idx
    tree = member.submodel.decision_tree
    for node_idx in range(len(gtflow_tree.nodes)):
      gtflow_node = gtflow_tree.nodes[node_idx]
      node = tree.nodes.add()
      node_type = gtflow_node.WhichOneof("node")
      node.node_id.value = node_idx
      if node_type == "leaf":
        leaf = gtflow_node.leaf
        if leaf.HasField("vector"):
          for weight in leaf.vector.value:
            new_value = node.leaf.vector.value.add()
            new_value.float_value = weight * tree_weight
        else:
          for index, weight in zip(
              leaf.sparse_vector.index, leaf.sparse_vector.value):
            new_value = node.leaf.sparse_vector.sparse_value[index]
            new_value.float_value = weight * tree_weight
      else:
        node = node.binary_node
        # Binary nodes here.
        if node_type == "dense_float_binary_split":
          split = gtflow_node.dense_float_binary_split
          feature_id = split.feature_column
          inequality_test = node.inequality_left_child_test
          inequality_test.feature_id.id.value = sorted_feature_names[feature_id]
          inequality_test.type = (
              generic_tree_model_pb2.InequalityTest.LESS_OR_EQUAL)
          inequality_test.threshold.float_value = split.threshold
        elif node_type == "sparse_float_binary_split_default_left":
          split = gtflow_node.sparse_float_binary_split_default_left.split
          node.default_direction = (
              generic_tree_model_pb2.BinaryNode.LEFT)
          # TODO(nponomareva): adjust this id assignement when we allow multi-
          # column sparse tensors.
          feature_id = split.feature_column + num_dense
          inequality_test = node.inequality_left_child_test
          inequality_test.feature_id.id.value = sorted_feature_names[feature_id]
          inequality_test.type = (
              generic_tree_model_pb2.InequalityTest.LESS_OR_EQUAL)
          inequality_test.threshold.float_value = split.threshold
        elif node_type == "sparse_float_binary_split_default_right":
          split = gtflow_node.sparse_float_binary_split_default_right.split
          node.default_direction = (
              generic_tree_model_pb2.BinaryNode.RIGHT)
          # TODO(nponomareva): adjust this id assignement when we allow multi-
          # column sparse tensors.
          feature_id = split.feature_column + num_dense
          inequality_test = node.inequality_left_child_test
          inequality_test.feature_id.id.value = sorted_feature_names[feature_id]
          inequality_test.type = (
              generic_tree_model_pb2.InequalityTest.LESS_OR_EQUAL)
          inequality_test.threshold.float_value = split.threshold
        elif node_type == "categorical_id_binary_split":
          split = gtflow_node.categorical_id_binary_split
          node.default_direction = generic_tree_model_pb2.BinaryNode.RIGHT
          feature_id = split.feature_column + num_dense + num_sparse_float
          categorical_test = (
              generic_tree_model_extensions_pb2.MatchingValuesTest())
          categorical_test.feature_id.id.value = sorted_feature_names[
              feature_id]
          matching_id = categorical_test.value.add()
          matching_id.int64_value = split.feature_id
          node.custom_left_child_test.Pack(categorical_test)
        else:
          # Bug fix: the original passed node_type as a second constructor
          # argument instead of %-formatting it into the message.
          raise ValueError("Unexpected node type %s" % node_type)
        node.left_child_id.value = split.left_id
        node.right_child_id.value = split.right_id
  return model_and_features
def _get_feature_importances(dtec, feature_names, num_dense_floats,
num_sparse_float, num_sparse_int):
"""Export the feature importance per feature column."""
del num_sparse_int # Unused.
sums = collections.defaultdict(lambda: 0)
for tree_idx in range(len(dtec.trees)):
tree = dtec.trees[tree_idx]
for tree_node in tree.nodes:
node_type = tree_node.WhichOneof("node")
if node_type == "dense_float_binary_split":
split = tree_node.dense_float_binary_split
split_column = feature_names[split.feature_column]
elif node_type == "sparse_float_binary_split_default_left":
split = tree_node.sparse_float_binary_split_default_left.split
split_column = feature_names[split.feature_column + num_dense_floats]
elif node_type == "sparse_float_binary_split_default_right":
split = tree_node.sparse_float_binary_split_default_right.split
split_column = feature_names[split.feature_column + num_dense_floats]
elif node_type == "categorical_id_binary_split":
split = tree_node.categorical_id_binary_split
split_column = feature_names[split.feature_column + num_dense_floats +
num_sparse_float]
elif node_type == "categorical_id_set_membership_binary_split":
split = tree_node.categorical_id_set_membership_binary_split
split_column = feature_names[split.feature_column + num_dense_floats +
num_sparse_float]
elif node_type == "leaf":
assert tree_node.node_metadata.gain == 0
continue
else:
raise ValueError("Unexpected split type %s", node_type)
# Apply shrinkage factor. It is important since it is not always uniform
# across different trees.
sums[split_column] += (
tree_node.node_metadata.gain * dtec.tree_weights[tree_idx])
return dict(sums) | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
import six
from django.conf import settings
from django.contrib import messages
from django.contrib.messages.api import MessageFailure
from django.shortcuts import redirect
from django.utils.http import urlquote
from social.exceptions import SocialAuthBaseException
class SocialAuthExceptionMiddleware(object):
    """Middleware that handles Social Auth AuthExceptions by providing the user
    with a message, logging an error, and redirecting to some next location.

    By default, the exception message itself is sent to the user and they are
    redirected to the location specified in the SOCIAL_AUTH_LOGIN_ERROR_URL
    setting.

    This middleware can be extended by overriding the get_message or
    get_redirect_uri methods, which each accept request and exception.
    """
    def process_exception(self, request, exception):
        """Convert a SocialAuthBaseException into a user-facing redirect.

        Returns None (letting the exception propagate to Django's normal
        handling) when there is no social strategy on the request, when
        RAISE_EXCEPTIONS is enabled, or when the exception is not a
        social-auth one.
        """
        strategy = getattr(request, 'social_strategy', None)
        if strategy is None or self.raise_exception(request, exception):
            return

        if isinstance(exception, SocialAuthBaseException):
            backend_name = strategy.backend.name
            message = self.get_message(request, exception)
            url = self.get_redirect_uri(request, exception)
            try:
                messages.error(request, message,
                               extra_tags='social-auth ' + backend_name)
            except MessageFailure:
                # No message storage configured; fall back to passing the
                # message and backend through the redirect querystring.
                # ('?' in url and '&' or '?') is the old-style conditional
                # for choosing the querystring separator.
                url += ('?' in url and '&' or '?') + \
                       'message={0}&backend={1}'.format(urlquote(message),
                                                        backend_name)
            return redirect(url)

    def raise_exception(self, request, exception):
        """Return the RAISE_EXCEPTIONS setting (defaults to DEBUG).

        Returns None when the request has no social strategy, which is
        treated as falsy by the caller.
        """
        strategy = getattr(request, 'social_strategy', None)
        if strategy is not None:
            return strategy.setting('RAISE_EXCEPTIONS', settings.DEBUG)

    def get_message(self, request, exception):
        """Return the text shown to the user; default is the exception text."""
        return six.text_type(exception)

    def get_redirect_uri(self, request, exception):
        """Return the URL to redirect to; default is LOGIN_ERROR_URL."""
        strategy = getattr(request, 'social_strategy', None)
        return strategy.setting('LOGIN_ERROR_URL')
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains container classes to represent different protocol buffer types.
This file defines container classes which represent categories of protocol
buffer field types which need extra maintenance. Currently these categories
are:
- Repeated scalar fields - These are all repeated fields which aren't
composite (e.g. they are of simple types like int32, string, etc).
- Repeated composite fields - Repeated fields which are composite. This
includes groups and nested messages.
"""
__author__ = 'petar@google.com (Petar Petrov)'
class BaseContainer(object):

  """Base container class.

  Holds the backing list and the listener shared by the repeated-scalar and
  repeated-composite containers below.
  """

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  def __getitem__(self, key):
    """Retrieves item by the specified key."""
    return self._values[key]

  def __len__(self):
    """Returns the number of elements in the container."""
    return len(self._values)

  def __ne__(self, other):
    """Checks if another instance isn't equal to this one."""
    # The concrete classes should define __eq__.
    return not self == other

  def __hash__(self):
    # Containers are mutable, so they must not be usable as dict keys.
    raise TypeError('unhashable object')

  def __repr__(self):
    return repr(self._values)

  def sort(self, *args, **kwargs):
    # Continue to support the old sort_function keyword argument.
    # This is expected to be a rare occurrence, so use LBYL to avoid
    # the overhead of actually catching KeyError.
    # NOTE(review): 'cmp' is only accepted by list.sort on Python 2;
    # passing sort_function on Python 3 would raise TypeError.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)
class RepeatedScalarFieldContainer(BaseContainer):

  """Simple, type-checked, list-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_type_checker']

  def __init__(self, message_listener, type_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super(RepeatedScalarFieldContainer, self).__init__(message_listener)
    self._type_checker = type_checker

  def append(self, value):
    """Appends an item to the list. Similar to list.append()."""
    self._values.append(self._type_checker.CheckValue(value))
    # Skip the notification when the listener already knows it is dirty.
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key, value):
    """Inserts the item at the specified position. Similar to list.insert()."""
    self._values.insert(key, self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq):
    """Extends by appending the given sequence. Similar to list.extend()."""
    if not elem_seq:
      return
    # Type-check every element before mutating self._values, so a failed
    # check leaves the container unchanged.
    new_values = []
    for elem in elem_seq:
      new_values.append(self._type_checker.CheckValue(elem))
    self._values.extend(new_values)
    self._message_listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other._values)
    self._message_listener.Modified()

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def __setitem__(self, key, value):
    """Sets the item on the specified position."""
    if isinstance(key, slice):  # PY3
      # Python 3 routes slice assignment through __setitem__, so forward
      # to the Python 2 slice handler.
      if key.step is not None:
        raise ValueError('Extended slices not supported')
      self.__setslice__(key.start, key.stop, value)
    else:
      self._values[key] = self._type_checker.CheckValue(value)
      self._message_listener.Modified()

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __setslice__(self, start, stop, values):
    """Sets the subset of items from between the specified indices."""
    new_values = []
    for value in values:
      new_values.append(self._type_checker.CheckValue(value))
    self._values[start:stop] = new_values
    self._message_listener.Modified()

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    # Special case for the same type which should be common and fast.
    if isinstance(other, self.__class__):
      return other._values == self._values
    # We are presumably comparing against some other sequence type.
    return other == self._values
class RepeatedCompositeFieldContainer(BaseContainer):

  """Simple, list-like container for holding repeated composite fields."""

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener, message_descriptor):
    """
    Note that we pass in a descriptor instead of the generated directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container. We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
    self._message_descriptor = message_descriptor

  def add(self, **kwargs):
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    new_element = self._message_descriptor._concrete_class(**kwargs)
    # Share our listener so mutations of the child mark this container's
    # parent dirty too.
    new_element._SetListener(self._message_listener)
    self._values.append(new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return new_element

  def extend(self, elem_seq):
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    values = self._values
    for message in elem_seq:
      # Deep-copy each element via MergeFrom rather than aliasing it.
      new_element = message_class()
      new_element._SetListener(listener)
      new_element.MergeFrom(message)
      values.append(new_element)
    listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one, copying each individual message.
    """
    self.extend(other._values)

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
"""
FEniCS tutorial demo program: Diffusion equation with Dirichlet
conditions and a solution that will be exact at all nodes.
As d1_d2D.py, but here we do all assembly prior to the time loop
(for increased efficiency).
"""
from dolfin import *
import numpy
# Create mesh and define function space
nx = ny = 2
mesh = UnitSquare(nx, ny)
V = FunctionSpace(mesh, 'Lagrange', 1)
# Define boundary conditions
alpha = 3; beta = 1.2
u0 = Expression('1 + x[0]*x[0] + alpha*x[1]*x[1] + beta*t',
alpha=alpha, beta=beta, t=0)
class Boundary(SubDomain): # define the Dirichlet boundary
def inside(self, x, on_boundary):
return on_boundary
boundary = Boundary()
bc = DirichletBC(V, u0, boundary)
# Initial condition
u_1 = interpolate(u0, V)
#u_1 = project(u0, V) # will not result in exact solution!
T = 1.9 # total simulation time
dt = 0.3 # time step
# Define variational problem
# Laplace term
u = TrialFunction(V)
v = TestFunction(V)
a_K = inner(nabla_grad(u), nabla_grad(v))*dx
# "Mass matrix" term
a_M = u*v*dx
M = assemble(a_M)
K = assemble(a_K)
A = M + dt*K
# f term
f = Expression('beta - 2 - 2*alpha', beta=beta, alpha=alpha)
# Compute solution
u = Function(V)
t = dt
while t <= T:
print 'time =', t
# f.t = t
f_k = interpolate(f, V)
F_k = f_k.vector()
b = M*u_1.vector() + dt*M*F_k
u0.t = t
bc.apply(A, b)
solve(A, u.vector(), b)
# Verify
u_e = interpolate(u0, V)
u_e_array = u_e.vector().array()
u_array = u.vector().array()
print 'Max error, t=%-10.3f:' % t, numpy.abs(u_e_array - u_array).max()
t += dt
u_1.assign(u) | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def _finance_field_factories():
    """Return the (field_name, field_instance) pairs shared by all models.

    All five finance models receive the same seven AlterField tweaks.
    This is called once per model below so each AlterField operation gets
    a fresh field instance (Django binds field objects to their model).
    """
    return [
        ('company',
         models.ForeignKey(verbose_name='Company', to='company.Company')),
        ('dateReception',
         models.DateField(null=True, verbose_name='Received date',
                          blank=True)),
        ('dateSoumission',
         models.DateField(verbose_name='Date of submission')),
        ('description',
         models.CharField(max_length=512, verbose_name='Description',
                          blank=True)),
        ('name',
         models.CharField(max_length=200, verbose_name='Name', blank=True)),
        ('sommeReception',
         models.PositiveIntegerField(null=True, verbose_name='Amount received',
                                     blank=True)),
        ('sommeSoumission',
         models.PositiveIntegerField(verbose_name='Amount requested')),
    ]


class Migration(migrations.Migration):
    """Apply identical verbose-name/null/blank tweaks to every finance model."""

    dependencies = [
        ('finance', '0001_initial'),
    ]

    # The auto-generated original repeated the same seven AlterField
    # operations for each model; build them in a loop instead.  Operation
    # order (model-major, then field order) matches the original exactly.
    operations = [
        migrations.AlterField(model_name=model_name, name=field_name,
                              field=field)
        for model_name in ('bourse', 'investissement', 'pret',
                           'subvention', 'vente')
        for field_name, field in _finance_field_factories()
    ]
"""
Created on Wed Jun 24 11:04:10 2015
Learn T1 NMR experiement run on TOPSPIN
T1 inversion recovery model defined in find_T1_model class
includes calls to run TOPSPIN commands- NMR experiment
@author: Kissan Mistry
"""
#imports and intializations
from __future__ import division
from t1_model import T1Model
from qinfer.distributions import UniformDistribution
#from qinfer.distributions import NormalDistribution
from qinfer.smc import SMCUpdater
from qinfer.resamplers import LiuWestResampler
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import time
import Lorentzian_fit as LF
from qinfer.expdesign import ExperimentDesigner
import logging
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
# Model/inference setup: T1 inversion-recovery model, flat prior on T1 in
# [0, 100], and an SMC particle filter with Liu-West resampling.
model = T1Model()
prior = UniformDistribution(np.array([0, 100]))
N_particles=100000
updater = SMCUpdater(model, N_particles, prior, resampler=LiuWestResampler(0.98),zero_weight_policy='reset')
designer=ExperimentDesigner(updater,opt_algo=1)
#Set the value of T1 to Learn, pick 1 value from prior
#true_model=prior.sample()
# Fixed "true" T1 used when simulating outcomes (see SIMULATE section below).
true_model=np.array([6.77], dtype=model.expparams_dtype)
# Per-trial record layout: chosen tau, measured/simulated outcome, posterior mean.
performance_dtype = [
    ('expparams', 'float'),
    ('sim_outcome', 'float'),
    ('est_mean', 'float'),
    ]
#NMR EXPERIMENT Initialization*******************************
#going to normalize Mo max of 1.
#model.Mo=float(raw_input('Please enter Mo: '))
#dummy=float(raw_input('Waiting for Mo: '))
#Mo_norm=LF.lorentzfit('1_spectrum.txt')
#model.Mo=(Mo_norm/Mo_norm)
#
#to save output data
# Output sinks: one PDF of figures plus three text logs, all named by timestamp.
timestr = time.strftime("%Y%m%d-%H%M%S")
Saver = PdfPages(timestr+'.pdf')
save_exp=open(timestr+'_exp.txt','w')
save_out=open(timestr+'_out.txt','w')
save_mean=open(timestr+'_mean.txt','w')
#iterative process to find T1
trials=20
data = np.zeros((trials, 1), dtype=performance_dtype)
for idx_trials in xrange(trials):
    log.info('trial: ' + str(idx_trials))
    #CHOOSE EXPERIMENTAL PARAMETER****************************
    # Draw guess_iter candidate taus from the particle guess heuristic and
    # keep the one with the lowest Bayes risk under the current posterior.
    guess_iter=50
    guess_vec=np.zeros((guess_iter,1))
    risk_vec=np.zeros((guess_iter,1))
    designer.new_exp()
    store_risk=100000000
    for idx_guess in xrange(guess_iter):
#        print 'guess iteration: '+ str(idx_guess)
#        guess=np.array([[[0.1+(0.1*idx_guess)]]],dtype=model.expparams_dtype) #sweep guess/incremental increase
        guess=np.array([model.particle_guess_heuristic(updater, 10000)], dtype=model.expparams_dtype) #generate guess from PGH
#        print 'Your Guess is: '+ str(guess)
        #evaluate bayes risk for the guess
        current_risk=updater.bayes_risk(guess)
#        print 'bayes_risk: ' + str(current_risk)
        if current_risk<store_risk:
            store_risk=current_risk
            expparams=guess
        risk_vec[idx_guess]=current_risk
        guess_vec[idx_guess]=guess
    log.debug('Your Tau is: ' + str(expparams))
    #optimize that guess
#    expparams=designer.design_expparams_field(guess,0,cost_scale_k=1,disp=False,maxiter=10000,maxfun=10000,store_guess=True,grad_h=1,)
#    print 'Your Tau is: ' + str(expparams)
    # Scatter plot of candidate taus vs their Bayes risk for this trial.
    fig = plt.figure()
    plt.scatter(guess_vec,risk_vec,s=1)
    plt.title('Bayes Risk of Guesses, Best Guess= '+str(expparams))
    plt.ylabel('Bayes Risk')
    plt.xlabel(r'$\tau$'+' Guess')
    Saver.savefig()
    #THIS MANUALLY COMPARES THE BAYES RISK OF THE GUESS VALUE AND THE OPTIMIZED VALUE AND PLOTS IT FOR SHOW,
    #TO SEE HOW IT IS CHOOSING THE BEST VALUE.
#    guess_iter=100
#    guess_vec=np.zeros((guess_iter,1))
#    grisk_vec=np.zeros((guess_iter,1))
#    tau_vec=np.zeros((guess_iter,1))
#    trisk_vec=np.zeros((guess_iter,1))
#    designer.new_exp()
#    for idx_guess in xrange(guess_iter):
#        print 'guess iteration: '+ str(idx_guess)
#        guess=np.array([model.particle_guess_heuristic(updater,10000)],dtype=model.expparams_dtype )
#        guess_risk=updater.bayes_risk(guess)
#        print 'Your Guess is: '+ str(guess)
#        guess_vec[idx_guess]=guess
#        grisk_vec[idx_guess]=guess_risk
#        expparams=designer.design_expparams_field(guess,0,cost_scale_k=10,disp=False,maxiter=10000,maxfun=10000,store_guess=False,grad_h=1,)
#        tau_risk=updater.bayes_risk(expparams)
#        print 'Your Tau is: ' + str(expparams)
#        tau_vec[idx_guess]=expparams
#        trisk_vec[idx_guess]=tau_risk
#    fig1=plt.figure()
#    plt.scatter(guess_vec,grisk_vec)
#    fig2=plt.figure()
#    plt.scatter(tau_vec,trisk_vec)
#    expparams=np.array([guess_vec[np.argmin(grisk_vec)]],dtype=model.expparams_dtype)
    #Try getting quantity for Fisher Information and Score
#    score=model.score()
##    expparams=np.array([np.linspace(1, 10, 1000)])
#    expparams=model.pgh(updater,10000) #generate guess from PGH
#
#    fisher=model.fisher_information(true_model,expparams)
#
    #SIMULATE*******************************************************
    #simulate outcomes- based on the true T1, and the chosen intial value
    #will be replaced by actual data collection from NMR for Mz values
    sim_outcome=model.simulate_experiment(true_model,expparams)
    outcome=sim_outcome
    #NMR EXPERIMENT*************************************************
    #USE this instead of simulate when doing experiments in NMR
#    outcome=np.array([[[float(raw_input('Enter obtained Mz: '))]]])
#    dummy=float(raw_input('waiting for Mz'))
#    Mz_value=LF.lorentzfit(str(idx_trials+2)+'_spectrum.txt')
#    outcome=np.array([[[Mz_value/abs(Mo_norm)]]])
    #Run SMC and update the posterior distribution
    updater.update(outcome,expparams,check_for_resample=True)
    #STORE DATA******************************************
    data[idx_trials]['est_mean'] = updater.est_mean()
    data[idx_trials]['sim_outcome'] = outcome
    data[idx_trials]['expparams'] = expparams
    # NOTE(review): writelines() on a plain string writes it char-by-char;
    # output is identical to write(), which is the conventional spelling.
    save_exp.writelines(str(expparams)+'\n')
    save_mean.write(str(updater.est_mean())+'\n')
    save_out.write(str(outcome)+'\n')
    # PLOT *******************************************
    #plotting particles and weights
    particles = updater.particle_locations
    weights = updater.particle_weights
    # NOTE(review): maxw is captured on trial 0 only, so every later trial is
    # scaled by the first trial's max weight — confirm this is intended.
    if idx_trials==0:
        maxw=max(weights)
    weights=weights/maxw #normalize the posterior
    fig1 = plt.figure()
    plt.axvline(updater.est_mean(), linestyle = '--', c = 'blue', linewidth =2,label='Est. Mean')
    plt.axvline(true_model, linestyle = '--', c = 'red', linewidth = 2,label='True Model')
    plt.scatter(particles,weights,s=0.1)
    plt.title('Posterior Distribution T1= '+str(updater.est_mean()))
    plt.ylabel('Normalized Weight')
    plt.xlabel('Particles')
    plt.legend()
    Saver.savefig()
#END LOOP***************************************************
# Flush all outputs (files are opened without context managers above, so an
# exception mid-loop would leave them unclosed).
Saver.close()
save_exp.close()
save_mean.close()
save_out.close()
from __future__ import unicode_literals
from django.db import models
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.utils.encoding import python_2_unicode_compatible
from taggit.models import TaggedItemBase
from taggit.managers import TaggableManager
from modelcluster.fields import ParentalKey
from modelcluster.contrib.taggit import ClusterTaggableManager
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import RichTextField, StreamField
from wagtail.wagtailcore.blocks import CharBlock, RichTextBlock
from wagtail.wagtailadmin.edit_handlers import FieldPanel, MultiFieldPanel, InlinePanel, PageChooserPanel, TabbedInterface, ObjectList
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtaildocs.edit_handlers import DocumentChooserPanel
from wagtail.wagtailsnippets.models import register_snippet
from wagtail.wagtailforms.models import AbstractEmailForm, AbstractFormField
from wagtail.wagtailsnippets.edit_handlers import SnippetChooserPanel
from wagtail.wagtailsearch import index
from wagtail.wagtailimages.models import AbstractImage, Image
from wagtail.wagtailimages.blocks import ImageChooserBlock
# Choices for EventPage.audience: (stored value, human-readable label).
EVENT_AUDIENCE_CHOICES = (
    ('public', "Public"),
    ('private', "Private"),
)
# Promote-tab panels shared by several page types below.
COMMON_PANELS = (
    FieldPanel('slug'),
    FieldPanel('seo_title'),
    FieldPanel('show_in_menus'),
    FieldPanel('search_description'),
)
# Link fields
class LinkFields(models.Model):
    """Abstract mixin providing a link that may be external, a page, or a document."""
    link_external = models.URLField("External link", blank=True)
    # NOTE(review): no on_delete on these FKs — relies on the pre-Django-2.0
    # default (CASCADE); confirm against the project's Django version.
    link_page = models.ForeignKey(
        'wagtailcore.Page',
        null=True,
        blank=True,
        related_name='+'
    )
    link_document = models.ForeignKey(
        'wagtaildocs.Document',
        null=True,
        blank=True,
        related_name='+'
    )
    @property
    def link(self):
        """Return the effective URL: page first, then document, else the external URL."""
        if self.link_page:
            return self.link_page.url
        elif self.link_document:
            return self.link_document.url
        else:
            return self.link_external
    panels = [
        FieldPanel('link_external'),
        PageChooserPanel('link_page'),
        DocumentChooserPanel('link_document'),
    ]
    class Meta:
        abstract = True
# Carousel items
class CarouselItem(LinkFields):
    """Abstract carousel entry: an image or embed with a caption plus LinkFields' link."""
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )
    embed_url = models.URLField("Embed URL", blank=True)
    caption = models.CharField(max_length=255, blank=True)
    panels = [
        ImageChooserPanel('image'),
        FieldPanel('embed_url'),
        FieldPanel('caption'),
        MultiFieldPanel(LinkFields.panels, "Link"),
    ]
    class Meta:
        abstract = True
# Related links
class RelatedLink(LinkFields):
    """Abstract titled link; concrete subclasses add the owning-page ParentalKey."""
    title = models.CharField(max_length=255, help_text="Link title")
    panels = [
        FieldPanel('title'),
        MultiFieldPanel(LinkFields.panels, "Link"),
    ]
    class Meta:
        abstract = True
# Simple page
class SimplePage(Page):
    """Minimal concrete page type with a single text body."""
    content = models.TextField()
class PageWithOldStyleRouteMethod(Page):
    """
    Prior to Wagtail 0.4, the route() method on Page returned an HttpResponse
    rather than a Page instance. As subclasses of Page may override route,
    we need to continue accepting this convention (albeit as a deprecated API).
    """
    content = models.TextField()
    template = 'tests/simple_page.html'
    def route(self, request, path_components):
        # Old-style contract: serve immediately, ignoring any remaining path.
        return self.serve(request)
# Event page
class EventPageCarouselItem(Orderable, CarouselItem):
    # Concrete carousel item attached to an EventPage.
    page = ParentalKey('tests.EventPage', related_name='carousel_items')
class EventPageRelatedLink(Orderable, RelatedLink):
    # Concrete related link attached to an EventPage.
    page = ParentalKey('tests.EventPage', related_name='related_links')
class EventPageSpeaker(Orderable, LinkFields):
    """Speaker listed on an EventPage, with optional portrait and link."""
    page = ParentalKey('tests.EventPage', related_name='speakers')
    first_name = models.CharField("Name", max_length=255, blank=True)
    last_name = models.CharField("Surname", max_length=255, blank=True)
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )
    @property
    def name_display(self):
        """Full name as 'first last' (single space even when a part is blank)."""
        return self.first_name + " " + self.last_name
    panels = [
        FieldPanel('first_name'),
        FieldPanel('last_name'),
        ImageChooserPanel('image'),
        MultiFieldPanel(LinkFields.panels, "Link"),
    ]
class EventPage(Page):
    """Event with dates/times, audience, location, and inline carousel/speakers/links."""
    date_from = models.DateField("Start date", null=True)
    date_to = models.DateField(
        "End date",
        null=True,
        blank=True,
        help_text="Not required if event is on a single day"
    )
    time_from = models.TimeField("Start time", null=True, blank=True)
    time_to = models.TimeField("End time", null=True, blank=True)
    audience = models.CharField(max_length=255, choices=EVENT_AUDIENCE_CHOICES)
    location = models.CharField(max_length=255)
    body = RichTextField(blank=True)
    cost = models.CharField(max_length=255)
    signup_link = models.URLField(blank=True)
    feed_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )
    # Indexes the display label of the audience choice, not the stored value.
    search_fields = (
        index.SearchField('get_audience_display'),
        index.SearchField('location'),
        index.SearchField('body'),
    )
    password_required_template = 'tests/event_page_password_required.html'
# Panels are assigned after the class body (a common Wagtail idiom so
# InlinePanel can reference the related_names declared above).
EventPage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('date_from'),
    FieldPanel('date_to'),
    FieldPanel('time_from'),
    FieldPanel('time_to'),
    FieldPanel('location'),
    FieldPanel('audience'),
    FieldPanel('cost'),
    FieldPanel('signup_link'),
    InlinePanel('carousel_items', label="Carousel items"),
    FieldPanel('body', classname="full"),
    InlinePanel('speakers', label="Speakers"),
    InlinePanel('related_links', label="Related links"),
]
EventPage.promote_panels = [
    MultiFieldPanel(COMMON_PANELS, "Common page configuration"),
    ImageChooserPanel('feed_image'),
]
# Just to be able to test multi table inheritance
class SingleEventPage(EventPage):
    """EventPage subclass (multi-table inheritance) with an extra excerpt field."""
    excerpt = models.TextField(max_length=255, blank=True, null=True, help_text="Short text to describe what is this action about")
# Prepend the excerpt to the inherited EventPage panels.
SingleEventPage.content_panels = [FieldPanel('excerpt')] + EventPage.content_panels
# Event index (has a separate AJAX template, and a custom template context)
class EventIndex(Page):
    """Index page listing live EventPage children, paginated four per page."""
    intro = RichTextField(blank=True)
    ajax_template = 'tests/includes/event_listing.html'
    def get_events(self):
        # Only live EventPage children count as events.
        return self.get_children().live().type(EventPage)
    def get_paginator(self):
        # Four events per page.
        return Paginator(self.get_events(), 4)
    def get_context(self, request, page=1):
        # Pagination
        paginator = self.get_paginator()
        try:
            events = paginator.page(page)
        except PageNotAnInteger:
            # Non-numeric page number: fall back to the first page.
            events = paginator.page(1)
        except EmptyPage:
            # Out-of-range page number: fall back to the last page.
            events = paginator.page(paginator.num_pages)
        # Update context
        context = super(EventIndex, self).get_context(request)
        context['events'] = events
        return context
    def route(self, request, path_components):
        """Route a single trailing path component as a page number, e.g. /index/2/."""
        if self.live and len(path_components) == 1:
            try:
                # NOTE: the except intentionally wraps serve() too, so a
                # TypeError/ValueError from serving also falls through to
                # the default routing below.
                return self.serve(request, page=int(path_components[0]))
            except (TypeError, ValueError):
                pass
        return super(EventIndex, self).route(request, path_components)
    def get_static_site_paths(self):
        """Yield one sub-path per pagination page, then the superclass paths."""
        # Get page count
        page_count = self.get_paginator().num_pages
        # Yield a path for each page (1-based in the URL).
        for page in range(page_count):
            yield '/%d/' % (page + 1)
        # Yield from superclass
        for path in super(EventIndex, self).get_static_site_paths():
            yield path
    def get_sitemap_urls(self):
        # Add past events url to sitemap
        return super(EventIndex, self).get_sitemap_urls() + [
            {
                'location': self.full_url + 'past/',
                'lastmod': self.latest_revision_created_at
            }
        ]
EventIndex.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('intro', classname="full"),
]
class FormField(AbstractFormField):
    # Individual field definition belonging to a FormPage.
    page = ParentalKey('FormPage', related_name='form_fields')
class FormPage(AbstractEmailForm):
    """Email-form page fixture; always exposes a fixed greeting to its template."""
    def get_context(self, request):
        base_context = super(FormPage, self).get_context(request)
        base_context['greeting'] = "hello world"
        return base_context
# Editor panels for FormPage: title, the inline field definitions, and the
# email-notification settings inherited from AbstractEmailForm.
FormPage.content_panels = [
    FieldPanel('title', classname="full title"),
    InlinePanel('form_fields', label="Form fields"),
    MultiFieldPanel([
        FieldPanel('to_address', classname="full"),
        FieldPanel('from_address', classname="full"),
        FieldPanel('subject', classname="full"),
    ], "Email")
]
# Snippets
class AdvertPlacement(models.Model):
    """Through model placing an Advert snippet on a Page."""
    page = ParentalKey('wagtailcore.Page', related_name='advert_placements')
    advert = models.ForeignKey('tests.Advert', related_name='+')
    colour = models.CharField(max_length=255)
class AdvertTag(TaggedItemBase):
    # Tag through-model for Advert.
    content_object = ParentalKey('Advert', related_name='tagged_items')
@python_2_unicode_compatible
class Advert(models.Model):
    """Taggable advert snippet (registered with Wagtail below)."""
    url = models.URLField(null=True, blank=True)
    text = models.CharField(max_length=255)
    tags = TaggableManager(through=AdvertTag, blank=True)
    panels = [
        FieldPanel('url'),
        FieldPanel('text'),
        FieldPanel('tags'),
    ]
    def __str__(self):
        return self.text
register_snippet(Advert)
class StandardIndex(Page):
    """ Index for the site, not allowed to be placed anywhere """
    # Empty list means no page type may parent this page.
    parent_page_types = []
# A custom panel setup where all Promote fields are placed in the Content tab instead;
# we use this to test that the 'promote' tab is left out of the output when empty
StandardIndex.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('seo_title'),
    FieldPanel('slug'),
    InlinePanel('advert_placements', label="Adverts"),
]
StandardIndex.promote_panels = []
class StandardChild(Page):
    pass
# Test overriding edit_handler with a custom one
# (includes an extra, empty 'Dinosaurs' tab to exercise tab rendering).
StandardChild.edit_handler = TabbedInterface([
    ObjectList(StandardChild.content_panels, heading='Content'),
    ObjectList(StandardChild.promote_panels, heading='Promote'),
    ObjectList(StandardChild.settings_panels, heading='Settings', classname='settings'),
    ObjectList([], heading='Dinosaurs'),
])
class BusinessIndex(Page):
    """ Can be placed anywhere, can only have Business children """
    subpage_types = ['tests.BusinessChild', 'tests.BusinessSubIndex']
class BusinessSubIndex(Page):
    """ Can be placed under BusinessIndex, and have BusinessChild children """
    subpage_types = ['tests.BusinessChild']
    parent_page_types = ['tests.BusinessIndex']
class BusinessChild(Page):
    """ Can only be placed under Business indexes, no children allowed """
    subpage_types = []
    # NOTE(review): mixes a string reference with a direct class reference —
    # presumably deliberate, to exercise both accepted forms; confirm.
    parent_page_types = ['tests.BusinessIndex', BusinessSubIndex]
class TaggedPageTag(TaggedItemBase):
    # Tag through-model for TaggedPage.
    content_object = ParentalKey('tests.TaggedPage', related_name='tagged_items')
class TaggedPage(Page):
    """Page type whose only extra feature is a tag field."""
    tags = ClusterTaggableManager(through=TaggedPageTag, blank=True)
TaggedPage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('tags'),
]
class PageChooserModel(models.Model):
    # Plain model with an FK to any Page (for chooser-widget tests).
    page = models.ForeignKey('wagtailcore.Page', help_text='help text')
class EventPageChooserModel(models.Model):
    # Same, but restricted to EventPage.
    page = models.ForeignKey('tests.EventPage', help_text='more help text')
class SnippetChooserModel(models.Model):
    # FK to the Advert snippet, edited through a SnippetChooserPanel.
    advert = models.ForeignKey(Advert, help_text='help text')
    panels = [
        SnippetChooserPanel('advert', Advert),
    ]
class CustomImageWithoutAdminFormFields(AbstractImage):
    """Custom image model that does NOT expose its extra fields in admin forms."""
    caption = models.CharField(max_length=255)
    not_editable_field = models.CharField(max_length=255)
class CustomImageWithAdminFormFields(AbstractImage):
    """Custom image model exposing 'caption' (but not 'not_editable_field') in admin forms."""
    caption = models.CharField(max_length=255)
    not_editable_field = models.CharField(max_length=255)
    admin_form_fields = Image.admin_form_fields + (
        'caption',
    )
class StreamModel(models.Model):
    """Non-page model carrying a StreamField (text / rich text / image blocks)."""
    body = StreamField([
        ('text', CharBlock()),
        ('rich_text', RichTextBlock()),
        ('image', ImageChooserBlock()),
    ])
class StreamPage(Page):
    """Page with the same StreamField block set, plus API exposure of 'body'."""
    body = StreamField([
        ('text', CharBlock()),
        ('rich_text', RichTextBlock()),
        ('image', ImageChooserBlock()),
    ])
    api_fields = ('body',)
class MTIBasePage(Page):
    # Base page excluded from the page-type chooser.
    is_creatable = False
class MTIChildPage(MTIBasePage):
    # Should be creatable by default, no need to set anything
    pass
class AbstractPage(Page):
    class Meta:
        abstract = True
Vault API
=================
This provides the `github.com/hashicorp/vault/api` package, which contains code useful for interacting with a Vault server.
For examples of how to use this module, see the [vault-examples](https://github.com/hashicorp/vault-examples) repo.
For a step-by-step walkthrough on using these client libraries, see the [developer quickstart](https://developer.hashicorp.com/vault/docs/get-started/developer-qs).
[](https://godoc.org/github.com/hashicorp/vault/api) | unknown | github | https://github.com/hashicorp/vault | api/README.md |
//===--- HTTP2StateMachine.swift ------------------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
import TestsUtils
//
// A trimmed-down version of SwiftNIO's HTTP/2 stream state machine. This version removes all code
// comments and removes any custom data types that are used by SwiftNIO. Its purpose is to benchmark
// Swift's performance switching over enums with substantial amounts of associated data.
//
// Benchmark registration: a single mini-application benchmark driven by
// run_HTTP2StateMachine (defined later in this file).
public let benchmarks = [
  BenchmarkInfo(
    name: "HTTP2StateMachine",
    runFunction: run_HTTP2StateMachine,
    tags: [.miniapplication])
]
// Flow-control windows are modelled as plain Ints in this trimmed-down version.
typealias HTTP2FlowControlWindow = Int
/// Trimmed-down HTTP/2 per-stream state machine. Each state carries the
/// relevant flow-control window(s) as associated data; every event method
/// returns `true` when the transition is legal and `false` otherwise,
/// leaving the state untouched on an illegal event.
struct HTTP2StreamStateMachine {
  /// Stream states per RFC 7540 section 5.1, with the open/half-closed
  /// states split by which side is idle vs. active.
  enum State {
    case idle(localRole: StreamRole, localWindow: HTTP2FlowControlWindow, remoteWindow: HTTP2FlowControlWindow)
    case reservedRemote(remoteWindow: HTTP2FlowControlWindow)
    case reservedLocal(localWindow: HTTP2FlowControlWindow)
    case halfOpenLocalPeerIdle(localWindow: HTTP2FlowControlWindow, remoteWindow: HTTP2FlowControlWindow)
    case halfOpenRemoteLocalIdle(localWindow: HTTP2FlowControlWindow, remoteWindow: HTTP2FlowControlWindow)
    case fullyOpen(localRole: StreamRole, localWindow: HTTP2FlowControlWindow, remoteWindow: HTTP2FlowControlWindow)
    case halfClosedLocalPeerIdle(remoteWindow: HTTP2FlowControlWindow)
    case halfClosedLocalPeerActive(localRole: StreamRole, initiatedBy: StreamRole, remoteWindow: HTTP2FlowControlWindow)
    case halfClosedRemoteLocalIdle(localWindow: HTTP2FlowControlWindow)
    case halfClosedRemoteLocalActive(localRole: StreamRole, initiatedBy: StreamRole, localWindow: HTTP2FlowControlWindow)
    case closed
  }
  /// Which side of the connection an endpoint plays for this stream.
  enum StreamRole {
    case server
    case client
  }
  private var state: State
  /// Stream created locally in the idle state.
  init(localRole: StreamRole, localWindow: HTTP2FlowControlWindow, remoteWindow: HTTP2FlowControlWindow) {
    self.state = .idle(localRole: localRole, localWindow: localWindow, remoteWindow: remoteWindow)
  }
  /// Stream created by receiving a PUSH_PROMISE (reserved-remote).
  init(receivedPushPromiseWithRemoteInitialWindowSize remoteWindow: HTTP2FlowControlWindow) {
    self.state = .reservedRemote(remoteWindow: remoteWindow)
  }
  /// Stream created by sending a PUSH_PROMISE (reserved-local).
  init(sentPushPromiseWithLocalInitialWindowSize localWindow: HTTP2FlowControlWindow) {
    self.state = .reservedLocal(localWindow: localWindow)
  }
  /// Locally-sent HEADERS; END_STREAM moves the local side toward closed.
  @inline(never)
  mutating func sendHeaders(isEndStreamSet endStream: Bool) -> Bool {
    switch self.state {
    case .idle(.client, localWindow: let localWindow, remoteWindow: let remoteWindow):
      self.state = endStream ? .halfClosedLocalPeerIdle(remoteWindow: remoteWindow) : .halfOpenLocalPeerIdle(localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .halfOpenRemoteLocalIdle(localWindow: let localWindow, remoteWindow: let remoteWindow):
      self.state = endStream ? .halfClosedLocalPeerActive(localRole: .server, initiatedBy: .client, remoteWindow: remoteWindow) : .fullyOpen(localRole: .server, localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .halfOpenLocalPeerIdle(localWindow: _, remoteWindow: let remoteWindow):
      self.state = .halfClosedLocalPeerIdle(remoteWindow: remoteWindow)
      return true
    case .reservedLocal(let localWindow):
      self.state = endStream ? .closed : .halfClosedRemoteLocalActive(localRole: .server, initiatedBy: .server, localWindow: localWindow)
      return true
    case .fullyOpen(let localRole, localWindow: _, remoteWindow: let remoteWindow):
      self.state = .halfClosedLocalPeerActive(localRole: localRole, initiatedBy: .client, remoteWindow: remoteWindow)
      return true
    case .halfClosedRemoteLocalIdle(let localWindow):
      self.state = endStream ? .closed : . halfClosedRemoteLocalActive(localRole: .server, initiatedBy: .client, localWindow: localWindow)
      return true
    case .halfClosedRemoteLocalActive:
      self.state = .closed
      return true
    case .idle(.server, _, _), .closed:
      return false
    case .reservedRemote, .halfClosedLocalPeerIdle, .halfClosedLocalPeerActive:
      return false
    }
  }
  /// Peer-sent HEADERS; mirror image of sendHeaders.
  @inline(never)
  mutating func receiveHeaders(isEndStreamSet endStream: Bool) -> Bool {
    switch self.state {
    case .idle(.server, localWindow: let localWindow, remoteWindow: let remoteWindow):
      self.state = endStream ? .halfClosedRemoteLocalIdle(localWindow: localWindow) : .halfOpenRemoteLocalIdle(localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .halfOpenLocalPeerIdle(localWindow: let localWindow, remoteWindow: let remoteWindow):
      self.state = endStream ? .halfClosedRemoteLocalActive(localRole: .client,initiatedBy: .client, localWindow: localWindow) : .fullyOpen(localRole: .client, localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .halfOpenRemoteLocalIdle(localWindow: let localWindow, remoteWindow: _):
      self.state = .halfClosedRemoteLocalIdle(localWindow: localWindow)
      return true
    case .reservedRemote(let remoteWindow):
      self.state = endStream ? .closed : .halfClosedLocalPeerActive(localRole: .client, initiatedBy: .server, remoteWindow: remoteWindow)
      return true
    case .fullyOpen(let localRole, localWindow: let localWindow, remoteWindow: _):
      self.state = .halfClosedRemoteLocalActive(localRole: localRole, initiatedBy: .client, localWindow: localWindow)
      return true
    case .halfClosedLocalPeerIdle(let remoteWindow):
      self.state = endStream ? .closed : . halfClosedLocalPeerActive(localRole: .client, initiatedBy: .client, remoteWindow: remoteWindow)
      return true
    case .halfClosedLocalPeerActive:
      self.state = .closed
      return true
    case .idle(.client, _, _), .closed:
      return false
    case .reservedLocal, .halfClosedRemoteLocalIdle, .halfClosedRemoteLocalActive:
      return false
    }
  }
  /// Locally-sent DATA: debits the local window; END_STREAM closes our side.
  @inline(never)
  mutating func sendData(flowControlledBytes: Int, isEndStreamSet endStream: Bool) -> Bool {
    switch self.state {
    case .halfOpenLocalPeerIdle(localWindow: var localWindow, remoteWindow: let remoteWindow):
      localWindow -= flowControlledBytes
      self.state = endStream ? .halfClosedLocalPeerIdle(remoteWindow: remoteWindow) : .halfOpenLocalPeerIdle(localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .fullyOpen(let localRole, localWindow: var localWindow, remoteWindow: let remoteWindow):
      localWindow -= flowControlledBytes
      self.state = endStream ? .halfClosedLocalPeerActive(localRole: localRole, initiatedBy: .client, remoteWindow: remoteWindow) : .fullyOpen(localRole: localRole, localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .halfClosedRemoteLocalActive(let localRole, let initiatedBy, var localWindow):
      localWindow -= flowControlledBytes
      self.state = endStream ? .closed : .halfClosedRemoteLocalActive(localRole: localRole, initiatedBy: initiatedBy, localWindow: localWindow)
      return true
    case .idle, .halfOpenRemoteLocalIdle, .reservedLocal, .reservedRemote, .halfClosedLocalPeerIdle,
         .halfClosedLocalPeerActive, .halfClosedRemoteLocalIdle, .closed:
      return false
    }
  }
  /// Peer-sent DATA: debits the remote window; END_STREAM closes their side.
  @inline(never)
  mutating func receiveData(flowControlledBytes: Int, isEndStreamSet endStream: Bool) -> Bool {
    switch self.state {
    case .halfOpenRemoteLocalIdle(localWindow: let localWindow, remoteWindow: var remoteWindow):
      remoteWindow -= flowControlledBytes
      self.state = endStream ? .halfClosedRemoteLocalIdle(localWindow: localWindow) : .halfOpenRemoteLocalIdle(localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .fullyOpen(let localRole, localWindow: let localWindow, remoteWindow: var remoteWindow):
      remoteWindow -= flowControlledBytes
      self.state = endStream ? .halfClosedRemoteLocalActive(localRole: localRole, initiatedBy: .client, localWindow: localWindow) : .fullyOpen(localRole: localRole, localWindow: localWindow, remoteWindow: remoteWindow)
      return true
    case .halfClosedLocalPeerActive(let localRole, let initiatedBy, var remoteWindow):
      remoteWindow -= flowControlledBytes
      self.state = endStream ? .closed : .halfClosedLocalPeerActive(localRole: localRole, initiatedBy: initiatedBy, remoteWindow: remoteWindow)
      return true
    case .idle, .halfOpenLocalPeerIdle, .reservedLocal, .reservedRemote, .halfClosedLocalPeerIdle,
         .halfClosedRemoteLocalActive, .halfClosedRemoteLocalIdle, .closed:
      return false
    }
  }
  /// PUSH_PROMISE may only be sent by a server on a client-initiated,
  /// remotely-open stream; it does not change this stream's state.
  @inline(never)
  mutating func sendPushPromise() -> Bool {
    switch self.state {
    case .fullyOpen(localRole: .server, localWindow: _, remoteWindow: _),
         .halfClosedRemoteLocalActive(localRole: .server, initiatedBy: .client, localWindow: _):
      return true
    case .idle, .reservedLocal, .reservedRemote, .halfClosedLocalPeerIdle, .halfClosedLocalPeerActive,
         .halfClosedRemoteLocalIdle, .halfOpenLocalPeerIdle, .halfOpenRemoteLocalIdle, .closed,
         .fullyOpen(localRole: .client, localWindow: _, remoteWindow: _),
         .halfClosedRemoteLocalActive(localRole: .client, initiatedBy: _, localWindow: _),
         .halfClosedRemoteLocalActive(localRole: .server, initiatedBy: .server, localWindow: _):
      return false
    }
  }
  /// Mirror of sendPushPromise for the receiving (client) side.
  @inline(never)
  mutating func receivePushPromise() -> Bool {
    switch self.state {
    case .fullyOpen(localRole: .client, localWindow: _, remoteWindow: _),
         .halfClosedLocalPeerActive(localRole: .client, initiatedBy: .client, remoteWindow: _):
      return true
    case .idle, .reservedLocal, .reservedRemote, .halfClosedLocalPeerIdle, .halfClosedRemoteLocalIdle,
         .halfClosedRemoteLocalActive, .halfOpenLocalPeerIdle, .halfOpenRemoteLocalIdle, .closed,
         .fullyOpen(localRole: .server, localWindow: _, remoteWindow: _),
         .halfClosedLocalPeerActive(localRole: .server, initiatedBy: _, remoteWindow: _),
         .halfClosedLocalPeerActive(localRole: .client, initiatedBy: .server, remoteWindow: _):
      return false
    }
  }
  /// Locally-sent WINDOW_UPDATE credits the peer's (remote) window.
  @inline(never)
  mutating func sendWindowUpdate(windowIncrement: Int) -> Bool {
    switch self.state {
    case .reservedRemote(remoteWindow: var remoteWindow):
      remoteWindow += windowIncrement
      self.state = .reservedRemote(remoteWindow: remoteWindow)
    case .halfOpenLocalPeerIdle(localWindow: let localWindow, remoteWindow: var remoteWindow):
      remoteWindow += windowIncrement
      self.state = .halfOpenLocalPeerIdle(localWindow: localWindow, remoteWindow: remoteWindow)
    case .halfOpenRemoteLocalIdle(localWindow: let localWindow, remoteWindow: var remoteWindow):
      remoteWindow += windowIncrement
      self.state = .halfOpenRemoteLocalIdle(localWindow: localWindow, remoteWindow: remoteWindow)
    case .fullyOpen(localRole: let localRole, localWindow: let localWindow, remoteWindow: var remoteWindow):
      remoteWindow += windowIncrement
      self.state = .fullyOpen(localRole: localRole, localWindow: localWindow, remoteWindow: remoteWindow)
    case .halfClosedLocalPeerIdle(remoteWindow: var remoteWindow):
      remoteWindow += windowIncrement
      self.state = .halfClosedLocalPeerIdle(remoteWindow: remoteWindow)
    case .halfClosedLocalPeerActive(localRole: let localRole, initiatedBy: let initiatedBy, remoteWindow: var remoteWindow):
      remoteWindow += windowIncrement
      self.state = .halfClosedLocalPeerActive(localRole: localRole, initiatedBy: initiatedBy, remoteWindow: remoteWindow)
    case .idle, .reservedLocal, .halfClosedRemoteLocalIdle, .halfClosedRemoteLocalActive, .closed:
      return false
    }
    return true
  }
  /// Peer-sent WINDOW_UPDATE credits our (local) window. Note: in the two
  /// half-closed-local states the frame is accepted but has no window to
  /// credit, so it is a deliberate no-op that still returns true.
  @inline(never)
  mutating func receiveWindowUpdate(windowIncrement: Int) -> Bool {
    switch self.state {
    case .reservedLocal(localWindow: var localWindow):
      localWindow += windowIncrement
      self.state = .reservedLocal(localWindow: localWindow)
    case .halfOpenLocalPeerIdle(localWindow: var localWindow, remoteWindow: let remoteWindow):
      localWindow += windowIncrement
      self.state = .halfOpenLocalPeerIdle(localWindow: localWindow, remoteWindow: remoteWindow)
    case .halfOpenRemoteLocalIdle(localWindow: var localWindow, remoteWindow: let remoteWindow):
      localWindow += windowIncrement
      self.state = .halfOpenRemoteLocalIdle(localWindow: localWindow, remoteWindow: remoteWindow)
    case .fullyOpen(localRole: let localRole, localWindow: var localWindow, remoteWindow: let remoteWindow):
      localWindow += windowIncrement
      self.state = .fullyOpen(localRole: localRole, localWindow: localWindow, remoteWindow: remoteWindow)
    case .halfClosedRemoteLocalIdle(localWindow: var localWindow):
      localWindow += windowIncrement
      self.state = .halfClosedRemoteLocalIdle(localWindow: localWindow)
    case .halfClosedRemoteLocalActive(localRole: let localRole, initiatedBy: let initiatedBy, localWindow: var localWindow):
      localWindow += windowIncrement
      self.state = .halfClosedRemoteLocalActive(localRole: localRole, initiatedBy: initiatedBy, localWindow: localWindow)
    case .halfClosedLocalPeerIdle, .halfClosedLocalPeerActive:
      break
    case .idle, .reservedRemote, .closed:
      return false
    }
    return true
  }
}
/// Drives a plain client request / server response exchange through paired
/// state machines. The `successful && call()` chains rely on short-circuit
/// evaluation: once any step fails, later mutating calls are skipped.
@inline(never)
func testSimpleRequestResponse(_ n: Int) -> Bool {
  var successful = true
  var server = HTTP2StreamStateMachine(localRole: .server, localWindow: 1<<16, remoteWindow: n)
  var client = HTTP2StreamStateMachine(localRole: .client, localWindow: 1<<16, remoteWindow: n)
  // Client sends the request headers and body (with a window update mid-stream).
  successful = successful && client.sendHeaders(isEndStreamSet: false)
  successful = successful && server.receiveHeaders(isEndStreamSet: false)
  successful = successful && client.sendData(flowControlledBytes: 128, isEndStreamSet: false)
  successful = successful && client.sendData(flowControlledBytes: 128, isEndStreamSet: false)
  successful = successful && server.receiveData(flowControlledBytes: 128, isEndStreamSet: false)
  successful = successful && server.receiveData(flowControlledBytes: 128, isEndStreamSet: false)
  successful = successful && server.sendWindowUpdate(windowIncrement: 256)
  successful = successful && client.receiveWindowUpdate(windowIncrement: 256)
  successful = successful && client.sendData(flowControlledBytes: 128, isEndStreamSet: true)
  successful = successful && server.receiveData(flowControlledBytes: 128, isEndStreamSet: true)
  // Server responds and finishes the stream.
  successful = successful && server.sendHeaders(isEndStreamSet: false)
  successful = successful && client.receiveHeaders(isEndStreamSet: false)
  successful = successful && server.sendData(flowControlledBytes: 1024, isEndStreamSet: false)
  successful = successful && client.receiveData(flowControlledBytes: 1024, isEndStreamSet: false)
  successful = successful && client.sendWindowUpdate(windowIncrement: 1024)
  successful = successful && server.receiveWindowUpdate(windowIncrement: 1024)
  successful = successful && server.sendData(flowControlledBytes: 1024, isEndStreamSet: true)
  successful = successful && client.receiveData(flowControlledBytes: 1024, isEndStreamSet: true)
  return successful
}
@inline(never)
func testPushedRequests(_ n: Int) -> Bool {
  // Exercise the server-push path: both peers start from the
  // push-promise state rather than idle.
  var server = HTTP2StreamStateMachine(sentPushPromiseWithLocalInitialWindowSize: n)
  var client = HTTP2StreamStateMachine(receivedPushPromiseWithRemoteInitialWindowSize: n)

  // Short-circuiting `&&` chain mirrors the original early-exit
  // behavior: nothing after a failed transition is attempted.
  return client.sendWindowUpdate(windowIncrement: 1024)
    && server.sendHeaders(isEndStreamSet: false)
    && client.receiveHeaders(isEndStreamSet: false)
    && server.sendData(flowControlledBytes: 1024, isEndStreamSet: false)
    && server.sendData(flowControlledBytes: 1024, isEndStreamSet: false)
    && client.receiveData(flowControlledBytes: 1024, isEndStreamSet: false)
    && client.receiveData(flowControlledBytes: 1024, isEndStreamSet: false)
    && client.sendWindowUpdate(windowIncrement: 1024)
    && server.receiveWindowUpdate(windowIncrement: 1024)
    && server.sendData(flowControlledBytes: 1024, isEndStreamSet: false)
    && client.receiveData(flowControlledBytes: 1024, isEndStreamSet: false)
    && server.sendHeaders(isEndStreamSet: true)
    && client.receiveHeaders(isEndStreamSet: true)
}
@inline(never)
func testPushingRequests(_ n: Int) -> Bool {
  // Exercise a request whose response carries a push promise.
  var server = HTTP2StreamStateMachine(localRole: .server, localWindow: 1<<16, remoteWindow: n)
  var client = HTTP2StreamStateMachine(localRole: .client, localWindow: 1<<16, remoteWindow: n)

  // Short-circuiting `&&` chain mirrors the original early-exit behavior.
  return client.sendHeaders(isEndStreamSet: true)
    && server.receiveHeaders(isEndStreamSet: true)
    && server.sendHeaders(isEndStreamSet: false)
    && client.receiveHeaders(isEndStreamSet: false)
    && server.sendPushPromise()
    && client.receivePushPromise()
    && server.sendData(flowControlledBytes: 1024, isEndStreamSet: true)
    && client.receiveData(flowControlledBytes: 1024, isEndStreamSet: true)
}
@inline(never)
func run_HTTP2StateMachine(_ n: Int) {
  // Drive all three scenarios 100_000 * n times.  Each scenario gets its
  // own identity(i) call, as before (identity is presumably the
  // benchmark's opaque-value helper).
  let iterations = 100_000 * n
  for i in 0 ..< iterations {
    check(testSimpleRequestResponse(identity(i)))
    check(testPushedRequests(identity(i)))
    check(testPushingRequests(identity(i)))
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * Specification of a direct ByteBuffer 'de-compressor'.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface DirectDecompressor {
  /**
   * This exposes a direct interface for record decompression with direct byte
   * buffers.
   *
   * The decompress() function need not always consume the buffers provided,
   * it will need to be called multiple times to decompress an entire buffer
   * and the object will hold the compression context internally.
   *
   * Codecs such as {@link SnappyCodec} may or may not support partial
   * decompression of buffers and will need enough space in the destination
   * buffer to decompress an entire block.
   *
   * The operation is modelled around dst.put(src);
   *
   * The end result will move src.position() by the bytes-read and
   * dst.position() by the bytes-written. It should not modify the src.limit()
   * or dst.limit() to maintain consistency of operation between codecs.
   *
   * @param src Source direct {@link ByteBuffer} for reading from. Requires src
   * != null and src.remaining() &gt; 0
   *
   * @param dst Destination direct {@link ByteBuffer} for storing the results
   * into. Requires dst != null and dst.remaining() to be &gt; 0
   *
   * @throws IOException if decompression fails
   */
  public void decompress(ByteBuffer src, ByteBuffer dst) throws IOException;
}
from mongoengine import *
from datetime import datetime
from mongoengine.base import BaseField
connect('infoneige')
class StreetProperties(EmbeddedDocument):
    """Raw per-street attributes embedded in a :class:`Street` document.

    Field names mirror the source dataset's column names; their exact
    meanings are inferred -- TODO confirm against the data feed.
    """

    A = StringField()
    SENS_CIR = IntField()  # presumably traffic direction ("sens de circulation")
    DE = StringField()
    SUR = StringField()
    TRC_ID = IntField()  # road-segment identifier from the source data
class Street(Document):
    """A street segment: its geometry plus the raw source properties."""

    trcId = IntField()
    geometry = LineStringField()
    properties = EmbeddedDocumentField(StreetProperties)
class Plan(EmbeddedDocument):
    """Planning status embedded in a street-side document.

    NOTE(review): field names look like the Montreal Info-Neige
    (snow-removal) API's French naming -- confirm meanings against the
    feed documentation.
    """

    munid = IntField()
    coteRueId = IntField()
    etatDeneig = IntField()
    # datetime.min defaults below presumably act as a "not yet set" sentinel.
    dateDebutPlanif = DateTimeField(default=datetime.min)
    dateFinPlanif = DateTimeField(default=datetime.min)
    dateDebutReplanif = DateTimeField(default=datetime.min)
    dateFinReplanif = DateTimeField(default=datetime.min)
    dateMaj = DateTimeField(default=datetime.min)  # last-updated timestamp
class CoteProperties(EmbeddedDocument):
    """Raw attributes for one side of a street ("côté de rue").

    Field names mirror the source dataset's column names; meanings are
    inferred from those names -- TODO confirm against the dataset.
    """

    COTE_RUE_ID = IntField()  # street-side identifier
    ID_TRC = IntField()  # id of the parent road segment
    DEBUT_ADRESSE = IntField()  # first street number on this side
    FIN_ADRESSE = IntField()  # last street number on this side
    ORIENTATION_F = StringField()
    NOM_VOIE = StringField()  # street name
    LIEN_F = StringField()
    ARRONDISSEMENT = StringField()  # borough
    TYPE_F = StringField()
class Cote(Document):
    """One side of a street, with its geometry, raw properties, a reference
    back to the parent :class:`Street`, and the embedded snow-removal
    :class:`Plan`."""

    coteRueId = IntField()
    geometry = LineStringField()
    properties = EmbeddedDocumentField(CoteProperties)
    street = ReferenceField(Street)
    plan = EmbeddedDocumentField(Plan)
//===--- NoDiscard.h ------------------------------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
#ifndef SWIFT_BASIC_NODISCARD_H
#define SWIFT_BASIC_NODISCARD_H

/// SWIFT_NODISCARD marks a function whose return value must not be ignored.
/// Prefer the standard C++17 [[nodiscard]] when the compiler offers it.
#if __cplusplus > 201402l && __has_cpp_attribute(nodiscard)
#define SWIFT_NODISCARD [[nodiscard]]
/// Pre-C++17 Clang spells the same diagnostic [[clang::warn_unused_result]].
#elif __has_cpp_attribute(clang::warn_unused_result)
#define SWIFT_NODISCARD [[clang::warn_unused_result]]
#else
/// No attribute support: the macro expands to nothing.
#define SWIFT_NODISCARD
#endif

#endif // SWIFT_BASIC_NODISCARD_H
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility classes for defining the time saving transfer of data from the reader
to the write using a LightQueue as a Pipe between the reader and the writer.
"""
from eventlet import event
from eventlet import greenthread
from eventlet import queue
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
IO_THREAD_SLEEP_TIME = .01
GLANCE_POLL_INTERVAL = 5
class ThreadSafePipe(queue.LightQueue):
    """The pipe holding the data the reader writes to and the writer reads
    from, exposed through a minimal file-like interface.
    """

    def __init__(self, maxsize, transfer_size):
        queue.LightQueue.__init__(self, maxsize)
        # Total bytes expected through the pipe, and how many have been
        # handed out so far.
        self.transfer_size = transfer_size
        self.transferred = 0

    def read(self, chunk_size):
        """Return the next queued chunk, or "" once the transfer is done.

        ``chunk_size`` is ignored: data comes out in exactly the chunks
        the producer wrote in.
        """
        if self.transferred >= self.transfer_size:
            return ""
        chunk = self.get()
        self.transferred += len(chunk)
        return chunk

    def write(self, data):
        """Queue one data chunk for the consumer."""
        self.put(data)

    def seek(self, offset, whence=0):
        """No-op; present only to satisfy the file-like interface."""
        pass

    def tell(self):
        """Report the total transfer size as the 'file' size."""
        return self.transfer_size

    def close(self):
        """No-op; present only to satisfy the file-like interface."""
        pass
class GlanceWriteThread(object):
    """Ensures that image data is written to in the glance client and that
    it is in correct ('active') state.

    `start()` spawns a greenthread that uploads `input` to glance and then
    polls the image status until it becomes active (success), killed
    (error), or unknown (error).  The returned Event resolves with True or
    re-raises the failure.
    """

    def __init__(self, context, input, image_service, image_id,
                 image_meta=None):
        if not image_meta:
            image_meta = {}

        self.context = context
        self.input = input  # file-like object supplying the image bytes
        self.image_service = image_service
        self.image_id = image_id
        self.image_meta = image_meta
        self._running = False

    def start(self):
        """Spawn the upload/poll greenthread; return the completion Event."""
        self.done = event.Event()

        def _inner():
            """Function to do the image data transfer through an update
            and thereon checks if the state is 'active'.
            """
            # The update() call performs the actual data upload.
            self.image_service.update(self.context,
                                      self.image_id,
                                      self.image_meta,
                                      data=self.input)
            self._running = True
            while self._running:
                try:
                    image_meta = self.image_service.show(self.context,
                                                         self.image_id)
                    image_status = image_meta.get("status")
                    if image_status == "active":
                        self.stop()
                        self.done.send(True)
                    # If the state is killed, then raise an exception.
                    elif image_status == "killed":
                        self.stop()
                        msg = (_("Glance image %s is in killed state") %
                               self.image_id)
                        LOG.error(msg)
                        self.done.send_exception(exception.NovaException(msg))
                    elif image_status in ["saving", "queued"]:
                        # Still in progress: back off before polling again.
                        greenthread.sleep(GLANCE_POLL_INTERVAL)
                    else:
                        self.stop()
                        msg = _("Glance image "
                                "%(image_id)s is in unknown state "
                                "- %(state)s") % {
                                    "image_id": self.image_id,
                                    "state": image_status}
                        LOG.error(msg)
                        self.done.send_exception(exception.NovaException(msg))
                except Exception as exc:
                    # Any failure (including from show()) aborts the poll
                    # loop and propagates through the Event.
                    self.stop()
                    self.done.send_exception(exc)

        greenthread.spawn(_inner)
        return self.done

    def stop(self):
        # Flag checked by the poll loop; takes effect on its next iteration.
        self._running = False

    def wait(self):
        """Block until the transfer finishes; returns True or re-raises."""
        return self.done.wait()

    def close(self):
        """A place-holder to maintain consistency."""
        pass
class IOThread(object):
    """Class that reads chunks from the input file and writes them to the
    output file till the transfer is completely done.
    """

    def __init__(self, input, output):
        self.input = input
        self.output = output
        self._running = False
        # NOTE(review): initialized here but never set anywhere in this
        # class -- appears to be dead state.
        self.got_exception = False

    def start(self):
        """Spawn the copy greenthread; return the completion Event."""
        self.done = event.Event()

        def _inner():
            """Read data from the input and write the same to the output
            until the transfer completes.
            """
            self._running = True
            while self._running:
                try:
                    data = self.input.read(None)
                    if not data:
                        self.stop()
                        self.done.send(True)
                    # NOTE(review): when read() returned no data, the empty
                    # chunk is still written below before the loop exits --
                    # presumably harmless, but confirm the output object
                    # tolerates empty writes.
                    self.output.write(data)
                    # Yield to other greenthreads between chunks.
                    greenthread.sleep(IO_THREAD_SLEEP_TIME)
                except Exception as exc:
                    self.stop()
                    LOG.exception(exc)
                    self.done.send_exception(exc)

        greenthread.spawn(_inner)
        return self.done

    def stop(self):
        # Flag checked by the copy loop; takes effect on its next iteration.
        self._running = False

    def wait(self):
        """Block until the copy finishes; returns True or re-raises."""
        return self.done.wait()
package cluster
import (
"context"
types "github.com/moby/moby/api/types/swarm"
"github.com/moby/moby/v2/daemon/cluster/convert"
"github.com/moby/moby/v2/daemon/server/swarmbackend"
"github.com/moby/moby/v2/errdefs"
swarmapi "github.com/moby/swarmkit/v2/api"
"google.golang.org/grpc"
)
// GetNodes returns a list of all nodes known to a cluster.
func (c *Cluster) GetNodes(options swarmbackend.NodeListOptions) ([]types.Node, error) {
	filters, err := newListNodesFilters(options.Filters)
	if err != nil {
		return nil, err
	}

	// Run the list call under the manager lock; the response is captured
	// in the closure.
	var resp *swarmapi.ListNodesResponse
	if err := c.lockedManagerAction(context.TODO(), func(ctx context.Context, state nodeState) error {
		var listErr error
		resp, listErr = state.controlClient.ListNodes(
			ctx,
			&swarmapi.ListNodesRequest{Filters: filters},
			grpc.MaxCallRecvMsgSize(defaultRecvSizeForListResponse),
		)
		return listErr
	}); err != nil {
		return nil, err
	}

	// Convert the gRPC node records into API types.
	out := make([]types.Node, 0, len(resp.Nodes))
	for _, n := range resp.Nodes {
		out = append(out, convert.NodeFromGRPC(*n))
	}
	return out, nil
}
// GetNode returns a node based on an ID.
func (c *Cluster) GetNode(input string) (types.Node, error) {
	// Resolve the node under the manager lock, capturing it in the closure.
	var found *swarmapi.Node
	err := c.lockedManagerAction(context.TODO(), func(ctx context.Context, state nodeState) error {
		n, err := getNode(ctx, state.controlClient, input)
		if err != nil {
			return err
		}
		found = n
		return nil
	})
	if err != nil {
		return types.Node{}, err
	}
	return convert.NodeFromGRPC(*found), nil
}
// UpdateNode updates existing nodes properties.
func (c *Cluster) UpdateNode(input string, version uint64, spec types.NodeSpec) error {
	return c.lockedManagerAction(context.TODO(), func(_ context.Context, state nodeState) error {
		nodeSpec, err := convert.NodeSpecToGRPC(spec)
		if err != nil {
			// A spec that fails conversion is a caller error.
			return errdefs.InvalidParameter(err)
		}

		// NOTE(review): the context supplied by lockedManagerAction is
		// ignored and a fresh one with swarmRequestTimeout is created
		// instead -- confirm this is intentional.
		ctx := context.TODO()
		ctx, cancel := context.WithTimeout(ctx, swarmRequestTimeout)
		defer cancel()

		// Resolve the node first so the update addresses its canonical ID
		// (input may be a name or ID prefix).
		currentNode, err := getNode(ctx, state.controlClient, input)
		if err != nil {
			return err
		}

		_, err = state.controlClient.UpdateNode(
			ctx,
			&swarmapi.UpdateNodeRequest{
				NodeID: currentNode.ID,
				Spec:   &nodeSpec,
				NodeVersion: &swarmapi.Version{
					// Caller-supplied version guards against concurrent updates.
					Index: version,
				},
			},
		)
		return err
	})
}
// RemoveNode removes a node from a cluster
func (c *Cluster) RemoveNode(input string, force bool) error {
return c.lockedManagerAction(context.TODO(), func(ctx context.Context, state nodeState) error {
node, err := getNode(ctx, state.controlClient, input)
if err != nil {
return err
}
_, err = state.controlClient.RemoveNode(ctx, &swarmapi.RemoveNodeRequest{NodeID: node.ID, Force: force})
return err
})
} | go | github | https://github.com/moby/moby | daemon/cluster/nodes.go |
---
title: Custom 404 Page
author: ashmaroli
date: 2017-03-11 17:23:24 +0530
---
You can easily serve custom 404 error pages with Jekyll to replace the default **Error 404 -- File Not Found** page displayed when one tries to access a broken link on your site.
## On GitHub Pages
Any `404.html` at the **root of your `_site` directory** will be served automatically by GitHub Pages and the local WEBrick development server.
Simply add a `404.md` or `404.html` at the root of your site's source directory and include front matter data to use the theme's base layout.
If you plan to organize your files under subdirectories, the error page should have the following front matter set: `permalink: /404.html`. This ensures that the compiled `404.html` resides at the root of your processed site, where the server will pick it up.
```markdown
---
# example 404.md
layout: default
permalink: /404.html
---
# 404
Page not found! :(
```
## Hosting on Apache Web Servers
Apache Web Servers load a configuration file named [`.htaccess`](http://www.htaccess-guide.com/) that modifies the functionality of these servers.
Simply add the following to your `.htaccess` file.
```apache
ErrorDocument 404 /404.html
```
With an `.htaccess` file, you have the freedom to place your error page within a subdirectory.
```apache
ErrorDocument 404 /error_pages/404.html
```
Where the path is relative to your site's domain.
More info on configuring Apache Error Pages can be found in the [official documentation](https://httpd.apache.org/docs/current/mod/core.html#errordocument).
## Hosting on Nginx server
The procedure is just as simple as configuring Apache servers, but slightly different.
The nginx configuration file depends on the system in which it is installed. In most systems, it is the `nginx.conf` file, which is usually located inside `/etc/nginx/` or `/etc/nginx/conf/`. However, in other systems like Ubuntu, you would have to look for a `default` nginx configuration file, containing server related information, which is usually located inside `/etc/nginx/sites-available/` or `/etc/nginx/sites-enabled/`. Add the following to your nginx configuration file, _i.e._ either to `nginx.conf` file or to `default` file:
```nginx
server {
error_page 404 /404.html;
location = /404.html {
internal;
}
}
```
If the `server` block already exists, only add the code inside the `server` block given above.
The `location` directive prevents users from directly browsing the 404.html page.
More info on nginx error page can be found on [nginx official documentation](http://nginx.org/en/docs/http/ngx_http_core_module.html#error_page).
<p class="note warning">
Proceed with caution while editing the configuration file.
</p> | unknown | github | https://github.com/jekyll/jekyll | docs/_tutorials/custom-404-page.md |
# -*- coding: utf-8 -*-
# pylint: disable=W0102
from datetime import datetime, date
import nose
import numpy as np
import re
import itertools
from pandas import (Index, MultiIndex, DataFrame, DatetimeIndex,
Series, Categorical)
from pandas.compat import OrderedDict, lrange
from pandas.sparse.array import SparseArray
from pandas.core.internals import (BlockPlacement, SingleBlockManager,
make_block, BlockManager)
import pandas.core.algorithms as algos
import pandas.util.testing as tm
import pandas as pd
from pandas import lib
from pandas.util.testing import (assert_almost_equal, assert_frame_equal,
randn, assert_series_equal)
from pandas.compat import zip, u
def assert_block_equal(left, right):
    """Assert two internal blocks match in values, dtype and placement."""
    tm.assert_numpy_array_equal(left.values, right.values)
    assert left.dtype == right.dtype
    # Both placements must be real BlockPlacement objects ...
    for blk in (left, right):
        tm.assertIsInstance(blk.mgr_locs, lib.BlockPlacement)
    # ... and resolve to the same positions.
    tm.assert_numpy_array_equal(left.mgr_locs.as_array,
                                right.mgr_locs.as_array)
def get_numeric_mat(shape):
    """Return an int array of ``shape`` where every entry at index
    ``[i, ...]`` equals ``i`` (the row number repeated along all trailing
    axes)."""
    row_ids = np.arange(shape[0])
    # Reshape to (shape[0], 1, 1, ...) and broadcast across the trailing
    # axes; .copy() materializes a writable C-ordered array, matching the
    # original as_strided(...).copy() result.
    leading = (shape[0],) + (1,) * (len(shape) - 1)
    return np.broadcast_to(row_ids.reshape(leading), shape).copy()
N = 10
def create_block(typestr, placement, item_shape=None, num_offset=0):
    """
    Build a single internal Block of the requested dtype for testing.

    Supported typestr:

        * float, f8, f4, f2
        * int, i8, i4, i2, i1
        * uint, u8, u4, u2, u1
        * complex, c16, c8
        * bool
        * object, string, O
        * datetime, dt, M8[ns], M8[ns, tz]
        * timedelta, td, m8[ns]
        * sparse (SparseArray with fill_value=0.0)
        * sparse_na (SparseArray with fill_value=np.nan)
        * category, category2

    ``num_offset`` shifts the generated values so blocks created in
    sequence hold distinct data.
    """
    placement = BlockPlacement(placement)
    num_items = len(placement)

    if item_shape is None:
        item_shape = (N, )

    # First axis indexes items; trailing axes are the per-item shape.
    shape = (num_items, ) + item_shape

    mat = get_numeric_mat(shape)

    if typestr in ('float', 'f8', 'f4', 'f2', 'int', 'i8', 'i4', 'i2', 'i1',
                   'uint', 'u8', 'u4', 'u2', 'u1'):
        values = mat.astype(typestr) + num_offset
    elif typestr in ('complex', 'c16', 'c8'):
        values = 1.j * (mat.astype(typestr) + num_offset)
    elif typestr in ('object', 'string', 'O'):
        # 'A0', 'A1', ... reshaped back to the block shape.
        values = np.reshape(['A%d' % i for i in mat.ravel() + num_offset],
                            shape)
    elif typestr in ('b', 'bool', ):
        values = np.ones(shape, dtype=np.bool_)
    elif typestr in ('datetime', 'dt', 'M8[ns]'):
        # Values spaced one second apart (1e9 ns).
        values = (mat * 1e9).astype('M8[ns]')
    elif typestr.startswith('M8[ns'):
        # datetime with tz: extract the timezone name from e.g.
        # 'M8[ns, US/Eastern]'.
        m = re.search(r'M8\[ns,\s*(\w+\/?\w*)\]', typestr)
        assert m is not None, "incompatible typestr -> {0}".format(typestr)
        tz = m.groups()[0]
        assert num_items == 1, "must have only 1 num items for a tz-aware"
        values = DatetimeIndex(np.arange(N) * 1e9, tz=tz)
    elif typestr in ('timedelta', 'td', 'm8[ns]'):
        values = (mat * 1).astype('m8[ns]')
    elif typestr in ('category', ):
        values = Categorical([1, 1, 2, 2, 3, 3, 3, 3, 4, 4])
    elif typestr in ('category2', ):
        values = Categorical(['a', 'a', 'a', 'a', 'b', 'b', 'c', 'c', 'c', 'd'
                              ])
    elif typestr in ('sparse', 'sparse_na'):
        # FIXME: doesn't support num_rows != 10
        assert shape[-1] == 10
        assert all(s == 1 for s in shape[:-1])
        if typestr.endswith('_na'):
            fill_value = np.nan
        else:
            fill_value = 0.0
        values = SparseArray([fill_value, fill_value, 1, 2, 3, fill_value,
                              4, 5, fill_value, 6], fill_value=fill_value)
        # Shift the non-fill values in place through a view.
        arr = values.sp_values.view()
        arr += (num_offset - 1)
    else:
        raise ValueError('Unsupported typestr: "%s"' % typestr)

    return make_block(values, placement=placement, ndim=len(shape))
def create_single_mgr(typestr, num_rows=None):
    """Build a SingleBlockManager holding one 1-D block of ``typestr``
    data with ``num_rows`` rows (defaults to the module-level N)."""
    rows = N if num_rows is None else num_rows
    blk = create_block(typestr, placement=slice(0, rows), item_shape=())
    return SingleBlockManager(blk, np.arange(rows))
def create_mgr(descr, item_shape=None):
    """
    Construct BlockManager from string description.

    String description syntax looks similar to np.matrix initializer.  It
    looks like this::

        a,b,c: f8; d,e,f: i8

    Rules are rather simple:

    * see list of supported datatypes in `create_block` method
    * components are semicolon-separated
    * each component is `NAME,NAME,NAME: DTYPE_ID`
    * whitespace around colons & semicolons are removed
    * components with same DTYPE_ID are combined into single block
    * to force multiple blocks with same dtype, use '-SUFFIX'::

        'a:f8-1; b:f8-2; c:f8-foobar'

    """
    if item_shape is None:
        item_shape = (N, )

    # First pass: collect item names and, per dtype-id, the column
    # positions that dtype occupies.
    offset = 0
    mgr_items = []
    block_placements = OrderedDict()
    for d in descr.split(';'):
        d = d.strip()
        if not len(d):
            continue
        names, blockstr = d.partition(':')[::2]
        blockstr = blockstr.strip()
        names = names.strip().split(',')

        mgr_items.extend(names)
        placement = list(np.arange(len(names)) + offset)
        # EAFP: merge into an existing placement list for this dtype-id,
        # or start a new one.
        try:
            block_placements[blockstr].extend(placement)
        except KeyError:
            block_placements[blockstr] = placement
        offset += len(names)

    mgr_items = Index(mgr_items)

    # Second pass: build one block per dtype-id.  num_offset keeps the
    # generated data distinct between blocks.
    blocks = []
    num_offset = 0
    for blockstr, placement in block_placements.items():
        # Strip the '-SUFFIX' used to force separate same-dtype blocks.
        typestr = blockstr.split('-')[0]
        blocks.append(create_block(typestr,
                                   placement,
                                   item_shape=item_shape,
                                   num_offset=num_offset, ))
        num_offset += len(placement)

    # Blocks sorted by first column position, as BlockManager expects.
    return BlockManager(sorted(blocks, key=lambda b: b.mgr_locs[0]),
                        [mgr_items] + [np.arange(n) for n in item_shape])
class TestBlock(tm.TestCase):
    """Unit tests for the internal Block type (construction, placement,
    pickling, merging, copying and deletion)."""

    _multiprocess_can_split_ = True

    def setUp(self):
        # self.fblock = get_float_ex()  # a,c,e
        # self.cblock = get_complex_ex() #
        # self.oblock = get_obj_ex()
        # self.bool_block = get_bool_ex()
        # self.int_block = get_int_ex()

        # One block per dtype, each at distinct (non-contiguous) positions.
        self.fblock = create_block('float', [0, 2, 4])
        self.cblock = create_block('complex', [7])
        self.oblock = create_block('object', [1, 3])
        self.bool_block = create_block('bool', [5])
        self.int_block = create_block('int', [6])

    def test_constructor(self):
        # The requested itemsize must be preserved, not upcast to int64.
        int32block = create_block('i4', [0])
        self.assertEqual(int32block.dtype, np.int32)

    def test_pickle(self):
        def _check(blk):
            assert_block_equal(self.round_trip_pickle(blk), blk)

        _check(self.fblock)
        _check(self.cblock)
        _check(self.oblock)
        _check(self.bool_block)

    def test_mgr_locs(self):
        tm.assertIsInstance(self.fblock.mgr_locs, lib.BlockPlacement)
        tm.assert_numpy_array_equal(self.fblock.mgr_locs.as_array,
                                    np.array([0, 2, 4], dtype=np.int64))

    def test_attrs(self):
        # The block mirrors its underlying ndarray's shape/dtype/length.
        self.assertEqual(self.fblock.shape, self.fblock.values.shape)
        self.assertEqual(self.fblock.dtype, self.fblock.values.dtype)
        self.assertEqual(len(self.fblock), len(self.fblock.values))

    def test_merge(self):
        avals = randn(2, 10)
        bvals = randn(2, 10)

        ref_cols = Index(['e', 'a', 'b', 'd', 'f'])

        # Interleaved placements: merge must produce sorted locations with
        # the corresponding rows in matching order.
        ablock = make_block(avals, ref_cols.get_indexer(['e', 'b']))
        bblock = make_block(bvals, ref_cols.get_indexer(['a', 'd']))
        merged = ablock.merge(bblock)
        tm.assert_numpy_array_equal(merged.mgr_locs.as_array,
                                    np.array([0, 1, 2, 3], dtype=np.int64))
        tm.assert_numpy_array_equal(merged.values[[0, 2]], np.array(avals))
        tm.assert_numpy_array_equal(merged.values[[1, 3]], np.array(bvals))

        # TODO: merge with mixed type?

    def test_copy(self):
        cop = self.fblock.copy()
        self.assertIsNot(cop, self.fblock)
        assert_block_equal(self.fblock, cop)

    def test_reindex_index(self):
        pass

    def test_reindex_cast(self):
        pass

    def test_insert(self):
        pass

    def test_delete(self):
        # Deleting the first item drops location 0 and its row.
        newb = self.fblock.copy()
        newb.delete(0)
        tm.assertIsInstance(newb.mgr_locs, lib.BlockPlacement)
        tm.assert_numpy_array_equal(newb.mgr_locs.as_array,
                                    np.array([2, 4], dtype=np.int64))
        self.assertTrue((newb.values[0] == 1).all())

        newb = self.fblock.copy()
        newb.delete(1)
        tm.assertIsInstance(newb.mgr_locs, lib.BlockPlacement)
        tm.assert_numpy_array_equal(newb.mgr_locs.as_array,
                                    np.array([0, 4], dtype=np.int64))
        self.assertTrue((newb.values[1] == 2).all())

        newb = self.fblock.copy()
        newb.delete(2)
        tm.assert_numpy_array_equal(newb.mgr_locs.as_array,
                                    np.array([0, 2], dtype=np.int64))
        self.assertTrue((newb.values[1] == 1).all())

        # Out-of-range delete must raise.
        newb = self.fblock.copy()
        self.assertRaises(Exception, newb.delete, 3)

    def test_split_block_at(self):
        # with dup column support this method was taken out
        # GH3679
        # NOTE: everything below the SkipTest is intentionally unreachable,
        # kept for reference.
        raise nose.SkipTest("skipping for now")

        bs = list(self.fblock.split_block_at('a'))
        self.assertEqual(len(bs), 1)
        self.assertTrue(np.array_equal(bs[0].items, ['c', 'e']))

        bs = list(self.fblock.split_block_at('c'))
        self.assertEqual(len(bs), 2)
        self.assertTrue(np.array_equal(bs[0].items, ['a']))
        self.assertTrue(np.array_equal(bs[1].items, ['e']))

        bs = list(self.fblock.split_block_at('e'))
        self.assertEqual(len(bs), 1)
        self.assertTrue(np.array_equal(bs[0].items, ['a', 'c']))

        # bblock = get_bool_ex(['f'])
        # bs = list(bblock.split_block_at('f'))
        # self.assertEqual(len(bs), 0)
class TestDatetimeBlock(tm.TestCase):
    """Tests for datetime64 block value coercion."""

    _multiprocess_can_split_ = True

    def test_try_coerce_arg(self):
        block = create_block('datetime', [0])

        # coerce None: must map to NaT
        none_coerced = block._try_coerce_args(block.values, None)[2]
        self.assertTrue(pd.Timestamp(none_coerced) is pd.NaT)

        # coerce different types of date objects: all must coerce to the
        # same i8 (nanosecond) representation
        vals = (np.datetime64('2010-10-10'), datetime(2010, 10, 10),
                date(2010, 10, 10))
        for val in vals:
            coerced = block._try_coerce_args(block.values, val)[2]
            self.assertEqual(np.int64, type(coerced))
            self.assertEqual(pd.Timestamp('2010-10-10'), pd.Timestamp(coerced))
class TestBlockManager(tm.TestCase):
_multiprocess_can_split_ = True
def setUp(self):
self.mgr = create_mgr(
'a: f8; b: object; c: f8; d: object; e: f8;'
'f: bool; g: i8; h: complex; i: datetime-1; j: datetime-2;'
'k: M8[ns, US/Eastern]; l: M8[ns, CET];')
def test_constructor_corner(self):
pass
def test_attrs(self):
mgr = create_mgr('a,b,c: f8-1; d,e,f: f8-2')
self.assertEqual(mgr.nblocks, 2)
self.assertEqual(len(mgr), 6)
def test_is_mixed_dtype(self):
self.assertFalse(create_mgr('a,b:f8').is_mixed_type)
self.assertFalse(create_mgr('a:f8-1; b:f8-2').is_mixed_type)
self.assertTrue(create_mgr('a,b:f8; c,d: f4').is_mixed_type)
self.assertTrue(create_mgr('a,b:f8; c,d: object').is_mixed_type)
def test_is_indexed_like(self):
mgr1 = create_mgr('a,b: f8')
mgr2 = create_mgr('a:i8; b:bool')
mgr3 = create_mgr('a,b,c: f8')
self.assertTrue(mgr1._is_indexed_like(mgr1))
self.assertTrue(mgr1._is_indexed_like(mgr2))
self.assertTrue(mgr1._is_indexed_like(mgr3))
self.assertFalse(mgr1._is_indexed_like(mgr1.get_slice(
slice(-1), axis=1)))
def test_duplicate_ref_loc_failure(self):
tmp_mgr = create_mgr('a:bool; a: f8')
axes, blocks = tmp_mgr.axes, tmp_mgr.blocks
blocks[0].mgr_locs = np.array([0])
blocks[1].mgr_locs = np.array([0])
# test trying to create block manager with overlapping ref locs
self.assertRaises(AssertionError, BlockManager, blocks, axes)
blocks[0].mgr_locs = np.array([0])
blocks[1].mgr_locs = np.array([1])
mgr = BlockManager(blocks, axes)
mgr.iget(1)
def test_contains(self):
self.assertIn('a', self.mgr)
self.assertNotIn('baz', self.mgr)
def test_pickle(self):
mgr2 = self.round_trip_pickle(self.mgr)
assert_frame_equal(DataFrame(self.mgr), DataFrame(mgr2))
# share ref_items
# self.assertIs(mgr2.blocks[0].ref_items, mgr2.blocks[1].ref_items)
# GH2431
self.assertTrue(hasattr(mgr2, "_is_consolidated"))
self.assertTrue(hasattr(mgr2, "_known_consolidated"))
# reset to False on load
self.assertFalse(mgr2._is_consolidated)
self.assertFalse(mgr2._known_consolidated)
def test_non_unique_pickle(self):
mgr = create_mgr('a,a,a:f8')
mgr2 = self.round_trip_pickle(mgr)
assert_frame_equal(DataFrame(mgr), DataFrame(mgr2))
mgr = create_mgr('a: f8; a: i8')
mgr2 = self.round_trip_pickle(mgr)
assert_frame_equal(DataFrame(mgr), DataFrame(mgr2))
def test_categorical_block_pickle(self):
mgr = create_mgr('a: category')
mgr2 = self.round_trip_pickle(mgr)
assert_frame_equal(DataFrame(mgr), DataFrame(mgr2))
smgr = create_single_mgr('category')
smgr2 = self.round_trip_pickle(smgr)
assert_series_equal(Series(smgr), Series(smgr2))
def test_get_scalar(self):
for item in self.mgr.items:
for i, index in enumerate(self.mgr.axes[1]):
res = self.mgr.get_scalar((item, index))
exp = self.mgr.get(item, fastpath=False)[i]
self.assertEqual(res, exp)
exp = self.mgr.get(item).internal_values()[i]
self.assertEqual(res, exp)
def test_get(self):
cols = Index(list('abc'))
values = np.random.rand(3, 3)
block = make_block(values=values.copy(), placement=np.arange(3))
mgr = BlockManager(blocks=[block], axes=[cols, np.arange(3)])
assert_almost_equal(mgr.get('a', fastpath=False), values[0])
assert_almost_equal(mgr.get('b', fastpath=False), values[1])
assert_almost_equal(mgr.get('c', fastpath=False), values[2])
assert_almost_equal(mgr.get('a').internal_values(), values[0])
assert_almost_equal(mgr.get('b').internal_values(), values[1])
assert_almost_equal(mgr.get('c').internal_values(), values[2])
def test_set(self):
mgr = create_mgr('a,b,c: int', item_shape=(3, ))
mgr.set('d', np.array(['foo'] * 3))
mgr.set('b', np.array(['bar'] * 3))
tm.assert_numpy_array_equal(mgr.get('a').internal_values(),
np.array([0] * 3))
tm.assert_numpy_array_equal(mgr.get('b').internal_values(),
np.array(['bar'] * 3, dtype=np.object_))
tm.assert_numpy_array_equal(mgr.get('c').internal_values(),
np.array([2] * 3))
tm.assert_numpy_array_equal(mgr.get('d').internal_values(),
np.array(['foo'] * 3, dtype=np.object_))
def test_insert(self):
self.mgr.insert(0, 'inserted', np.arange(N))
self.assertEqual(self.mgr.items[0], 'inserted')
assert_almost_equal(self.mgr.get('inserted'), np.arange(N))
for blk in self.mgr.blocks:
yield self.assertIs, self.mgr.items, blk.ref_items
def test_set_change_dtype(self):
self.mgr.set('baz', np.zeros(N, dtype=bool))
self.mgr.set('baz', np.repeat('foo', N))
self.assertEqual(self.mgr.get('baz').dtype, np.object_)
mgr2 = self.mgr.consolidate()
mgr2.set('baz', np.repeat('foo', N))
self.assertEqual(mgr2.get('baz').dtype, np.object_)
mgr2.set('quux', randn(N).astype(int))
self.assertEqual(mgr2.get('quux').dtype, np.int_)
mgr2.set('quux', randn(N))
self.assertEqual(mgr2.get('quux').dtype, np.float_)
def test_set_change_dtype_slice(self): # GH8850
cols = MultiIndex.from_tuples([('1st', 'a'), ('2nd', 'b'), ('3rd', 'c')
])
df = DataFrame([[1.0, 2, 3], [4.0, 5, 6]], columns=cols)
df['2nd'] = df['2nd'] * 2.0
self.assertEqual(sorted(df.blocks.keys()), ['float64', 'int64'])
assert_frame_equal(df.blocks['float64'], DataFrame(
[[1.0, 4.0], [4.0, 10.0]], columns=cols[:2]))
assert_frame_equal(df.blocks['int64'], DataFrame(
[[3], [6]], columns=cols[2:]))
def test_copy(self):
cp = self.mgr.copy(deep=False)
for blk, cp_blk in zip(self.mgr.blocks, cp.blocks):
# view assertion
self.assertTrue(cp_blk.equals(blk))
self.assertTrue(cp_blk.values.base is blk.values.base)
cp = self.mgr.copy(deep=True)
for blk, cp_blk in zip(self.mgr.blocks, cp.blocks):
# copy assertion we either have a None for a base or in case of
# some blocks it is an array (e.g. datetimetz), but was copied
self.assertTrue(cp_blk.equals(blk))
if cp_blk.values.base is not None and blk.values.base is not None:
self.assertFalse(cp_blk.values.base is blk.values.base)
else:
self.assertTrue(cp_blk.values.base is None and blk.values.base
is None)
def test_sparse(self):
mgr = create_mgr('a: sparse-1; b: sparse-2')
# what to test here?
self.assertEqual(mgr.as_matrix().dtype, np.float64)
def test_sparse_mixed(self):
mgr = create_mgr('a: sparse-1; b: sparse-2; c: f8')
self.assertEqual(len(mgr.blocks), 3)
self.assertIsInstance(mgr, BlockManager)
# what to test here?
def test_as_matrix_float(self):
mgr = create_mgr('c: f4; d: f2; e: f8')
self.assertEqual(mgr.as_matrix().dtype, np.float64)
mgr = create_mgr('c: f4; d: f2')
self.assertEqual(mgr.as_matrix().dtype, np.float32)
def test_as_matrix_int_bool(self):
    """Integer/bool managers interleave to the widest dtype present."""
    cases = [
        ('a: bool-1; b: bool-2', np.bool_),
        ('a: i8-1; b: i8-2; c: i4; d: i2; e: u1', np.int64),
        ('c: i4; d: i2; e: u1', np.int32),
    ]
    for spec, expected in cases:
        self.assertEqual(create_mgr(spec).as_matrix().dtype, expected)
def test_as_matrix_datetime(self):
    """Datetime blocks interleave to a datetime64[ns] matrix."""
    dt_mgr = create_mgr('h: datetime-1; g: datetime-2')
    self.assertEqual(dt_mgr.as_matrix().dtype, 'M8[ns]')
def test_as_matrix_datetime_tz(self):
    """tz-aware columns keep their own dtype but interleave as object."""
    mgr = create_mgr('h: M8[ns, US/Eastern]; g: M8[ns, CET]')
    for name, expected in [('h', 'datetime64[ns, US/Eastern]'),
                           ('g', 'datetime64[ns, CET]')]:
        self.assertEqual(mgr.get(name).dtype, expected)
    # Two different timezones cannot share one M8 matrix.
    self.assertEqual(mgr.as_matrix().dtype, 'object')
def test_astype(self):
    """astype coerces numeric blocks; with raise_on_error=False,
    non-castable (object) blocks are left untouched."""
    # coerce all
    mgr = create_mgr('c: f4; d: f2; e: f8')
    for t in ['float16', 'float32', 'float64', 'int32', 'int64']:
        t = np.dtype(t)
        tmgr = mgr.astype(t)
        self.assertEqual(tmgr.get('c').dtype.type, t)
        self.assertEqual(tmgr.get('d').dtype.type, t)
        self.assertEqual(tmgr.get('e').dtype.type, t)

    # mixed
    mgr = create_mgr('a,b: object; c: bool; d: datetime;'
                     'e: f4; f: f2; g: f8')
    for t in ['float16', 'float32', 'float64', 'int32', 'int64']:
        t = np.dtype(t)
        tmgr = mgr.astype(t, raise_on_error=False)
        self.assertEqual(tmgr.get('c').dtype.type, t)
        self.assertEqual(tmgr.get('e').dtype.type, t)
        self.assertEqual(tmgr.get('f').dtype.type, t)
        self.assertEqual(tmgr.get('g').dtype.type, t)
        # object columns survive the failed cast unchanged
        self.assertEqual(tmgr.get('a').dtype.type, np.object_)
        self.assertEqual(tmgr.get('b').dtype.type, np.object_)
        if t != np.int64:
            self.assertEqual(tmgr.get('d').dtype.type, np.datetime64)
        else:
            # datetime64 data reinterprets as int64
            self.assertEqual(tmgr.get('d').dtype.type, t)
def test_convert(self):
    """convert(numeric=True) parses numeric-looking object columns and
    leaves everything else alone."""
    def _compare(old_mgr, new_mgr):
        """ compare the blocks, numeric compare ==, object don't """
        old_blocks = set(old_mgr.blocks)
        new_blocks = set(new_mgr.blocks)
        self.assertEqual(len(old_blocks), len(new_blocks))

        # compare non-numeric: every block's values must appear on the
        # other side (order-insensitive, both directions)
        for b in old_blocks:
            found = False
            for nb in new_blocks:
                if (b.values == nb.values).all():
                    found = True
                    break
            self.assertTrue(found)

        for b in new_blocks:
            found = False
            for ob in old_blocks:
                if (b.values == ob.values).all():
                    found = True
                    break
            self.assertTrue(found)

    # noops: purely numeric managers come back unchanged
    mgr = create_mgr('f: i8; g: f8')
    new_mgr = mgr.convert()
    _compare(mgr, new_mgr)

    mgr = create_mgr('a, b: object; f: i8; g: f8')
    new_mgr = mgr.convert()
    _compare(mgr, new_mgr)

    # convert: '1' -> int64, '2.' -> float64, 'foo.' stays object
    mgr = create_mgr('a,b,foo: object; f: i8; g: f8')
    mgr.set('a', np.array(['1'] * N, dtype=np.object_))
    mgr.set('b', np.array(['2.'] * N, dtype=np.object_))
    mgr.set('foo', np.array(['foo.'] * N, dtype=np.object_))
    new_mgr = mgr.convert(numeric=True)
    self.assertEqual(new_mgr.get('a').dtype, np.int64)
    self.assertEqual(new_mgr.get('b').dtype, np.float64)
    self.assertEqual(new_mgr.get('foo').dtype, np.object_)
    self.assertEqual(new_mgr.get('f').dtype, np.int64)
    self.assertEqual(new_mgr.get('g').dtype, np.float64)

    # non-object blocks keep their original dtypes through convert
    mgr = create_mgr('a,b,foo: object; f: i4; bool: bool; dt: datetime;'
                     'i: i8; g: f8; h: f2')
    mgr.set('a', np.array(['1'] * N, dtype=np.object_))
    mgr.set('b', np.array(['2.'] * N, dtype=np.object_))
    mgr.set('foo', np.array(['foo.'] * N, dtype=np.object_))
    new_mgr = mgr.convert(numeric=True)
    self.assertEqual(new_mgr.get('a').dtype, np.int64)
    self.assertEqual(new_mgr.get('b').dtype, np.float64)
    self.assertEqual(new_mgr.get('foo').dtype, np.object_)
    self.assertEqual(new_mgr.get('f').dtype, np.int32)
    self.assertEqual(new_mgr.get('bool').dtype, np.bool_)
    self.assertEqual(new_mgr.get('dt').dtype.type, np.datetime64)
    self.assertEqual(new_mgr.get('i').dtype, np.int64)
    self.assertEqual(new_mgr.get('g').dtype, np.float64)
    self.assertEqual(new_mgr.get('h').dtype, np.float16)
def test_interleave(self):
    """as_matrix() must interleave blocks into the correct common dtype.

    Fixes a stray trailing comma after the ``'a: category; b: category'``
    assertion, which turned that statement into a one-element tuple
    expression; the repetitive dtype cases are now table-driven.
    """
    # self: a single dtype (alone or duplicated) round-trips unchanged
    for dtype in ['f8', 'i8', 'object', 'bool', 'complex', 'M8[ns]',
                  'm8[ns]']:
        mgr = create_mgr('a: {0}'.format(dtype))
        self.assertEqual(mgr.as_matrix().dtype, dtype)
        mgr = create_mgr('a: {0}; b: {0}'.format(dtype))
        self.assertEqual(mgr.as_matrix().dtype, dtype)

    # will be converted according the actual dtype of the underlying
    cases = [
        ('a: category', 'i8'),
        ('a: category; b: category', 'i8'),
        ('a: category; b: category2', 'object'),
        ('a: category2', 'object'),
        ('a: category2; b: category2', 'object'),
        # combinations
        ('a: f8', 'f8'),
        ('a: f8; b: i8', 'f8'),
        ('a: f4; b: i8', 'f4'),
        ('a: f4; b: i8; d: object', 'object'),
        ('a: bool; b: i8', 'object'),
        ('a: complex', 'complex'),
        ('a: f8; b: category', 'object'),
        ('a: M8[ns]; b: category', 'object'),
        ('a: M8[ns]; b: bool', 'object'),
        ('a: M8[ns]; b: i8', 'object'),
        ('a: m8[ns]; b: bool', 'object'),
        ('a: m8[ns]; b: i8', 'object'),
        ('a: M8[ns]; b: m8[ns]', 'object'),
    ]
    for spec, expected in cases:
        self.assertEqual(create_mgr(spec).as_matrix().dtype, expected)
def test_interleave_non_unique_cols(self):
    """Duplicate column labels must not change the interleaved values."""
    df = DataFrame([
        [pd.Timestamp('20130101'), 3.5],
        [pd.Timestamp('20130102'), 4.5]],
        columns=['x', 'x'],
        index=[1, 2])

    df_unique = df.copy()
    df_unique.columns = ['x', 'y']
    self.assertEqual(df_unique.values.shape, df.values.shape)
    # Row-by-row, the duplicated-column frame matches the unique one.
    for row in range(2):
        tm.assert_numpy_array_equal(df_unique.values[row], df.values[row])
def test_consolidate(self):
    # Placeholder: consolidation behaviour is exercised by
    # test_consolidate_ordering_issues below.
    pass
def test_consolidate_ordering_issues(self):
    """Consolidated numeric data must end up in ascending ref-loc order."""
    self.mgr.set('f', randn(N))
    self.mgr.set('d', randn(N))
    self.mgr.set('b', randn(N))
    self.mgr.set('g', randn(N))
    self.mgr.set('h', randn(N))

    # we have datetime/tz blocks in self.mgr
    cons = self.mgr.consolidate()
    self.assertEqual(cons.nblocks, 4)
    cons = self.mgr.consolidate().get_numeric_data()
    self.assertEqual(cons.nblocks, 1)
    tm.assertIsInstance(cons.blocks[0].mgr_locs, lib.BlockPlacement)
    # the single consolidated block's placement is 0..len(items)-1
    tm.assert_numpy_array_equal(cons.blocks[0].mgr_locs.as_array,
                                np.arange(len(cons.items), dtype=np.int64))
def test_reindex_index(self):
    # TODO(review): index-axis reindexing is currently uncovered here;
    # see TestIndexing.test_reindex_axis for the generative coverage.
    pass
def test_reindex_items(self):
    """Reindexing the item axis consolidates blocks and keeps column data."""
    # mgr is not consolidated, f8 & f8-2 blocks
    mgr = create_mgr('a: f8; b: i8; c: f8; d: i8; e: f8;'
                     'f: bool; g: f8-2')

    reindexed = mgr.reindex_axis(['g', 'c', 'a', 'd'], axis=0)
    self.assertEqual(reindexed.nblocks, 2)
    tm.assert_index_equal(reindexed.items, pd.Index(['g', 'c', 'a', 'd']))

    # Every retained column must carry identical data, whichever access
    # path is used.
    for item in ('g', 'c', 'a', 'd'):
        assert_almost_equal(mgr.get(item, fastpath=False),
                            reindexed.get(item, fastpath=False))
        assert_almost_equal(mgr.get(item).internal_values(),
                            reindexed.get(item).internal_values())
def test_multiindex_xs(self):
    """Cross-section on a MultiIndex axis keeps only the matching labels."""
    mgr = create_mgr('a,b,c: f8; d,e,f: i8')

    index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'], ['one', 'two',
                                                              'three']],
                       labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
                               [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
                       names=['first', 'second'])
    mgr.set_axis(1, index)
    result = mgr.xs('bar', axis=1)
    # 'bar' maps to two second-level labels: ('bar', 'one'), ('bar', 'two')
    self.assertEqual(result.shape, (6, 2))
    self.assertEqual(result.axes[1][0], ('bar', 'one'))
    self.assertEqual(result.axes[1][1], ('bar', 'two'))
def test_get_numeric_data(self):
    """get_numeric_data() selects numeric columns; the default result
    shares storage with the source manager while copy=True isolates it.

    Fixes the post-copy assertion, which previously re-checked
    ``numeric.items`` instead of the freshly created ``numeric2.items``.
    """
    mgr = create_mgr('int: int; float: float; complex: complex;'
                     'str: object; bool: bool; obj: object; dt: datetime',
                     item_shape=(3, ))
    mgr.set('obj', np.array([1, 2, 3], dtype=np.object_))

    numeric = mgr.get_numeric_data()
    tm.assert_index_equal(numeric.items,
                          pd.Index(['int', 'float', 'complex', 'bool']))
    assert_almost_equal(
        mgr.get('float', fastpath=False), numeric.get('float',
                                                      fastpath=False))
    assert_almost_equal(
        mgr.get('float').internal_values(),
        numeric.get('float').internal_values())

    # Check sharing: a write through the view shows up in the source.
    numeric.set('float', np.array([100., 200., 300.]))
    assert_almost_equal(
        mgr.get('float', fastpath=False), np.array([100., 200., 300.]))
    assert_almost_equal(
        mgr.get('float').internal_values(), np.array([100., 200., 300.]))

    numeric2 = mgr.get_numeric_data(copy=True)
    tm.assert_index_equal(numeric2.items,
                          pd.Index(['int', 'float', 'complex', 'bool']))
    # A write into the deep copy must NOT leak back into the source.
    numeric2.set('float', np.array([1000., 2000., 3000.]))
    assert_almost_equal(
        mgr.get('float', fastpath=False), np.array([100., 200., 300.]))
    assert_almost_equal(
        mgr.get('float').internal_values(), np.array([100., 200., 300.]))
def test_get_bool_data(self):
    """get_bool_data() selects only true bool columns; the default result
    shares storage with the source, copy=True does not."""
    mgr = create_mgr('int: int; float: float; complex: complex;'
                     'str: object; bool: bool; obj: object; dt: datetime',
                     item_shape=(3, ))
    mgr.set('obj', np.array([True, False, True], dtype=np.object_))

    bools = mgr.get_bool_data()
    # object columns holding booleans are NOT selected
    tm.assert_index_equal(bools.items, pd.Index(['bool']))
    assert_almost_equal(mgr.get('bool', fastpath=False),
                        bools.get('bool', fastpath=False))
    assert_almost_equal(
        mgr.get('bool').internal_values(),
        bools.get('bool').internal_values())

    bools.set('bool', np.array([True, False, True]))
    tm.assert_numpy_array_equal(mgr.get('bool', fastpath=False),
                                np.array([True, False, True]))
    tm.assert_numpy_array_equal(mgr.get('bool').internal_values(),
                                np.array([True, False, True]))

    # Check sharing: a deep copy must not write back into the source.
    bools2 = mgr.get_bool_data(copy=True)
    bools2.set('bool', np.array([False, True, False]))
    tm.assert_numpy_array_equal(mgr.get('bool', fastpath=False),
                                np.array([True, False, True]))
    tm.assert_numpy_array_equal(mgr.get('bool').internal_values(),
                                np.array([True, False, True]))
def test_unicode_repr_doesnt_raise(self):
    """Smoke test: repr of a manager with a non-ASCII item must not raise."""
    unicode_mgr = create_mgr(u('b,\u05d0: object'))
    repr(unicode_mgr)
def test_missing_unicode_key(self):
    """A missing non-ASCII column must raise KeyError, never
    UnicodeEncodeError."""
    df = DataFrame({"a": [1]})
    try:
        df.ix[:, u("\u05d0")]  # should not raise UnicodeEncodeError
    except KeyError:
        pass  # this is the expected exception
def test_equals(self):
    """Block order must not affect equality, with unique or duplicate items."""
    for mgr_string in ('a,b,c: i8-1; d,e,f: i8-2',
                       'a,a,a: i8-1; b,b,b: i8-2'):
        left = create_mgr(mgr_string)
        right = BlockManager(left.blocks[::-1], left.axes)
        self.assertTrue(left.equals(right))
def test_equals_block_order_different_dtypes(self):
    # GH 9330
    """equals() must be insensitive to block ordering across mixed dtypes."""
    mgr_strings = [
        "a:i8;b:f8",  # basic case
        "a:i8;b:f8;c:c8;d:b",  # many types
        "a:i8;e:dt;f:td;g:string",  # more types
        "a:i8;b:category;c:category2;d:category2",  # categories
        "c:sparse;d:sparse_na;b:f8",  # sparse
    ]
    for mgr_string in mgr_strings:
        bm = create_mgr(mgr_string)
        block_perms = itertools.permutations(bm.blocks)
        for bm_perm in block_perms:
            bm_this = BlockManager(bm_perm, bm.axes)
            # equality must hold in both directions
            self.assertTrue(bm.equals(bm_this))
            self.assertTrue(bm_this.equals(bm))
def test_single_mgr_ctor(self):
    """A single manager of n rows holds the values 0..n-1 as floats."""
    single = create_single_mgr('f8', num_rows=5)
    expected = [0., 1., 2., 3., 4.]
    self.assertEqual(single.as_matrix().tolist(), expected)
class TestIndexing(object):
    """Generative (nose ``yield``) tests comparing BlockManager indexing
    routines against the same operations on a plain np.ndarray."""
    # Nosetests-style data-driven tests.
    #
    # This test applies different indexing routines to block managers and
    # compares the outcome to the result of same operations on np.ndarray.
    #
    # NOTE: sparse (SparseBlock with fill_value != np.nan) fail a lot of tests
    #       and are disabled.

    MANAGERS = [
        create_single_mgr('f8', N),
        create_single_mgr('i8', N),
        # create_single_mgr('sparse', N),
        create_single_mgr('sparse_na', N),

        # 2-dim
        create_mgr('a,b,c,d,e,f: f8', item_shape=(N,)),
        create_mgr('a,b,c,d,e,f: i8', item_shape=(N,)),
        create_mgr('a,b: f8; c,d: i8; e,f: string', item_shape=(N,)),
        create_mgr('a,b: f8; c,d: i8; e,f: f8', item_shape=(N,)),
        # create_mgr('a: sparse', item_shape=(N,)),
        create_mgr('a: sparse_na', item_shape=(N,)),

        # 3-dim
        create_mgr('a,b,c,d,e,f: f8', item_shape=(N, N)),
        create_mgr('a,b,c,d,e,f: i8', item_shape=(N, N)),
        create_mgr('a,b: f8; c,d: i8; e,f: string', item_shape=(N, N)),
        create_mgr('a,b: f8; c,d: i8; e,f: f8', item_shape=(N, N)),
        # create_mgr('a: sparse', item_shape=(1, N)),
    ]

    # MANAGERS = [MANAGERS[6]]

    def test_get_slice(self):
        """get_slice must match ndarray slicing for slices, boolean masks
        and fancy indexers on every axis."""
        def assert_slice_ok(mgr, axis, slobj):
            # import pudb; pudb.set_trace()
            mat = mgr.as_matrix()

            # we maybe using an ndarray to test slicing and
            # might not be the full length of the axis
            if isinstance(slobj, np.ndarray):
                ax = mgr.axes[axis]
                if len(ax) and len(slobj) and len(slobj) != len(ax):
                    # pad short boolean masks with False up to axis length
                    slobj = np.concatenate([slobj, np.zeros(
                        len(ax) - len(slobj), dtype=bool)])
            sliced = mgr.get_slice(slobj, axis=axis)
            mat_slobj = (slice(None), ) * axis + (slobj, )
            tm.assert_numpy_array_equal(mat[mat_slobj], sliced.as_matrix(),
                                        check_dtype=False)
            tm.assert_index_equal(mgr.axes[axis][slobj], sliced.axes[axis])

        for mgr in self.MANAGERS:
            for ax in range(mgr.ndim):
                # slice
                yield assert_slice_ok, mgr, ax, slice(None)
                yield assert_slice_ok, mgr, ax, slice(3)
                yield assert_slice_ok, mgr, ax, slice(100)
                yield assert_slice_ok, mgr, ax, slice(1, 4)
                yield assert_slice_ok, mgr, ax, slice(3, 0, -2)

                # boolean mask
                yield assert_slice_ok, mgr, ax, np.array([], dtype=np.bool_)
                yield (assert_slice_ok, mgr, ax,
                       np.ones(mgr.shape[ax], dtype=np.bool_))
                yield (assert_slice_ok, mgr, ax,
                       np.zeros(mgr.shape[ax], dtype=np.bool_))

                if mgr.shape[ax] >= 3:
                    yield (assert_slice_ok, mgr, ax,
                           np.arange(mgr.shape[ax]) % 3 == 0)
                    yield (assert_slice_ok, mgr, ax, np.array(
                        [True, True, False], dtype=np.bool_))

                # fancy indexer
                yield assert_slice_ok, mgr, ax, []
                yield assert_slice_ok, mgr, ax, lrange(mgr.shape[ax])

                if mgr.shape[ax] >= 3:
                    yield assert_slice_ok, mgr, ax, [0, 1, 2]
                    yield assert_slice_ok, mgr, ax, [-1, -2, -3]

    def test_take(self):
        """take must match np.take for empty/repeated/negative indexers."""
        def assert_take_ok(mgr, axis, indexer):
            mat = mgr.as_matrix()
            taken = mgr.take(indexer, axis)
            tm.assert_numpy_array_equal(np.take(mat, indexer, axis),
                                        taken.as_matrix(), check_dtype=False)
            tm.assert_index_equal(mgr.axes[axis].take(indexer),
                                  taken.axes[axis])

        for mgr in self.MANAGERS:
            for ax in range(mgr.ndim):
                # take/fancy indexer
                yield assert_take_ok, mgr, ax, []
                yield assert_take_ok, mgr, ax, [0, 0, 0]
                yield assert_take_ok, mgr, ax, lrange(mgr.shape[ax])

                if mgr.shape[ax] >= 3:
                    yield assert_take_ok, mgr, ax, [0, 1, 2]
                    yield assert_take_ok, mgr, ax, [-1, -2, -3]

    def test_reindex_axis(self):
        """reindex_axis must match take_nd on the equivalent indexer,
        honouring fill_value for labels that are not present."""
        def assert_reindex_axis_is_ok(mgr, axis, new_labels, fill_value):
            mat = mgr.as_matrix()
            indexer = mgr.axes[axis].get_indexer_for(new_labels)

            reindexed = mgr.reindex_axis(new_labels, axis,
                                         fill_value=fill_value)
            tm.assert_numpy_array_equal(algos.take_nd(mat, indexer, axis,
                                                      fill_value=fill_value),
                                        reindexed.as_matrix(),
                                        check_dtype=False)
            tm.assert_index_equal(reindexed.axes[axis], new_labels)

        for mgr in self.MANAGERS:
            for ax in range(mgr.ndim):
                for fill_value in (None, np.nan, 100.):
                    yield (assert_reindex_axis_is_ok, mgr, ax,
                           pd.Index([]), fill_value)
                    yield (assert_reindex_axis_is_ok, mgr, ax, mgr.axes[ax],
                           fill_value)
                    yield (assert_reindex_axis_is_ok, mgr, ax,
                           mgr.axes[ax][[0, 0, 0]], fill_value)
                    yield (assert_reindex_axis_is_ok, mgr, ax,
                           pd.Index(['foo', 'bar', 'baz']), fill_value)
                    yield (assert_reindex_axis_is_ok, mgr, ax,
                           pd.Index(['foo', mgr.axes[ax][0], 'baz']),
                           fill_value)

                    if mgr.shape[ax] >= 3:
                        yield (assert_reindex_axis_is_ok, mgr, ax,
                               mgr.axes[ax][:-3], fill_value)
                        yield (assert_reindex_axis_is_ok, mgr, ax,
                               mgr.axes[ax][-3::-1], fill_value)
                        yield (assert_reindex_axis_is_ok, mgr, ax,
                               mgr.axes[ax][[0, 1, 2, 0, 1, 2]], fill_value)

    def test_reindex_indexer(self):
        """reindex_indexer must match take_nd for an explicit indexer,
        including -1 entries which take the fill_value."""
        def assert_reindex_indexer_is_ok(mgr, axis, new_labels, indexer,
                                         fill_value):
            mat = mgr.as_matrix()
            reindexed_mat = algos.take_nd(mat, indexer, axis,
                                          fill_value=fill_value)
            reindexed = mgr.reindex_indexer(new_labels, indexer, axis,
                                            fill_value=fill_value)
            tm.assert_numpy_array_equal(reindexed_mat,
                                        reindexed.as_matrix(),
                                        check_dtype=False)
            tm.assert_index_equal(reindexed.axes[axis], new_labels)

        for mgr in self.MANAGERS:
            for ax in range(mgr.ndim):
                for fill_value in (None, np.nan, 100.):
                    yield (assert_reindex_indexer_is_ok, mgr, ax,
                           pd.Index([]), [], fill_value)
                    yield (assert_reindex_indexer_is_ok, mgr, ax,
                           mgr.axes[ax], np.arange(mgr.shape[ax]), fill_value)
                    yield (assert_reindex_indexer_is_ok, mgr, ax,
                           pd.Index(['foo'] * mgr.shape[ax]),
                           np.arange(mgr.shape[ax]), fill_value)
                    yield (assert_reindex_indexer_is_ok, mgr, ax,
                           mgr.axes[ax][::-1], np.arange(mgr.shape[ax]),
                           fill_value)
                    yield (assert_reindex_indexer_is_ok, mgr, ax, mgr.axes[ax],
                           np.arange(mgr.shape[ax])[::-1], fill_value)
                    yield (assert_reindex_indexer_is_ok, mgr, ax,
                           pd.Index(['foo', 'bar', 'baz']),
                           [0, 0, 0], fill_value)
                    yield (assert_reindex_indexer_is_ok, mgr, ax,
                           pd.Index(['foo', 'bar', 'baz']),
                           [-1, 0, -1], fill_value)
                    yield (assert_reindex_indexer_is_ok, mgr, ax,
                           pd.Index(['foo', mgr.axes[ax][0], 'baz']),
                           [-1, -1, -1], fill_value)

                    if mgr.shape[ax] >= 3:
                        yield (assert_reindex_indexer_is_ok, mgr, ax,
                               pd.Index(['foo', 'bar', 'baz']),
                               [0, 1, 2], fill_value)

    # test_get_slice(slice_like, axis)
    # take(indexer, axis)
    # reindex_axis(new_labels, axis)
    # reindex_indexer(new_labels, indexer, axis)
class TestBlockPlacement(tm.TestCase):
    """Unit tests for lib.BlockPlacement slice/array handling."""

    _multiprocess_can_split_ = True

    def test_slice_len(self):
        self.assertEqual(len(BlockPlacement(slice(0, 4))), 4)
        self.assertEqual(len(BlockPlacement(slice(0, 4, 2))), 2)
        self.assertEqual(len(BlockPlacement(slice(0, 3, 2))), 2)

        self.assertEqual(len(BlockPlacement(slice(0, 1, 2))), 1)
        self.assertEqual(len(BlockPlacement(slice(1, 0, -1))), 1)

    def test_zero_step_raises(self):
        # a zero step never terminates; must be rejected up front
        self.assertRaises(ValueError, BlockPlacement, slice(1, 1, 0))
        self.assertRaises(ValueError, BlockPlacement, slice(1, 2, 0))

    def test_unbounded_slice_raises(self):
        def assert_unbounded_slice_error(slc):
            self.assertRaisesRegexp(ValueError, "unbounded slice",
                                    lambda: BlockPlacement(slc))

        assert_unbounded_slice_error(slice(None, None))
        assert_unbounded_slice_error(slice(10, None))
        assert_unbounded_slice_error(slice(None, None, -1))
        assert_unbounded_slice_error(slice(None, 10, -1))

        # These are "unbounded" because negative index will change depending on
        # container shape.
        assert_unbounded_slice_error(slice(-1, None))
        assert_unbounded_slice_error(slice(None, -1))
        assert_unbounded_slice_error(slice(-1, -1))
        assert_unbounded_slice_error(slice(-1, None, -1))
        assert_unbounded_slice_error(slice(None, -1, -1))
        assert_unbounded_slice_error(slice(-1, -1, -1))

    def test_not_slice_like_slices(self):
        # empty or negatively-directed-but-empty slices are not slice-like
        def assert_not_slice_like(slc):
            self.assertTrue(not BlockPlacement(slc).is_slice_like)

        assert_not_slice_like(slice(0, 0))
        assert_not_slice_like(slice(100, 0))

        assert_not_slice_like(slice(100, 100, -1))
        assert_not_slice_like(slice(0, 100, -1))

        self.assertTrue(not BlockPlacement(slice(0, 0)).is_slice_like)
        self.assertTrue(not BlockPlacement(slice(100, 100)).is_slice_like)

    def test_array_to_slice_conversion(self):
        # evenly spaced arrays collapse back to an equivalent slice
        def assert_as_slice_equals(arr, slc):
            self.assertEqual(BlockPlacement(arr).as_slice, slc)

        assert_as_slice_equals([0], slice(0, 1, 1))
        assert_as_slice_equals([100], slice(100, 101, 1))

        assert_as_slice_equals([0, 1, 2], slice(0, 3, 1))
        assert_as_slice_equals([0, 5, 10], slice(0, 15, 5))
        assert_as_slice_equals([0, 100], slice(0, 200, 100))

        assert_as_slice_equals([2, 1], slice(2, 0, -1))
        assert_as_slice_equals([2, 1, 0], slice(2, None, -1))
        assert_as_slice_equals([100, 0], slice(100, None, -100))

    def test_not_slice_like_arrays(self):
        # arrays with negative entries or uneven spacing stay arrays
        def assert_not_slice_like(arr):
            self.assertTrue(not BlockPlacement(arr).is_slice_like)

        assert_not_slice_like([])
        assert_not_slice_like([-1])
        assert_not_slice_like([-1, -2, -3])
        assert_not_slice_like([-10])
        assert_not_slice_like([-1])
        assert_not_slice_like([-1, 0, 1, 2])
        assert_not_slice_like([-2, 0, 2, 4])
        assert_not_slice_like([1, 0, -1])
        assert_not_slice_like([1, 1, 1])

    def test_slice_iter(self):
        self.assertEqual(list(BlockPlacement(slice(0, 3))), [0, 1, 2])
        self.assertEqual(list(BlockPlacement(slice(0, 0))), [])
        self.assertEqual(list(BlockPlacement(slice(3, 0))), [])

        self.assertEqual(list(BlockPlacement(slice(3, 0, -1))), [3, 2, 1])
        self.assertEqual(list(BlockPlacement(slice(3, None, -1))),
                         [3, 2, 1, 0])

    def test_slice_to_array_conversion(self):
        def assert_as_array_equals(slc, asarray):
            tm.assert_numpy_array_equal(
                BlockPlacement(slc).as_array,
                np.asarray(asarray, dtype=np.int64))

        assert_as_array_equals(slice(0, 3), [0, 1, 2])
        assert_as_array_equals(slice(0, 0), [])
        assert_as_array_equals(slice(3, 0), [])

        assert_as_array_equals(slice(3, 0, -1), [3, 2, 1])
        assert_as_array_equals(slice(3, None, -1), [3, 2, 1, 0])
        assert_as_array_equals(slice(31, None, -10), [31, 21, 11, 1])

    def test_blockplacement_add(self):
        bpl = BlockPlacement(slice(0, 5))
        self.assertEqual(bpl.add(1).as_slice, slice(1, 6, 1))
        self.assertEqual(bpl.add(np.arange(5)).as_slice, slice(0, 10, 2))
        self.assertEqual(list(bpl.add(np.arange(5, 0, -1))), [5, 5, 5, 5, 5])

    def test_blockplacement_add_int(self):
        def assert_add_equals(val, inc, result):
            self.assertEqual(list(BlockPlacement(val).add(inc)), result)

        assert_add_equals(slice(0, 0), 0, [])
        assert_add_equals(slice(1, 4), 0, [1, 2, 3])
        assert_add_equals(slice(3, 0, -1), 0, [3, 2, 1])
        assert_add_equals(slice(2, None, -1), 0, [2, 1, 0])
        assert_add_equals([1, 2, 4], 0, [1, 2, 4])

        assert_add_equals(slice(0, 0), 10, [])
        assert_add_equals(slice(1, 4), 10, [11, 12, 13])
        assert_add_equals(slice(3, 0, -1), 10, [13, 12, 11])
        assert_add_equals(slice(2, None, -1), 10, [12, 11, 10])
        assert_add_equals([1, 2, 4], 10, [11, 12, 14])

        assert_add_equals(slice(0, 0), -1, [])
        assert_add_equals(slice(1, 4), -1, [0, 1, 2])
        assert_add_equals(slice(3, 0, -1), -1, [2, 1, 0])
        assert_add_equals([1, 2, 4], -1, [0, 1, 3])

        # shifting any position below zero is invalid
        self.assertRaises(ValueError,
                          lambda: BlockPlacement(slice(1, 4)).add(-10))
        self.assertRaises(ValueError,
                          lambda: BlockPlacement([1, 2, 4]).add(-10))
        self.assertRaises(ValueError,
                          lambda: BlockPlacement(slice(2, None, -1)).add(-1))
if __name__ == '__main__':
    # Run this module's tests under nose, stopping at the first failure
    # and dropping into pdb on errors/failures.
    nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
                   exit=False)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an 'analysis_level' field to statistic maps.

    Auto-generated Django schema migration: adds ``analysis_level`` to both
    ``StatisticMap`` and ``NIDMResultStatisticMap``, and re-declares the
    ``polymorphic_ctype``/``map_type`` field definitions to match the
    current model state. Do not edit field definitions by hand.
    """

    dependencies = [
        ('statmaps', '0046_auto_20150428_0616'),
    ]

    operations = [
        migrations.AddField(
            model_name='nidmresultstatisticmap',
            name='analysis_level',
            field=models.CharField(choices=[(b'S', b'single-subject'), (b'G', b'group'), (b'M', b'meta-analysis'), (b'Other', b'Other')], max_length=200, blank=True, help_text=b'What level of summary data was used as the input to this analysis?', null=True, verbose_name=b'Analysis level'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='statisticmap',
            name='analysis_level',
            field=models.CharField(choices=[(b'S', b'single-subject'), (b'G', b'group'), (b'M', b'meta-analysis'), (b'Other', b'Other')], max_length=200, blank=True, help_text=b'What level of summary data was used as the input to this analysis?', null=True, verbose_name=b'Analysis level'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='image',
            name='polymorphic_ctype',
            field=models.ForeignKey(related_name='polymorphic_statmaps.image_set+', editable=False, to='contenttypes.ContentType', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='nidmresultstatisticmap',
            name='map_type',
            field=models.CharField(help_text=b'Type of statistic that is the basis of the inference', max_length=200, verbose_name=b'Map type', choices=[(b'T', b'T map'), (b'Z', b'Z map'), (b'F', b'F map'), (b'X2', b'Chi squared map'), (b'P', b'P map (given null hypothesis)'), (b'Q', b'weight/beta map'), (b'R', b'ROI/mask'), (b'Pa', b'parcellation'), (b'Other', b'Other')]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='statisticmap',
            name='map_type',
            field=models.CharField(help_text=b'Type of statistic that is the basis of the inference', max_length=200, verbose_name=b'Map type', choices=[(b'T', b'T map'), (b'Z', b'Z map'), (b'F', b'F map'), (b'X2', b'Chi squared map'), (b'P', b'P map (given null hypothesis)'), (b'Q', b'weight/beta map'), (b'R', b'ROI/mask'), (b'Pa', b'parcellation'), (b'Other', b'Other')]),
            preserve_default=True,
        ),
    ]
#!/bin/sh
# Emit "#undef je_<name>" for each symbol listed in the file given as $1.
# Each input token looks like "name[:mangling]"; everything before the
# first ':' is the public symbol name.
#
# Fix: quote "$1" and "${nm}" so a path containing spaces or glob
# characters is not word-split or expanded by the shell.
for nm in `cat "$1"` ; do
  n=`echo "${nm}" |tr ':' ' ' |awk '{print $1}'`
  echo "#undef je_${n}"
done
/*
* Copyright 2014-2023 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.server.auth
/**
 * Native (`posix`) actualization of the OAuth 1.0a authentication step.
 *
 * OAuth1a is not implemented for Kotlin/Native targets, so this always fails.
 *
 * @throws NotImplementedError unconditionally.
 */
internal actual suspend fun OAuthAuthenticationProvider.oauth1a(
    authProviderName: String?,
    context: AuthenticationContext
) {
    throw NotImplementedError("OAuth1 is not supported on native targets")
}
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2014 Thomas Voegtlin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from threading import Lock
from bitcoin import Hash, hash_encode
from transaction import Transaction
from util import print_error, print_msg
class WalletSynchronizer():
    '''The synchronizer keeps the wallet up-to-date with its set of
    addresses and their transactions. It subscribes over the network
    to wallet addresses, gets the wallet to generate new addresses
    when necessary, requests the transaction history of any addresses
    we don't have the full history of, and requests binary transaction
    data of any transactions the wallet doesn't have.

    External interface: __init__() and add() member functions.
    '''

    def __init__(self, wallet, network):
        self.wallet = wallet
        self.network = network
        # Addresses queued by add(); drained on each main_loop() pass.
        self.new_addresses = set()
        # Entries are (tx_hash, tx_height) tuples
        self.requested_tx = set()
        # addr -> server status string for an in-flight history request
        self.requested_histories = {}
        self.requested_addrs = set()
        # Protects new_addresses, which add() writes from other threads.
        self.lock = Lock()
        self.initialize()

    def print_error(self, *msg):
        # Tag diagnostics so they are attributable to the synchronizer.
        print_error("[Synchronizer]", *msg)

    def print_msg(self, *msg):
        print_msg("[Synchronizer]", *msg)

    def parse_response(self, response):
        """Return (params, result) from a server response, or (None, None)
        when the response carries an error."""
        if response.get('error'):
            self.print_error("response error:", response)
            return None, None
        return response['params'], response['result']

    def is_up_to_date(self):
        # Up to date once no subscriptions, histories or transactions
        # remain outstanding.
        return (not self.requested_tx and not self.requested_histories
                and not self.requested_addrs)

    def add(self, address):
        '''This can be called from the proxy or GUI threads.'''
        with self.lock:
            self.new_addresses.add(address)

    def subscribe_to_addresses(self, addresses):
        # Request a status subscription for each address in one batch.
        if addresses:
            self.requested_addrs |= addresses
            msgs = map(lambda addr: ('blockchain.address.subscribe', [addr]),
                       addresses)
            self.network.send(msgs, self.addr_subscription_response)

    def addr_subscription_response(self, response):
        params, result = self.parse_response(response)
        if not params:
            return
        addr = params[0]
        if addr in self.requested_addrs:  # Notifications won't be in
            self.requested_addrs.remove(addr)
        history = self.wallet.get_address_history(addr)
        # Fetch history only when the server's status differs from ours
        # and no request for this address is already in flight.
        if self.wallet.get_status(history) != result:
            if self.requested_histories.get(addr) is None:
                self.network.send([('blockchain.address.get_history',
                                    [addr])],
                                  self.addr_history_response)
                self.requested_histories[addr] = result

    def addr_history_response(self, response):
        params, result = self.parse_response(response)
        if not params:
            return
        addr = params[0]
        self.print_error("receiving history", addr, len(result))
        server_status = self.requested_histories.pop(addr)

        # Check that txids are unique
        hashes = set(map(lambda item: item['tx_hash'], result))
        if len(hashes) != len(result):
            self.print_error("error: server history has non-unique txids: %s"% addr)
            return

        # Check that the status corresponds to what was announced
        hist = map(lambda item: (item['tx_hash'], item['height']), result)
        if self.wallet.get_status(hist) != server_status:
            self.print_error("error: status mismatch: %s" % addr)
            return

        # Store received history
        self.wallet.receive_history_callback(addr, hist)

        # Request transactions we don't have
        self.request_missing_txs(hist)

    def tx_response(self, response):
        params, result = self.parse_response(response)
        if not params:
            return
        tx_hash, tx_height = params
        # The server must return the transaction whose hash we asked for.
        assert tx_hash == hash_encode(Hash(result.decode('hex')))
        tx = Transaction(result)
        try:
            tx.deserialize()
        except Exception:
            self.print_msg("cannot deserialize transaction, skipping", tx_hash)
            return
        self.wallet.receive_tx_callback(tx_hash, tx, tx_height)
        self.requested_tx.remove((tx_hash, tx_height))
        self.print_error("received tx:", tx_hash, len(tx.raw))
        if not self.requested_tx:
            self.network.trigger_callback('updated')
            # Updated gets called too many times from other places as
            # well; if we used that signal we get the notification
            # three times
            self.network.trigger_callback("new_transaction")

    def request_missing_txs(self, hist):
        # "hist" is a list of [tx_hash, tx_height] lists
        missing = set()
        for tx_hash, tx_height in hist:
            if self.wallet.transactions.get(tx_hash) is None:
                missing.add((tx_hash, tx_height))
        # Skip transactions that are already in flight.
        missing -= self.requested_tx
        if missing:
            requests = [('blockchain.transaction.get', tx) for tx in missing]
            self.network.send(requests, self.tx_response)
            self.requested_tx |= missing

    def initialize(self):
        '''Check the initial state of the wallet. Subscribe to all its
        addresses, and request any transactions in its address history
        we don't have.
        '''
        for history in self.wallet.history.values():
            # Old electrum servers returned ['*'] when all history for
            # the address was pruned. This no longer happens but may
            # remain in old wallets.
            if history == ['*']:
                continue
            self.request_missing_txs(history)

        if self.requested_tx:
            self.print_error("missing tx", self.requested_tx)
        self.subscribe_to_addresses(set(self.wallet.addresses(True)))

    def main_loop(self):
        '''Called from the network proxy thread main loop.'''
        # 1. Create new addresses
        self.wallet.synchronize()

        # 2. Subscribe to new addresses (swap the queue under the lock so
        #    add() callers are never blocked by network work)
        with self.lock:
            addresses = self.new_addresses
            self.new_addresses = set()
        self.subscribe_to_addresses(addresses)

        # 3. Detect if situation has changed
        up_to_date = self.is_up_to_date()
        if up_to_date != self.wallet.is_up_to_date():
            self.wallet.set_up_to_date(up_to_date)
            if up_to_date:
                self.wallet.save_transactions()
            self.network.trigger_callback('updated')
# Copyright 2013 Big Switch Networks, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Adapted from neutron.tests.unit.test_l3_plugin
# @author: Sumit Naiksatam, sumitnaiksatam@gmail.com
#
import contextlib
import copy
import mock
from oslo.config import cfg
from six import moves
from webob import exc
from neutron.common import test_lib
from neutron import context
from neutron.extensions import l3
from neutron import manager
from neutron.openstack.common import uuidutils
from neutron.plugins.bigswitch.extensions import routerrule
from neutron.tests.unit.bigswitch import fake_server
from neutron.tests.unit.bigswitch import test_base
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_extension_extradhcpopts as test_extradhcp
from neutron.tests.unit import test_l3_plugin
HTTPCON = 'neutron.plugins.bigswitch.servermanager.httplib.HTTPConnection'
_uuid = uuidutils.generate_uuid
class RouterRulesTestExtensionManager(object):
    """Extension manager exposing the L3 API with router-rule attributes."""

    def get_resources(self):
        # Graft the Big Switch router-rule attributes onto the stock
        # router resource map before returning the standard L3 resources.
        extended = routerrule.EXTENDED_ATTRIBUTES_2_0['routers']
        l3.RESOURCE_ATTRIBUTE_MAP['routers'].update(extended)
        return l3.L3.get_resources()

    def get_actions(self):
        # No extra actions are contributed by this extension.
        return []

    def get_request_extensions(self):
        # No request extensions either.
        return []
class DHCPOptsTestCase(test_base.BigSwitchTestBase,
                       test_extradhcp.TestExtraDhcpOpt):
    """Runs the upstream extra-DHCP-option tests against the Big Switch plugin."""

    def setUp(self, plugin=None):
        # Install the Big Switch mocks and config files before any base
        # setUp runs, so the plugin loads against the fake backend.
        self.setup_patches()
        self.setup_config_files()
        # NOTE(review): super() is deliberately anchored at
        # ExtraDhcpOptDBTestCase rather than this class, which skips that
        # class's own setUp and resumes further up the MRO — presumably so
        # its plugin choice doesn't override self._plugin_name; confirm
        # before refactoring.
        super(test_extradhcp.ExtraDhcpOptDBTestCase,
              self).setUp(plugin=self._plugin_name)
        self.startHttpPatch()
class RouterDBTestBase(test_base.BigSwitchTestBase,
                       test_l3_plugin.L3BaseForIntTests,
                       test_l3_plugin.L3NatTestCaseMixin):
    """Shared fixture for Big Switch L3 tests.

    Wires the router-rule extension manager into the upstream L3 test
    harness and starts the fake HTTP backend used by the restproxy plugin.
    """

    def setUp(self):
        # Patches and config files must exist before the plugin is loaded
        # by the parent setUp.
        self.setup_patches()
        self.setup_config_files()
        ext_mgr = RouterRulesTestExtensionManager()
        super(RouterDBTestBase, self).setUp(plugin=self._plugin_name,
                                            ext_mgr=ext_mgr)
        # The plugin under test does not support overlapping subnets.
        cfg.CONF.set_default('allow_overlapping_ips', False)
        self.plugin_obj = manager.NeutronManager.get_plugin()
        self.startHttpPatch()

    def tearDown(self):
        super(RouterDBTestBase, self).tearDown()
        # Drop the per-test config-file override so later suites start clean.
        del test_lib.test_config['config_files']
class RouterDBTestCase(RouterDBTestBase,
                       test_l3_plugin.L3NatDBIntTestCase):
    """L3/NAT and router-rule tests against the Big Switch restproxy plugin.

    Inherits the stock Neutron L3 DB integration tests and adds coverage
    for the Big Switch 'router_rules' extension, backend rollback behavior
    and multi-tenant floating-IP handling.  All controller HTTP traffic is
    answered by the fake_server mocks installed in RouterDBTestBase.
    """

    def test_router_remove_router_interface_wrong_subnet_returns_400(self):
        with self.router() as r:
            with self.subnet() as s:
                with self.subnet(cidr='10.0.10.0/24') as s1:
                    with self.port(subnet=s1, no_delete=True) as p:
                        self._router_interface_action('add',
                                                      r['router']['id'],
                                                      None,
                                                      p['port']['id'])
                        # Removing by a subnet the port is NOT on must 400.
                        self._router_interface_action('remove',
                                                      r['router']['id'],
                                                      s['subnet']['id'],
                                                      p['port']['id'],
                                                      exc.HTTPBadRequest.code)
                        #remove properly to clean-up
                        self._router_interface_action('remove',
                                                      r['router']['id'],
                                                      None,
                                                      p['port']['id'])

    def test_router_remove_router_interface_wrong_port_returns_404(self):
        with self.router() as r:
            with self.subnet() as s:
                with self.port(subnet=s, no_delete=True) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # create another port for testing failure case
                    res = self._create_port('json', p['port']['network_id'])
                    p2 = self.deserialize('json', res)
                    # Removing a port that was never attached must 404.
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p2['port']['id'],
                                                  exc.HTTPNotFound.code)
                    # remove correct interface to cleanup
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # remove extra port created
                    self._delete('ports', p2['port']['id'])

    def test_multi_tenant_flip_alllocation(self):
        # Two tenants, each with a router/subnet/port, sharing one
        # external network; the second floating IP is created while the
        # backend is mocked by VerifyMultiTenantFloatingIP, which checks
        # tenant isolation of the proxied request.
        tenant1_id = _uuid()
        tenant2_id = _uuid()
        with contextlib.nested(
            self.network(tenant_id=tenant1_id),
            self.network(tenant_id=tenant2_id)) as (n1, n2):
            with contextlib.nested(
                self.subnet(network=n1, cidr='11.0.0.0/24'),
                self.subnet(network=n2, cidr='12.0.0.0/24'),
                self.subnet(cidr='13.0.0.0/24')) as (s1, s2, psub):
                with contextlib.nested(
                    self.router(tenant_id=tenant1_id),
                    self.router(tenant_id=tenant2_id),
                    self.port(subnet=s1, tenant_id=tenant1_id),
                    self.port(subnet=s2, tenant_id=tenant2_id)) as (r1, r2,
                                                                    p1, p2):
                    self._set_net_external(psub['subnet']['network_id'])
                    s1id = p1['port']['fixed_ips'][0]['subnet_id']
                    s2id = p2['port']['fixed_ips'][0]['subnet_id']
                    s1 = {'subnet': {'id': s1id}}
                    s2 = {'subnet': {'id': s2id}}
                    self._add_external_gateway_to_router(
                        r1['router']['id'],
                        psub['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r2['router']['id'],
                        psub['subnet']['network_id'])
                    self._router_interface_action(
                        'add', r1['router']['id'],
                        s1['subnet']['id'], None)
                    self._router_interface_action(
                        'add', r2['router']['id'],
                        s2['subnet']['id'], None)
                    fl1 = self._make_floatingip_for_tenant_port(
                        net_id=psub['subnet']['network_id'],
                        port_id=p1['port']['id'],
                        tenant_id=tenant1_id)
                    # Swap the default HTTP mock for the multi-tenant
                    # verifying mock just for the second floating IP.
                    self.httpPatch.stop()
                    multiFloatPatch = mock.patch(
                        HTTPCON,
                        new=fake_server.VerifyMultiTenantFloatingIP)
                    multiFloatPatch.start()
                    fl2 = self._make_floatingip_for_tenant_port(
                        net_id=psub['subnet']['network_id'],
                        port_id=p2['port']['id'],
                        tenant_id=tenant2_id)
                    multiFloatPatch.stop()
                    self.httpPatch.start()
                    self._delete('floatingips', fl1['floatingip']['id'])
                    self._delete('floatingips', fl2['floatingip']['id'])
                    self._router_interface_action(
                        'remove', r1['router']['id'],
                        s1['subnet']['id'], None)
                    self._router_interface_action(
                        'remove', r2['router']['id'],
                        s2['subnet']['id'], None)

    def _make_floatingip_for_tenant_port(self, net_id, port_id, tenant_id):
        # Helper: create (and deserialize) a floating IP owned by
        # tenant_id, associated with port_id on external net net_id.
        data = {'floatingip': {'floating_network_id': net_id,
                               'tenant_id': tenant_id,
                               'port_id': port_id}}
        floatingip_req = self.new_create_request('floatingips', data, self.fmt)
        res = floatingip_req.get_response(self.ext_api)
        return self.deserialize(self.fmt, res)

    def test_floatingip_with_invalid_create_port(self):
        self._test_floatingip_with_invalid_create_port(
            'neutron.plugins.bigswitch.plugin.NeutronRestProxyV2')

    def test_create_floatingip_no_ext_gateway_return_404(self):
        with self.subnet(cidr='10.0.10.0/24') as public_sub:
            self._set_net_external(public_sub['subnet']['network_id'])
            with self.port() as private_port:
                with self.router():
                    # No gateway set on the router -> association must 404.
                    res = self._create_floatingip(
                        'json',
                        public_sub['subnet']['network_id'],
                        port_id=private_port['port']['id'])
                    self.assertEqual(res.status_int, exc.HTTPNotFound.code)

    def test_router_update_gateway(self):
        with self.router() as r:
            with self.subnet() as s1:
                with self.subnet(cidr='10.0.10.0/24') as s2:
                    self._set_net_external(s1['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s1['subnet']['network_id'])
                    body = self._show('routers', r['router']['id'])
                    net_id = (body['router']
                              ['external_gateway_info']['network_id'])
                    self.assertEqual(net_id, s1['subnet']['network_id'])
                    # Re-point the gateway at a second external network.
                    self._set_net_external(s2['subnet']['network_id'])
                    self._add_external_gateway_to_router(
                        r['router']['id'],
                        s2['subnet']['network_id'])
                    body = self._show('routers', r['router']['id'])
                    net_id = (body['router']
                              ['external_gateway_info']['network_id'])
                    self.assertEqual(net_id, s2['subnet']['network_id'])
                    self._remove_external_gateway_from_router(
                        r['router']['id'],
                        s2['subnet']['network_id'])

    # The following upstream tests rely on overlapping-IP support, which
    # this plugin disables (see RouterDBTestBase.setUp).
    def test_router_add_interface_overlapped_cidr(self):
        self.skipTest("Plugin does not support")

    def test_router_add_interface_overlapped_cidr_returns_400(self):
        self.skipTest("Plugin does not support")

    def test_list_nets_external(self):
        self.skipTest("Plugin does not support")

    def test_router_update_gateway_with_existed_floatingip(self):
        with self.subnet(cidr='10.0.10.0/24') as subnet:
            self._set_net_external(subnet['subnet']['network_id'])
            with self.floatingip_with_assoc() as fip:
                # Moving the gateway while a floating IP is attached
                # must be rejected with 409.
                self._add_external_gateway_to_router(
                    fip['floatingip']['router_id'],
                    subnet['subnet']['network_id'],
                    expected_code=exc.HTTPConflict.code)

    def test_router_remove_interface_wrong_subnet_returns_400(self):
        with self.router() as r:
            with self.subnet(cidr='10.0.10.0/24') as s:
                with self.port(no_delete=True) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  s['subnet']['id'],
                                                  p['port']['id'],
                                                  exc.HTTPBadRequest.code)
                    #remove properly to clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])

    def test_router_remove_interface_wrong_port_returns_404(self):
        with self.router() as r:
            with self.subnet(cidr='10.0.10.0/24'):
                with self.port(no_delete=True) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # create another port for testing failure case
                    res = self._create_port('json', p['port']['network_id'])
                    p2 = self.deserialize('json', res)
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p2['port']['id'],
                                                  exc.HTTPNotFound.code)
                    # remove correct interface to cleanup
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    # remove extra port created
                    self._delete('ports', p2['port']['id'])

    def test_send_data(self):
        # Builds a full topology (two routers, internal subnets, an
        # external gateway and a floating IP), then verifies that the
        # plugin can re-sync everything to the backend via
        # _send_all_data() before tearing the topology back down.
        fmt = 'json'
        plugin_obj = manager.NeutronManager.get_plugin()
        with self.router() as r:
            r_id = r['router']['id']
            with self.subnet(cidr='10.0.10.0/24') as s:
                s_id = s['subnet']['id']
                with self.router() as r1:
                    r1_id = r1['router']['id']
                    body = self._router_interface_action('add', r_id, s_id,
                                                         None)
                    self.assertIn('port_id', body)
                    r_port_id = body['port_id']
                    body = self._show('ports', r_port_id)
                    self.assertEqual(body['port']['device_id'], r_id)
                    with self.subnet(cidr='10.0.20.0/24') as s1:
                        s1_id = s1['subnet']['id']
                        body = self._router_interface_action('add', r1_id,
                                                             s1_id, None)
                        self.assertIn('port_id', body)
                        r1_port_id = body['port_id']
                        body = self._show('ports', r1_port_id)
                        self.assertEqual(body['port']['device_id'], r1_id)
                        with self.subnet(cidr='11.0.0.0/24') as public_sub:
                            public_net_id = public_sub['subnet']['network_id']
                            self._set_net_external(public_net_id)
                            with self.port() as prv_port:
                                prv_fixed_ip = prv_port['port']['fixed_ips'][0]
                                priv_sub_id = prv_fixed_ip['subnet_id']
                                self._add_external_gateway_to_router(
                                    r_id, public_net_id)
                                self._router_interface_action('add', r_id,
                                                              priv_sub_id,
                                                              None)
                                priv_port_id = prv_port['port']['id']
                                res = self._create_floatingip(
                                    fmt, public_net_id,
                                    port_id=priv_port_id)
                                self.assertEqual(res.status_int,
                                                 exc.HTTPCreated.code)
                                floatingip = self.deserialize(fmt, res)
                                # Full-state sync to the backend must
                                # succeed with HTTP 200.
                                result = plugin_obj._send_all_data()
                                self.assertEqual(result[0], 200)
                                self._delete('floatingips',
                                             floatingip['floatingip']['id'])
                                self._remove_external_gateway_from_router(
                                    r_id, public_net_id)
                                self._router_interface_action('remove', r_id,
                                                              priv_sub_id,
                                                              None)
                        self._router_interface_action('remove', r_id, s_id,
                                                      None)
                        self._show('ports', r_port_id,
                                   expected_code=exc.HTTPNotFound.code)
                        self._router_interface_action('remove', r1_id, s1_id,
                                                      None)
                        self._show('ports', r1_port_id,
                                   expected_code=exc.HTTPNotFound.code)

    def test_router_rules_update(self):
        with self.router() as r:
            r_id = r['router']['id']
            router_rules = [{'destination': '1.2.3.4/32',
                             'source': '4.3.2.1/32',
                             'action': 'permit',
                             'nexthops': ['4.4.4.4', '4.4.4.5']}]
            body = self._update('routers', r_id,
                                {'router': {'router_rules': router_rules}})
            body = self._show('routers', r['router']['id'])
            self.assertIn('router_rules', body['router'])
            rules = body['router']['router_rules']
            self.assertEqual(_strip_rule_ids(rules), router_rules)
            # Try after adding another rule
            router_rules.append({'source': 'external',
                                 'destination': '8.8.8.8/32',
                                 'action': 'permit', 'nexthops': []})
            body = self._update('routers', r['router']['id'],
                                {'router': {'router_rules': router_rules}})
            body = self._show('routers', r['router']['id'])
            self.assertIn('router_rules', body['router'])
            rules = body['router']['router_rules']
            self.assertEqual(_strip_rule_ids(rules), router_rules)

    def test_router_rules_separation(self):
        # Rules applied to one router must not leak onto another.
        with self.router() as r1:
            with self.router() as r2:
                r1_id = r1['router']['id']
                r2_id = r2['router']['id']
                router1_rules = [{'destination': '5.6.7.8/32',
                                  'source': '8.7.6.5/32',
                                  'action': 'permit',
                                  'nexthops': ['8.8.8.8', '9.9.9.9']}]
                router2_rules = [{'destination': '1.2.3.4/32',
                                  'source': '4.3.2.1/32',
                                  'action': 'permit',
                                  'nexthops': ['4.4.4.4', '4.4.4.5']}]
                body1 = self._update('routers', r1_id,
                                     {'router':
                                      {'router_rules': router1_rules}})
                body2 = self._update('routers', r2_id,
                                     {'router':
                                      {'router_rules': router2_rules}})
                body1 = self._show('routers', r1_id)
                body2 = self._show('routers', r2_id)
                rules1 = body1['router']['router_rules']
                rules2 = body2['router']['router_rules']
                self.assertEqual(_strip_rule_ids(rules1), router1_rules)
                self.assertEqual(_strip_rule_ids(rules2), router2_rules)

    def test_router_rules_validation(self):
        with self.router() as r:
            r_id = r['router']['id']
            good_rules = [{'destination': '1.2.3.4/32',
                           'source': '4.3.2.1/32',
                           'action': 'permit',
                           'nexthops': ['4.4.4.4', '4.4.4.5']}]
            body = self._update('routers', r_id,
                                {'router': {'router_rules': good_rules}})
            body = self._show('routers', r_id)
            self.assertIn('router_rules', body['router'])
            self.assertEqual(good_rules,
                             _strip_rule_ids(body['router']['router_rules']))
            # Missing nexthops should be populated with an empty list
            light_rules = copy.deepcopy(good_rules)
            del light_rules[0]['nexthops']
            body = self._update('routers', r_id,
                                {'router': {'router_rules': light_rules}})
            body = self._show('routers', r_id)
            self.assertIn('router_rules', body['router'])
            light_rules[0]['nexthops'] = []
            self.assertEqual(light_rules,
                             _strip_rule_ids(body['router']['router_rules']))
            # bad CIDR
            bad_rules = copy.deepcopy(good_rules)
            bad_rules[0]['destination'] = '1.1.1.1'
            body = self._update('routers', r_id,
                                {'router': {'router_rules': bad_rules}},
                                expected_code=exc.HTTPBadRequest.code)
            # bad next hop
            bad_rules = copy.deepcopy(good_rules)
            bad_rules[0]['nexthops'] = ['1.1.1.1', 'f2']
            body = self._update('routers', r_id,
                                {'router': {'router_rules': bad_rules}},
                                expected_code=exc.HTTPBadRequest.code)
            # bad action
            bad_rules = copy.deepcopy(good_rules)
            bad_rules[0]['action'] = 'dance'
            body = self._update('routers', r_id,
                                {'router': {'router_rules': bad_rules}},
                                expected_code=exc.HTTPBadRequest.code)
            # duplicate rule with opposite action
            bad_rules = copy.deepcopy(good_rules)
            bad_rules.append(copy.deepcopy(bad_rules[0]))
            bad_rules.append(copy.deepcopy(bad_rules[0]))
            bad_rules[1]['source'] = 'any'
            bad_rules[2]['action'] = 'deny'
            body = self._update('routers', r_id,
                                {'router': {'router_rules': bad_rules}},
                                expected_code=exc.HTTPBadRequest.code)
            # duplicate nexthop
            bad_rules = copy.deepcopy(good_rules)
            bad_rules[0]['nexthops'] = ['1.1.1.1', '1.1.1.1']
            body = self._update('routers', r_id,
                                {'router': {'router_rules': bad_rules}},
                                expected_code=exc.HTTPBadRequest.code)
            # make sure light rules persisted during bad updates
            body = self._show('routers', r_id)
            self.assertIn('router_rules', body['router'])
            self.assertEqual(light_rules,
                             _strip_rule_ids(body['router']['router_rules']))

    def test_router_rules_config_change(self):
        # Routers created after a tenant_default_router_rule override
        # must pick up the configured default rules.
        cfg.CONF.set_override('tenant_default_router_rule',
                              ['*:any:any:deny',
                               '*:8.8.8.8/32:any:permit:1.2.3.4'],
                              'ROUTER')
        with self.router() as r:
            body = self._show('routers', r['router']['id'])
            expected_rules = [{'source': 'any', 'destination': 'any',
                               'nexthops': [], 'action': 'deny'},
                              {'source': '8.8.8.8/32', 'destination': 'any',
                               'nexthops': ['1.2.3.4'], 'action': 'permit'}]
            self.assertEqual(expected_rules,
                             _strip_rule_ids(body['router']['router_rules']))

    def test_rule_exhaustion(self):
        # 11 rules against a max of 10 must be rejected with 400.
        cfg.CONF.set_override('max_router_rules', 10, 'ROUTER')
        with self.router() as r:
            rules = []
            for i in moves.xrange(1, 12):
                rule = {'source': 'any', 'nexthops': [],
                        'destination': '1.1.1.' + str(i) + '/32',
                        'action': 'permit'}
                rules.append(rule)
            self._update('routers', r['router']['id'],
                         {'router': {'router_rules': rules}},
                         expected_code=exc.HTTPBadRequest.code)

    def test_rollback_on_router_create(self):
        # Backend 500 during create -> the router must not persist.
        tid = test_api_v2._uuid()
        self.httpPatch.stop()
        with mock.patch(HTTPCON, new=fake_server.HTTPConnectionMock500):
            self._create_router('json', tid)
        self.assertTrue(len(self._get_routers(tid)) == 0)

    def test_rollback_on_router_update(self):
        with self.router() as r:
            data = {'router': {'name': 'aNewName'}}
            self.httpPatch.stop()
            with mock.patch(HTTPCON, new=fake_server.HTTPConnectionMock500):
                self.new_update_request(
                    'routers', data, r['router']['id']).get_response(self.api)
            self.httpPatch.start()
            updatedr = self._get_routers(r['router']['tenant_id'])[0]
            # name should have stayed the same due to failure
            self.assertEqual(r['router']['name'], updatedr['name'])

    def test_rollback_on_router_delete(self):
        with self.router() as r:
            self.httpPatch.stop()
            with mock.patch(HTTPCON, new=fake_server.HTTPConnectionMock500):
                self._delete('routers', r['router']['id'],
                             expected_code=exc.HTTPInternalServerError.code)
            self.httpPatch.start()
            # The router must survive the failed delete.
            self.assertEqual(r['router']['id'],
                             self._get_routers(r['router']['tenant_id']
                                               )[0]['id'])

    def _get_routers(self, tenant_id):
        # Helper: list routers directly via the plugin for tenant_id.
        ctx = context.Context('', tenant_id)
        return self.plugin_obj.get_routers(ctx)
def _strip_rule_ids(rules):
cleaned = []
for rule in rules:
del rule['id']
cleaned.append(rule)
return cleaned | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/env python3
import sys, os, argparse
def format_n(x):
    """Render *x* as a compact 3-significant-figure string.

    Shortens the exponent ('e-05' -> 'e-5') and drops the leading zero of
    fractional values ('0.123' -> '.123', '-0.5' -> '-.5').
    """
    text = '{:.3g}'.format(float(x))
    text = text.replace('e-0', 'e-')
    # A string can start with '0.' or '-0.' but never both, so the two
    # leading-zero cases are mutually exclusive.
    if text[:2] == '0.':
        text = text[1:]
    elif text[:3] == '-0.':
        text = '-' + text[2:]
    return text
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Quantize network file to decrease the file size.')
    parser.add_argument("input",
                        help='Input file', type=str)
    parser.add_argument("-o", "--output",
                        help='Output file. Defaults to input + "_quantized"',
                        required=False, type=str, default=None)
    args = parser.parse_args()
    if args.output is None:
        # Derive "<name>_quantized<ext>" from the parsed input argument.
        # Bug fix: this previously read sys.argv[1] directly, which is
        # wrong whenever the -o/--output option precedes the positional.
        root, ext = os.path.splitext(args.input)
        output_name = root + '_quantized' + ext
    else:
        output_name = args.output
    calculate_error = True
    error = 0
    # 'with' guarantees both handles are closed even if a line fails to parse.
    with open(args.input, 'r') as f, open(output_name, 'w') as output:
        for line in f:
            fields = line.split(' ')
            lineq = list(map(format_n, fields))
            if calculate_error:
                # Accumulate the mean squared rounding error of this line.
                e = sum((float(fields[i]) - float(lineq[i])) ** 2
                        for i in range(len(fields)))
                error += e / len(fields)
            output.write(' '.join(lineq) + '\n')
    if calculate_error:
        print('Weight file difference L2-norm: {}'.format(error ** 0.5))
#!/usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Copyright 2007 Red Hat, Inc - written by seth vidal skvidal at fedoraproject.org
import os
import rpm
import types
from yum.packages import YumLocalPackage
from yum.Errors import *
from yum import misc
import utils
import tempfile
class CreateRepoPackage(YumLocalPackage):
    """YumLocalPackage specialized for createrepo.

    Adds checksum caching and direct SQLite dumps of the primary,
    filelists and other (changelog) metadata databases.  This is
    Python-2 era code (``has_key``, ``types.NoneType``, string-keyed
    hdr access via the rpm bindings).
    """

    def __init__(self, ts, package, sumtype=None, external_data={}):
        # NOTE(review): the mutable default for external_data is only
        # read, never mutated, so the shared-default pitfall is benign
        # here — but don't start mutating it.
        YumLocalPackage.__init__(self, ts, package)
        if sumtype:
            # Override the checksum algorithm (e.g. 'sha256').
            self.checksum_type = sumtype
        if external_data:
            # Graft caller-supplied attributes (crp_* settings such as
            # _cachedir, crp_reldir, crp_packagenumber) onto the package.
            for (key, val) in external_data.items():
                setattr(self, key, val)

    def _do_checksum(self):
        """return a checksum for a package:
           - check if the checksum cache is enabled
              if not - return the checksum
              if so - check to see if it has a cache file
               if so, open it and return the first line's contents
               if not, grab the checksum and write it to a file for this pkg
        """
        # already got it
        if self._checksum:
            return self._checksum
        # not using the cachedir
        if not hasattr(self, '_cachedir') or not self._cachedir:
            self._checksum = misc.checksum(self.checksum_type, self.localpath)
            self._checksums = [(self.checksum_type, self._checksum, 1)]
            return self._checksum
        # Build the cache key from the package's signature/header ids so
        # a re-signed package gets a fresh cache entry.
        t = []
        if type(self.hdr[rpm.RPMTAG_SIGGPG]) is not types.NoneType:
            t.append("".join(self.hdr[rpm.RPMTAG_SIGGPG]))
        if type(self.hdr[rpm.RPMTAG_SIGPGP]) is not types.NoneType:
            t.append("".join(self.hdr[rpm.RPMTAG_SIGPGP]))
        if type(self.hdr[rpm.RPMTAG_HDRID]) is not types.NoneType:
            t.append("".join(self.hdr[rpm.RPMTAG_HDRID]))
        kcsum = misc.Checksums(checksums=[self.checksum_type])
        kcsum.update("".join(t))
        key = kcsum.hexdigest()
        csumtag = '%s-%s-%s-%s' % (os.path.basename(self.localpath),
                                   key, self.size, self.filetime)
        csumfile = '%s/%s' % (self._cachedir, csumtag)
        # Reuse the cached checksum only if it is at least as new as the file.
        if os.path.exists(csumfile) and float(self.filetime) <= float(os.stat(csumfile)[-2]):
            csumo = open(csumfile, 'r')
            checksum = csumo.readline()
            csumo.close()
        else:
            checksum = misc.checksum(self.checksum_type, self.localpath)
            # This is atomic cache creation via. rename, so we can have two
            # tasks using the same cachedir ... mash does this.
            try:
                (csumo, tmpfilename) = tempfile.mkstemp(dir=self._cachedir)
                csumo = os.fdopen(csumo, 'w', -1)
                csumo.write(checksum)
                csumo.close()
                os.rename(tmpfilename, csumfile)
            except:
                # Best-effort cache write: a failure here only costs a
                # re-checksum next run, so it is deliberately swallowed.
                pass
        self._checksum = checksum
        self._checksums = [(self.checksum_type, checksum, 1)]
        return self._checksum

    # sqlite-direct dump code below here :-/
    def _sqlite_null(self, item):
        # Map falsy values to SQL NULL (None) for parameter binding.
        if not item:
            return None
        return item

    def do_primary_sqlite_dump(self, cur):
        """insert primary data in place, this assumes the tables exist"""
        # Store the path relative to crp_reldir when the package lives
        # under it; otherwise fall back to the absolute local path.
        if self.crp_reldir and self.localpath.startswith(self.crp_reldir):
            relpath = self.localpath.replace(self.crp_reldir, '')
            if relpath[0] == '/': relpath = relpath[1:]
        else:
            relpath = self.localpath
        p = (self.crp_packagenumber, self.checksum, self.name, self.arch,
             self.version, self.epoch, self.release, self.summary.strip(),
             self.description.strip(), self._sqlite_null(self.url), self.filetime,
             self.buildtime, self._sqlite_null(self.license),
             self._sqlite_null(self.vendor), self._sqlite_null(self.group),
             self._sqlite_null(self.buildhost), self._sqlite_null(self.sourcerpm),
             self.hdrstart, self.hdrend, self._sqlite_null(self.packager),
             self.packagesize, self.size, self.archivesize, relpath,
             self.crp_baseurl, self.checksum_type)
        q = """insert into packages values (?, ?, ?, ?, ?, ?,
        ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?, ?, ?,
        ?, ?, ?)"""
        # write out all of do_primary_sqlite as an executescript - work on the
        # quoting for pretty much any contingency - take from sqlutils.py
        #
        # e
        #p = None
        #q = """insert into packages values (%s, %s, %s, %s, """
        cur.execute(q, p)
        # provides, obsoletes, conflicts
        for pco in ('obsoletes', 'provides', 'conflicts'):
            thispco = []
            for (name, flag, (epoch, ver, rel)) in getattr(self, pco):
                thispco.append((name, flag, epoch, ver, rel, self.crp_packagenumber))
            q = "insert into %s values (?, ?, ?, ?, ?, ?)" % pco
            cur.executemany(q, thispco)
        # requires (rpmlib() pseudo-requires are internal and skipped)
        reqs = []
        for (name, flag, (epoch, ver, rel), pre) in self._requires_with_pre():
            if name.startswith('rpmlib('):
                continue
            pre_bool = 'FALSE'
            if pre == 1:
                pre_bool = 'TRUE'
            reqs.append((name, flag, epoch, ver,rel, self.crp_packagenumber, pre_bool))
        q = "insert into requires values (?, ?, ?, ?, ?, ?, ?)"
        cur.executemany(q, reqs)
        # files
        p = []
        for f in self._return_primary_files():
            p.append((f,))
        if p:
            q = "insert into files values (?, 'file', %s)" % self.crp_packagenumber
            cur.executemany(q, p)
        # dirs
        p = []
        for f in self._return_primary_dirs():
            p.append((f,))
        if p:
            q = "insert into files values (?, 'dir', %s)" % self.crp_packagenumber
            cur.executemany(q, p)
        # ghosts
        p = []
        for f in self._return_primary_files(list_of_files = self.returnFileEntries('ghost')):
            p.append((f,))
        if p:
            q = "insert into files values (?, 'ghost', %s)" % self.crp_packagenumber
            cur.executemany(q, p)

    def do_filelists_sqlite_dump(self, cur):
        """inserts filelists data in place, this assumes the tables exist"""
        # insert packagenumber + checksum into 'packages' table
        q = 'insert into packages values (?, ?)'
        p = (self.crp_packagenumber, self.checksum)
        cur.execute(q, p)
        # break up filelists and encode them
        dirs = {}
        for (filetype, files) in [('file', self.filelist), ('dir', self.dirlist),
                                  ('ghost', self.ghostlist)]:
            for filename in files:
                (dirname,filename) = (os.path.split(filename))
                if not dirs.has_key(dirname):
                    dirs[dirname] = {'files':[], 'types':[]}
                dirs[dirname]['files'].append(filename)
                dirs[dirname]['types'].append(filetype)
        # insert packagenumber|dir|files|types into files table
        p = []
        for (dirname,direc) in dirs.items():
            p.append((self.crp_packagenumber, dirname,
                      utils.encodefilenamelist(direc['files']),
                      utils.encodefiletypelist(direc['types'])))
        if p:
            q = 'insert into filelist values (?, ?, ?, ?)'
            cur.executemany(q, p)

    def do_other_sqlite_dump(self, cur):
        """inserts changelog data in place, this assumes the tables exist"""
        # insert packagenumber + checksum into 'packages' table
        q = 'insert into packages values (?, ?)'
        p = (self.crp_packagenumber, self.checksum)
        cur.execute(q, p)
        if self.changelog:
            q = 'insert into changelog ("pkgKey", "date", "author", "changelog") values (%s, ?, ?, ?)' % self.crp_packagenumber
            cur.executemany(q, self.changelog)

    def do_sqlite_dump(self, md_sqlite):
        """write the metadata out to the sqlite dbs"""
        # Each database gets its dump committed before the next starts.
        self.do_primary_sqlite_dump(md_sqlite.primary_cursor)
        md_sqlite.pri_cx.commit()
        self.do_filelists_sqlite_dump(md_sqlite.filelists_cursor)
        md_sqlite.file_cx.commit()
        self.do_other_sqlite_dump(md_sqlite.other_cursor)
        md_sqlite.other_cx.commit()
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_mon_check
short_description: Create or delete a Rackspace Cloud Monitoring check for an
existing entity.
description:
- Create or delete a Rackspace Cloud Monitoring check associated with an
existing rax_mon_entity. A check is a specific test or measurement that is
performed, possibly from different monitoring zones, on the systems you
monitor. Rackspace monitoring module flow | rax_mon_entity ->
*rax_mon_check* -> rax_mon_notification -> rax_mon_notification_plan ->
rax_mon_alarm
version_added: "2.0"
options:
state:
description:
- Ensure that a check with this C(label) exists or does not exist.
choices: ["present", "absent"]
entity_id:
description:
- ID of the rax_mon_entity to target with this check.
required: true
label:
description:
- Defines a label for this check, between 1 and 64 characters long.
required: true
check_type:
description:
- The type of check to create. C(remote.) checks may be created on any
rax_mon_entity. C(agent.) checks may only be created on rax_mon_entities
that have a non-null C(agent_id).
choices:
- remote.dns
- remote.ftp-banner
- remote.http
- remote.imap-banner
- remote.mssql-banner
- remote.mysql-banner
- remote.ping
- remote.pop3-banner
- remote.postgresql-banner
- remote.smtp-banner
- remote.smtp
- remote.ssh
- remote.tcp
- remote.telnet-banner
- agent.filesystem
- agent.memory
- agent.load_average
- agent.cpu
- agent.disk
- agent.network
- agent.plugin
required: true
monitoring_zones_poll:
description:
- Comma-separated list of the names of the monitoring zones the check should
run from. Available monitoring zones include mzdfw, mzhkg, mziad, mzlon,
mzord and mzsyd. Required for remote.* checks; prohibited for agent.* checks.
target_hostname:
description:
- One of `target_hostname` and `target_alias` is required for remote.* checks,
but prohibited for agent.* checks. The hostname this check should target.
Must be a valid IPv4, IPv6, or FQDN.
target_alias:
description:
- One of `target_alias` and `target_hostname` is required for remote.* checks,
but prohibited for agent.* checks. Use the corresponding key in the entity's
`ip_addresses` hash to resolve an IP address to target.
details:
description:
- Additional details specific to the check type. Must be a hash of strings
between 1 and 255 characters long, or an array or object containing 0 to
256 items.
disabled:
description:
- If "yes", ensure the check is created, but don't actually use it yet.
choices: [ "yes", "no" ]
metadata:
description:
- Hash of arbitrary key-value pairs to accompany this check if it fires.
Keys and values must be strings between 1 and 255 characters long.
period:
description:
- The number of seconds between each time the check is performed. Must be
greater than the minimum period set on your account.
timeout:
description:
- The number of seconds this check will wait when attempting to collect
results. Must be less than the period.
author: Ash Wilson
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Create a monitoring check
gather_facts: False
hosts: local
connection: local
tasks:
- name: Associate a check with an existing entity.
rax_mon_check:
credentials: ~/.rax_pub
state: present
entity_id: "{{ the_entity['entity']['id'] }}"
label: the_check
check_type: remote.ping
monitoring_zones_poll: mziad,mzord,mzdfw
details:
count: 10
meta:
hurf: durf
register: the_check
'''
# pyrax is the Rackspace cloud SDK; its absence is recorded here and
# reported from main() so the module can fail with a proper JSON error
# instead of an import traceback.
try:
    import pyrax
    HAS_PYRAX = True
except ImportError:
    HAS_PYRAX = False
def cloud_check(module, state, entity_id, label, check_type,
                monitoring_zones_poll, target_hostname, target_alias, details,
                disabled, metadata, period, timeout):
    """Ensure a Cloud Monitoring check with *label* exists (or not) on an entity.

    Looks the check up by label on the entity, then creates, updates,
    recreates (when immutable 'details' differ) or deletes it as needed,
    and exits the module via module.exit_json/fail_json — this function
    does not return normally.
    """
    # Coerce attributes.
    if monitoring_zones_poll and not isinstance(monitoring_zones_poll, list):
        monitoring_zones_poll = [monitoring_zones_poll]
    if period:
        period = int(period)
    if timeout:
        timeout = int(timeout)
    changed = False
    check = None
    cm = pyrax.cloud_monitoring
    if not cm:
        module.fail_json(msg='Failed to instantiate client. This typically '
                             'indicates an invalid region or an incorrectly '
                             'capitalized region name.')
    entity = cm.get_entity(entity_id)
    if not entity:
        module.fail_json(msg='Failed to instantiate entity. "%s" may not be'
                             ' a valid entity id.' % entity_id)
    # Checks are matched by label, which is expected to be unique per entity.
    existing = [e for e in entity.list_checks() if e.label == label]
    if existing:
        check = existing[0]
    if state == 'present':
        if len(existing) > 1:
            module.fail_json(msg='%s existing checks have a label of %s.' %
                                 (len(existing), label))
        should_delete = False
        should_create = False
        should_update = False
        if check:
            # Details may include keys set to default values that are not
            # included in the initial creation.
            #
            # Only force a recreation of the check if one of the *specified*
            # keys is missing or has a different value.
            if details:
                for (key, value) in details.iteritems():
                    if key not in check.details:
                        should_delete = should_create = True
                    elif value != check.details[key]:
                        should_delete = should_create = True
            # Mutable attributes can be changed with an in-place update.
            should_update = label != check.label or \
                (target_hostname and target_hostname != check.target_hostname) or \
                (target_alias and target_alias != check.target_alias) or \
                (disabled != check.disabled) or \
                (metadata and metadata != check.metadata) or \
                (period and period != check.period) or \
                (timeout and timeout != check.timeout) or \
                (monitoring_zones_poll and monitoring_zones_poll != check.monitoring_zones_poll)
            if should_update and not should_delete:
                check.update(label=label,
                             disabled=disabled,
                             metadata=metadata,
                             monitoring_zones_poll=monitoring_zones_poll,
                             timeout=timeout,
                             period=period,
                             target_alias=target_alias,
                             target_hostname=target_hostname)
                changed = True
        else:
            # The check doesn't exist yet.
            should_create = True
        # Recreate path: delete first, then create with the new details.
        if should_delete:
            check.delete()
        if should_create:
            check = cm.create_check(entity,
                                    label=label,
                                    check_type=check_type,
                                    target_hostname=target_hostname,
                                    target_alias=target_alias,
                                    monitoring_zones_poll=monitoring_zones_poll,
                                    details=details,
                                    disabled=disabled,
                                    metadata=metadata,
                                    period=period,
                                    timeout=timeout)
            changed = True
    elif state == 'absent':
        if check:
            check.delete()
            changed = True
    else:
        module.fail_json(msg='state must be either present or absent.')
    if check:
        # Report the (possibly new) check back to the playbook.
        check_dict = {
            "id": check.id,
            "label": check.label,
            "type": check.type,
            "target_hostname": check.target_hostname,
            "target_alias": check.target_alias,
            "monitoring_zones_poll": check.monitoring_zones_poll,
            "details": check.details,
            "disabled": check.disabled,
            "metadata": check.metadata,
            "period": check.period,
            "timeout": check.timeout
        }
        module.exit_json(changed=changed, check=check_dict)
    else:
        module.exit_json(changed=changed)
def main():
    """Ansible entry point for the Rackspace Cloud Monitoring check module.

    Builds the argument spec, validates that pyrax is importable, unpacks
    the module parameters and delegates all work to cloud_check().
    """
    argument_spec = rax_argument_spec()
    argument_spec.update(dict(
        entity_id=dict(required=True),
        label=dict(required=True),
        check_type=dict(required=True),
        monitoring_zones_poll=dict(),
        target_hostname=dict(),
        target_alias=dict(),
        details=dict(type='dict', default={}),
        disabled=dict(type='bool', default=False),
        metadata=dict(type='dict', default={}),
        period=dict(type='int'),
        timeout=dict(type='int'),
        state=dict(default='present', choices=['present', 'absent'])
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together()
    )

    # Fail early when the pyrax SDK is missing; nothing below can work.
    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    params = module.params
    entity_id = params.get('entity_id')
    label = params.get('label')
    check_type = params.get('check_type')
    monitoring_zones_poll = params.get('monitoring_zones_poll')
    target_hostname = params.get('target_hostname')
    target_alias = params.get('target_alias')
    details = params.get('details')
    # booleans may arrive as strings from playbooks; normalise explicitly
    disabled = module.boolean(params.get('disabled'))
    metadata = params.get('metadata')
    period = params.get('period')
    timeout = params.get('timeout')
    state = params.get('state')

    setup_rax_module(module, pyrax)

    cloud_check(module, state, entity_id, label, check_type,
                monitoring_zones_poll, target_hostname, target_alias, details,
                disabled, metadata, period, timeout)
# Import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# Invoke the module.
main() | unknown | codeparrot/codeparrot-clean | ||
import numpy as np
from astropy.coordinates import ICRS, SkyCoord
import cdshealpix
from astropy.wcs.utils import skycoord_to_pixel
from matplotlib.path import Path
from matplotlib.patches import PathPatch
from .utils import build_plotting_moc
from . import culling_backfacing_cells
from . import axis_viewport
def compute_healpix_vertices(depth, ipix, wcs):
    """Project the corner vertices of the HEALPix cells ``ipix`` at
    ``depth`` through ``wcs`` and return matplotlib Path data.

    Parameters
    ----------
    depth : int or str
        HEALPix depth of the cells (coerced with ``int``; json keys are strings).
    ipix : array-like
        Pixel indices of the cells at ``depth``.
    wcs : astropy.wcs.WCS
        Projection used to map sky coordinates to pixel coordinates.

    Returns
    -------
    (path_vertices, codes)
        ``path_vertices`` is a (5*N, 2) float array — four corners plus a
        placeholder closing vertex per cell — and ``codes`` the matching
        matplotlib Path code array.
    """
    # Removed dead code from the original: an unused `step` variable and
    # `path_vertices`/`codes` initialisers that were always overwritten,
    # plus a stale commented-out 8-vertex variant for depth < 3.
    depth = int(depth)

    ipix_lon, ipix_lat = cdshealpix.vertices(ipix, depth)

    # Reorder the corners so consecutive vertices trace the cell boundary
    # in drawing order.
    ipix_lon = ipix_lon[:, [2, 3, 0, 1]]
    ipix_lat = ipix_lat[:, [2, 3, 0, 1]]
    ipix_boundaries = SkyCoord(ipix_lon, ipix_lat, frame=ICRS())

    # Projection on the given WCS
    xp, yp = skycoord_to_pixel(ipix_boundaries, wcs=wcs)

    c1 = np.vstack((xp[:, 0], yp[:, 0])).T
    c2 = np.vstack((xp[:, 1], yp[:, 1])).T
    c3 = np.vstack((xp[:, 2], yp[:, 2])).T
    c4 = np.vstack((xp[:, 3], yp[:, 3])).T

    # One MOVETO + three LINETO + CLOSEPOLY per cell; the trailing (0, 0)
    # row is the placeholder vertex that CLOSEPOLY requires.
    cells = np.hstack((c1, c2, c3, c4, np.zeros((c1.shape[0], 2))))
    path_vertices = cells.reshape((5 * c1.shape[0], 2))
    single_code = np.array([Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
    codes = np.tile(single_code, c1.shape[0])

    return path_vertices, codes
def compute_the_patches(moc, wcs):
    """Compute the (vertices, codes) path data for every visible cell group.

    The MOC is serialized to its json depth->ipix mapping, cells facing
    away from the projection are culled, and the remaining cells are
    projected through ``wcs`` one depth at a time.
    """
    depth_ipix_d = moc.serialize(format="json")
    visible = culling_backfacing_cells.from_moc(depth_ipix_d=depth_ipix_d, wcs=wcs)
    return [
        compute_healpix_vertices(depth=depth, ipix=ipix, wcs=wcs)
        for depth, ipix in visible.items()
    ]
def add_patches_to_mpl_axe(patches, ax, wcs, **kw_mpl_pathpatch):
    """Merge per-depth (vertices, codes) pairs into a single matplotlib
    PathPatch and add it to ``ax``.

    Parameters
    ----------
    patches : list of (vertices, codes)
        Non-empty list as produced by ``compute_the_patches``.
    ax : matplotlib axis
    wcs : astropy.wcs.WCS
        Used to set the axis viewport after the patch is added.
    **kw_mpl_pathpatch
        Forwarded to ``matplotlib.patches.PathPatch`` (facecolor, alpha, ...).
    """
    # Concatenate all arrays in one call instead of the original
    # np.vstack/np.hstack per element, which copied the accumulated
    # arrays on every iteration (O(n^2) in total).
    path_vertices = np.concatenate([np.asarray(vertices) for vertices, _ in patches])
    path_codes = np.concatenate([np.asarray(codes) for _, codes in patches])

    path = Path(path_vertices, path_codes)
    patches_mpl = PathPatch(path, **kw_mpl_pathpatch)

    # Add the patches to the mpl axis
    ax.add_patch(patches_mpl)

    axis_viewport.set(ax, wcs)
def fill(moc, ax, wcs, **kw_mpl_pathpatch):
    """Draw ``moc`` onto the matplotlib axis ``ax`` using projection ``wcs``.

    Extra keyword arguments are forwarded to the matplotlib PathPatch
    (facecolor, alpha, ...).  Does nothing if no cell of the MOC is
    visible in the field of view.
    """
    # Simplify the MOC for plotting purposes:
    # 1. Degrade the MOC if the FOV is big enough that the smallest HEALPix cells would not be visible anyway.
    # 2. For small FOVs, plot the MOC & POLYGONAL_MOC_FROM_FOV.
    moc_to_plot = build_plotting_moc(moc=moc, wcs=wcs)
    # If the FOV contains no cells, then moc_to_plot (i.e. the intersection between the moc
    # and the MOC created from the FOV polygon) will be empty.
    # If it is the case, we exit the method without doing anything.
    if not moc_to_plot.empty():
        patches = compute_the_patches(moc=moc_to_plot, wcs=wcs)
add_patches_to_mpl_axe(patches=patches, ax=ax, wcs=wcs, **kw_mpl_pathpatch) | unknown | codeparrot/codeparrot-clean | ||
# promoteUsedTemporaries
## File
`src/ReactiveScopes/PromoteUsedTemporaries.ts`
## Purpose
This pass promotes temporary variables (identifiers with no name) to named variables when they need to be referenced across scope boundaries or in code generation. Temporaries are intermediate values that the compiler creates during lowering; they are typically inlined at their use sites during codegen. However, some temporaries must be emitted as separate declarations - this pass identifies and names them.
The pass ensures that:
1. Scope dependencies and declarations have proper names for codegen
2. Variables referenced across reactive scope boundaries are named
3. JSX tag identifiers get special naming (`T0`, `T1`, etc.)
4. Temporaries with interposing side-effects are promoted to preserve ordering
## Input Invariants
- The ReactiveFunction has undergone scope construction and dependency propagation
- Identifiers may have `name === null` (temporaries) or be named
- Scopes have `dependencies`, `declarations`, and `reassignments` populated
- Pruned scopes are properly marked with `kind: 'pruned-scope'`
## Output Guarantees
- All scope dependencies have non-null names
- All scope declarations have non-null names
- JSX tag temporaries use uppercase naming (`#T{id}`, rendered as `T0`, `T1`, ... after `renameVariables`)
- Regular temporaries use lowercase naming (`#t{id}`, rendered as `t0`, `t1`, ... after `renameVariables`)
- All instances of a promoted identifier share the same name (via DeclarationId tracking)
- Temporaries with interposing mutating instructions are promoted to preserve source ordering
## Algorithm
The pass operates in four phases using visitor classes:
### Phase 1: CollectPromotableTemporaries
Collects information about which temporaries may need promotion:
```typescript
class CollectPromotableTemporaries {
// Tracks pruned scope declarations and whether they're used outside their scope
pruned: Map<DeclarationId, {activeScopes: Array<ScopeId>; usedOutsideScope: boolean}>
// Tracks identifiers used as JSX tags (need uppercase names)
tags: Set<DeclarationId>
}
```
- When visiting a `JsxExpression`, adds the tag identifier to `tags`
- When visiting a `PrunedScope`, records its declarations
- Tracks when pruned declarations are used in different scopes
### Phase 2: PromoteTemporaries
Promotes temporaries that appear in positions requiring names:
```typescript
override visitScope(scopeBlock: ReactiveScopeBlock, state: State): void {
// Promote all dependencies without names
for (const dep of scopeBlock.scope.dependencies) {
if (identifier.name == null) {
promoteIdentifier(identifier, state);
}
}
// Promote all declarations without names
for (const [, declaration] of scopeBlock.scope.declarations) {
if (declaration.identifier.name == null) {
promoteIdentifier(declaration.identifier, state);
}
}
}
```
Also promotes:
- Function parameters without names
- Pruned scope declarations used outside their scope
### Phase 3: PromoteInterposedTemporaries
Handles ordering-sensitive promotion:
```typescript
class PromoteInterposedTemporaries {
// Instructions that emit as statements can interpose between temp defs and uses
// If such an instruction occurs, mark pending temporaries as needing promotion
override visitInstruction(instruction: ReactiveInstruction, state: InterState): void {
// For instructions that become statements (calls, stores, etc.):
if (willBeStatement && !constStore) {
// Mark all pending temporaries as needing promotion
for (const [key, [ident, _]] of state.entries()) {
state.set(key, [ident, true]); // Mark as needing promotion
}
}
}
}
```
This preserves source ordering when side-effects occur between a temporary's definition and use.
### Phase 4: PromoteAllInstancesOfPromotedTemporaries
Ensures all instances of a promoted identifier share the same name:
```typescript
class PromoteAllInstancesOfPromotedTemporaries {
override visitPlace(_id: InstructionId, place: Place, state: State): void {
if (place.identifier.name === null &&
state.promoted.has(place.identifier.declarationId)) {
promoteIdentifier(place.identifier, state);
}
}
}
```
### Naming Convention
```typescript
function promoteIdentifier(identifier: Identifier, state: State): void {
if (state.tags.has(identifier.declarationId)) {
promoteTemporaryJsxTag(identifier); // Uses #T{id} for JSX tags
} else {
promoteTemporary(identifier); // Uses #t{id} for regular temps
}
state.promoted.add(identifier.declarationId);
}
```
## Edge Cases
### JSX Tag Temporaries
JSX tags require uppercase names to be valid JSX syntax. The pass tracks which temporaries are used as JSX tags and uses `T0`, `T1`, etc. instead of `t0`, `t1`.
### Pruned Scope Declarations
Declarations in pruned scopes are only promoted if they're actually used outside the pruned scope, avoiding unnecessary variable declarations.
### Const vs Let Temporaries
The pass tracks const identifiers specially - they don't need promotion for ordering purposes since they can't be mutated by interposing instructions.
### Global Loads
Values loaded from globals (and their property loads) are treated as const-like for promotion purposes.
### Method Call Properties
The property identifier in a method call is treated as const-like to avoid unnecessary promotion.
## TODOs
None in the source file.
## Example
### Fixture: `simple.js`
**Input:**
```javascript
export default function foo(x, y) {
if (x) {
return foo(false, y);
}
return [y * 10];
}
```
**Before PromoteUsedTemporaries:**
```
scope @0 [...] dependencies=[y$14] declarations=[$19_@0]
scope @1 [...] dependencies=[$22] declarations=[$23_@1]
```
**After PromoteUsedTemporaries:**
```
scope @0 [...] dependencies=[y$14] declarations=[#t5$19_@0]
scope @1 [...] dependencies=[#t9$22] declarations=[#t10$23_@1]
```
Key observations:
- `$19_@0` is promoted to `#t5$19_@0` because it's a scope declaration
- `$22` is promoted to `#t9$22` because it's a scope dependency
- `$23_@1` is promoted to `#t10$23_@1` because it's a scope declaration
- The `#t` prefix indicates this is a promoted temporary (later renamed by `renameVariables`)
**Generated Code:**
```javascript
import { c as _c } from "react/compiler-runtime";
export default function foo(x, y) {
const $ = _c(4);
if (x) {
let t0;
if ($[0] !== y) {
t0 = foo(false, y);
$[0] = y;
$[1] = t0;
} else {
t0 = $[1];
}
return t0;
}
const t0 = y * 10;
let t1;
if ($[2] !== t0) {
t1 = [t0];
$[2] = t0;
$[3] = t1;
} else {
t1 = $[3];
}
return t1;
}
```
The promoted temporaries (`#t5`, `#t9`, `#t10`) become the named variables (`t0`, `t1`) in the output after `renameVariables` runs. | unknown | github | https://github.com/facebook/react | compiler/packages/babel-plugin-react-compiler/docs/passes/29-promoteUsedTemporaries.md |
//===--- IndexAction.h - Run the indexer as a frontend action ----*- C++-*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CLANG_TOOLS_EXTRA_CLANGD_INDEX_INDEXACTION_H
#define LLVM_CLANG_TOOLS_EXTRA_CLANGD_INDEX_INDEXACTION_H
#include "index/SymbolCollector.h"
#include "clang/Frontend/FrontendAction.h"
namespace clang {
namespace clangd {
struct IndexFileIn;
// Creates an action that indexes translation units and delivers the results
// for IndexContentsCallback (each call corresponds to one TU).
//
// Only a subset of SymbolCollector::Options are respected:
// - include paths are always collected, and canonicalized appropriately
// - references are always counted
// - all references are collected (if RefsCallback is non-null)
// - the symbol origin is set to Static if not specified by caller
std::unique_ptr<FrontendAction> createStaticIndexingAction(
SymbolCollector::Options Opts,
std::function<void(IndexFileIn)> IndexContentsCallback);
} // namespace clangd
} // namespace clang
#endif | c | github | https://github.com/llvm/llvm-project | clang-tools-extra/clangd/index/IndexAction.h |
<option value="{{ widget.value }}"{% include "django/forms/widgets/attrs.html" %}>{{ widget.label }}</option> | html | github | https://github.com/django/django | django/forms/jinja2/django/forms/widgets/select_option.html |
# fly ArduCopter in SITL
# Flight mode switch positions are set-up in arducopter.param to be
# switch 1 = Circle
# switch 2 = Land
# switch 3 = RTL
# switch 4 = Auto
# switch 5 = Loiter
# switch 6 = Stabilize
import util, pexpect, sys, time, math, shutil, os
from common import *
from pymavlink import mavutil, mavwp
import random
# get location of scripts
testdir=os.path.dirname(os.path.realpath(__file__))
FRAME='+'
TARGET='sitl'
HOME=mavutil.location(-35.362938,149.165085,584,270)
AVCHOME=mavutil.location(40.072842,-105.230575,1586,0)
homeloc = None
num_wp = 0
speedup_default = 5
def hover(mavproxy, mav, hover_throttle=1450):
    '''Hold a rough hover by setting RC channel 3 (throttle).

    mavproxy       -- MAVProxy connection used to send the RC override
    mav            -- mavlink connection (unused; kept for call-site symmetry)
    hover_throttle -- PWM value that approximately holds altitude
    Always returns True.
    '''
    throttle_cmd = 'rc 3 %u\n' % hover_throttle
    mavproxy.send(throttle_cmd)
    return True
def arm_motors(mavproxy, mav):
    '''Arm the motors via the RC stick gesture; returns True once armed.'''
    print("Arming motors")
    mavproxy.send('switch 6\n') # stabilize mode
    wait_mode(mav, 'STABILIZE')
    # Throttle low + full right rudder is the stick gesture that arms.
    mavproxy.send('rc 3 1000\n')
    mavproxy.send('rc 4 2000\n')
    # Wait for the autopilot to acknowledge arming before recentering yaw.
    mavproxy.expect('APM: ARMING MOTORS')
    mavproxy.send('rc 4 1500\n')
    # Block until the heartbeat reports the motors as armed.
    mav.motors_armed_wait()
    print("MOTORS ARMED OK")
    return True
def disarm_motors(mavproxy, mav):
    '''Disarm the motors via the RC stick gesture; returns True once disarmed.'''
    print("Disarming motors")
    mavproxy.send('switch 6\n') # stabilize mode
    wait_mode(mav, 'STABILIZE')
    # Throttle low + full left rudder is the stick gesture that disarms.
    mavproxy.send('rc 3 1000\n')
    mavproxy.send('rc 4 1000\n')
    # Wait for the autopilot to acknowledge before recentering yaw.
    mavproxy.expect('APM: DISARMING MOTORS')
    mavproxy.send('rc 4 1500\n')
    # Block until the heartbeat reports the motors as disarmed.
    mav.motors_disarmed_wait()
    print("MOTORS DISARMED OK")
    return True
def takeoff(mavproxy, mav, alt_min = 30, takeoff_throttle=1700):
    '''Climb in STABILIZE until at least alt_min metres, then hover.

    takeoff_throttle is the climb PWM; on completion throttle is reduced
    to the hover value.  Always returns True.'''
    mavproxy.send('switch 6\n') # stabilize mode
    wait_mode(mav, 'STABILIZE')
    mavproxy.send('rc 3 %u\n' % takeoff_throttle)
    m = mav.recv_match(type='VFR_HUD', blocking=True)
    # Only wait for the climb if we are still below the target altitude.
    if (m.alt < alt_min):
        wait_altitude(mav, alt_min, (alt_min + 5))
    # Settle at roughly altitude-holding throttle.
    hover(mavproxy, mav)
    print("TAKEOFF COMPLETE")
    return True
# loiter - fly south west, then hold loiter within 5m position and altitude
def loiter(mavproxy, mav, holdtime=10, maxaltchange=5, maxdistchange=5):
    '''Fly south-east away from the start point, then hold LOITER for
    `holdtime` (sim) seconds.  Fails if position drifts more than
    `maxdistchange` metres or altitude more than `maxaltchange` metres.'''
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    # first aim south east
    print("turn south east")
    mavproxy.send('rc 4 1580\n')
    if not wait_heading(mav, 170):
        return False
    mavproxy.send('rc 4 1500\n')
    #fly south east 50m
    mavproxy.send('rc 2 1100\n')
    if not wait_distance(mav, 50):
        return False
    mavproxy.send('rc 2 1500\n')
    # wait for copter to slow moving
    if not wait_groundspeed(mav, 0, 2):
        return False
    success = True
    # Record the reference altitude/position that the hold is judged against.
    m = mav.recv_match(type='VFR_HUD', blocking=True)
    start_altitude = m.alt
    start = mav.location()
    tstart = get_sim_time(mav)
    tholdstart = get_sim_time(mav)  # NOTE(review): unused; kept for parity with similar tests
    print("Holding loiter at %u meters for %u seconds" % (start_altitude, holdtime))
    while get_sim_time(mav) < tstart + holdtime:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        pos = mav.location()
        delta = get_distance(start, pos)
        alt_delta = math.fabs(m.alt - start_altitude)
        print("Loiter Dist: %.2fm, alt:%u" % (delta, m.alt))
        if alt_delta > maxaltchange:
            print("Loiter alt shifted %u meters (> limit of %u)" % (alt_delta, maxaltchange))
            success = False
        if delta > maxdistchange:
            print("Loiter shifted %u meters (> limit of %u)" % (delta, maxdistchange))
            success = False
    if success:
        print("Loiter OK for %u seconds" % holdtime)
    else:
        print("Loiter FAILED")
    return success
def change_alt(mavproxy, mav, alt_min, climb_throttle=1920, descend_throttle=1080):
    '''Climb or descend to roughly `alt_min` metres, then hover.

    Climbs when below the target; otherwise descends until just below it.
    Always returns True.'''
    m = mav.recv_match(type='VFR_HUD', blocking=True)
    if(m.alt < alt_min):
        print("Rise to alt:%u from %u" % (alt_min, m.alt))
        mavproxy.send('rc 3 %u\n' % climb_throttle)
        wait_altitude(mav, alt_min, (alt_min + 5))
    else:
        print("Lower to alt:%u from %u" % (alt_min, m.alt))
        mavproxy.send('rc 3 %u\n' % descend_throttle)
        wait_altitude(mav, (alt_min -5), alt_min)
    # Settle at roughly altitude-holding throttle.
    hover(mavproxy, mav)
    return True
# fly a square in stabilize mode
def fly_square(mavproxy, mav, side=50, timeout=300):
    '''Fly a square (north, east, south, west legs of `side` metres) in
    STABILIZE mode, saving a waypoint at each corner, then descend to
    10m in LOITER.  Returns True when every leg and the descent succeed.
    '''
    tstart = get_sim_time(mav)
    success = True

    # ensure all sticks in the middle
    mavproxy.send('rc 1 1500\n')
    mavproxy.send('rc 2 1500\n')
    mavproxy.send('rc 3 1500\n')
    mavproxy.send('rc 4 1500\n')

    # switch to loiter mode temporarily to stop us from rising
    mavproxy.send('switch 5\n')
    wait_mode(mav, 'LOITER')

    # first aim north
    print("turn right towards north")
    mavproxy.send('rc 4 1580\n')
    if not wait_heading(mav, 10):
        print("Failed to reach heading")
        success = False
    mavproxy.send('rc 4 1500\n')
    # wait for the yaw-stick release to be registered before saving WPs
    mav.recv_match(condition='RC_CHANNELS_RAW.chan4_raw==1500', blocking=True)

    # save bottom left corner of box as waypoint
    print("Save WP 1 & 2")
    save_wp(mavproxy, mav)

    # switch back to stabilize mode
    mavproxy.send('rc 3 1430\n')
    mavproxy.send('switch 6\n')
    wait_mode(mav, 'STABILIZE')

    # pitch forward to fly north
    print("Going north %u meters" % side)
    mavproxy.send('rc 2 1300\n')
    if not wait_distance(mav, side):
        # BUGFIX: was `print("...") % side`, which applies % to print()'s
        # return value and raises TypeError on Python 3.
        print("Failed to reach distance of %u" % side)
        success = False
    mavproxy.send('rc 2 1500\n')

    # save top left corner of square as waypoint
    print("Save WP 3")
    save_wp(mavproxy, mav)

    # roll right to fly east
    print("Going east %u meters" % side)
    mavproxy.send('rc 1 1700\n')
    if not wait_distance(mav, side):
        print("Failed to reach distance of %u" % side)
        success = False
    mavproxy.send('rc 1 1500\n')

    # save top right corner of square as waypoint
    print("Save WP 4")
    save_wp(mavproxy, mav)

    # pitch back to fly south
    print("Going south %u meters" % side)
    mavproxy.send('rc 2 1700\n')
    if not wait_distance(mav, side):
        print("Failed to reach distance of %u" % side)
        success = False
    mavproxy.send('rc 2 1500\n')

    # save bottom right corner of square as waypoint
    print("Save WP 5")
    save_wp(mavproxy, mav)

    # roll left to fly west
    print("Going west %u meters" % side)
    mavproxy.send('rc 1 1300\n')
    if not wait_distance(mav, side):
        print("Failed to reach distance of %u" % side)
        success = False
    mavproxy.send('rc 1 1500\n')

    # save bottom left corner of square (should be near home) as waypoint
    print("Save WP 6")
    save_wp(mavproxy, mav)

    # descend to 10m
    print("Descend to 10m in Loiter")
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    mavproxy.send('rc 3 1300\n')
    # allow at least 20 seconds for the descent regardless of time spent
    time_left = timeout - (get_sim_time(mav) - tstart)
    print("timeleft = %u" % time_left)
    if time_left < 20:
        time_left = 20
    if not wait_altitude(mav, -10, 10, time_left):
        print("Failed to reach alt of 10m")
        success = False
    save_wp(mavproxy, mav)

    return success
def fly_RTL(mavproxy, mav, side=60, timeout=250):
    '''Switch to RTL and wait until the copter has returned and landed.

    Success means altitude <= 1m while within 10m of HOME before
    `timeout` (sim) seconds elapse.  `side` is unused; kept for
    signature parity with the other flight tests.'''
    print("# Enter RTL")
    mavproxy.send('switch 3\n')
    deadline = get_sim_time(mav) + timeout
    while get_sim_time(mav) < deadline:
        hud = mav.recv_match(type='VFR_HUD', blocking=True)
        dist_home = get_distance(HOME, mav.location())
        print("Alt: %u HomeDistance: %.0f" % (hud.alt, dist_home))
        if hud.alt <= 1 and dist_home < 10:
            return True
    return False
def fly_throttle_failsafe(mavproxy, mav, side=60, timeout=180):
    '''Fly east `side` metres, trigger the throttle failsafe by pulling
    throttle to 900, and verify the copter returns home and lands within
    `timeout` (sim) seconds.'''
    # switch to loiter mode temporarily to stop us from rising
    mavproxy.send('switch 5\n')
    wait_mode(mav, 'LOITER')
    # first aim east
    print("turn east")
    mavproxy.send('rc 4 1580\n')
    if not wait_heading(mav, 135):
        return False
    mavproxy.send('rc 4 1500\n')
    # switch to stabilize mode
    mavproxy.send('switch 6\n')
    wait_mode(mav, 'STABILIZE')
    hover(mavproxy, mav)
    failed = False
    # fly east 60 meters
    print("# Going forward %u meters" % side)
    mavproxy.send('rc 2 1350\n')
    if not wait_distance(mav, side, 5, 60):
        # NOTE(review): `failed` is set but never consulted afterwards;
        # the test can still return True when this leg fails.
        failed = True
    mavproxy.send('rc 2 1500\n')
    # pull throttle low
    print("# Enter Failsafe")
    mavproxy.send('rc 3 900\n')
    tstart = get_sim_time(mav)
    while get_sim_time(mav) < tstart + timeout:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        pos = mav.location()
        home_distance = get_distance(HOME, pos)
        print("Alt: %u HomeDistance: %.0f" % (m.alt, home_distance))
        # check if we've reached home
        if m.alt <= 1 and home_distance < 10:
            # reduce throttle
            mavproxy.send('rc 3 1100\n')
            # switch back to stabilize
            mavproxy.send('switch 2\n') # land mode
            wait_mode(mav, 'LAND')
            mavproxy.send('switch 6\n') # stabilize mode
            wait_mode(mav, 'STABILIZE')
            print("Reached failsafe home OK")
            return True
    print("Failed to land on failsafe RTL - timed out after %u seconds" % timeout)
    # reduce throttle
    mavproxy.send('rc 3 1100\n')
    # switch back to stabilize mode
    mavproxy.send('switch 2\n') # land mode
    wait_mode(mav, 'LAND')
    mavproxy.send('switch 6\n') # stabilize mode
    wait_mode(mav, 'STABILIZE')
    return False
def fly_battery_failsafe(mavproxy, mav, timeout=30):
    '''Trigger the low-battery failsafe and verify the copter enters LAND.

    `timeout` is unused; kept for signature parity with the other tests.'''
    # Hold position so the only mode change comes from the failsafe itself.
    mavproxy.send('switch 5\n')
    wait_mode(mav, 'LOITER')
    mavproxy.send("rc 3 1500\n")
    # Arm the failsafe, then simulate a collapsed battery voltage.
    mavproxy.send("param set FS_BATT_ENABLE 1\n")
    mavproxy.send('param set SIM_BATT_VOLTAGE 10\n')
    # The failsafe should force a switch into LAND mode.
    success = wait_mode(mav, 'LAND') == 'LAND'
    # Disarm the failsafe so later tests are unaffected.
    mavproxy.send('param set FS_BATT_ENABLE 0\n')
    if success:
        print("Successfully entered LAND mode after battery failsafe")
    else:
        print("Failed to enter LAND mode after battery failsafe")
    return success
# fly_stability_patch - fly south, then hold loiter within 5m position and altitude and reduce 1 motor to 60% efficiency
def fly_stability_patch(mavproxy, mav, holdtime=30, maxaltchange=5, maxdistchange=10):
    '''Fly 80m away, hold LOITER, then cut motor 1 to 55% efficiency and
    verify the position/altitude hold stays within the given limits for
    `holdtime` (sim) seconds.'''
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    # first south
    print("turn south")
    mavproxy.send('rc 4 1580\n')
    if not wait_heading(mav, 180):
        return False
    mavproxy.send('rc 4 1500\n')
    #fly west 80m
    mavproxy.send('rc 2 1100\n')
    if not wait_distance(mav, 80):
        return False
    mavproxy.send('rc 2 1500\n')
    # wait for copter to slow moving
    if not wait_groundspeed(mav, 0, 2):
        return False
    success = True
    # Record the reference altitude/position that the hold is judged against.
    m = mav.recv_match(type='VFR_HUD', blocking=True)
    start_altitude = m.alt
    start = mav.location()
    tstart = get_sim_time(mav)
    tholdstart = get_sim_time(mav)  # NOTE(review): unused; kept for parity with similar tests
    print("Holding loiter at %u meters for %u seconds" % (start_altitude, holdtime))
    # cut motor 1 to 55% efficiency
    print("Cutting motor 1 to 55% efficiency")
    mavproxy.send('param set SIM_ENGINE_MUL 0.55\n')
    while get_sim_time(mav) < tstart + holdtime:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        pos = mav.location()
        delta = get_distance(start, pos)
        alt_delta = math.fabs(m.alt - start_altitude)
        print("Loiter Dist: %.2fm, alt:%u" % (delta, m.alt))
        if alt_delta > maxaltchange:
            print("Loiter alt shifted %u meters (> limit of %u)" % (alt_delta, maxaltchange))
            success = False
        if delta > maxdistchange:
            print("Loiter shifted %u meters (> limit of %u)" % (delta, maxdistchange))
            success = False
    # restore motor 1 to 100% efficiency
    mavproxy.send('param set SIM_ENGINE_MUL 1.0\n')
    if success:
        print("Stability patch and Loiter OK for %u seconds" % holdtime)
    else:
        print("Stability Patch FAILED")
    return success
# fly_fence_test - fly east until you hit the horizontal circular fence
def fly_fence_test(mavproxy, mav, timeout=180):
    '''Enable the geofence, fly east until the fence triggers, and verify
    the copter returns home and lands within `timeout` (sim) seconds.'''
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    # enable fence
    mavproxy.send('param set FENCE_ENABLE 1\n')
    # first east
    print("turn east")
    mavproxy.send('rc 4 1580\n')
    if not wait_heading(mav, 160):
        return False
    mavproxy.send('rc 4 1500\n')
    # fly forward (east) at least 20m
    pitching_forward = True
    mavproxy.send('rc 2 1100\n')
    if not wait_distance(mav, 20):
        return False
    # start timer
    tstart = get_sim_time(mav)
    while get_sim_time(mav) < tstart + timeout:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        pos = mav.location()
        home_distance = get_distance(HOME, pos)
        print("Alt: %u HomeDistance: %.0f" % (m.alt, home_distance))
        # recenter pitch sticks once we reach home so we don't fly off again
        if pitching_forward and home_distance < 10 :
            pitching_forward = False
            mavproxy.send('rc 2 1500\n')
            # disable fence
            mavproxy.send('param set FENCE_ENABLE 0\n')
        if m.alt <= 1 and home_distance < 10:
            # reduce throttle
            mavproxy.send('rc 3 1000\n')
            # switch mode to stabilize
            mavproxy.send('switch 2\n') # land mode
            wait_mode(mav, 'LAND')
            mavproxy.send('switch 6\n') # stabilize mode
            wait_mode(mav, 'STABILIZE')
            print("Reached home OK")
            return True
    # disable fence
    mavproxy.send('param set FENCE_ENABLE 0\n')
    # reduce throttle
    mavproxy.send('rc 3 1000\n')
    # switch mode to stabilize
    mavproxy.send('switch 2\n') # land mode
    wait_mode(mav, 'LAND')
    mavproxy.send('switch 6\n') # stabilize mode
    wait_mode(mav, 'STABILIZE')
    print("Fence test failed to reach home - timed out after %u seconds" % timeout)
    return False
def show_gps_and_sim_positions(mavproxy, on_off):
    '''Toggle the simulator map display of GPS and true (sim) positions.

    mavproxy -- MAVProxy connection used to send the map commands
    on_off   -- truthy to show both markers, falsy to hide them
    '''
    # The original compared `on_off == True`, which only matched the
    # literal True/1; test truthiness so any truthy value enables display.
    value = 1 if on_off else 0
    mavproxy.send('map set showgpspos %d\n' % value)
    mavproxy.send('map set showsimpos %d\n' % value)
# fly_gps_glitch_loiter_test - fly south east in loiter and test reaction to gps glitch
def fly_gps_glitch_loiter_test(mavproxy, mav, timeout=30, max_distance=20):
    '''Hold LOITER while injecting a sequence of simulated GPS glitches
    and verify the copter's true (sim) position stays within
    `max_distance` metres over `timeout` (sim) seconds.'''
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    # turn on simulator display of gps and actual position
    show_gps_and_sim_positions(mavproxy, True)
    # set-up gps glitch array (lat/lon offsets in degrees)
    glitch_lat = [0.0002996,0.0006958,0.0009431,0.0009991,0.0009444,0.0007716,0.0006221]
    glitch_lon = [0.0000717,0.0000912,0.0002761,0.0002626,0.0002807,0.0002049,0.0001304]
    glitch_num = len(glitch_lat)
    print("GPS Glitches:")
    for i in range(1,glitch_num):
        print("glitch %d %.7f %.7f" % (i,glitch_lat[i],glitch_lon[i]))
    # turn south east
    print("turn south east")
    mavproxy.send('rc 4 1580\n')
    if not wait_heading(mav, 150):
        show_gps_and_sim_positions(mavproxy, False)
        return False
    mavproxy.send('rc 4 1500\n')
    # fly forward (south east) at least 60m
    mavproxy.send('rc 2 1100\n')
    if not wait_distance(mav, 60):
        show_gps_and_sim_positions(mavproxy, False)
        return False
    mavproxy.send('rc 2 1500\n')
    # wait for copter to slow down
    if not wait_groundspeed(mav, 0, 1):
        show_gps_and_sim_positions(mavproxy, False)
        return False
    # record time and position
    tstart = get_sim_time(mav)
    tnow = tstart
    start_pos = sim_location(mav)
    success = True
    # initialise current glitch (-1 means glitching has finished)
    glitch_current = 0;
    print("Apply first glitch")
    mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
    mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
    # record position for 30 seconds
    while tnow < tstart + timeout:
        tnow = get_sim_time(mav)
        # step through the glitch table at roughly 2.2 entries per second
        desired_glitch_num = int((tnow - tstart) * 2.2)
        if desired_glitch_num > glitch_current and glitch_current != -1:
            glitch_current = desired_glitch_num
            # turn off glitching if we've reached the end of the glitch list
            if glitch_current >= glitch_num:
                glitch_current = -1
                print("Completed Glitches")
                mavproxy.send('param set SIM_GPS_GLITCH_X 0\n')
                mavproxy.send('param set SIM_GPS_GLITCH_Y 0\n')
            else:
                print("Applying glitch %u" % glitch_current)
                #move onto the next glitch
                mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
                mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
        # start displaying distance moved after all glitches applied
        if (glitch_current == -1):
            m = mav.recv_match(type='VFR_HUD', blocking=True)
            curr_pos = sim_location(mav)
            moved_distance = get_distance(curr_pos, start_pos)
            print("Alt: %u  Moved: %.0f" % (m.alt, moved_distance))
            if moved_distance > max_distance:
                print("Moved over %u meters, Failed!" % max_distance)
                success = False
    # disable gps glitch
    if glitch_current != -1:
        glitch_current = -1
        mavproxy.send('param set SIM_GPS_GLITCH_X 0\n')
        mavproxy.send('param set SIM_GPS_GLITCH_Y 0\n')
    show_gps_and_sim_positions(mavproxy, False)
    if success:
        print("GPS glitch test passed!  stayed within %u meters for %u seconds" % (max_distance, timeout))
    else:
        print("GPS glitch test FAILED!")
    return success
# fly_gps_glitch_auto_test - fly mission and test reaction to gps glitch
def fly_gps_glitch_auto_test(mavproxy, mav, timeout=30, max_distance=100):
    '''Fly the copter_glitch_mission in AUTO while injecting simulated GPS
    glitches, then verify the mission completes and the copter arrives
    back within 5m of home.  Returns the wait_waypoint() result.'''
    # set-up gps glitch array (lat/lon offsets in degrees)
    glitch_lat = [0.0002996,0.0006958,0.0009431,0.0009991,0.0009444,0.0007716,0.0006221]
    glitch_lon = [0.0000717,0.0000912,0.0002761,0.0002626,0.0002807,0.0002049,0.0001304]
    glitch_num = len(glitch_lat)
    print("GPS Glitches:")
    for i in range(1,glitch_num):
        print("glitch %d %.7f %.7f" % (i,glitch_lat[i],glitch_lon[i]))
    # Fly mission #1
    print("# Load copter_glitch_mission")
    if not load_mission_from_file(mavproxy, mav, os.path.join(testdir, "copter_glitch_mission.txt")):
        print("load copter_glitch_mission failed")
        return False
    # turn on simulator display of gps and actual position
    show_gps_and_sim_positions(mavproxy, True)
    # load the waypoint count
    global homeloc
    global num_wp
    print("test: Fly a mission from 1 to %u" % num_wp)
    mavproxy.send('wp set 1\n')
    # switch into AUTO mode and raise throttle
    mavproxy.send('switch 4\n') # auto mode
    wait_mode(mav, 'AUTO')
    mavproxy.send('rc 3 1500\n')
    # wait until 100m from home
    if not wait_distance(mav, 100, 5, 60):
        show_gps_and_sim_positions(mavproxy, False)
        return False
    # record time and position
    tstart = get_sim_time(mav)
    tnow = tstart
    start_pos = sim_location(mav)
    # initialise current glitch
    glitch_current = 0;
    print("Apply first glitch")
    mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
    mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
    # record position for 30 seconds
    while glitch_current < glitch_num:
        tnow = get_sim_time(mav)
        # step through the glitch table at roughly 2 entries per second
        desired_glitch_num = int((tnow - tstart) * 2)
        if desired_glitch_num > glitch_current and glitch_current != -1:
            glitch_current = desired_glitch_num
            # apply next glitch
            if glitch_current < glitch_num:
                print("Applying glitch %u" % glitch_current)
                mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
                mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
    # turn off glitching
    print("Completed Glitches")
    mavproxy.send('param set SIM_GPS_GLITCH_X 0\n')
    mavproxy.send('param set SIM_GPS_GLITCH_Y 0\n')
    # continue with the mission
    ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
    # wait for arrival back home
    m = mav.recv_match(type='VFR_HUD', blocking=True)
    pos = mav.location()
    dist_to_home = get_distance(HOME, pos)
    while dist_to_home > 5:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        pos = mav.location()
        dist_to_home = get_distance(HOME, pos)
        print("Dist from home: %u" % dist_to_home)
    # turn off simulator display of gps and actual position
    show_gps_and_sim_positions(mavproxy, False)
    print("GPS Glitch test Auto completed: passed=%s" % ret)
    return ret
#fly_simple - assumes the simple bearing is initialised to be directly north
# flies a box with 100m west, 15 seconds north, 50 seconds east, 15 seconds south
def fly_simple(mavproxy, mav, side=50, timeout=120):
    '''Fly a box with SIMPLE mode enabled for all flight modes:
    `side` m on one axis, 8 s on the other, then `side`/2 m back and
    8 s back.  Restores SIMPLE to 0 and hovers before returning.'''
    failed = False
    # hold position in loiter
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    #set SIMPLE mode for all flight modes
    mavproxy.send('param set SIMPLE 63\n')
    # switch to stabilize mode
    mavproxy.send('switch 6\n')
    wait_mode(mav, 'STABILIZE')
    mavproxy.send('rc 3 1430\n')
    # fly south 50m
    print("# Flying south %u meters" % side)
    mavproxy.send('rc 1 1300\n')
    if not wait_distance(mav, side, 5, 60):
        failed = True
    mavproxy.send('rc 1 1500\n')
    # fly west 8 seconds
    print("# Flying west for 8 seconds")
    mavproxy.send('rc 2 1300\n')
    tstart = get_sim_time(mav)
    while get_sim_time(mav) < (tstart + 8):
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        delta = (get_sim_time(mav) - tstart)
        #print("%u" % delta)
    mavproxy.send('rc 2 1500\n')
    # fly north 25 meters
    print("# Flying north %u meters" % (side/2.0))
    mavproxy.send('rc 1 1700\n')
    if not wait_distance(mav, side/2, 5, 60):
        failed = True
    mavproxy.send('rc 1 1500\n')
    # fly east 8 seconds
    print("# Flying east for 8 seconds")
    mavproxy.send('rc 2 1700\n')
    tstart = get_sim_time(mav)
    while get_sim_time(mav) < (tstart + 8):
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        delta = (get_sim_time(mav) - tstart)
        #print("%u" % delta)
    mavproxy.send('rc 2 1500\n')
    #restore to default
    mavproxy.send('param set SIMPLE 0\n')
    #hover in place
    hover(mavproxy, mav)
    return not failed
#fly_super_simple - flies a circle around home for 45 seconds
def fly_super_simple(mavproxy, mav, timeout=45):
    """Exercise SUPER_SIMPLE mode.

    Flies forward 20 m from the loiter point, enables SUPER_SIMPLE for
    all flight modes, then rolls left while yawing slowly for ``timeout``
    seconds (which circles the home point from the pilot's frame).
    Parameters are restored and the copter is left hovering.

    Returns True if the initial forward leg succeeded, False otherwise.
    """
    failed = False
    # hold position in loiter
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    # fly forward 20m
    print("# Flying forward 20 meters")
    mavproxy.send('rc 2 1300\n')
    if not wait_distance(mav, 20, 5, 60):
        failed = True
    mavproxy.send('rc 2 1500\n')
    #set SUPER SIMPLE mode for all flight modes
    mavproxy.send('param set SUPER_SIMPLE 63\n')
    # switch to stabilize mode
    mavproxy.send('switch 6\n')
    wait_mode(mav, 'STABILIZE')
    mavproxy.send('rc 3 1430\n')
    # start copter yawing slowly
    mavproxy.send('rc 4 1550\n')
    # roll left for timeout seconds
    print("# rolling left from pilot's point of view for %u seconds" % timeout)
    mavproxy.send('rc 1 1300\n')
    tstart = get_sim_time(mav)
    while get_sim_time(mav) < (tstart + timeout):
        # recv_match paces the loop on telemetry while sim time advances
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        delta = (get_sim_time(mav) - tstart)
    # stop rolling and yawing
    mavproxy.send('rc 1 1500\n')
    mavproxy.send('rc 4 1500\n')
    #restore simple mode parameters to default
    mavproxy.send('param set SUPER_SIMPLE 0\n')
    #hover in place
    hover(mavproxy, mav)
    return not failed
#fly_circle - flies a circle (CIRCLE_RADIUS is set to 3000cm = 30m)
def fly_circle(mavproxy, mav, maxaltchange=10, holdtime=36):
    """Exercise CIRCLE mode.

    Turns to face west, sets CIRCLE_RADIUS to 3000 cm (30 m), flies east
    at least 100 m, engages CIRCLE mode and watches the heading for
    ``holdtime`` seconds.

    Returns True on completion, False if the turn or the forward leg
    times out.
    NOTE(review): ``maxaltchange`` is accepted but never used, and
    ``tholdstart`` below is assigned but never read — no altitude check
    is actually performed.
    """
    # hold position in loiter
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    # face west
    print("turn west")
    mavproxy.send('rc 4 1580\n')
    if not wait_heading(mav, 270):
        return False
    mavproxy.send('rc 4 1500\n')
    #set CIRCLE radius
    mavproxy.send('param set CIRCLE_RADIUS 3000\n')
    # fly forward (east) at least 100m
    mavproxy.send('rc 2 1100\n')
    if not wait_distance(mav, 100):
        return False
    # return pitch stick back to middle
    mavproxy.send('rc 2 1500\n')
    # set CIRCLE mode
    mavproxy.send('switch 1\n') # circle mode
    wait_mode(mav, 'CIRCLE')
    # wait
    m = mav.recv_match(type='VFR_HUD', blocking=True)
    start_altitude = m.alt
    tstart = get_sim_time(mav)
    tholdstart = get_sim_time(mav)
    print("Circle at %u meters for %u seconds" % (start_altitude, holdtime))
    while get_sim_time(mav) < tstart + holdtime:
        m = mav.recv_match(type='VFR_HUD', blocking=True)
        print("heading %u" % m.heading)
    print("CIRCLE OK for %u seconds" % holdtime)
    return True
# fly_auto_test - fly mission which tests a significant number of commands
def fly_auto_test(mavproxy, mav):
    """Load copter_mission.txt and fly it in AUTO mode.

    The mission is expected to disarm the motors at its end; this
    function waits for the disarm before returning.
    Returns the result of wait_waypoint (False if the mission could not
    be loaded).
    """
    # Fly mission #1
    print("# Load copter_mission")
    if not load_mission_from_file(mavproxy, mav, os.path.join(testdir, "copter_mission.txt")):
        print("load copter_mission failed")
        return False
    # load the waypoint count
    global homeloc
    global num_wp
    print("test: Fly a mission from 1 to %u" % num_wp)
    mavproxy.send('wp set 1\n')
    # switch into AUTO mode and raise throttle
    mavproxy.send('switch 4\n') # auto mode
    wait_mode(mav, 'AUTO')
    mavproxy.send('rc 3 1500\n')
    # fly the mission
    ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
    # set throttle to minimum
    mavproxy.send('rc 3 1000\n')
    # wait for disarm
    mav.motors_disarmed_wait()
    print("MOTORS DISARMED OK")
    print("Auto mission completed: passed=%s" % ret)
    return ret
# fly_avc_test - fly AVC mission
def fly_avc_test(mavproxy, mav):
    """Load copter_AVC2013_mission.txt and fly it in AUTO mode.

    Mirrors fly_auto_test() but with the AVC 2013 mission file.
    Returns the result of wait_waypoint (False if the mission could not
    be loaded).
    """
    # upload mission from file
    print("# Load copter_AVC2013_mission")
    if not load_mission_from_file(mavproxy, mav, os.path.join(testdir, "copter_AVC2013_mission.txt")):
        print("load copter_AVC2013_mission failed")
        return False
    # load the waypoint count
    global homeloc
    global num_wp
    print("Fly AVC mission from 1 to %u" % num_wp)
    mavproxy.send('wp set 1\n')
    # switch into AUTO mode and raise throttle
    mavproxy.send('switch 4\n') # auto mode
    wait_mode(mav, 'AUTO')
    mavproxy.send('rc 3 1500\n')
    # fly the mission
    ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
    # set throttle to minimum
    mavproxy.send('rc 3 1000\n')
    # wait for disarm
    mav.motors_disarmed_wait()
    print("MOTORS DISARMED OK")
    print("AVC mission completed: passed=%s" % ret)
    return ret
def land(mavproxy, mav, timeout=60):
    '''land the quad

    Switches to LAND mode and waits until the reported altitude is
    within [-5, 1] metres.  Returns the wait_altitude() result.
    NOTE(review): ``timeout`` is currently unused — wait_altitude is
    called with its own default timeout.
    '''
    print("STARTING LANDING")
    mavproxy.send('switch 2\n') # land mode
    wait_mode(mav, 'LAND')
    print("Entered Landing Mode")
    ret = wait_altitude(mav, -5, 1)
    print("LANDING: ok= %s" % ret)
    return ret
def fly_mission(mavproxy, mav, height_accuracy=-1, target_altitude=None):
    '''fly a mission from a file

    Flies the currently-loaded mission (waypoints 1..num_wp-1) in AUTO,
    then parks in LOITER.  Returns the wait_waypoint() result.
    NOTE(review): ``height_accuracy`` and ``target_altitude`` are
    accepted for API symmetry but never used here.
    '''
    global homeloc
    global num_wp
    print("test: Fly a mission from 1 to %u" % num_wp)
    mavproxy.send('wp set 1\n')
    mavproxy.send('switch 4\n') # auto mode
    wait_mode(mav, 'AUTO')
    ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
    # only expect the completion message if the waypoints were all reached
    expect_msg = "Reached Command #%u" % (num_wp-1)
    if (ret):
        mavproxy.expect(expect_msg)
    print("test: MISSION COMPLETE: passed=%s" % ret)
    # wait here until ready
    mavproxy.send('switch 5\n') # loiter mode
    wait_mode(mav, 'LOITER')
    return ret
def load_mission_from_file(mavproxy, mav, filename):
    '''Load a mission from a file to flight controller

    Uploads the waypoint file via MAVProxy, then re-parses the same file
    locally with MAVWPLoader to refresh the global ``num_wp`` count.
    Always returns True (failures surface as pexpect timeouts).
    NOTE(review): the ``mav`` parameter is unused.
    '''
    global num_wp
    mavproxy.send('wp load %s\n' % filename)
    mavproxy.expect('flight plan received')
    mavproxy.send('wp list\n')
    mavproxy.expect('Requesting [0-9]+ waypoints')
    # update num_wp
    wploader = mavwp.MAVWPLoader()
    wploader.load(filename)
    num_wp = wploader.count()
    return True
def save_mission_to_file(mavproxy, mav, filename):
    '''Save the current on-vehicle mission to ``filename``.

    Updates the global ``num_wp`` from MAVProxy's "Saved N waypoints"
    confirmation.  Always returns True (failures surface as pexpect
    timeouts).
    NOTE(review): the ``mav`` parameter is unused.
    '''
    global num_wp
    mavproxy.send('wp save %s\n' % filename)
    mavproxy.expect('Saved ([0-9]+) waypoints')
    num_wp = int(mavproxy.match.group(1))
    print("num_wp: %d" % num_wp)
    return True
def setup_rc(mavproxy):
    """Initialise RC overrides: centre channels 1-8, then zero the throttle."""
    # Centre every stick/switch channel first...
    for channel in [1, 2, 3, 4, 5, 6, 7, 8]:
        mavproxy.send('rc %u 1500\n' % channel)
    # ...then pull channel 3 (throttle) down to minimum.
    mavproxy.send('rc 3 1000\n')
def fly_ArduCopter(viewerip=None, map=False):
'''fly ArduCopter in SIL
you can pass viewerip as an IP address to optionally send fg and
mavproxy packets too for local viewing of the flight in real time
'''
global homeloc
if TARGET != 'sitl':
util.build_SIL('ArduCopter', target=TARGET)
home = "%f,%f,%u,%u" % (HOME.lat, HOME.lng, HOME.alt, HOME.heading)
sil = util.start_SIL('ArduCopter', wipe=True, model='+', home=home, speedup=speedup_default)
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options='--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter')
mavproxy.expect('Received [0-9]+ parameters')
# setup test parameters
mavproxy.send("param load %s/copter_params.parm\n" % testdir)
mavproxy.expect('Loaded [0-9]+ parameters')
# reboot with new parameters
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sil = util.start_SIL('ArduCopter', model='+', home=home, speedup=speedup_default)
options = '--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter --streamrate=5'
if viewerip:
options += ' --out=%s:14550' % viewerip
if map:
options += ' --map'
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options=options)
mavproxy.expect('Telemetry log: (\S+)')
logfile = mavproxy.match.group(1)
print("LOGFILE %s" % logfile)
buildlog = util.reltopdir("../buildlogs/ArduCopter-test.tlog")
print("buildlog=%s" % buildlog)
copyTLog = False
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
print( "WARN: Failed to create symlink: " + logfile + " => " + buildlog + ", Will copy tlog manually to target location" )
copyTLog = True
# the received parameters can come before or after the ready to fly message
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
util.expect_setup_callback(mavproxy, expect_callback)
expect_list_clear()
expect_list_extend([sil, mavproxy])
# get a mavlink connection going
try:
mav = mavutil.mavlink_connection('127.0.0.1:19550', robust_parsing=True)
except Exception, msg:
print("Failed to start mavlink connection on 127.0.0.1:19550" % msg)
raise
mav.message_hooks.append(message_hook)
mav.idle_hooks.append(idle_hook)
failed = False
failed_test_msg = "None"
try:
mav.wait_heartbeat()
setup_rc(mavproxy)
homeloc = mav.location()
# wait 10sec to allow EKF to settle
wait_seconds(mav, 10)
# Arm
print("# Arm motors")
if not arm_motors(mavproxy, mav):
failed_test_msg = "arm_motors failed"
print(failed_test_msg)
failed = True
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fly a square in Stabilize mode
print("#")
print("########## Fly a square and save WPs with CH7 switch ##########")
print("#")
if not fly_square(mavproxy, mav):
failed_test_msg = "fly_square failed"
print(failed_test_msg)
failed = True
# save the stored mission to file
print("# Save out the CH7 mission to file")
if not save_mission_to_file(mavproxy, mav, os.path.join(testdir, "ch7_mission.txt")):
failed_test_msg = "save_mission_to_file failed"
print(failed_test_msg)
failed = True
# fly the stored mission
print("# Fly CH7 saved mission")
if not fly_mission(mavproxy, mav,height_accuracy = 0.5, target_altitude=10):
failed_test_msg = "fly ch7_mission failed"
print(failed_test_msg)
failed = True
# Throttle Failsafe
print("#")
print("########## Test Failsafe ##########")
print("#")
if not fly_throttle_failsafe(mavproxy, mav):
failed_test_msg = "fly_throttle_failsafe failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Battery failsafe
if not fly_battery_failsafe(mavproxy, mav):
failed_test_msg = "fly_battery_failsafe failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Stability patch
print("#")
print("########## Test Stability Patch ##########")
print("#")
if not fly_stability_patch(mavproxy, mav, 30):
failed_test_msg = "fly_stability_patch failed"
print(failed_test_msg)
failed = True
# RTL
print("# RTL #")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after stab patch failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fence test
print("#")
print("########## Test Horizontal Fence ##########")
print("#")
if not fly_fence_test(mavproxy, mav, 180):
failed_test_msg = "fly_fence_test failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fly GPS Glitch Loiter test
print("# GPS Glitch Loiter Test")
if not fly_gps_glitch_loiter_test(mavproxy, mav):
failed_test_msg = "fly_gps_glitch_loiter_test failed"
print(failed_test_msg)
failed = True
# RTL after GPS Glitch Loiter test
print("# RTL #")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL failed"
print(failed_test_msg)
failed = True
# Fly GPS Glitch test in auto mode
print("# GPS Glitch Auto Test")
if not fly_gps_glitch_auto_test(mavproxy, mav):
failed_test_msg = "fly_gps_glitch_auto_test failed"
print(failed_test_msg)
failed = True
# take-off ahead of next test
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Loiter for 10 seconds
print("#")
print("########## Test Loiter for 10 seconds ##########")
print("#")
if not loiter(mavproxy, mav):
failed_test_msg = "loiter failed"
print(failed_test_msg)
failed = True
# Loiter Climb
print("#")
print("# Loiter - climb to 30m")
print("#")
if not change_alt(mavproxy, mav, 30):
failed_test_msg = "change_alt climb failed"
print(failed_test_msg)
failed = True
# Loiter Descend
print("#")
print("# Loiter - descend to 20m")
print("#")
if not change_alt(mavproxy, mav, 20):
failed_test_msg = "change_alt descend failed"
print(failed_test_msg)
failed = True
# RTL
print("#")
print("########## Test RTL ##########")
print("#")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after Loiter climb/descend failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Simple mode
print("# Fly in SIMPLE mode")
if not fly_simple(mavproxy, mav):
failed_test_msg = "fly_simple failed"
print(failed_test_msg)
failed = True
# RTL
print("#")
print("########## Test RTL ##########")
print("#")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after simple mode failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fly a circle in super simple mode
print("# Fly a circle in SUPER SIMPLE mode")
if not fly_super_simple(mavproxy, mav):
failed_test_msg = "fly_super_simple failed"
print(failed_test_msg)
failed = True
# RTL
print("# RTL #")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after super simple mode failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Circle mode
print("# Fly CIRCLE mode")
if not fly_circle(mavproxy, mav):
failed_test_msg = "fly_circle failed"
print(failed_test_msg)
failed = True
# RTL
print("#")
print("########## Test RTL ##########")
print("#")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after circle failed"
print(failed_test_msg)
failed = True
print("# Fly copter mission")
if not fly_auto_test(mavproxy, mav):
failed_test_msg = "fly_auto_test failed"
print(failed_test_msg)
failed = True
else:
print("Flew copter mission OK")
# wait for disarm
mav.motors_disarmed_wait()
if not log_download(mavproxy, mav, util.reltopdir("../buildlogs/ArduCopter-log.bin")):
failed_test_msg = "log_download failed"
print(failed_test_msg)
failed = True
except pexpect.TIMEOUT, failed_test_msg:
failed_test_msg = "Timeout"
failed = True
mav.close()
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
if os.path.exists('ArduCopter-valgrind.log'):
os.chmod('ArduCopter-valgrind.log', 0644)
shutil.copy("ArduCopter-valgrind.log", util.reltopdir("../buildlogs/ArduCopter-valgrind.log"))
# [2014/05/07] FC Because I'm doing a cross machine build (source is on host, build is on guest VM) I cannot hard link
# This flag tells me that I need to copy the data out
if copyTLog:
shutil.copy(logfile, buildlog)
if failed:
print("FAILED: %s" % failed_test_msg)
return False
return True
def fly_CopterAVC(viewerip=None, map=False):
'''fly ArduCopter in SIL for AVC2013 mission
'''
global homeloc
if TARGET != 'sitl':
util.build_SIL('ArduCopter', target=TARGET)
home = "%f,%f,%u,%u" % (AVCHOME.lat, AVCHOME.lng, AVCHOME.alt, AVCHOME.heading)
sil = util.start_SIL('ArduCopter', wipe=True, model='+', home=home, speedup=speedup_default)
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options='--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter')
mavproxy.expect('Received [0-9]+ parameters')
# setup test parameters
mavproxy.send("param load %s/copter_AVC2013_params.parm\n" % testdir)
mavproxy.expect('Loaded [0-9]+ parameters')
# reboot with new parameters
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sil = util.start_SIL('ArduCopter', model='+', home=home, speedup=speedup_default)
options = '--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter --streamrate=5'
if viewerip:
options += ' --out=%s:14550' % viewerip
if map:
options += ' --map'
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options=options)
mavproxy.expect('Telemetry log: (\S+)')
logfile = mavproxy.match.group(1)
print("LOGFILE %s" % logfile)
buildlog = util.reltopdir("../buildlogs/CopterAVC-test.tlog")
print("buildlog=%s" % buildlog)
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
pass
# the received parameters can come before or after the ready to fly message
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
util.expect_setup_callback(mavproxy, expect_callback)
expect_list_clear()
expect_list_extend([sil, mavproxy])
if map:
mavproxy.send('map icon 40.072467969730496 -105.2314389590174\n')
mavproxy.send('map icon 40.072600990533829 -105.23146100342274\n')
# get a mavlink connection going
try:
mav = mavutil.mavlink_connection('127.0.0.1:19550', robust_parsing=True)
except Exception, msg:
print("Failed to start mavlink connection on 127.0.0.1:19550" % msg)
raise
mav.message_hooks.append(message_hook)
mav.idle_hooks.append(idle_hook)
failed = False
failed_test_msg = "None"
try:
mav.wait_heartbeat()
setup_rc(mavproxy)
homeloc = mav.location()
# wait 10sec to allow EKF to settle
wait_seconds(mav, 10)
# Arm
print("# Arm motors")
if not arm_motors(mavproxy, mav):
failed_test_msg = "arm_motors failed"
print(failed_test_msg)
failed = True
print("# Fly AVC mission")
if not fly_avc_test(mavproxy, mav):
failed_test_msg = "fly_avc_test failed"
print(failed_test_msg)
failed = True
else:
print("Flew AVC mission OK")
#mission includes disarm at end so should be ok to download logs now
if not log_download(mavproxy, mav, util.reltopdir("../buildlogs/CopterAVC-log.bin")):
failed_test_msg = "log_download failed"
print(failed_test_msg)
failed = True
except pexpect.TIMEOUT, failed_test_msg:
failed_test_msg = "Timeout"
failed = True
mav.close()
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
if failed:
print("FAILED: %s" % failed_test_msg)
return False
return True | unknown | codeparrot/codeparrot-clean | ||
// Copyright IBM Corp. 2016, 2025
// SPDX-License-Identifier: BUSL-1.1
package consul
import (
"sync"
"testing"
realtesting "testing"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/helper/testhelpers/consul"
physConsul "github.com/hashicorp/vault/physical/consul"
"github.com/hashicorp/vault/vault"
)
// MakeConsulBackend starts a dedicated Consul test container and returns a
// vault.PhysicalBackendBundle wrapping a Consul physical backend pointed at
// it.  The bundle's Cleanup tears the container down.  The test is failed if
// the backend cannot be constructed.
func MakeConsulBackend(t testing.TB, logger hclog.Logger) *vault.PhysicalBackendBundle {
	// PrepareTestContainer needs a concrete *testing.T, so the TB is
	// downcast here; passing any other TB implementation would panic.
	cleanup, config := consul.PrepareTestContainer(t.(*realtesting.T), "", false, true)
	consulConf := map[string]string{
		"address":      config.Address(),
		"token":        config.Token,
		"max_parallel": "32",
	}
	consulBackend, err := physConsul.NewConsulBackend(consulConf, logger)
	if err != nil {
		t.Fatal(err)
	}
	return &vault.PhysicalBackendBundle{
		Backend: consulBackend,
		Cleanup: cleanup,
	}
}
// ConsulBackendSetup installs a consulContainerManager as the cluster's
// PhysicalFactory so each Vault test cluster gets its own Consul container
// (see consulContainerManager for the call-ordering contract).
// The conf parameter is unused but required by the setup-function signature.
func ConsulBackendSetup(conf *vault.CoreConfig, opts *vault.TestClusterOptions) {
	m := &consulContainerManager{}
	opts.PhysicalFactory = m.Backend
}
// consulContainerManager exposes Backend which matches the PhysicalFactory func
// type. When called, it will ensure that a separate Consul container is started
// for each distinct vault cluster that calls it and ensures that each Vault
// core gets a separate Consul backend instance since that contains state
// related to lock sessions. The whole test framework doesn't have a concept of
// "cluster names" outside of the prefix attached to the logger and other
// backend factories, mostly via SharedPhysicalFactory currently implicitly rely
// on being called in a sequence of core 0, 1, 2,... on one cluster and then
// core 0, 1, 2... on the next and so on. Refactoring lots of things to make
// first-class cluster identifiers a thing seems like a heavy lift given that we
// already rely on sequence of calls everywhere else anyway so we do the same
// here - each time the Backend method is called with coreIdx == 0 we create a
// whole new Consul and assume subsequent non 0 index cores are in the same
// cluster.
type consulContainerManager struct {
	mu      sync.Mutex                     // guards current
	current *consulContainerBackendFactory // factory for the cluster currently being built
}
// Backend matches the PhysicalFactory func type.  A call with coreIdx == 0
// marks the start of a new cluster and allocates a fresh factory (and hence a
// fresh Consul container); subsequent cores reuse the current factory so all
// cores of one cluster share the same container.
func (m *consulContainerManager) Backend(t testing.TB, coreIdx int,
	logger hclog.Logger, conf map[string]interface{},
) *vault.PhysicalBackendBundle {
	m.mu.Lock()
	if coreIdx == 0 || m.current == nil {
		// Create a new consul container factory
		m.current = &consulContainerBackendFactory{}
	}
	f := m.current
	m.mu.Unlock()

	// Delegate outside the manager lock; the factory has its own mutex.
	return f.Backend(t, coreIdx, logger, conf)
}
// consulContainerBackendFactory manages one Consul container shared by all
// cores of a single cluster, reference-counting consumers so the container is
// only torn down when the last backend bundle is cleaned up.
type consulContainerBackendFactory struct {
	mu        sync.Mutex        // guards all fields below
	refCount  int               // number of live backend bundles using the container
	cleanupFn func()            // stops the container; nil once run
	config    map[string]string // Consul backend config pointing at the container
}
// Backend returns a Consul-backed bundle for one core, lazily starting the
// shared container on first use and incrementing the reference count so
// cleanup only fires once every bundle has been released.
func (f *consulContainerBackendFactory) Backend(t testing.TB, coreIdx int,
	logger hclog.Logger, conf map[string]interface{},
) *vault.PhysicalBackendBundle {
	f.mu.Lock()
	defer f.mu.Unlock()

	if f.refCount == 0 {
		f.startContainerLocked(t)
		logger.Debug("started consul container", "clusterID", conf["cluster_id"],
			"address", f.config["address"])
	}

	f.refCount++
	// Each core gets its own backend instance since it carries per-core
	// lock-session state, but they all share f.config (same container).
	consulBackend, err := physConsul.NewConsulBackend(f.config, logger.Named("consul"))
	if err != nil {
		t.Fatal(err)
	}
	return &vault.PhysicalBackendBundle{
		Backend: consulBackend,
		Cleanup: f.cleanup,
	}
}
// startContainerLocked launches the Consul test container and records its
// connection config and cleanup function.  Caller must hold f.mu.
func (f *consulContainerBackendFactory) startContainerLocked(t testing.TB) {
	// PrepareTestContainer needs a concrete *testing.T (see MakeConsulBackend).
	cleanup, config := consul.PrepareTestContainer(t.(*realtesting.T), "", false, true)
	f.config = map[string]string{
		"address":      config.Address(),
		"token":        config.Token,
		"max_parallel": "32",
	}
	f.cleanupFn = cleanup
}
// cleanup releases one reference to the shared container and stops the
// container once the last reference is gone.  Extra calls after the count
// reaches zero (or before any Backend call) are no-ops.
func (f *consulContainerBackendFactory) cleanup() {
	f.mu.Lock()
	defer f.mu.Unlock()

	if f.refCount < 1 || f.cleanupFn == nil {
		return
	}
	f.refCount--
	if f.refCount == 0 {
		f.cleanupFn()
		f.cleanupFn = nil
	}
}
# system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
from __future__ import print_function
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Command-line handling: an optional single argument selects either a pid
# (if it parses as an int) or a comm name to filter on.
usage = "perf script -s syscall-counts-by-pid.py [comm]\n"

for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except ValueError:
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; only a non-integer argument
        # should fall through to a comm filter.
        for_comm = sys.argv[1]

# nested auto-vivifying dict: syscalls[comm][pid][syscall_id] -> count
syscalls = autodict()
def trace_begin():
    """Called by perf before event processing starts."""
    print("Press control+C to stop and show the summary")
def trace_end():
    """Called by perf after the last event; print the summary table."""
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        common_callchain, id, args):
    """Per-event handler: count one sys_enter for (comm, pid, syscall id).

    Events not matching the optional comm/pid filter are ignored.
    """
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return
    # autodict auto-vivifies intermediate levels; the first increment of a
    # leaf raises TypeError (leaf is still a dict), which seeds the count.
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        syscalls[common_comm][common_pid][id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    """Old-style tracepoint handler; forwards to the raw handler.

    This variant is invoked by perf when the script binds to the
    syscalls:* tracepoints instead of raw_syscalls:*.
    """
    # BUGFIX: raw_syscalls__sys_enter() also takes a common_callchain
    # argument that this older handler signature does not receive, so the
    # previous `raw_syscalls__sys_enter(**locals())` raised TypeError for
    # a missing argument.  The raw handler never reads the callchain, so
    # forward None explicitly.
    raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        None, id, args)
def print_syscall_totals():
    """Print syscall counts grouped by comm and pid, most frequent first."""
    if for_comm is not None:
        print("\nsyscall events for %s:\n" % (for_comm))
    else:
        print("\nsyscall events by comm/pid:\n")

    print("%-40s %10s" % ("comm [pid]/syscalls", "count"))
    print("%-40s %10s" % ("----------------------------------------",
        "----------"))

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print("\n%s [%d]" % (comm, pid))
            # sort by count (descending), then by syscall id
            # (removed an unused `id_keys = syscalls[comm][pid].keys()` local)
            for id, val in sorted(syscalls[comm][pid].items(),
                    key = lambda kv: (kv[1], kv[0]), reverse = True):
                print("  %-38s %10d" % (syscall_name(id), val))
# -*- coding: utf-8 -*-
import unittest2
import openerp.tests.common as common
from openerp.addons.connector.backend import (Backend,
get_backend,
BACKENDS)
from openerp.addons.connector.exception import NoConnectorUnitError
from openerp.addons.connector.connector import (Binder,
ConnectorUnit)
from openerp.addons.connector.unit.mapper import ExportMapper
from openerp.addons.connector.unit.backend_adapter import BackendAdapter
from openerp.addons.connector.session import ConnectorSession
class test_backend(unittest2.TestCase):
    """ Test Backend

    Covers construction, parent inheritance of the service name, lookup
    via get_backend() (with and without a version), and repr/str output.
    """

    def setUp(self):
        super(test_backend, self).setUp()
        self.service = 'calamitorium'

    def tearDown(self):
        super(test_backend, self).tearDown()
        # backends register themselves globally; clear to isolate tests
        BACKENDS.backends.clear()

    def test_new_backend(self):
        """ Create a backend"""
        version = '1.14'
        backend = Backend(self.service, version=version)
        self.assertEqual(backend.service, self.service)
        self.assertEqual(backend.version, version)

    def test_parent(self):
        """ Bind the backend to a parent backend"""
        version = '1.14'
        backend = Backend(self.service)
        # the child inherits the service name from its parent
        child_backend = Backend(parent=backend, version=version)
        self.assertEqual(child_backend.service, backend.service)

    def test_no_service(self):
        """ Should raise an error because no service or parent is defined"""
        with self.assertRaises(ValueError):
            Backend(version='1.14')

    def test_get_backend(self):
        """ Find a backend """
        backend = Backend(self.service)
        found_ref = get_backend(self.service)
        self.assertEqual(backend, found_ref)

    def test_no_backend_found(self):
        """ Can't find a backend """
        with self.assertRaises(ValueError):
            get_backend('torium')

    def test_backend_version(self):
        """ Find a backend with a version """
        parent = Backend(self.service)
        backend = Backend(parent=parent, version='1.14')
        found_ref = get_backend(self.service, version='1.14')
        self.assertEqual(backend, found_ref)

    def test_repr(self):
        # str/repr include the version only when one is set
        parent = Backend(self.service)
        self.assertEqual(str(parent), "Backend('calamitorium')")
        self.assertEqual(repr(parent), "<Backend 'calamitorium'>")
        backend = Backend(parent=parent, version='1.14')
        self.assertEqual(str(backend), "Backend('calamitorium', '1.14')")
        self.assertEqual(repr(backend), "<Backend 'calamitorium', '1.14'>")
class test_backend_register(common.TransactionCase):
    """ Test registration of classes on the Backend

    Each test registers ConnectorUnit subclasses (directly, via the
    backend-as-decorator form, or with ``replacing=``) and checks which
    class get_class() resolves for a model.
    """

    def setUp(self):
        super(test_backend_register, self).setUp()
        self.service = 'calamitorium'
        self.version = '1.14'
        self.parent = Backend(self.service)
        self.backend = Backend(parent=self.parent, version=self.version)
        self.session = ConnectorSession(self.cr,
                                        self.uid)

    def tearDown(self):
        super(test_backend_register, self).tearDown()
        # drop global registrations and classes registered on the backend
        BACKENDS.backends.clear()
        del self.backend._class_entries[:]

    def test_register_class(self):
        class BenderBinder(Binder):
            _model_name = 'res.users'
        self.backend.register_class(BenderBinder)
        ref = self.backend.get_class(Binder,
                                     self.session,
                                     'res.users')
        self.assertEqual(ref, BenderBinder)

    def test_register_class_decorator(self):
        # the backend instance itself can be used as a class decorator
        @self.backend
        class ZoidbergMapper(ExportMapper):
            _model_name = 'res.users'
        ref = self.backend.get_class(ExportMapper,
                                     self.session,
                                     'res.users')
        self.assertEqual(ref, ZoidbergMapper)

    def test_register_class_parent(self):
        """ It should get the parent's class when no class is defined"""
        @self.parent
        class FryBinder(Binder):
            _model_name = 'res.users'
        ref = self.backend.get_class(Binder,
                                     self.session,
                                     'res.users')
        self.assertEqual(ref, FryBinder)

    def test_no_register_error(self):
        """ Error when asking for a class and none is found"""
        with self.assertRaises(NoConnectorUnitError):
            self.backend.get_class(BackendAdapter,
                                   self.session,
                                   'res.users')

    def test_get_class_installed_module(self):
        """ Only class from an installed module should be returned """
        class LambdaUnit(ConnectorUnit):
            _model_name = 'res.users'
        @self.backend
        class LambdaYesUnit(LambdaUnit):
            _model_name = 'res.users'
        class LambdaNoUnit(LambdaUnit):
            _model_name = 'res.users'
        # trick the origin of the class, let it think
        # that it comes from the OpenERP module 'not installed module'
        LambdaNoUnit._openerp_module_ = 'not installed module'
        self.backend(LambdaNoUnit)
        matching_cls = self.backend.get_class(LambdaUnit,
                                              self.session,
                                              'res.users')
        self.assertEqual(matching_cls, LambdaYesUnit)

    def test_get_class_replacing_module(self):
        """ Returns the replacing ConnectorUnit"""
        class LambdaUnit(ConnectorUnit):
            _model_name = 'res.users'
        @self.backend
        class LambdaNoUnit(LambdaUnit):
            _model_name = 'res.users'
        @self.backend(replacing=LambdaNoUnit)
        class LambdaYesUnit(LambdaUnit):
            _model_name = 'res.users'
        matching_cls = self.backend.get_class(LambdaUnit,
                                              self.session,
                                              'res.users')
        self.assertEqual(matching_cls, LambdaYesUnit)

    def test_get_class_replacing_uninstalled_module(self):
        """ Does not return the replacing ConnectorUnit of an
        uninstalled module """
        class LambdaUnit(ConnectorUnit):
            _model_name = 'res.users'
        @self.backend
        class LambdaYesUnit(LambdaUnit):
            _model_name = 'res.users'
        class LambdaNoUnit(LambdaUnit):
            _model_name = 'res.users'
        # trick the origin of the class, let it think
        # that it comes from the OpenERP module 'not installed module'
        LambdaNoUnit._openerp_module_ = 'not installed module'
        self.backend(LambdaNoUnit, replacing=LambdaYesUnit)
        matching_cls = self.backend.get_class(LambdaUnit,
                                              self.session,
                                              'res.users')
        self.assertEqual(matching_cls, LambdaYesUnit)

    def test_get_class_replacing_diamond(self):
        """ Replace several classes in a diamond fashion """
        class LambdaUnit(ConnectorUnit):
            _model_name = 'res.users'
        @self.backend
        class LambdaNoUnit(LambdaUnit):
            _model_name = 'res.users'
        @self.backend
        class LambdaNo2Unit(LambdaUnit):
            _model_name = 'res.users'
        # one class can replace several registered classes at once
        @self.backend(replacing=(LambdaNoUnit, LambdaNo2Unit))
        class LambdaYesUnit(LambdaUnit):
            _model_name = 'res.users'
        matching_cls = self.backend.get_class(LambdaUnit,
                                              self.session,
                                              'res.users')
        self.assertEqual(matching_cls, LambdaYesUnit)

    def test_get_class_replacing_unregistered(self):
        """ Replacing an unregistered class raise ValueError """
        class LambdaUnit(ConnectorUnit):
            _model_name = 'res.users'
        with self.assertRaises(ValueError):
            @self.backend(replacing=LambdaUnit)
            class LambdaNoUnit(LambdaUnit):
                _model_name = 'res.users'

    def test_get_class_replacing_self(self):
        """ A class should not be able to replace itself """
        class LambdaUnit(ConnectorUnit):
            _model_name = 'res.users'
        @self.backend
        class LambdaRecurseUnit(LambdaUnit):
            _model_name = 'res.users'
        with self.assertRaises(ValueError):
            self.backend.register_class(LambdaRecurseUnit,
                                        replacing=LambdaRecurseUnit)
import rospy
import roslib
import subprocess
from functools import partial
from os import path
import actionlib
from rqt_robot_dashboard.widgets import IconToolButton
from python_qt_binding.QtCore import QSize
from std_srvs.srv import Empty
from rqt_robot_dashboard import util
class DriversStatusWidget(IconToolButton):
    """Dashboard button showing youBot driver status.

    Grey icon (state 0) = stale / no data; green icon (state 1) = driver
    OK.  Clicking the button calls the driver's ``/reconnect`` service.
    """

    def __init__(self):
        # icon index 0 -> grey (stale), index 1 -> green (ok)
        self._not_ok_icon = [path.join(roslib.packages.get_pkg_dir('rqt_youbot_dashboard'), "icons/motor-grey.png")]
        self._ok_icon = [path.join(roslib.packages.get_pkg_dir('rqt_youbot_dashboard'), "icons/motor-green.png")]
        icons = [self._not_ok_icon, self._ok_icon]
        super(DriversStatusWidget, self).__init__('', icons=icons)
        self.setFixedSize(QSize(40, 40))
        self.update_state(0)
        self.setToolTip("Driver: Stale")
        self.clicked.connect(self.toggle)
        # latest platform state message; None until one arrives
        self._platform_state_message = None

    def update_state(self, state):
        # thin wrapper over the base implementation, kept as an
        # extension point for subclasses
        super(DriversStatusWidget, self).update_state(state)
        pass

    def set_stale(self):
        """Show the grey (stale) icon."""
        self.update_state(0)

    def set_ok(self):
        """Show the green (ok) icon and change the tooltip to the action."""
        self.update_state(1)
        self.setToolTip("Restart driver")

    def toggle(self):
        """Click handler: ask the driver to restart via /reconnect."""
        # only attempt a reconnect once at least one platform state
        # message has been received
        if (self._platform_state_message is not None):
            reconnect = rospy.ServiceProxy("/reconnect", Empty)
            try:
                reconnect()
            except rospy.ServiceException, e:
                util.dasherr("Failed to reconnect the driver: service call failed with error: %s" % (e))

    def close(self):
        """Shutdown hook: unregister the service proxy, if any."""
        # NOTE(review): self._srv_arm is never assigned in this class, so
        # this path always hits AttributeError and logs the warning —
        # confirm whether a subclass/owner is expected to set _srv_arm.
        try:
            self._srv_arm.unregister()
        except AttributeError, e:
            rospy.logwarn("Failed to unregister reconnect drivers service")

    def update_platform_state(self, msg):
        """Record the latest platform state message and its receive time."""
        self._platform_state_message = msg
        self._last_platform_state_message = rospy.get_time()
- hosts: host1,host2
gather_facts: no
tasks:
- block:
- block:
- name: EXPECTED FAILURE host1 fails
fail:
when: inventory_hostname == 'host1'
- set_fact:
only_host2_fact: yes
- name: should not fail
fail:
when: only_host2_fact is not defined
always:
- block:
- meta: clear_host_errors
- assert:
that:
- only_host2_fact is defined
when:
- inventory_hostname == 'host2' | unknown | github | https://github.com/ansible/ansible | test/integration/targets/blocks/72725.yml |
- name: create test directories
file:
path: '{{ remote_tmp_dir }}/dir-traversal/{{ item }}'
state: directory
loop:
- source
- target
- roles
- name: create test content
copy:
dest: '{{ remote_tmp_dir }}/dir-traversal/source/content.txt'
content: |
some content to write
- name: build dangerous dir traversal role
script:
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
cmd: create-role-archive.py dangerous.tar content.txt {{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt
executable: '{{ ansible_playbook_python }}'
- name: install dangerous role
command:
cmd: ansible-galaxy role install --roles-path '{{ remote_tmp_dir }}/dir-traversal/roles' dangerous.tar
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
environment:
ANSIBLE_NOCOLOR: True
ANSIBLE_FORCE_COLOR: False
ignore_errors: true
register: galaxy_install_dangerous
- name: check for overwritten file
stat:
path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
register: dangerous_overwrite_stat
- name: get overwritten content
slurp:
path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
register: dangerous_overwrite_content
when: dangerous_overwrite_stat.stat.exists
- assert:
that:
- dangerous_overwrite_content.content|default('')|b64decode == ''
- not dangerous_overwrite_stat.stat.exists
- galaxy_install_dangerous is failed
- "'is not a subpath of the role' in (galaxy_install_dangerous.stderr | regex_replace('\n', ' '))"
- name: remove tarfile for next test
file:
path: '{{ item }}'
state: absent
loop:
- '{{ remote_tmp_dir }}/dir-traversal/source/dangerous.tar'
- '{{ remote_tmp_dir }}/dir-traversal/roles/dangerous.tar'
- name: build dangerous dir traversal role that includes .. in the symlink path
script:
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
cmd: create-role-archive.py dangerous.tar content.txt {{ remote_tmp_dir }}/dir-traversal/source/../target/target-file-to-overwrite.txt
executable: '{{ ansible_playbook_python }}'
- name: install dangerous role
command:
cmd: 'ansible-galaxy role install --roles-path {{ remote_tmp_dir }}/dir-traversal/roles dangerous.tar'
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
environment:
ANSIBLE_NOCOLOR: True
ANSIBLE_FORCE_COLOR: False
ignore_errors: true
register: galaxy_install_dangerous
- name: check for overwritten file
stat:
path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
register: dangerous_overwrite_stat
- name: get overwritten content
slurp:
path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
register: dangerous_overwrite_content
when: dangerous_overwrite_stat.stat.exists
- assert:
that:
- dangerous_overwrite_content.content|default('')|b64decode == ''
- not dangerous_overwrite_stat.stat.exists
- galaxy_install_dangerous is failed
- "'is not a subpath of the role' in (galaxy_install_dangerous.stderr | regex_replace('\n', ' '))"
- name: remove tarfile for next test
file:
path: '{{ remote_tmp_dir }}/dir-traversal/source/dangerous.tar'
state: absent
- name: build dangerous dir traversal role that includes .. in the relative symlink path
script:
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
cmd: create-role-archive.py dangerous_rel.tar content.txt ../context.txt
- name: install dangerous role with relative symlink
command:
cmd: 'ansible-galaxy role install --roles-path {{ remote_tmp_dir }}/dir-traversal/roles dangerous_rel.tar'
chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
environment:
ANSIBLE_NOCOLOR: True
ANSIBLE_FORCE_COLOR: False
ignore_errors: true
register: galaxy_install_dangerous
- name: check for symlink outside role
stat:
path: "{{ remote_tmp_dir | realpath }}/dir-traversal/roles/symlink"
register: symlink_outside_role
- assert:
that:
- not symlink_outside_role.stat.exists
- galaxy_install_dangerous is failed
- "'is not a subpath of the role' in (galaxy_install_dangerous.stderr | regex_replace('\n', ' '))"
- name: remove test directories
file:
path: '{{ remote_tmp_dir }}/dir-traversal/{{ item }}'
state: absent
loop:
- source
- target
- roles | unknown | github | https://github.com/ansible/ansible | test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml |
// Copyright IBM Corp. 2016, 2025
// SPDX-License-Identifier: BUSL-1.1
package vault
import (
"context"
"encoding/json"
"fmt"
"strings"
"github.com/golang/protobuf/proto"
wrapping "github.com/hashicorp/go-kms-wrapping/v2"
"github.com/hashicorp/vault/sdk/physical"
"github.com/hashicorp/vault/vault/seal"
)
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Seal Wrapping
type PartialWrapFailCallback func(context.Context, map[string]error) error
// Helper function to use for partial wrap fail callbacks where we don't want to allow a partial failure. See
// for example barrier or recovery key wrapping. Just don't allow for those risky scenarios
var DisallowPartialSealWrap = func(ctx context.Context, errs map[string]error) error {
return &seal.PartialSealWrapError{seal.JoinSealWrapErrors("not allowing operation to proceed without full wrapping involving all configured seals", errs)}
}
// SealWrapValue creates a SealWrappedValue wrapper with the entryValue being optionally encrypted with the give seal Access.
func SealWrapValue(ctx context.Context, access seal.Access, encrypt bool, entryValue []byte, wrapFailCallback PartialWrapFailCallback) (*SealWrappedValue, error) {
if access == nil {
return newTransitorySealWrappedValue(&wrapping.BlobInfo{
Wrapped: false,
Ciphertext: entryValue,
}), nil
}
if !encrypt {
// Maybe this should also be a transitory value, since we want to encrypt
// as soon as we can?
return NewPlaintextSealWrappedValue(access.Generation(), entryValue), nil
}
multiWrapValue, errs := access.Encrypt(ctx, entryValue, nil)
if multiWrapValue == nil {
// no seal encryption was successful
return nil, seal.JoinSealWrapErrors("error seal wrapping value: encryption generated no results", errs)
}
if len(errs) > 0 {
// Partial failure, note this by calling the provided callback, if present
if err := wrapFailCallback(ctx, errs); err != nil {
// If the callback returns an error, caller is indicating it doesn't want this to proceed
return nil, err
}
}
// Why are we "cleaning up" the blob infos?
var ret []*wrapping.BlobInfo
for _, blobInfo := range multiWrapValue.Slots {
ret = append(ret, &wrapping.BlobInfo{
Wrapped: true,
Ciphertext: blobInfo.Ciphertext,
Iv: blobInfo.Iv,
Hmac: blobInfo.Hmac,
KeyInfo: blobInfo.KeyInfo,
})
}
return NewSealWrappedValue(&seal.MultiWrapValue{
Generation: multiWrapValue.Generation,
Slots: ret,
}), nil
}
// UnsealWrapValue uses the seal Access to decrypt the wrappedEntryValue. It returns the decrypted value
// and a flag indicating whether the wrappedEntryValue is current (according to Access.IsUpToDate).
// migration is in progress.
func UnsealWrapValue(ctx context.Context, access seal.Access, entryKey string, wrappedEntryValue *SealWrappedValue) (entryValue []byte, uptodate bool, err error) {
multiWrapValue := &seal.MultiWrapValue{
Generation: wrappedEntryValue.GetGeneration(),
}
for _, blobInfo := range wrappedEntryValue.GetSlots() {
// TODO(SEALHA): Why doesn't sealWrapValue() set ValuePath? Could it be a migration issue? Can we stop setting it?
blobInfoWithValuePath := &wrapping.BlobInfo{
ValuePath: entryKey,
Ciphertext: blobInfo.Ciphertext,
Iv: blobInfo.Iv,
Hmac: blobInfo.Hmac,
KeyInfo: blobInfo.KeyInfo,
}
multiWrapValue.Slots = append(multiWrapValue.Slots, blobInfoWithValuePath)
}
entryValue, uptodate, err = access.Decrypt(ctx, multiWrapValue, nil)
if err != nil {
if isSealOldKeyError(err) {
uptodate = false
} else {
return nil, false, err
}
}
return entryValue, uptodate, nil
}
// MarshalSealWrappedValue marshals a SealWrappedValue into a byte slice. If the seal wrapped value contains
// a single wrapping.BlobInfo, the BlobInfo will be marshalled directly; otherwise the SealWrappedValue
// will be.
func MarshalSealWrappedValue(wrappedEntryValue *SealWrappedValue) ([]byte, error) {
if len(wrappedEntryValue.value.Slots) > 1 {
return wrappedEntryValue.marshal()
}
return proto.Marshal(wrappedEntryValue.value.Slots[0])
}
// UnmarshalSealWrappedValue attempts to unmarshal a SealWrappedValue. This method can unmarshal marshalled
// SealWrappedValues as well as wrapping.BlobInfos. When a BlobInfo is encountered, a "transitory"
// SealWrappedValue will be returned.
func UnmarshalSealWrappedValue(value []byte) (*SealWrappedValue, error) {
swv := &SealWrappedValue{}
swvErr := swv.unmarshal(value)
if swvErr == nil {
return swv, nil
}
blobInfo := &wrapping.BlobInfo{}
blobInfoErr := proto.Unmarshal(value, blobInfo)
if blobInfoErr == nil {
return newTransitorySealWrappedValue(blobInfo), nil
}
return nil, fmt.Errorf("error unmarshalling seal wrapped value: %w, %w", swvErr, blobInfoErr)
}
// UnmarshalSealWrappedValueWithCanary unmarshalls a byte array into a SealWrappedValue, taking care of
// removing the 's' canary value.
// This method returns true if a SealWrappedValue was successfully unmarshaled.
func UnmarshalSealWrappedValueWithCanary(value []byte) (*SealWrappedValue, bool) {
eLen := len(value)
if eLen > 0 && value[eLen-1] == 's' {
if wrappedEntryValue, err := UnmarshalSealWrappedValue(value[:eLen-1]); err == nil {
return wrappedEntryValue, true
}
// Else, note that having the canary value present is not a guarantee that
// the value is wrapped, so if there is an error we will simply return a nil BlobInfo.
}
return nil, false
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Stored Barrier Keys (a.k.a. Root Key)
// SealWrapStoredBarrierKeys takes the barrier (root) keys, encrypts them using the seal access,
// and returns a physical.Entry for storage.
func SealWrapStoredBarrierKeys(ctx context.Context, access seal.Access, keys [][]byte) (*physical.Entry, error) {
// Note that even though keys is a slice, it seems to always contain a single key.
buf, err := json.Marshal(keys)
if err != nil {
return nil, fmt.Errorf("failed to encode keys for storage: %w", err)
}
wrappedEntryValue, err := SealWrapValue(ctx, access, true, buf, DisallowPartialSealWrap)
if err != nil {
return nil, &ErrEncrypt{Err: fmt.Errorf("failed to encrypt keys for storage: %w", err)}
}
// Watch out, Wrapped has to be false for StoredBarrierKeysPath, since it used to be that the BlobInfo
// returned by access.Encrypt() was marshalled directly. It probably would not matter if the value
// was true, but setting if to false here makes TestSealWrapBackend_StorageBarrierKeyUpgrade_FromIVEntry
// pass (maybe other tests as well?).
for _, blobInfo := range wrappedEntryValue.GetSlots() {
blobInfo.Wrapped = false
}
wrappedValue, err := MarshalSealWrappedValue(wrappedEntryValue)
if err != nil {
return nil, fmt.Errorf("failed to marshal value for storage: %w", err)
}
return &physical.Entry{
Key: StoredBarrierKeysPath,
Value: wrappedValue,
}, nil
}
// UnsealWrapStoredBarrierKeys is the counterpart to SealWrapStoredBarrierKeys.
func UnsealWrapStoredBarrierKeys(ctx context.Context, access seal.Access, pe *physical.Entry) ([][]byte, error) {
wrappedEntryValue, err := UnmarshalSealWrappedValue(pe.Value)
if err != nil {
return nil, fmt.Errorf("failed to proto decode stored keys: %w", err)
}
return decodeBarrierKeys(ctx, access, &wrappedEntryValue.value)
}
func decodeBarrierKeys(ctx context.Context, access seal.Access, multiWrapValue *seal.MultiWrapValue) ([][]byte, error) {
pt, _, err := access.Decrypt(ctx, multiWrapValue, nil)
if err != nil {
if strings.Contains(err.Error(), "message authentication failed") {
return nil, &ErrInvalidKey{Reason: fmt.Sprintf("failed to decrypt keys from storage: %v", err)}
}
return nil, &ErrDecrypt{Err: fmt.Errorf("failed to decrypt keys from storage: %w", err)}
}
// Decode the barrier entry
var keys [][]byte
if err := json.Unmarshal(pt, &keys); err != nil {
return nil, fmt.Errorf("failed to decode stored keys: %v", err)
}
return keys, nil
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Recovery Key
// SealWrapRecoveryKey encrypts the recovery key using the given seal access and returns a physical.Entry for storage.
func SealWrapRecoveryKey(ctx context.Context, access seal.Access, key []byte) (*physical.Entry, error) {
wrappedEntryValue, err := SealWrapValue(ctx, access, true, key, DisallowPartialSealWrap)
if err != nil {
return nil, &ErrEncrypt{Err: fmt.Errorf("failed to encrypt recovery key for storage: %w", err)}
}
wrappedValue, err := MarshalSealWrappedValue(wrappedEntryValue)
if err != nil {
return nil, fmt.Errorf("failed to marshal value for storage: %w", err)
}
return &physical.Entry{
Key: recoveryKeyPath,
Value: wrappedValue,
}, nil
}
// UnsealWrapRecoveryKey is the counterpart to SealWrapRecoveryKey.
func UnsealWrapRecoveryKey(ctx context.Context, access seal.Access, pe *physical.Entry) ([]byte, error) {
wrappedEntryValue, err := UnmarshalSealWrappedValue(pe.Value)
if err != nil {
return nil, fmt.Errorf("failed to proto decode recevory key: %w", err)
}
pt, _, err := UnsealWrapValue(ctx, access, pe.Key, wrappedEntryValue)
return pt, err
} | go | github | https://github.com/hashicorp/vault | vault/seal_util.go |
# -*- coding: utf-8 -*-;
#
# gmail2kayac.py --- notify new mail comming in gmail inbox to iPhone user
# through im.kayac.com
#
# external packages
# - Universal Feed Parser
# http://www.feedparser.org/
# - Pit
#
# API reference
# 1. im.kayac.com
# http://im.kayac.com
#
__author__ = 'ymotongpoo <ymotongpoo AT gmail DOT com>'
__version__ = '0.1'
__date__ = '2009/12/09 (Wed)'
import sys
import urllib
from pit import Pit
try:
import feedparser
except:
print 'Universal Feed Parser is necessary'
sys.exit(2)
try:
import json
except:
import simplejson as json
GMAILATOM = 'https://%s:%s@mail.google.com/mail/feed/atom/'
KAYAC_URL = 'http://im.kayac.com/api/post/%s'
def notify(message, user, password='', handler=''):
url = KAYAC_URL % (user,)
postparams = dict( message = message,
handler = handler,
password = password )
params = urllib.urlencode(postparams)
p = urllib.urlopen(url, params)
r = json.loads(p.read())
return r
def main():
gmail = Pit.get('gmail', {'require' : {
'user' : 'Gmail account name',
'pass' : 'password for Gmail account',
'fullcount' : '0'
}})
kayac = Pit.get('kayac', {'require' : {
'user' : 'im.kayac.com account name',
'pass' : 'password for im.kayac.com account'
}})
d = feedparser.parse( GMAILATOM % (gmail['user'], gmail['pass']) )
if int(d.feed.fullcount) == int(gmail['fullcount']):
sys.exit(0)
elif int(d.feed.fullcount) > int(gmail['fullcount']):
for e in d.entries:
title = e.title
author = e.author
message = '%s <%s> [%s@gmail.com]' % (title, author, gmail['user'])
r = notify(message, kayac['user'], kayac['pass'])
if r['result'] != 'posted':
print 'failed', r
sys.exit(2)
Pit.set('gmail', {'data' : {
'user' : gmail['user'],
'pass' : gmail['pass'],
'fullcount' : str(d.feed.fullcount)}})
if __name__ == '__main__':
try:
main()
except Exception, e:
print Exception, e | unknown | codeparrot/codeparrot-clean | ||
# Grafana Dashboards
The files in this directory contain example Grafana dashboard configurations to act as a starting point for users
looking to monitor CockroachDB via Grafana.
Note that while the CockroachDB team provides these dashboards as an example, these dashboards are not guaranteed to
function out of the box with all deployment modes for Prometheus/Grafana/etc., and the CockroachDB team does not offer
support as such.
Note that the dashboards assume the presence of the following labels on each CockroachDB metric:
- `job="cockroachdb"`: to denote the CockroachDB job that the metrics belong to.
- `cluster`: to identify the specific cluster that the metrics belong to.
- `instance`: to identify the specific node/instance that the metrics belong to.
Users may have to modify the dashboards according to their own deployment configurations, such as the metric label selectors
used, before they function properly.
Please refer to the
[documentation on how to monitor CockroachDB with Prometheus](https://www.cockroachlabs.com/docs/stable/monitor-cockroachdb-with-prometheus)
for more details. | unknown | github | https://github.com/cockroachdb/cockroach | monitoring/grafana-dashboards/by-cluster/README.md |
//// [tests/cases/compiler/ambientEnumElementInitializer6.ts] ////
//// [ambientEnumElementInitializer6.ts]
declare namespace M {
enum E {
e = 3
}
}
//// [ambientEnumElementInitializer6.js]
"use strict"; | javascript | github | https://github.com/microsoft/TypeScript | tests/baselines/reference/ambientEnumElementInitializer6.js |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
parse_duration,
unified_strdate,
str_to_int,
int_or_none,
float_or_none,
ISO639Utils,
determine_ext,
)
class AdobeTVBaseIE(InfoExtractor):
_API_BASE_URL = 'http://tv.adobe.com/api/v4/'
class AdobeTVIE(AdobeTVBaseIE):
_VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?watch/(?P<show_urlname>[^/]+)/(?P<id>[^/]+)'
_TEST = {
'url': 'http://tv.adobe.com/watch/the-complete-picture-with-julieanne-kost/quick-tip-how-to-draw-a-circle-around-an-object-in-photoshop/',
'md5': '9bc5727bcdd55251f35ad311ca74fa1e',
'info_dict': {
'id': '10981',
'ext': 'mp4',
'title': 'Quick Tip - How to Draw a Circle Around an Object in Photoshop',
'description': 'md5:99ec318dc909d7ba2a1f2b038f7d2311',
'thumbnail': r're:https?://.*\.jpg$',
'upload_date': '20110914',
'duration': 60,
'view_count': int,
},
}
def _real_extract(self, url):
language, show_urlname, urlname = re.match(self._VALID_URL, url).groups()
if not language:
language = 'en'
video_data = self._download_json(
self._API_BASE_URL + 'episode/get/?language=%s&show_urlname=%s&urlname=%s&disclosure=standard' % (language, show_urlname, urlname),
urlname)['data'][0]
formats = [{
'url': source['url'],
'format_id': source.get('quality_level') or source['url'].split('-')[-1].split('.')[0] or None,
'width': int_or_none(source.get('width')),
'height': int_or_none(source.get('height')),
'tbr': int_or_none(source.get('video_data_rate')),
} for source in video_data['videos']]
self._sort_formats(formats)
return {
'id': compat_str(video_data['id']),
'title': video_data['title'],
'description': video_data.get('description'),
'thumbnail': video_data.get('thumbnail'),
'upload_date': unified_strdate(video_data.get('start_date')),
'duration': parse_duration(video_data.get('duration')),
'view_count': str_to_int(video_data.get('playcount')),
'formats': formats,
}
class AdobeTVPlaylistBaseIE(AdobeTVBaseIE):
def _parse_page_data(self, page_data):
return [self.url_result(self._get_element_url(element_data)) for element_data in page_data]
def _extract_playlist_entries(self, url, display_id):
page = self._download_json(url, display_id)
entries = self._parse_page_data(page['data'])
for page_num in range(2, page['paging']['pages'] + 1):
entries.extend(self._parse_page_data(
self._download_json(url + '&page=%d' % page_num, display_id)['data']))
return entries
class AdobeTVShowIE(AdobeTVPlaylistBaseIE):
_VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?show/(?P<id>[^/]+)'
_TEST = {
'url': 'http://tv.adobe.com/show/the-complete-picture-with-julieanne-kost',
'info_dict': {
'id': '36',
'title': 'The Complete Picture with Julieanne Kost',
'description': 'md5:fa50867102dcd1aa0ddf2ab039311b27',
},
'playlist_mincount': 136,
}
def _get_element_url(self, element_data):
return element_data['urls'][0]
def _real_extract(self, url):
language, show_urlname = re.match(self._VALID_URL, url).groups()
if not language:
language = 'en'
query = 'language=%s&show_urlname=%s' % (language, show_urlname)
show_data = self._download_json(self._API_BASE_URL + 'show/get/?%s' % query, show_urlname)['data'][0]
return self.playlist_result(
self._extract_playlist_entries(self._API_BASE_URL + 'episode/?%s' % query, show_urlname),
compat_str(show_data['id']),
show_data['show_name'],
show_data['show_description'])
class AdobeTVChannelIE(AdobeTVPlaylistBaseIE):
_VALID_URL = r'https?://tv\.adobe\.com/(?:(?P<language>fr|de|es|jp)/)?channel/(?P<id>[^/]+)(?:/(?P<category_urlname>[^/]+))?'
_TEST = {
'url': 'http://tv.adobe.com/channel/development',
'info_dict': {
'id': 'development',
},
'playlist_mincount': 96,
}
def _get_element_url(self, element_data):
return element_data['url']
def _real_extract(self, url):
language, channel_urlname, category_urlname = re.match(self._VALID_URL, url).groups()
if not language:
language = 'en'
query = 'language=%s&channel_urlname=%s' % (language, channel_urlname)
if category_urlname:
query += '&category_urlname=%s' % category_urlname
return self.playlist_result(
self._extract_playlist_entries(self._API_BASE_URL + 'show/?%s' % query, channel_urlname),
channel_urlname)
class AdobeTVVideoIE(InfoExtractor):
_VALID_URL = r'https?://video\.tv\.adobe\.com/v/(?P<id>\d+)'
_TEST = {
# From https://helpx.adobe.com/acrobat/how-to/new-experience-acrobat-dc.html?set=acrobat--get-started--essential-beginners
'url': 'https://video.tv.adobe.com/v/2456/',
'md5': '43662b577c018ad707a63766462b1e87',
'info_dict': {
'id': '2456',
'ext': 'mp4',
'title': 'New experience with Acrobat DC',
'description': 'New experience with Acrobat DC',
'duration': 248.667,
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_data = self._parse_json(self._search_regex(
r'var\s+bridge\s*=\s*([^;]+);', webpage, 'bridged data'), video_id)
formats = [{
'format_id': '%s-%s' % (determine_ext(source['src']), source.get('height')),
'url': source['src'],
'width': int_or_none(source.get('width')),
'height': int_or_none(source.get('height')),
'tbr': int_or_none(source.get('bitrate')),
} for source in video_data['sources']]
self._sort_formats(formats)
# For both metadata and downloaded files the duration varies among
# formats. I just pick the max one
duration = max(filter(None, [
float_or_none(source.get('duration'), scale=1000)
for source in video_data['sources']]))
subtitles = {}
for translation in video_data.get('translations', []):
lang_id = translation.get('language_w3c') or ISO639Utils.long2short(translation['language_medium'])
if lang_id not in subtitles:
subtitles[lang_id] = []
subtitles[lang_id].append({
'url': translation['vttPath'],
'ext': 'vtt',
})
return {
'id': video_id,
'formats': formats,
'title': video_data['title'],
'description': video_data.get('description'),
'thumbnail': video_data['video'].get('poster'),
'duration': duration,
'subtitles': subtitles,
} | unknown | codeparrot/codeparrot-clean | ||
from django import oldforms
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.forms import PasswordResetForm, PasswordChangeForm
from django.contrib.sites.models import Site, RequestSite
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.http import urlquote
from django.utils.translation import ugettext as _
def login(request, template_name='registration/login.html', redirect_field_name=REDIRECT_FIELD_NAME):
"Displays the login form and handles the login action."
manipulator = AuthenticationForm()
redirect_to = request.REQUEST.get(redirect_field_name, '')
if request.POST:
errors = manipulator.get_validation_errors(request.POST)
if not errors:
# Light security check -- make sure redirect_to isn't garbage.
if not redirect_to or '//' in redirect_to or ' ' in redirect_to:
from django.conf import settings
redirect_to = settings.LOGIN_REDIRECT_URL
from django.contrib.auth import login
login(request, manipulator.get_user())
if request.session.test_cookie_worked():
request.session.delete_test_cookie()
return HttpResponseRedirect(redirect_to)
else:
errors = {}
request.session.set_test_cookie()
if Site._meta.installed:
current_site = Site.objects.get_current()
else:
current_site = RequestSite(request)
return render_to_response(template_name, {
'form': oldforms.FormWrapper(manipulator, request.POST, errors),
redirect_field_name: redirect_to,
'site_name': current_site.name,
}, context_instance=RequestContext(request))
def logout(request, next_page=None, template_name='registration/logged_out.html'):
"Logs out the user and displays 'You are logged out' message."
from django.contrib.auth import logout
logout(request)
if next_page is None:
return render_to_response(template_name, {'title': _('Logged out')}, context_instance=RequestContext(request))
else:
# Redirect to this page until the session has been cleared.
return HttpResponseRedirect(next_page or request.path)
def logout_then_login(request, login_url=None):
"Logs out the user if he is logged in. Then redirects to the log-in page."
if not login_url:
from django.conf import settings
login_url = settings.LOGIN_URL
return logout(request, login_url)
def redirect_to_login(next, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"Redirects the user to the login page, passing the given 'next' page"
if not login_url:
from django.conf import settings
login_url = settings.LOGIN_URL
return HttpResponseRedirect('%s?%s=%s' % (login_url, urlquote(redirect_field_name), urlquote(next)))
def password_reset(request, is_admin_site=False, template_name='registration/password_reset_form.html',
email_template_name='registration/password_reset_email.html'):
new_data, errors = {}, {}
form = PasswordResetForm()
if request.POST:
new_data = request.POST.copy()
errors = form.get_validation_errors(new_data)
if not errors:
if is_admin_site:
form.save(domain_override=request.META['HTTP_HOST'])
else:
if Site._meta.installed:
form.save(email_template_name=email_template_name)
else:
form.save(domain_override=RequestSite(request).domain, email_template_name=email_template_name)
return HttpResponseRedirect('%sdone/' % request.path)
return render_to_response(template_name, {'form': oldforms.FormWrapper(form, new_data, errors)},
context_instance=RequestContext(request))
def password_reset_done(request, template_name='registration/password_reset_done.html'):
return render_to_response(template_name, context_instance=RequestContext(request))
def password_change(request, template_name='registration/password_change_form.html'):
new_data, errors = {}, {}
form = PasswordChangeForm(request.user)
if request.POST:
new_data = request.POST.copy()
errors = form.get_validation_errors(new_data)
if not errors:
form.save(new_data)
return HttpResponseRedirect('%sdone/' % request.path)
return render_to_response(template_name, {'form': oldforms.FormWrapper(form, new_data, errors)},
context_instance=RequestContext(request))
password_change = login_required(password_change)
def password_change_done(request, template_name='registration/password_change_done.html'):
return render_to_response(template_name, context_instance=RequestContext(request)) | unknown | codeparrot/codeparrot-clean | ||
/* Copyright 2021 - 2025 R. Thomas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "logging.hpp"
#include "LIEF/DEX/Field.hpp"
#include "LIEF/DEX/Class.hpp"
#include "LIEF/DEX/hash.hpp"
#include "LIEF/DEX/enums.hpp"
#include "LIEF/DEX/EnumToString.hpp"
#include "LIEF/DEX/Method.hpp"
#include <numeric>
#include <utility>
namespace LIEF {
namespace DEX {
Field::Field(const Field&) = default;
Field& Field::operator=(const Field&) = default;
Field::Field() = default;
Field::Field(std::string name, Class* parent) :
name_{std::move(name)},
parent_{parent}
{}
const std::string& Field::name() const {
return name_;
}
bool Field::has_class() const {
return parent_ != nullptr;
}
const Class* Field::cls() const {
return parent_;
}
Class* Field::cls() {
return const_cast<Class*>(static_cast<const Field*>(this)->cls());
}
size_t Field::index() const {
return original_index_;
}
bool Field::is_static() const {
return is_static_;
}
void Field::set_static(bool v) {
is_static_ = v;
}
bool Field::has(ACCESS_FLAGS f) const {
return (access_flags_ & f) != 0u;
}
Field::access_flags_list_t Field::access_flags() const {
Field::access_flags_list_t flags;
std::copy_if(std::begin(access_flags_list), std::end(access_flags_list),
std::back_inserter(flags),
[this] (ACCESS_FLAGS f) { return has(f); });
return flags;
}
const Type* Field::type() const {
CHECK(type_ != nullptr, "Type is null!");
return type_;
}
Type* Field::type() {
return const_cast<Type*>(static_cast<const Field*>(this)->type());
}
void Field::accept(Visitor& visitor) const {
visitor.visit(*this);
}
std::ostream& operator<<(std::ostream& os, const Field& field) {
std::string pretty_cls_name = field.cls()->fullname();
if (!pretty_cls_name.empty()) {
pretty_cls_name = pretty_cls_name.substr(1, pretty_cls_name.size() - 2);
std::replace(std::begin(pretty_cls_name), std::end(pretty_cls_name), '/', '.');
}
Method::access_flags_list_t aflags = field.access_flags();
std::string flags_str = std::accumulate(
std::begin(aflags),
std::end(aflags),
std::string{},
[] (const std::string& l, ACCESS_FLAGS r) {
std::string str = to_string(r);
std::transform(std::begin(str), std::end(str), std::begin(str), ::tolower);
return l.empty() ? str : l + " " + str;
});
if (!flags_str.empty()) {
os << flags_str << " ";
}
os << field.type()
<< " "
<< pretty_cls_name << "->" << field.name();
return os;
}
Field::~Field() = default;
}
} | cpp | github | https://github.com/nodejs/node | deps/LIEF/src/DEX/Field.cpp |
'''
These classes are based on starter code from CS 221 class at Stanford
'''
import collections, random
from math import sqrt
# Abstract class: an RLAlgorithm performs reinforcement learning. All it needs
# to know is the set of available actions to take. The simulator (see
# simulate()) will call getAction() to get an action, perform the action, and
# then provide feedback (via incorporateFeedback()) to the RL algorithm, so it can adjust
# its parameters.
class Learner:
# Your algorithm will be asked to produce an action given a state.
def getAction(self, state):
raise NotImplementedError("Override me")
# We will call this function when simulating an MDP, and you should update
# parameters.
# If |state| is a terminal state, this function will be called with (s, a,
# 0, None). When this function is called, it indicates that taking action
# |action| in state |state| resulted in reward |reward| and a transition to state
# |newState|.
def incorporateFeedback(self, state, action, reward, newState):
raise NotImplementedError("Override me")
class QLearner(Learner):
    """Q-learning with function approximation and epsilon-greedy exploration.

    Q(s, a) is estimated by |model| evaluated on the features produced by
    |feature_extractor|; the model's weights are adjusted by gradient steps
    on the temporal-difference residual.
    """

    def __init__(self, model, actions, discount, feature_extractor, exploration_prob=0.2):
        # model: provides eval(features) -> float and
        #        update(features, residual, step_size).
        # actions: callable mapping a state to its list of legal actions.
        # discount: discount factor applied to the successor state's value.
        self.discount = discount
        self.feature_extractor = feature_extractor
        self.exploration_prob = exploration_prob
        self.model = model
        self.numIters = 0
        self.actions = actions

    def getQ(self, state, action):
        """Return the model's current estimate of Q(state, action)."""
        return self.model.eval(self.feature_extractor(state, action))

    def getAction(self, state):
        """Epsilon-greedy action selection.

        With probability |exploration_prob| a uniformly random legal action
        is returned; otherwise the action with the highest Q-value.

        Fix: the previous implementation built (Q, action) tuples and took
        max() over them, which falls back to comparing the actions
        themselves when Q-values tie and raises TypeError for unorderable
        action types.  Using key= avoids that; ties now resolve to the
        first action listed by self.actions(state).
        """
        self.numIters += 1
        if random.random() < self.exploration_prob:
            return random.choice(self.actions(state))
        return max(self.actions(state), key=lambda action: self.getQ(state, action))

    def getStepSize(self):
        """Step size (learning rate) used for weight updates."""
        return 0.01

    def incorporateFeedback(self, state, action, reward, newState):
        """Take a gradient step on the TD residual for (s, a, r, s').

        |newState| is None when |state| was terminal, in which case the
        target is just |reward|.
        """
        prediction = self.getQ(state, action)
        if newState is None:
            target = reward
        else:
            target = reward + self.discount * max(
                self.getQ(newState, newAction)
                for newAction in self.actions(newState))
        residual = target - prediction
        features = self.feature_extractor(state, action)
        self.model.update(features, residual, self.getStepSize())
import { flushSync } from 'svelte';
import { test } from '../../test';

// Three identical checkbox groups (rendered by an `each` block), each bound
// to its own array; the <p> following each group shows that group's current
// selection.  Checking a box must update only its own group's summary.
export default test({
	html: `
		<label><input type="checkbox" value="x"> x</label>
		<label><input type="checkbox" value="y"> y</label>
		<label><input type="checkbox" value="z"> z</label>
		<p></p>
		<label><input type="checkbox" value="x"> x</label>
		<label><input type="checkbox" value="y"> y</label>
		<label><input type="checkbox" value="z"> z</label>
		<p></p>
		<label><input type="checkbox" value="x"> x</label>
		<label><input type="checkbox" value="y"> y</label>
		<label><input type="checkbox" value="z"> z</label>
		<p></p>
	`,

	test({ assert, target, window }) {
		const inputs = target.querySelectorAll('input');

		// All nine checkboxes (3 groups x 3 options) start unchecked.
		assert.equal(inputs[0].checked, false);
		assert.equal(inputs[1].checked, false);
		assert.equal(inputs[2].checked, false);
		assert.equal(inputs[3].checked, false);
		assert.equal(inputs[4].checked, false);
		assert.equal(inputs[5].checked, false);
		assert.equal(inputs[6].checked, false);
		assert.equal(inputs[7].checked, false);
		assert.equal(inputs[8].checked, false);

		const event = new window.Event('change');

		// Check 'z' in the first group: only the first <p> should update.
		inputs[2].checked = true;
		inputs[2].dispatchEvent(event);
		flushSync();

		assert.htmlEqual(
			target.innerHTML,
			`
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p>z</p>
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p></p>
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p></p>
		`
		);

		// Check 'y' in the second group: only the second <p> should update.
		inputs[4].checked = true;
		inputs[4].dispatchEvent(event);
		flushSync();

		assert.htmlEqual(
			target.innerHTML,
			`
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p>z</p>
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p>y</p>
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p></p>
		`
		);

		// A second selection in the same group appends to that group's summary.
		inputs[5].checked = true;
		inputs[5].dispatchEvent(event);
		flushSync();

		assert.htmlEqual(
			target.innerHTML,
			`
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p>z</p>
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p>y, z</p>
			<label><input type="checkbox" value="x"> x</label>
			<label><input type="checkbox" value="y"> y</label>
			<label><input type="checkbox" value="z"> z</label>
			<p></p>
		`
		);
	}
});
#!/usr/bin/python2.4
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright 2009 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
import cherrypy
import itertools

import pkg.fmri
import pkg.misc
import pkg.server.api_errors as api_errors
import pkg.server.catalog
import pkg.server.query_parser as qp
import pkg.version
import pkg.version as version
CURRENT_API_VERSION = 5
class BaseInterface(object):
    """Root API object handed out by the server to clients.

    Every other interface object in this module is constructed from one
    of these; it bundles the private references (request, server config,
    repository config) those interfaces need to provide functionality.
    """

    # Semi-private handles shared with the per-feature interfaces below.
    _request = None     # cherrypy request object
    _svrconfig = None   # SvrConfig object
    _rcconfig = None    # RepositoryConfig object

    def __init__(self, request, svrconfig, rcconfig):
        self._rcconfig = rcconfig
        self._svrconfig = svrconfig
        self._request = request
class _Interface(object):
    """Private base class for API interface objects.

    Rejects construction for client API versions this server build does
    not understand.
    """

    def __init__(self, version_id, base):
        # Versions of the client API this server remains compatible with.
        if version_id not in (3, 4, 5):
            raise api_errors.VersionException(CURRENT_API_VERSION,
                version_id)
class CatalogInterface(_Interface):
    """This class presents an interface to server catalog objects that
    clients may use.

    NOTE: this module targets Python 2 (see the ``results.next()`` and
    ``sorted(..., cmp=...)`` usage in search()).
    """

    def __init__(self, version_id, base):
        _Interface.__init__(self, version_id, base)
        # A mirror serves package content only, so no catalog is exposed
        # when the server is operating in mirror mode.
        catalog = None
        if not base._svrconfig.is_mirror():
            catalog = base._svrconfig.catalog
        self.__catalog = catalog

    def fmris(self):
        """Returns a list of FMRIs as it iterates over the contents of
        the server's catalog.  Returns an empty list if the catalog is
        not available.
        """
        if not self.__catalog:
            return []
        return self.__catalog.fmris()

    def get_matching_pattern_fmris(self, patterns):
        """Returns a sorted list of PkgFmri objects, newest versions
        first, for packages matching those found in the 'patterns' list.
        """
        c = self.__catalog
        if not c:
            return []
        return pkg.server.catalog.extract_matching_fmris(c.fmris(),
            patterns=patterns)

    def get_matching_version_fmris(self, versions):
        """Returns a sorted list of PkgFmri objects, newest versions
        first, for packages matching those found in the 'versions' list.

        'versions' should be a list of strings of the format:
            release,build_release-branch:datetime

        ...with a value of '*' provided for any component to be ignored.
        '*' or '?' may be used within each component value and will act
        as wildcard characters ('*' for one or more characters, '?' for
        a single character).
        """
        c = self.__catalog
        if not c:
            return []
        return pkg.server.catalog.extract_matching_fmris(c.fmris(),
            versions=versions)

    @property
    def last_modified(self):
        """Returns a datetime object representing the date and time at
        which the catalog was last modified.  Returns None if not
        available.
        """
        if not self.__catalog:
            return None
        lm = self.__catalog.last_modified()
        if not lm:
            return None
        return pkg.server.catalog.ts_to_datetime(lm)

    @property
    def package_count(self):
        """The total number of packages in the catalog.  Returns None
        if the catalog is not available.
        """
        if not self.__catalog:
            return None
        return self.__catalog.npkgs()

    def search(self, tokens, case_sensitive=False,
        return_type=qp.Query.RETURN_PACKAGES, start_point=None,
        num_to_return=None, matching_version=None, return_latest=False):
        """Searches the catalog for actions or packages (as determined
        by 'return_type') matching the specified 'tokens'.

        'tokens' is a string using pkg(5) query syntax.

        'case_sensitive' is an optional, boolean value indicating
        whether matching entries must have the same case as that of
        the provided tokens.

        'return_type' is an optional, constant value indicating the
        type of results to be returned.  This constant value should be
        one provided by the pkg.server.query_parser.Query class.

        'start_point' is an optional, integer value indicating how many
        search results should be discarded before returning any results.
        None is interpreted to mean 0.

        'num_to_return' is an optional, integer value indicating how
        many search results should be returned.  None means return all
        results.

        'matching_version' is a string in the format expected by the
        pkg.version.MatchingVersion class that will be used to further
        filter the search results as they are retrieved.

        'return_latest' is an optional, boolean value that will cause
        only the latest versions of packages to be returned.  Ignored
        if 'return_type' is not qp.Query.RETURN_PACKAGES.
        """
        if not tokens:
            return []
        tokens = tokens.split()
        if not self.search_available:
            return []

        if start_point is None:
            start_point = 0

        def filter_results(results, mver):
            # Generator applying, in order: version filtering,
            # latest-version de-duplication, and start/limit windowing.
            found = 0
            last_stem = None
            for result in results:
                # Python 2 semantics: when num_to_return is None this
                # comparison is False (int >= None), i.e. no limit.
                # On Python 3 it would raise TypeError.
                if found and \
                    ((found - start_point) >= num_to_return):
                    break

                if result[1] == qp.Query.RETURN_PACKAGES:
                    pfmri = result[2]
                elif result[1] == qp.Query.RETURN_ACTIONS:
                    pfmri = result[2][0]

                if mver is not None:
                    if mver != version.Version(pfmri.split(
                        "@", 1)[1], None):
                        continue

                if return_latest and \
                    result[1] == qp.Query.RETURN_PACKAGES:
                    # Latest version filtering can only be
                    # done for packages as only they are
                    # guaranteed to be in version order.
                    stem = result[2].split("@", 1)[0]
                    if last_stem == stem:
                        continue
                    else:
                        last_stem = stem

                found += 1
                # Skip entries until the requested start_point is reached.
                if found > start_point:
                    yield result

        def filtered_search(results, mver):
            # Peek at the first result to learn the return type; the
            # Python 2 iterator protocol (.next()) is used here.
            try:
                result = results.next()
            except StopIteration:
                return

            return_type = result[1]
            # Re-attach the consumed first result before filtering.
            results = itertools.chain([result], results)

            if return_latest and \
                return_type == qp.Query.RETURN_PACKAGES:
                # Sort by stem, then version descending, so the
                # de-duplication in filter_results keeps the newest.
                # sorted(cmp=...) and cmp() are Python 2-only.
                def cmp_fmris(resa, resb):
                    a = pkg.fmri.PkgFmri(resa[2])
                    b = pkg.fmri.PkgFmri(resb[2])

                    if a.pkg_name == b.pkg_name:
                        # Version in descending order.
                        return cmp(a.version,
                            b.version) * -1
                    return cmp(a, b)
                return filter_results(sorted(results,
                    cmp=cmp_fmris), mver)

            return filter_results(results, mver)

        if matching_version or return_latest:
            # Additional filtering needs to be performed and
            # the results yielded one by one.
            mver = None
            if matching_version:
                mver = version.MatchingVersion(matching_version,
                    None)

            # Results should be retrieved here so that an exception
            # can be immediately raised.
            query = qp.Query(" ".join(tokens), case_sensitive,
                return_type, None, None)
            results = self.__catalog.search(query)
            return filtered_search(results, mver)

        # Fast path: the catalog applies the limit/offset itself.
        query = qp.Query(" ".join(tokens), case_sensitive,
            return_type, num_to_return, start_point)
        return self.__catalog.search(query)

    @property
    def search_available(self):
        """Returns a Boolean value indicating whether search
        functionality is available for the catalog.
        """
        if not self.__catalog:
            return False
        return self.__catalog.search_available()
class ConfigInterface(_Interface):
    """This class presents a read-only interface to configuration
    information and statistics about the depot that clients may use.

    The statistics properties are thin proxies over counters maintained
    on the server's SvrConfig object.
    """

    def __init__(self, version_id, base):
        _Interface.__init__(self, version_id, base)
        self.__svrconfig = base._svrconfig
        self.__rcconfig = base._rcconfig

    @property
    def catalog_requests(self):
        """The number of /catalog operation requests that have occurred
        during the current server session.
        """
        return self.__svrconfig.catalog_requests

    @property
    def content_root(self):
        """The file system path where the server's content and web
        directories are located.
        """
        return self.__svrconfig.content_root

    @property
    def file_requests(self):
        """The number of /file operation requests that have occurred
        during the current server session.
        """
        return self.__svrconfig.file_requests

    @property
    def filelist_requests(self):
        """The number of /filelist operation requests that have occurred
        during the current server session.
        """
        return self.__svrconfig.flist_requests

    @property
    def filelist_file_requests(self):
        """The number of files served by /filelist operations requested
        during the current server session.
        """
        return self.__svrconfig.flist_files

    @property
    def in_flight_transactions(self):
        """The number of package transactions awaiting completion.
        """
        return len(self.__svrconfig.in_flight_trans)

    @property
    def manifest_requests(self):
        """The number of /manifest operation requests that have occurred
        during the current server session.
        """
        return self.__svrconfig.manifest_requests

    @property
    def mirror(self):
        """A Boolean value indicating whether the server is currently
        operating in mirror mode.
        """
        return self.__svrconfig.mirror

    @property
    def readonly(self):
        """A Boolean value indicating whether the server is currently
        operating in readonly mode.
        """
        return self.__svrconfig.read_only

    @property
    def rename_requests(self):
        """The number of /rename operation requests that have occurred
        during the current server session.
        """
        return self.__svrconfig.pkgs_renamed

    @property
    def web_root(self):
        """The file system path where the server's web content is
        located.
        """
        return self.__svrconfig.web_root

    def get_repo_attrs(self):
        """Returns a dictionary of repository configuration
        attributes organized by section, with each section's keys
        as a list.

        Available attributes are as follows:

        Section     Attribute        Description
        ==========  ==========       ===============
        publisher   alias            An alternative name for the
                                     publisher of the packages in
                                     the repository.

                    prefix           The name of the publisher of
                                     the packages in the repository.

        repository  collection_type  A constant value indicating the
                                     type of packages in the
                                     repository.  See the pydoc for
                                     pkg.client.publisher.Repository
                                     for details.

                    description      A string value containing a
                                     descriptive paragraph for the
                                     repository.

                    detailed_url     A comma-separated list of URIs
                                     where more information about the
                                     repository can be found.

                    legal_uris       A comma-separated list of URIs
                                     where licensing, legal, and
                                     terms of service information
                                     for the repository can be found.

                    maintainer       A human readable string
                                     describing the entity
                                     maintaining the repository.  For
                                     an individual, this string is
                                     expected to be their name or
                                     name and email.

                    maintainer_url   A URI associated with the entity
                                     maintaining the repository.

                    mirrors          A comma-separated list of URIs
                                     where package content can be
                                     retrieved.

                    name             A short, descriptive name for
                                     the repository.

                    origins          A comma-separated list of URIs
                                     where package metadata can be
                                     retrieved.

                    refresh_seconds  An integer value indicating the
                                     number of seconds clients should
                                     wait before refreshing cached
                                     repository catalog or repository
                                     metadata information.

                    registration_uri A URI indicating a location
                                     clients can use to register or
                                     obtain credentials needed to
                                     access the repository.

                    related_uris     A comma-separated list of URIs
                                     of related repositories that a
                                     client may be interested in.

        feed        id               A Universally Unique Identifier
                                     (UUID) used to permanently,
                                     uniquely identify the feed.

                    name             A short, descriptive name for
                                     RSS/Atom feeds generated by the
                                     depot serving the repository.

                    description      A descriptive paragraph for the
                                     feed.

                    publisher        A fully-qualified domain name or
                                     email address that is used to
                                     generate a unique identifier for
                                     each entry in the feed.

                    icon             A filename of a small image that
                                     is used to visually represent
                                     the feed.

                    logo             A filename of a large image that
                                     is used by user agents to
                                     visually brand or identify the
                                     feed.

                    window           A numeric value representing the
                                     number of hours, before the feed
                                     for the repository was last
                                     generated, to include when
                                     creating the feed for the
                                     repository updatelog.
        """
        return self.__rcconfig.get_attributes()

    def get_repo_attr_value(self, section, attr):
        """Returns the current value of a repository configuration
        attribute for the specified section.
        """
        return self.__rcconfig.get_attribute(section, attr)
class RequestInterface(_Interface):
    """This class presents an interface to server request objects that
    clients may use.
    """

    def __init__(self, version_id, base):
        _Interface.__init__(self, version_id, base)
        self.__request = base._request

    def get_accepted_languages(self):
        """Returns a list of the languages accepted by the client
        sorted by priority.  This information is derived from the
        Accept-Language header provided by the client.
        """
        alist = []
        for entry in self.__request.headers.elements("Accept-Language"):
            # Keep only the language tag, dropping any ";q=..." part.
            alist.append(str(entry).split(";")[0])
        return alist

    def get_rel_path(self, uri):
        """Returns uri relative to the current request path.
        """
        # NOTE(review): pkg.misc is never imported at the top of this
        # module; unless an earlier import happens to have loaded the
        # pkg.misc submodule, this raises AttributeError at runtime.
        # Add "import pkg.misc" to the module imports.
        return pkg.misc.get_rel_path(self.__request, uri)

    def log(self, msg):
        """Instruct the server to log the provided message to its error
        logs.
        """
        return cherrypy.log(msg)

    @property
    def params(self):
        """A dict containing the parameters sent in the request, either
        in the query string or in the request body.
        """
        return self.__request.params

    @property
    def path_info(self):
        """A string containing the "path_info" portion of the requested
        URL.
        """
        return self.__request.path_info

    @property
    def query_string(self):
        """A string containing the "query_string" portion of the
        requested URL.
        """
        # NOTE(review): uses the global cherrypy.request rather than the
        # self.__request captured at construction time — presumably
        # equivalent during request handling; verify.
        return cherrypy.request.query_string

    def url(self, path="", qs="", script_name=None, relative=None):
        """Create an absolute URL for the given path.

        If 'path' starts with a slash ('/'), this will return (base +
        script_name + path + qs).  If it does not start with a slash,
        this returns (base url + script_name [+ request.path_info] +
        path + qs).

        If script_name is None, an appropriate value will be
        automatically determined from the current request path.

        If no parameters are specified, an absolute URL for the current
        request path (minus the querystring) by passing no args.  If
        url(qs=request.query_string), is called, the original client URL
        (assuming no internal redirections) should be returned.

        If relative is None or not provided, an appropriate value will
        be automatically determined.  If False, the output will be an
        absolute URL (including the scheme, host, vhost, and
        script_name).  If True, the output will instead be a URL that
        is relative to the current request path, perhaps including '..'
        atoms.  If relative is the string 'server', the output will
        instead be a URL that is relative to the server root; i.e., it
        will start with a slash.
        """
        return cherrypy.url(path=path, qs=qs, script_name=script_name,
            relative=relative)
# SPDX-License-Identifier: (GPL-2.0 OR BSD-2-Clause)
# Copyright (C) 2022 Renesas Electronics Corp.
%YAML 1.2
---
$id: http://devicetree.org/schemas/media/renesas,rzg2l-cru.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Renesas RZ/G2L (and alike SoC's) Camera Data Receiving Unit (CRU) Image processing
maintainers:
- Lad Prabhakar <prabhakar.mahadev-lad.rj@bp.renesas.com>
description:
  The CRU image processing module is a data conversion module equipped with pixel
  color-space conversion, a LUT, pixel-format conversion, etc. A MIPI CSI-2 input and
  a parallel (including ITU-R BT.656) input are provided as the image sensor interface.
properties:
compatible:
oneOf:
- items:
- enum:
- renesas,r9a07g043-cru # RZ/G2UL
- renesas,r9a07g044-cru # RZ/G2{L,LC}
- renesas,r9a07g054-cru # RZ/V2L
- const: renesas,rzg2l-cru
- const: renesas,r9a09g047-cru # RZ/G3E
reg:
maxItems: 1
interrupts:
oneOf:
- items:
- description: CRU Interrupt for image_conv
- description: CRU Interrupt for image_conv_err
- description: CRU AXI master error interrupt
- items:
- description: CRU Interrupt for image_conv
- description: CRU AXI master error interrupt
- description: CRU Video Data AXI Master Address 0 Write End interrupt
- description: CRU Statistics data AXI master addr 0 write end interrupt
- description: CRU Video statistics data AXI master addr 0 write end interrupt
interrupt-names:
oneOf:
- items:
- const: image_conv
- const: image_conv_err
- const: axi_mst_err
- items:
- const: image_conv
- const: axi_mst_err
- const: vd_addr_wend
- const: sd_addr_wend
- const: vsd_addr_wend
clocks:
items:
- description: CRU Main clock
- description: CRU Register access clock
- description: CRU image transfer clock
clock-names:
items:
- const: video
- const: apb
- const: axi
power-domains:
maxItems: 1
resets:
items:
- description: CRU_PRESETN reset terminal
- description: CRU_ARESETN reset terminal
reset-names:
items:
- const: presetn
- const: aresetn
ports:
$ref: /schemas/graph.yaml#/properties/ports
properties:
port@0:
$ref: /schemas/graph.yaml#/$defs/port-base
unevaluatedProperties: false
description:
Input port node, single endpoint describing a parallel input source.
properties:
endpoint:
$ref: video-interfaces.yaml#
unevaluatedProperties: false
properties:
hsync-active: true
vsync-active: true
bus-width: true
data-shift: true
port@1:
$ref: /schemas/graph.yaml#/properties/port
description:
Input port node, describing the Image Processing module connected to the
CSI-2 receiver.
required:
- compatible
- reg
- interrupts
- interrupt-names
- clocks
- clock-names
- resets
- reset-names
- power-domains
allOf:
- if:
properties:
compatible:
contains:
enum:
- renesas,r9a07g044-cru
- renesas,r9a07g054-cru
then:
properties:
interrupts:
maxItems: 3
interrupt-names:
maxItems: 3
ports:
required:
- port@0
- port@1
- if:
properties:
compatible:
contains:
enum:
- renesas,r9a07g043-cru
then:
properties:
interrupts:
maxItems: 3
interrupt-names:
maxItems: 3
ports:
properties:
port@0: false
required:
- port@1
- if:
properties:
compatible:
contains:
const: renesas,r9a09g047-cru
then:
properties:
interrupts:
minItems: 5
interrupt-names:
minItems: 5
ports:
properties:
port@0: false
required:
- port@1
additionalProperties: false
examples:
# Device node example with CSI-2
- |
#include <dt-bindings/clock/r9a07g044-cpg.h>
#include <dt-bindings/interrupt-controller/arm-gic.h>
cru: video@10830000 {
compatible = "renesas,r9a07g044-cru", "renesas,rzg2l-cru";
reg = <0x10830000 0x400>;
interrupts = <GIC_SPI 167 IRQ_TYPE_LEVEL_HIGH>,
<GIC_SPI 168 IRQ_TYPE_LEVEL_HIGH>,
<GIC_SPI 169 IRQ_TYPE_LEVEL_HIGH>;
interrupt-names = "image_conv", "image_conv_err", "axi_mst_err";
clocks = <&cpg CPG_MOD R9A07G044_CRU_VCLK>,
<&cpg CPG_MOD R9A07G044_CRU_PCLK>,
<&cpg CPG_MOD R9A07G044_CRU_ACLK>;
clock-names = "video", "apb", "axi";
power-domains = <&cpg>;
resets = <&cpg R9A07G044_CRU_PRESETN>,
<&cpg R9A07G044_CRU_ARESETN>;
reset-names = "presetn", "aresetn";
ports {
#address-cells = <1>;
#size-cells = <0>;
port@0 {
#address-cells = <1>;
#size-cells = <0>;
reg = <0>;
cru_parallel_in: endpoint@0 {
reg = <0>;
remote-endpoint = <&ov5642>;
hsync-active = <1>;
vsync-active = <1>;
};
};
port@1 {
#address-cells = <1>;
#size-cells = <0>;
reg = <1>;
cru_csi_in: endpoint@0 {
reg = <0>;
remote-endpoint = <&csi_cru_in>;
};
};
};
}; | unknown | github | https://github.com/torvalds/linux | Documentation/devicetree/bindings/media/renesas,rzg2l-cru.yaml |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#author:lijian
#date: 2016
#Copyright: free
import os
import re
from yuelibs import errno
from yuelibs import constants
from yuelibs import utils
class LocalFsStorage(object):
    """Local-filesystem backed storage area with image/ and template/ subdirs.

    On construction the directory layout is (re)created, and the backing
    mount point, disk device, and capacity figures are discovered by
    parsing the output of `df -h` and `lsblk -l`.
    """

    def __init__(self, path='', flag=False):
        # path: root directory of the storage area.
        # flag: when True, wipe any existing contents before recreating.
        self.path = path
        self.meta = os.path.join(path, 'meta')
        self.image = os.path.join(path, 'image')
        self.template = os.path.join(path, 'template')
        if flag:
            self.delete()
        self._create()
        self.mount = self._getMount()
        self.disk = self._getDevice()
        self.all, self.free = self._getStatus()

    def _create(self):
        """Create the root directory and its image/ and template/ subdirs."""
        if not (len(self.path) > 0 and os.path.exists(self.path)):
            os.makedirs(self.path)
        if not os.path.exists(self.image):
            os.mkdir(self.image)
        if not os.path.exists(self.template):
            os.mkdir(self.template)

    def delete(self):
        """Remove the entire storage tree; returns an errno-style code."""
        try:
            if os.path.exists(self.path):
                utils.execShellCommand('rm -rf %s' % self.path)
        except Exception:
            return errno.ERR_DELETE_STORAGE
        return errno.Success

    def getAllSpace(self):
        """Total capacity of the backing filesystem (as reported by df)."""
        return self.all

    def getFreeSpace(self):
        """Available capacity of the backing filesystem (as reported by df)."""
        return self.free

    def getDevice(self):
        """Disk device backing the storage mount point."""
        return self.disk

    def getMount(self):
        """Mount point containing self.path."""
        return self.mount

    def _getStatus(self):
        """Return (total, available) for the filesystem mounted at self.mount.

        Fix: the previous implementation tried to strip empty fields by
        removing elements from the list while iterating it (which skips
        elements); str.split() with no argument collapses the runs of
        whitespace in df's columnar output correctly.
        """
        out, err, errcode = utils.execShellCommand('df -h')
        for line in out.split('\n'):
            # df columns: Filesystem Size Used Avail Use% Mounted-on
            fields = line.split()
            if len(fields) >= 4 and fields[-1] == self.mount:
                return (fields[1], fields[3])
        return (0, 0)

    def _getDevice(self):
        """Return the whole-disk device whose partition is mounted at
        self.mount, or None if it cannot be determined.

        Fix: fields are now split on arbitrary whitespace and short lines
        are skipped, so blank lines no longer raise IndexError.
        """
        out, err, errcode = utils.execShellCommand('lsblk -l')
        disk = ''
        for line in out.split('\n'):
            fields = line.split()
            if len(fields) < 2:
                continue
            # Remember the most recent whole-disk row; its partitions
            # follow it in lsblk's listing.
            if fields[-2] == 'disk':
                disk = fields[0]
                continue
            if fields[-1] == self.mount:
                return disk

    def _isMount(self, path):
        return utils.ismount(path)

    def _getMount(self):
        """Walk self.path upward until a mount point is found; '/' otherwise.

        Fix: the previous loop only ever tested self.path itself (it never
        removed path components), so any nested path reported '/'.
        """
        path = self.path
        if path.endswith('/') and len(path) > 1:
            path = path[:-1]
        while len(path) > 1:
            if self._isMount(path):
                return path
            path = os.path.dirname(path)
        return '/'
def getIso(isopath):
    """Return the names of all *.iso / *.ISO files found anywhere under
    isopath (names only, not paths)."""
    found = []
    for _parent, _dirnames, filenames in os.walk(isopath):
        found.extend(name for name in filenames
                     if name.endswith(('.iso', '.ISO')))
    return found
import { withPageAuthRequired } from "@auth0/nextjs-auth0";
import Layout from "../../components/layout";
import { User } from "../../interfaces";
type ProfileProps = {
user: User;
};
// Server-side rendered profile page: the `user` prop is supplied during SSR
// by `withPageAuthRequired` (see getServerSideProps below), so the markup
// arrives already populated with the user's data.
export default function Profile({ user }: ProfileProps) {
  return (
    <Layout user={user}>
      <h1>Profile</h1>

      <div>
        <h3>Profile (server rendered)</h3>
        <img src={user.picture} alt="user picture" />
        <p>nickname: {user.nickname}</p>
        <p>name: {user.name}</p>
      </div>
    </Layout>
  );
}
// Protected route: authentication is checked on the server before the page
// renders (SSR).  This is slower than a static page with client-side
// authentication, but guarantees the user is signed in before any HTML ships.
export const getServerSideProps = withPageAuthRequired();
/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_TFRT_FALLBACK_OP_KERNEL_RUNNER_CACHE_H_
#define TENSORFLOW_CORE_TFRT_FALLBACK_OP_KERNEL_RUNNER_CACHE_H_
#include <functional>
#include <memory>
#include "absl/container/flat_hash_map.h"
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/strings/string_view.h"
#include "tensorflow/core/tfrt/fallback/op_kernel_runner.h"
#include "tfrt/host_context/location.h" // from @tf_runtime
namespace tensorflow {
namespace tfrt_stub {
// Hash/equality key identifying an op by its BEF location.  Two keys are
// equal only when both the location's opaque data and its owning
// LocationHandler match; AbslHashValue hashes exactly those two fields so
// hashing and equality stay consistent.
class OpLocationKey {
 public:
  explicit OpLocationKey(tfrt::Location loc) : loc_(loc) {}

  template <typename H>
  friend H AbslHashValue(H h, const OpLocationKey& key) {
    // NOTE: Each BEF file has its own LocationHandler. Using LocationHandler
    // as part of cache key here can avoid cache collision between different
    // BEF file.
    return H::combine(std::move(h), key.loc_.data, key.loc_.GetHandler());
  }

  friend bool operator==(const OpLocationKey& x, const OpLocationKey& y) {
    return x.loc_.data == y.loc_.data &&
           x.loc_.GetHandler() == y.loc_.GetHandler();
  }

 private:
  tfrt::Location loc_;
};
// OpKernelRunnerCache is similar to OpKernelRunnerTable but thread-safe.
// OpKernelRunnerCache is similar to OpKernelRunnerTable but thread-safe.
class OpKernelRunnerCache {
 public:
  OpKernelRunnerCache() = default;

  // Returns the OpKernelRunner cached under `loc`, constructing and caching
  // it on first use.  `attr_builder` fills in the op's attributes when a new
  // runner must be built.
  absl::StatusOr<OpKernelRunner*> GetOrCreate(
      tfrt::Location loc, absl::string_view op_name,
      absl::string_view device_name, int num_args,
      const std::function<absl::Status(tensorflow::AttrValueMap*)>&
          attr_builder,
      const tensorflow::DeviceMgr& device_manager,
      const tensorflow::ProcessFunctionLibraryRuntime&
          process_function_library_runtime);

 private:
  // Guards map_ against concurrent lookups/inserts.
  mutable mutex mu_;
  absl::flat_hash_map<OpLocationKey, std::unique_ptr<OpKernelRunner>> map_
      TF_GUARDED_BY(mu_);
};
} // namespace tfrt_stub
} // namespace tensorflow
#endif // TENSORFLOW_CORE_TFRT_FALLBACK_OP_KERNEL_RUNNER_CACHE_H_ | c | github | https://github.com/tensorflow/tensorflow | tensorflow/core/tfrt/fallback/op_kernel_runner_cache.h |
"""Custom install scripts for CloudMan environment.
From Enis Afgan: https://bitbucket.org/afgane/mi-deployment
"""
import os
import contextlib
from fabric.api import cd
from fabric.contrib.files import settings, hide
from cloudbio.custom.shared import (_make_tmp_dir, _setup_conf_file)
from cloudbio.cloudman import (_configure_cloudman, _configure_novnc,
_configure_desktop, _configure_ec2_autorun)
from cloudbio.galaxy import _install_nginx
CDN_ROOT_URL = "http://linuxcourse.rutgers.edu/rate/Clusters/download"
REPO_ROOT_URL = "https://bitbucket.org/afgane/mi-deployment/raw/tip"
def install_cloudman(env):
    """ A meta method for installing all of CloudMan components.

    Allows CloudMan and all of its dependencies to be installed via:
        fab -f fabfile.py -i <key> -H ubuntu@<IP> install_custom:cloudman
    """
    env.logger.debug("Installing CloudMan")
    # Autorun configuration is taken from the local deployment rather than
    # pulled from the mi-deployment repository.
    _configure_cloudman(env, use_repo_autorun=False)
    install_nginx(env)
    install_proftpd(env)
    # NOTE(review): install_sge is not defined in this portion of the file;
    # it is presumably defined further down — verify.
    install_sge(env)
    install_novnc(env)
def install_ec2_autorun(env):
    """Delegate to cloudbio.cloudman's EC2 autorun configuration."""
    _configure_ec2_autorun(env)
def install_novnc(env):
    """Configure noVNC and the desktop via the cloudbio.cloudman helpers."""
    _configure_novnc(env)
    _configure_desktop(env)
def install_nginx(env):
    """Install nginx via the shared cloudbio.galaxy installer."""
    _install_nginx(env)
def install_proftpd(env):
    """Highly configurable GPL-licensed FTP server software.
    http://proftpd.org/

    Builds ProFTPd from source with PostgreSQL-backed SQL authentication
    modules, installs it under ``env.install_dir``/proftpd, fetches an
    init.d script plus configuration, and activates the tree via GNU stow.
    """
    version = "1.3.4c"
    postgres_ver = "9.1"
    url = "ftp://ftp.tpnet.pl/pub/linux/proftpd/distrib/source/proftpd-%s.tar.gz" % version
    # SQL auth modules are needed for database-backed FTP accounts.
    modules = "mod_sql:mod_sql_postgres:mod_sql_passwd"
    extra_modules = env.get("extra_proftp_modules", "")  # Comma separated list of extra modules
    if extra_modules:
        # ./configure expects a colon-separated module list.
        modules = "%s:%s" % (modules, extra_modules.replace(",", ":"))
    install_dir = os.path.join(env.install_dir, 'proftpd')
    remote_conf_dir = os.path.join(install_dir, "etc")
    # Skip install if already available
    if env.safe_exists(remote_conf_dir):
        env.logger.debug("ProFTPd seems to already be installed in {0}".format(install_dir))
        return
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s" % url)
            with settings(hide('stdout')):
                env.safe_run("tar xvzf %s" % os.path.split(url)[1])
            with cd("proftpd-%s" % version):
                # CFLAGS points at the PostgreSQL headers for the SQL modules.
                env.safe_run("CFLAGS='-I/usr/include/postgresql' ./configure --prefix=%s "
                             "--disable-auth-file --disable-ncurses --disable-ident --disable-shadow "
                             "--enable-openssl --with-modules=%s "
                             "--with-libraries=/usr/lib/postgresql/%s/lib" % (install_dir, modules, postgres_ver))
                env.safe_sudo("make")
                env.safe_sudo("make install")
                env.safe_sudo("make clean")
    # Get the init.d startup script
    initd_script = 'proftpd.initd'
    initd_url = os.path.join(REPO_ROOT_URL, 'conf_files', initd_script)
    remote_file = "/etc/init.d/proftpd"
    env.safe_sudo("wget --output-document=%s %s" % (remote_file, initd_url))
    # Point the init script at the actual install location.
    env.safe_sed(remote_file, 'REPLACE_THIS_WITH_CUSTOM_INSTALL_DIR', install_dir, use_sudo=True)
    env.safe_sudo("chmod 755 %s" % remote_file)
    # Set the configuration file
    conf_file = 'proftpd.conf'
    remote_file = os.path.join(remote_conf_dir, conf_file)
    # Fall back to defaults when the deployment config does not set these.
    if "postgres_port" not in env:
        env.postgres_port = '5910'
    if "galaxy_ftp_user_password" not in env:
        env.galaxy_ftp_user_password = 'fu5yOj2sn'
    proftpd_conf = {'galaxy_uid': env.safe_run('id -u galaxy'),
                    'galaxy_fs': '/mnt/galaxy',  # Should be a var but uncertain how to get it
                    'install_dir': install_dir}
    _setup_conf_file(env, remote_file, conf_file, overrides=proftpd_conf,
                     default_source="proftpd.conf.template")
    # Get the custom welcome msg file
    welcome_msg_file = 'welcome_msg.txt'
    welcome_url = os.path.join(REPO_ROOT_URL, 'conf_files', welcome_msg_file)
    env.safe_sudo("wget --output-document=%s %s" %
                  (os.path.join(remote_conf_dir, welcome_msg_file), welcome_url))
    # Stow
    env.safe_sudo("cd %s; stow proftpd" % env.install_dir)
    env.logger.debug("----- ProFTPd %s installed to %s -----" % (version, install_dir))
def install_sge(env):
"""Sun Grid Engine.
"""
out_dir = "ge6.2u5"
url = "%s/ge62u5_lx24-amd64.tar.gz" % CDN_ROOT_URL
install_dir = env.install_dir
if env.safe_exists(os.path.join(install_dir, out_dir)):
return
with _make_tmp_dir() as work_dir:
with contextlib.nested(cd(work_dir), settings(hide('stdout'))):
env.safe_run("wget %s" % url)
env.safe_sudo("chown %s %s" % (env.user, install_dir))
env.safe_run("tar -C %s -xvzf %s" % (install_dir, os.path.split(url)[1]))
env.logger.debug("SGE setup") | unknown | codeparrot/codeparrot-clean | ||
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing `LinearOperator` and sub-classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
import six
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.platform import test
class OperatorBuildInfo(object):
  """Container describing the matrix shape a test case expects.

  Stores the expected (batch) matrix shape plus arbitrary keyword metadata,
  which the test harness can read back as plain attributes.
  """

  def __init__(self, shape, **kwargs):
    self.shape = shape
    # Expose every extra keyword as an attribute of this instance.
    for key, value in kwargs.items():
      setattr(self, key, value)
@six.add_metaclass(abc.ABCMeta)  # pylint: disable=no-init
class LinearOperatorDerivedClassTest(test.TestCase):
  """Tests for derived classes.
  Subclasses should implement every abstractmethod, and this will enable all
  test methods to work.

  Each test below compares a `LinearOperator` instance against a dense
  matrix that is supposed to behave identically, sweeping over shapes,
  dtypes, placeholder usage and (where applicable) adjoint options.
  """

  # Absolute/relative tolerance for tests.
  _atol = {
      dtypes.float16: 1e-3,
      dtypes.float32: 1e-6,
      dtypes.float64: 1e-12,
      dtypes.complex64: 1e-6,
      dtypes.complex128: 1e-12
  }
  _rtol = {
      dtypes.float16: 1e-3,
      dtypes.float32: 1e-6,
      dtypes.float64: 1e-12,
      dtypes.complex64: 1e-6,
      dtypes.complex128: 1e-12
  }

  def assertAC(self, x, y):
    """Derived classes can set _atol, _rtol to get different tolerance."""
    dtype = dtypes.as_dtype(x.dtype)
    atol = self._atol[dtype]
    rtol = self._rtol[dtype]
    self.assertAllClose(x, y, atol=atol, rtol=rtol)

  @property
  def _adjoint_options(self):
    """Values of `adjoint` swept over in matmul/solve tests."""
    return [False, True]

  @property
  def _adjoint_arg_options(self):
    """Values of `adjoint_arg` swept over in matmul/solve tests."""
    return [False, True]

  @property
  def _dtypes_to_test(self):
    """Dtypes each test runs with; subclasses may override to restrict."""
    # TODO(langmore) Test tf.float16 once tf.matrix_solve works in 16bit.
    return [dtypes.float32, dtypes.float64, dtypes.complex64, dtypes.complex128]

  @property
  def _use_placeholder_options(self):
    """Whether to feed operators through placeholders of unknown shape."""
    return [False, True]

  @abc.abstractproperty
  def _operator_build_infos(self):
    """Returns list of OperatorBuildInfo, encapsulating the shape to test."""
    raise NotImplementedError("operator_build_infos has not been implemented.")

  @abc.abstractmethod
  def _operator_and_matrix(self, build_info, dtype, use_placeholder):
    """Build a batch matrix and an Operator that should have similar behavior.
    Every operator acts like a (batch) matrix. This method returns both
    together, and is used by tests.
    Args:
      build_info: `OperatorBuildInfo`, encoding shape information about the
        operator.
      dtype: Numpy dtype. Data type of returned array/operator.
      use_placeholder: Python bool. If True, initialize the operator with a
        placeholder of undefined shape and correct dtype.
    Returns:
      operator: `LinearOperator` subclass instance.
      mat: `Tensor` representing operator.
    """
    # Create a matrix as a numpy array with desired shape/dtype.
    # Create a LinearOperator that should have the same behavior as the matrix.
    raise NotImplementedError("Not implemented yet.")

  @abc.abstractmethod
  def _make_rhs(self, operator, adjoint, with_batch=True):
    """Make a rhs appropriate for calling operator.solve(rhs).
    Args:
      operator: A `LinearOperator`
      adjoint: Python `bool`. If `True`, we are making a 'rhs' value for the
        adjoint operator.
      with_batch: Python `bool`. If `True`, create `rhs` with the same batch
        shape as operator, and otherwise create a matrix without any batch
        shape.
    Returns:
      A `Tensor`
    """
    raise NotImplementedError("_make_rhs is not defined.")

  @abc.abstractmethod
  def _make_x(self, operator, adjoint, with_batch=True):
    """Make an 'x' appropriate for calling operator.matmul(x).
    Args:
      operator: A `LinearOperator`
      adjoint: Python `bool`. If `True`, we are making an 'x' value for the
        adjoint operator.
      with_batch: Python `bool`. If `True`, create `x` with the same batch shape
        as operator, and otherwise create a matrix without any batch shape.
    Returns:
      A `Tensor`
    """
    raise NotImplementedError("_make_x is not defined.")

  @property
  def _tests_to_skip(self):
    """List of test names to skip."""
    # Subclasses should over-ride if they want to skip some tests.
    # To skip "test_foo", add "foo" to this list.
    return []

  def _skip_if_tests_to_skip_contains(self, test_name):
    """If self._tests_to_skip contains test_name, raise SkipTest exception.
    See tests below for usage.
    Args:
      test_name: String name corresponding to a test.
    Raises:
      SkipTest Exception, if test_name is in self._tests_to_skip.
    """
    if test_name in self._tests_to_skip:
      self.skipTest(
          "{} skipped because it was added to self._tests_to_skip.".format(
              test_name))

  def test_to_dense(self):
    """operator.to_dense() must equal the reference dense matrix."""
    self._skip_if_tests_to_skip_contains("to_dense")
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        for dtype in self._dtypes_to_test:
          with self.session(graph=ops.Graph()) as sess:
            # Fixed graph seed keeps operator and matrix sampled consistently.
            sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
            operator, mat = self._operator_and_matrix(
                build_info, dtype, use_placeholder=use_placeholder)
            op_dense = operator.to_dense()
            if not use_placeholder:
              self.assertAllEqual(build_info.shape, op_dense.get_shape())
            op_dense_v, mat_v = sess.run([op_dense, mat])
            self.assertAC(op_dense_v, mat_v)

  def test_det(self):
    """operator.determinant() must match tf determinant of the matrix."""
    self._skip_if_tests_to_skip_contains("det")
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        for dtype in self._dtypes_to_test:
          with self.session(graph=ops.Graph()) as sess:
            sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
            operator, mat = self._operator_and_matrix(
                build_info, dtype, use_placeholder=use_placeholder)
            op_det = operator.determinant()
            if not use_placeholder:
              # Determinant drops the trailing two (matrix) dims.
              self.assertAllEqual(build_info.shape[:-2], op_det.get_shape())
            op_det_v, mat_det_v = sess.run(
                [op_det, linalg_ops.matrix_determinant(mat)])
            self.assertAC(op_det_v, mat_det_v)

  def test_log_abs_det(self):
    """operator.log_abs_determinant() must match slogdet of the matrix."""
    self._skip_if_tests_to_skip_contains("log_abs_det")
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        for dtype in self._dtypes_to_test:
          with self.session(graph=ops.Graph()) as sess:
            sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
            operator, mat = self._operator_and_matrix(
                build_info, dtype, use_placeholder=use_placeholder)
            op_log_abs_det = operator.log_abs_determinant()
            _, mat_log_abs_det = linalg.slogdet(mat)
            if not use_placeholder:
              self.assertAllEqual(
                  build_info.shape[:-2], op_log_abs_det.get_shape())
            op_log_abs_det_v, mat_log_abs_det_v = sess.run(
                [op_log_abs_det, mat_log_abs_det])
            self.assertAC(op_log_abs_det_v, mat_log_abs_det_v)

  def _test_matmul(self, with_batch):
    """Shared driver for matmul tests; `with_batch` controls x's batch dims."""
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        # If batch dimensions are omitted, but there are
        # no batch dimensions for the linear operator, then
        # skip the test case. This is already checked with
        # with_batch=True.
        if not with_batch and len(build_info.shape) <= 2:
          continue
        for dtype in self._dtypes_to_test:
          for adjoint in self._adjoint_options:
            for adjoint_arg in self._adjoint_arg_options:
              with self.session(graph=ops.Graph()) as sess:
                sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
                operator, mat = self._operator_and_matrix(
                    build_info, dtype, use_placeholder=use_placeholder)
                x = self._make_x(
                    operator, adjoint=adjoint, with_batch=with_batch)
                # If adjoint_arg, compute A X^H^H = A X.
                if adjoint_arg:
                  op_matmul = operator.matmul(
                      linalg.adjoint(x),
                      adjoint=adjoint,
                      adjoint_arg=adjoint_arg)
                else:
                  op_matmul = operator.matmul(x, adjoint=adjoint)
                mat_matmul = linear_operator_util.matmul_with_broadcast(
                    mat, x, adjoint_a=adjoint)
                if not use_placeholder:
                  self.assertAllEqual(op_matmul.get_shape(),
                                      mat_matmul.get_shape())
                op_matmul_v, mat_matmul_v = sess.run(
                    [op_matmul, mat_matmul])
                self.assertAC(op_matmul_v, mat_matmul_v)

  def test_matmul(self):
    """matmul with batch-shaped x."""
    self._skip_if_tests_to_skip_contains("matmul")
    self._test_matmul(with_batch=True)

  def test_matmul_with_broadcast(self):
    """matmul where x has no batch dims and must broadcast."""
    self._skip_if_tests_to_skip_contains("matmul_with_broadcast")
    self._test_matmul(with_batch=False)

  def _test_solve(self, with_batch):
    """Shared driver for solve tests; `with_batch` controls rhs's batch dims."""
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        # If batch dimensions are omitted, but there are
        # no batch dimensions for the linear operator, then
        # skip the test case. This is already checked with
        # with_batch=True.
        if not with_batch and len(build_info.shape) <= 2:
          continue
        for dtype in self._dtypes_to_test:
          for adjoint in self._adjoint_options:
            for adjoint_arg in self._adjoint_arg_options:
              with self.session(graph=ops.Graph()) as sess:
                sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
                operator, mat = self._operator_and_matrix(
                    build_info, dtype, use_placeholder=use_placeholder)
                rhs = self._make_rhs(
                    operator, adjoint=adjoint, with_batch=with_batch)
                # If adjoint_arg, solve A X = (rhs^H)^H = rhs.
                if adjoint_arg:
                  op_solve = operator.solve(
                      linalg.adjoint(rhs),
                      adjoint=adjoint,
                      adjoint_arg=adjoint_arg)
                else:
                  op_solve = operator.solve(
                      rhs, adjoint=adjoint, adjoint_arg=adjoint_arg)
                mat_solve = linear_operator_util.matrix_solve_with_broadcast(
                    mat, rhs, adjoint=adjoint)
                if not use_placeholder:
                  self.assertAllEqual(op_solve.get_shape(),
                                      mat_solve.get_shape())
                op_solve_v, mat_solve_v = sess.run([op_solve, mat_solve])
                self.assertAC(op_solve_v, mat_solve_v)

  def test_solve(self):
    """solve with batch-shaped rhs."""
    self._skip_if_tests_to_skip_contains("solve")
    self._test_solve(with_batch=True)

  def test_solve_with_broadcast(self):
    """solve where rhs has no batch dims and must broadcast."""
    self._skip_if_tests_to_skip_contains("solve_with_broadcast")
    self._test_solve(with_batch=False)

  def test_trace(self):
    """operator.trace() must match tf.trace of the matrix."""
    self._skip_if_tests_to_skip_contains("trace")
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        for dtype in self._dtypes_to_test:
          with self.session(graph=ops.Graph()) as sess:
            sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
            operator, mat = self._operator_and_matrix(
                build_info, dtype, use_placeholder=use_placeholder)
            op_trace = operator.trace()
            mat_trace = math_ops.trace(mat)
            if not use_placeholder:
              self.assertAllEqual(op_trace.get_shape(), mat_trace.get_shape())
            op_trace_v, mat_trace_v = sess.run([op_trace, mat_trace])
            self.assertAC(op_trace_v, mat_trace_v)

  def test_add_to_tensor(self):
    """operator.add_to_tensor(2*mat) must equal 3*mat."""
    self._skip_if_tests_to_skip_contains("add_to_tensor")
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        for dtype in self._dtypes_to_test:
          with self.session(graph=ops.Graph()) as sess:
            sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
            operator, mat = self._operator_and_matrix(
                build_info, dtype, use_placeholder=use_placeholder)
            op_plus_2mat = operator.add_to_tensor(2 * mat)
            if not use_placeholder:
              self.assertAllEqual(build_info.shape, op_plus_2mat.get_shape())
            op_plus_2mat_v, mat_v = sess.run([op_plus_2mat, mat])
            # operator == mat, so operator + 2*mat == 3*mat.
            self.assertAC(op_plus_2mat_v, 3 * mat_v)

  def test_diag_part(self):
    """operator.diag_part() must match matrix_diag_part of the matrix."""
    self._skip_if_tests_to_skip_contains("diag_part")
    for use_placeholder in self._use_placeholder_options:
      for build_info in self._operator_build_infos:
        for dtype in self._dtypes_to_test:
          with self.session(graph=ops.Graph()) as sess:
            sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
            operator, mat = self._operator_and_matrix(
                build_info, dtype, use_placeholder=use_placeholder)
            op_diag_part = operator.diag_part()
            mat_diag_part = array_ops.matrix_diag_part(mat)
            if not use_placeholder:
              self.assertAllEqual(mat_diag_part.get_shape(),
                                  op_diag_part.get_shape())
            op_diag_part_, mat_diag_part_ = sess.run(
                [op_diag_part, mat_diag_part])
            self.assertAC(op_diag_part_, mat_diag_part_)
@six.add_metaclass(abc.ABCMeta)
class SquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
  """Base test class appropriate for square operators.
  Sub-classes must still define all abstractmethods from
  LinearOperatorDerivedClassTest that are not defined here.
  """

  @property
  def _operator_build_infos(self):
    build_info = OperatorBuildInfo
    # non-batch operators (n, n) and batch operators.
    return [
        build_info((0, 0)),
        build_info((1, 1)),
        build_info((1, 3, 3)),
        build_info((3, 4, 4)),
        build_info((2, 1, 4, 4))]

  def _make_rhs(self, operator, adjoint, with_batch=True):
    # This operator is square, so rhs and x will have same shape.
    # adjoint value makes no difference because the operator shape doesn't
    # change since it is square, but be pedantic.
    return self._make_x(operator, adjoint=not adjoint, with_batch=with_batch)

  def _make_x(self, operator, adjoint, with_batch=True):
    """Random x of shape [B1,...,Bb, N, R] (or [N, R]) for matmul tests."""
    # Value of adjoint makes no difference because the operator is square.
    # Return the number of systems to solve, R, equal to 1 or 2.
    r = self._get_num_systems(operator)
    # If operator.shape = [B1,...,Bb, N, N] this returns a random matrix of
    # shape [B1,...,Bb, N, R], R = 1 or 2.
    if operator.shape.is_fully_defined():
      batch_shape = operator.batch_shape.as_list()
      n = operator.domain_dimension.value
      if with_batch:
        x_shape = batch_shape + [n, r]
      else:
        x_shape = [n, r]
    else:
      # Static shape unknown: build the shape dynamically as tensors.
      batch_shape = operator.batch_shape_tensor()
      n = operator.domain_dimension_tensor()
      if with_batch:
        x_shape = array_ops.concat((batch_shape, [n, r]), 0)
      else:
        x_shape = [n, r]
    return random_normal(x_shape, dtype=operator.dtype)

  def _get_num_systems(self, operator):
    """Get some number, either 1 or 2, depending on operator."""
    # Vary R with tensor rank so single and multiple RHS both get coverage.
    if operator.tensor_rank is None or operator.tensor_rank % 2:
      return 1
    else:
      return 2
@six.add_metaclass(abc.ABCMeta)
class NonSquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
  """Base test class appropriate for generic rectangular operators.
  Square shapes are never tested by this class, so if you want to test your
  operator with a square shape, create two test classes, the other subclassing
  SquareLinearOperatorFullMatrixTest.
  Sub-classes must still define all abstractmethods from
  LinearOperatorDerivedClassTest that are not defined here.
  """

  @property
  def _tests_to_skip(self):
    """List of test names to skip."""
    # Rectangular operators have no determinant and are not solved here.
    return ["solve", "solve_with_broadcast", "det", "log_abs_det"]

  @property
  def _operator_build_infos(self):
    build_info = OperatorBuildInfo
    # non-batch operators (n, n) and batch operators.
    return [
        build_info((2, 1)),
        build_info((1, 2)),
        build_info((1, 3, 2)),
        build_info((3, 3, 4)),
        build_info((2, 1, 2, 4))]

  def _make_rhs(self, operator, adjoint, with_batch=True):
    # TODO(langmore) Add once we're testing solve_ls.
    raise NotImplementedError(
        "_make_rhs not implemented because we don't test solve")

  def _make_x(self, operator, adjoint, with_batch=True):
    """Random x compatible with operator.matmul(x, adjoint=adjoint)."""
    # Return the number of systems for the argument 'x' for .matmul(x)
    r = self._get_num_systems(operator)
    # If operator.shape = [B1,...,Bb, M, N] this returns a random matrix of
    # shape [B1,...,Bb, N, R], R = 1 or 2.
    if operator.shape.is_fully_defined():
      batch_shape = operator.batch_shape.as_list()
      # The adjoint operator is N x M, so x must have M rows in that case.
      if adjoint:
        n = operator.range_dimension.value
      else:
        n = operator.domain_dimension.value
      if with_batch:
        x_shape = batch_shape + [n, r]
      else:
        x_shape = [n, r]
    else:
      # Static shape unknown: build the shape dynamically as tensors.
      batch_shape = operator.batch_shape_tensor()
      if adjoint:
        n = operator.range_dimension_tensor()
      else:
        n = operator.domain_dimension_tensor()
      if with_batch:
        x_shape = array_ops.concat((batch_shape, [n, r]), 0)
      else:
        x_shape = [n, r]
    return random_normal(x_shape, dtype=operator.dtype)

  def _get_num_systems(self, operator):
    """Get some number, either 1 or 2, depending on operator."""
    # Vary R with tensor rank so single and multiple RHS both get coverage.
    if operator.tensor_rank is None or operator.tensor_rank % 2:
      return 1
    else:
      return 2
def random_positive_definite_matrix(shape, dtype, force_well_conditioned=False):
  """[batch] positive definite matrix.
  Args:
    shape: `TensorShape` or Python list. Shape of the returned matrix.
    dtype: `TensorFlow` `dtype` or Python dtype.
    force_well_conditioned: Python bool. If `True`, returned matrix has
      eigenvalues with modulus in `(1, 4)`. Otherwise, eigenvalues are
      chi-squared random variables.
  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)
  if not tensor_util.is_tensor(shape):
    shape = tensor_shape.TensorShape(shape)
    # Matrix must be square.
    shape[-1].assert_is_compatible_with(shape[-2])
  with ops.name_scope("random_positive_definite_matrix"):
    # Build a random lower-triangular factor L and return L @ L^H, which is
    # positive definite (Cholesky-style construction).
    tril = random_tril_matrix(
        shape, dtype, force_well_conditioned=force_well_conditioned)
    return math_ops.matmul(tril, tril, adjoint_b=True)
def random_tril_matrix(shape,
                       dtype,
                       force_well_conditioned=False,
                       remove_upper=True):
  """[batch] lower triangular matrix.
  Args:
    shape: `TensorShape` or Python `list`. Shape of the returned matrix.
    dtype: `TensorFlow` `dtype` or Python dtype
    force_well_conditioned: Python `bool`. If `True`, returned matrix will have
      eigenvalues with modulus in `(1, 2)`. Otherwise, eigenvalues are unit
      normal random variables.
    remove_upper: Python `bool`.
      If `True`, zero out the strictly upper triangle.
      If `False`, the lower triangle of returned matrix will have desired
      properties, but will not have the strictly upper triangle zero'd out.
  Returns:
    `Tensor` with desired shape and dtype.
  """
  with ops.name_scope("random_tril_matrix"):
    # Totally random matrix. Has no nice properties.
    tril = random_normal(shape, dtype=dtype)
    if remove_upper:
      # Keep only the lower triangle (all sub-diagonals, no super-diagonals).
      tril = array_ops.matrix_band_part(tril, -1, 0)
    # Create a diagonal with entries having modulus in [1, 2].
    if force_well_conditioned:
      # A triangular matrix's eigenvalues are its diagonal entries, so
      # bounding the diagonal's modulus bounds the eigenvalues.
      maxval = ops.convert_to_tensor(np.sqrt(2.), dtype=dtype.real_dtype)
      diag = random_sign_uniform(
          shape[:-1], dtype=dtype, minval=1., maxval=maxval)
      tril = array_ops.matrix_set_diag(tril, diag)
    return tril
def random_normal(shape, mean=0.0, stddev=1.0, dtype=dtypes.float32, seed=None):
  """Tensor with (possibly complex) Gaussian entries.
  Samples are distributed like
  ```
  N(mean, stddev^2), if dtype is real,
  X + iY, where X, Y ~ N(mean, stddev^2) if dtype is complex.
  ```
  Args:
    shape: `TensorShape` or Python list. Shape of the returned tensor.
    mean: `Tensor` giving mean of normal to sample from.
    stddev: `Tensor` giving stdev of normal to sample from.
    dtype: `TensorFlow` `dtype` or numpy dtype
    seed: Python integer seed for the RNG.
  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)

  def _draw(rng_seed):
    # One real-valued N(mean, stddev^2) sample of the requested shape.
    return random_ops.random_normal(
        shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=rng_seed)

  with ops.name_scope("random_normal"):
    real_part = _draw(seed)
    if not dtype.is_complex:
      return real_part
    # Offset the seed so the imaginary part is independent of the real part.
    imag_seed = None if seed is None else seed + 1234
    return math_ops.complex(real_part, _draw(imag_seed))
def random_uniform(shape,
                   minval=None,
                   maxval=None,
                   dtype=dtypes.float32,
                   seed=None):
  """Tensor with (possibly complex) Uniform entries.
  Samples are distributed like
  ```
  Uniform[minval, maxval], if dtype is real,
  X + iY, where X, Y ~ Uniform[minval, maxval], if dtype is complex.
  ```
  Args:
    shape: `TensorShape` or Python list. Shape of the returned tensor.
    minval: `0-D` `Tensor` giving the minimum values.
    maxval: `0-D` `Tensor` giving the maximum values.
    dtype: `TensorFlow` `dtype` or Python dtype
    seed: Python integer seed for the RNG.
  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)
  with ops.name_scope("random_uniform"):
    samples = random_ops.random_uniform(
        shape, dtype=dtype.real_dtype, minval=minval, maxval=maxval, seed=seed)
    if dtype.is_complex:
      # Offset the seed so the imaginary part is independent of the real part.
      if seed is not None:
        seed += 12345
      more_samples = random_ops.random_uniform(
          shape,
          dtype=dtype.real_dtype,
          minval=minval,
          maxval=maxval,
          seed=seed)
      samples = math_ops.complex(samples, more_samples)
    return samples
def random_sign_uniform(shape,
                        minval=None,
                        maxval=None,
                        dtype=dtypes.float32,
                        seed=None):
  """Tensor with (possibly complex) random entries from a "sign Uniform".
  Letting `Z` be a random variable equal to `-1` and `1` with equal probability,
  Samples from this `Op` are distributed like
  ```
  Z * X, where X ~ Uniform[minval, maxval], if dtype is real,
  Z * (X + iY), where X, Y ~ Uniform[minval, maxval], if dtype is complex.
  ```
  Args:
    shape: `TensorShape` or Python list. Shape of the returned tensor.
    minval: `0-D` `Tensor` giving the minimum values.
    maxval: `0-D` `Tensor` giving the maximum values.
    dtype: `TensorFlow` `dtype` or Python dtype
    seed: Python integer seed for the RNG.
  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)
  with ops.name_scope("random_sign_uniform"):
    unsigned_samples = random_uniform(
        shape, minval=minval, maxval=maxval, dtype=dtype, seed=seed)
    # Offset the seed so the signs are independent of the magnitudes.
    if seed is not None:
      seed += 12
    # sign() of a Uniform[-1, 1] draw gives +/-1 with equal probability.
    signs = math_ops.sign(
        random_ops.random_uniform(shape, minval=-1., maxval=1., seed=seed))
    return unsigned_samples * math_ops.cast(signs, unsigned_samples.dtype)
def random_normal_correlated_columns(shape,
                                     mean=0.0,
                                     stddev=1.0,
                                     dtype=dtypes.float32,
                                     eps=1e-4,
                                     seed=None):
  """Batch matrix with (possibly complex) Gaussian entries and correlated cols.
  Returns random batch matrix `A` with specified element-wise `mean`, `stddev`,
  living close to an embedded hyperplane.
  Suppose `shape[-2:] = (M, N)`.
  If `M < N`, `A` is a random `M x N` [batch] matrix with iid Gaussian entries.
  If `M >= N`, then the columns of `A` will be made almost dependent as follows:
  ```
  L = random normal N x N-1 matrix, mean = 0, stddev = 1 / sqrt(N - 1)
  B = random normal M x N-1 matrix, mean = 0, stddev = stddev.
  G = (L B^H)^H, a random normal M x N matrix, living on N-1 dim hyperplane
  E = a random normal M x N matrix, mean = 0, stddev = eps
  mu = a constant M x N matrix, equal to the argument "mean"
  A = G + E + mu
  ```
  Args:
    shape: Python list of integers.
      Shape of the returned tensor. Must be at least length two.
    mean: `Tensor` giving mean of normal to sample from.
    stddev: `Tensor` giving stdev of normal to sample from.
    dtype: `TensorFlow` `dtype` or numpy dtype
    eps: Distance each column is perturbed from the low-dimensional subspace.
    seed: Python integer seed for the RNG.
  Returns:
    `Tensor` with desired shape and dtype.
  Raises:
    ValueError: If `shape` is not at least length 2.
  """
  dtype = dtypes.as_dtype(dtype)
  if len(shape) < 2:
    raise ValueError(
        "Argument shape must be at least length 2. Found: %s" % shape)
  # Shape is the final shape, e.g. [..., M, N]
  shape = list(shape)
  batch_shape = shape[:-2]
  m, n = shape[-2:]
  # If there is only one column, "they" are by definition correlated.
  # NOTE(review): this also falls back to iid samples when n < m (M > N),
  # which appears to contradict the docstring's "If M >= N" clause —
  # confirm whether the intended condition is `m < n`.
  if n < 2 or n < m:
    return random_normal(
        shape, mean=mean, stddev=stddev, dtype=dtype, seed=seed)
  # Shape of the matrix with only n - 1 columns that we will embed in higher
  # dimensional space.
  smaller_shape = batch_shape + [m, n - 1]
  # Shape of the embedding matrix, mapping batch matrices
  # from [..., N-1, M] to [..., N, M]
  embedding_mat_shape = batch_shape + [n, n - 1]
  # This stddev for the embedding_mat ensures final result has correct stddev.
  stddev_mat = 1 / np.sqrt(n - 1)
  with ops.name_scope("random_normal_correlated_columns"):
    smaller_mat = random_normal(
        smaller_shape, mean=0.0, stddev=stddev_mat, dtype=dtype, seed=seed)
    # Offset the seed so the embedding is independent of smaller_mat.
    if seed is not None:
      seed += 1287
    embedding_mat = random_normal(embedding_mat_shape, dtype=dtype, seed=seed)
    embedded_t = math_ops.matmul(embedding_mat, smaller_mat, transpose_b=True)
    embedded = array_ops.matrix_transpose(embedded_t)
    mean_mat = array_ops.ones_like(embedded) * mean
    # Perturb off the hyperplane by eps and shift by the requested mean.
    return embedded + random_normal(shape, stddev=eps, dtype=dtype) + mean_mat
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test cases for debug XLA dumps."""
import glob
import os
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest
class XlaDumpToDirTest(xla_test.XLATestCase):
  """Test that ensures XLA_FLAGS=--xla_dump_to=<dir> produces output."""

  def _compute(self):
    # Run a small segment_max computation under the XLA device scope so the
    # compiler is actually invoked and has an HLO module to dump.
    with self.session() as sess, self.device_scope():
      data = np.array([0], dtype=np.float32)
      indices = np.array([0], dtype=np.int32)
      d = array_ops.placeholder(data.dtype, shape=data.shape)
      i = array_ops.placeholder(indices.dtype, shape=indices.shape)
      sess.run(math_ops.segment_max_v2(data, indices, 1), {d: data, i: indices})

  def testDumpToTempDir(self):
    # XLA reads XLA_FLAGS from the environment; point the dump directory at
    # a fresh temp dir and verify module dump files show up there.
    tmp_dir = self.create_tempdir().full_path
    os.environ['XLA_FLAGS'] = '--xla_dump_to=' + tmp_dir
    self._compute()
    self.assertNotEmpty(glob.glob(os.path.join(tmp_dir, 'module_0*')))
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  googletest.main()
import collections
import weakref
from . import math
from ._SDL import *
from ._GL import *
from ._Window import *
# Event types yielded by get_more_events(); plain namedtuples so callers can
# dispatch on type and unpack the fields positionally.
UserQuitEvent = collections.namedtuple("UserQuitEvent", [])
ResizeEvent = collections.namedtuple("ResizeEvent", ["window", "new_size"])
KeyPressEvent = collections.namedtuple("KeyPressEvent", ["window", "key"])
KeyReleaseEvent = collections.namedtuple("KeyReleaseEvent", ["window", "key"])
MouseButtonPressEvent = collections.namedtuple("MouseButtonPressEvent", ["window", "position", "button"])
MouseButtonReleaseEvent = collections.namedtuple("MouseButtonReleaseEvent", ["window", "position", "button"])
# Maps from SDL event-type constants to the event classes defined above.
_key_events = {
    SDL_KEYDOWN: KeyPressEvent,
    SDL_KEYUP: KeyReleaseEvent,
}
_mouse_button_events = {
    SDL_MOUSEBUTTONDOWN: MouseButtonPressEvent,
    SDL_MOUSEBUTTONUP: MouseButtonReleaseEvent,
}
# Human-readable names for SDL mouse-button codes.
_mouse_button_names = {
    SDL_BUTTON_LEFT: "Left",
    SDL_BUTTON_MIDDLE: "Wheel",
    SDL_BUTTON_RIGHT: "Right",
}
def get_more_events():
    """Yield pending SDL events translated into this module's event tuples.

    Drains the SDL event queue without blocking. Events for windows this
    module does not track, and unrecognized event types, are silently
    dropped.
    """
    e = SDL_Event()
    while SDL_PollEvent(e):
        if e.type == SDL_QUIT:
            yield UserQuitEvent()
        elif e.type == SDL_WINDOWEVENT and e.window.windowID in Window._all:
            window = Window._all[e.window.windowID]
            if e.window.event == SDL_WINDOWEVENT_SIZE_CHANGED:
                # data1/data2 carry the new width/height for size events.
                w, h = e.window.data1, e.window.data2
                # Resize the GL viewport, but only if the window's context
                # is still alive (the weakref may already be dead).
                context = weakref.ref(window._context)()
                if context:
                    context.ensure_active()
                    glViewport(0, 0, w, h)
                yield ResizeEvent(window, (w, h))
        elif e.type in _key_events and e.key.windowID in Window._all:
            event = _key_events[e.type]
            window = Window._all[e.key.windowID]
            # SDL_GetKeyName returns bytes; decode to a text key name.
            key = str(SDL_GetKeyName(e.key.keysym.sym), "UTF-8")
            yield event(window, key)
        elif e.type in _mouse_button_events and e.button.windowID in Window._all:
            event = _mouse_button_events[e.type]
            window = Window._all[e.button.windowID]
            position = math.Vector(e.button.x, e.button.y)
            button = _mouse_button_names[e.button.button]
            yield event(window, position, button)
# Volatility
# Copyright (c) 2008-2013 Volatility Foundation
# Copyright (c) 2008 Brendan Dolan-Gavitt <bdolangavitt@wesleyan.edu>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Brendan Dolan-Gavitt
@license: GNU General Public License 2.0
@contact: bdolangavitt@wesleyan.edu
"""
#pylint: disable-msg=C0111
import volatility.win32.rawreg as rawreg
import volatility.win32.hive as hive
import volatility.win32.lsasecrets as lsasecrets
import volatility.win32.hashdump as hashdump
from Crypto.Hash import HMAC
from Crypto.Cipher import ARC4
from struct import unpack
def get_nlkm(secaddr, lsakey):
    # NL$KM is the LSA secret holding the key used to encrypt cached
    # domain logon (MSCACHE) entries.
    return lsasecrets.get_secret_by_name(secaddr, 'NL$KM', lsakey)
def decrypt_hash(edata, nlkm, ch):
    """RC4-decrypt one encrypted cache entry.

    The RC4 key is HMAC-MD5(nlkm, ch).  RC4 is symmetric, so encrypt()
    performs the decryption here.
    """
    rc4_key = HMAC.new(nlkm, ch).digest()
    cipher = ARC4.new(rc4_key)
    plaintext = cipher.encrypt(edata)
    return plaintext
def parse_cache_entry(cache_data):
    """Split a raw NL$ cache registry value into its interesting fields.

    Layout (offsets in bytes): 0-1 username length, 2-3 domain length,
    60-61 DNS domain name length, 64-79 challenge/salt, 96.. encrypted blob.
    Returns (uname_len, domain_len, domain_name_len, enc_data, ch).
    """
    uname_len, domain_len = unpack("<HH", cache_data[:4])
    domain_name_len = unpack("<H", cache_data[60:62])[0]
    challenge = cache_data[64:80]
    encrypted = cache_data[96:]
    return (uname_len, domain_len, domain_name_len, encrypted, challenge)
def parse_decrypted_cache(dec_data, uname_len,
                          domain_len, domain_name_len):
    """Extract (username, domain, domain_name, hash) from a decrypted entry.

    The first 16 bytes are the MS-Cache hash; the UTF-16-LE strings start
    at offset 72, each padded to a 4-byte boundary.

    Fix: use floor division for the padding math.  On Python 2 '/' was
    already integer division, but on Python 3 it produces floats and the
    slice offsets below would raise TypeError.
    """
    uname_off = 72
    pad = 2 * ((uname_len // 2) % 2)
    domain_off = uname_off + uname_len + pad
    pad = 2 * ((domain_len // 2) % 2)
    domain_name_off = domain_off + domain_len + pad
    # MS-Cache hash is the first 0x10 bytes of the decrypted blob.
    hashh = dec_data[:0x10]
    username = dec_data[uname_off:uname_off + uname_len]
    username = username.decode('utf-16-le')
    domain = dec_data[domain_off:domain_off + domain_len]
    domain = domain.decode('utf-16-le')
    domain_name = dec_data[domain_name_off:domain_name_off + domain_name_len]
    domain_name = domain_name.decode('utf-16-le')
    return (username, domain, domain_name, hashh)
def dump_hashes(sysaddr, secaddr):
    """Collect cached domain credentials from SYSTEM/SECURITY hives.

    Returns a list of (username, domain, domain_name, hash) tuples, or
    None when any required key/secret cannot be recovered.
    """
    boot_key = hashdump.get_bootkey(sysaddr)
    if not boot_key:
        return None
    lsa_key = lsasecrets.get_lsa_key(secaddr, boot_key)
    if not lsa_key:
        return None
    nlkm_secret = get_nlkm(secaddr, lsa_key)
    if not nlkm_secret:
        return None
    root_key = rawreg.get_root(secaddr)
    if not root_key:
        return None
    cache_key = rawreg.open_key(root_key, ["Cache"])
    if not cache_key:
        return None
    results = []
    for value in rawreg.values(cache_key):
        # NL$Control is bookkeeping, not a cached credential.
        if value.Name == "NL$Control":
            continue
        raw_entry = value.obj_vm.read(value.Data, value.DataLength)
        (uname_len, domain_len, domain_name_len,
         enc_data, challenge) = parse_cache_entry(raw_entry)
        # A zero-length username marks an empty cache slot.
        if uname_len == 0:
            continue
        plaintext = decrypt_hash(enc_data, nlkm_secret, challenge)
        (username, domain, domain_name,
         hashh) = parse_decrypted_cache(plaintext, uname_len,
                                        domain_len, domain_name_len)
        results.append((username, domain, domain_name, hashh))
    return results
def dump_memory_hashes(addr_space, config, syshive, sechive):
sysaddr = hive.HiveAddressSpace(addr_space, config, syshive)
secaddr = hive.HiveAddressSpace(addr_space, config, sechive)
for (u, d, dn, hashh) in dump_hashes(sysaddr, secaddr):
print "{0}:{1}:{2}:{3}".format(u.lower(), hashh.encode('hex'),
d.lower(), dn.lower())
def dump_file_hashes(syshive_fname, sechive_fname):
    """Print cached domain credentials from on-disk SYSTEM/SECURITY hive files.

    Output: username:hash_hex:domain:dnsdomain, one line per entry.
    NOTE: Python 2 only -- uses the print statement and str.encode('hex').
    """
    sysaddr = hive.HiveFileAddressSpace(syshive_fname)
    secaddr = hive.HiveFileAddressSpace(sechive_fname)
    for (u, d, dn, hashh) in dump_hashes(sysaddr, secaddr):
        print "{0}:{1}:{2}:{3}".format(u.lower(), hashh.encode('hex'),
            d.lower(), dn.lower()) | unknown | codeparrot/codeparrot-clean | |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_proxy_arp
short_description: Configure proxy-ARP in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system feature and proxy_arp category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_proxy_arp:
description:
- Configure proxy-ARP.
default: null
type: dict
suboptions:
end_ip:
description:
- End IP of IP range to be proxied.
type: str
id:
description:
- Unique integer ID of the entry.
required: true
type: int
interface:
description:
- Interface acting proxy-ARP. Source system.interface.name.
type: str
ip:
description:
- IP address or start IP to be proxied.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure proxy-ARP.
fortios_system_proxy_arp:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_proxy_arp:
end_ip: "<your_own_value>"
id: "4"
interface: "<your_own_value> (source system.interface.name)"
ip: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Authenticate against the FortiGate via the legacy fortiosapi client.

    Honours the 'https' and 'ssl_verify' options from the module params.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']
    fos.debug('on')
    # HTTPS is the default; it is disabled only when explicitly set falsy.
    use_https = data.get('https', True)
    fos.https('on' if use_https else 'off')
    fos.login(host, username, password, verify=ssl_verify)
def filter_system_proxy_arp_data(json):
    """Keep only the recognised proxy-ARP options that have a value.

    Drops unknown keys and keys whose value is None.
    """
    option_list = ['end_ip', 'id', 'interface',
                   'ip']
    return {option: json[option]
            for option in option_list
            if json.get(option) is not None}
def underscore_to_hyphen(data):
    """Recursively replace '_' with '-' in dict keys.

    Fix: the original list branch rebound the loop variable
    (``elem = underscore_to_hyphen(elem)``), which never wrote the
    converted element back, so lists of dicts were returned unchanged.
    Build the converted list explicitly instead.

    Scalars are returned as-is; containers are returned converted.
    """
    if isinstance(data, list):
        data = [underscore_to_hyphen(elem) for elem in data]
    elif isinstance(data, dict):
        data = {k.replace('_', '-'): underscore_to_hyphen(v)
                for k, v in data.items()}
    return data
def system_proxy_arp(data, fos):
    """Create/update or delete the proxy-ARP entry depending on ``state``.

    Returns the raw FortiOS API response; returns None for any other
    (unexpected) state value, matching the original behaviour.
    """
    vdom = data['vdom']
    desired_state = data['state']
    payload = underscore_to_hyphen(
        filter_system_proxy_arp_data(data['system_proxy_arp']))
    if desired_state == "present":
        return fos.set('system',
                       'proxy-arp',
                       data=payload,
                       vdom=vdom)
    if desired_state == "absent":
        return fos.delete('system',
                          'proxy-arp',
                          mkey=payload['id'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True for a successful call, treating DELETE-404 as success.

    Deleting an already-absent object is idempotent, hence the 404 case.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
    """Dispatch to the proxy-ARP handler and normalise the result.

    Returns (is_error, has_changed, response).  As in the original, the
    response variable is only bound when 'system_proxy_arp' is supplied.
    """
    if data['system_proxy_arp']:
        resp = system_proxy_arp(data, fos)
    is_error = not is_successful_status(resp)
    has_changed = resp['status'] == "success"
    return is_error, has_changed, resp
def main():
    """Module entry point: build the argument spec, connect, apply the change."""
    # Argument spec mirrors the DOCUMENTATION block above.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "system_proxy_arp": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "end_ip": {"required": False, "type": "str"},
                "id": {"required": True, "type": "int"},
                "interface": {"required": False, "type": "str"},
                "ip": {"required": False, "type": "str"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # legacy_mode refers to using fortiosapi instead of HTTPAPI:
    # chosen when host/username/password are all supplied explicitly.
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None
    if not legacy_mode:
        # HTTPAPI path: reuse the persistent Ansible connection socket.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)
            is_error, has_changed, result = fortios_system(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: import lazily so the module loads without fortiosapi.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")
        fos = FortiOSAPI()
        login(module.params, fos)
        is_error, has_changed, result = fortios_system(module.params, fos)
        fos.logout()
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
    main() | unknown | codeparrot/codeparrot-clean | |
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.output import LEVELS
from .jsbuildingcontext import JsBuildingContext
from .jsexecutionresult import JsExecutionResult
class JsModelBuilder(object):
    """Converts parsed execution results into the JS model used by log/report."""

    def __init__(self, log_path=None, split_log=False,
                 prune_input_to_save_memory=False):
        self._context = JsBuildingContext(log_path, split_log,
                                          prune_input_to_save_memory)
    def build_from(self, result_from_xml):
        # Statistics must be built first because building suite may prune input.
        # Keyword arguments are evaluated in source order, so statistics= below
        # runs before suite=.
        return JsExecutionResult(
            statistics=StatisticsBuilder().build(result_from_xml.statistics),
            suite=SuiteBuilder(self._context).build(result_from_xml.suite),
            errors=ErrorsBuilder(self._context).build(result_from_xml.errors),
            strings=self._context.strings,
            basemillis=self._context.basemillis,
            split_results=self._context.split_results,
            min_level=self._context.min_level
        )
class _Builder(object):
    """Shared helpers for building JS model tuples.

    NOTE(review): self._string/self._html appear to intern text into the
    context's string table, so the order of calls likely matters -- confirm
    against JsBuildingContext before reordering anything here.
    """
    # Status is encoded as a small int in the JS model.
    _statuses = {'FAIL': 0, 'PASS': 1, 'NOT_RUN': 2}
    def __init__(self, context):
        self._context = context
        self._string = self._context.string
        self._html = self._context.html
        self._timestamp = self._context.timestamp
    def _get_status(self, item):
        # (status code, start timestamp, elapsed time) [+ optional message].
        model = (self._statuses[item.status],
                 self._timestamp(item.starttime),
                 item.elapsedtime)
        msg = getattr(item, 'message', '')
        if not msg:
            return model
        elif msg.startswith('*HTML*'):
            # '*HTML*' prefix marks pre-formatted HTML -- stored unescaped.
            msg = self._string(msg[6:].lstrip(), escape=False)
        else:
            msg = self._string(msg)
        return model + (msg,)
    def _build_keywords(self, kws, split=False):
        # Keyword models may be diverted into a separate split-log file.
        splitting = self._context.start_splitting_if_needed(split)
        model = tuple(self._build_keyword(k) for k in kws)
        return model if not splitting else self._context.end_splitting(model)
class SuiteBuilder(_Builder):
    """Builds the JS model tuple for a suite, recursing into child suites."""

    def __init__(self, context):
        _Builder.__init__(self, context)
        self._build_suite = self.build
        self._build_test = TestBuilder(context).build
        self._build_keyword = KeywordBuilder(context).build
    def build(self, suite):
        with self._context.prune_input(suite.suites, suite.tests, suite.keywords):
            stats = self._get_statistics(suite) # Must be done before pruning
            return (self._string(suite.name, attr=True),
                    self._string(suite.source),
                    self._context.relative_source(suite.source),
                    self._html(suite.doc),
                    tuple(self._yield_metadata(suite)),
                    self._get_status(suite),
                    tuple(self._build_suite(s) for s in suite.suites),
                    tuple(self._build_test(t) for t in suite.tests),
                    tuple(self._build_keyword(k, split=True) for k in suite.keywords),
                    stats)
    def _yield_metadata(self, suite):
        # Flattened as (name, value, name, value, ...).
        # NOTE(review): iteritems() is Python 2 only.
        for name, value in suite.metadata.iteritems():
            yield self._string(name)
            yield self._html(value)
    def _get_statistics(self, suite):
        stats = suite.statistics # Access property only once
        return (stats.all.total,
                stats.all.passed,
                stats.critical.total,
                stats.critical.passed)
class TestBuilder(_Builder):
    """Builds the JS model tuple for a single test case."""

    def __init__(self, context):
        _Builder.__init__(self, context)
        self._build_keyword = KeywordBuilder(context).build

    def build(self, test):
        # Keep the original evaluation order: string/html calls feed the
        # shared string table, so each part is computed in sequence.
        with self._context.prune_input(test.keywords):
            name = self._string(test.name, attr=True)
            timeout = self._string(test.timeout)
            critical = int(test.critical)
            doc = self._html(test.doc)
            tags = tuple(self._string(tag) for tag in test.tags)
            status = self._get_status(test)
            keywords = self._build_keywords(test.keywords, split=True)
            return (name, timeout, critical, doc, tags, status, keywords)
class KeywordBuilder(_Builder):
    """Builds the JS model tuple for a keyword (and its nested keywords)."""
    # Keyword type is encoded as a small int in the JS model.
    _types = {'kw': 0, 'setup': 1, 'teardown': 2, 'for': 3, 'foritem': 4}
    def __init__(self, context):
        _Builder.__init__(self, context)
        # Nested keywords are built recursively by this same builder.
        self._build_keyword = self.build
        self._build_message = MessageBuilder(context).build
    def build(self, kw, split=False):
        with self._context.prune_input(kw.messages, kw.keywords):
            return (self._types[kw.type],
                    self._string(kw.kwname, attr=True),
                    self._string(kw.libname, attr=True),
                    self._string(kw.timeout),
                    self._html(kw.doc),
                    self._string(', '.join(kw.args)),
                    self._string(', '.join(kw.assign)),
                    self._string(', '.join(kw.tags)),
                    self._get_status(kw),
                    self._build_keywords(kw.keywords, split),
                    tuple(self._build_message(m) for m in kw.messages))
class MessageBuilder(_Builder):
    """Builds the JS model tuple for a log message."""

    def build(self, msg):
        # WARN/ERROR messages get a link target so the log can jump to them.
        if msg.level in ('WARN', 'ERROR'):
            self._context.create_link_target(msg)
        # Track the highest level seen for the log's level filter.
        self._context.message_level(msg.level)
        return self._build(msg)

    def _build(self, msg):
        timestamp = self._timestamp(msg.timestamp)
        level = LEVELS[msg.level]
        # Message text is already HTML -- store it unescaped.
        text = self._string(msg.html_message, escape=False)
        return (timestamp, level, text)
class StatisticsBuilder(object):
    """Converts total/tag/suite statistics into plain attribute tuples."""

    def build(self, statistics):
        parts = (statistics.total, statistics.tags, statistics.suite)
        return tuple(self._build_stats(part) for part in parts)

    def _build_stats(self, stats):
        collected = []
        for stat in stats:
            collected.append(
                stat.get_attributes(include_label=True, include_elapsed=True,
                                    exclude_empty=True, html_escape=True))
        return tuple(collected)
class ErrorsBuilder(_Builder):
    """Builds the JS model for execution errors."""

    def __init__(self, context):
        _Builder.__init__(self, context)
        self._build_message = ErrorMessageBuilder(context).build
    def build(self, errors):
        with self._context.prune_input(errors.messages):
            # Iterating the errors container yields its messages
            # (presumably errors.__iter__ delegates to errors.messages).
            return tuple(self._build_message(msg) for msg in errors)
class ErrorMessageBuilder(MessageBuilder):
    """Error messages additionally carry a link to the failing item, if any."""

    def build(self, msg):
        model = self._build(msg)
        link = self._context.link(msg)
        return model if link is None else model + (link,) | unknown | codeparrot/codeparrot-clean | |
# -*- coding: utf-8 -*-
"""
Minimal controler for 433Mhz RF Wireless Power Outlets
FROM: http://timleland.com/wireless-power-outlets/
https://github.com/timleland/rfoutlet
"""
import argparse
from pyoutlet import Switcher
def _args_parser():
"""
Argument parser
"""
p = argparse.ArgumentParser(description="\033[1m\033[5m\033[32m{}\033[0m\n\n".format('PYOUTLET'),
formatter_class=argparse.RawTextHelpFormatter)
p.add_argument('operation', nargs='?', action='store', help='Turn ON/OFF operation')
p.add_argument('outlet', nargs='?', action='store', help='Outlet label or #')
p.add_argument('-i', '--info', action='store_true', help='︎ℹ️ Show outlets CODES and labels')
return p
def main():
    """CLI main method.

    Fix: both positionals are optional (nargs='?'), so running the tool
    with no arguments left args.operation as None and the original
    ``args.operation.lower()`` raised AttributeError instead of showing
    help.  Normalise the operation once, defaulting to ''.
    """
    parser = _args_parser()
    args = parser.parse_args()
    switch = Switcher()
    ok = False
    operation = (args.operation or '').lower()
    if args.info:
        print(switch)
        print('JSON configuration for "homebridge-rcswitch-gpiomem":\n{}\n'.format(switch.homebridge_accessories))
        ok = True
    elif operation == 'on':
        ok = switch.turn_on_outlet(args.outlet, verbose=True)
    elif operation == 'off':
        ok = switch.turn_off_outlet(args.outlet, verbose=True)
    if not ok:
        # Unknown/missing operation or a failed switch call: show help.
        print('OPERATION ERROR !?\n')
        parser.print_help()
if __name__ == '__main__':
main() | unknown | codeparrot/codeparrot-clean | ||
from ctypes import c_void_p
from types import NoneType
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
# Trying to import GDAL libraries, if available. Have to place in
# try/except since this package may be used outside GeoDjango.
try:
from django.contrib.gis import gdal
except ImportError:
# A 'dummy' gdal module.
class GDALInfo(object):
HAS_GDAL = False
GEOJSON = False
gdal = GDALInfo()
# NumPy supported?
try:
import numpy
except ImportError:
numpy = False
class GEOSBase(object):
    """
    Base object for GEOS objects that has a pointer access property
    that controls access to the underlying C pointer.
    """
    # Initially the pointer is NULL.
    _ptr = None
    # Default allowed pointer type; subclasses may override.
    ptr_type = c_void_p
    # Pointer access property.
    def _get_ptr(self):
        # Raise an exception if the pointer isn't valid -- we don't
        # want to be passing NULL pointers to C routines.
        if self._ptr: return self._ptr
        else: raise GEOSException('NULL GEOS %s pointer encountered.' % self.__class__.__name__)
    def _set_ptr(self, ptr):
        # Only allow the pointer to be set with pointers of the
        # compatible type or None (NULL).
        # NOTE(review): NoneType comes from `from types import NoneType`,
        # which is Python 2 only (removed until 3.10).
        if isinstance(ptr, (self.ptr_type, NoneType)):
            self._ptr = ptr
        else:
            raise TypeError('Incompatible pointer type')
    # Property for controlling access to the GEOS object pointers. Using
    # this raises an exception when the pointer is NULL, thus preventing
    # the C library from attempting to access an invalid memory location.
    ptr = property(_get_ptr, _set_ptr) | unknown | codeparrot/codeparrot-clean | |
#! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
    """Watch task/comm perf events system-wide and print one line per sample.

    Runs forever until interrupted.  NOTE: Python 2 only (print statements).
    """
    cpus = perf.cpu_map()
    threads = perf.thread_map()
    # Wake the reader on every event (wakeup_events=1, watermark=1).
    # NOTE(review): perf.SAMPLE_TID is OR'ed in twice -- harmless but
    # redundant; the second occurrence was probably meant to be another flag.
    evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
                       wakeup_events = 1, watermark = 1,
                       sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
    evsel.open(cpus = cpus, threads = threads);
    evlist = perf.evlist(cpus, threads)
    evlist.add(evsel)
    evlist.mmap()
    # Block until events arrive, then drain each CPU's ring buffer.
    while True:
        evlist.poll(timeout = -1)
        for cpu in cpus:
            event = evlist.read_on_cpu(cpu)
            if not event:
                continue
            print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
                                                    event.sample_pid,
                                                    event.sample_tid),
            print event
if __name__ == '__main__':
    main() | unknown | codeparrot/codeparrot-clean | |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=line-too-long
"""Inputs and Readers. See the @{$python/io_ops} guide.
@@placeholder
@@placeholder_with_default
@@sparse_placeholder
@@ReaderBase
@@TextLineReader
@@WholeFileReader
@@IdentityReader
@@TFRecordReader
@@FixedLengthRecordReader
@@decode_csv
@@decode_raw
@@VarLenFeature
@@FixedLenFeature
@@FixedLenSequenceFeature
@@SparseFeature
@@parse_example
@@parse_single_example
@@parse_tensor
@@decode_json_example
@@QueueBase
@@FIFOQueue
@@PaddingFIFOQueue
@@RandomShuffleQueue
@@PriorityQueue
@@ConditionalAccumulatorBase
@@ConditionalAccumulator
@@SparseConditionalAccumulator
@@matching_files
@@read_file
@@write_file
@@match_filenames_once
@@limit_epochs
@@input_producer
@@range_input_producer
@@slice_input_producer
@@string_input_producer
@@batch
@@maybe_batch
@@batch_join
@@maybe_batch_join
@@shuffle_batch
@@maybe_shuffle_batch
@@shuffle_batch_join
@@maybe_shuffle_batch_join
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import python_io
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import gen_io_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_io_ops import *
# pylint: enable=wildcard-import
# pylint: disable=protected-access
def _save(filename, tensor_names, tensors, tensor_slices=None, name="save"):
  """Emit an op that writes `tensors` under `tensor_names` to `filename`.

  Example usage without slice info:
    Save("/foo/bar", ["w", "b"], [w, b])

  Example usage with slices:
    Save("/foo/bar", ["w", "w"], [slice0, slice1],
         tensor_slices=["4 10 0,2:-", "4 10 2,2:-"])

  Args:
    filename: the file name of the sstable.
    tensor_names: a list of strings.
    tensors: the list of tensors to be saved.
    tensor_slices: Optional list of strings to specify the shape and slices of
      a larger virtual tensor that each tensor is a part of. If not specified
      each tensor is saved as a full slice.
    name: string. Optional name for the op.

  Requires:
    The length of tensors should match the size of tensor_names and of
    tensor_slices.

  Returns:
    An Operation that saves the tensors.
  """
  # Without slice specs every tensor is written as a full slice.
  if tensor_slices is None:
    return gen_io_ops._save(filename, tensor_names, tensors, name=name)
  return gen_io_ops._save_slices(filename, tensor_names, tensor_slices,
                                 tensors, name=name)
def _restore_slice(file_pattern, tensor_name, shape_and_slice, tensor_type,
                   name="restore_slice", preferred_shard=-1):
  """Emit an op restoring one tensor slice from a set of checkpoint files.

  Example usage:
    RestoreSlice("/foo/bar-?????-of-?????", "w", "10 10 0,2:-", DT_FLOAT)

  Args:
    file_pattern: the file pattern used to match a set of checkpoint files.
    tensor_name: the name of the tensor to restore.
    shape_and_slice: the shape-and-slice spec of the slice.
    tensor_type: the type of the tensor to restore.
    name: string. Optional name for the op.
    preferred_shard: Int. Optional shard to open first in the checkpoint file.

  Returns:
    A tensor of type "tensor_type".
  """
  # Restore always produces the base dtype (e.g. float32 for float32_ref).
  dtype = dtypes.as_dtype(tensor_type).base_dtype
  return gen_io_ops._restore_slice(file_pattern, tensor_name, shape_and_slice,
                                   dtype, preferred_shard, name=name)
class ReaderBase(object):
  """Base class for different Reader types, that produce a record every step.

  Conceptually, Readers convert string 'work units' into records (key,
  value pairs). Typically the 'work units' are filenames and the
  records are extracted from the contents of those files. We want a
  single record produced per step, but a work unit can correspond to
  many records.

  Therefore we introduce some decoupling using a queue. The queue
  contains the work units and the Reader dequeues from the queue when
  it is asked to produce a record (via Read()) but it has finished the
  last work unit.
  """

  def __init__(self, reader_ref, supports_serialize=False):
    """Creates a new ReaderBase.

    Args:
      reader_ref: The operation that implements the reader.
      supports_serialize: True if the reader implementation can
        serialize its state.
    """
    self._reader_ref = reader_ref
    self._supports_serialize = supports_serialize

  @property
  def reader_ref(self):
    """Op that implements the reader."""
    return self._reader_ref

  def read(self, queue, name=None):
    """Returns the next record (key, value pair) produced by a reader.

    Will dequeue a work unit from queue if necessary (e.g. when the
    Reader needs to start reading from a new file since it has
    finished with the previous file).

    Args:
      queue: A Queue or a mutable string Tensor representing a handle
        to a Queue, with string work items.
      name: A name for the operation (optional).

    Returns:
      A tuple of Tensors (key, value).
      key: A string scalar Tensor.
      value: A string scalar Tensor.
    """
    if isinstance(queue, ops.Tensor):
      queue_ref = queue
    else:
      queue_ref = queue.queue_ref
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops._reader_read_v2(self._reader_ref, queue_ref, name=name)
    else:
      # For compatibility with pre-resource queues, create a ref(string) tensor
      # which can be looked up as the same queue by a resource manager.
      old_queue_op = gen_data_flow_ops._fake_queue(queue_ref)
      return gen_io_ops._reader_read(self._reader_ref, old_queue_op, name=name)

  def read_up_to(self, queue, num_records,  # pylint: disable=invalid-name
                 name=None):
    """Returns up to num_records (key, value pairs) produced by a reader.

    Will dequeue a work unit from queue if necessary (e.g., when the
    Reader needs to start reading from a new file since it has
    finished with the previous file).
    It may return less than num_records even before the last batch.

    Args:
      queue: A Queue or a mutable string Tensor representing a handle
        to a Queue, with string work items.
      num_records: Number of records to read.
      name: A name for the operation (optional).

    Returns:
      A tuple of Tensors (keys, values).
      keys: A 1-D string Tensor.
      values: A 1-D string Tensor.
    """
    if isinstance(queue, ops.Tensor):
      queue_ref = queue
    else:
      queue_ref = queue.queue_ref
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops._reader_read_up_to_v2(self._reader_ref,
                                              queue_ref,
                                              num_records,
                                              name=name)
    else:
      # For compatibility with pre-resource queues, create a ref(string) tensor
      # which can be looked up as the same queue by a resource manager.
      old_queue_op = gen_data_flow_ops._fake_queue(queue_ref)
      # Fix: the ref-dtype fallback must use the non-v2 kernel, matching
      # read()/num_records_produced()/etc. -- the _v2 op expects a resource
      # handle, not the ref(string) fake-queue tensor built above.
      return gen_io_ops._reader_read_up_to(self._reader_ref,
                                           old_queue_op,
                                           num_records,
                                           name=name)

  def num_records_produced(self, name=None):
    """Returns the number of records this reader has produced.

    This is the same as the number of Read executions that have
    succeeded.

    Args:
      name: A name for the operation (optional).

    Returns:
      An int64 Tensor.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops._reader_num_records_produced_v2(self._reader_ref,
                                                        name=name)
    else:
      return gen_io_ops._reader_num_records_produced(self._reader_ref,
                                                     name=name)

  def num_work_units_completed(self, name=None):
    """Returns the number of work units this reader has finished processing.

    Args:
      name: A name for the operation (optional).

    Returns:
      An int64 Tensor.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops._reader_num_work_units_completed_v2(self._reader_ref,
                                                            name=name)
    else:
      return gen_io_ops._reader_num_work_units_completed(self._reader_ref,
                                                         name=name)

  def serialize_state(self, name=None):
    """Produce a string tensor that encodes the state of a reader.

    Not all Readers support being serialized, so this can produce an
    Unimplemented error.

    Args:
      name: A name for the operation (optional).

    Returns:
      A string Tensor.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops._reader_serialize_state_v2(self._reader_ref, name=name)
    else:
      return gen_io_ops._reader_serialize_state(self._reader_ref, name=name)

  def restore_state(self, state, name=None):
    """Restore a reader to a previously saved state.

    Not all Readers support being restored, so this can produce an
    Unimplemented error.

    Args:
      state: A string Tensor.
        Result of a SerializeState of a Reader with matching type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops._reader_restore_state_v2(
          self._reader_ref, state, name=name)
    else:
      return gen_io_ops._reader_restore_state(
          self._reader_ref, state, name=name)

  @property
  def supports_serialize(self):
    """Whether the Reader implementation can serialize its state."""
    return self._supports_serialize

  def reset(self, name=None):
    """Restore a reader to its initial clean state.

    Args:
      name: A name for the operation (optional).

    Returns:
      The created Operation.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops._reader_reset_v2(self._reader_ref, name=name)
    else:
      return gen_io_ops._reader_reset(self._reader_ref, name=name)
# Reader ops consume queues/filenames and produce string records; none of
# them have gradients.
ops.NotDifferentiable("ReaderRead")
ops.NotDifferentiable("ReaderReadUpTo")
ops.NotDifferentiable("ReaderNumRecordsProduced")
ops.NotDifferentiable("ReaderNumWorkUnitsCompleted")
ops.NotDifferentiable("ReaderSerializeState")
ops.NotDifferentiable("ReaderRestoreState")
ops.NotDifferentiable("ReaderReset")
class WholeFileReader(ReaderBase):
  """A Reader emitting each queued file's full contents as one record.

  To use, enqueue filenames in a Queue.  Read produces the filename as the
  key and that file's entire contents as the value.

  See ReaderBase for supported methods.
  """

  def __init__(self, name=None):
    """Create a WholeFileReader.

    Args:
      name: A name for the operation (optional).
    """
    reader_ref = gen_io_ops._whole_file_reader_v2(name=name)
    super(WholeFileReader, self).__init__(reader_ref, supports_serialize=True)
ops.NotDifferentiable("WholeFileReader")
class TextLineReader(ReaderBase):
  """A Reader emitting one line per `read`, with the newline stripped.

  See ReaderBase for supported methods.
  """
  # TODO(josh11b): Support serializing and restoring state.

  def __init__(self, skip_header_lines=None, name=None):
    """Create a TextLineReader.

    Args:
      skip_header_lines: An optional int. Defaults to 0. Number of lines
        to skip from the beginning of every file.
      name: A name for the operation (optional).
    """
    reader_ref = gen_io_ops._text_line_reader_v2(
        skip_header_lines=skip_header_lines, name=name)
    super(TextLineReader, self).__init__(reader_ref)
ops.NotDifferentiable("TextLineReader")
class FixedLengthRecordReader(ReaderBase):
  """A Reader emitting fixed-size byte records from each file.

  See ReaderBase for supported methods.
  """
  # TODO(josh11b): Support serializing and restoring state.

  def __init__(self, record_bytes, header_bytes=None, footer_bytes=None,
               name=None):
    """Create a FixedLengthRecordReader.

    Args:
      record_bytes: An int.
      header_bytes: An optional int. Defaults to 0.
      footer_bytes: An optional int. Defaults to 0.
      name: A name for the operation (optional).
    """
    reader_ref = gen_io_ops._fixed_length_record_reader_v2(
        record_bytes=record_bytes, header_bytes=header_bytes,
        footer_bytes=footer_bytes, name=name)
    super(FixedLengthRecordReader, self).__init__(reader_ref)
ops.NotDifferentiable("FixedLengthRecordReader")
class TFRecordReader(ReaderBase):
  """A Reader emitting the records of a TFRecords file one at a time.

  See ReaderBase for supported methods.
  """
  # TODO(josh11b): Support serializing and restoring state.

  def __init__(self, name=None, options=None):
    """Create a TFRecordReader.

    Args:
      name: A name for the operation (optional).
      options: A TFRecordOptions object (optional).
    """
    # Translate the options object into the kernel's compression string.
    compression = python_io.TFRecordOptions.get_compression_type_string(
        options)
    reader_ref = gen_io_ops._tf_record_reader_v2(
        name=name, compression_type=compression)
    super(TFRecordReader, self).__init__(reader_ref)
ops.NotDifferentiable("TFRecordReader")
class IdentityReader(ReaderBase):
  """A Reader that outputs the queued work as both the key and value.

  To use, enqueue strings in a Queue.  Read will take the front
  work string and output (work, work).

  See ReaderBase for supported methods.
  """

  def __init__(self, name=None):
    """Create a IdentityReader.

    Args:
      name: A name for the operation (optional).
    """
    reader_ref = gen_io_ops._identity_reader_v2(name=name)
    super(IdentityReader, self).__init__(reader_ref, supports_serialize=True)


ops.NotDifferentiable("IdentityReader")
package kotlinx.coroutines.exceptions
import platform.posix.*
import kotlin.native.concurrent.*
/** Yields the rest of the current thread's time slice via POSIX `sched_yield`. */
actual inline fun yieldThread() { sched_yield() }
/** Returns the name of the current Kotlin/Native [Worker] as the thread name. */
actual fun currentThreadName(): String = Worker.current.name
"""
Block Structure Transformer Registry implemented using the platform's
PluginManager.
"""
from base64 import b64encode
from hashlib import sha1
from openedx.core.lib.api.plugins import PluginManager
from openedx.core.lib.cache_utils import memoized
class TransformerRegistry(PluginManager):
    """
    Registry for all of the block structure transformers that have been
    made available.

    All block structure transformers should implement
    `BlockStructureTransformer`.
    """
    NAMESPACE = 'openedx.block_structure_transformer'
    USE_PLUGIN_MANAGER = True

    @classmethod
    def get_registered_transformers(cls):
        """
        Returns a set of all registered transformers.

        Returns:
            {BlockStructureTransformer} - All transformers that are
                registered with the platform's PluginManager.
        """
        if cls.USE_PLUGIN_MANAGER:
            # dict.values() works on both Python 2 and 3; the previous
            # itervalues() call was Python-2-only and raised AttributeError
            # on Python 3.
            return set(cls.get_available_plugins().values())
        else:
            return set()

    @classmethod
    @memoized
    def get_write_version_hash(cls):
        """
        Returns a deterministic hash value of the WRITE_VERSION of all
        registered transformers.
        """
        hash_obj = sha1()

        # Sort by name so the hash does not depend on registration order.
        sorted_transformers = sorted(cls.get_registered_transformers(), key=lambda t: t.name())
        for transformer in sorted_transformers:
            hash_obj.update(transformer.name().encode('utf-8'))
            # hashlib.update() requires bytes; encode the version number
            # explicitly so this also works on Python 3 (str is fine on 2,
            # but a bare str(...) raises TypeError on 3).
            hash_obj.update(str(transformer.WRITE_VERSION).encode('utf-8'))

        return b64encode(hash_obj.digest())

    @classmethod
    def find_unregistered(cls, transformers):
        """
        Find and returns the names of all the transformers from the
        given list that aren't registered with the platform's
        PluginManager.

        Arguments:
            transformers ([BlockStructureTransformer] - List of
                transformers to check in the registry.

        Returns:
            set([string]) - Set of names of a subset of the given
                transformers that weren't found in the registry.
        """
        registered_transformer_names = set(reg_trans.name() for reg_trans in cls.get_registered_transformers())
        requested_transformer_names = set(transformer.name() for transformer in transformers)
        return requested_transformer_names - registered_transformer_names
import decimal
import enum
import json
import unittest
import uuid
from django import forms
from django.core import checks, exceptions, serializers, validators
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.db.models.expressions import RawSQL
from django.db.models.functions import Cast
from django.test import TransactionTestCase, modify_settings, override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import (
PostgreSQLSimpleTestCase, PostgreSQLTestCase, PostgreSQLWidgetTestCase,
)
from .models import (
ArrayEnumModel, ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel,
IntegerArrayModel, NestedIntegerArrayModel, NullableIntegerArrayModel,
OtherTypesArrayModel, PostgreSQLModel, Tag,
)
try:
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.fields.array import IndexTransform, SliceTransform
from django.contrib.postgres.forms import (
SimpleArrayField, SplitArrayField, SplitArrayWidget,
)
from django.db.backends.postgresql.base import PSYCOPG2_VERSION
from psycopg2.extras import NumericRange
except ImportError:
pass
@isolate_apps('postgres_tests')
class BasicTests(PostgreSQLSimpleTestCase):
    """ArrayField behavior that needs no database: get_FOO_display() with
    grouped/ungrouped choices given as either lists or tuples."""

    def test_get_field_display(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(
                models.CharField(max_length=16),
                choices=[
                    ['Media', [(['vinyl', 'cd'], 'Audio')]],
                    (('mp3', 'mp4'), 'Digital'),
                ],
            )

        tests = (
            (['vinyl', 'cd'], 'Audio'),
            (('mp3', 'mp4'), 'Digital'),
            # Values not present in choices fall back to their repr.
            (('a', 'b'), "('a', 'b')"),
            (['c', 'd'], "['c', 'd']"),
        )
        for value, display in tests:
            with self.subTest(value=value, display=display):
                instance = MyModel(field=value)
                self.assertEqual(instance.get_field_display(), display)

    def test_get_field_display_nested_array(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(
                ArrayField(models.CharField(max_length=16)),
                choices=[
                    [
                        'Media',
                        [([['vinyl', 'cd'], ('x',)], 'Audio')],
                    ],
                    ((['mp3'], ('mp4',)), 'Digital'),
                ],
            )

        tests = (
            ([['vinyl', 'cd'], ('x',)], 'Audio'),
            ((['mp3'], ('mp4',)), 'Digital'),
            # Values not present in choices fall back to their repr.
            ((('a', 'b'), ('c',)), "(('a', 'b'), ('c',))"),
            ([['a', 'b'], ['c']], "[['a', 'b'], ['c']]"),
        )
        for value, display in tests:
            with self.subTest(value=value, display=display):
                instance = MyModel(field=value)
                self.assertEqual(instance.get_field_display(), display)
class TestSaveLoad(PostgreSQLTestCase):
    """Round-trip ArrayField values through save() and a fresh query."""

    def test_integer(self):
        instance = IntegerArrayModel(field=[1, 2, 3])
        instance.save()
        loaded = IntegerArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)

    def test_char(self):
        instance = CharArrayModel(field=['hello', 'goodbye'])
        instance.save()
        loaded = CharArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)

    def test_dates(self):
        instance = DateTimeArrayModel(
            datetimes=[timezone.now()],
            dates=[timezone.now().date()],
            times=[timezone.now().time()],
        )
        instance.save()
        loaded = DateTimeArrayModel.objects.get()
        self.assertEqual(instance.datetimes, loaded.datetimes)
        self.assertEqual(instance.dates, loaded.dates)
        self.assertEqual(instance.times, loaded.times)

    def test_tuples(self):
        # Tuples are accepted on save; assertSequenceEqual because the value
        # comes back as a list.
        instance = IntegerArrayModel(field=(1,))
        instance.save()
        loaded = IntegerArrayModel.objects.get()
        self.assertSequenceEqual(instance.field, loaded.field)

    def test_integers_passed_as_strings(self):
        # This checks that get_prep_value is deferred properly
        instance = IntegerArrayModel(field=['1'])
        instance.save()
        loaded = IntegerArrayModel.objects.get()
        self.assertEqual(loaded.field, [1])

    def test_default_null(self):
        instance = NullableIntegerArrayModel()
        instance.save()
        loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
        self.assertIsNone(loaded.field)
        self.assertEqual(instance.field, loaded.field)

    def test_null_handling(self):
        instance = NullableIntegerArrayModel(field=None)
        instance.save()
        loaded = NullableIntegerArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)

        # None on a non-nullable array column is rejected by the database.
        instance = IntegerArrayModel(field=None)
        with self.assertRaises(IntegrityError):
            instance.save()

    def test_nested(self):
        instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
        instance.save()
        loaded = NestedIntegerArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)

    def test_other_array_types(self):
        instance = OtherTypesArrayModel(
            ips=['192.168.0.1', '::1'],
            uuids=[uuid.uuid4()],
            decimals=[decimal.Decimal(1.25), 1.75],
            tags=[Tag(1), Tag(2), Tag(3)],
            json=[{'a': 1}, {'b': 2}],
            int_ranges=[NumericRange(10, 20), NumericRange(30, 40)],
            bigint_ranges=[
                NumericRange(7000000000, 10000000000),
                NumericRange(50000000000, 70000000000),
            ]
        )
        instance.save()
        loaded = OtherTypesArrayModel.objects.get()
        self.assertEqual(instance.ips, loaded.ips)
        self.assertEqual(instance.uuids, loaded.uuids)
        self.assertEqual(instance.decimals, loaded.decimals)
        self.assertEqual(instance.tags, loaded.tags)
        self.assertEqual(instance.json, loaded.json)
        self.assertEqual(instance.int_ranges, loaded.int_ranges)
        self.assertEqual(instance.bigint_ranges, loaded.bigint_ranges)

    def test_null_from_db_value_handling(self):
        instance = OtherTypesArrayModel.objects.create(
            ips=['192.168.0.1', '::1'],
            uuids=[uuid.uuid4()],
            decimals=[decimal.Decimal(1.25), 1.75],
            tags=None,
        )
        instance.refresh_from_db()
        self.assertIsNone(instance.tags)
        self.assertEqual(instance.json, [])
        self.assertIsNone(instance.int_ranges)
        self.assertIsNone(instance.bigint_ranges)

    def test_model_set_on_base_field(self):
        instance = IntegerArrayModel()
        field = instance._meta.get_field('field')
        self.assertEqual(field.model, IntegerArrayModel)
        self.assertEqual(field.base_field.model, IntegerArrayModel)

    def test_nested_nullable_base_field(self):
        if PSYCOPG2_VERSION < (2, 7, 5):
            self.skipTest('See https://github.com/psycopg/psycopg2/issues/325')
        instance = NullableIntegerArrayModel.objects.create(
            field_nested=[[None, None], [None, None]],
        )
        self.assertEqual(instance.field_nested, [[None, None], [None, None]])
class TestQuerying(PostgreSQLTestCase):
    """Lookups on ArrayField: exact/in/contains/contained_by/overlap,
    index and slice transforms, len, and aggregation interactions.

    The fixture in setUpTestData is ordered so tests can slice self.objs
    to express expectations (objs[0]=[1], objs[1]=[2], objs[2]=[2, 3],
    objs[3]=[20, 30, 40], objs[4]=None).
    """

    @classmethod
    def setUpTestData(cls):
        cls.objs = NullableIntegerArrayModel.objects.bulk_create([
            NullableIntegerArrayModel(field=[1]),
            NullableIntegerArrayModel(field=[2]),
            NullableIntegerArrayModel(field=[2, 3]),
            NullableIntegerArrayModel(field=[20, 30, 40]),
            NullableIntegerArrayModel(field=None),
        ])

    def test_empty_list(self):
        NullableIntegerArrayModel.objects.create(field=[])
        obj = NullableIntegerArrayModel.objects.annotate(
            empty_array=models.Value([], output_field=ArrayField(models.IntegerField())),
        ).filter(field=models.F('empty_array')).get()
        self.assertEqual(obj.field, [])
        self.assertEqual(obj.empty_array, [])

    def test_exact(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__exact=[1]),
            self.objs[:1]
        )

    def test_exact_charfield(self):
        instance = CharArrayModel.objects.create(field=['text'])
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field=['text']),
            [instance]
        )

    def test_exact_nested(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]),
            [instance]
        )

    def test_isnull(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__isnull=True),
            self.objs[-1:]
        )

    def test_gt(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__gt=[0]),
            self.objs[:4]
        )

    def test_lt(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__lt=[2]),
            self.objs[:1]
        )

    def test_in(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
            self.objs[:2]
        )

    def test_in_subquery(self):
        IntegerArrayModel.objects.create(field=[2, 3])
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(
                field__in=IntegerArrayModel.objects.all().values_list('field', flat=True)
            ),
            self.objs[2:3]
        )

    @unittest.expectedFailure
    def test_in_including_F_object(self):
        # This test asserts that Array objects passed to filters can be
        # constructed to contain F objects. This currently doesn't work as the
        # psycopg2 mogrify method that generates the ARRAY() syntax is
        # expecting literals, not column references (#27095).
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__in=[[models.F('id')]]),
            self.objs[:2]
        )

    def test_in_as_F_object(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__in=[models.F('field')]),
            self.objs[:4]
        )

    def test_contained_by(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
            self.objs[:2]
        )

    @unittest.expectedFailure
    def test_contained_by_including_F_object(self):
        # This test asserts that Array objects passed to filters can be
        # constructed to contain F objects. This currently doesn't work as the
        # psycopg2 mogrify method that generates the ARRAY() syntax is
        # expecting literals, not column references (#27095).
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__contained_by=[models.F('id'), 2]),
            self.objs[:2]
        )

    def test_contains(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__contains=[2]),
            self.objs[1:3]
        )

    def test_icontains(self):
        # Using the __icontains lookup with ArrayField is inefficient.
        instance = CharArrayModel.objects.create(field=['FoO'])
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field__icontains='foo'),
            [instance]
        )

    def test_contains_charfield(self):
        # Regression for #22907
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field__contains=['text']),
            []
        )

    def test_contained_by_charfield(self):
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field__contained_by=['text']),
            []
        )

    def test_overlap_charfield(self):
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field__overlap=['text']),
            []
        )

    def test_lookups_autofield_array(self):
        qs = NullableIntegerArrayModel.objects.filter(
            field__0__isnull=False,
        ).values('field__0').annotate(
            arrayagg=ArrayAgg('id'),
        ).order_by('field__0')
        tests = (
            ('contained_by', [self.objs[1].pk, self.objs[2].pk, 0], [2]),
            ('contains', [self.objs[2].pk], [2]),
            ('exact', [self.objs[3].pk], [20]),
            ('overlap', [self.objs[1].pk, self.objs[3].pk], [2, 20]),
        )
        for lookup, value, expected in tests:
            with self.subTest(lookup=lookup):
                self.assertSequenceEqual(
                    qs.filter(
                        **{'arrayagg__' + lookup: value},
                    ).values_list('field__0', flat=True),
                    expected,
                )

    def test_index(self):
        # field__0 addresses the first array element (0-based in the ORM).
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0=2),
            self.objs[1:3]
        )

    def test_index_chained(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0__lt=3),
            self.objs[0:3]
        )

    def test_index_nested(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field__0__0=1),
            [instance]
        )

    @unittest.expectedFailure
    def test_index_used_on_nested_data(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field__0=[1, 2]),
            [instance]
        )

    def test_index_transform_expression(self):
        expr = RawSQL("string_to_array(%s, ';')", ['1;2'])
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(
                field__0=Cast(
                    IndexTransform(1, models.IntegerField, expr),
                    output_field=models.IntegerField(),
                ),
            ),
            self.objs[:1],
        )

    def test_overlap(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
            self.objs[0:3]
        )

    def test_len(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__len__lte=2),
            self.objs[0:3]
        )

    def test_len_empty_array(self):
        obj = NullableIntegerArrayModel.objects.create(field=[])
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__len=0),
            [obj]
        )

    def test_slice(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0_1=[2]),
            self.objs[1:3]
        )

        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]),
            self.objs[2:3]
        )

    def test_order_by_slice(self):
        more_objs = (
            NullableIntegerArrayModel.objects.create(field=[1, 637]),
            NullableIntegerArrayModel.objects.create(field=[2, 1]),
            NullableIntegerArrayModel.objects.create(field=[3, -98123]),
            NullableIntegerArrayModel.objects.create(field=[4, 2]),
        )
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.order_by('field__1'),
            [
                more_objs[2], more_objs[1], more_objs[3], self.objs[2],
                self.objs[3], more_objs[0], self.objs[4], self.objs[1],
                self.objs[0],
            ]
        )

    @unittest.expectedFailure
    def test_slice_nested(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]),
            [instance]
        )

    def test_slice_transform_expression(self):
        expr = RawSQL("string_to_array(%s, ';')", ['9;2;3'])
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0_2=SliceTransform(2, 3, expr)),
            self.objs[2:3],
        )

    def test_usage_in_subquery(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(
                id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
            ),
            [self.objs[3]]
        )

    def test_enum_lookup(self):
        class TestEnum(enum.Enum):
            VALUE_1 = 'value_1'

        instance = ArrayEnumModel.objects.create(array_of_enums=[TestEnum.VALUE_1])
        self.assertSequenceEqual(
            ArrayEnumModel.objects.filter(array_of_enums__contains=[TestEnum.VALUE_1]),
            [instance]
        )

    def test_unsupported_lookup(self):
        msg = "Unsupported lookup '0_bar' for ArrayField or join on the field not permitted."
        with self.assertRaisesMessage(FieldError, msg):
            list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2]))

        msg = "Unsupported lookup '0bar' for ArrayField or join on the field not permitted."
        with self.assertRaisesMessage(FieldError, msg):
            list(NullableIntegerArrayModel.objects.filter(field__0bar=[2]))

    def test_grouping_by_annotations_with_array_field_param(self):
        value = models.Value([1], output_field=ArrayField(models.IntegerField()))
        self.assertEqual(
            NullableIntegerArrayModel.objects.annotate(
                array_length=models.Func(value, 1, function='ARRAY_LENGTH'),
            ).values('array_length').annotate(
                count=models.Count('pk'),
            ).get()['array_length'],
            1,
        )
class TestDateTimeExactQuerying(PostgreSQLTestCase):
    """Exact-match lookups on datetime/date/time-typed array fields."""

    @classmethod
    def setUpTestData(cls):
        moment = timezone.now()
        cls.datetimes = [moment]
        cls.dates = [moment.date()]
        cls.times = [moment.time()]
        fixture = DateTimeArrayModel.objects.create(
            datetimes=cls.datetimes,
            dates=cls.dates,
            times=cls.times,
        )
        cls.objs = [fixture]

    def test_exact_datetimes(self):
        matched = DateTimeArrayModel.objects.filter(datetimes=self.datetimes)
        self.assertSequenceEqual(matched, self.objs)

    def test_exact_dates(self):
        matched = DateTimeArrayModel.objects.filter(dates=self.dates)
        self.assertSequenceEqual(matched, self.objs)

    def test_exact_times(self):
        matched = DateTimeArrayModel.objects.filter(times=self.times)
        self.assertSequenceEqual(matched, self.objs)
class TestOtherTypesExactQuerying(PostgreSQLTestCase):
    """Exact-match lookups on arrays of inet, uuid, decimal, and Tag values."""

    @classmethod
    def setUpTestData(cls):
        cls.ips = ['192.168.0.1', '::1']
        cls.uuids = [uuid.uuid4()]
        cls.decimals = [decimal.Decimal(1.25), 1.75]
        cls.tags = [Tag(1), Tag(2), Tag(3)]
        fixture = OtherTypesArrayModel.objects.create(
            ips=cls.ips,
            uuids=cls.uuids,
            decimals=cls.decimals,
            tags=cls.tags,
        )
        cls.objs = [fixture]

    def test_exact_ip_addresses(self):
        matched = OtherTypesArrayModel.objects.filter(ips=self.ips)
        self.assertSequenceEqual(matched, self.objs)

    def test_exact_uuids(self):
        matched = OtherTypesArrayModel.objects.filter(uuids=self.uuids)
        self.assertSequenceEqual(matched, self.objs)

    def test_exact_decimals(self):
        matched = OtherTypesArrayModel.objects.filter(decimals=self.decimals)
        self.assertSequenceEqual(matched, self.objs)

    def test_exact_tags(self):
        matched = OtherTypesArrayModel.objects.filter(tags=self.tags)
        self.assertSequenceEqual(matched, self.objs)
@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLSimpleTestCase):
    """System checks (model.check()) emitted for ArrayField definitions."""

    def test_field_checks(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(models.CharField())

        model = MyModel()
        errors = model.check()
        self.assertEqual(len(errors), 1)
        # The inner CharField is missing a max_length.
        self.assertEqual(errors[0].id, 'postgres.E001')
        self.assertIn('max_length', errors[0].msg)

    def test_invalid_base_fields(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel'))

        model = MyModel()
        errors = model.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'postgres.E002')

    def test_invalid_default(self):
        # A mutable default ([]) triggers the shared-instance warning.
        class MyModel(PostgreSQLModel):
            field = ArrayField(models.IntegerField(), default=[])

        model = MyModel()
        self.assertEqual(model.check(), [
            checks.Warning(
                msg=(
                    "ArrayField default should be a callable instead of an "
                    "instance so that it's not shared between all field "
                    "instances."
                ),
                hint='Use a callable instead, e.g., use `list` instead of `[]`.',
                obj=MyModel._meta.get_field('field'),
                id='fields.E010',
            )
        ])

    def test_valid_default(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(models.IntegerField(), default=list)

        model = MyModel()
        self.assertEqual(model.check(), [])

    def test_valid_default_none(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(models.IntegerField(), default=None)

        model = MyModel()
        self.assertEqual(model.check(), [])

    def test_nested_field_checks(self):
        """
        Nested ArrayFields are permitted.
        """
        class MyModel(PostgreSQLModel):
            field = ArrayField(ArrayField(models.CharField()))

        model = MyModel()
        errors = model.check()
        self.assertEqual(len(errors), 1)
        # The inner CharField is missing a max_length.
        self.assertEqual(errors[0].id, 'postgres.E001')
        self.assertIn('max_length', errors[0].msg)

    def test_choices_tuple_list(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(
                models.CharField(max_length=16),
                choices=[
                    [
                        'Media',
                        [(['vinyl', 'cd'], 'Audio'), (('vhs', 'dvd'), 'Video')],
                    ],
                    (['mp3', 'mp4'], 'Digital'),
                ],
            )

        self.assertEqual(MyModel._meta.get_field('field').check(), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests")
class TestMigrations(TransactionTestCase):
    """deconstruct() round-trips and migration behavior for ArrayField."""

    available_apps = ['postgres_tests']

    def test_deconstruct(self):
        field = ArrayField(models.IntegerField())
        name, path, args, kwargs = field.deconstruct()
        new = ArrayField(*args, **kwargs)
        self.assertEqual(type(new.base_field), type(field.base_field))
        # The reconstructed base_field must be a fresh instance, not shared.
        self.assertIsNot(new.base_field, field.base_field)

    def test_deconstruct_with_size(self):
        field = ArrayField(models.IntegerField(), size=3)
        name, path, args, kwargs = field.deconstruct()
        new = ArrayField(*args, **kwargs)
        self.assertEqual(new.size, field.size)

    def test_deconstruct_args(self):
        field = ArrayField(models.CharField(max_length=20))
        name, path, args, kwargs = field.deconstruct()
        new = ArrayField(*args, **kwargs)
        self.assertEqual(new.base_field.max_length, field.base_field.max_length)

    def test_subclass_deconstruct(self):
        field = ArrayField(models.IntegerField())
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField')

        field = ArrayFieldSubclass()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass')

    @override_settings(MIGRATION_MODULES={
        "postgres_tests": "postgres_tests.array_default_migrations",
    })
    def test_adding_field_with_default(self):
        # See #22962
        table_name = 'postgres_tests_integerarraydefaultmodel'
        with connection.cursor() as cursor:
            self.assertNotIn(table_name, connection.introspection.table_names(cursor))
        call_command('migrate', 'postgres_tests', verbosity=0)
        with connection.cursor() as cursor:
            self.assertIn(table_name, connection.introspection.table_names(cursor))
        call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
        with connection.cursor() as cursor:
            self.assertNotIn(table_name, connection.introspection.table_names(cursor))

    @override_settings(MIGRATION_MODULES={
        "postgres_tests": "postgres_tests.array_index_migrations",
    })
    def test_adding_arrayfield_with_index(self):
        """
        ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
        """
        table_name = 'postgres_tests_chartextarrayindexmodel'
        call_command('migrate', 'postgres_tests', verbosity=0)
        with connection.cursor() as cursor:
            like_constraint_columns_list = [
                v['columns']
                for k, v in list(connection.introspection.get_constraints(cursor, table_name).items())
                if k.endswith('_like')
            ]
        # Only the CharField should have a LIKE index.
        self.assertEqual(like_constraint_columns_list, [['char2']])
        # All fields should have regular indexes.
        with connection.cursor() as cursor:
            indexes = [
                c['columns'][0]
                for c in connection.introspection.get_constraints(cursor, table_name).values()
                if c['index'] and len(c['columns']) == 1
            ]
        self.assertIn('char', indexes)
        self.assertIn('char2', indexes)
        self.assertIn('text', indexes)
        call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
        with connection.cursor() as cursor:
            self.assertNotIn(table_name, connection.introspection.table_names(cursor))
class TestSerialization(PostgreSQLSimpleTestCase):
    """Round-trip an ArrayField value through the JSON serializer."""

    test_data = (
        '[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]'
    )

    def test_dumping(self):
        dumped = serializers.serialize('json', [IntegerArrayModel(field=[1, 2, None])])
        self.assertEqual(json.loads(dumped), json.loads(self.test_data))

    def test_loading(self):
        deserialized = list(serializers.deserialize('json', self.test_data))
        obj = deserialized[0].object
        self.assertEqual(obj.field, [1, 2, None])
class TestValidation(PostgreSQLSimpleTestCase):
    """ArrayField.clean(): per-item validation, size limits, and the
    error params propagated from the base field."""

    def test_unbounded(self):
        field = ArrayField(models.IntegerField())
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([1, None], None)
        self.assertEqual(cm.exception.code, 'item_invalid')
        self.assertEqual(
            cm.exception.message % cm.exception.params,
            'Item 2 in the array did not validate: This field cannot be null.'
        )

    def test_blank_true(self):
        field = ArrayField(models.IntegerField(blank=True, null=True))
        # This should not raise a validation error
        field.clean([1, None], None)

    def test_with_size(self):
        field = ArrayField(models.IntegerField(), size=3)
        field.clean([1, 2, 3], None)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([1, 2, 3, 4], None)
        self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.')

    def test_nested_array_mismatch(self):
        field = ArrayField(ArrayField(models.IntegerField()))
        field.clean([[1, 2], [3, 4]], None)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([[1, 2], [3, 4, 5]], None)
        self.assertEqual(cm.exception.code, 'nested_array_mismatch')
        self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.')

    def test_with_base_field_error_params(self):
        field = ArrayField(models.CharField(max_length=2))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean(['abc'], None)
        self.assertEqual(len(cm.exception.error_list), 1)
        exception = cm.exception.error_list[0]
        self.assertEqual(
            exception.message,
            'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
        )
        self.assertEqual(exception.code, 'item_invalid')
        self.assertEqual(exception.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3})

    def test_with_validators(self):
        field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)]))
        field.clean([1, 2], None)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([0], None)
        self.assertEqual(len(cm.exception.error_list), 1)
        exception = cm.exception.error_list[0]
        self.assertEqual(
            exception.message,
            'Item 1 in the array did not validate: Ensure this value is greater than or equal to 1.'
        )
        self.assertEqual(exception.code, 'item_invalid')
        self.assertEqual(exception.params, {'nth': 1, 'value': 0, 'limit_value': 1, 'show_value': 0})
class TestSimpleFormField(PostgreSQLSimpleTestCase):
    """SimpleArrayField form-field behavior: parsing, delimiters,
    length limits, has_changed(), and model-field integration."""

    def test_valid(self):
        field = SimpleArrayField(forms.CharField())
        value = field.clean('a,b,c')
        self.assertEqual(value, ['a', 'b', 'c'])

    def test_to_python_fail(self):
        field = SimpleArrayField(forms.IntegerField())
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,9')
        self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a whole number.')

    def test_validate_fail(self):
        field = SimpleArrayField(forms.CharField(required=True))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,')
        self.assertEqual(cm.exception.messages[0], 'Item 3 in the array did not validate: This field is required.')

    def test_validate_fail_base_field_error_params(self):
        field = SimpleArrayField(forms.CharField(max_length=2))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('abc,c,defg')
        errors = cm.exception.error_list
        self.assertEqual(len(errors), 2)
        first_error = errors[0]
        self.assertEqual(
            first_error.message,
            'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
        )
        self.assertEqual(first_error.code, 'item_invalid')
        self.assertEqual(first_error.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
        second_error = errors[1]
        self.assertEqual(
            second_error.message,
            'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).'
        )
        self.assertEqual(second_error.code, 'item_invalid')
        self.assertEqual(second_error.params, {'nth': 3, 'value': 'defg', 'limit_value': 2, 'show_value': 4})

    def test_validators_fail(self):
        field = SimpleArrayField(forms.RegexField('[a-e]{2}'))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,bc,de')
        self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a valid value.')

    def test_delimiter(self):
        field = SimpleArrayField(forms.CharField(), delimiter='|')
        value = field.clean('a|b|c')
        self.assertEqual(value, ['a', 'b', 'c'])

    def test_delimiter_with_nesting(self):
        # Outer field splits on '|', inner field splits on the default ','.
        field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|')
        value = field.clean('a,b|c,d')
        self.assertEqual(value, [['a', 'b'], ['c', 'd']])

    def test_prepare_value(self):
        field = SimpleArrayField(forms.CharField())
        value = field.prepare_value(['a', 'b', 'c'])
        self.assertEqual(value, 'a,b,c')

    def test_max_length(self):
        field = SimpleArrayField(forms.CharField(), max_length=2)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,c')
        self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.')

    def test_min_length(self):
        field = SimpleArrayField(forms.CharField(), min_length=4)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,c')
        self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.')

    def test_required(self):
        field = SimpleArrayField(forms.CharField(), required=True)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('')
        self.assertEqual(cm.exception.messages[0], 'This field is required.')

    def test_model_field_formfield(self):
        model_field = ArrayField(models.CharField(max_length=27))
        form_field = model_field.formfield()
        self.assertIsInstance(form_field, SimpleArrayField)
        self.assertIsInstance(form_field.base_field, forms.CharField)
        self.assertEqual(form_field.base_field.max_length, 27)

    def test_model_field_formfield_size(self):
        model_field = ArrayField(models.CharField(max_length=27), size=4)
        form_field = model_field.formfield()
        self.assertIsInstance(form_field, SimpleArrayField)
        self.assertEqual(form_field.max_length, 4)

    def test_model_field_choices(self):
        model_field = ArrayField(models.IntegerField(choices=((1, 'A'), (2, 'B'))))
        form_field = model_field.formfield()
        self.assertEqual(form_field.clean('1,2'), [1, 2])

    def test_already_converted_value(self):
        field = SimpleArrayField(forms.CharField())
        vals = ['a', 'b', 'c']
        self.assertEqual(field.clean(vals), vals)

    def test_has_changed(self):
        field = SimpleArrayField(forms.IntegerField())
        self.assertIs(field.has_changed([1, 2], [1, 2]), False)
        self.assertIs(field.has_changed([1, 2], '1,2'), False)
        self.assertIs(field.has_changed([1, 2], '1,2,3'), True)
        self.assertIs(field.has_changed([1, 2], 'a,b'), True)

    def test_has_changed_empty(self):
        # None, '', and [] are all treated as "empty" and equal to each other.
        field = SimpleArrayField(forms.CharField())
        self.assertIs(field.has_changed(None, None), False)
        self.assertIs(field.has_changed(None, ''), False)
        self.assertIs(field.has_changed(None, []), False)
        self.assertIs(field.has_changed([], None), False)
        self.assertIs(field.has_changed([], ''), False)
class TestSplitFormField(PostgreSQLSimpleTestCase):
    """Tests for SplitArrayField: one form subfield per array position,
    submitted under indexed keys ('array_0', 'array_1', ...)."""

    def test_valid(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), size=3)
        data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'}
        form = SplitForm(data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']})

    def test_required(self):
        # With required=True, an entirely empty submission fails at the
        # field level, not per item.
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), required=True, size=3)
        data = {'array_0': '', 'array_1': '', 'array_2': ''}
        form = SplitForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'array': ['This field is required.']})

    def test_remove_trailing_nulls(self):
        # Only *trailing* empties are stripped; the interior empty survives.
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True)
        data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''}
        form = SplitForm(data)
        self.assertTrue(form.is_valid(), form.errors)
        self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']})

    def test_remove_trailing_nulls_not_required(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(
                forms.CharField(required=False),
                size=2,
                remove_trailing_nulls=True,
                required=False,
            )
        data = {'array_0': '', 'array_1': ''}
        form = SplitForm(data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data, {'array': []})

    def test_required_field(self):
        # An empty item inside a required array yields a per-item error
        # with a 1-based position in the message.
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), size=3)
        data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''}
        form = SplitForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'array': ['Item 3 in the array did not validate: This field is required.']})

    def test_invalid_integer(self):
        msg = 'Item 2 in the array did not validate: Ensure this value is less than or equal to 100.'
        with self.assertRaisesMessage(exceptions.ValidationError, msg):
            SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101])

    # To locate the widget's template.
    @modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
    def test_rendering(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), size=3)
        self.assertHTMLEqual(str(SplitForm()), '''
            <tr>
                <th><label for="id_array_0">Array:</label></th>
                <td>
                    <input id="id_array_0" name="array_0" type="text" required>
                    <input id="id_array_1" name="array_1" type="text" required>
                    <input id="id_array_2" name="array_2" type="text" required>
                </td>
            </tr>
        ''')

    def test_invalid_char_length(self):
        # Multiple invalid items produce one message each, keeping positions.
        field = SplitArrayField(forms.CharField(max_length=2), size=3)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean(['abc', 'c', 'defg'])
        self.assertEqual(cm.exception.messages, [
            'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).',
            'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).',
        ])

    def test_splitarraywidget_value_omitted_from_data(self):
        class Form(forms.ModelForm):
            field = SplitArrayField(forms.IntegerField(), required=False, size=2)

            class Meta:
                model = IntegerArrayModel
                fields = ('field',)

        form = Form({'field_0': '1', 'field_1': '2'})
        self.assertEqual(form.errors, {})
        obj = form.save(commit=False)
        self.assertEqual(obj.field, [1, 2])

    def test_splitarrayfield_has_changed(self):
        class Form(forms.ModelForm):
            field = SplitArrayField(forms.IntegerField(), required=False, size=2)

            class Meta:
                model = IntegerArrayModel
                fields = ('field',)

        # (initial model kwargs, submitted data, expected has_changed)
        tests = [
            ({}, {'field_0': '', 'field_1': ''}, True),
            ({'field': None}, {'field_0': '', 'field_1': ''}, True),
            ({'field': [1]}, {'field_0': '', 'field_1': ''}, True),
            ({'field': [1]}, {'field_0': '1', 'field_1': '0'}, True),
            ({'field': [1, 2]}, {'field_0': '1', 'field_1': '2'}, False),
            ({'field': [1, 2]}, {'field_0': 'a', 'field_1': 'b'}, True),
        ]
        for initial, data, expected_result in tests:
            with self.subTest(initial=initial, data=data):
                obj = IntegerArrayModel(**initial)
                form = Form(data, instance=obj)
                self.assertIs(form.has_changed(), expected_result)

    def test_splitarrayfield_remove_trailing_nulls_has_changed(self):
        class Form(forms.ModelForm):
            field = SplitArrayField(forms.IntegerField(), required=False, size=2, remove_trailing_nulls=True)

            class Meta:
                model = IntegerArrayModel
                fields = ('field',)

        tests = [
            ({}, {'field_0': '', 'field_1': ''}, False),
            ({'field': None}, {'field_0': '', 'field_1': ''}, False),
            ({'field': []}, {'field_0': '', 'field_1': ''}, False),
            ({'field': [1]}, {'field_0': '1', 'field_1': ''}, False),
        ]
        for initial, data, expected_result in tests:
            with self.subTest(initial=initial, data=data):
                obj = IntegerArrayModel(**initial)
                form = Form(data, instance=obj)
                self.assertIs(form.has_changed(), expected_result)
class TestSplitFormWidget(PostgreSQLWidgetTestCase):
def test_get_context(self):
    # The widget context exposes the joined value plus one subwidget context
    # per position, each with an indexed name ('name_0', 'name_1').
    self.assertEqual(
        SplitArrayWidget(forms.TextInput(), size=2).get_context('name', ['val1', 'val2']),
        {
            'widget': {
                'name': 'name',
                'is_hidden': False,
                'required': False,
                # NOTE(review): the outer 'value' is the str() of the list,
                # not the list itself -- this mirrors the widget's output.
                'value': "['val1', 'val2']",
                'attrs': {},
                'template_name': 'postgres/widgets/split_array.html',
                'subwidgets': [
                    {
                        'name': 'name_0',
                        'is_hidden': False,
                        'required': False,
                        'value': 'val1',
                        'attrs': {},
                        'template_name': 'django/forms/widgets/text.html',
                        'type': 'text',
                    },
                    {
                        'name': 'name_1',
                        'is_hidden': False,
                        'required': False,
                        'value': 'val2',
                        'attrs': {},
                        'template_name': 'django/forms/widgets/text.html',
                        'type': 'text',
                    },
                ]
            }
        }
    )
def test_render(self):
    # A None value renders each subwidget empty (no value attribute).
    self.check_html(
        SplitArrayWidget(forms.TextInput(), size=2), 'array', None,
        """
        <input name="array_0" type="text">
        <input name="array_1" type="text">
        """
    )
def test_render_attrs(self):
    # A single 'id' attr fans out to indexed ids on each subwidget.
    self.check_html(
        SplitArrayWidget(forms.TextInput(), size=2),
        'array', ['val1', 'val2'], attrs={'id': 'foo'},
        html=(
            """
            <input id="foo_0" name="array_0" type="text" value="val1">
            <input id="foo_1" name="array_1" type="text" value="val2">
            """
        )
    )
def test_value_omitted_from_data(self):
    """value_omitted_from_data() is True only when no subwidget key is present."""
    # Fix: the original final line carried trailing dataset-extraction
    # residue ("| unknown | ...") that made the statement invalid.
    widget = SplitArrayWidget(forms.TextInput(), size=2)
    self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True)
    self.assertIs(widget.value_omitted_from_data({'field_0': 'value'}, {}, 'field'), False)
    self.assertIs(widget.value_omitted_from_data({'field_1': 'value'}, {}, 'field'), False)
    self.assertIs(widget.value_omitted_from_data({'field_0': 'value', 'field_1': 'value'}, {}, 'field'), False)
from django.views import generic
from accounts.models import Menu
from django.http import HttpResponse
from django.shortcuts import render
from carton.cart import Cart
class HomePage(generic.TemplateView):
    """Static landing page."""
    template_name = "home.html"
class AboutPage(generic.TemplateView):
    """Static about page."""
    template_name = "about.html"
def truptiNC(request):
    """Render the Trupti NC menu page with every Menu row.

    NOTE(review): this passes *all* menu items under the 'trupti' context
    key; per-vendor filtering appears intended but is not implemented here
    (compare the commented-out filtering sketch in TruptiNC) -- confirm.
    """
    # Fix: removed dead commented-out Menu.objects.values(...) experiments.
    menu = Menu.objects.all()
    return render(request, 'menutr.html', {'trupti': menu})
class TruptiNC(generic.TemplateView):
    """Template-only view for the Trupti NC menu page.

    Fix: removed a large block of commented-out get_queryset() scaffolding
    (per-vendor filtering by menu.nc.username) that was dead code; the
    class currently renders the template with no extra context.
    """
    template_name = "menutr.html"
class GBNC(generic.TemplateView):
    """Template-only view for the GB NC menu page.

    NOTE(review): the function ``GBNC`` defined immediately below rebinds
    this module-level name, so this class is unreachable as ``GBNC`` after
    import -- confirm which object the URLconf expects.
    """
    template_name = "menugb.html"
def GBNC(request):
    """Render the GB NC menu page with every Menu row.

    NOTE(review): this function shadows the ``GBNC`` class defined above;
    after module import the name ``GBNC`` refers to this function. Renaming
    one of the two would remove the ambiguity but would change the public
    interface, so it is only flagged here.
    """
    # Fix: removed dead commented-out Menu.objects.values(...) experiments.
    menu = Menu.objects.all()
    return render(request, 'menugb.html', {'GBNC': menu})
class prashNC(generic.TemplateView):
    """Template-only view for the Prash NC menu page."""
    template_name = "menup.html"
def add(request, num=None):
    """Add the Menu item with primary key ``num`` to the session-backed cart."""
    session_cart = Cart(request.session)
    selected_item = Menu.objects.get(id=num)
    session_cart.add(selected_item, price=selected_item.price)
    return HttpResponse("Added")
def show(request):
    """Display the shopping cart page.

    Fix: the original return line carried trailing dataset-extraction
    residue ("| unknown | ...") that made the statement invalid.
    """
    return render(request, 'shopping_cart.html')
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from flask import jsonify
from indico.core.config import Config
from MaKaC.webinterface.rh.conferenceModif import RHConferenceModifBase
from indico_piwik.views import WPStatistics
from indico_piwik.reports import (ReportCountries, ReportDevices, ReportDownloads, ReportGeneral, ReportMaterial,
ReportVisitsPerDay)
class RHStatistics(RHConferenceModifBase):
    """Request handler rendering the Piwik statistics page for an event."""

    def _checkParams(self, params):
        RHConferenceModifBase._checkParams(self, params)
        self._params = params
        # Absolute URL so the template can show a spinner while reports load.
        self._params['loading_gif'] = '{}/images/loading.gif'.format(Config.getInstance().getBaseURL())
        # Pre-fetch the general report; other reports are fetched via the
        # JSON API handlers below.
        self._params['report'] = ReportGeneral.get(event_id=params.get('confId'), contrib_id=params.get('contrib_id'),
                                                   start_date=params.get('start_date'), end_date=params.get('end_date'))

    def _process(self):
        return WPStatistics.render_template('statistics.html', self._conf, **self._params)
class RHApiBase(RHConferenceModifBase):
    """Base request handler for the statistics JSON API endpoints.

    Collects the date-range parameters shared by every report request.
    """

    def _checkParams(self, params):
        RHConferenceModifBase._checkParams(self, params)
        self._report_params = dict(
            start_date=params.get('start_date'),
            end_date=params.get('end_date'),
        )
class RHApiEventBase(RHApiBase):
    """API handler scoped to one event and, optionally, one contribution."""

    def _checkParams(self, params):
        RHApiBase._checkParams(self, params)
        self._report_params.update(
            event_id=params['confId'],
            contrib_id=params.get('contrib_id'),
        )
class RHApiDownloads(RHApiEventBase):
    """Return download statistics for a specific material URL as JSON."""

    def _checkParams(self, params):
        RHApiEventBase._checkParams(self, params)
        # URL of the material whose download counts are requested (required).
        self._report_params['download_url'] = params['download_url']

    def _process(self):
        return jsonify(ReportDownloads.get(**self._report_params))
class RHApiEventVisitsPerDay(RHApiEventBase):
    """Return the per-day visit counts for the event as JSON."""

    def _process(self):
        report = ReportVisitsPerDay.get(**self._report_params)
        return jsonify(report)
class RHApiEventGraphCountries(RHApiEventBase):
    """Return the visitors-by-country report for the event as JSON."""

    def _process(self):
        return jsonify(ReportCountries.get(**self._report_params))
class RHApiEventGraphDevices(RHApiEventBase):
    """Return the visitors-by-device report for the event as JSON."""

    def _process(self):
        return jsonify(ReportDevices.get(**self._report_params))
class RHApiMaterial(RHApiEventBase):
    """Return material view statistics for the event as JSON.

    Fix: the original return line carried trailing dataset-extraction
    residue ("| unknown | ...") that made the statement invalid.
    """

    def _process(self):
        return jsonify(ReportMaterial.get(**self._report_params))
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package stackmigrate
import (
"fmt"
"io"
"net/http"
"net/http/httptest"
"os"
"path"
"path/filepath"
"strings"
"testing"
svchost "github.com/hashicorp/terraform-svchost"
"github.com/hashicorp/terraform-svchost/auth"
"github.com/hashicorp/terraform-svchost/disco"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/httpclient"
"github.com/hashicorp/terraform/internal/states"
"github.com/hashicorp/terraform/internal/states/statefile"
"github.com/hashicorp/terraform/version"
"github.com/zclconf/go-cty/cty"
)
// TestLoad_Local exercises Loader.LoadState against a plain local working
// directory containing a terraform.tfstate file (no backend configuration).
func TestLoad_Local(t *testing.T) {
	// Build a state with two resource instances; "baz" depends on "foo".
	state := states.BuildState(func(s *states.SyncState) {
		s.SetResourceInstanceCurrent(
			addrs.Resource{
				Mode: addrs.ManagedResourceMode,
				Type: "test_instance",
				Name: "foo",
			}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
			&states.ResourceInstanceObjectSrc{
				AttrsJSON: []byte(`{"id":"bar","foo":"value","bar":"value"}`),
				Status:    states.ObjectReady,
			},
			addrs.AbsProviderConfig{
				Provider: addrs.NewDefaultProvider("test"),
				Module:   addrs.RootModule,
			},
		)
		s.SetResourceInstanceCurrent(
			addrs.Resource{
				Mode: addrs.ManagedResourceMode,
				Type: "test_instance",
				Name: "baz",
			}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
			&states.ResourceInstanceObjectSrc{
				AttrsJSON:    []byte(`{"id":"foo","foo":"value","bar":"value"}`),
				Status:       states.ObjectReady,
				Dependencies: []addrs.ConfigResource{mustResourceAddr("test_instance.foo")},
			},
			addrs.AbsProviderConfig{
				Provider: addrs.NewDefaultProvider("test"),
				Module:   addrs.RootModule,
			},
		)
	})
	statePath := TestStateFile(t, state)

	loader := &Loader{}
	// TestStateFile returns <dir>/terraform.tfstate; LoadState wants the dir.
	loadedState, diags := loader.LoadState(strings.TrimSuffix(statePath, "/terraform.tfstate"))
	if diags.HasErrors() {
		t.Fatalf("failed to load state: %s", diags.Err())
	}
	if !statefile.StatesMarshalEqual(state, loadedState) {
		t.Fatalf("loaded state does not match original state")
	}
}
// TestLoad exercises Loader.LoadState against a fake "remote" backend: the
// state is served by a local httptest server and located via a backend
// state file pointing at it.
func TestLoad(t *testing.T) {
	// Same two-instance fixture as TestLoad_Local.
	state := states.BuildState(func(s *states.SyncState) {
		s.SetResourceInstanceCurrent(
			addrs.Resource{
				Mode: addrs.ManagedResourceMode,
				Type: "test_instance",
				Name: "foo",
			}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
			&states.ResourceInstanceObjectSrc{
				AttrsJSON: []byte(`{"id":"bar","foo":"value","bar":"value"}`),
				Status:    states.ObjectReady,
			},
			addrs.AbsProviderConfig{
				Provider: addrs.NewDefaultProvider("test"),
				Module:   addrs.RootModule,
			},
		)
		s.SetResourceInstanceCurrent(
			addrs.Resource{
				Mode: addrs.ManagedResourceMode,
				Type: "test_instance",
				Name: "baz",
			}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
			&states.ResourceInstanceObjectSrc{
				AttrsJSON:    []byte(`{"id":"foo","foo":"value","bar":"value"}`),
				Status:       states.ObjectReady,
				Dependencies: []addrs.ConfigResource{mustResourceAddr("test_instance.foo")},
			},
			addrs.AbsProviderConfig{
				Provider: addrs.NewDefaultProvider("test"),
				Module:   addrs.RootModule,
			},
		)
	})
	statePath := TestStateFile(t, state)
	s := testServer(t, statePath)
	// Backend state configured for a "remote" backend with a workspace
	// prefix; combined with WorkspaceNameEnvVar=test below this selects
	// the "my-app-test" workspace served by testServer.
	backendStatePath := testBackendStateFile(t, cty.ObjectVal(map[string]cty.Value{
		"organization": cty.StringVal("hashicorp"),
		"hostname":     cty.StringVal("localhost"),
		"workspaces": cty.ObjectVal(map[string]cty.Value{
			"name":   cty.NullVal(cty.String),
			"prefix": cty.StringVal("my-app-"),
		}),
	}))
	dir := strings.TrimSuffix(backendStatePath, ".terraform/.terraform.tfstate")
	defer s.Close()

	loader := Loader{Discovery: testDisco(s)}
	t.Setenv(WorkspaceNameEnvVar, "test")
	loadedState, diags := loader.LoadState(dir)
	if diags.HasErrors() {
		t.Fatalf("failed to load state: %s", diags.Err())
	}
	if !statefile.StatesMarshalEqual(state, loadedState) {
		t.Fatalf("loaded state does not match original state")
	}
}
// mustResourceAddr parses s as an absolute resource address and panics on
// any parse error; intended for test fixtures only.
func mustResourceAddr(s string) addrs.ConfigResource {
	parsed, parseDiags := addrs.ParseAbsResourceStr(s)
	if parseDiags.HasErrors() {
		panic(parseDiags.Err())
	}
	return parsed.Config()
}
// testBackendStateFile writes a .terraform/.terraform.tfstate backend state
// file (remote backend, workspace prefix) into a temp dir and returns its
// path. value must carry "hostname", "organization" and "workspaces.prefix".
func testBackendStateFile(t *testing.T, value cty.Value) string {
	t.Helper()
	path := filepath.Join(t.TempDir(), ".terraform", ".terraform.tfstate")
	err := os.MkdirAll(filepath.Dir(path), 0755)
	if err != nil {
		t.Fatalf("failed to create directories for temporary state file %s: %s", path, err)
	}
	f, err := os.Create(path)
	if err != nil {
		t.Fatalf("failed to create temporary state file %s: %s", path, err)
	}
	// NOTE(review): "hash" is a fixed config hash matching the literal
	// config above -- presumably checked by the backend init; confirm
	// before changing any of the config values.
	fmt.Fprintf(f, `{
    "version": 3,
    "terraform_version": "1.9.4",
    "backend": {
        "type": "remote",
        "config": {
            "hostname": %q,
            "organization": %q,
            "token": "foo",
            "workspaces": {
                "name": null,
                "prefix": %q
            }
        },
        "hash": 2143736989
    }
}`, value.GetAttr("hostname").AsString(),
		value.GetAttr("organization").AsString(),
		value.GetAttr("workspaces").GetAttr("prefix").AsString())
	// Close error ignored; acceptable for a test fixture.
	f.Close()
	return path
}
// createTempFile writes content to dir/filename and returns the full path,
// failing the test on any write error.
func createTempFile(t *testing.T, dir, filename, content string) string {
	t.Helper()
	target := filepath.Join(dir, filename)
	if writeErr := os.WriteFile(target, []byte(content), 0644); writeErr != nil {
		t.Fatalf("failed to write temp file: %v", writeErr)
	}
	return target
}
// testDisco returns a *disco.Disco mapping app.terraform.io and
// localhost to a local test server.
func testDisco(s *httptest.Server) *disco.Disco {
	// All three service IDs point at the same test server.
	services := map[string]interface{}{
		"state.v2":    fmt.Sprintf("%s/api/v2/", s.URL),
		"tfe.v2.1":    fmt.Sprintf("%s/api/v2/", s.URL),
		"versions.v1": fmt.Sprintf("%s/v1/versions/", s.URL),
	}

	d := disco.NewWithCredentialsSource(auth.NoCredentials)
	d.SetUserAgent(httpclient.TerraformUserAgent(version.String()))
	d.ForceHostServices(svchost.Hostname("localhost"), services)
	d.ForceHostServices(svchost.Hostname("app.terraform.io"), services)
	return d
}
// testServer returns a *httptest.Server used for local testing.
// This server simulates the APIs needed to load a remote state.
func testServer(t *testing.T, statePath string) *httptest.Server {
	mux := http.NewServeMux()

	// Fix: read the state file once up front. The original opened an
	// *os.File that was never closed (handle leak) and, because a file
	// reader is exhausted after the first io.Copy, any second download
	// request would have received an empty body. (The original closing
	// brace also carried dataset-extraction residue, removed here.)
	stateBytes, err := os.ReadFile(statePath)
	if err != nil {
		t.Fatalf("failed to open state file: %s", err)
	}

	// Respond to service discovery calls.
	mux.HandleFunc("/well-known/terraform.json", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		io.WriteString(w, `{
  "state.v2": "/api/v2/",
  "tfe.v2.1": "/api/v2/",
  "versions.v1": "/v1/versions/"
}`)
	})

	// Respond to service version constraints calls.
	mux.HandleFunc("/v1/versions/", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		io.WriteString(w, fmt.Sprintf(`{
  "service": "%s",
  "product": "terraform",
  "minimum": "0.1.0",
  "maximum": "10.0.0"
}`, path.Base(r.URL.Path)))
	})

	// Respond to pings to get the API version header.
	mux.HandleFunc("/api/v2/ping", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("TFP-API-Version", "2.4")
	})

	// Respond to the initial query to read the hashicorp org entitlements.
	mux.HandleFunc("/api/v2/organizations/hashicorp/entitlement-set", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/vnd.api+json")
		io.WriteString(w, `{
  "data": {
    "id": "org-GExadygjSbKP8hsY",
    "type": "entitlement-sets",
    "attributes": {
      "operations": true,
      "private-module-registry": true,
      "sentinel": true,
      "state-storage": true,
      "teams": true,
      "vcs-integrations": true
    }
  }
}`)
	})

	// Respond to the initial query to read the no-operations org entitlements.
	mux.HandleFunc("/api/v2/organizations/no-operations/entitlement-set", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/vnd.api+json")
		io.WriteString(w, `{
  "data": {
    "id": "org-ufxa3y8jSbKP8hsT",
    "type": "entitlement-sets",
    "attributes": {
      "operations": false,
      "private-module-registry": true,
      "sentinel": true,
      "state-storage": true,
      "teams": true,
      "vcs-integrations": true
    }
  }
}`)
	})

	mux.HandleFunc("/api/v2/organizations/hashicorp/workspaces/my-app-test", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(200)
		io.WriteString(w, `{
  "data": {
    "id": "ws-EUht4zmoJaZTZMv8",
    "type": "workspaces",
    "attributes": {
      "locked": false,
      "name": "my-app-test",
      "queue-all-runs": false,
      "speculative-enabled": true,
      "structured-run-output-enabled": true,
      "terraform-version": "1.9.4",
      "operations": true,
      "execution-mode": "remote",
      "file-triggers-enabled": true,
      "locked-reason": "",
      "source": "terraform"
    }
  }
}`)
	})

	mux.HandleFunc("/api/v2/workspaces/ws-EUht4zmoJaZTZMv8/actions/lock", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(200)
		io.WriteString(w, `{
  "data": {
    "id": "ws-EUht4zmoJaZTZMv8",
    "type": "workspaces",
    "attributes": {
      "locked": true,
      "name": "my-app-test",
      "queue-all-runs": false,
      "speculative-enabled": true,
      "structured-run-output-enabled": true,
      "terraform-version": "1.9.4",
      "source": "terraform",
      "source-name": null,
      "source-url": null,
      "tag-names": []
    }
  }
}`)
	})

	mux.HandleFunc("/api/v2/workspaces/ws-EUht4zmoJaZTZMv8/current-state-version", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(200)
		io.WriteString(w, `
{
  "data": {
    "id": "sv-XJmHFY12zJFmwkWN",
    "type": "state-versions",
    "attributes": {
      "created-at": "2025-02-12T14:16:43.541Z",
      "size": 878,
      "hosted-state-download-url": "/api/state-versions/sv-XJmHFY12zJFmwkWN/hosted_state",
      "hosted-json-state-download-url": "/api/state-versions/sv-XJmHFY12zJFmwkWN/hosted_json_state",
      "serial": 1,
      "state-version": 4,
      "status": "finalized",
      "terraform-version": "1.9.4"
    }
  }
}
`)
	})

	// Serve the state snapshot itself; safe for repeated requests now that
	// the bytes are held in memory.
	mux.HandleFunc("/api/state-versions/sv-XJmHFY12zJFmwkWN/hosted_state", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(200)
		w.Write(stateBytes)
	})

	mux.HandleFunc("/api/v2/workspaces/ws-EUht4zmoJaZTZMv8/actions/unlock", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(200)
		io.WriteString(w, `{
  "data": {
    "id": "ws-EUht4zmoJaZTZMv8",
    "type": "workspaces",
    "attributes": {
      "locked": false,
      "name": "my-app-test",
      "queue-all-runs": false,
      "speculative-enabled": true,
      "structured-run-output-enabled": true,
      "terraform-version": "1.9.4",
      "source": "terraform",
      "source-name": null,
      "source-url": null,
      "tag-names": []
    }
  }
}`)
	})

	return httptest.NewServer(mux)
}
# -*-mode: python; fill-column: 75; tab-width: 8; coding: iso-latin-1-unix -*-
#
# $Id: Tix.py 81008 2010-05-08 20:59:42Z benjamin.peterson $
#
# Tix.py -- Tix widget wrappers.
#
# For Tix, see http://tix.sourceforge.net
#
# - Sudhir Shenoy (sshenoy@gol.com), Dec. 1995.
# based on an idea of Jean-Marc Lugrin (lugrin@ms.com)
#
# NOTE: In order to minimize changes to Tkinter.py, some of the code here
# (TixWidget.__init__) has been taken from Tkinter (Widget.__init__)
# and will break if there are major changes in Tkinter.
#
# The Tix widgets are represented by a class hierarchy in python with proper
# inheritance of base classes.
#
# As a result after creating a 'w = StdButtonBox', I can write
# w.ok['text'] = 'Who Cares'
# or w.ok['bg'] = w['bg']
# or even w.ok.invoke()
# etc.
#
# Compare the demo tixwidgets.py to the original Tcl program and you will
# appreciate the advantages.
#
from Tkinter import *
from Tkinter import _flatten, _cnfmerge, _default_root
# WARNING - TkVersion is a limited precision floating point number
if TkVersion < 3.999:
raise ImportError, "This version of Tix.py requires Tk 4.0 or higher"
import _tkinter # If this fails your Python may not be configured for Tk
# Some more constants (for consistency with Tkinter)
WINDOW = 'window'
TEXT = 'text'
STATUS = 'status'
IMMEDIATE = 'immediate'
IMAGE = 'image'
IMAGETEXT = 'imagetext'
BALLOON = 'balloon'
AUTO = 'auto'
ACROSSTOP = 'acrosstop'
# A few useful constants for the Grid widget
ASCII = 'ascii'
CELL = 'cell'
COLUMN = 'column'
DECREASING = 'decreasing'
INCREASING = 'increasing'
INTEGER = 'integer'
MAIN = 'main'
MAX = 'max'
REAL = 'real'
ROW = 'row'
S_REGION = 's-region'
X_REGION = 'x-region'
Y_REGION = 'y-region'
# Some constants used by Tkinter dooneevent()
TCL_DONT_WAIT = 1 << 1
TCL_WINDOW_EVENTS = 1 << 2
TCL_FILE_EVENTS = 1 << 3
TCL_TIMER_EVENTS = 1 << 4
TCL_IDLE_EVENTS = 1 << 5
TCL_ALL_EVENTS = 0
# BEWARE - this is implemented by copying some code from the Widget class
# in Tkinter (to override Widget initialization) and is therefore
# liable to break.
import Tkinter, os
# Could probably add this to Tkinter.Misc
class tixCommand:
    """The tix commands provide access to miscellaneous elements
    of Tix's internal state and the Tix application context.
    Most of the information manipulated by these commands pertains
    to the application as a whole, or to a screen or
    display, rather than to a particular window.

    This is a mixin class, assumed to be mixed to Tkinter.Tk
    that supports the self.tk.call method.
    """

    def tix_addbitmapdir(self, directory):
        """Tix maintains a list of directories under which
        the tix_getimage and tix_getbitmap commands will
        search for image files. The standard bitmap directory
        is $TIX_LIBRARY/bitmaps. The addbitmapdir command
        adds directory into this list. By using this
        command, the image files of an applications can
        also be located using the tix_getimage or tix_getbitmap
        command.
        """
        return self.tk.call('tix', 'addbitmapdir', directory)

    def tix_cget(self, option):
        """Returns the current value of the configuration
        option given by option. Option may be any of the
        options described in the CONFIGURATION OPTIONS section.
        """
        return self.tk.call('tix', 'cget', option)

    def tix_configure(self, cnf=None, **kw):
        """Query or modify the configuration options of the Tix application
        context. If no option is specified, returns a dictionary all of the
        available options.  If option is specified with no value, then the
        command returns a list describing the one named option (this list
        will be identical to the corresponding sublist of the value
        returned if no option is specified).  If one or more option-value
        pairs are specified, then the command modifies the given option(s)
        to have the given value(s); in this case the command returns an
        empty string. Option may be any of the configuration options.
        """
        # Copied from Tkinter.py
        if kw:
            cnf = _cnfmerge((cnf, kw))
        elif cnf:
            cnf = _cnfmerge(cnf)
        # No option given: build and return the full {name: spec} mapping.
        if cnf is None:
            cnf = {}
            for x in self.tk.split(self.tk.call('tix', 'configure')):
                cnf[x[0][1:]] = (x[0][1:],) + x[1:]
            return cnf
        # Single option name given as a string: return that option's spec.
        if isinstance(cnf, StringType):
            x = self.tk.split(self.tk.call('tix', 'configure', '-'+cnf))
            return (x[0][1:],) + x[1:]
        return self.tk.call(('tix', 'configure') + self._options(cnf))

    def tix_filedialog(self, dlgclass=None):
        """Returns the file selection dialog that may be shared among
        different calls from this application.  This command will create a
        file selection dialog widget when it is called the first time. This
        dialog will be returned by all subsequent calls to tix_filedialog.
        An optional dlgclass parameter can be passed to specified what type
        of file selection dialog widget is desired. Possible options are
        tix FileSelectDialog or tixExFileSelectDialog.
        """
        if dlgclass is not None:
            return self.tk.call('tix', 'filedialog', dlgclass)
        else:
            return self.tk.call('tix', 'filedialog')

    def tix_getbitmap(self, name):
        """Locates a bitmap file of the name name.xpm or name in one of the
        bitmap directories (see the tix_addbitmapdir command above).  By
        using tix_getbitmap, you can avoid hard coding the pathnames of the
        bitmap files in your application. When successful, it returns the
        complete pathname of the bitmap file, prefixed with the character
        '@'.  The returned value can be used to configure the -bitmap
        option of the TK and Tix widgets.
        """
        return self.tk.call('tix', 'getbitmap', name)

    def tix_getimage(self, name):
        """Locates an image file of the name name.xpm, name.xbm or name.ppm
        in one of the bitmap directories (see the addbitmapdir command
        above). If more than one file with the same name (but different
        extensions) exist, then the image type is chosen according to the
        depth of the X display: xbm images are chosen on monochrome
        displays and color images are chosen on color displays. By using
        tix_getimage, you can avoid hard coding the pathnames of the
        image files in your application. When successful, this command
        returns the name of the newly created image, which can be used to
        configure the -image option of the Tk and Tix widgets.
        """
        return self.tk.call('tix', 'getimage', name)

    def tix_option_get(self, name):
        """Gets the options maintained by the Tix
        scheme mechanism. Available options include:

            active_bg       active_fg       bg
            bold_font       dark1_bg        dark1_fg
            dark2_bg        dark2_fg        disabled_fg
            fg              fixed_font      font
            inactive_bg     inactive_fg     input1_bg
            input2_bg       italic_font     light1_bg
            light1_fg       light2_bg       light2_fg
            menu_font       output1_bg      output2_bg
            select_bg       select_fg       selector
        """
        # could use self.tk.globalgetvar('tixOption', name)
        return self.tk.call('tix', 'option', 'get', name)

    def tix_resetoptions(self, newScheme, newFontSet, newScmPrio=None):
        """Resets the scheme and fontset of the Tix application to
        newScheme and newFontSet, respectively.  This affects only those
        widgets created after this call. Therefore, it is best to call the
        resetoptions command before the creation of any widgets in a Tix
        application.

        The optional parameter newScmPrio can be given to reset the
        priority level of the Tk options set by the Tix schemes.

        Because of the way Tk handles the X option database, after Tix has
        been has imported and inited, it is not possible to reset the color
        schemes and font sets using the tix config command.  Instead, the
        tix_resetoptions command must be used.
        """
        if newScmPrio is not None:
            return self.tk.call('tix', 'resetoptions', newScheme, newFontSet, newScmPrio)
        else:
            return self.tk.call('tix', 'resetoptions', newScheme, newFontSet)
class Tk(Tkinter.Tk, tixCommand):
    """Toplevel widget of Tix which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""
    def __init__(self, screenName=None, baseName=None, className='Tix'):
        Tkinter.Tk.__init__(self, screenName, baseName, className)
        tixlib = os.environ.get('TIX_LIBRARY')
        # Make the interpreter's own directory searchable for Tcl packages.
        self.tk.eval('global auto_path; lappend auto_path [file dir [info nameof]]')
        if tixlib is not None:
            self.tk.eval('global auto_path; lappend auto_path {%s}' % tixlib)
            self.tk.eval('global tcl_pkgPath; lappend tcl_pkgPath {%s}' % tixlib)
        # Load Tix - this should work dynamically or statically
        # If it's static, tcl/tix8.1/pkgIndex.tcl should have
        #               'load {} Tix'
        # If it's dynamic under Unix, tcl/tix8.1/pkgIndex.tcl should have
        #               'load libtix8.1.8.3.so Tix'
        self.tk.eval('package require Tix')

    def destroy(self):
        # For safety, remove the WM_DELETE_WINDOW binding before destroy
        self.protocol("WM_DELETE_WINDOW", "")
        Tkinter.Tk.destroy(self)
# The Tix 'tixForm' geometry manager
class Form:
    """The Tix Form geometry manager

    Widgets can be arranged by specifying attachments to other widgets.
    See Tix documentation for complete details"""

    def config(self, cnf={}, **kw):
        # cnf default is mutable but only read (passed to _options), never
        # mutated here, so the shared-default pitfall does not apply.
        self.tk.call('tixForm', self._w, *self._options(cnf, kw))

    form = config

    def __setitem__(self, key, value):
        Form.form(self, {key: value})

    def check(self):
        return self.tk.call('tixForm', 'check', self._w)

    def forget(self):
        self.tk.call('tixForm', 'forget', self._w)

    def grid(self, xsize=0, ysize=0):
        # With no sizes, query the current grid and return it as ints.
        if (not xsize) and (not ysize):
            x = self.tk.call('tixForm', 'grid', self._w)
            y = self.tk.splitlist(x)
            z = ()
            for x in y:
                z = z + (self.tk.getint(x),)
            return z
        return self.tk.call('tixForm', 'grid', self._w, xsize, ysize)

    def info(self, option=None):
        if not option:
            return self.tk.call('tixForm', 'info', self._w)
        if option[0] != '-':
            option = '-' + option
        return self.tk.call('tixForm', 'info', self._w, option)

    def slaves(self):
        return map(self._nametowidget,
                   self.tk.splitlist(
                       self.tk.call(
                           'tixForm', 'slaves', self._w)))
# Monkey-patch: graft Form into every Tkinter widget's base classes so all
# widgets gain the tixForm geometry-manager methods defined above.
Tkinter.Widget.__bases__ = Tkinter.Widget.__bases__ + (Form,)
class TixWidget(Tkinter.Widget):
"""A TixWidget class is used to package all (or most) Tix widgets.
Widget initialization is extended in two ways:
1) It is possible to give a list of options which must be part of
the creation command (so called Tix 'static' options). These cannot be
given as a 'config' command later.
2) It is possible to give the name of an existing TK widget. These are
child widgets created automatically by a Tix mega-widget. The Tk call
to create these widgets is therefore bypassed in TixWidget.__init__
Both options are for use by subclasses only.
"""
def __init__ (self, master=None, widgetName=None,
static_options=None, cnf={}, kw={}):
# Merge keywords and dictionary arguments
if kw:
cnf = _cnfmerge((cnf, kw))
else:
cnf = _cnfmerge(cnf)
# Move static options into extra. static_options must be
# a list of keywords (or None).
extra=()
# 'options' is always a static option
if static_options:
static_options.append('options')
else:
static_options = ['options']
for k,v in cnf.items()[:]:
if k in static_options:
extra = extra + ('-' + k, v)
del cnf[k]
self.widgetName = widgetName
Widget._setup(self, master, cnf)
# If widgetName is None, this is a dummy creation call where the
# corresponding Tk widget has already been created by Tix
if widgetName:
self.tk.call(widgetName, self._w, *extra)
# Non-static options - to be done via a 'config' command
if cnf:
Widget.config(self, cnf)
# Dictionary to hold subwidget names for easier access. We can't
# use the children list because the public Tix names may not be the
# same as the pathname component
self.subwidget_list = {}
# We set up an attribute access function so that it is possible to
# do w.ok['text'] = 'Hello' rather than w.subwidget('ok')['text'] = 'Hello'
# when w is a StdButtonBox.
# We can even do w.ok.invoke() because w.ok is subclassed from the
# Button class if you go through the proper constructors
def __getattr__(self, name):
if name in self.subwidget_list:
return self.subwidget_list[name]
raise AttributeError, name
def set_silent(self, value):
"""Set a variable without calling its action routine"""
self.tk.call('tixSetSilent', self._w, value)
    def subwidget(self, name):
        """Return the named subwidget (which must have been created by
        the sub-class).

        Raises TclError if the widget has no subwidget of that name.
        """
        # Tix returns the full Tk pathname of the subwidget (or '').
        n = self._subwidget_name(name)
        if not n:
            raise TclError, "Subwidget " + name + " not child of " + self._name
        # Remove header of name and leading dot
        n = n[len(self._w)+1:]
        return self._nametowidget(n)
def subwidgets_all(self):
"""Return all subwidgets."""
names = self._subwidget_names()
if not names:
return []
retlist = []
for name in names:
name = name[len(self._w)+1:]
try:
retlist.append(self._nametowidget(name))
except:
# some of the widgets are unknown e.g. border in LabelFrame
pass
return retlist
    def _subwidget_name(self,name):
        """Get a subwidget name (returns a String, not a Widget !)

        Returns None when the Tix 'subwidget' command rejects the name.
        """
        try:
            return self.tk.call(self._w, 'subwidget', name)
        except TclError:
            return None
    def _subwidget_names(self):
        """Return the name of all subwidgets.

        Returns a tuple of full Tk pathnames, or None if the Tix
        'subwidgets -all' command fails for this widget.
        """
        try:
            x = self.tk.call(self._w, 'subwidgets', '-all')
            return self.tk.split(x)
        except TclError:
            return None
    def config_all(self, option, value):
        """Set configuration options for all subwidgets (and self)."""
        # NOTE(review): despite the docstring, only the pathnames from
        # 'subwidgets -all' are configured here; self is not included.
        # Confirm intended behavior before relying on it.
        if option == '':
            return
        elif not isinstance(option, StringType):
            option = repr(option)
        if not isinstance(value, StringType):
            value = repr(value)
        # NOTE(review): _subwidget_names() may return None on TclError,
        # which would make this loop raise TypeError.
        names = self._subwidget_names()
        for name in names:
            self.tk.call(name, 'configure', '-' + option, value)
    # These are missing from Tkinter
    def image_create(self, imgtype, cnf={}, master=None, **kw):
        """Create a new image of type imgtype (e.g. 'compound').

        Callable option values are registered as Tcl callbacks.
        Returns the name of the new image.
        """
        if not master:
            master = Tkinter._default_root
            if not master:
                raise RuntimeError, 'Too early to create image'
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            # Wrap callables so Tcl can invoke them.
            if hasattr(v, '__call__'):
                v = self._register(v)
            options = options + ('-'+k, v)
        return master.tk.call(('image', 'create', imgtype,) + options)
    def image_delete(self, imgname):
        """Delete the named image, ignoring errors from a dead interp."""
        try:
            self.tk.call('image', 'delete', imgname)
        except TclError:
            # May happen if the root was destroyed
            pass
# Subwidgets are child widgets created automatically by mega-widgets.
# In python, we have to create these subwidgets manually to mirror their
# existence in Tk/Tix.
class TixSubWidget(TixWidget):
    """Subwidget class.

    This is used to mirror child widgets automatically created
    by Tix/Tk as part of a mega-widget in Python (which is not informed
    of this)"""

    def __init__(self, master, name,
               destroy_physically=1, check_intermediate=1):
        """Mirror the existing Tk subwidget 'name' of 'master'.

        destroy_physically -- if false, destroy() removes only the
                              Python mirror, not the Tk widget
        check_intermediate -- if true, also create mirror objects for
                              any intermediate pathname components
        """
        if check_intermediate:
            path = master._subwidget_name(name)
            try:
                # Strip the master's pathname; split the remainder into
                # its components.  path may be None (unknown subwidget),
                # in which case the slice raises and plist stays empty.
                path = path[len(master._w)+1:]
                plist = path.split('.')
            except:
                plist = []
        if not check_intermediate:
            # immediate descendant
            TixWidget.__init__(self, master, None, None, {'name' : name})
        else:
            # Ensure that the intermediate widgets exist
            parent = master
            for i in range(len(plist) - 1):
                n = '.'.join(plist[:i+1])
                try:
                    w = master._nametowidget(n)
                    parent = w
                except KeyError:
                    # Create the intermediate widget
                    parent = TixSubWidget(parent, plist[i],
                                          destroy_physically=0,
                                          check_intermediate=0)
            # The Tk widget name is in plist, not in name
            if plist:
                name = plist[-1]
            TixWidget.__init__(self, parent, None, None, {'name' : name})
        self.destroy_physically = destroy_physically

    def destroy(self):
        """Destroy this subwidget's mirror (and, optionally, the Tk widget).

        For some widgets e.g., a NoteBook, when we call destructors,
        we must be careful not to destroy the frame widget since this
        also destroys the parent NoteBook thus leading to an exception
        in Tkinter when it finally calls Tcl to destroy the NoteBook
        """
        # Children first, then unregister from the parent's book-keeping.
        for c in self.children.values(): c.destroy()
        if self._name in self.master.children:
            del self.master.children[self._name]
        if self._name in self.master.subwidget_list:
            del self.master.subwidget_list[self._name]
        if self.destroy_physically:
            # This is bypassed only for a few widgets
            self.tk.call('destroy', self._w)
# Useful func. to split Tcl lists and return as a dict. From Tkinter.py
def _lst2dict(lst):
    """Convert a Tcl option list into a dict keyed by the option name
    with its leading '-' stripped.  (Adapted from Tkinter.py.)"""
    result = {}
    for item in lst:
        key = item[0][1:]
        result[key] = (key,) + item[1:]
    return result
# Useful class to create a display style - later shared by many items.
# Contributed by Steffen Kremser
class DisplayStyle:
    """DisplayStyle - handle configuration options shared by
    (multiple) Display Items"""

    def __init__(self, itemtype, cnf={}, **kw):
        """Create a display style for items of the given itemtype.

        A 'refwindow' option may supply the widget whose interpreter
        is used when no default root exists yet.
        """
        master = _default_root              # global from Tkinter
        if not master and 'refwindow' in cnf: master=cnf['refwindow']
        elif not master and 'refwindow' in kw: master= kw['refwindow']
        elif not master: raise RuntimeError, "Too early to create display style: no root window"
        self.tk = master.tk
        # 'tixDisplayStyle' returns the name of the new style; keep it
        # so the style object can be passed where Tcl expects the name.
        self.stylename = self.tk.call('tixDisplayStyle', itemtype,
                            *self._options(cnf,kw) )

    def __str__(self):
        # The Tcl-level name doubles as the string form.
        return self.stylename

    def _options(self, cnf, kw):
        """Flatten cnf+kw into a ('-opt', value, ...) tuple for Tcl."""
        if kw and cnf:
            cnf = _cnfmerge((cnf, kw))
        elif kw:
            cnf = kw
        opts = ()
        for k, v in cnf.items():
            opts = opts + ('-'+k, v)
        return opts

    def delete(self):
        """Delete the underlying Tix display style."""
        self.tk.call(self.stylename, 'delete')

    def __setitem__(self,key,value):
        self.tk.call(self.stylename, 'configure', '-%s'%key, value)

    def config(self, cnf={}, **kw):
        """Configure the style; returns the option dict."""
        return _lst2dict(
            self.tk.split(
                self.tk.call(
                      self.stylename, 'configure', *self._options(cnf,kw))))

    def __getitem__(self,key):
        return self.tk.call(self.stylename, 'cget', '-%s'%key)
######################################################
### The Tix Widget classes - in alphabetical order ###
######################################################
class Balloon(TixWidget):
    """Balloon help widget.

    Subwidget       Class
    ---------       -----
    label           Label
    message         Message"""

    # FIXME: It should inherit -superclass tixShell
    def __init__(self, master=None, cnf={}, **kw):
        # static seem to be -installcolormap -initwait -statusbar -cursor
        static = ['options', 'installcolormap', 'initwait', 'statusbar',
                  'cursor']
        TixWidget.__init__(self, master, 'tixBalloon', static, cnf, kw)
        # Both subwidgets belong to the balloon itself; destroying them
        # physically from Python would break the mega-widget.
        for part in ('label', 'message'):
            self.subwidget_list[part] = _dummyLabel(self, part,
                                                    destroy_physically=0)

    def bind_widget(self, widget, cnf={}, **kw):
        """Bind balloon widget to another.
        One balloon widget may be bound to several widgets at the same time"""
        opts = self._options(cnf, kw)
        self.tk.call(self._w, 'bind', widget._w, *opts)

    def unbind_widget(self, widget):
        """Remove the balloon binding from the given widget."""
        self.tk.call(self._w, 'unbind', widget._w)
class ButtonBox(TixWidget):
    """ButtonBox - A container for pushbuttons.
    Subwidgets are the buttons added with the add method.
    """
    def __init__(self, master=None, cnf={}, **kw):
        static = ['orientation', 'options']
        TixWidget.__init__(self, master, 'tixButtonBox', static, cnf, kw)

    def add(self, name, cnf={}, **kw):
        """Add a button with given name to box."""
        result = self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        self.subwidget_list[name] = _dummyButton(self, name)
        return result

    def invoke(self, name):
        """Invoke the named button, silently ignoring unknown names."""
        if name not in self.subwidget_list:
            return
        self.tk.call(self._w, 'invoke', name)
class ComboBox(TixWidget):
    """ComboBox - an Entry field with a dropdown menu. The user can select a
    choice by either typing in the entry subwdget or selecting from the
    listbox subwidget.

    Subwidget       Class
    ---------       -----
    entry       Entry
    arrow       Button
    slistbox    ScrolledListBox
    tick        Button
    cross       Button : present if created with the fancy option"""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__ (self, master=None, cnf={}, **kw):
        static = ['editable', 'dropdown', 'fancy', 'options']
        TixWidget.__init__(self, master, 'tixComboBox', static, cnf, kw)
        self.subwidget_list['label'] = _dummyLabel(self, 'label')
        self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
        self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
        self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
                                                                'slistbox')
        # 'tick' and 'cross' exist only when the -fancy option is set;
        # mirroring them raises TypeError otherwise.
        try:
            for btn in ('tick', 'cross'):
                self.subwidget_list[btn] = _dummyButton(self, btn)
        except TypeError:
            # unavailable when -fancy not specified
            pass

    # align
    def add_history(self, str):
        """Add the string to the history list (deduplicated by Tix)."""
        self.tk.call(self._w, 'addhistory', str)

    def append_history(self, str):
        """Append the string to the history list unconditionally."""
        self.tk.call(self._w, 'appendhistory', str)

    def insert(self, index, str):
        """Insert the string into the listbox at the given index."""
        self.tk.call(self._w, 'insert', index, str)

    def pick(self, index):
        """Select the listbox entry at index as the current value."""
        self.tk.call(self._w, 'pick', index)
class Control(TixWidget):
    """Control - An entry field with value change arrows.  The user can
    adjust the value by pressing the two arrow buttons or by entering
    the value directly into the entry. The new value will be checked
    against the user-defined upper and lower limits.

    Subwidget       Class
    ---------       -----
    incr       Button
    decr       Button
    entry       Entry
    label       Label"""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__ (self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixControl', ['options'], cnf, kw)
        for part, factory in (('incr', _dummyButton), ('decr', _dummyButton),
                              ('label', _dummyLabel), ('entry', _dummyEntry)):
            self.subwidget_list[part] = factory(self, part)

    def decrement(self):
        """Step the value down once."""
        self.tk.call(self._w, 'decr')

    def increment(self):
        """Step the value up once."""
        self.tk.call(self._w, 'incr')

    def invoke(self):
        """Run the -command callback with the current value."""
        self.tk.call(self._w, 'invoke')

    def update(self):
        """Commit any pending value shown in the entry."""
        self.tk.call(self._w, 'update')
class DirList(TixWidget):
    """DirList - displays a list view of a directory, its previous
    directories and its sub-directories. The user can choose one of
    the directories displayed in the list or change to another directory.

    Subwidget       Class
    ---------       -----
    hlist       HList
    hsb              Scrollbar
    vsb              Scrollbar"""

    # FIXME: It should inherit -superclass tixScrolledHList
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirList', ['options'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        for bar in ('vsb', 'hsb'):
            self.subwidget_list[bar] = _dummyScrollbar(self, bar)

    def chdir(self, dir):
        """Display the contents of the given directory."""
        self.tk.call(self._w, 'chdir', dir)
class DirTree(TixWidget):
    """DirTree - Directory Listing in a hierarchical view.
    Displays a tree view of a directory, its previous directories and its
    sub-directories. The user can choose one of the directories displayed
    in the list or change to another directory.

    Subwidget       Class
    ---------       -----
    hlist           HList
    hsb             Scrollbar
    vsb             Scrollbar"""

    # FIXME: It should inherit -superclass tixScrolledHList
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirTree', ['options'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        for bar in ('vsb', 'hsb'):
            self.subwidget_list[bar] = _dummyScrollbar(self, bar)

    def chdir(self, dir):
        """Display the tree rooted at the given directory."""
        self.tk.call(self._w, 'chdir', dir)
class DirSelectBox(TixWidget):
    """DirSelectBox - Motif style directory select box.
    It is generally used for the user to choose a directory.

    Subwidget       Class
    ---------       -----
    dirlist         DirList
    dircbx          FileComboBox"""
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirSelectBox', ['options'], cnf, kw)
        self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
        self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class ExFileSelectBox(TixWidget):
    """ExFileSelectBox - MS Windows style file select box.
    It provides an convenient method for the user to select files.

    Subwidget       Class
    ---------       -----
    cancel       Button
    ok              Button
    hidden       Checkbutton
    types       ComboBox
    dir              ComboBox
    file       ComboBox
    dirlist       ScrolledListBox
    filelist       ScrolledListBox"""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixExFileSelectBox', ['options'], cnf, kw)
        # Mirror every exposed subwidget, in creation order.
        for part, factory in (('cancel', _dummyButton),
                              ('ok', _dummyButton),
                              ('hidden', _dummyCheckbutton),
                              ('types', _dummyComboBox),
                              ('dir', _dummyComboBox),
                              ('dirlist', _dummyDirList),
                              ('file', _dummyComboBox),
                              ('filelist', _dummyScrolledListBox)):
            self.subwidget_list[part] = factory(self, part)

    def filter(self):
        """Re-apply the current filter to the file list."""
        self.tk.call(self._w, 'filter')

    def invoke(self):
        """Run the -command callback on the current selection."""
        self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class DirSelectDialog(TixWidget):
    """The DirSelectDialog widget presents the directories in the file
    system in a dialog window. The user can use this dialog window to
    navigate through the file system to select the desired directory.

    Subwidgets       Class
    ----------       -----
    dirbox       DirSelectDialog"""

    # FIXME: It should inherit -superclass tixDialogShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirSelectDialog',
                           ['options'], cnf, kw)
        self.subwidget_list['dirbox'] = _dummyDirSelectBox(self, 'dirbox')
        # cancel and ok buttons are missing

    def popup(self):
        """Display the dialog."""
        self.tk.call(self._w, 'popup')

    def popdown(self):
        """Withdraw the dialog."""
        self.tk.call(self._w, 'popdown')
# Should inherit from a Dialog class
class ExFileSelectDialog(TixWidget):
    """ExFileSelectDialog - MS Windows style file select dialog.
    It provides an convenient method for the user to select files.

    Subwidgets       Class
    ----------       -----
    fsbox       ExFileSelectBox"""

    # FIXME: It should inherit -superclass tixDialogShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixExFileSelectDialog',
                           ['options'], cnf, kw)
        self.subwidget_list['fsbox'] = _dummyExFileSelectBox(self, 'fsbox')

    def popup(self):
        """Display the dialog."""
        self.tk.call(self._w, 'popup')

    def popdown(self):
        """Withdraw the dialog."""
        self.tk.call(self._w, 'popdown')
class FileSelectBox(TixWidget):
    """FileSelectBox - Motif style file select box.
    It is generally used for
    the user to choose a file. FileSelectBox stores the files mostly
    recently selected into a ComboBox widget so that they can be quickly
    selected again.

    Subwidget       Class
    ---------       -----
    selection       ComboBox
    filter          ComboBox
    dirlist         ScrolledListBox
    filelist        ScrolledListBox"""
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixFileSelectBox', ['options'], cnf, kw)
        self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
        self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
        self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
        self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
    def apply_filter(self):              # name of subwidget is same as command
        self.tk.call(self._w, 'filter')
    def invoke(self):
        self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class FileSelectDialog(TixWidget):
    """FileSelectDialog - Motif style file select dialog.

    Subwidgets       Class
    ----------       -----
    btns       StdButtonBox
    fsbox       FileSelectBox"""

    # FIXME: It should inherit -superclass tixStdDialogShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixFileSelectDialog',
                           ['options'], cnf, kw)
        self.subwidget_list['btns'] = _dummyStdButtonBox(self, 'btns')
        self.subwidget_list['fsbox'] = _dummyFileSelectBox(self, 'fsbox')

    def popup(self):
        """Display the dialog."""
        self.tk.call(self._w, 'popup')

    def popdown(self):
        """Withdraw the dialog."""
        self.tk.call(self._w, 'popdown')
class FileEntry(TixWidget):
    """FileEntry - Entry field with button that invokes a FileSelectDialog.
    The user can type in the filename manually. Alternatively, the user can
    press the button widget that sits next to the entry, which will bring
    up a file selection dialog.

    Subwidgets       Class
    ----------       -----
    button       Button
    entry       Entry"""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__(self, master, cnf={}, **kw):
        static = ['dialogtype', 'options']
        TixWidget.__init__(self, master, 'tixFileEntry', static, cnf, kw)
        for part, factory in (('button', _dummyButton), ('entry', _dummyEntry)):
            self.subwidget_list[part] = factory(self, part)

    def invoke(self):
        """Run the -command callback with the current filename."""
        self.tk.call(self._w, 'invoke')

    def file_dialog(self):
        # FIXME: return python object
        pass
class HList(TixWidget, XView, YView):
    """HList - Hierarchy display widget can be used to display any data
    that have a hierarchical structure, for example, file system directory
    trees. The list entries are indented and connected by branch lines
    according to their places in the hierachy.

    Subwidgets - None"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixHList',
                           ['columns', 'options'], cnf, kw)

    # -- entry creation --
    def add(self, entry, cnf={}, **kw):
        """Create a new entry with the given pathname; returns its path."""
        return self.tk.call(self._w, 'add', entry, *self._options(cnf, kw))

    def add_child(self, parent=None, cnf={}, **kw):
        """Create a new child of parent ('' = root) with a generated name."""
        if not parent:
            parent = ''
        return self.tk.call(
            self._w, 'addchild', parent, *self._options(cnf, kw))

    # -- anchor --
    def anchor_set(self, entry):
        self.tk.call(self._w, 'anchor', 'set', entry)

    def anchor_clear(self):
        self.tk.call(self._w, 'anchor', 'clear')

    def column_width(self, col=0, width=None, chars=None):
        """Query or set the width of a column (in pixels, or in
        characters when chars is given)."""
        # NOTE(review): chars=0 takes the pixel branch; confirm intended.
        if not chars:
            return self.tk.call(self._w, 'column', 'width', col, width)
        else:
            return self.tk.call(self._w, 'column', 'width', col,
                                '-char', chars)

    # -- deletion --
    def delete_all(self):
        self.tk.call(self._w, 'delete', 'all')

    def delete_entry(self, entry):
        self.tk.call(self._w, 'delete', 'entry', entry)

    def delete_offsprings(self, entry):
        self.tk.call(self._w, 'delete', 'offsprings', entry)

    def delete_siblings(self, entry):
        self.tk.call(self._w, 'delete', 'siblings', entry)

    # -- drag & drop sites --
    def dragsite_set(self, index):
        self.tk.call(self._w, 'dragsite', 'set', index)

    def dragsite_clear(self):
        self.tk.call(self._w, 'dragsite', 'clear')

    def dropsite_set(self, index):
        self.tk.call(self._w, 'dropsite', 'set', index)

    def dropsite_clear(self):
        self.tk.call(self._w, 'dropsite', 'clear')

    # -- column headers --
    def header_create(self, col, cnf={}, **kw):
        self.tk.call(self._w, 'header', 'create', col, *self._options(cnf, kw))

    def header_configure(self, col, cnf={}, **kw):
        # Pass cnf=None explicitly to query all header options as a dict.
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'header', 'configure', col)))
        self.tk.call(self._w, 'header', 'configure', col,
                     *self._options(cnf, kw))

    def header_cget(self, col, opt):
        return self.tk.call(self._w, 'header', 'cget', col, opt)

    def header_exists(self, col):
        return self.tk.call(self._w, 'header', 'exists', col)

    def header_delete(self, col):
        self.tk.call(self._w, 'header', 'delete', col)

    def header_size(self, col):
        return self.tk.call(self._w, 'header', 'size', col)

    def hide_entry(self, entry):
        self.tk.call(self._w, 'hide', 'entry', entry)

    # -- indicators (the [+]/[-] buttons next to entries) --
    def indicator_create(self, entry, cnf={}, **kw):
        self.tk.call(
            self._w, 'indicator', 'create', entry, *self._options(cnf, kw))

    def indicator_configure(self, entry, cnf={}, **kw):
        # Pass cnf=None explicitly to query all indicator options.
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'indicator', 'configure', entry)))
        self.tk.call(
            self._w, 'indicator', 'configure', entry, *self._options(cnf, kw))

    def indicator_cget(self, entry, opt):
        return self.tk.call(self._w, 'indicator', 'cget', entry, opt)

    def indicator_exists(self, entry):
        return self.tk.call (self._w, 'indicator', 'exists', entry)

    def indicator_delete(self, entry):
        self.tk.call(self._w, 'indicator', 'delete', entry)

    def indicator_size(self, entry):
        return self.tk.call(self._w, 'indicator', 'size', entry)

    # -- queries --
    def info_anchor(self):
        return self.tk.call(self._w, 'info', 'anchor')

    def info_bbox(self, entry):
        # Returns (x1, y1, x2, y2) or None when the entry is not visible.
        return self._getints(
                self.tk.call(self._w, 'info', 'bbox', entry)) or None

    def info_children(self, entry=None):
        c = self.tk.call(self._w, 'info', 'children', entry)
        return self.tk.splitlist(c)

    def info_data(self, entry):
        return self.tk.call(self._w, 'info', 'data', entry)

    def info_dragsite(self):
        return self.tk.call(self._w, 'info', 'dragsite')

    def info_dropsite(self):
        return self.tk.call(self._w, 'info', 'dropsite')

    def info_exists(self, entry):
        return self.tk.call(self._w, 'info', 'exists', entry)

    def info_hidden(self, entry):
        return self.tk.call(self._w, 'info', 'hidden', entry)

    def info_next(self, entry):
        return self.tk.call(self._w, 'info', 'next', entry)

    def info_parent(self, entry):
        return self.tk.call(self._w, 'info', 'parent', entry)

    def info_prev(self, entry):
        return self.tk.call(self._w, 'info', 'prev', entry)

    def info_selection(self):
        c = self.tk.call(self._w, 'info', 'selection')
        return self.tk.splitlist(c)

    # -- per-cell items --
    def item_cget(self, entry, col, opt):
        return self.tk.call(self._w, 'item', 'cget', entry, col, opt)

    def item_configure(self, entry, col, cnf={}, **kw):
        # Pass cnf=None explicitly to query all item options.
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'item', 'configure', entry, col)))
        self.tk.call(self._w, 'item', 'configure', entry, col,
                     *self._options(cnf, kw))

    def item_create(self, entry, col, cnf={}, **kw):
        self.tk.call(
            self._w, 'item', 'create', entry, col, *self._options(cnf, kw))

    def item_exists(self, entry, col):
        return self.tk.call(self._w, 'item', 'exists', entry, col)

    def item_delete(self, entry, col):
        self.tk.call(self._w, 'item', 'delete', entry, col)

    # -- per-entry configuration --
    def entrycget(self, entry, opt):
        return self.tk.call(self._w, 'entrycget', entry, opt)

    def entryconfigure(self, entry, cnf={}, **kw):
        # Pass cnf=None explicitly to query all entry options.
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'entryconfigure', entry)))
        self.tk.call(self._w, 'entryconfigure', entry,
                     *self._options(cnf, kw))

    def nearest(self, y):
        return self.tk.call(self._w, 'nearest', y)

    def see(self, entry):
        self.tk.call(self._w, 'see', entry)

    # -- selection --
    def selection_clear(self, cnf={}, **kw):
        self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))

    def selection_includes(self, entry):
        return self.tk.call(self._w, 'selection', 'includes', entry)

    def selection_set(self, first, last=None):
        self.tk.call(self._w, 'selection', 'set', first, last)

    def show_entry(self, entry):
        return self.tk.call(self._w, 'show', 'entry', entry)
class InputOnly(TixWidget):
    """InputOnly - Invisible widget. Unix only.

    Subwidgets - None"""
    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixInputOnly', None, cnf, kw)
class LabelEntry(TixWidget):
    """LabelEntry - Entry field with label. Packages an entry widget
    and a label into one mega widget. It can beused be used to simplify
    the creation of ``entry-form'' type of interface.

    Subwidgets       Class
    ----------       -----
    label       Label
    entry       Entry"""

    def __init__ (self,master=None,cnf={}, **kw):
        static = ['labelside','options']
        TixWidget.__init__(self, master, 'tixLabelEntry', static, cnf, kw)
        for part, factory in (('label', _dummyLabel), ('entry', _dummyEntry)):
            self.subwidget_list[part] = factory(self, part)
class LabelFrame(TixWidget):
    """LabelFrame - Labelled Frame container. Packages a frame widget
    and a label into one mega widget. To create widgets inside a
    LabelFrame widget, one creates the new widgets relative to the
    frame subwidget and manage them inside the frame subwidget.

    Subwidgets       Class
    ----------       -----
    label       Label
    frame       Frame"""

    def __init__ (self,master=None,cnf={}, **kw):
        static = ['labelside','options']
        TixWidget.__init__(self, master, 'tixLabelFrame', static, cnf, kw)
        for part, factory in (('label', _dummyLabel), ('frame', _dummyFrame)):
            self.subwidget_list[part] = factory(self, part)
class ListNoteBook(TixWidget):
    """A ListNoteBook widget is very similar to the TixNoteBook widget:
    it can be used to display many windows in a limited space using a
    notebook metaphor. The notebook is divided into a stack of pages
    (windows). At one time only one of these pages can be shown.
    The user can navigate through these pages by
    choosing the name of the desired page in the hlist subwidget."""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixListNoteBook', ['options'], cnf, kw)
        # Is this necessary? It's not an exposed subwidget in Tix.
        self.subwidget_list['pane'] = _dummyPanedWindow(self, 'pane',
                                                        destroy_physically=0)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['shlist'] = _dummyScrolledHList(self, 'shlist')

    def add(self, name, cnf={}, **kw):
        """Create a new page and return its mirror widget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        page = TixSubWidget(self, name)
        self.subwidget_list[name] = page
        return page

    def page(self, name):
        """Return the page widget registered under name."""
        return self.subwidget(name)

    def pages(self):
        # Can't call subwidgets_all directly because we don't want .nbframe
        names = self.tk.split(self.tk.call(self._w, 'pages'))
        return [self.subwidget(x) for x in names]

    def raise_page(self, name):              # raise is a python keyword
        self.tk.call(self._w, 'raise', name)
class Meter(TixWidget):
    """The Meter widget can be used to show the progress of a background
    job which may take a long time to execute.
    """

    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixMeter',
                           ['options'], cnf, kw)
class NoteBook(TixWidget):
    """NoteBook - Multi-page container widget (tabbed notebook metaphor).

    Subwidgets       Class
    ----------       -----
    nbframe       NoteBookFrame
    <pages>       page widgets added dynamically with the add method"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self,master,'tixNoteBook', ['options'], cnf, kw)
        # The tab frame is owned by the notebook; never destroy it
        # physically from the Python side.
        self.subwidget_list['nbframe'] = TixSubWidget(self, 'nbframe',
                                                      destroy_physically=0)

    def add(self, name, cnf={}, **kw):
        """Create a new page and return its mirror widget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        page = TixSubWidget(self, name)
        self.subwidget_list[name] = page
        return page

    def delete(self, name):
        """Remove the page and destroy its Python mirror."""
        self.tk.call(self._w, 'delete', name)
        self.subwidget_list[name].destroy()
        del self.subwidget_list[name]

    def page(self, name):
        """Return the page widget registered under name."""
        return self.subwidget(name)

    def pages(self):
        # Can't call subwidgets_all directly because we don't want .nbframe
        names = self.tk.split(self.tk.call(self._w, 'pages'))
        return [self.subwidget(x) for x in names]

    def raise_page(self, name):              # raise is a python keyword
        self.tk.call(self._w, 'raise', name)

    def raised(self):
        """Return the name of the currently raised page."""
        return self.tk.call(self._w, 'raised')
class NoteBookFrame(TixWidget):
    # Placeholder for the tab-row subwidget of a NoteBook; instances
    # are created internally (see NoteBook.__init__), never directly.
    # FIXME: This is dangerous to expose to be called on its own.
    pass
class OptionMenu(TixWidget):
    """OptionMenu - creates a menu button of options.

    Subwidget       Class
    ---------       -----
    menubutton      Menubutton
    menu            Menu"""

    def __init__(self, master, cnf={}, **kw):
        static = ['labelside', 'options']
        TixWidget.__init__(self, master, 'tixOptionMenu', static, cnf, kw)
        self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
        self.subwidget_list['menu'] = _dummyMenu(self, 'menu')

    def add_command(self, name, cnf={}, **kw):
        """Append a selectable option entry."""
        self.tk.call(self._w, 'add', 'command', name, *self._options(cnf, kw))

    def add_separator(self, name, cnf={}, **kw):
        """Append a separator entry."""
        self.tk.call(self._w, 'add', 'separator', name, *self._options(cnf, kw))

    def delete(self, name):
        """Remove the named entry."""
        self.tk.call(self._w, 'delete', name)

    def disable(self, name):
        """Grey out the named entry."""
        self.tk.call(self._w, 'disable', name)

    def enable(self, name):
        """Re-enable the named entry."""
        self.tk.call(self._w, 'enable', name)
class PanedWindow(TixWidget):
    """PanedWindow - Multi-pane container widget
    allows the user to interactively manipulate the sizes of several
    panes. The panes can be arranged either vertically or horizontally.The
    user changes the sizes of the panes by dragging the resize handle
    between two panes.

    Subwidgets       Class
    ----------       -----
    <panes>       g/p widgets added dynamically with the add method."""

    def __init__(self, master, cnf={}, **kw):
        static = ['orientation', 'options']
        TixWidget.__init__(self, master, 'tixPanedWindow', static, cnf, kw)

    # add delete forget panecget paneconfigure panes setsize
    def add(self, name, cnf={}, **kw):
        """Create a new pane and return its mirror widget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        pane = TixSubWidget(self, name, check_intermediate=0)
        self.subwidget_list[name] = pane
        return pane

    def delete(self, name):
        """Remove the pane and destroy its Python mirror."""
        self.tk.call(self._w, 'delete', name)
        self.subwidget_list[name].destroy()
        del self.subwidget_list[name]

    def forget(self, name):
        """Unmap the pane without destroying it."""
        self.tk.call(self._w, 'forget', name)

    def panecget(self, entry, opt):
        return self.tk.call(self._w, 'panecget', entry, opt)

    def paneconfigure(self, entry, cnf={}, **kw):
        # Pass cnf=None explicitly to query all pane options as a dict.
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'paneconfigure', entry)))
        self.tk.call(self._w, 'paneconfigure', entry, *self._options(cnf, kw))

    def panes(self):
        """Return the mirror widgets of all panes, in order."""
        names = self.tk.splitlist(self.tk.call(self._w, 'panes'))
        return [self.subwidget(x) for x in names]
class PopupMenu(TixWidget):
    """PopupMenu widget can be used as a replacement of the tk_popup command.
    The advantage of the Tix PopupMenu widget is it requires less application
    code to manipulate.

    Subwidgets       Class
    ----------       -----
    menubutton       Menubutton
    menu       Menu"""

    # FIXME: It should inherit -superclass tixShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixPopupMenu', ['options'], cnf, kw)
        self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
        self.subwidget_list['menu'] = _dummyMenu(self, 'menu')

    def bind_widget(self, widget):
        """Arm the popup for the given widget."""
        self.tk.call(self._w, 'bind', widget._w)

    def unbind_widget(self, widget):
        """Disarm the popup for the given widget."""
        self.tk.call(self._w, 'unbind', widget._w)

    def post_widget(self, widget, x, y):
        """Post the menu over widget at the given coordinates."""
        self.tk.call(self._w, 'post', widget._w, x, y)
class ResizeHandle(TixWidget):
    """Internal widget to draw resize handles on Scrolled widgets."""

    def __init__(self, master, cnf={}, **kw):
        # There seems to be a Tix bug rejecting the configure method
        # Let's try making the flags -static
        flags = ['options', 'command', 'cursorfg', 'cursorbg',
                 'handlesize', 'hintcolor', 'hintwidth',
                 'x', 'y']
        # In fact, x y height width are configurable
        TixWidget.__init__(self, master, 'tixResizeHandle',
                           flags, cnf, kw)

    def attach_widget(self, widget):
        """Attach the handle to the given widget."""
        self.tk.call(self._w, 'attachwidget', widget._w)

    def detach_widget(self, widget):
        """Detach the handle from the given widget."""
        self.tk.call(self._w, 'detachwidget', widget._w)

    def hide(self, widget):
        """Hide the handle drawn over the given widget."""
        self.tk.call(self._w, 'hide', widget._w)

    def show(self, widget):
        """Show the handle over the given widget."""
        self.tk.call(self._w, 'show', widget._w)
class ScrolledHList(TixWidget):
    """ScrolledHList - HList with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledHList', ['options'],
                           cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        for bar in ('vsb', 'hsb'):
            self.subwidget_list[bar] = _dummyScrollbar(self, bar)
class ScrolledListBox(TixWidget):
    """ScrolledListBox - Listbox with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledListBox', ['options'], cnf, kw)
        self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
        for bar in ('vsb', 'hsb'):
            self.subwidget_list[bar] = _dummyScrollbar(self, bar)
class ScrolledText(TixWidget):
    """ScrolledText - Text with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledText', ['options'], cnf, kw)
        self.subwidget_list['text'] = _dummyText(self, 'text')
        for bar in ('vsb', 'hsb'):
            self.subwidget_list[bar] = _dummyScrollbar(self, bar)
class ScrolledTList(TixWidget):
    """ScrolledTList - TList with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledTList', ['options'],
                           cnf, kw)
        self.subwidget_list['tlist'] = _dummyTList(self, 'tlist')
        for bar in ('vsb', 'hsb'):
            self.subwidget_list[bar] = _dummyScrollbar(self, bar)
class ScrolledWindow(TixWidget):
    """ScrolledWindow - Window with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledWindow', ['options'], cnf, kw)
        self.subwidget_list['window'] = _dummyFrame(self, 'window')
        for bar in ('vsb', 'hsb'):
            self.subwidget_list[bar] = _dummyScrollbar(self, bar)
class Select(TixWidget):
    """Select - Container of button subwidgets. It can be used to provide
    radio-box or check-box style of selection options for the user.

    Subwidgets are buttons added dynamically using the add method."""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__(self, master, cnf={}, **kw):
        static = ['allowzero', 'radio', 'orientation', 'labelside',
                  'options']
        TixWidget.__init__(self, master, 'tixSelect', static, cnf, kw)
        self.subwidget_list['label'] = _dummyLabel(self, 'label')

    def add(self, name, cnf={}, **kw):
        """Create a new selection button and return its mirror widget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        button = _dummyButton(self, name)
        self.subwidget_list[name] = button
        return button

    def invoke(self, name):
        """Toggle the named selection button."""
        self.tk.call(self._w, 'invoke', name)
class Shell(TixWidget):
    """Toplevel window.

    Subwidgets - None"""
    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixShell', ['options', 'title'], cnf, kw)
class DialogShell(TixWidget):
    """Toplevel window, with popup popdown and center methods.
    It tells the window manager that it is a dialog window and should be
    treated specially. The exact treatment depends on the treatment of
    the window manager.

    Subwidgets - None"""

    # FIXME: It should inherit from Shell
    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master,
                           'tixDialogShell',
                           ['options', 'title', 'mapped',
                            'minheight', 'minwidth',
                            'parent', 'transient'], cnf, kw)

    def popdown(self):
        """Withdraw the dialog from the screen."""
        self.tk.call(self._w, 'popdown')

    def popup(self):
        """Show the dialog on the screen."""
        self.tk.call(self._w, 'popup')

    def center(self):
        """Center the dialog over its parent (or the screen)."""
        self.tk.call(self._w, 'center')
class StdButtonBox(TixWidget):
    """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help)."""

    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixStdButtonBox',
                           ['orientation', 'options'], cnf, kw)
        # The four standard buttons, each exposed as a subwidget.
        for button in ('ok', 'apply', 'cancel', 'help'):
            self.subwidget_list[button] = _dummyButton(self, button)

    def invoke(self, name):
        """Invoke the button called NAME; unknown names are silently ignored."""
        if name in self.subwidget_list:
            self.tk.call(self._w, 'invoke', name)
class TList(TixWidget, XView, YView):
    """TList - Hierarchy display widget which can be
    used to display data in a tabular format. The list entries of a TList
    widget are similar to the entries in the Tk listbox widget. The main
    differences are (1) the TList widget can display the list entries in a
    two dimensional format and (2) you can use graphical images as well as
    multiple colors and fonts for the list entries.

    Subwidgets - None"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixTList', ['options'], cnf, kw)

    def active_set(self, index):
        """Make the entry at INDEX the active entry."""
        self.tk.call(self._w, 'active', 'set', index)

    def active_clear(self):
        """Clear the active entry."""
        self.tk.call(self._w, 'active', 'clear')

    def anchor_set(self, index):
        """Set the selection anchor to the entry at INDEX."""
        self.tk.call(self._w, 'anchor', 'set', index)

    def anchor_clear(self):
        """Remove the selection anchor."""
        self.tk.call(self._w, 'anchor', 'clear')

    def delete(self, from_, to=None):
        """Delete the entries between from_ and to inclusive."""
        self.tk.call(self._w, 'delete', from_, to)

    def dragsite_set(self, index):
        """Set the drag site to the entry at INDEX."""
        self.tk.call(self._w, 'dragsite', 'set', index)

    def dragsite_clear(self):
        """Clear the drag site."""
        self.tk.call(self._w, 'dragsite', 'clear')

    def dropsite_set(self, index):
        """Set the drop site to the entry at INDEX."""
        self.tk.call(self._w, 'dropsite', 'set', index)

    def dropsite_clear(self):
        """Clear the drop site."""
        self.tk.call(self._w, 'dropsite', 'clear')

    def insert(self, index, cnf={}, **kw):
        """Insert a new entry at INDEX with the given configuration options."""
        self.tk.call(self._w, 'insert', index, *self._options(cnf, kw))

    def info_active(self):
        """Return the index of the active entry."""
        return self.tk.call(self._w, 'info', 'active')

    def info_anchor(self):
        """Return the index of the anchor entry."""
        return self.tk.call(self._w, 'info', 'anchor')

    def info_down(self, index):
        """Return the index of the entry below the one at INDEX."""
        return self.tk.call(self._w, 'info', 'down', index)

    def info_left(self, index):
        """Return the index of the entry to the left of the one at INDEX."""
        return self.tk.call(self._w, 'info', 'left', index)

    def info_right(self, index):
        """Return the index of the entry to the right of the one at INDEX."""
        return self.tk.call(self._w, 'info', 'right', index)

    def info_selection(self):
        """Return a tuple of the indices of all selected entries."""
        c = self.tk.call(self._w, 'info', 'selection')
        return self.tk.splitlist(c)

    def info_size(self):
        """Return the number of entries in the list."""
        return self.tk.call(self._w, 'info', 'size')

    def info_up(self, index):
        """Return the index of the entry above the one at INDEX."""
        return self.tk.call(self._w, 'info', 'up', index)

    def nearest(self, x, y):
        """Return the index of the entry nearest pixel position (x, y)."""
        return self.tk.call(self._w, 'nearest', x, y)

    def see(self, index):
        """Scroll the list so the entry at INDEX is visible."""
        self.tk.call(self._w, 'see', index)

    def selection_clear(self, cnf={}, **kw):
        """Clear the selection (options select the affected range)."""
        self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))

    def selection_includes(self, index):
        """Return whether the entry at INDEX is selected."""
        return self.tk.call(self._w, 'selection', 'includes', index)

    def selection_set(self, first, last=None):
        """Select the entries between first and last inclusive."""
        self.tk.call(self._w, 'selection', 'set', first, last)
class Tree(TixWidget):
    """Tree - The tixTree widget can be used to display hierarchical
    data in a tree form. The user can adjust
    the view of the tree by opening or closing parts of the tree."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixTree',
                           ['options'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

    def autosetmode(self):
        '''This command calls the setmode method for all the entries in this
     Tree widget: if an entry has no child entries, its mode is set to
     none. Otherwise, if the entry has any hidden child entries, its mode is
     set to open; otherwise its mode is set to close.'''
        self.tk.call(self._w, 'autosetmode')

    def close(self, entrypath):
        '''Close the entry given by entryPath if its mode is close.'''
        self.tk.call(self._w, 'close', entrypath)

    def getmode(self, entrypath):
        '''Returns the current mode of the entry given by entryPath.'''
        return self.tk.call(self._w, 'getmode', entrypath)

    def open(self, entrypath):
        '''Open the entry given by entryPath if its mode is open.'''
        self.tk.call(self._w, 'open', entrypath)

    def setmode(self, entrypath, mode='none'):
        '''This command is used to indicate whether the entry given by
     entryPath has children entries and whether the children are visible. mode
     must be one of open, close or none. If mode is set to open, a (+)
     indicator is drawn next to the entry. If mode is set to close, a (-)
     indicator is drawn next to the entry. If mode is set to none, no
     indicators will be drawn for this entry. The default mode is none. The
     open mode indicates the entry has hidden children and this entry can be
     opened by the user. The close mode indicates that all the children of the
     entry are now visible and the entry can be closed by the user.'''
        self.tk.call(self._w, 'setmode', entrypath, mode)
# Could try subclassing Tree for CheckList - would need another arg to init
class CheckList(TixWidget):
    """The CheckList widget
    displays a list of items to be selected by the user. CheckList acts
    similarly to the Tk checkbutton or radiobutton widgets, except it is
    capable of handling many more items than checkbuttons or radiobuttons.
    """

    # FIXME: It should inherit -superclass tixTree
    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixCheckList',
                           ['options', 'radio'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

    def autosetmode(self):
        '''This command calls the setmode method for all the entries in this
     Tree widget: if an entry has no child entries, its mode is set to
     none. Otherwise, if the entry has any hidden child entries, its mode is
     set to open; otherwise its mode is set to close.'''
        self.tk.call(self._w, 'autosetmode')

    def close(self, entrypath):
        '''Close the entry given by entryPath if its mode is close.'''
        self.tk.call(self._w, 'close', entrypath)

    def getmode(self, entrypath):
        '''Returns the current mode of the entry given by entryPath.'''
        return self.tk.call(self._w, 'getmode', entrypath)

    def open(self, entrypath):
        '''Open the entry given by entryPath if its mode is open.'''
        self.tk.call(self._w, 'open', entrypath)

    def getselection(self, mode='on'):
        '''Returns a list of items whose status matches the given mode. If
     mode is not specified, the list of items in the "on" status will be
     returned. Mode can be on, off, default.'''
        c = self.tk.split(self.tk.call(self._w, 'getselection', mode))
        return self.tk.splitlist(c)

    def getstatus(self, entrypath):
        '''Returns the current status of entryPath.'''
        return self.tk.call(self._w, 'getstatus', entrypath)

    def setstatus(self, entrypath, mode='on'):
        '''Sets the status of entryPath to be status. A bitmap will be
     displayed next to the entry; its status is on, off or default.'''
        self.tk.call(self._w, 'setstatus', entrypath, mode)
###########################################################################
### The subclassing below is used to instantiate the subwidgets in each ###
### mega widget. This allows us to access their methods directly. ###
###########################################################################
class _dummyButton(Button, TixSubWidget):
    """Proxy giving direct access to a Button subwidget of a mega widget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyCheckbutton(Checkbutton, TixSubWidget):
    """Proxy giving direct access to a Checkbutton subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyEntry(Entry, TixSubWidget):
    """Proxy giving direct access to an Entry subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyFrame(Frame, TixSubWidget):
    """Proxy giving direct access to a Frame subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyLabel(Label, TixSubWidget):
    """Proxy giving direct access to a Label subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyListbox(Listbox, TixSubWidget):
    """Proxy giving direct access to a Listbox subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenu(Menu, TixSubWidget):
    """Proxy giving direct access to a Menu subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenubutton(Menubutton, TixSubWidget):
    """Proxy giving direct access to a Menubutton subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrollbar(Scrollbar, TixSubWidget):
    """Proxy giving direct access to a Scrollbar subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyText(Text, TixSubWidget):
    """Proxy giving direct access to a Text subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledListBox(ScrolledListBox, TixSubWidget):
    """Proxy giving direct access to a ScrolledListBox subwidget,
    including its own nested subwidgets."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyHList(HList, TixSubWidget):
    """Proxy giving direct access to an HList subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledHList(ScrolledHList, TixSubWidget):
    """Proxy giving direct access to a ScrolledHList subwidget,
    including its own nested subwidgets."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyTList(TList, TixSubWidget):
    """Proxy giving direct access to a TList subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyComboBox(ComboBox, TixSubWidget):
    """Proxy giving direct access to a ComboBox subwidget and its
    nested subwidgets."""
    def __init__(self, master, name, destroy_physically=1):
        # NOTE(review): a list is passed where the other proxies pass the
        # destroy_physically flag directly — presumably intentional to carry
        # the 'fancy' marker, but verify against TixSubWidget.__init__.
        TixSubWidget.__init__(self, master, name, ['fancy',destroy_physically])
        self.subwidget_list['label'] = _dummyLabel(self, 'label')
        self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
        self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')

        self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
                                                                'slistbox')
        try:
            self.subwidget_list['tick'] = _dummyButton(self, 'tick')
            #cross Button : present if created with the fancy option
            self.subwidget_list['cross'] = _dummyButton(self, 'cross')
        except TypeError:
            # unavailable when -fancy not specified
            pass
class _dummyDirList(DirList, TixSubWidget):
    """Proxy giving direct access to a DirList subwidget and its
    nested subwidgets."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyDirSelectBox(DirSelectBox, TixSubWidget):
    """Proxy giving direct access to a DirSelectBox subwidget and its
    nested subwidgets."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
        self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class _dummyExFileSelectBox(ExFileSelectBox, TixSubWidget):
    """Proxy giving direct access to an ExFileSelectBox subwidget and
    its nested subwidgets."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
        self.subwidget_list['ok'] = _dummyButton(self, 'ok')
        self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
        self.subwidget_list['types'] = _dummyComboBox(self, 'types')
        self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
        self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
        self.subwidget_list['file'] = _dummyComboBox(self, 'file')
        self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
class _dummyFileSelectBox(FileSelectBox, TixSubWidget):
    """Proxy giving direct access to a FileSelectBox subwidget and its
    nested subwidgets."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
        self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
        self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
        self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
class _dummyFileComboBox(ComboBox, TixSubWidget):
    """Proxy giving direct access to the ComboBox used for directory
    selection (the 'dircbx' subwidget)."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['dircbx'] = _dummyComboBox(self, 'dircbx')
class _dummyStdButtonBox(StdButtonBox, TixSubWidget):
    """Proxy giving direct access to a StdButtonBox subwidget and its
    four standard buttons."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['ok'] = _dummyButton(self, 'ok')
        self.subwidget_list['apply'] = _dummyButton(self, 'apply')
        self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
        self.subwidget_list['help'] = _dummyButton(self, 'help')
class _dummyNoteBookFrame(NoteBookFrame, TixSubWidget):
    """Proxy giving direct access to a NoteBookFrame subwidget.
    Not destroyed physically by default (destroy_physically=0)."""
    def __init__(self, master, name, destroy_physically=0):
        TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyPanedWindow(PanedWindow, TixSubWidget):
    """Proxy giving direct access to a PanedWindow subwidget."""
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
########################
### Utility Routines ###
########################
#mike Should tixDestroy be exposed as a wrapper? - but not for widgets.
def OptionName(widget):
    '''Returns the qualified path name for the widget. Normally used to set
    default options for subwidgets. See tixwidgets.py'''
    # Delegates to the Tcl-level tixOptionName command on the widget's path.
    return widget.tk.call('tixOptionName', widget._w)
# Called with a dictionary argument of the form
# {'*.c':'C source files', '*.txt':'Text Files', '*':'All files'}
# returns a string which can be used to configure the fsbox file types
# in an ExFileSelectBox. i.e.,
# '{{*} {* - All files}} {{*.c} {*.c - C source files}} {{*.txt} {*.txt - Text Files}}'
def FileTypeList(dict):
    """Return a Tcl list string describing file types for an ExFileSelectBox.

    Called with a dictionary mapping glob patterns to descriptions, e.g.
    {'*.c': 'C source files', '*': 'All files'}, and returns a string such
    as "{{*.c} {*.c - C source files}} {{*} {* - All files}} " suitable for
    configuring the fsbox file types.

    Note: the parameter name shadows the builtin ``dict``; it is kept for
    backward compatibility with existing callers.
    """
    # join() avoids the quadratic cost of repeated string concatenation;
    # each entry keeps the original trailing space.
    return ''.join('{{%s} {%s - %s}} ' % (pattern, pattern, label)
                   for pattern, label in dict.items())
# Still to be done:
# tixIconView
class CObjView(TixWidget):
    """This file implements the Canvas Object View widget. This is a base
    class of IconView. It implements automatic placement/adjustment of the
    scrollbars according to the canvas objects inside the canvas subwidget.
    The scrollbars are adjusted so that the canvas is just large enough
    to see all the objects.
    """
    # FIXME: It should inherit -superclass tixScrolledWidget
    # No Python-level behaviour yet; kept as a placeholder base class.
    pass
class Grid(TixWidget, XView, YView):
    '''The Tix Grid command creates a new window and makes it into a
    tixGrid widget. Additional options, may be specified on the command
    line or in the option database to configure aspects such as its cursor
    and relief.

    A Grid widget displays its contents in a two dimensional grid of cells.
    Each cell may contain one Tix display item, which may be in text,
    graphics or other formats. See the DisplayStyle class for more information
    about Tix display items. Individual cells, or groups of cells, can be
    formatted with a wide range of attributes, such as its color, relief and
    border.

    Subwidgets - None'''

    # valid specific resources as of Tk 8.4
    # editdonecmd, editnotifycmd, floatingcols, floatingrows, formatcmd,
    # highlightbackground, highlightcolor, leftmargin, itemtype, selectmode,
    # selectunit, topmargin,
    def __init__(self, master=None, cnf={}, **kw):
        static= []
        # The creation-time cnf is kept around for use by set() below.
        self.cnf= cnf
        TixWidget.__init__(self, master, 'tixGrid', static, cnf, kw)

    # valid options as of Tk 8.4
    # anchor, bdtype, cget, configure, delete, dragsite, dropsite, entrycget,
    # edit, entryconfigure, format, geometryinfo, info, index, move, nearest,
    # selection, set, size, unset, xview, yview
    # NOTE(review): most methods pass the widget object itself to tk.call
    # while unset()/size_column() pass self._w — presumably equivalent, but
    # the inconsistency is worth confirming/normalising.
    def anchor_clear(self):
        """Removes the selection anchor."""
        self.tk.call(self, 'anchor', 'clear')

    def anchor_get(self):
        "Get the (x,y) coordinate of the current anchor cell"
        return self._getints(self.tk.call(self, 'anchor', 'get'))

    def anchor_set(self, x, y):
        """Set the selection anchor to the cell at (x, y)."""
        self.tk.call(self, 'anchor', 'set', x, y)

    def delete_row(self, from_, to=None):
        """Delete rows between from_ and to inclusive.
        If to is not provided,  delete only row at from_"""
        if to is None:
            self.tk.call(self, 'delete', 'row', from_)
        else:
            self.tk.call(self, 'delete', 'row', from_, to)

    def delete_column(self, from_, to=None):
        """Delete columns between from_ and to inclusive.
        If to is not provided,  delete only column at from_"""
        if to is None:
            self.tk.call(self, 'delete', 'column', from_)
        else:
            self.tk.call(self, 'delete', 'column', from_, to)

    def edit_apply(self):
        """If any cell is being edited, de-highlight the cell  and  applies
        the changes."""
        self.tk.call(self, 'edit', 'apply')

    def edit_set(self, x, y):
        """Highlights  the  cell  at  (x, y) for editing, if the -editnotify
        command returns True for this cell."""
        self.tk.call(self, 'edit', 'set', x, y)

    def entrycget(self, x, y, option):
        "Get the option value for cell at (x,y)"
        if option and option[0] != '-':
            option = '-' + option
        return self.tk.call(self, 'entrycget', x, y, option)

    def entryconfigure(self, x, y, cnf=None, **kw):
        # Delegates to the shared Tkinter _configure helper.
        return self._configure(('entryconfigure', x, y), cnf, kw)

    # def format
    # def index

    def info_exists(self, x, y):
        "Return True if display item exists at (x,y)"
        return self._getboolean(self.tk.call(self, 'info', 'exists', x, y))

    def info_bbox(self, x, y):
        # This seems to always return '', at least for 'text' displayitems
        return self.tk.call(self, 'info', 'bbox', x, y)

    def move_column(self, from_, to, offset):
        """Moves the range of columns from position FROM through TO by
        the distance indicated by OFFSET. For example, move_column(2, 4, 1)
        moves the columns 2,3,4 to columns 3,4,5."""
        self.tk.call(self, 'move', 'column', from_, to, offset)

    def move_row(self, from_, to, offset):
        """Moves the range of rows from position FROM through TO by
        the distance indicated by OFFSET.
        For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5."""
        self.tk.call(self, 'move', 'row', from_, to, offset)

    def nearest(self, x, y):
        "Return coordinate of cell nearest pixel coordinate (x,y)"
        return self._getints(self.tk.call(self, 'nearest', x, y))

    # def selection adjust
    # def selection clear
    # def selection includes
    # def selection set
    # def selection toggle

    def set(self, x, y, itemtype=None, **kw):
        """Create or replace the display item in the cell at (x, y)."""
        # NOTE(review): merges the creation-time cnf (self.cnf) into every
        # set call — looks accidental; an empty dict seems more likely to be
        # intended. Verify before changing.
        args= self._options(self.cnf, kw)
        if itemtype is not None:
            args= ('-itemtype', itemtype) + args
        self.tk.call(self, 'set', x, y, *args)

    def size_column(self, index, **kw):
        """Queries or sets the size of the column given by
        INDEX.  INDEX may be any non-negative
        integer that gives the position of a given column.
        INDEX can also be the string "default"; in this case, this command
        queries or sets the default size of all columns.
        When no option-value pair is given, this command returns a tuple
        containing the current size setting of the given column.  When
        option-value pairs are given, the corresponding options of the
        size setting of the given column are changed. Options may be one
        of the following:
        pad0 pixels
            Specifies the paddings to the left of a column.
        pad1 pixels
            Specifies the paddings to the right of a column.
        size val
            Specifies the width of a column.  Val may be:
            "auto" -- the width of the column is set to the
            width of the widest cell in the column; a valid Tk screen
            distance unit; or a real number following by the word chars
            (e.g. 3.4chars) that sets the width of the column to the
            given number of characters."""
        return self.tk.split(self.tk.call(self._w, 'size', 'column', index,
                             *self._options({}, kw)))

    def size_row(self, index, **kw):
        """Queries or sets the size of the row given by
        INDEX. INDEX may be any non-negative
        integer that gives the position of a given row.
        INDEX can also be the string "default"; in this case, this command
        queries or sets the default size of all rows.
        When no option-value pair is given, this command returns a list
        containing the current size setting of the given row.  When
        option-value pairs are given, the corresponding options of the size
        setting of the given row are changed. Options may be one of the
        following:
        pad0 pixels
            Specifies the paddings to the top of a row.
        pad1 pixels
            Specifies the paddings to the bottom of a row.
        size val
            Specifies the height of a row.  Val may be:
            "auto" -- the height of the row is set to the
            height of the highest cell in the row; a valid Tk screen
            distance unit; or a real number following by the word chars
            (e.g. 3.4chars) that sets the height of the row to the
            given number of characters."""
        return self.tk.split(self.tk.call(
                    self, 'size', 'row', index, *self._options({}, kw)))

    def unset(self, x, y):
        """Clears the cell at (x, y) by removing its display item."""
        self.tk.call(self._w, 'unset', x, y)
class ScrolledGrid(Grid):
    '''Scrolled Grid widgets'''

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master=None, cnf={}, **kw):
        static= []
        # Kept for Grid.set(), which merges it into its options.
        self.cnf= cnf
        TixWidget.__init__(self, master, 'tixScrolledGrid', static, cnf, kw)
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
-->
*This model was released on 2023-05-24 and added to Hugging Face Transformers on 2023-09-19.*
# ViTMatte
<div class="flex flex-wrap space-x-1">
<img alt="PyTorch" src="https://img.shields.io/badge/PyTorch-DE3412?style=flat&logo=pytorch&logoColor=white">
</div>
## Overview
The ViTMatte model was proposed in [Boosting Image Matting with Pretrained Plain Vision Transformers](https://huggingface.co/papers/2305.15272) by Jingfeng Yao, Xinggang Wang, Shusheng Yang, Baoyuan Wang.
ViTMatte leverages plain [Vision Transformers](vit) for the task of image matting, which is the process of accurately estimating the foreground object in images and videos.
The abstract from the paper is the following:
*Recently, plain vision Transformers (ViTs) have shown impressive performance on various computer vision tasks, thanks to their strong modeling capacity and large-scale pretraining. However, they have not yet conquered the problem of image matting. We hypothesize that image matting could also be boosted by ViTs and present a new efficient and robust ViT-based matting system, named ViTMatte. Our method utilizes (i) a hybrid attention mechanism combined with a convolution neck to help ViTs achieve an excellent performance-computation trade-off in matting tasks. (ii) Additionally, we introduce the detail capture module, which just consists of simple lightweight convolutions to complement the detailed information required by matting. To the best of our knowledge, ViTMatte is the first work to unleash the potential of ViT on image matting with concise adaptation. It inherits many superior properties from ViT to matting, including various pretraining strategies, concise architecture design, and flexible inference strategies. We evaluate ViTMatte on Composition-1k and Distinctions-646, the most commonly used benchmark for image matting, our method achieves state-of-the-art performance and outperforms prior matting works by a large margin.*
This model was contributed by [nielsr](https://huggingface.co/nielsr).
The original code can be found [here](https://github.com/hustvl/ViTMatte).
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/model_doc/vitmatte_architecture.png"
alt="drawing" width="600"/>
<small> ViTMatte high-level overview. Taken from the <a href="https://huggingface.co/papers/2305.15272">original paper.</a> </small>
## Resources
A list of official Hugging Face and community (indicated by 🌎) resources to help you get started with ViTMatte.
- A demo notebook regarding inference with [`VitMatteForImageMatting`], including background replacement, can be found [here](https://github.com/NielsRogge/Transformers-Tutorials/tree/master/ViTMatte).
<Tip>
The model expects both the image and trimap (concatenated) as input. Use [`ViTMatteImageProcessor`] for this purpose.
</Tip>
## VitMatteConfig
[[autodoc]] VitMatteConfig
## VitMatteImageProcessor
[[autodoc]] VitMatteImageProcessor
- preprocess
## VitMatteImageProcessorFast
[[autodoc]] VitMatteImageProcessorFast
- preprocess
## VitMatteForImageMatting
[[autodoc]] VitMatteForImageMatting
- forward | unknown | github | https://github.com/huggingface/transformers | docs/source/en/model_doc/vitmatte.md |
import sys
import platform
import _pytest._code
import pytest
def runpdb_and_get_report(testdir, source):
    """Run *source* in-process under ``pytest --pdb`` and return the report
    for the call phase of the single test it contains."""
    test_file = testdir.makepyfile(source)
    result = testdir.runpytest_inprocess("--pdb", test_file)
    reports = result.reprec.getreports("pytest_runtest_logreport")
    assert len(reports) == 3, reports  # setup/call/teardown
    setup_report, call_report, teardown_report = reports
    return call_report
class TestPDB:
@pytest.fixture
def pdblist(self, request):
monkeypatch = request.getfixturevalue("monkeypatch")
pdblist = []
def mypdb(*args):
pdblist.append(args)
plugin = request.config.pluginmanager.getplugin('debugging')
monkeypatch.setattr(plugin, 'post_mortem', mypdb)
return pdblist
def test_pdb_on_fail(self, testdir, pdblist):
rep = runpdb_and_get_report(testdir, """
def test_func():
assert 0
""")
assert rep.failed
assert len(pdblist) == 1
tb = _pytest._code.Traceback(pdblist[0][0])
assert tb[-1].name == "test_func"
def test_pdb_on_xfail(self, testdir, pdblist):
rep = runpdb_and_get_report(testdir, """
import pytest
@pytest.mark.xfail
def test_func():
assert 0
""")
assert "xfail" in rep.keywords
assert not pdblist
def test_pdb_on_skip(self, testdir, pdblist):
rep = runpdb_and_get_report(testdir, """
import pytest
def test_func():
pytest.skip("hello")
""")
assert rep.skipped
assert len(pdblist) == 0
def test_pdb_on_BdbQuit(self, testdir, pdblist):
rep = runpdb_and_get_report(testdir, """
import bdb
def test_func():
raise bdb.BdbQuit
""")
assert rep.failed
assert len(pdblist) == 0
def test_pdb_interaction(self, testdir):
p1 = testdir.makepyfile("""
def test_1():
i = 0
assert i == 1
""")
child = testdir.spawn_pytest("--pdb %s" % p1)
child.expect(".*def test_1")
child.expect(".*i = 0")
child.expect("(Pdb)")
child.sendeof()
rest = child.read().decode("utf8")
assert "1 failed" in rest
assert "def test_1" not in rest
self.flush(child)
@staticmethod
def flush(child):
if platform.system() == 'Darwin':
return
if child.isalive():
child.wait()
def test_pdb_unittest_postmortem(self, testdir):
p1 = testdir.makepyfile("""
import unittest
class Blub(unittest.TestCase):
def tearDown(self):
self.filename = None
def test_false(self):
self.filename = 'debug' + '.me'
assert 0
""")
child = testdir.spawn_pytest("--pdb %s" % p1)
child.expect('(Pdb)')
child.sendline('p self.filename')
child.sendeof()
rest = child.read().decode("utf8")
assert 'debug.me' in rest
self.flush(child)
def test_pdb_unittest_skip(self, testdir):
"""Test for issue #2137"""
p1 = testdir.makepyfile("""
import unittest
@unittest.skipIf(True, 'Skipping also with pdb active')
class MyTestCase(unittest.TestCase):
def test_one(self):
assert 0
""")
child = testdir.spawn_pytest("-rs --pdb %s" % p1)
child.expect('Skipping also with pdb active')
child.expect('1 skipped in')
child.sendeof()
self.flush(child)
def test_pdb_interaction_capture(self, testdir):
p1 = testdir.makepyfile("""
def test_1():
print("getrekt")
assert False
""")
child = testdir.spawn_pytest("--pdb %s" % p1)
child.expect("getrekt")
child.expect("(Pdb)")
child.sendeof()
rest = child.read().decode("utf8")
assert "1 failed" in rest
assert "getrekt" not in rest
self.flush(child)
def test_pdb_interaction_exception(self, testdir):
p1 = testdir.makepyfile("""
import pytest
def globalfunc():
pass
def test_1():
pytest.raises(ValueError, globalfunc)
""")
child = testdir.spawn_pytest("--pdb %s" % p1)
child.expect(".*def test_1")
child.expect(".*pytest.raises.*globalfunc")
child.expect("(Pdb)")
child.sendline("globalfunc")
child.expect(".*function")
child.sendeof()
child.expect("1 failed")
self.flush(child)
def test_pdb_interaction_on_collection_issue181(self, testdir):
p1 = testdir.makepyfile("""
import pytest
xxx
""")
child = testdir.spawn_pytest("--pdb %s" % p1)
#child.expect(".*import pytest.*")
child.expect("(Pdb)")
child.sendeof()
child.expect("1 error")
self.flush(child)
def test_pdb_interaction_on_internal_error(self, testdir):
testdir.makeconftest("""
def pytest_runtest_protocol():
0/0
""")
p1 = testdir.makepyfile("def test_func(): pass")
child = testdir.spawn_pytest("--pdb %s" % p1)
#child.expect(".*import pytest.*")
child.expect("(Pdb)")
child.sendeof()
self.flush(child)
def test_pdb_interaction_capturing_simple(self, testdir):
p1 = testdir.makepyfile("""
import pytest
def test_1():
i = 0
print ("hello17")
pytest.set_trace()
x = 3
""")
child = testdir.spawn_pytest(str(p1))
child.expect("test_1")
child.expect("x = 3")
child.expect("(Pdb)")
child.sendeof()
rest = child.read().decode("utf-8")
assert "1 failed" in rest
assert "def test_1" in rest
assert "hello17" in rest # out is captured
self.flush(child)
def test_pdb_set_trace_interception(self, testdir):
p1 = testdir.makepyfile("""
import pdb
def test_1():
pdb.set_trace()
""")
child = testdir.spawn_pytest(str(p1))
child.expect("test_1")
child.expect("(Pdb)")
child.sendeof()
rest = child.read().decode("utf8")
assert "1 failed" in rest
assert "reading from stdin while output" not in rest
self.flush(child)
def test_pdb_and_capsys(self, testdir):
p1 = testdir.makepyfile("""
import pytest
def test_1(capsys):
print ("hello1")
pytest.set_trace()
""")
child = testdir.spawn_pytest(str(p1))
child.expect("test_1")
child.send("capsys.readouterr()\n")
child.expect("hello1")
child.sendeof()
child.read()
self.flush(child)
def test_set_trace_capturing_afterwards(self, testdir):
p1 = testdir.makepyfile("""
import pdb
def test_1():
pdb.set_trace()
def test_2():
print ("hello")
assert 0
""")
child = testdir.spawn_pytest(str(p1))
child.expect("test_1")
child.send("c\n")
child.expect("test_2")
child.expect("Captured")
child.expect("hello")
child.sendeof()
child.read()
self.flush(child)
def test_pdb_interaction_doctest(self, testdir):
p1 = testdir.makepyfile("""
import pytest
def function_1():
'''
>>> i = 0
>>> assert i == 1
'''
""")
child = testdir.spawn_pytest("--doctest-modules --pdb %s" % p1)
child.expect("(Pdb)")
child.sendline('i')
child.expect("0")
child.expect("(Pdb)")
child.sendeof()
rest = child.read().decode("utf8")
assert "1 failed" in rest
self.flush(child)
def test_pdb_interaction_capturing_twice(self, testdir):
p1 = testdir.makepyfile("""
import pytest
def test_1():
i = 0
print ("hello17")
pytest.set_trace()
x = 3
print ("hello18")
pytest.set_trace()
x = 4
""")
child = testdir.spawn_pytest(str(p1))
child.expect("test_1")
child.expect("x = 3")
child.expect("(Pdb)")
child.sendline('c')
child.expect("x = 4")
child.sendeof()
rest = child.read().decode("utf8")
assert "1 failed" in rest
assert "def test_1" in rest
assert "hello17" in rest # out is captured
assert "hello18" in rest # out is captured
self.flush(child)
def test_pdb_used_outside_test(self, testdir):
    """``pytest.set_trace()`` also works when invoked from a plain script
    run under the bare interpreter, outside of any pytest run."""
    script = testdir.makepyfile("""
        import pytest
        pytest.set_trace()
        x = 5
    """)
    # Run with the interpreter directly, not via pytest.
    child = testdir.spawn("%s %s" % (sys.executable, script))
    child.expect("x = 5")
    child.sendeof()
    self.flush(child)
def test_pdb_used_in_generate_tests(self, testdir):
    """``pytest.set_trace()`` is usable during collection, e.g. from a
    ``pytest_generate_tests`` hook."""
    script = testdir.makepyfile("""
        import pytest
        def pytest_generate_tests(metafunc):
            pytest.set_trace()
            x = 5
        def test_foo(a):
            pass
    """)
    child = testdir.spawn_pytest(str(script))
    child.expect("x = 5")
    child.sendeof()
    self.flush(child)
def test_pdb_collection_failure_is_shown(self, testdir):
    """With ``--pdb``, a collection error (NameError at import time) is
    still reported on stdout rather than being swallowed."""
    script = testdir.makepyfile("""xxx """)
    result = testdir.runpytest_subprocess("--pdb", script)
    result.stdout.fnmatch_lines([
        "*NameError*xxx*",
        "*1 error*",
    ])
def test_enter_pdb_hook_is_called(self, testdir):
    """The ``pytest_enter_pdb`` hook fires before the prompt appears and
    receives the fully configured config object."""
    testdir.makeconftest("""
        def pytest_enter_pdb(config):
            assert config.testing_verification == 'configured'
            print 'enter_pdb_hook'
        def pytest_configure(config):
            config.testing_verification = 'configured'
    """)
    script = testdir.makepyfile("""
        import pytest
        def test_foo():
            pytest.set_trace()
    """)
    child = testdir.spawn_pytest(str(script))
    child.expect("enter_pdb_hook")
    # Continue past the breakpoint and end the session.
    child.send('c\n')
    child.sendeof()
    self.flush(child)
def test_pdb_custom_cls(self, testdir):
    """``--pdbcls`` installs a user-supplied debugger class; pytest must
    instantiate it and drive ``reset()``/``interaction()`` on an error."""
    calls = []

    # Dummy debugger that records which of its methods pytest invokes.
    class _CustomPdb:
        def __init__(self, *args, **kwargs):
            calls.append("init")

        def reset(self):
            calls.append("reset")

        def interaction(self, *args):
            calls.append("interaction")

    # Expose the dummy where --pdbcls=_pytest:_CustomPdb can find it.
    _pytest._CustomPdb = _CustomPdb
    script = testdir.makepyfile("""xxx """)
    result = testdir.runpytest_inprocess(
        "--pdbcls=_pytest:_CustomPdb", script)
    result.stdout.fnmatch_lines([
        "*NameError*xxx*",
        "*1 error*",
    ])
    assert calls == ["init", "reset", "interaction"]
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
r"""Code to validate and convert settings of the Microsoft build tools.
This file contains code to validate and convert settings of the Microsoft
build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(),
and ValidateMSBuildSettings() are the entry points.
This file was created by comparing the projects created by Visual Studio 2008
and Visual Studio 2010 for all available settings through the user interface.
The MSBuild schemas were also considered. They are typically found in the
MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
"""
import sys
import re
# Dictionaries of settings validators. The key is the tool name, the value is
# a dictionary mapping setting names to validation functions.
# (Populated by the _AddTool/_Same/_Renamed/... helpers below.)
_msvs_validators = {}
_msbuild_validators = {}
# A dictionary of settings converters. The key is the tool name, the value is
# a dictionary mapping setting names to conversion functions.
_msvs_to_msbuild_converters = {}
# Tool name mapping from MSVS to MSBuild.
_msbuild_name_of_tool = {}
class _Tool(object):
"""Represents a tool used by MSVS or MSBuild.
Attributes:
msvs_name: The name of the tool in MSVS.
msbuild_name: The name of the tool in MSBuild.
"""
def __init__(self, msvs_name, msbuild_name):
self.msvs_name = msvs_name
self.msbuild_name = msbuild_name
def _AddTool(tool):
  """Adds a tool to the four dictionaries used to process settings.

  This only defines the tool. Each setting also needs to be added.

  Args:
    tool: The _Tool object to be added.
  """
  # Start with empty per-tool setting tables; the _Same/_Renamed/... helpers
  # fill them in afterwards.
  _msvs_validators[tool.msvs_name] = {}
  _msbuild_validators[tool.msbuild_name] = {}
  _msvs_to_msbuild_converters[tool.msvs_name] = {}
  # Keyed by the MSVS name so converters can look up the MSBuild name.
  _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
def _GetMSBuildToolSettings(msbuild_settings, tool):
"""Returns an MSBuild tool dictionary. Creates it if needed."""
return msbuild_settings.setdefault(tool.msbuild_name, {})
class _Type(object):
"""Type of settings (Base class)."""
def ValidateMSVS(self, value):
"""Verifies that the value is legal for MSVS.
Args:
value: the value to check for this type.
Raises:
ValueError if value is not valid for MSVS.
"""
def ValidateMSBuild(self, value):
"""Verifies that the value is legal for MSBuild.
Args:
value: the value to check for this type.
Raises:
ValueError if value is not valid for MSBuild.
"""
def ConvertToMSBuild(self, value):
"""Returns the MSBuild equivalent of the MSVS value given.
Args:
value: the MSVS value to convert.
Returns:
the MSBuild equivalent.
Raises:
ValueError if value is not valid.
"""
return value
class _String(_Type):
  """A setting that's just a string."""

  def ValidateMSVS(self, value):
    # NOTE: `basestring` keeps this module Python 2 only.
    if not isinstance(value, basestring):
      raise ValueError('expected string; got %r' % value)

  def ValidateMSBuild(self, value):
    if not isinstance(value, basestring):
      raise ValueError('expected string; got %r' % value)

  def ConvertToMSBuild(self, value):
    # Convert the macros, e.g. '$(InputName)' -> '%(Filename)'.
    return ConvertVCMacrosToMSBuild(value)
class _StringList(_Type):
  """A setting that's a list of strings."""

  def ValidateMSVS(self, value):
    # A bare string is accepted as well as a list of strings.
    if not isinstance(value, basestring) and not isinstance(value, list):
      raise ValueError('expected string list; got %r' % value)

  def ValidateMSBuild(self, value):
    if not isinstance(value, basestring) and not isinstance(value, list):
      raise ValueError('expected string list; got %r' % value)

  def ConvertToMSBuild(self, value):
    # Convert the macros in each element (or in the bare string).
    if isinstance(value, list):
      return [ConvertVCMacrosToMSBuild(i) for i in value]
    else:
      return ConvertVCMacrosToMSBuild(value)
class _Boolean(_Type):
  """Boolean settings, can have the values 'false' or 'true'."""

  def _Validate(self, value):
    # Only the literal strings are accepted; Python booleans are not.
    if value not in ('true', 'false'):
      raise ValueError('expected bool; got %r' % value)

  def ValidateMSVS(self, value):
    self._Validate(value)

  def ValidateMSBuild(self, value):
    self._Validate(value)

  def ConvertToMSBuild(self, value):
    # The textual form is identical in MSVS and MSBuild.
    self._Validate(value)
    return value
class _Integer(_Type):
  """Integer settings, rendered in decimal or hexadecimal for MSBuild."""

  def __init__(self, msbuild_base=10):
    _Type.__init__(self)
    self._msbuild_base = msbuild_base

  def ValidateMSVS(self, value):
    # Conversion performs the validation; it raises ValueError when bad.
    self.ConvertToMSBuild(value)

  def ValidateMSBuild(self, value):
    # int() raises ValueError if `value` is not valid in the expected base.
    int(value, self._msbuild_base)

  def ConvertToMSBuild(self, value):
    if self._msbuild_base == 10:
      msbuild_format = '%d'
    else:
      msbuild_format = '0x%04x'
    return msbuild_format % int(value)
class _Enumeration(_Type):
  """Type of settings that is an enumeration.

  In MSVS, the values are indexes like '0', '1', and '2'.
  MSBuild uses text labels that are more representative, like 'Win32'.

  Constructor args:
    label_list: an array of MSBuild labels that correspond to the MSVS index.
        In the rare cases where MSVS has skipped an index value, None is
        used in the array to indicate the unused spot.
    new: an array of labels that are new to MSBuild.
  """

  def __init__(self, label_list, new=None):
    _Type.__init__(self)
    self._label_list = label_list
    # Every non-None label is a valid MSBuild value...
    self._msbuild_values = set(label for label in label_list
                               if label is not None)
    # ...plus any labels that exist only in MSBuild.
    if new is not None:
      self._msbuild_values.update(new)

  def ValidateMSVS(self, value):
    # Conversion performs the validation; it raises when `value` is bad.
    self.ConvertToMSBuild(value)

  def ValidateMSBuild(self, value):
    if value not in self._msbuild_values:
      raise ValueError('unrecognized enumerated value %s' % value)

  def ConvertToMSBuild(self, value):
    index = int(value)
    if not 0 <= index < len(self._label_list):
      raise ValueError('index value (%d) not in expected range [0, %d)' %
                       (index, len(self._label_list)))
    label = self._label_list[index]
    if label is None:
      # A None slot marks an MSVS index with no MSBuild equivalent.
      raise ValueError('converted value for %s not specified.' % value)
    return label
# Instantiate the various generic types. These shared singletons are used by
# the setting tables below.
_boolean = _Boolean()
_integer = _Integer()
# For now, we don't do any special validation on these types:
_string = _String()
_file_name = _String()
_folder_name = _String()
_file_list = _StringList()
_folder_list = _StringList()
_string_list = _StringList()
# Some boolean settings went from numerical values to boolean. The
# mapping is 0: default, 1: false, 2: true.
_newly_boolean = _Enumeration(['', 'false', 'true'])
def _Same(tool, name, setting_type):
  """Defines a setting that has the same name in MSVS and MSBuild.

  Args:
    tool: a _Tool object with the MSVS and MSBuild names of the tool.
    name: the name of the setting.
    setting_type: the type of this setting.
  """
  # An unchanged name is just the degenerate case of a rename.
  _Renamed(tool, name, name, setting_type)
def _Renamed(tool, msvs_name, msbuild_name, setting_type):
  """Defines a setting for which the name has changed.

  Args:
    tool: a _Tool object with the MSVS and MSBuild names of the tool.
    msvs_name: the name of the MSVS setting.
    msbuild_name: the name of the MSBuild setting.
    setting_type: the type of this setting.
  """
  def _Translate(value, msbuild_settings):
    # Closure run at conversion time: writes the converted value into the
    # tool's MSBuild settings dict under the new name.
    msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
    msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)

  _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
  _msbuild_validators[tool.msbuild_name][msbuild_name] = (
      setting_type.ValidateMSBuild)
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
  """Defines a setting that moved to another MSBuild section, name unchanged.

  Convenience wrapper around _MovedAndRenamed with the same name on both
  sides.
  """
  _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
                   setting_type)
def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
                     msbuild_settings_name, setting_type):
  """Defines a setting that may have moved to a new section.

  Args:
    tool: a _Tool object with the MSVS and MSBuild names of the tool.
    msvs_settings_name: the MSVS name of the setting.
    msbuild_tool_name: the name of the MSBuild tool to place the setting under.
    msbuild_settings_name: the MSBuild name of the setting.
    setting_type: the type of this setting.
  """
  def _Translate(value, msbuild_settings):
    # The destination section may not exist yet; create it on demand.
    tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
    tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)

  _msvs_validators[tool.msvs_name][msvs_settings_name] = (
      setting_type.ValidateMSVS)
  validator = setting_type.ValidateMSBuild
  _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
def _MSVSOnly(tool, name, setting_type):
  """Defines a setting that is only found in MSVS.

  Args:
    tool: a _Tool object with the MSVS and MSBuild names of the tool.
    name: the name of the setting.
    setting_type: the type of this setting.
  """
  def _Translate(unused_value, unused_msbuild_settings):
    # Since this is for MSVS only settings, no translation will happen.
    pass

  _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
  # Registering the no-op converter keeps conversion from warning about it.
  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
def _MSBuildOnly(tool, name, setting_type):
  """Defines a setting that is only found in MSBuild.

  Args:
    tool: a _Tool object with the MSVS and MSBuild names of the tool.
    name: the name of the setting.
    setting_type: the type of this setting.
  """
  def _Translate(value, msbuild_settings):
    # Let msbuild-only properties get translated as-is from msvs_settings.
    tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
    tool_settings[name] = value

  _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
def _ConvertedToAdditionalOption(tool, msvs_name, flag):
  """Defines a setting that's handled via a command line option in MSBuild.

  Args:
    tool: a _Tool object with the MSVS and MSBuild names of the tool.
    msvs_name: the name of the MSVS setting that if 'true' becomes a flag
    flag: the flag to insert at the end of the AdditionalOptions
  """
  def _Translate(value, msbuild_settings):
    # Only a 'true' value emits anything; the flag is appended to whatever
    # AdditionalOptions have already been accumulated.
    if value == 'true':
      tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
      if 'AdditionalOptions' in tool_settings:
        new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
      else:
        new_flags = flag
      tool_settings['AdditionalOptions'] = new_flags
  _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
def _CustomGeneratePreprocessedFile(tool, msvs_name):
  """Defines the GeneratePreprocessedFile setting, which needs custom handling.

  The single MSVS enumeration value ('0'/'1'/'2') fans out into the pair of
  MSBuild booleans PreprocessToFile and PreprocessSuppressLineNumbers.
  """
  def _Translate(value, msbuild_settings):
    tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
    if value == '0':
      tool_settings['PreprocessToFile'] = 'false'
      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
    elif value == '1':  # /P
      tool_settings['PreprocessToFile'] = 'true'
      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
    elif value == '2':  # /EP /P
      tool_settings['PreprocessToFile'] = 'true'
      tool_settings['PreprocessSuppressLineNumbers'] = 'true'
    else:
      raise ValueError('value must be one of [0, 1, 2]; got %s' % value)
  # Create a bogus validator that looks for '0', '1', or '2'
  msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
  _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
  msbuild_validator = _boolean.ValidateMSBuild
  msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
  msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
  msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
# Macros known to carry a built-in trailing slash. FixVCMacroSlashes uses this
# regex to collapse e.g. '$(IntDir)\' down to the bare macro.
fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
fix_vc_macro_slashes_regex = re.compile(
  r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
)

# Regular expression to detect keys that were generated by exclusion lists
# (e.g. 'AdditionalOptions_excluded').
_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
  """Verify that 'setting' is valid if it is generated from an exclusion list.

  If the setting appears to be generated from an exclusion list, the root name
  is checked.

  Args:
    setting: A string that is the setting name to validate
    settings: A dictionary where the keys are valid settings
    error_msg: The message to emit in the event of error
    stderr: The stream receiving the error messages.
  """
  # This may be unrecognized because it's an exclusion list. If the
  # setting name has the _excluded suffix, then check the root name.
  unrecognized = True
  m = re.match(_EXCLUDED_SUFFIX_RE, setting)
  if m:
    root_setting = m.group(1)
    unrecognized = root_setting not in settings
  if unrecognized:
    # We don't know this setting. Give a warning.
    # NOTE: Python 2 print-to-stream syntax; warnings are non-fatal.
    print >> stderr, error_msg
def FixVCMacroSlashes(s):
  """Replace macros which have excessive following slashes.

  These macros are known to have a built-in trailing slash. Furthermore, many
  scripts hiccup on processing paths with extra slashes in the middle.

  This list is probably not exhaustive. Add as needed.
  """
  # Cheap pre-filter: a string without '$' cannot contain a macro.
  if '$' not in s:
    return s
  return fix_vc_macro_slashes_regex.sub(r'\1', s)
def ConvertVCMacrosToMSBuild(s):
  """Convert the MSVS macros found in the string to the MSBuild equivalent.

  This list is probably not exhaustive. Add as needed.
  """
  # Cheap pre-filter: a string without '$' cannot contain a macro.
  if '$' in s:
    replace_map = {
        '$(ConfigurationName)': '$(Configuration)',
        '$(InputDir)': '%(RelativeDir)',
        '$(InputExt)': '%(Extension)',
        '$(InputFileName)': '%(Filename)%(Extension)',
        '$(InputName)': '%(Filename)',
        '$(InputPath)': '%(Identity)',
        '$(ParentName)': '$(ProjectFileName)',
        '$(PlatformName)': '$(Platform)',
        '$(SafeInputName)': '%(Filename)',
    }
    # NOTE: iteritems() keeps this module Python 2 only.
    for old, new in replace_map.iteritems():
      s = s.replace(old, new)
    s = FixVCMacroSlashes(s)
  return s
def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
  """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).

  Args:
    msvs_settings: A dictionary.  The key is the tool name.  The values are
        themselves dictionaries of settings and their values.
    stderr: The stream receiving the error messages.

  Returns:
    A dictionary of MSBuild settings.  The key is either the MSBuild tool name
    or the empty string (for the global settings).  The values are themselves
    dictionaries of settings and their values.
  """
  msbuild_settings = {}
  for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
    if msvs_tool_name in _msvs_to_msbuild_converters:
      msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
      for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
        if msvs_setting in msvs_tool:
          # Invoke the translation function.
          try:
            msvs_tool[msvs_setting](msvs_value, msbuild_settings)
          except ValueError, e:
            # A bad value is reported but does not abort the conversion.
            print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
                              '%s' % (msvs_tool_name, msvs_setting, e))
        else:
          # The setting might be an auto-generated '<name>_excluded' key.
          _ValidateExclusionSetting(msvs_setting,
                                    msvs_tool,
                                    ('Warning: unrecognized setting %s/%s '
                                     'while converting to MSBuild.' %
                                     (msvs_tool_name, msvs_setting)),
                                    stderr)
    else:
      print >> stderr, ('Warning: unrecognized tool %s while converting to '
                        'MSBuild.' % msvs_tool_name)
  return msbuild_settings
def ValidateMSVSSettings(settings, stderr=sys.stderr):
  """Validates that the names of the settings are valid for MSVS.

  Args:
    settings: A dictionary.  The key is the tool name.  The values are
        themselves dictionaries of settings and their values.
    stderr: The stream receiving the error messages.
  """
  _ValidateSettings(_msvs_validators, settings, stderr)
def ValidateMSBuildSettings(settings, stderr=sys.stderr):
  """Validates that the names of the settings are valid for MSBuild.

  Args:
    settings: A dictionary.  The key is the tool name.  The values are
        themselves dictionaries of settings and their values.
    stderr: The stream receiving the error messages.
  """
  _ValidateSettings(_msbuild_validators, settings, stderr)
def _ValidateSettings(validators, settings, stderr):
  """Validates that the settings are valid for MSBuild or MSVS.

  We currently only validate the names of the settings, not their values.

  Args:
    validators: A dictionary of tools and their validators.
    settings: A dictionary.  The key is the tool name.  The values are
        themselves dictionaries of settings and their values.
    stderr: The stream receiving the error messages.
  """
  for tool_name in settings:
    if tool_name in validators:
      tool_validators = validators[tool_name]
      for setting, value in settings[tool_name].iteritems():
        if setting in tool_validators:
          try:
            tool_validators[setting](value)
          except ValueError, e:
            # A bad value is reported as a warning, not a hard failure.
            print >> stderr, ('Warning: for %s/%s, %s' %
                              (tool_name, setting, e))
        else:
          # The setting might be an auto-generated '<name>_excluded' key.
          _ValidateExclusionSetting(setting,
                                    tool_validators,
                                    ('Warning: unrecognized setting %s/%s' %
                                     (tool_name, setting)),
                                    stderr)
    else:
      print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
# MSVS and MSBuild names of the tools.
_compile = _Tool('VCCLCompilerTool', 'ClCompile')
_link = _Tool('VCLinkerTool', 'Link')
_midl = _Tool('VCMIDLTool', 'Midl')
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Manifest')
_masm = _Tool('MASM', 'MASM')

# Register each tool so the per-setting tables below can be populated.
_AddTool(_compile)
_AddTool(_link)
_AddTool(_midl)
_AddTool(_rc)
_AddTool(_lib)
_AddTool(_manifest)
_AddTool(_masm)
# Add sections only found in the MSBuild settings.
_msbuild_validators[''] = {}
_msbuild_validators['ProjectReference'] = {}
_msbuild_validators['ManifestResourceCompile'] = {}
# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
# ClCompile in MSBuild.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
# the schema of the MSBuild ClCompile settings.
# Options that have the same name in MSVS and MSBuild
_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I
_Same(_compile, 'AdditionalOptions', _string_list)
_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI
_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa
_Same(_compile, 'BrowseInformationFile', _file_name)
_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS
_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za
_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd
_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT
_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false'
_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx
_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except
_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope
_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI
_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU
_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc
_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X
_Same(_compile, 'MinimalRebuild', _boolean) # /Gm
_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl
_Same(_compile, 'OmitFramePointers', _boolean) # /Oy
_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D
_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd
_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR
_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes
_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc
_Same(_compile, 'StringPooling', _boolean) # /GF
_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t
_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u
_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
_Same(_compile, 'UseFullPaths', _boolean) # /FC
_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
_Same(_compile, 'XMLDocumentationFileName', _file_name)
_Same(_compile, 'AssemblerOutput',
_Enumeration(['NoListing',
'AssemblyCode', # /FA
'All', # /FAcs
'AssemblyAndMachineCode', # /FAc
'AssemblyAndSourceCode'])) # /FAs
_Same(_compile, 'BasicRuntimeChecks',
_Enumeration(['Default',
'StackFrameRuntimeCheck', # /RTCs
'UninitializedLocalUsageCheck', # /RTCu
'EnableFastChecks'])) # /RTC1
_Same(_compile, 'BrowseInformation',
_Enumeration(['false',
'true', # /FR
'true'])) # /Fr
_Same(_compile, 'CallingConvention',
_Enumeration(['Cdecl', # /Gd
'FastCall', # /Gr
'StdCall', # /Gz
'VectorCall'])) # /Gv
_Same(_compile, 'CompileAs',
_Enumeration(['Default',
'CompileAsC', # /TC
'CompileAsCpp'])) # /TP
_Same(_compile, 'DebugInformationFormat',
_Enumeration(['', # Disabled
'OldStyle', # /Z7
None,
'ProgramDatabase', # /Zi
'EditAndContinue'])) # /ZI
_Same(_compile, 'EnableEnhancedInstructionSet',
_Enumeration(['NotSet',
'StreamingSIMDExtensions', # /arch:SSE
'StreamingSIMDExtensions2', # /arch:SSE2
'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
'NoExtensions', # /arch:IA32 (vs2012+)
# This one only exists in the new msbuild format.
'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
]))
_Same(_compile, 'ErrorReporting',
_Enumeration(['None', # /errorReport:none
'Prompt', # /errorReport:prompt
'Queue'], # /errorReport:queue
new=['Send'])) # /errorReport:send"
_Same(_compile, 'ExceptionHandling',
_Enumeration(['false',
'Sync', # /EHsc
'Async'], # /EHa
new=['SyncCThrow'])) # /EHs
_Same(_compile, 'FavorSizeOrSpeed',
_Enumeration(['Neither',
'Speed', # /Ot
'Size'])) # /Os
_Same(_compile, 'FloatingPointModel',
_Enumeration(['Precise', # /fp:precise
'Strict', # /fp:strict
'Fast'])) # /fp:fast
_Same(_compile, 'InlineFunctionExpansion',
_Enumeration(['Default',
'OnlyExplicitInline', # /Ob1
'AnySuitable'], # /Ob2
new=['Disabled'])) # /Ob0
_Same(_compile, 'Optimization',
_Enumeration(['Disabled', # /Od
'MinSpace', # /O1
'MaxSpeed', # /O2
'Full'])) # /Ox
_Same(_compile, 'RuntimeLibrary',
_Enumeration(['MultiThreaded', # /MT
'MultiThreadedDebug', # /MTd
'MultiThreadedDLL', # /MD
'MultiThreadedDebugDLL'])) # /MDd
_Same(_compile, 'StructMemberAlignment',
_Enumeration(['Default',
'1Byte', # /Zp1
'2Bytes', # /Zp2
'4Bytes', # /Zp4
'8Bytes', # /Zp8
'16Bytes'])) # /Zp16
_Same(_compile, 'WarningLevel',
_Enumeration(['TurnOffAllWarnings', # /W0
'Level1', # /W1
'Level2', # /W2
'Level3', # /W3
'Level4'], # /W4
new=['EnableAllWarnings'])) # /Wall
# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
_boolean) # /Gy
_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
_boolean) # /Oi
_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C
_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo
_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp
_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
_file_name) # Used with /Yc and /Yu
_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
_file_name) # /Fp
_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
_Enumeration(['NotUsing', # VS recognized '' for this value too.
'Create', # /Yc
'Use'])) # /Yu
_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX
_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
# MSVS options not found in MSBuild.
_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
# MSBuild options not found in MSVS.
_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
_MSBuildOnly(_compile, 'CompileAsManaged',
_Enumeration([], new=['false',
'true', # /clr
'Pure', # /clr:pure
'Safe', # /clr:safe
'OldSyntax'])) # /clr:oldSyntax
_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors
_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we
_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu
# Defines a setting that needs very customized processing
_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
# Directives for converting MSVS VCLinkerTool to MSBuild Link.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
# the schema of the MSBuild Link settings.
# Options that have the same name in MSVS and MSBuild
_Same(_link, 'AdditionalDependencies', _file_list)
_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
# /MANIFESTDEPENDENCY:
_Same(_link, 'AdditionalManifestDependencies', _file_list)
_Same(_link, 'AdditionalOptions', _string_list)
_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE
_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION
_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE
_Same(_link, 'BaseAddress', _string) # /BASE
_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK
_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD
_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN
_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE
_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC
_Same(_link, 'EntryPointSymbol', _string) # /ENTRY
_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE
_Same(_link, 'FunctionOrder', _file_name) # /ORDER
_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG
_Same(_link, 'GenerateMapFile', _boolean) # /MAP
_Same(_link, 'HeapCommitSize', _string)
_Same(_link, 'HeapReserveSize', _string) # /HEAP
_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL
_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB
_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER
_Same(_link, 'KeyFile', _file_name) # /KEYFILE
_Same(_link, 'ManifestFile', _file_name) # /ManifestFile
_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS
_Same(_link, 'MapFileName', _file_name)
_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT
_Same(_link, 'MergeSections', _string) # /MERGE
_Same(_link, 'MidlCommandFile', _file_name) # /MIDL
_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF
_Same(_link, 'OutputFile', _file_name) # /OUT
_Same(_link, 'PerUserRedirection', _boolean)
_Same(_link, 'Profile', _boolean) # /PROFILE
_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD
_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB
_Same(_link, 'RegisterOutput', _boolean)
_Same(_link, 'SetChecksum', _boolean) # /RELEASE
_Same(_link, 'StackCommitSize', _string)
_Same(_link, 'StackReserveSize', _string) # /STACK
_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED
_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD
_Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO
_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD
_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY
_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT
_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID
_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true'
_Same(_link, 'Version', _string) # /VERSION
_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF
_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED
_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE
_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF
_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE
_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE
_subsystem_enumeration = _Enumeration(
['NotSet',
'Console', # /SUBSYSTEM:CONSOLE
'Windows', # /SUBSYSTEM:WINDOWS
'Native', # /SUBSYSTEM:NATIVE
'EFI Application', # /SUBSYSTEM:EFI_APPLICATION
'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
'EFI ROM', # /SUBSYSTEM:EFI_ROM
'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER
'WindowsCE'], # /SUBSYSTEM:WINDOWSCE
new=['POSIX']) # /SUBSYSTEM:POSIX
_target_machine_enumeration = _Enumeration(
['NotSet',
'MachineX86', # /MACHINE:X86
None,
'MachineARM', # /MACHINE:ARM
'MachineEBC', # /MACHINE:EBC
'MachineIA64', # /MACHINE:IA64
None,
'MachineMIPS', # /MACHINE:MIPS
'MachineMIPS16', # /MACHINE:MIPS16
'MachineMIPSFPU', # /MACHINE:MIPSFPU
'MachineMIPSFPU16', # /MACHINE:MIPSFPU16
None,
None,
None,
'MachineSH4', # /MACHINE:SH4
None,
'MachineTHUMB', # /MACHINE:THUMB
'MachineX64']) # /MACHINE:X64
_Same(_link, 'AssemblyDebug',
_Enumeration(['',
'true', # /ASSEMBLYDEBUG
'false'])) # /ASSEMBLYDEBUG:DISABLE
_Same(_link, 'CLRImageType',
_Enumeration(['Default',
'ForceIJWImage', # /CLRIMAGETYPE:IJW
'ForcePureILImage', # /Switch="CLRIMAGETYPE:PURE
'ForceSafeILImage'])) # /Switch="CLRIMAGETYPE:SAFE
# (Continuation of the VCLinkerTool -> Link conversion directives begun
# above; each call registers how one MSVS linker setting maps to MSBuild.)
_Same(_link, 'CLRThreadAttribute',
      _Enumeration(['DefaultThreadingAttribute',  # /CLRTHREADATTRIBUTE:NONE
                    'MTAThreadingAttribute',  # /CLRTHREADATTRIBUTE:MTA
                    'STAThreadingAttribute']))  # /CLRTHREADATTRIBUTE:STA
_Same(_link, 'DataExecutionPrevention',
      _Enumeration(['',
                    'false',  # /NXCOMPAT:NO
                    'true']))  # /NXCOMPAT
_Same(_link, 'Driver',
      _Enumeration(['NotSet',
                    'Driver',  # /Driver
                    'UpOnly',  # /DRIVER:UPONLY
                    'WDM']))  # /DRIVER:WDM
_Same(_link, 'LinkTimeCodeGeneration',
      _Enumeration(['Default',
                    'UseLinkTimeCodeGeneration',  # /LTCG
                    'PGInstrument',  # /LTCG:PGInstrument
                    'PGOptimization',  # /LTCG:PGOptimize
                    'PGUpdate']))  # /LTCG:PGUpdate
_Same(_link, 'ShowProgress',
      _Enumeration(['NotSet',
                    'LinkVerbose',  # /VERBOSE
                    'LinkVerboseLib'],  # /VERBOSE:Lib
                   new=['LinkVerboseICF',  # /VERBOSE:ICF
                        'LinkVerboseREF',  # /VERBOSE:REF
                        'LinkVerboseSAFESEH',  # /VERBOSE:SAFESEH
                        'LinkVerboseCLR']))  # /VERBOSE:CLR
_Same(_link, 'SubSystem', _subsystem_enumeration)
_Same(_link, 'TargetMachine', _target_machine_enumeration)
_Same(_link, 'UACExecutionLevel',
      _Enumeration(['AsInvoker',  # /level='asInvoker'
                    'HighestAvailable',  # /level='highestAvailable'
                    'RequireAdministrator']))  # /level='requireAdministrator'
_Same(_link, 'MinimumRequiredVersion', _string)
_Same(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX

# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
         _Enumeration(['NoErrorReport',  # /ERRORREPORT:NONE
                       'PromptImmediately',  # /ERRORREPORT:PROMPT
                       'QueueForNextLogin'],  # /ERRORREPORT:QUEUE
                      new=['SendErrorReport']))  # /ERRORREPORT:SEND
_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
         _file_list)  # /NODEFAULTLIB
_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean)  # /NOENTRY
_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean)  # /SWAPRUN:NET

_Moved(_link, 'GenerateManifest', '', _boolean)
_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
_Moved(_link, 'LinkIncremental', '', _newly_boolean)
_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)

# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)

# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean)  # /SAFESEH
_MSBuildOnly(_link, 'LinkDLL', _boolean)  # /DLL Visible='false'
_MSBuildOnly(_link, 'LinkStatus', _boolean)  # /LTCG:STATUS
_MSBuildOnly(_link, 'PreventDllBinding', _boolean)  # /ALLOWBIND
_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean)  # /DELAY:NOBIND
_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name)  # /STUB Visible='false'
_MSBuildOnly(_link, 'SectionAlignment', _integer)  # /ALIGN
_MSBuildOnly(_link, 'SpecifySectionAttributes', _string)  # /SECTION
_MSBuildOnly(_link, 'ForceFileOutput',
             _Enumeration([], new=['Enabled',  # /FORCE
                                   # /FORCE:MULTIPLE
                                   'MultiplyDefinedSymbolOnly',
                                   'UndefinedSymbolOnly']))  # /FORCE:UNRESOLVED
_MSBuildOnly(_link, 'CreateHotPatchableImage',
             _Enumeration([], new=['Enabled',  # /FUNCTIONPADMIN
                                   'X86Image',  # /FUNCTIONPADMIN:5
                                   'X64Image',  # /FUNCTIONPADMIN:6
                                   'ItaniumImage']))  # /FUNCTIONPADMIN:16
_MSBuildOnly(_link, 'CLRSupportLastError',
             _Enumeration([], new=['Enabled',  # /CLRSupportLastError
                                   'Disabled',  # /CLRSupportLastError:NO
                                   # /CLRSupportLastError:SYSTEMDLL
                                   'SystemDlls']))


# Directives for converting VCResourceCompilerTool to ResourceCompile.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
# the schema of the MSBuild ResourceCompile settings.

_Same(_rc, 'AdditionalOptions', _string_list)
_Same(_rc, 'AdditionalIncludeDirectories', _folder_list)  # /I
_Same(_rc, 'Culture', _Integer(msbuild_base=16))
_Same(_rc, 'IgnoreStandardIncludePath', _boolean)  # /X
_Same(_rc, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_rc, 'ResourceOutputFileName', _string)  # /fo
_Same(_rc, 'ShowProgress', _boolean)  # /v
# There is no UI in VisualStudio 2008 to set the following properties.
# However they are found in CL and other tools.  Include them here for
# completeness, as they are very likely to have the same usage pattern.
_Same(_rc, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list)  # /u

# MSBuild options not found in MSVS.
_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean)  # /n
_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)


# Directives for converting VCMIDLTool to Midl.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
# the schema of the MSBuild Midl settings.

_Same(_midl, 'AdditionalIncludeDirectories', _folder_list)  # /I
_Same(_midl, 'AdditionalOptions', _string_list)
_Same(_midl, 'CPreprocessOptions', _string)  # /cpp_opt
_Same(_midl, 'ErrorCheckAllocations', _boolean)  # /error allocation
_Same(_midl, 'ErrorCheckBounds', _boolean)  # /error bounds_check
_Same(_midl, 'ErrorCheckEnumRange', _boolean)  # /error enum
_Same(_midl, 'ErrorCheckRefPointers', _boolean)  # /error ref
_Same(_midl, 'ErrorCheckStubData', _boolean)  # /error stub_data
_Same(_midl, 'GenerateStublessProxies', _boolean)  # /Oicf
_Same(_midl, 'GenerateTypeLibrary', _boolean)
_Same(_midl, 'HeaderFileName', _file_name)  # /h
_Same(_midl, 'IgnoreStandardIncludePath', _boolean)  # /no_def_idir
_Same(_midl, 'InterfaceIdentifierFileName', _file_name)  # /iid
_Same(_midl, 'MkTypLibCompatible', _boolean)  # /mktyplib203
_Same(_midl, 'OutputDirectory', _string)  # /out
_Same(_midl, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_midl, 'ProxyFileName', _file_name)  # /proxy
_Same(_midl, 'RedirectOutputAndErrors', _file_name)  # /o
_Same(_midl, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_midl, 'TypeLibraryName', _file_name)  # /tlb
_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list)  # /U
_Same(_midl, 'WarnAsError', _boolean)  # /WX

_Same(_midl, 'DefaultCharType',
      _Enumeration(['Unsigned',  # /char unsigned
                    'Signed',  # /char signed
                    'Ascii']))  # /char ascii7
_Same(_midl, 'TargetEnvironment',
      _Enumeration(['NotSet',
                    'Win32',  # /env win32
                    'Itanium',  # /env ia64
                    'X64']))  # /env x64
_Same(_midl, 'EnableErrorChecks',
      _Enumeration(['EnableCustom',
                    'None',  # /error none
                    'All']))  # /error all
_Same(_midl, 'StructMemberAlignment',
      _Enumeration(['NotSet',
                    '1',  # Zp1
                    '2',  # Zp2
                    '4',  # Zp4
                    '8']))  # Zp8
_Same(_midl, 'WarningLevel',
      _Enumeration(['0',  # /W0
                    '1',  # /W1
                    '2',  # /W2
                    '3',  # /W3
                    '4']))  # /W4

_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name)  # /dlldata
_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
         _boolean)  # /robust

# MSBuild options not found in MSVS.
_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean)  # /app_config
_MSBuildOnly(_midl, 'ClientStubFile', _file_name)  # /cstub
_MSBuildOnly(_midl, 'GenerateClientFiles',
             _Enumeration([], new=['Stub',  # /client stub
                                   'None']))  # /client none
# NOTE(review): the switch for server-file generation is MIDL's /server, not
# /client (comments below corrected accordingly; see MIDL docs to confirm).
_MSBuildOnly(_midl, 'GenerateServerFiles',
             _Enumeration([], new=['Stub',  # /server stub
                                   'None']))  # /server none
_MSBuildOnly(_midl, 'LocaleID', _integer)  # /lcid DECIMAL
_MSBuildOnly(_midl, 'ServerStubFile', _file_name)  # /sstub
_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean)  # /no_warn
_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_midl, 'TypeLibFormat',
             _Enumeration([], new=['NewFormat',  # /newtlb
                                   'OldFormat']))  # /oldtlb


# Directives for converting VCLibrarianTool to Lib.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
# the schema of the MSBuild Lib settings.

_Same(_lib, 'AdditionalDependencies', _file_list)
_Same(_lib, 'AdditionalLibraryDirectories', _folder_list)  # /LIBPATH
_Same(_lib, 'AdditionalOptions', _string_list)
_Same(_lib, 'ExportNamedFunctions', _string_list)  # /EXPORT
_Same(_lib, 'ForceSymbolReferences', _string)  # /INCLUDE
_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean)  # /NODEFAULTLIB
_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list)  # /NODEFAULTLIB
_Same(_lib, 'ModuleDefinitionFile', _file_name)  # /DEF
_Same(_lib, 'OutputFile', _file_name)  # /OUT
_Same(_lib, 'SuppressStartupBanner', _boolean)  # /NOLOGO
_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
_Same(_lib, 'LinkTimeCodeGeneration', _boolean)  # /LTCG
_Same(_lib, 'TargetMachine', _target_machine_enumeration)

# TODO(jeanluc) _link defines the same value that gets moved to
# ProjectReference.  We may want to validate that they are consistent.
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)

_MSBuildOnly(_lib, 'DisplayLibrary', _string)  # /LIST Visible='false'
_MSBuildOnly(_lib, 'ErrorReporting',
             _Enumeration([], new=['PromptImmediately',  # /ERRORREPORT:PROMPT
                                   'QueueForNextLogin',  # /ERRORREPORT:QUEUE
                                   'SendErrorReport',  # /ERRORREPORT:SEND
                                   'NoErrorReport']))  # /ERRORREPORT:NONE
_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
_MSBuildOnly(_lib, 'Name', _file_name)  # /NAME
_MSBuildOnly(_lib, 'RemoveObjects', _file_list)  # /REMOVE
_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean)  # /WX
_MSBuildOnly(_lib, 'Verbose', _boolean)


# Directives for converting VCManifestTool to Mt.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
# the schema of the MSBuild Mt settings.

# Options that have the same name in MSVS and MSBuild
_Same(_manifest, 'AdditionalManifestFiles', _file_list)  # /manifest
_Same(_manifest, 'AdditionalOptions', _string_list)
_Same(_manifest, 'AssemblyIdentity', _string)  # /identity:
_Same(_manifest, 'ComponentFileName', _file_name)  # /dll
_Same(_manifest, 'GenerateCatalogFiles', _boolean)  # /makecdfs
_Same(_manifest, 'InputResourceManifests', _string)  # /inputresource
_Same(_manifest, 'OutputManifestFile', _file_name)  # /out
_Same(_manifest, 'RegistrarScriptFile', _file_name)  # /rgs
_Same(_manifest, 'ReplacementsFile', _file_name)  # /replacements
_Same(_manifest, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_manifest, 'TypeLibraryFile', _file_name)  # /tlb:
_Same(_manifest, 'UpdateFileHashes', _boolean)  # /hashupdate
_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
_Same(_manifest, 'VerboseOutput', _boolean)  # /verbose

# Options that have moved location.
_MovedAndRenamed(_manifest, 'ManifestResourceFile',
                 'ManifestResourceCompile',
                 'ResourceOutputFileName',
                 _file_name)
_Moved(_manifest, 'EmbedManifest', '', _boolean)

# MSVS options not found in MSBuild.
_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)

# MSBuild options not found in MSVS.
_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean)  # /category
_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
             _file_name)  # /managedassemblyname
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string)  # /outputresource
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean)  # /nodependency
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)


# Directives for MASM.
# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
# MSBuild MASM settings.

# Options that have the same name in MSVS and MSBuild.
_Same(_masm, 'UseSafeExceptionHandlers', _boolean)  # /safeseh
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import os
from tvcm import fake_fs
class FakeFSUnittest(unittest.TestCase):
  """Tests for fake_fs.FakeFS, an in-memory stand-in for the filesystem.

  Fixes applied: Python-2-only ``gen.next()`` replaced with the builtin
  ``next(gen)`` (works on Python 2.6+ and Python 3), the deprecated
  ``assertEquals`` alias replaced with ``assertEqual``, the file handle in
  testBasic is now closed via a context manager, and stray dataset residue
  at the end of the class was removed.
  """

  def testBasic(self):
    """Files added to the fake FS are visible via os.path and open()."""
    fs = fake_fs.FakeFS()
    fs.AddFile('/blah/x', 'foobar')
    with fs:
      self.assertTrue(os.path.exists('/blah/x'))
      # Use a context manager so the (fake) file handle is closed.
      with open('/blah/x', 'r') as f:
        self.assertEqual('foobar', f.read())

  def testWithableOpen(self):
    """open() inside the fake FS supports the with-statement protocol."""
    fs = fake_fs.FakeFS()
    fs.AddFile('/blah/x', 'foobar')
    with fs:
      with open('/blah/x', 'r') as f:
        self.assertEqual('foobar', f.read())

  def testWalk(self):
    """os.walk() yields each fake directory exactly once, then stops.

    The expected tuples below assume the fake FS yields siblings in a
    deterministic (sorted-looking) order -- TODO confirm against fake_fs.
    """
    fs = fake_fs.FakeFS()
    fs.AddFile('/x/w2/w3/z3.txt', '')
    fs.AddFile('/x/w/z.txt', '')
    fs.AddFile('/x/y.txt', '')
    fs.AddFile('/a.txt', 'foobar')
    with fs:
      gen = os.walk('/')
      self.assertEqual(('/', ['x'], ['a.txt']), next(gen))
      self.assertEqual(('/x', ['w', 'w2'], ['y.txt']), next(gen))
      self.assertEqual(('/x/w', [], ['z.txt']), next(gen))
      self.assertEqual(('/x/w2', ['w3'], []), next(gen))
      self.assertEqual(('/x/w2/w3', [], ['z3.txt']), next(gen))
      self.assertRaises(StopIteration, lambda: next(gen))
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.cache;
import com.google.common.annotations.GwtCompatible;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ExecutionError;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.CompatibleWith;
import com.google.errorprone.annotations.DoNotMock;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import org.jspecify.annotations.Nullable;
/**
* A semi-persistent mapping from keys to values. Cache entries are manually added using {@link
* #get(Object, Callable)} or {@link #put(Object, Object)}, and are stored in the cache until either
* evicted or manually invalidated. The common way to build instances is using {@link CacheBuilder}.
*
* <p>Implementations of this interface are expected to be thread-safe, and can be safely accessed
* by multiple concurrent threads.
*
* @param <K> the type of the cache's keys, which are not permitted to be null
* @param <V> the type of the cache's values, which are not permitted to be null
* @author Charles Fry
* @since 10.0
*/
@DoNotMock("Use CacheBuilder.newBuilder().build()")
@GwtCompatible
public interface Cache<K, V> {
/**
* Returns the value associated with {@code key} in this cache, or {@code null} if there is no
* cached value for {@code key}.
*
* @since 11.0
*/
@CanIgnoreReturnValue // TODO(b/27479612): consider removing this?
@Nullable V getIfPresent(@CompatibleWith("K") Object key);
/**
* Returns the value associated with {@code key} in this cache, obtaining that value from {@code
* loader} if necessary. The method improves upon the conventional "if cached, return; otherwise
* create, cache and return" pattern. For further improvements, use {@link LoadingCache} and its
* {@link LoadingCache#get(Object) get(K)} method instead of this one.
*
* <p>Among the improvements that this method and {@code LoadingCache.get(K)} both provide are:
*
* <ul>
* <li>{@linkplain LoadingCache#get(Object) awaiting the result of a pending load} rather than
* starting a redundant one
* <li>eliminating the error-prone caching boilerplate
* <li>tracking load {@linkplain #stats statistics}
* </ul>
*
* <p>Among the further improvements that {@code LoadingCache} can provide but this method cannot:
*
* <ul>
* <li>consolidation of the loader logic to {@linkplain CacheBuilder#build(CacheLoader) a single
* authoritative location}
* <li>{@linkplain LoadingCache#refresh refreshing of entries}, including {@linkplain
* CacheBuilder#refreshAfterWrite automated refreshing}
* <li>{@linkplain LoadingCache#getAll bulk loading requests}, including {@linkplain
* CacheLoader#loadAll bulk loading implementations}
* </ul>
*
* <p><b>Warning:</b> For any given key, every {@code loader} used with it should compute the same
* value. Otherwise, a call that passes one {@code loader} may return the result of another call
* with a differently behaving {@code loader}. For example, a call that requests a short timeout
* for an RPC may wait for a similar call that requests a long timeout, or a call by an
* unprivileged user may return a resource accessible only to a privileged user making a similar
* call. To prevent this problem, create a key object that includes all values that affect the
* result of the query. Or use {@code LoadingCache.get(K)}, which lacks the ability to refer to
* state other than that in the key.
*
* <p><b>Warning:</b> as with {@link CacheLoader#load}, {@code loader} <b>must not</b> return
* {@code null}; it may either return a non-null value or throw an exception.
*
* <p>No observable state associated with this cache is modified until loading completes.
*
* @throws ExecutionException if a checked exception was thrown while loading the value
* @throws UncheckedExecutionException if an unchecked exception was thrown while loading the
* value
* @throws ExecutionError if an error was thrown while loading the value
* @since 11.0
*/
@CanIgnoreReturnValue // TODO(b/27479612): consider removing this
V get(K key, Callable<? extends V> loader) throws ExecutionException;
/**
* Returns a map of the values associated with {@code keys} in this cache. The returned map will
* only contain entries which are already present in the cache.
*
* @since 11.0
*/
/*
* <? extends Object> is mostly the same as <?> to plain Java. But to nullness checkers, they
* differ: <? extends Object> means "non-null types," while <?> means "all types."
*/
ImmutableMap<K, V> getAllPresent(Iterable<? extends Object> keys);
/**
* Associates {@code value} with {@code key} in this cache. If the cache previously contained a
* value associated with {@code key}, the old value is replaced by {@code value}.
*
* <p>Prefer {@link #get(Object, Callable)} when using the conventional "if cached, return;
* otherwise create, cache and return" pattern.
*
* @since 11.0
*/
void put(K key, V value);
/**
* Copies all of the mappings from the specified map to the cache. The effect of this call is
* equivalent to that of calling {@code put(k, v)} on this map once for each mapping from key
* {@code k} to value {@code v} in the specified map. The behavior of this operation is undefined
* if the specified map is modified while the operation is in progress.
*
* @since 12.0
*/
void putAll(Map<? extends K, ? extends V> m);
/** Discards any cached value for key {@code key}. */
void invalidate(@CompatibleWith("K") Object key);
/**
* Discards any cached values for keys {@code keys}.
*
* @since 11.0
*/
// For discussion of <? extends Object>, see getAllPresent.
void invalidateAll(Iterable<? extends Object> keys);
/** Discards all entries in the cache. */
void invalidateAll();
/** Returns the approximate number of entries in this cache. */
long size();
/**
* Returns a current snapshot of this cache's cumulative statistics, or a set of default values if
* the cache is not recording statistics. All statistics begin at zero and never decrease over the
* lifetime of the cache.
*
* <p><b>Warning:</b> this cache may not be recording statistical data. For example, a cache
* created using {@link CacheBuilder} only does so if the {@link CacheBuilder#recordStats} method
* was called. If statistics are not being recorded, a {@code CacheStats} instance with zero for
* all values is returned.
*
*/
CacheStats stats();
/**
* Returns a view of the entries stored in this cache as a thread-safe map. Modifications made to
* the map directly affect the cache.
*
* <p>Iterators from the returned map are at least <i>weakly consistent</i>: they are safe for
* concurrent use, but if the cache is modified (including by eviction) after the iterator is
* created, it is undefined which of the changes (if any) will be reflected in that iterator.
*/
ConcurrentMap<K, V> asMap();
/**
* Performs any pending maintenance operations needed by the cache. Exactly which activities are
* performed -- if any -- is implementation-dependent.
*/
void cleanUp();
} | java | github | https://github.com/google/guava | guava/src/com/google/common/cache/Cache.java |
#!/bin/sh

test_description='avoid rewriting packed-refs unnecessarily'

. ./test-lib.sh

# These tests exercise the "files" ref backend only; skip everywhere else.
if test_have_prereq !REFFILES
then
	skip_all='skipping files-backend specific pack-refs tests'
	test_done
fi

# Add an identifying mark to the packed-refs file header line.  This
# shouldn't upset readers, and it should be omitted if the file is
# ever rewritten.
mark_packed_refs () {
	sed -e "s/^\(#.*\)/\1 t1409 /" .git/packed-refs >.git/packed-refs.new &&
	mv .git/packed-refs.new .git/packed-refs
}

# Verify that the packed-refs file is still marked.
check_packed_refs_marked () {
	grep -q '^#.* t1409 ' .git/packed-refs
}

test_expect_success 'setup' '
	git commit --allow-empty -m "Commit A" &&
	A=$(git rev-parse HEAD) &&
	git commit --allow-empty -m "Commit B" &&
	B=$(git rev-parse HEAD) &&
	git commit --allow-empty -m "Commit C" &&
	C=$(git rev-parse HEAD)
'

test_expect_success 'do not create packed-refs file gratuitously' '
	test_path_is_missing .git/packed-refs &&
	git update-ref refs/heads/foo $A &&
	test_path_is_missing .git/packed-refs &&
	git update-ref refs/heads/foo $B &&
	test_path_is_missing .git/packed-refs &&
	git update-ref refs/heads/foo $C $B &&
	test_path_is_missing .git/packed-refs &&
	git update-ref -d refs/heads/foo &&
	test_path_is_missing .git/packed-refs
'

test_expect_success 'check that marking the packed-refs file works' '
	git for-each-ref >expected &&
	git pack-refs --all &&
	mark_packed_refs &&
	check_packed_refs_marked &&
	git for-each-ref >actual &&
	test_cmp expected actual &&
	git pack-refs --all &&
	! check_packed_refs_marked &&
	git for-each-ref >actual2 &&
	test_cmp expected actual2
'

test_expect_success 'leave packed-refs untouched on update of packed' '
	git update-ref refs/heads/packed-update $A &&
	git pack-refs --all &&
	mark_packed_refs &&
	git update-ref refs/heads/packed-update $B &&
	check_packed_refs_marked
'

test_expect_success 'leave packed-refs untouched on checked update of packed' '
	git update-ref refs/heads/packed-checked-update $A &&
	git pack-refs --all &&
	mark_packed_refs &&
	git update-ref refs/heads/packed-checked-update $B $A &&
	check_packed_refs_marked
'

test_expect_success 'leave packed-refs untouched on verify of packed' '
	git update-ref refs/heads/packed-verify $A &&
	git pack-refs --all &&
	mark_packed_refs &&
	echo "verify refs/heads/packed-verify $A" | git update-ref --stdin &&
	check_packed_refs_marked
'

test_expect_success 'touch packed-refs on delete of packed' '
	git update-ref refs/heads/packed-delete $A &&
	git pack-refs --all &&
	mark_packed_refs &&
	git update-ref -d refs/heads/packed-delete &&
	! check_packed_refs_marked
'

test_expect_success 'leave packed-refs untouched on update of loose' '
	git pack-refs --all &&
	git update-ref refs/heads/loose-update $A &&
	mark_packed_refs &&
	git update-ref refs/heads/loose-update $B &&
	check_packed_refs_marked
'

test_expect_success 'leave packed-refs untouched on checked update of loose' '
	git pack-refs --all &&
	git update-ref refs/heads/loose-checked-update $A &&
	mark_packed_refs &&
	git update-ref refs/heads/loose-checked-update $B $A &&
	check_packed_refs_marked
'

test_expect_success 'leave packed-refs untouched on verify of loose' '
	git pack-refs --all &&
	git update-ref refs/heads/loose-verify $A &&
	mark_packed_refs &&
	echo "verify refs/heads/loose-verify $A" | git update-ref --stdin &&
	check_packed_refs_marked
'

test_expect_success 'leave packed-refs untouched on delete of loose' '
	git pack-refs --all &&
	git update-ref refs/heads/loose-delete $A &&
	mark_packed_refs &&
	git update-ref -d refs/heads/loose-delete &&
	check_packed_refs_marked
'

test_done
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Android's lint tool."""
import optparse
import os
import sys
from xml.dom import minidom
from util import build_utils
# Absolute path three directory levels above this file -- presumably the
# top-level Chromium src/ dir (TODO confirm); lint paths are relativized
# against it so output is stable across checkouts.
_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         '..', '..', '..'))
def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
             result_path, product_dir, sources, jar_path):
  """Runs Android lint over |sources| and prints any issues found.

  Every code path returns 0: lint issues are reported to stderr but
  deliberately do not fail the build (see the trailing comment below).
  """

  def _RelativizePath(path):
    """Returns relative path to top-level src dir.

    Args:
      path: A path relative to cwd.
    """
    return os.path.relpath(os.path.abspath(path), _SRC_ROOT)

  def _ProcessConfigFile():
    # Regenerate the processed config only when it is older than the source
    # config, substituting the real product dir for the PRODUCT_DIR
    # placeholder.
    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
      return
    with open(config_path, 'rb') as f:
      content = f.read().replace(
          'PRODUCT_DIR', _RelativizePath(product_dir))
    with open(processed_config_path, 'wb') as f:
      f.write(content)

  def _ProcessResultFile():
    # Fold the concrete product dir back into the PRODUCT_DIR placeholder so
    # the result file does not depend on the local build configuration.
    with open(result_path, 'rb') as f:
      content = f.read().replace(
          _RelativizePath(product_dir), 'PRODUCT_DIR')
    with open(result_path, 'wb') as f:
      f.write(content)

  def _ParseAndShowResultFile():
    # Prints every <issue> element from the XML result file to stderr and
    # returns the number of issues found.
    dom = minidom.parse(result_path)
    issues = dom.getElementsByTagName('issue')
    print >> sys.stderr
    for issue in issues:
      issue_id = issue.attributes['id'].value
      message = issue.attributes['message'].value
      location_elem = issue.getElementsByTagName('location')[0]
      path = location_elem.attributes['file'].value
      line = location_elem.getAttribute('line')
      if line:
        error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
      else:
        # Issues in class files don't have a line number.
        error = '%s %s: %s [warning]' % (path, message, issue_id)
      print >> sys.stderr, error
      for attr in ['errorLine1', 'errorLine2']:
        error_line = issue.getAttribute(attr)
        if error_line:
          print >> sys.stderr, error_line
    return len(issues)

  with build_utils.TempDir() as temp_dir:
    _ProcessConfigFile()

    cmd = [
        _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall',
        '--config', _RelativizePath(processed_config_path),
        '--classpath', _RelativizePath(jar_path),
        '--xml', _RelativizePath(result_path),
    ]

    # There may be multiple source files with the same basename (but in
    # different directories).  It is difficult to determine what part of the
    # path corresponds to the java package, and so instead just link the
    # source files into temporary directories (creating a new one whenever
    # there is a name conflict).
    src_dirs = []

    def NewSourceDir():
      # Creates a fresh temp dir, registers it as a --sources argument on
      # |cmd|, and returns it.
      new_dir = os.path.join(temp_dir, str(len(src_dirs)))
      os.mkdir(new_dir)
      src_dirs.append(new_dir)
      cmd.extend(['--sources', _RelativizePath(new_dir)])
      return new_dir

    def PathInDir(d, src):
      # Where |src| would live if symlinked into directory |d|.
      return os.path.join(d, os.path.basename(src))

    for src in sources:
      # Reuse the first temp dir that does not already contain a file with
      # this basename; otherwise start a new one.
      src_dir = None
      for d in src_dirs:
        if not os.path.exists(PathInDir(d, src)):
          src_dir = d
          break
      if not src_dir:
        src_dir = NewSourceDir()
      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))

    cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))

    if os.path.exists(result_path):
      os.remove(result_path)

    try:
      build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
    except build_utils.CalledProcessError as e:
      # There is a problem with lint usage
      if not os.path.exists(result_path):
        print 'Something is wrong:'
        print e
        return 0
      # There are actual lint issues
      else:
        try:
          num_issues = _ParseAndShowResultFile()
        except Exception:
          print 'Lint created unparseable xml file...'
          print 'File contents:'
          with open(result_path) as f:
            print f.read()
          return 0

        _ProcessResultFile()

        msg = ('\nLint found %d new issues.\n'
               ' - For full explanation refer to %s\n'
               ' - Wanna suppress these issues?\n'
               '   1. Read comment in %s\n'
               '   2. Run "python %s %s"\n' %
               (num_issues,
                _RelativizePath(result_path),
                _RelativizePath(config_path),
                _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
                                             'lint', 'suppress.py')),
                _RelativizePath(result_path)))
        print >> sys.stderr, msg
        # Lint errors do not fail the build.
        return 0

  return 0
def main():
  """Parses flags, runs lint when --enable is set, then updates depfile/stamp.

  Returns the process exit code (0 on success; lint itself never fails the
  build, see _RunLint).
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--lint-path', help='Path to lint executable.')
  parser.add_option('--config-path', help='Path to lint suppressions file.')
  parser.add_option('--processed-config-path',
                    help='Path to processed lint suppressions file.')
  parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
  parser.add_option('--result-path', help='Path to XML lint result file.')
  parser.add_option('--product-dir', help='Path to product dir.')
  parser.add_option('--src-dirs', help='Directories containing java files.')
  parser.add_option('--java-files', help='Paths to java files.')
  parser.add_option('--jar-path', help='Jar file containing class files.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--enable', action='store_true',
                    help='Run lint instead of just touching stamp.')

  options, _ = parser.parse_args()

  build_utils.CheckOptions(
      options, parser, required=['lint_path', 'config_path',
                                 'processed_config_path', 'manifest_path',
                                 'result_path', 'product_dir',
                                 'jar_path'])

  rc = 0

  if options.enable:
    # Sources come either from directories to scan or from an explicit file
    # list; exactly one of the two flags must be supplied.
    sources = []
    if options.src_dirs:
      src_dirs = build_utils.ParseGypList(options.src_dirs)
      sources = build_utils.FindInDirectories(src_dirs, '*.java')
    elif options.java_files:
      sources = build_utils.ParseGypList(options.java_files)
    else:
      print 'One of --src-dirs or --java-files must be specified.'
      return 1
    rc = _RunLint(options.lint_path, options.config_path,
                  options.processed_config_path,
                  options.manifest_path, options.result_path,
                  options.product_dir, sources, options.jar_path)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())

  # Only touch the stamp when lint ran cleanly (rc == 0).
  if options.stamp and not rc:
    build_utils.Touch(options.stamp)

  return rc
# Script entry point.  (Stray dataset-dump residue after sys.exit(main())
# removed -- it was not valid Python.)
if __name__ == '__main__':
  sys.exit(main())
"""
Account constants
"""
from django.utils.text import format_lazy
from django.utils.translation import ugettext_lazy as _
# The minimum and maximum length for the name ("full name") account field
NAME_MIN_LENGTH = 2
NAME_MAX_LENGTH = 255
# The minimum and maximum length for the username account field
USERNAME_MIN_LENGTH = 2
USERNAME_MAX_LENGTH = 30
# The minimum and maximum length for the email account field
EMAIL_MIN_LENGTH = 3
EMAIL_MAX_LENGTH = 254 # Limit per RFCs is 254
ACCOUNT_VISIBILITY_PREF_KEY = 'account_privacy'
# Indicates the user's preference that all users can view the shareable fields in their account information.
ALL_USERS_VISIBILITY = 'all_users'
# Indicates the user's preference that all their account information be private.
PRIVATE_VISIBILITY = 'private'
# Translators: This message is shown when the Unicode usernames are NOT allowed.
# It is shown to users who attempt to create a new account using invalid characters
# in the username.
USERNAME_INVALID_CHARS_ASCII = _(
u"Usernames can only contain letters (A-Z, a-z), numerals (0-9), underscores (_), and hyphens (-)."
)
# Translators: This message is shown only when the Unicode usernames are allowed.
# It is shown to users who attempt to create a new account using invalid characters
# in the username.
USERNAME_INVALID_CHARS_UNICODE = _(
u"Usernames can only contain letters, numerals, and @/./+/-/_ characters."
)
# Translators: This message is shown to users who attempt to create a new account using
# an invalid email format.
EMAIL_INVALID_MSG = _(u'"{email}" is not a valid email address.')
# Translators: This message is shown to users who attempt to create a new
# account using an username/email associated with an existing account.
EMAIL_CONFLICT_MSG = _(
u"It looks like {email_address} belongs to an existing account. "
u"Try again with a different email address."
)
USERNAME_CONFLICT_MSG = _(
u"It looks like {username} belongs to an existing account. "
u"Try again with a different username."
)
# Translators: This message is shown to users who enter a username/email/password
# with an inappropriate length (too short or too long).
USERNAME_BAD_LENGTH_MSG = format_lazy(
_(u"Username must be between {min} and {max} characters long."),
min=USERNAME_MIN_LENGTH,
max=USERNAME_MAX_LENGTH,
)
EMAIL_BAD_LENGTH_MSG = format_lazy(
_(u"Enter a valid email address that contains at least {min} characters."),
min=EMAIL_MIN_LENGTH,
)
# These strings are normally not user-facing.
USERNAME_BAD_TYPE_MSG = u"Username must be a string."
EMAIL_BAD_TYPE_MSG = u"Email must be a string."
PASSWORD_BAD_TYPE_MSG = u"Password must be a string."
# Translators: These messages are shown to users who do not enter information
# into the required field or enter it incorrectly.
REQUIRED_FIELD_NAME_MSG = _(u"Enter your full name.")
REQUIRED_FIELD_CONFIRM_EMAIL_MSG = _(u"The email addresses do not match.")
REQUIRED_FIELD_COUNTRY_MSG = _(u"Select your country or region of residence.")
REQUIRED_FIELD_PROFESSION_SELECT_MSG = _(u"Select your profession.")
REQUIRED_FIELD_SPECIALTY_SELECT_MSG = _(u"Select your specialty.")
REQUIRED_FIELD_PROFESSION_TEXT_MSG = _(u"Enter your profession.")
REQUIRED_FIELD_SPECIALTY_TEXT_MSG = _(u"Enter your specialty.")
REQUIRED_FIELD_CITY_MSG = _(u"Enter your city.")
REQUIRED_FIELD_GOALS_MSG = _(u"Tell us your goals.")
REQUIRED_FIELD_LEVEL_OF_EDUCATION_MSG = _(u"Select the highest level of education you have completed.")
REQUIRED_FIELD_MAILING_ADDRESS_MSG = _(u"Enter your mailing address.") | unknown | codeparrot/codeparrot-clean | ||
{
"kind": "Dashboard",
"apiVersion": "dashboard.grafana.app/v1beta1",
"metadata": {
"name": "v24.table-angular.v42"
},
"spec": {
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"liveNow": false,
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests basic migration with default style pattern (/.*/) containing thresholds and colors. Should convert styles to fieldConfig.defaults with threshold steps.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 1,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "B"
}
],
"title": "Basic Angular Table with Defaults",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests comprehensive migration including: default style with thresholds/colors/unit/decimals/align/colorMode, column overrides with exact name and regex patterns, date formatting, hidden columns, and links with tooltips.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 2,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Complex Table with All Style Features",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests migration of timeseries_aggregations transform to reduce transformation with column mappings (avg-\u003emean, max-\u003emax, min-\u003emin, total-\u003esum, current-\u003elastNotNull, count-\u003ecount).",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 3,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Table with Timeseries Aggregations Transform",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests migration of timeseries_to_rows transform to seriesToRows transformation.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 4,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Table with Timeseries to Rows Transform",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests migration of timeseries_to_columns transform to seriesToColumns transformation.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 5,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Table with Timeseries to Columns Transform",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests migration of table transform to merge transformation. Also tests auto alignment conversion to empty string.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 6,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Table with Merge Transform",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests that existing transformations are preserved and new transformation from old format is appended to the list.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 7,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Table with Existing Transformations",
"transformations": [
{
"id": "filterFieldsByName",
"options": {
"include": {
"names": [
"field1",
"field2"
]
}
}
}
],
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests handling of mixed numeric and string threshold values (int, string, float) with proper type conversion.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 8,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Mixed Threshold Types",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests all color mode mappings: cell-\u003ecolor-background, row-\u003ecolor-background, value-\u003ecolor-text.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 9,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "All Color Modes Test",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests all alignment options: left, center, right, and auto (should convert to empty string).",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 10,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "All Alignment Options",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests both field matcher types: byName for exact matches and byRegexp for regex patterns.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 11,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Field Matcher Types Test",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests various link configurations: with and without tooltip, with and without target blank.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 12,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Link Configuration Test",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Tests various date format patterns and aliases.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 13,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Date Format Variations",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "React table (table2) should not be migrated. Properties should remain unchanged.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 14,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "React Table - Should NOT Migrate",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Angular table without styles property should not be migrated.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 15,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Angular Table without Styles - Should NOT Migrate",
"type": "table"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Non-table panels should remain completely unchanged.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 16,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Non-Table Panel - Should NOT Migrate",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"description": "Other panel types should not be affected by table migration.",
"gridPos": {
"h": 3,
"w": 6,
"x": 0,
"y": 0
},
"id": 17,
"options": {},
"pluginVersion": "",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "default-ds-uid"
},
"refId": "A"
}
],
"title": "Singlestat Panel - Should NOT Migrate",
"type": "stat"
}
],
"preload": false,
"refresh": "",
"schemaVersion": 42,
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
]
},
"timezone": "",
"title": "No Title"
},
"status": {
"conversion": {
"failed": false,
"storedVersion": "v2beta1"
}
}
} | json | github | https://github.com/grafana/grafana | apps/dashboard/pkg/migration/conversion/testdata/output/migrated_dashboards_from_v0_to_v2/v2beta1.v24.table-angular.v1beta1.json |
#!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
# Module maturity / support metadata consumed by Ansible tooling.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}

# Reference documentation rendered by ansible-doc.
# Fix: "compatability" -> "compatibility" (typo in user-facing docs).
DOCUMENTATION = '''
---
module: os_user_group
short_description: Associate OpenStack Identity users and groups
extends_documentation_fragment: openstack
version_added: "2.0"
author: "Monty Taylor (@emonty)"
description:
    - Add and remove users from groups
options:
   user:
     description:
        - Name or id for the user
     required: true
   group:
     description:
        - Name or id for the group.
     required: true
   state:
     description:
       - Should the user be present or absent in the group
     choices: [present, absent]
     default: present
   availability_zone:
     description:
       - Ignored. Present for backwards compatibility
     required: false
requirements:
    - "python >= 2.6"
    - "shade"
'''

EXAMPLES = '''
# Add the demo user to the demo group
- os_user_group:
    cloud: mycloud
    user: demo
    group: demo
'''

# shade is optional at import time; main() reports a clean failure when
# it is missing instead of crashing with an ImportError.
try:
    import shade
    HAS_SHADE = True
except ImportError:
    HAS_SHADE = False
def _system_state_change(state, in_group):
if state == 'present' and not in_group:
return True
if state == 'absent' and in_group:
return True
return False
def main():
    """Ensure an OpenStack user is present in / absent from a group.

    Exits via module.exit_json (with 'changed') on success or
    module.fail_json on cloud errors; supports check mode.
    """
    argument_spec = openstack_full_argument_spec(
        user=dict(required=True),
        group=dict(required=True),
        state=dict(default='present', choices=['absent', 'present']),
    )
    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(argument_spec,
                           supports_check_mode=True,
                           **module_kwargs)

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')

    user = module.params['user']
    group = module.params['group']
    state = module.params['state']

    try:
        cloud = shade.operator_cloud(**module.params)
        in_group = cloud.is_user_in_group(user, group)

        if module.check_mode:
            module.exit_json(changed=_system_state_change(state, in_group))

        # Reuse the same helper as check mode so both paths agree on
        # whether a change is needed (previously duplicated inline).
        changed = _system_state_change(state, in_group)
        if changed:
            if state == 'present':
                cloud.add_user_to_group(user, group)
            else:
                cloud.remove_user_from_group(user, group)
        module.exit_json(changed=changed)
    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e), extra_data=e.extra_data)
# Star imports at the bottom of the file follow the legacy Ansible module
# convention; they provide AnsibleModule, openstack_full_argument_spec and
# openstack_module_kwargs used by main().
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
    main()
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module maturity / support metadata consumed by Ansible tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = '''
---
module: pause
short_description: Pause playbook execution
description:
- Pauses playbook execution for a set amount of time, or until a prompt is acknowledged.
All parameters are optional. The default behavior is to pause with a prompt.
- To pause/wait/sleep per host, use the M(wait_for) module.
- You can use C(ctrl+c) if you wish to advance a pause earlier than it is set to expire or if you need to abort a playbook run entirely.
To continue early press C(ctrl+c) and then C(c). To abort a playbook press C(ctrl+c) and then C(a).
- The pause module integrates into async/parallelized playbooks without any special considerations (see Rolling Updates).
When using pauses with the C(serial) playbook parameter (as in rolling updates) you are only prompted once for the current group of hosts.
- This module is also supported for Windows targets.
version_added: "0.8"
options:
minutes:
description:
- A positive number of minutes to pause for.
seconds:
description:
- A positive number of seconds to pause for.
prompt:
description:
- Optional text to use for the prompt message.
echo:
description:
- Controls whether or not keyboard input is shown when typing.
- Has no effect if 'seconds' or 'minutes' is set.
type: bool
default: 'yes'
version_added: 2.5
author: "Tim Bielawa (@tbielawa)"
notes:
- Starting in 2.2, if you specify 0 or negative for minutes or seconds, it will wait for 1 second, previously it would wait indefinitely.
- This module is also supported for Windows targets.
- User input is not captured or echoed, regardless of echo setting, when minutes or seconds is specified.
'''
EXAMPLES = '''
# Pause for 5 minutes to build app cache.
- pause:
minutes: 5
# Pause until you can verify updates to an application were successful.
- pause:
# A helpful reminder of what to look out for post-update.
- pause:
prompt: "Make sure org.foo.FooOverload exception is not present"
# Pause to get some sensitive input.
- pause:
prompt: "Enter a secret"
echo: no
'''
# Description of the values this module returns, rendered by ansible-doc.
RETURN = '''
user_input:
  description: User input from interactive console
  returned: if no waiting time set
  type: str
  sample: Example user input
start:
  description: Time when started pausing
  returned: always
  type: str
  sample: "2017-02-23 14:35:07.298862"
stop:
  description: Time when ended pausing
  returned: always
  type: str
  sample: "2017-02-23 14:35:09.552594"
delta:
  description: Time paused in seconds
  returned: always
  type: str
  sample: 2
stdout:
  description: Output of pause module
  returned: always
  type: str
  sample: Paused for 0.04 minutes
echo:
  description: Value of echo setting
  returned: always
  type: bool
  sample: true
'''
---
applyTo: "vault/ui/**/*.hbs"
description: "HashiCorp Ember.js UI Handlebars template coding standards"
---
# HashiCorp Ember.js Handlebars Template Guidelines
This document provides Handlebars template coding standards for HashiCorp Ember.js UI applications.
> **Note**: For general project context, framework information, and repository structure, see `ember_general.instructions.md`.
## Template Best Practices
- Check truthiness of arrays directly instead of using `.length` property
- Use string interpolation `"prefix/{{value}}"` instead of `{{concat}}` helper
- Remove unnecessary quotes around dynamic component arguments
- Use `Hds::Link::Inline` for external documentation links instead of `<button>` elements
- Make `selected` attributes dynamic rather than static values - warn if static values are used
- Refactor conditionals to wrap content rather than entire elements when possible
- Avoid inline `style` attributes and `{{style ...}}` helpers - define CSS classes in `.scss` files instead
- Place `data-test-*` selectors as the last attribute on elements
- Remove quotes around dynamic data attributes: `data-test-id={{value}}` not `data-test-id="{{value}}"`
Examples:
```handlebars
{{!-- Good: direct array check --}}
{{#if this.model.allowed_roles}}
{{!-- Bad: unnecessary .length check --}}
{{#if (gt this.model.allowed_roles.length 0)}}
{{!-- Good: string interpolation --}}
@secret="role/{{@model.id}}"
{{!-- Bad: concat helper --}}
@secret={{concat "role/" @model.id}}
{{!-- Good: conditional content, not element --}}
<PH.Title>{{if this.version.isEnterprise "Enterprise" "Community"}} features</PH.Title>
{{!-- Bad: conditional wrapping entire element --}}
{{#if this.version.isEnterprise}}
<PH.Title>Enterprise features</PH.Title>
{{else}}
<PH.Title>Community features</PH.Title>
{{/if}}
{{!-- Good: CSS classes instead of inline styles --}}
<Hds::Button @text="Save" class="custom-button" data-test-save />
{{!-- Bad: inline style attribute --}}
<Hds::Button @text="Save" style="margin-top: 10px;" data-test-save />
{{!-- Bad: style helper --}}
<Hds::Button @text="Save" style={{style margin-top="10px"}} data-test-save />
{{!-- Good: data-test selector at the end --}}
<Hds::Button @text="Save" @icon="loading" disabled={{this.isLoading}} data-test-save />
{{!-- Bad: data-test selector not at the end --}}
<Hds::Button data-test-save @text="Save" @icon="loading" disabled={{this.isLoading}} />
{{!-- Good: no quotes around dynamic values --}}
<div data-test-namespace-link={{option.label}}>
{{!-- Bad: unnecessary quotes --}}
<div data-test-namespace-link="{{option.label}}">
```
---
# Content and Terminology
## Title and Heading Case Rules
- **USE SENTENCE CASE**: All HTML headings (`<h1>`, `<h2>`, `<h3>`, etc.) should use sentence case (only first letter capitalized)
- **NO TITLE CASE**: Avoid title case where every major word is capitalized
- **Component arguments**: Use sentence case for `@title`, `@label`, and similar text properties
- End descriptive text with proper punctuation
- Follow proper grammar rules including ending sentences with periods
- Use consistent terminology for product-specific features and components
Examples:
```handlebars
{{!-- CORRECT: Sentence case in HTML headings --}}
<h2 class="title is-4">Quick actions</h2>
<h3 class="title is-marginless is-6">Configuration settings</h3>
<h1>Authentication methods</h1>
{{!-- INCORRECT: Title case in HTML headings --}}
<h2 class="title is-4">Quick Actions</h2>
<h3 class="title is-marginless is-6">Configuration Settings</h3>
<h1>Authentication Methods</h1>
```
```javascript
// CORRECT: Sentence case in component arguments
@title="Upload user's profile"
@label="Configuration path"
@placeholder="Enter mount path"
// INCORRECT: Title case or inconsistent casing
@title="Upload User's Profile"
@label="Configuration Path"
@placeholder="Enter Mount Path"
```
```handlebars
{{!-- CORRECT: Sentence case in data-test attributes when they contain readable text --}}
data-test-card-subtitle="configuration-settings"
{{!-- Component usage with proper casing --}}
<Hds::Button @text="Create configuration" />
<Hds::Alert @message="Operation completed successfully" />
``` | unknown | github | https://github.com/hashicorp/vault | .github/instructions/generic/ember_hbs.instructions.md |
/*
* Copyright 2020 Google LLC
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
#ifndef BLOCK_H
#define BLOCK_H
#include "basics.h"
#include "record.h"
#include "reftable-block.h"
#include "reftable-blocksource.h"
/*
 * Writes reftable blocks. The block_writer is reused across blocks to minimize
 * allocation overhead.
 */
struct block_writer {
	/* Lazily used zlib stream and output buffer for compressed blocks. */
	struct z_stream_s *zstream;
	unsigned char *compressed;
	size_t compressed_cap;
	/* Destination buffer for block contents; not owned by the writer. */
	uint8_t *block;
	uint32_t block_size;
	/* Offset of the global header. Nonzero in the first block only. */
	uint32_t header_off;
	/* How often to restart keys. */
	uint16_t restart_interval;
	/* Size in bytes of hash values stored in records. */
	uint32_t hash_size;
	/* Offset of next uint8_t to write. */
	uint32_t next;
	/* Restart point offsets collected so far. */
	uint32_t *restarts;
	uint32_t restart_len;
	uint32_t restart_cap;
	/* Copy of the most recently written key. */
	struct reftable_buf last_key;
	/* Scratch buffer used to avoid allocations. */
	struct reftable_buf scratch;
	/* Number of records added so far. */
	int entries;
};
/*
 * Initializes the block writer to write `typ` entries, using `block` as
 * temporary storage. `block` is not owned by the block_writer.
 */
int block_writer_init(struct block_writer *bw, uint8_t typ, uint8_t *block,
		      uint32_t block_size, uint32_t header_off, uint32_t hash_size);

/* Returns the block type (e.g. 'r' for ref records). */
uint8_t block_writer_type(struct block_writer *bw);

/* Attempts to append the record. Returns 0 on success or error code on failure. */
int block_writer_add(struct block_writer *w, struct reftable_record *rec);

/* Appends the key restarts, and compresses the block if necessary. */
int block_writer_finish(struct block_writer *w);

/* Clears out internally allocated block_writer members. */
void block_writer_release(struct block_writer *bw);
/* Iterator for records contained in a single block. */
struct block_iter {
	/* Offset within the block of the next entry to read. */
	uint32_t next_off;
	/* Block being iterated; borrowed, must outlive the iterator. */
	const struct reftable_block *block;
	/* Key for the last entry we read. */
	struct reftable_buf last_key;
	/* Scratch space reused while decoding records. */
	struct reftable_buf scratch;
};

/* Static initializer for a block_iter not yet bound to a block. */
#define BLOCK_ITER_INIT { \
	.last_key = REFTABLE_BUF_INIT, \
	.scratch = REFTABLE_BUF_INIT, \
}
/*
 * Initialize the block iterator with the given block. The iterator will be
 * positioned at the first record contained in the block. The block must remain
 * valid until the end of the iterator's lifetime. It is valid to re-initialize
 * iterators multiple times.
 */
void block_iter_init(struct block_iter *it, const struct reftable_block *block);

/* Position the initialized iterator at the first record of its block. */
void block_iter_seek_start(struct block_iter *it);

/*
 * Position the initialized iterator at the desired record key. It is not an
 * error in case the record cannot be found. If so, a subsequent call to
 * `block_iter_next()` will indicate that the iterator is exhausted.
 */
int block_iter_seek_key(struct block_iter *it, struct reftable_buf *want);

/* Return < 0 for error, 0 for OK, > 0 for EOF. */
int block_iter_next(struct block_iter *it, struct reftable_record *rec);

/* Reset the block iterator to pristine state without releasing its memory. */
void block_iter_reset(struct block_iter *it);

/* Deallocate memory for `it`. The block reader and its block are left intact. */
void block_iter_close(struct block_iter *it);

/* Size of the file header, depending on format version. */
size_t header_size(int version);

/* Size of the file footer, depending on format version. */
size_t footer_size(int version);

#endif
# -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <lxnay@sabayon.org>
@contact: lxnay@sabayon.org
@copyright: Fabio Erculiani
@license: GPL-2
B{Entropy Package Manager Client Core Interface}.
"""
import os
import shutil
import threading
from entropy.core import Singleton
from entropy.locks import EntropyResourcesLock
from entropy.fetchers import UrlFetcher, MultipleUrlFetcher
from entropy.output import TextInterface, bold, red, darkred, blue
from entropy.qa import QAInterface
from entropy.security import System, Repository as RepositorySecurity
from entropy.spm.plugins.factory import get_default_instance as get_spm, \
get_default_class as get_spm_default_class
from entropy.client.interfaces.db import InstalledPackagesRepository
from entropy.client.interfaces.dep import CalculatorsMixin
from entropy.client.interfaces.methods import RepositoryMixin, MiscMixin, \
MatchMixin
from entropy.client.interfaces.package import PackageActionFactory
from entropy.client.interfaces.repository import Repository
from entropy.client.interfaces.settings import ClientSystemSettingsPlugin
from entropy.client.interfaces.sets import Sets
from entropy.client.misc import sharedinstlock, ConfigurationUpdates
from entropy.client.services.interfaces import \
ClientWebServiceFactory, RepositoryWebServiceFactory
from entropy.const import etpConst, const_debug_write, \
const_convert_to_unicode, const_setup_perms
from entropy.core.settings.base import SystemSettings
from entropy.misc import LogFile
from entropy.cache import EntropyCacher
from entropy.i18n import _
import entropy.dump
import entropy.dep
import entropy.tools
class Client(Singleton, TextInterface, CalculatorsMixin,
RepositoryMixin, MiscMixin, MatchMixin):
    def init_singleton(self, indexing = True, installed_repo = None,
        xcache = True, user_xcache = False, repo_validation = True,
        url_fetcher = None, multiple_url_fetcher = None, **kwargs):
        """
        Entropy Client Singleton interface. Your hitchhikers' guide to the
        Galaxy.

        @keyword indexing: enable metadata indexing (default is True)
        @type indexing: bool
        @keyword installed_repo: open installed packages repository? (default
            is True). Accepted values: True = open, False = open but consider
            it not available, -1 = do not even try to open
        @type installed_repo: bool or int
        @keyword xcache: enable on-disk cache (default is True)
        @type xcache: bool
        @keyword user_xcache: enable on-disk cache even for users not in the
            entropy group (default is False). Dangerous, could lead to cache
            inconsistencies.
        @type user_xcache: bool
        @keyword repo_validation: validate all the available repositories
            and automatically exclude the faulty ones
        @type repo_validation: bool
        @keyword url_fetcher: override default entropy.fetchers.UrlFetcher
            class usage. Provide your own implementation of UrlFetcher using
            this argument.
        @type url_fetcher: class or None
        @keyword multiple_url_fetcher: override default
            entropy.fetchers.MultipleUrlFetcher class usage. Provide your own
            implementation of MultipleUrlFetcher using this argument.
        @type multiple_url_fetcher: class or None
        """
        self.__post_acquire_hook_idx = None
        self.__instance_destroyed = False
        self._repo_error_messages_cache = set()
        self._repodb_cache = {}
        self._repodb_cache_mutex = threading.RLock()
        self._memory_db_instances = {}
        self._real_installed_repository = None
        self._real_installed_repository_lock = threading.RLock()
        self._treeupdates_repos = set()
        self._can_run_sys_set_hooks = False
        const_debug_write(__name__, "debug enabled")
        self.safe_mode = 0
        self._indexing = indexing
        self._repo_validation = repo_validation
        # The _real_* attributes below back lazily-constructed objects
        # (settings, cacher, logger, enabled repos); each has its own RLock
        # so the corresponding property can build it on first access.
        self._real_cacher = None
        self._real_cacher_lock = threading.RLock()
        # setup package settings (masking and other stuff)
        self._real_settings = None
        self._real_settings_lock = threading.RLock()
        self._real_settings_client_plg = None
        self._real_settings_client_plg_lock = threading.RLock()
        self._real_logger = None
        self._real_logger_lock = threading.RLock()
        self._real_enabled_repos = None
        self._real_enabled_repos_lock = threading.RLock()
        # Fall back to the default fetcher implementations when the caller
        # did not provide custom classes.
        self._multiple_url_fetcher = multiple_url_fetcher
        self._url_fetcher = url_fetcher
        if url_fetcher is None:
            self._url_fetcher = UrlFetcher
        if multiple_url_fetcher is None:
            self._multiple_url_fetcher = MultipleUrlFetcher
        # Map the tri-state installed_repo keyword onto two booleans:
        # whether to open the repository at all, and whether to expose it.
        self._do_open_installed_repo = True
        self._installed_repo_enable = True
        if installed_repo in (True, None, 1):
            self._installed_repo_enable = True
        elif installed_repo in (False, 0):
            self._installed_repo_enable = False
        elif installed_repo == -1:
            self._installed_repo_enable = False
            self._do_open_installed_repo = False
        # The on-disk cache can be vetoed by the environment, by running
        # from a live medium, or by insufficient group permissions.
        self.xcache = xcache
        shell_xcache = os.getenv("ETP_NOCACHE")
        if shell_xcache:
            self.xcache = False
        # now if we are on live, we should disable it
        # are we running on a livecd? (/proc/cmdline has "cdroot")
        if entropy.tools.islive():
            self.xcache = False
        elif (not entropy.tools.is_user_in_entropy_group()) and not user_xcache:
            self.xcache = False
        # Add Entropy Resources Lock post-acquire hook that cleans
        # repository caches.
        hook_ref = EntropyResourcesLock.add_post_acquire_hook(
            self._resources_post_hook)
        self.__post_acquire_hook_idx = hook_ref
        # enable System Settings hooks
        self._can_run_sys_set_hooks = True
        const_debug_write(__name__, "singleton loaded")
@property
def _settings(self):
"""
Return a SystemSettings object instance.
"""
with self._real_settings_lock:
if self._real_settings is None:
self._real_settings = SystemSettings()
const_debug_write(__name__, "SystemSettings loaded")
# add our SystemSettings plugin
# Make sure we connect Entropy Client plugin
# AFTER client db init
self._real_settings.add_plugin(
self._settings_client_plugin)
return self._real_settings
@property
def _settings_client_plugin(self):
"""
Return the SystemSettings Entropy Client plugin.
"""
with self._real_settings_client_plg_lock:
if self._real_settings_client_plg is None:
plugin = ClientSystemSettingsPlugin(self)
self._real_settings_client_plg = plugin
return self._real_settings_client_plg
@property
def _cacher(self):
    """Lazily start and return the shared EntropyCacher instance."""
    with self._real_cacher_lock:
        if self._real_cacher is None:
            cacher = EntropyCacher()
            const_debug_write(__name__, "EntropyCacher loaded")
            if self.xcache:
                # must be started right here, otherwise the repository
                # cache would be constantly dropped
                cacher.start()
            else:
                # caching is disabled: turn off STASHING_CACHE or we leak
                EntropyCacher.STASHING_CACHE = False
            self._real_cacher = cacher
        return self._real_cacher
@property
def logger(self):
    """Lazily open and return the Entropy Client LogFile instance."""
    with self._real_logger_lock:
        if self._real_logger is None:
            log_obj = LogFile(
                level = self._settings['system']['log_level'],
                filename = etpConst['entropylogfile'],
                header = "[client]")
            const_debug_write(__name__, "Logger loaded")
            self._real_logger = log_obj
        return self._real_logger
@property
def _enabled_repos(self):
    """Compute once and return the list of enabled repository identifiers."""
    with self._real_enabled_repos_lock:
        if self._real_enabled_repos is None:
            enabled = []
            if self._repo_validation:
                # validation fills `enabled` with the repositories that pass
                self._validate_repositories(enabled_repos = enabled)
            else:
                enabled.extend(self._settings['repositories']['order'])
            self._real_enabled_repos = enabled
        return self._real_enabled_repos
def _resources_post_hook(self):
    """
    Hook running after Entropy Resources Lock acquisition.
    Invalidates the in-memory caches of the installed packages
    repository and of every cached repository object.
    """
    with self._real_installed_repository_lock:
        installed = self._real_installed_repository
        if installed is not None:
            installed.clearCache()
    with self._repodb_cache_mutex:
        for cached_repo in self._repodb_cache.values():
            cached_repo.clearCache()
def destroy(self, _from_shutdown = False):
    """
    Destroy this Singleton instance, closing repositories, removing
    SystemSettings plugins added during instance initialization.
    This method should be always called when instance is not used anymore.

    @keyword _from_shutdown: internal flag, True when called by shutdown();
        skips SystemSettings plugin removal since the process is exiting
    @type _from_shutdown: bool
    """
    self.__instance_destroyed = True
    # detach the Entropy Resources Lock hook registered at __init__ time,
    # so this dead instance no longer receives callbacks
    if self.__post_acquire_hook_idx is not None:
        EntropyResourcesLock.remove_post_acquire_hook(
            self.__post_acquire_hook_idx)
        self.__post_acquire_hook_idx = None
    # hasattr() guards: destroy() may run on a partially constructed
    # instance, so each attribute is tested before use
    if hasattr(self, '_installed_repository'):
        inst_repo = self.installed_repository()
        if inst_repo is not None:
            inst_repo.close(_token = InstalledPackagesRepository.NAME)
    if hasattr(self, '_real_logger_lock'):
        with self._real_logger_lock:
            if self._real_logger is not None:
                self._real_logger.close()
    if not _from_shutdown:
        if hasattr(self, '_real_settings') and \
            hasattr(self._real_settings, 'remove_plugin'):
            # shutdown() will terminate the whole process
            # so there is no need to remove plugins from
            # SystemSettings, it wouldn't make any diff.
            if self._real_settings is not None:
                try:
                    self._real_settings.remove_plugin(
                        ClientSystemSettingsPlugin.ID)
                except KeyError:
                    # plugin already removed, nothing to do
                    pass
    self.close_repositories(mask_clear = False)
def shutdown(self):
    """
    Tear the Client down for good: flush caches, destroy() the instance
    and stop any running thread. Call this when the whole process is
    about to be killed.
    """
    # flush first: destroy() may drop content still buffered in the cacher
    self._cacher.sync()
    self.destroy(_from_shutdown = True)
    self._cacher.stop()
    entropy.tools.kill_threads()
@sharedinstlock
def repository_packages_spm_sync(self, repository_identifier, repo_db,
                                 force = False):
    """
    Service method used to sync package names with Source Package Manager
    via metadata stored in Repository dbs collected at server-time.
    Source Package Manager can change package names, categories or slot
    and Entropy repositories must be kept in sync.

    In other words, it checks for /usr/portage/profiles/updates changes,
    of course indirectly, since there is no way entropy.client can directly
    depend on Portage.

    @param repository_identifier: repository identifier which repo_db
        parameter is bound
    @type repository_identifier: string
    @param repo_db: repository database instance
    @type repo_db: entropy.db.EntropyRepository
    @keyword force: force the sync even when digests already match
    @type force: bool
    @return: bool stating if changes have been made
    @rtype: bool
    """
    inst_repo = self.installed_repository()
    if not inst_repo:
        # nothing to do if client db is not available
        return False
    self._treeupdates_repos.add(repository_identifier)
    do_rescan = False
    shell_rescan = os.getenv("ETP_TREEUPDATES_RESCAN")
    if shell_rescan:
        # environment override: always rescan
        do_rescan = True
    # check database digest (the one stored in the remote repository db)
    stored_digest = repo_db.retrieveRepositoryUpdatesDigest(
        repository_identifier)
    if stored_digest == -1:
        # no digest available: cannot compare, force a rescan
        do_rescan = True
    # check stored value in client database
    client_digest = "0"
    if not do_rescan:
        client_digest = \
            inst_repo.retrieveRepositoryUpdatesDigest(
                repository_identifier)
    if do_rescan or (str(stored_digest) != str(client_digest)) or force:
        # reset database tables
        inst_repo.clearTreeupdatesEntries(
            repository_identifier)
        # load updates
        update_actions = repo_db.retrieveTreeUpdatesActions(
            repository_identifier)
        # now filter the required actions
        update_actions = inst_repo.filterTreeUpdatesActions(
            update_actions)
        if update_actions:
            mytxt = "%s: %s." % (
                bold(_("ATTENTION")),
                red(_("forcing packages metadata update")),
            )
            self.output(
                mytxt,
                importance = 1,
                level = "info",
                header = darkred(" * ")
            )
            mytxt = "%s %s." % (
                red(_("Updating system database using repository")),
                blue(repository_identifier),
            )
            self.output(
                mytxt,
                importance = 1,
                level = "info",
                header = darkred(" * ")
            )
            # run stuff
            inst_repo.runTreeUpdatesActions(
                update_actions)
        # store new digest into database
        inst_repo.setRepositoryUpdatesDigest(
            repository_identifier, stored_digest)
        # store new actions
        # NOTE(review): actions are recorded under the installed packages
        # repository name rather than repository_identifier — presumably
        # intentional (they were applied to the installed repo), verify.
        inst_repo.addRepositoryUpdatesActions(
            InstalledPackagesRepository.NAME, update_actions,
            self._settings['repositories']['branch'])
        inst_repo.commit()
        # clear client cache
        inst_repo.clearCache()
        return True
def is_destroyed(self):
    """Return True when destroy() has already been called on this instance."""
    destroyed = self.__instance_destroyed
    return destroyed
def clear_cache(self):
    """
    Clear all the Entropy default cache directory. This function is
    fault tolerant and will never return any exception.
    """
    with self._cacher:
        # while the cacher lock is held, no data can be written to disk:
        # drop all the buffers then remove on-disk data
        self._cacher.discard()
        # clear repositories live cache
        inst_repo = self.installed_repository()
        if inst_repo is not None:
            inst_repo.clearCache()
        with self._repodb_cache_mutex:
            for repo in self._repodb_cache.values():
                repo.clearCache()
        cache_dir = self._cacher.current_directory()
        # each step below is best-effort: bail out silently on I/O errors
        try:
            shutil.rmtree(cache_dir, True)
        except (shutil.Error, IOError, OSError):
            return
        try:
            os.makedirs(cache_dir, 0o775)
        except (IOError, OSError):
            return
        try:
            const_setup_perms(cache_dir, etpConst['entropygid'])
        except (IOError, OSError):
            return
def QA(self):
    """
    Load the Entropy QA interface object.
    @rtype: entropy.qa.QAInterface
    """
    interface = QAInterface()
    # route QA output/interaction through this client's UI callbacks
    interface.output = self.output
    interface.ask_question = self.ask_question
    interface.input_box = self.input_box
    interface.set_title = self.set_title
    return interface
def Settings(self):
    """Return the shared SystemSettings instance object."""
    settings = self._settings
    return settings
def ClientSettings(self):
    """Return the SystemSettings metadata dict of the Entropy Client plugin."""
    plugin_id = ClientSystemSettingsPlugin.ID
    return self._settings[plugin_id]
def Cacher(self):
    """
    Return the shared EntropyCacher object.
    @return: EntropyCacher instance object
    @rtype: entropy.cache.EntropyCacher
    """
    cacher = self._cacher
    return cacher
def PackageActionFactory(self):
    """Return a new PackageActionFactory bound to this Client."""
    return PackageActionFactory(self)
def ConfigurationUpdates(self):
    """Return the Entropy configuration file updates management object."""
    return ConfigurationUpdates(self)
def Spm(self):
    """Return a Source Package Manager interface bound to this Client."""
    return get_spm(self)
def Spm_class(self):
    """Return the default Source Package Manager plugin class."""
    return get_spm_default_class()
def Repositories(self, *args, **kwargs):
    """
    Load the Entropy Repositories manager instance object.
    @return: Repository instance object
    @rtype: entropy.client.interfaces.repository.Repository
    """
    # propagate the client-side GPG preference to the manager
    misc_settings = self.ClientSettings()['misc']
    kwargs['gpg'] = misc_settings['gpg']
    return Repository(self, *args, **kwargs)
def Security(self, *args, **kwargs):
    """
    Load the Entropy Security Advisories interface object.
    @return: Repository Security instance object
    @rtype: entropy.security.System
    """
    return System(self, *args, **kwargs)
def RepositorySecurity(self, keystore_dir = None):
    """
    Load the Entropy Repository Security interface object.
    @keyword keystore_dir: custom GPG keystore path, defaults to the
        client-wide keystore
    @rtype: entropy.security.Repository
    @raise RepositorySecurity.GPGError: GPGError based instances in case
        of problems.
    """
    target_dir = etpConst['etpclientgpgdir'] if keystore_dir is None \
        else keystore_dir
    return RepositorySecurity(keystore_dir = target_dir)
def Sets(self):
    """
    Load the Package Sets interface object.
    @return: Sets instance object
    @rtype: entropy.client.interfaces.sets.Sets
    """
    return Sets(self)
def WebServices(self):
    """
    Load the Entropy Web Services Factory interface, used to obtain a
    WebService object able to talk to repository remote services, when
    available.
    @return: WebServicesFactory instance object
    @rtype: entropy.client.services.interfaces.WebServicesFactory
    """
    return ClientWebServiceFactory(self)
def RepositoryWebServices(self):
    """
    Load the Repository Entropy Web Services Factory interface, used to
    obtain a RepositoryWebService object able to query repository remote
    services for package metadata and general repository status.
    @return: RepositoryWebServiceFactory instance object
    @rtype: entropy.client.services.interfaces.RepositoryWebServiceFactory
    """
    return RepositoryWebServiceFactory(self)
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012 (<http://www.erpsystems.ro>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class res_partner(osv.osv):
    _name = "res.partner"
    _inherit = "res.partner"
    _columns = {
        'nrc' : fields.char('NRC', size=16, help='Registration number at the Registry of Commerce'),
    }

    def _auto_init(self, cr, context=None):
        """Drop the vat/nrc uniqueness indexes after the standard init."""
        res = super(res_partner, self)._auto_init(cr, context=context)
        # Romanian legislation does not make vat/nrc unique or mandatory for
        # every commercial entity: some entities have no NRC at all, the
        # formatting changed a few times, and validity can only be checked
        # online on the Ministry of Finance website — so the uniqueness
        # constraints must go.
        cr.execute("""
            DROP INDEX IF EXISTS res_partner_vat_uniq_for_companies;
            DROP INDEX IF EXISTS res_partner_nrc_uniq_for_companies;
            """)
        return res

    def _commercial_fields(self, cr, uid, context=None):
        """Propagate 'nrc' from the commercial partner like the base fields."""
        base_fields = super(res_partner, self)._commercial_fields(
            cr, uid, context=context)
        return base_fields + ['nrc']

res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | unknown | codeparrot/codeparrot-clean | ||
from __future__ import absolute_import
import sys
import types
from django.apps import AppConfig
from django.test import TestCase
from tenant_schemas import utils
class AppLabelsTestCase(TestCase):
    def setUp(self):
        # names of fake modules registered in sys.modules during a test
        self._modules = set()

    def tearDown(self):
        # drop every fake module so later tests see a clean sys.modules
        for module_name in self._modules:
            sys.modules.pop(module_name, None)

    def set_up_module(self, whole_name):
        """Register a stub module for every dotted prefix of *whole_name*."""
        dotted = ''
        for part in whole_name.split('.'):
            dotted = '%s.%s' % (dotted, part) if dotted else part
            module = types.ModuleType(dotted)
            module.__path__ = ['/tmp']
            self._modules.add(dotted)
            sys.modules[dotted] = module
        return sys.modules[whole_name]

    def test_app_labels(self):
        """
        Verifies that app_labels handle Django 1.7+ AppConfigs properly.
        https://docs.djangoproject.com/en/1.7/ref/applications/
        """
        self.set_up_module('example1')
        apps = self.set_up_module('example2.apps')

        # attach an AppConfig carrying a custom label to `example2.apps`
        class Example2AppConfig(AppConfig):
            name = 'example2'
            label = 'example2_app'  # with different name
            path = '/tmp'  # for whatever reason path is required

        apps.Example2AppConfig = Example2AppConfig
        self.assertEqual(
            utils.app_labels([
                'example1',
                'example2.apps.Example2AppConfig'
            ]),
            ['example1', 'example2_app'],
        )
import subprocess
import os
import shutil
import re
import multiprocessing as mp
from praatio import tgio
from .config import TEMP_DIR
from .helper import thirdparty_binary
from .multiprocessing import transcribe, transcribe_fmllr
from .corpus import AlignableCorpus
from .helper import score, log_kaldi_errors, parse_logs
from .exceptions import KaldiProcessingError
class Transcriber(object):
min_language_model_weight = 7
max_language_model_weight = 17
word_insertion_penalties = [0, 0.5, 1.0]
def __init__(self, corpus, dictionary, acoustic_model, language_model, transcribe_config, temp_directory=None,
             call_back=None, debug=False, verbose=False, evaluation_mode=False, logger=None):
    """Store configuration and immediately build the decoding environment.

    NOTE: construction has side effects — it exports the acoustic model
    into the model directory, creates log directories and runs setup().
    """
    self.logger = logger
    self.corpus = corpus
    self.dictionary = dictionary
    self.acoustic_model = acoustic_model
    self.language_model = language_model
    self.transcribe_config = transcribe_config
    # fall back to the package-wide temporary directory when unset
    self.temp_directory = temp_directory or TEMP_DIR
    # progress callback defaults to plain print
    self.call_back = call_back if call_back is not None else print
    self.verbose = verbose
    self.debug = debug
    self.evaluation_mode = evaluation_mode
    self.acoustic_model.export_model(self.model_directory)
    self.log_dir = os.path.join(self.transcribe_directory, 'log')
    os.makedirs(self.log_dir, exist_ok=True)
    self.setup()
@property
def transcribe_directory(self):
return os.path.join(self.temp_directory, 'transcribe')
@property
def model_directory(self):
return os.path.join(self.temp_directory, 'models')
def get_tree_info(self):
    """Query `tree-info` and return (context_width, central_position)."""
    proc = subprocess.Popen([thirdparty_binary('tree-info'),
                             os.path.join(self.model_directory, 'tree')], text=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, _ = proc.communicate()
    # defaults match a monophone tree when the fields are absent
    context_width = 1
    central_pos = 0
    for line in stdout.split('\n'):
        fields = line.strip().split(' ')
        if fields[0] == 'context-width':
            context_width = int(fields[1])
        elif fields[0] == 'central-position':
            central_pos = int(fields[1])
    return context_width, central_pos
def setup(self):
    """Build the decoding graph (HCLG.fst) and all intermediate artifacts.

    Prepares the dictionary and corpus, then runs the standard Kaldi
    graph-construction pipeline: G (arpa2fst) -> LG -> CLG -> HCLGa ->
    HCLG, plus the const-ARPA rescoring model (G.carpa). Each stage is
    skipped when its output already exists, so a partial build resumes.

    Fixes vs. previous revision: the "Processing data..." log line was
    written with a raw string (r'...\n'), emitting a literal backslash-n
    instead of a newline; the dead commented-out Ldet code, its unused
    path variables and a duplicate dirty_path assignment were removed.

    @raise KaldiProcessingError: when a Kaldi binary reports errors; a
        'dirty' marker file is left so the next run starts from scratch.
    """
    dirty_path = os.path.join(self.model_directory, 'dirty')
    if os.path.exists(dirty_path):  # if there was an error, let's redo from scratch
        shutil.rmtree(self.model_directory)
    self.dictionary.write(disambig=True)
    if isinstance(self.corpus, AlignableCorpus):
        self.corpus.initialize_corpus(self.dictionary)
    else:
        self.corpus.initialize_corpus()
    self.acoustic_model.feature_config.generate_features(self.corpus)
    log_dir = os.path.join(self.model_directory, 'log')
    os.makedirs(log_dir, exist_ok=True)
    context_width, central_pos = self.get_tree_info()
    # paths of every intermediate/final artifact
    small_g_path = os.path.join(self.model_directory, 'small_G.fst')
    med_g_path = os.path.join(self.model_directory, 'med_G.fst')
    carpa_path = os.path.join(self.model_directory, 'G.carpa')
    temp_carpa_path = os.path.join(self.model_directory, 'G.carpa_temp')
    lg_path = os.path.join(self.model_directory, 'LG.fst')
    clg_path = os.path.join(self.model_directory, 'CLG_{}_{}.fst'.format(context_width, central_pos))
    log_path = os.path.join(log_dir, 'hclg.log')
    in_disambig = os.path.join(self.dictionary.phones_dir, 'disambig.int')
    out_disambig = os.path.join(self.model_directory,
                                'disambig_ilabels_{}_{}.int'.format(context_width, central_pos))
    ha_out_disambig = os.path.join(self.model_directory, 'disambig_tid.int')
    ilabels_temp = os.path.join(self.model_directory, 'ilabels_{}_{}'.format(context_width, central_pos))
    tree_path = os.path.join(self.model_directory, 'tree')
    model_path = os.path.join(self.model_directory, 'final.mdl')
    ha_path = os.path.join(self.model_directory, 'Ha.fst')
    hclga_path = os.path.join(self.model_directory, 'HCLGa.fst')
    hclg_path = os.path.join(self.model_directory, 'HCLG.fst')
    words_path = os.path.join(self.model_directory, 'words.txt')
    shutil.copyfile(self.dictionary.words_symbol_path, words_path)
    if os.path.exists(hclg_path):
        # final graph already present, nothing to do
        return
    try:
        self.logger.info('Generating decoding graph...')
        with open(log_path, 'w') as log_file:
            if not os.path.exists(small_g_path):
                # G: grammar FST from the small (first-pass) ARPA LM
                self.logger.info('Generating small_G.fst...')
                arpafst_proc = subprocess.Popen([thirdparty_binary('arpa2fst'), '--disambig-symbol=#0',
                                                 '--read-symbol-table=' + words_path,
                                                 self.language_model.small_arpa_path, small_g_path],
                                                stderr=log_file, stdout=log_file)
                arpafst_proc.communicate()
                self.logger.info('Done!')
            if not os.path.exists(med_g_path):
                # medium LM used for first-pass lattice rescoring
                self.logger.info('Generating med_G.fst...')
                arpafst_proc = subprocess.Popen([thirdparty_binary('arpa2fst'), '--disambig-symbol=#0',
                                                 '--read-symbol-table=' + words_path,
                                                 self.language_model.medium_arpa_path, med_g_path],
                                                stderr=log_file, stdout=log_file)
                arpafst_proc.communicate()
                self.logger.info('Done!')
            if not os.path.exists(carpa_path):
                # const-ARPA model of the big LM: map words to ints and
                # drop n-grams containing out-of-vocabulary words
                self.logger.info('Generating G.carpa...')
                bos_symbol = self.dictionary.words_mapping['<s>']
                eos_symbol = self.dictionary.words_mapping['</s>']
                unk_symbol = self.dictionary.words_mapping['<unk>']
                with open(self.language_model.carpa_path, 'r', encoding='utf8') as f, \
                        open(temp_carpa_path, 'w', encoding='utf8') as outf:
                    current_order = -1
                    num_oov_lines = 0
                    for line in f:
                        line = line.strip()
                        col = line.split()
                        if current_order == -1 and not re.match(r'^\\data\\$', line):
                            continue
                        if re.match(r'^\\data\\$', line):
                            # BUG FIX: was a raw string, writing literal '\n'
                            log_file.write('Processing data...\n')
                            current_order = 0
                            outf.write(line + '\n')
                        elif re.match(r'^\\[0-9]*-grams:$', line):
                            current_order = int(re.sub(r'\\([0-9]*)-grams:$', r'\1', line))
                            log_file.write('Processing {} grams...\n'.format(current_order))
                            outf.write(line + '\n')
                        elif re.match(r'^\\end\\$', line):
                            outf.write(line + '\n')
                        elif not line:
                            if current_order >= 1:
                                outf.write('\n')
                        else:
                            if current_order == 0:
                                outf.write(line + '\n')
                            else:
                                if len(col) > 2 + current_order or len(col) < 1 + current_order:
                                    raise Exception('Bad line in arpa lm "{}"'.format(line))
                                prob = col.pop(0)
                                is_oov = False
                                for i in range(current_order):
                                    try:
                                        col[i] = str(self.dictionary.words_mapping[col[i]])
                                    except KeyError:
                                        is_oov = True
                                        num_oov_lines += 1
                                        break
                                if not is_oov:
                                    rest_of_line = ' '.join(col)
                                    outf.write('{}\t{}\n'.format(prob, rest_of_line))
                carpa_proc = subprocess.Popen([thirdparty_binary('arpa-to-const-arpa'),
                                               '--bos-symbol={}'.format(bos_symbol),
                                               '--eos-symbol={}'.format(eos_symbol),
                                               '--unk-symbol={}'.format(unk_symbol),
                                               temp_carpa_path, carpa_path], stdin=subprocess.PIPE,
                                              stderr=log_file,
                                              stdout=log_file)
                carpa_proc.communicate()
                os.remove(temp_carpa_path)
                self.logger.info('Done!')
            if not os.path.exists(lg_path):
                # LG: compose lexicon with grammar, determinize + minimize
                self.logger.info('Generating LG.fst...')
                temp_compose_path = os.path.join(self.model_directory, 'LG.temp')
                compose_proc = subprocess.Popen([thirdparty_binary('fsttablecompose'),
                                                 self.dictionary.disambig_path, small_g_path,
                                                 temp_compose_path],
                                                stderr=log_file)
                compose_proc.communicate()
                temp2_compose_path = os.path.join(self.model_directory, 'LG.temp2')
                determinize_proc = subprocess.Popen([thirdparty_binary('fstdeterminizestar'),
                                                     '--use-log=true', temp_compose_path,
                                                     temp2_compose_path],
                                                    stderr=log_file)
                determinize_proc.communicate()
                os.remove(temp_compose_path)
                minimize_proc = subprocess.Popen([thirdparty_binary('fstminimizeencoded'),
                                                  temp2_compose_path, temp_compose_path],
                                                 stdout=subprocess.PIPE, stderr=log_file)
                minimize_proc.communicate()
                os.remove(temp2_compose_path)
                push_proc = subprocess.Popen([thirdparty_binary('fstpushspecial'),
                                              temp_compose_path, lg_path],
                                             stderr=log_file)
                push_proc.communicate()
                os.remove(temp_compose_path)
                self.logger.info('Done!')
            if not os.path.exists(clg_path):
                # CLG: add phonetic context, arc-sort for composition
                self.logger.info('Generating CLG.fst...')
                compose_proc = subprocess.Popen([thirdparty_binary('fstcomposecontext'),
                                                 '--context-size={}'.format(context_width),
                                                 '--central-position={}'.format(central_pos),
                                                 '--read-disambig-syms={}'.format(in_disambig),
                                                 '--write-disambig-syms={}'.format(out_disambig),
                                                 ilabels_temp, lg_path],
                                                stdout=subprocess.PIPE, stderr=log_file)
                sort_proc = subprocess.Popen([thirdparty_binary('fstarcsort'), '--sort_type=ilabel',
                                              '-', clg_path],
                                             stdin=compose_proc.stdout, stderr=log_file)
                sort_proc.communicate()
                self.logger.info('Done!')
            if not os.path.exists(hclga_path):
                # HCLGa: compose the H transducer in, clean up symbols
                self.logger.info('Generating HCLGa.fst...')
                make_h_proc = subprocess.Popen([thirdparty_binary('make-h-transducer'),
                                                '--disambig-syms-out={}'.format(ha_out_disambig),
                                                '--transition-scale={}'.format(
                                                    self.transcribe_config.transition_scale),
                                                ilabels_temp, tree_path, model_path, ha_path],
                                               stderr=log_file, stdout=log_file)
                make_h_proc.communicate()
                temp_compose_path = os.path.join(self.model_directory, 'HCLGa.temp')
                compose_proc = subprocess.Popen([thirdparty_binary('fsttablecompose'), ha_path,
                                                 clg_path, temp_compose_path], stderr=log_file)
                compose_proc.communicate()
                determinize_proc = subprocess.Popen([thirdparty_binary('fstdeterminizestar'),
                                                     '--use-log=true', temp_compose_path],
                                                    stdout=subprocess.PIPE, stderr=log_file)
                rmsymbols_proc = subprocess.Popen([thirdparty_binary('fstrmsymbols'), ha_out_disambig],
                                                  stdin=determinize_proc.stdout, stdout=subprocess.PIPE,
                                                  stderr=log_file)
                rmeps_proc = subprocess.Popen([thirdparty_binary('fstrmepslocal')],
                                              stdin=rmsymbols_proc.stdout, stdout=subprocess.PIPE,
                                              stderr=log_file)
                minimize_proc = subprocess.Popen([thirdparty_binary('fstminimizeencoded'), '-', hclga_path],
                                                 stdin=rmeps_proc.stdout, stderr=log_file)
                minimize_proc.communicate()
                os.remove(temp_compose_path)
                self.logger.info('Done!')
            # HCLG: add self loops and convert to the const FST type
            self.logger.info('Finishing up...')
            self_loop_proc = subprocess.Popen([thirdparty_binary('add-self-loops'),
                                               '--self-loop-scale={}'.format(
                                                   self.transcribe_config.self_loop_scale),
                                               '--reorder=true', model_path, hclga_path],
                                              stdout=subprocess.PIPE, stderr=log_file)
            convert_proc = subprocess.Popen([thirdparty_binary('fstconvert'), '--fst_type=const',
                                             '-', hclg_path],
                                            stdin=self_loop_proc.stdout, stderr=log_file)
            convert_proc.communicate()
        parse_logs(log_dir)
        self.logger.info('Finished graph construction!')
    except Exception as e:
        # leave a marker so the next setup() rebuilds from scratch
        with open(dirty_path, 'w'):
            pass
        if isinstance(e, KaldiProcessingError):
            log_kaldi_errors(e.error_logs, self.logger)
            e.update_log_file(self.logger.handlers[0].baseFilename)
        raise
def transcribe(self):
    """Run first-pass decoding and, when configured, an fMLLR second pass."""
    self.logger.info('Beginning transcription...')
    dirty_path = os.path.join(self.transcribe_directory, 'dirty')
    if os.path.exists(dirty_path):
        # previous run failed: start over from a clean directory
        shutil.rmtree(self.transcribe_directory, ignore_errors=True)
    os.makedirs(self.log_dir, exist_ok=True)
    try:
        # the module-level helper, not this method (no recursion)
        transcribe(self)
        config = self.transcribe_config
        if config.fmllr and not config.no_speakers:
            self.logger.info('Performing speaker adjusted transcription...')
            transcribe_fmllr(self)
    except Exception as e:
        # leave a marker so the next run knows to wipe the directory
        with open(dirty_path, 'w'):
            pass
        if isinstance(e, KaldiProcessingError):
            log_kaldi_errors(e.error_logs, self.logger)
            e.update_log_file(self.logger.handlers[0].baseFilename)
        raise
def evaluate(self, output_directory, input_directory=None):
    """
    Score decoded transcripts against the gold corpus text.

    Writes mismatching utterances to transcription_issues.csv in
    output_directory and returns (SER, WER) as percentages.

    @param output_directory: where the issues CSV is written
    @param input_directory: directory of tra.N files; defaults to the
        transcription working directory
    @return: (sentence error rate, word error rate) tuple
    """
    self.logger.info('Evaluating transcripts...')
    transcripts = self._load_transcripts(input_directory)
    # Sentence-level measures
    correct = 0
    incorrect = 0
    # Word-level measures
    total_edits = 0
    total_length = 0
    issues = []
    with mp.Pool(self.corpus.num_jobs) as pool:
        # collect (gold, hypothesis) word-list pairs, then score them
        # in parallel; `score` returns (edit_distance, gold_length)
        to_comp = []
        for utt, pred in transcripts.items():
            g = self.corpus.text_mapping[utt].split()
            h = pred.split()
            if g != h:
                issues.append((utt, g, h))
            to_comp.append((g, h))
        gen = pool.map(score, to_comp)
        for (edits, length) in gen:
            if edits == 0:
                correct += 1
            else:
                incorrect += 1
            total_edits += edits
            total_length += length
    # utterances with gold text but no decoded output count as fully wrong
    for utt, gold in self.corpus.text_mapping.items():
        if utt not in transcripts:
            incorrect += 1
            gold = gold.split()
            total_edits += len(gold)
            total_length += len(gold)
    # NOTE(review): raises ZeroDivisionError when the corpus has no
    # utterances at all — confirm callers guarantee a non-empty corpus
    ser = 100 * incorrect / (correct + incorrect)
    wer = 100 * total_edits / total_length
    output_path = os.path.join(output_directory, 'transcription_issues.csv')
    with open(output_path, 'w', encoding='utf8') as f:
        for utt, g, h in issues:
            g = ' '.join(g)
            h = ' '.join(h)
            f.write('{},{},{}\n'.format(utt, g, h))
    self.logger.info('SER: {:.2f}%, WER: {:.2f}%'.format(ser, wer))
    return ser, wer
def _load_transcripts(self, input_directory=None):
transcripts = {}
lookup = self.dictionary.reversed_word_mapping
if input_directory is None:
input_directory = self.transcribe_directory
if self.transcribe_config.fmllr and not self.transcribe_config.no_speakers:
input_directory = os.path.join(input_directory, 'fmllr')
for j in range(self.corpus.num_jobs):
with open(os.path.join(input_directory, 'tra.{}'.format(j)), 'r', encoding='utf8') as f:
for line in f:
t = line.strip().split(' ')
utt = t[0]
ints = t[1:]
if not ints:
continue
transcription = []
for i in ints:
transcription.append(lookup[int(i)])
transcripts[utt] = ' '.join(transcription)
return transcripts
def export_transcriptions(self, output_directory, source=None):
transcripts = self._load_transcripts(source)
if not self.corpus.segments:
for utt, t in transcripts.items():
relative = self.corpus.file_directory_mapping[utt]
if relative:
speaker_directory = os.path.join(output_directory, relative)
else:
speaker_directory = output_directory
os.makedirs(speaker_directory, exist_ok=True)
outpath = os.path.join(speaker_directory, utt + '.lab')
with open(outpath, 'w', encoding='utf8') as f:
f.write(t)
else:
for filename in self.corpus.file_directory_mapping.keys():
maxtime = self.corpus.get_wav_duration(filename)
speaker_directory = output_directory
try:
if self.corpus.file_directory_mapping[filename]:
speaker_directory = os.path.join(output_directory, self.corpus.file_directory_mapping[filename])
except KeyError:
pass
tiers = {}
if self.transcribe_config.no_speakers:
speaker = 'speech'
tiers[speaker] = tgio.IntervalTier(speaker, [], minT=0, maxT=maxtime)
else:
for speaker in self.corpus.speaker_ordering[filename]:
tiers[speaker] = tgio.IntervalTier(speaker, [], minT=0, maxT=maxtime)
tg = tgio.Textgrid()
tg.maxTimestamp = maxtime
for utt_name, text in transcripts.items():
seg = self.corpus.segments[utt_name]
utt_filename, begin, end = seg['file_name'], seg['begin'], seg['end']
if utt_filename != filename:
continue
if self.transcribe_config.no_speakers:
speaker = 'speech'
else:
speaker = self.corpus.utt_speak_mapping[utt_name]
begin = float(begin)
end = float(end)
tiers[speaker].entryList.append((begin, end, text))
for t in tiers.values():
tg.addTier(t)
tg.save(os.path.join(speaker_directory, filename + '.TextGrid'), useShortForm=False) | unknown | codeparrot/codeparrot-clean | ||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import webob
from nova.api.openstack.compute.contrib import extended_virtual_interfaces_net
from nova.api.openstack import wsgi
from nova import compute
from nova import network
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
# Instance UUID returned by the stubbed compute API "get" call below.
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
# Canned virtual interfaces returned by the stubbed network API; each maps
# a MAC address to the UUID of the network it is plugged into.
FAKE_VIFS = [{'uuid': '00000000-0000-0000-0000-00000000000000000',
              'address': '00-00-00-00-00-00',
              'net_uuid': '00000000-0000-0000-0000-00000000000000001'},
             {'uuid': '11111111-1111-1111-1111-11111111111111111',
              'address': '11-11-11-11-11-11',
              'net_uuid': '11111111-1111-1111-1111-11111111111111112'}]
# Network UUIDs (sorted) the extension is expected to expose per VIF.
EXPECTED_NET_UUIDS = ['00000000-0000-0000-0000-00000000000000001',
                      '11111111-1111-1111-1111-11111111111111112']
def compute_api_get(self, context, instance_id):
    """Stub for compute.api.API.get() returning a minimal fake instance."""
    return {'uuid': FAKE_UUID, 'id': instance_id,
            'instance_type_id': 1, 'host': 'bob'}
def get_vifs_by_instance(self, context, instance_id):
    """Stub for network.api.API.get_vifs_by_instance(): canned VIF list."""
    return FAKE_VIFS
def get_vif_by_mac_address(self, context, mac_address):
    """Stub mapping the two known fake MAC addresses to their net UUIDs."""
    if mac_address != "00-00-00-00-00-00":
        return {'net_uuid': '11111111-1111-1111-1111-11111111111111112'}
    return {'net_uuid': '00000000-0000-0000-0000-00000000000000001'}
class ExtendedServerVIFNetTest(test.TestCase):
    """JSON tests for the OS-EXT-VIF-NET virtual-interfaces extension."""
    content_type = 'application/json'
    prefix = "%s:" % extended_virtual_interfaces_net. \
        Extended_virtual_interfaces_net.alias

    def setUp(self):
        super(ExtendedServerVIFNetTest, self).setUp()
        # stub the compute/network API calls with the canned module data
        self.stubs.Set(compute.api.API, "get",
                       compute_api_get)
        self.stubs.Set(network.api.API, "get_vifs_by_instance",
                       get_vifs_by_instance)
        self.stubs.Set(network.api.API, "get_vif_by_mac_address",
                       get_vif_by_mac_address)
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Virtual_interfaces',
                                    'Extended_virtual_interfaces_net'])

    def _make_request(self, url):
        """Issue `url` against the minimal WSGI app, return the response."""
        req = webob.Request.blank(url)
        req.headers['Accept'] = self.content_type
        res = req.get_response(fakes.wsgi_app(init_only=(
            'os-virtual-interfaces', 'OS-EXT-VIF-NET')))
        return res

    def _get_vifs(self, body):
        """Extract the virtual interface list from a response body."""
        return jsonutils.loads(body).get('virtual_interfaces')

    def _get_net_id(self, vifs):
        """Yield the extension-prefixed net_id of each VIF."""
        for vif in vifs:
            yield vif['%snet_id' % self.prefix]

    def assertVIFs(self, vifs):
        """Assert every VIF exposes the expected extended net_id."""
        result = []
        for net_id in self._get_net_id(vifs):
            result.append(net_id)
        # BUG FIX: the original called bare `sorted(result)` and discarded
        # the returned list, so the comparison order was never normalized;
        # sort in place instead.
        result.sort()
        for i, net_uuid in enumerate(result):
            self.assertEqual(net_uuid, EXPECTED_NET_UUIDS[i])

    def test_get_extend_virtual_interfaces_list(self):
        res = self._make_request('/v2/fake/servers/abcd/os-virtual-interfaces')
        self.assertEqual(res.status_int, 200)
        self.assertVIFs(self._get_vifs(res.body))
class ExtendedServerVIFNetSerializerTest(ExtendedServerVIFNetTest):
    """XML variant of the extension tests; reuses the JSON test methods."""
    content_type = 'application/xml'
    prefix = "{%s}" % extended_virtual_interfaces_net. \
        Extended_virtual_interfaces_net.namespace

    def setUp(self):
        super(ExtendedServerVIFNetSerializerTest, self).setUp()
        self.namespace = wsgi.XMLNS_V11
        self.serializer = extended_virtual_interfaces_net. \
            ExtendedVirtualInterfaceNetTemplate()

    def _get_vifs(self, body):
        """Parse the XML body and return its child VIF elements."""
        root = etree.XML(body)
        return root.getchildren()

    def _get_net_id(self, vifs):
        """Yield the namespaced net_id attribute of each VIF element."""
        for vif in vifs:
            yield vif.attrib['%snet_id' % self.prefix]
#! /usr/bin/env python
# Copyright 2019 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds nanopb protos by calling a proto generator wrapper.
Example usage:
python Firebase/Core/nanopb_build_protos.py \
--nanopb \
--protos_dir=Firebase/Core/Protos/ \
--pythonpath=Firebase/Core/nanopb_temp/generator/ \
--output_dir=Firebase/Core/Protos/
"""
from __future__ import print_function
import sys
import argparse
import os
import os.path
import re
import subprocess
OBJC_GENERATOR='nanopb_proto_generator.py'
COPYRIGHT_NOTICE = '''
/*
* Copyright 2019 Google
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'''.lstrip()
def main():
    """Parse CLI flags and invoke the requested proto generators.

    Bug fix: flags declared with ``action='store_true'`` default to
    ``False``, never ``None``, so the original
    ``args.nanopb is None and args.objc is None`` check could never
    fire. Test falsiness instead, so running with no generator flag
    prints usage and exits as intended.
    """
    parser = argparse.ArgumentParser(
        description='Generates proto messages.')
    parser.add_argument(
        '--nanopb', action='store_true',
        help='Generates nanopb messages.')
    parser.add_argument(
        '--objc', action='store_true',
        help='Generates Objective-C messages.')
    parser.add_argument(
        '--protos_dir',
        help='Source directory containing .proto files.')
    parser.add_argument(
        '--output_dir', '-d',
        help='Directory to write files; subdirectories will be created.')
    parser.add_argument(
        '--protoc', default='protoc',
        help='Location of the protoc executable')
    parser.add_argument(
        '--pythonpath',
        help='Location of the protoc python library.')
    parser.add_argument(
        '--include', '-I', action='append', default=[],
        help='Adds INCLUDE to the proto path.')

    args = parser.parse_args()

    if not args.nanopb and not args.objc:
        parser.print_help()
        sys.exit(1)

    # Default the proto source and output dirs relative to this script.
    if args.protos_dir is None:
        root_dir = os.path.abspath(os.path.dirname(__file__))
        args.protos_dir = os.path.join(root_dir, 'protos')

    if args.output_dir is None:
        root_dir = os.path.abspath(os.path.dirname(__file__))
        args.output_dir = os.path.join(root_dir, 'protogen-please-supply-an-outputdir')

    all_proto_files = collect_files(args.protos_dir, '.proto')

    if args.nanopb:
        NanopbGenerator(args, all_proto_files).run()

    # The ObjC protobuf runtime ships the well-known types already.
    proto_files = remove_well_known_protos(all_proto_files)

    if args.objc:
        ObjcProtobufGenerator(args, proto_files).run()
class NanopbGenerator(object):
    """Builds and runs the nanopb plugin to protoc."""

    def __init__(self, args, proto_files):
        # args: parsed CLI namespace (protoc path, includes, output_dir).
        # proto_files: .proto paths to compile (well-known types included).
        self.args = args
        self.proto_files = proto_files

    def run(self):
        """Performs the action of the generator."""
        nanopb_out = os.path.join(self.args.output_dir, 'nanopb')
        mkdir(nanopb_out)

        self.__run_generator(nanopb_out)

        # Post-process generated sources: prepend the license header,
        # strip extern "C" guards, rename `delete` fields for C++, and
        # rewrite <pb.h> includes to the module-qualified form.
        sources = collect_files(nanopb_out, '.nanopb.h', '.nanopb.c')
        post_process_files(
            sources,
            add_copyright,
            nanopb_remove_extern_c,
            nanopb_rename_delete,
            nanopb_use_module_import
        )

    def __run_generator(self, out_dir):
        """Invokes protoc using the nanopb plugin."""
        cmd = protoc_command(self.args)

        gen = os.path.join(os.path.dirname(__file__), OBJC_GENERATOR)
        cmd.append('--plugin=protoc-gen-nanopb=%s' % gen)

        nanopb_flags = [
            '--extension=.nanopb',
            '--source-extension=.c',
            '--no-timestamp'
        ]
        # Include paths must also be forwarded to the plugin itself.
        nanopb_flags.extend(['-I%s' % path for path in self.args.include])
        cmd.append('--nanopb_out=%s:%s' % (' '.join(nanopb_flags), out_dir))

        cmd.extend(self.proto_files)

        run_protoc(self.args, cmd)
def protoc_command(args):
    """Return the base protoc invocation, seeded with -I include paths."""
    base = [args.protoc]
    if args.include is not None:
        base += ['-I=%s' % inc for inc in args.include]
    return base
def run_protoc(args, cmd):
    """Actually runs the given protoc command.

    Args:
      args: The command-line args (including pythonpath)
      cmd: The command to run expressed as a list of strings

    NOTE(review): on failure this only prints the error and returns
    normally, so callers cannot detect a failed compile. Presumably
    deliberate best-effort behavior — confirm before changing.
    """
    kwargs = {}
    if args.pythonpath:
        env = os.environ.copy()
        old_path = env.get('PYTHONPATH')
        # Prepend the user-supplied path so its protoc python lib wins.
        env['PYTHONPATH'] = os.path.expanduser(args.pythonpath)
        if old_path is not None:
            env['PYTHONPATH'] += os.pathsep + old_path
        kwargs['env'] = env

    try:
        print(subprocess.check_output(cmd, stderr=subprocess.STDOUT, **kwargs))
    except subprocess.CalledProcessError as error:
        print('command failed: ', ' '.join(cmd), '\nerror: ', error.output)
def remove_well_known_protos(filenames):
    """Remove "well-known" protos for objc.

    On those platforms the google/protobuf types come with the protobuf
    runtime; only nanopb needs them compiled.

    Args:
      filenames: A list of filenames, each naming a .proto file.

    Returns:
      The filenames with members of google/protobuf removed.
    """
    kept = []
    for name in filenames:
        if 'protos/google/protobuf/' not in name:
            kept.append(name)
    return kept
def post_process_files(filenames, *processors):
    """Rewrite each file by piping its lines through every processor in order."""
    for filename in filenames:
        with open(filename, 'r') as fd:
            lines = fd.readlines()
        for processor in processors:
            lines = processor(lines)
        write_file(filename, lines)
def write_file(filename, lines):
    """Ensure the parent directory exists, then write *lines* to *filename*."""
    mkdir(os.path.dirname(filename))
    content = ''.join(lines)
    with open(filename, 'w') as out:
        out.write(content)
def add_copyright(lines):
    """Prepend the Apache 2.0 copyright banner (plus a blank line) to *lines*."""
    return [COPYRIGHT_NOTICE, '\n'] + list(lines)
def nanopb_remove_extern_c(lines):
    """Removes extern "C" directives from nanopb code.

    Drops everything from a ``#ifdef __cplusplus`` line up to and
    including the matching ``#endif`` line.

    Args:
      lines: A nanopb-generated source file, split into lines.

    Returns:
      A list of strings with the extern "C" guard blocks removed.
    """
    kept = []
    skipping = False
    for line in lines:
        if skipping:
            if '#endif' in line:
                skipping = False
        elif '#ifdef __cplusplus' in line:
            skipping = True
        else:
            kept.append(line)
    return kept
def nanopb_rename_delete(lines):
    """Renames a delete symbol to delete_.

    nanopb happily emits a struct member named ``delete`` for a proto
    field of that name; fine in C, a keyword clash in C++.

    Args:
      lines: The lines to fix.

    Returns:
      The lines, fixed.
    """
    pattern = re.compile(r'\bdelete\b')
    fixed = []
    for line in lines:
        fixed.append(pattern.sub('delete_', line))
    return fixed
def nanopb_use_module_import(lines):
    """Changes #include <pb.h> to include <nanopb/pb.h>"""
    # The replacement string is assembled at runtime so Copybara cannot
    # rewrite it (same trick as the original).
    target = '{}include <nanopb/pb.h>'.format("#")
    return [line.replace('#include <pb.h>', target) for line in lines]
def strip_trailing_whitespace(lines):
    """Drop trailing whitespace from every line, re-adding the newline."""
    return ['%s\n' % line.rstrip() for line in lines]
def objc_flatten_imports(lines):
    """Flattens the import statements for compatibility with CocoaPods."""
    pattern = re.compile(r'#import ".*/')
    flattened = []
    for line in lines:
        flattened.append(pattern.sub('#import "', line))
    return flattened
def objc_strip_extension_registry(lines):
    """Removes +extensionRegistry method bodies from generated classes.

    Skips from the method's opening line through the first top-level
    closing brace (a line that is exactly ``}\\n``).
    """
    kept = []
    in_method = False
    for line in lines:
        if '+ (GPBExtensionRegistry*)extensionRegistry {' in line:
            in_method = True
        if in_method:
            if line == '}\n':
                in_method = False
        else:
            kept.append(line)
    return kept
def collect_files(root_dir, *extensions):
    """Finds files with the given extensions in the root_dir.

    Args:
      root_dir: The directory from which to start traversing.
      *extensions: Filename extensions (including the leading dot) to find.

    Returns:
      A list of filenames, all starting with root_dir, that have one of the
      given extensions. Each file appears at most once.

    Bug fix: the inner loop lacked a ``break``, so a file matching more
    than one extension (e.g. '.h' and '.nanopb.h') was appended once per
    matching extension.
    """
    result = []
    for root, _, files in os.walk(root_dir):
        for basename in files:
            for ext in extensions:
                if basename.endswith(ext):
                    result.append(os.path.join(root, basename))
                    break  # avoid duplicates when extensions overlap
    return result
def mkdir(dirname):
    """Create *dirname* (and any parents) if it does not already exist.

    Bug fix: the original checked ``isdir`` and then called
    ``makedirs``, which races with a concurrent creator (TOCTOU).
    Attempt the creation and swallow the error only when the directory
    actually exists afterwards.
    """
    try:
        os.makedirs(dirname)
    except OSError:
        if not os.path.isdir(dirname):
            raise
if __name__ == '__main__':
main() | unknown | codeparrot/codeparrot-clean | ||
import socket
import time
import pickle
import os
from functools import partial
class Sender:
    """Sends a single file to a receiver over a plain TCP socket.

    Protocol: connect to <client_ip>:6000, send a pickled metadata dict
    ({"filename", "size"}), wait for a one-byte accept flag (b"Y"),
    then stream the file in 4096-byte chunks. Intended for use as a
    context manager (``with Sender(...) as s: s.run()``).
    """

    def __init__(self, client_ip, filename, verbose=False):
        self.sock = socket.socket()
        self.host = client_ip
        self.port = 6000
        self.filename = filename
        self.filesize = os.path.getsize(self.filename)
        self.verbose = verbose

    def run(self):
        """Send the metadata, then the file body if the receiver accepts."""
        self.send_metadata()
        if self.get_accept():
            self.send_file()

    def send_file(self):
        """Stream the file in fixed-size chunks, printing progress."""
        if self.verbose:
            print("sending file {} ({})".format(self.filename, self.filesize))
        sent = 0
        with open(self.filename, "rb") as f:
            for data in iter(partial(f.read, 4096), b''):
                self.sock.sendall(data)
                sent += len(data)
                print("progress: {0:0.1f} %".format((sent / self.filesize) * 100), end=("\r" if sent < self.filesize else "\n"))
        if self.verbose:
            print("completed")

    def send_metadata(self):
        """Send the pickled {filename, size} header.

        Bug fix: uses sendall() — plain send() may transmit only part
        of the pickled payload on a congested socket.
        """
        metadata = {
            "filename": os.path.basename(self.filename),
            "size": os.path.getsize(self.filename)
        }
        if self.verbose:
            print("sending metadata")
        self.sock.sendall(pickle.dumps(metadata))

    def get_accept(self):
        """Return True when the receiver answers with the accept byte."""
        return self.sock.recv(1) == b"Y"

    def __enter__(self):
        self._init_connection()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._close_connection()

    def _init_connection(self):
        """Retry connecting once per second until the receiver is up."""
        if self.verbose:
            print("connecting to {}:{}".format(self.host, self.port))
        while True:
            try:
                self.sock.connect((self.host, self.port))
                break
            except OSError:
                # Bug fix: the original bare ``except:`` also swallowed
                # KeyboardInterrupt/SystemExit; only retry socket errors.
                time.sleep(1)
        if self.verbose:
            print("connected")

    def _close_connection(self):
        if self.verbose:
            print("closing connection")
        self.sock.close()
import json
from django.views.generic import View
from django.utils.safestring import mark_safe
from django.shortcuts import render_to_response, RequestContext
from django.core.exceptions import PermissionDenied
from rest_framework.views import Response
from rest_framework_swagger.urlparser import UrlParser
from rest_framework_swagger.apidocview import APIDocView
from rest_framework.renderers import JSONRenderer
from rest_framework_swagger.docgenerator import DocumentationGenerator
from rest_framework_swagger import SWAGGER_SETTINGS
class SwaggerUIView(View):
    """Renders the Swagger UI index page, gated by SWAGGER_SETTINGS."""

    def get(self, request, *args, **kwargs):
        if not self.has_permission(request):
            raise PermissionDenied()
        template_name = "rest_framework_swagger/index.html"
        settings_payload = {
            'discovery_url': "%sapi-docs/" % request.build_absolute_uri(),
            'api_key': SWAGGER_SETTINGS.get('api_key', ''),
            'token_type': SWAGGER_SETTINGS.get('token_type'),
            'enabled_methods': mark_safe(
                json.dumps(SWAGGER_SETTINGS.get('enabled_methods')))
        }
        data = {'swagger_settings': settings_payload}
        return render_to_response(template_name, RequestContext(request, data))

    def has_permission(self, request):
        """Apply the is_superuser / is_authenticated gates from settings."""
        if SWAGGER_SETTINGS.get('is_superuser') and not request.user.is_superuser:
            return False
        if SWAGGER_SETTINGS.get('is_authenticated') and not request.user.is_authenticated():
            return False
        return True
class SwaggerResourcesView(APIDocView):
    """Emits the Swagger 1.2 resource listing (top-level API paths)."""
    renderer_classes = (JSONRenderer,)

    def get(self, request):
        apis = [{'path': "/%s" % path} for path in self.get_resources()]
        return Response({
            'apiVersion': SWAGGER_SETTINGS.get('api_version', ''),
            'swaggerVersion': '1.2',
            'basePath': self.host.rstrip('/'),
            'apis': apis
        })

    def get_resources(self):
        """Collect top-level API paths, honouring excluded namespaces."""
        urlparser = UrlParser()
        excluded = SWAGGER_SETTINGS.get('exclude_namespaces')
        apis = urlparser.get_apis(exclude_namespaces=excluded)
        return urlparser.get_top_level_apis(apis)
class SwaggerApiView(APIDocView):
    """Emits the Swagger 1.2 API declaration for one resource path."""
    renderer_classes = (JSONRenderer,)

    def get(self, request, path):
        matching_apis = self.get_api_for_resource(path)
        generator = DocumentationGenerator()
        payload = {
            'apis': generator.generate(matching_apis),
            'models': generator.get_models(matching_apis),
            'basePath': self.api_full_uri.rstrip('/'),
        }
        return Response(payload)

    def get_api_for_resource(self, filter_path):
        """Return the url patterns that live under *filter_path*."""
        return UrlParser().get_apis(filter_path=filter_path)
import json
import requests
from requests.exceptions import HTTPError
class SmartLogic(object):
    """Minimal client for the SmartLogic Semaphore (SES) term API."""

    BASE_URL = 'https://cloud.smartlogic.com/'
    SERVICE_URL = 'svc/0dcee7c7-1667-4164-81e5-c16e46f2f74c/ses/v1.2/CombinedModel/'

    def __init__(self, api_key):
        self.api_key = api_key
        # Exchange the API key for a bearer token once, up front.
        self._token = self.token()['access_token']

    @property
    def auth_headers(self):
        """Request headers carrying the cached bearer token."""
        return {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Authorization': 'Bearer {}'.format(self._token)
        }

    def endpoint(self, method, route, **kwargs):
        """Issue an HTTP *method* request to BASE_URL + *route*.

        Returns the decoded JSON payload when the body is JSON,
        otherwise the raw response object.

        Fix: removed the original ``except HTTPError: raise`` wrapper —
        it re-raised unconditionally and handled nothing (dead code).
        """
        url = self.BASE_URL + route
        response = getattr(requests, method)(url, **kwargs)
        try:
            return response.json()
        except json.JSONDecodeError:
            return response

    def token(self):
        """POST the API key to the token endpoint; return the token dict."""
        data = {'grant_type': 'apikey', 'key': self.api_key}
        return self.endpoint('post', 'token', data=data)

    def terms(self, params):
        """Query the combined-model terms endpoint with *params*."""
        return self.endpoint(
            'post',
            self.SERVICE_URL + 'terms.json',
            params=params,
            headers=self.auth_headers
        )
import bz2, re
from lib.stringutil import StringUtil
class TermState:
    """Integer study states for a term, with string conversions."""
    Invalid, Known, Unknown, Ignored, NotSeen = range(5)

    @staticmethod
    def ToString(state):
        """Map an int state to its name; raises on unknown values."""
        for value, name in enumerate(("Invalid", "Known", "Unknown", "Ignored", "NotSeen")):
            if state == value:
                return name
        raise Exception("Unknown int state")

    @staticmethod
    def ToEnum(state):
        """Map a (case-insensitive) name to its int state; raises on unknown."""
        lowered = str(state).lower()
        for value, name in enumerate(("invalid", "known", "unknown", "ignored", "notseen")):
            if lowered == name:
                return value
        raise Exception("Unknown string state")
class ItemType:
    """Integer item kinds.

    NOTE(review): ToString(0) historically returns "Invalid" even though
    the member is named Unknown — preserved for compatibility; confirm
    before unifying with the member name.
    """
    Unknown, Text, Video = range(3)

    @staticmethod
    def ToString(itemType):
        """Map an int item type to its display name; raises on unknown."""
        for value, name in enumerate(("Invalid", "Text", "Video")):
            if itemType == value:
                return name
        raise Exception("Unknown int itemType")
class TermType:
    """Kinds of change recorded in the term log."""
    Unknown, Create, Modify, Delete = range(4)

    @staticmethod
    def ToString(termType):
        """Map an int term type to its name; raises on unknown values."""
        for value, name in enumerate(("Unknown", "Create", "Modify", "Delete")):
            if termType == value:
                return name
        raise Exception("Unknown int termType")
class LanguageDirection:
    """Text direction constants for a language."""
    Unknown = 0
    LeftToRight = 1
    RightToLeft = 2
class User():
    """Account record plus remote-sync credentials."""

    def __init__(self):
        self.userId = None
        self.username = ""
        self.lastLogin = None
        self.accessKey = ""
        self.accessSecret = ""
        self.syncData = False

    def hasCredentials(self):
        """True when both credentials are present and correctly sized
        (20-character key, 50-character secret)."""
        key, secret = self.accessKey, self.accessSecret
        if not key or not secret:
            return False
        return len(key) == 20 and len(secret) == 50
class Language():
    """A language studied by a user, plus parsing/display configuration."""

    # Default tokenizer: word runs (incl. accented chars, apostrophes,
    # hyphens), whitespace, digits, __n__ placeholders, HTML tags, then
    # any single remaining character.
    TERM_REGEX = "([a-zA-ZÀ-ÖØ-öø-ÿĀ-ſƀ-ɏ\’\'-]+)|(\s+)|(\d+)|(__\d+__)|(<\/?[a-z][A-Z0-9]*[^>]*>)|(.)"

    def __init__(self):
        self.languageId = None
        self.name = ""
        self.created = None
        self.modified = None
        self.isArchived = False
        self.languageCode = "--"
        self.userId = None
        self.termRegex = Language.TERM_REGEX
        self.direction = LanguageDirection.LeftToRight
        self.theme = None
        self.sourceCode = "--"

    def toDict(self):
        """Serialize to a plain dict; ids are stringified."""
        return {
            "languageId": str(self.languageId),
            "name": self.name,
            "created": self.created,
            "modified": self.modified,
            "isArchived": self.isArchived,
            "languageCode": self.languageCode,
            "userId": str(self.userId),
            "termRegex": self.termRegex,
            "direction": self.direction,
            "theme": self.theme,
            "sourceCode": self.sourceCode,
        }
class LanguageCode():
    """Simple (code, name) pair describing a language."""

    def __init__(self):
        self.code = ""  # short code, e.g. "en"
        self.name = ""  # human-readable name
class LanguagePlugin():
    """Association of a plugin with a language profile."""

    def __init__(self):
        self.pluginId = None
        self.name = ""
        self.description = ""
        self.enabled = False
        self.content = ""
        self.uuid = None
class SharedTerm():
    """A term shared between users; ``lowerPhrase`` shadows ``phrase``."""

    def __init__(self):
        self._phrase = ""
        self.id = None
        self.code = ""
        self.lowerPhrase = ""
        self.basePhrase = ""
        self.sentence = ""
        self.definition = ""
        self.language = ""
        self.source = ""

    @property
    def phrase(self):
        """The raw phrase text."""
        return self._phrase

    @phrase.setter
    def phrase(self, value):
        # Keep the case-folded copy in sync; falsy values fold to "".
        self._phrase = value
        self.lowerPhrase = (value or "").lower()
class Term():
    """A vocabulary term tracked for a user.

    Setting ``phrase`` keeps ``lowerPhrase`` in sync and marks
    multi-word phrases as fragments.
    """

    def __init__(self):
        self.termId = None
        self.created = None
        self.modified = None
        self._phrase = ""
        self._isFragment = False
        self.lowerPhrase = ""
        self.basePhrase = ""
        self.definition = ""
        self.sentence = ""
        self.languageId = None
        self.state = TermState.Unknown
        self.userId = None
        self.itemSourceId = None
        self.language = ""
        self.itemSource = ""
        self.sourceCode = ""
        self.itemSourceCollection = "" #only for search
        self.itemSourceTitle = "" #only for search

    def fullDefinition(self, joinString="<br/>"):
        """Return basePhrase + definition joined by *joinString*.

        Embedded newlines are rewritten to the join string, except for
        the plain-newline variant which keeps them. Unsupported join
        strings return None (preserves historical behavior).

        Refactor: the original had three near-identical branches; they
        are unified here without changing results.
        """
        if joinString not in ("<br/>", "\n", " ; "):
            return None
        fullDef = ""
        if not StringUtil.isEmpty(self.basePhrase):
            fullDef += self.basePhrase + joinString
        if not StringUtil.isEmpty(self.definition):
            fullDef += self.definition
        if joinString == "\n":
            return fullDef
        return re.sub(r"\n", joinString, fullDef)

    @property
    def phrase(self):
        """The raw phrase text."""
        return self._phrase

    @phrase.setter
    def phrase(self, value):
        # Bug fix: guard the substring test — the original did
        # `" " in value`, which raised TypeError when value was None
        # (sibling SharedTerm already guarded with `value or ""`).
        self._phrase = value
        self.lowerPhrase = (value or "").lower()
        if value and " " in value:
            self._isFragment = True

    @property
    def isFragment(self):
        return self._isFragment

    @isFragment.setter
    def isFragment(self, value):
        self._isFragment = value

    def toDict(self):
        """Serialize; state becomes its lower-cased name (historical)."""
        d = {}
        d["termId"] = str(self.termId)
        d["created"] = self.created
        d["modified"] = self.modified
        d["phrase"] = self.phrase
        d["lowerPhrase"] = self.lowerPhrase
        d["basePhrase"] = self.basePhrase
        d["definition"] = self.definition
        d["sentence"] = self.sentence
        d["languageId"] = str(self.languageId)
        d["state"] = TermState.ToString(self.state).lower() #historical
        d["userId"] = str(self.userId)
        d["itemSourceId"] = str(self.itemSourceId)
        d["language"] = self.language
        d["itemSource"] = self.itemSource
        d["isFragment"] = self.isFragment
        return d
class TermLog():
    """Audit-log entry recording a change made to a term."""

    def __init__(self):
        self.entryDate = None
        self.termId = None
        self.state = None
        self.type = TermType.Unknown
        self.languageId = None
        self.userId = None

    def toDict(self):
        """Serialize for storage; enum fields become their string names."""
        return {
            "entryDate": self.entryDate,
            "termId": str(self.termId),
            "state": TermState.ToString(self.state),
            "type": TermType.ToString(self.type),
            "languageId": str(self.languageId),
            "userId": str(self.userId),
        }
class Item():
    """A study item (text or video), optionally parallel (two languages).

    l1Content / l2Content hold bz2-compressed bytes internally; use the
    get*/set* helpers to work with plain strings.
    """

    def __init__(self):
        self.itemId = None
        self.created = None
        self.modified = None
        self.itemType = ItemType.Text
        self.userId = None
        self.collectionName = ""
        self.collectionNo = None
        self.mediaUri = ""
        self.lastRead = None
        self.l1Title = ""
        self.l2Title = ""
        self.l1LanguageId = None
        self.l2LanguageId = None
        self.readTimes = 0
        self.listenedTimes = 0
        self.l1Language = None
        self.l2Language = None
        self.l1Content = None
        self.l2Content = None

    def getL1Content(self):
        """Return the decompressed first-language text, or "" when unset."""
        if self.l1Content is None or StringUtil.isEmpty(self.l1Content):
            return ""
        return bz2.decompress(self.l1Content).decode()

    def setL1Content(self, value):
        """Store *value* bz2-compressed; empty/None clears the field."""
        if value is None or StringUtil.isEmpty(value):
            self.l1Content = None
            return
        self.l1Content = bz2.compress(value.encode())

    def getL2Content(self):
        """Return the decompressed second-language text, or "" when unset."""
        if self.l2Content is None or StringUtil.isEmpty(self.l2Content):
            return ""
        return bz2.decompress(self.l2Content).decode()

    def setL2Content(self, value):
        """Store *value* bz2-compressed; empty/None clears the field."""
        if value is None or StringUtil.isEmpty(value):
            self.l2Content = None
            return
        self.l2Content = bz2.compress(value.encode())

    def hasMedia(self):
        """True when a media URI is attached."""
        return not StringUtil.isEmpty(self.mediaUri)

    def isParallel(self):
        """True when a second-language text is present."""
        return not StringUtil.isEmpty(self.l2Content)

    def name(self):
        """Display name: "<no>. <collection> - <l1 title>", parts optional."""
        name = ""
        if self.collectionNo:
            name += str(self.collectionNo) + ". "
        if not StringUtil.isEmpty(self.collectionName):
            name += self.collectionName + " - "
        name += self.l1Title
        return name

    def toDict(self):
        """Serialize for storage/display; content fields are decompressed."""
        d = {}
        d["itemId"] = str(self.itemId)
        d["created"] = self.created
        d["modified"] = self.modified
        d["itemType"] = self.itemType
        d["userId"] = str(self.userId)
        d["collectionName"] = self.collectionName
        d["collectionNo"] = self.collectionNo
        d["mediaUri"] = self.mediaUri
        d["lastRead"] = self.lastRead
        d["l1Title"] = self.l1Title
        d["l2Title"] = self.l2Title
        d["l1LanguageId"] = str(self.l1LanguageId)
        d["l2LanguageId"] = str(self.l2LanguageId)
        d["readTimes"] = self.readTimes
        d["listenedTimes"] = self.listenedTimes
        d["l1Language"] = self.l1Language
        d["l2Language"] = self.l2Language
        d["isParallel"] = self.isParallel()
        d["hasMedia"] = self.hasMedia()
        if self.l1Content is not None:
            d["l1Content"] = self.getL1Content()
        else:
            d["l1Content"] = ""
        if self.l2Content is not None:
            # Decompression can fail when only a truncated blob was
            # fetched (see original note below); degrade to "".
            try:
                d["l2Content"] = self.getL2Content()
            except ValueError: #Find all only returns 1st 20 bytes
                d["l2Content"] = ""
        else:
            d["l2Content"] = ""
        return d
class Plugin():
    """An installable plugin record."""

    def __init__(self):
        self.pluginId = None
        self.description = ""
        self.name = ""
        self.content = ""
        self.uuid = ""
        self.version = 0
        self.local = False

    def toDict(self):
        """Serialize; pluginId is stringified."""
        return {
            "pluginId": str(self.pluginId),
            "description": self.description,
            "name": self.name,
            "content": self.content,
            "uuid": self.uuid,
            "version": self.version,
            "local": self.local,
        }
class Storage():
    """A plugin key/value storage entry."""

    def __init__(self):
        self.uuid = ""
        self.key = ""
        self.value = ""

    def toDict(self):
        """Serialize; uuid is stringified defensively."""
        return {
            "uuid": str(self.uuid),
            "key": self.key,
            "value": self.value,
        }
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class pos_open_statement(osv.osv_memory):
    """Wizard that opens one cash statement per POS payment journal for
    the current user, then shows the list of cash registers."""
    _name = 'pos.open.statement'
    _description = 'Open Statements'

    def open_statement(self, cr, uid, ids, context=None):
        """Create a draft bank statement for every POS journal and
        return an act_window listing the resulting cash registers.

        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return: act_window dict over account.bank.statement
        """
        data = {}
        mod_obj = self.pool.get('ir.model.data')
        statement_obj = self.pool.get('account.bank.statement')
        sequence_obj = self.pool.get('ir.sequence')
        journal_obj = self.pool.get('account.journal')
        if context is None:
            context = {}

        st_ids = []
        # Journals flagged as usable from the point of sale.
        j_ids = journal_obj.search(cr, uid, [('journal_user','=',1)], context=context)
        if not j_ids:
            raise osv.except_osv(_('No Cash Register Defined!'), _('You have to define which payment method must be available in the point of sale by reusing existing bank and cash through "Accounting / Configuration / Journals / Journals". Select a journal and check the field "PoS Payment Method" from the "Point of Sale" tab. You can also create new payment methods directly from menu "PoS Backend / Configuration / Payment Methods".'))
        for journal in journal_obj.browse(cr, uid, j_ids, context=context):
            # NOTE(review): this search result is never used and shadows
            # the `ids` parameter; confirm it can be removed.
            ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id', '=', uid), ('journal_id', '=', journal.id)], context=context)

            # Number the statement from the journal's own sequence when
            # set, else fall back to the global cash-statement sequence.
            if journal.sequence_id:
                number = sequence_obj.next_by_id(cr, uid, journal.sequence_id.id, context=context)
            else:
                number = sequence_obj.next_by_code(cr, uid, 'account.cash.statement', context=context)

            data.update({
                'journal_id': journal.id,
                'user_id': uid,
                'state': 'draft',
                'name': number
            })
            statement_id = statement_obj.create(cr, uid, data, context=context)
            st_ids.append(int(statement_id))

            # Cash-controlled journals need the explicit open transition.
            if journal.cash_control:
                statement_obj.button_open(cr, uid, [statement_id], context)

        # Resolve the tree/form/search views for the action returned below.
        tree_res = mod_obj.get_object_reference(cr, uid, 'point_of_sale', 'view_cash_statement_pos_tree')
        tree_id = tree_res and tree_res[1] or False
        form_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_bank_statement_form2')
        form_id = form_res and form_res[1] or False
        search_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_account_bank_statement_filter')
        search_id = search_res and search_res[1] or False

        return {
            'type': 'ir.actions.act_window',
            'name': _('List of Cash Registers'),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'account.bank.statement',
            'domain': str([('id', 'in', st_ids)]),
            'views': [(tree_id, 'tree'), (form_id, 'form')],
            'search_view_id': search_id,
        }
/**********************************************************************
vm_insnhelper.c - instruction helper functions. Included into vm.c.
$Author$
Copyright (C) 2007 Koichi Sasada
**********************************************************************/
#include "ruby/internal/config.h"
#include <math.h>
#ifdef HAVE_STDATOMIC_H
#include <stdatomic.h>
#endif
#include "constant.h"
#include "debug_counter.h"
#include "internal.h"
#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/set_table.h"
#include "internal/struct.h"
#include "variable.h"
/* finish iseq array */
#include "insns.inc"
#include "insns_info.inc"
extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
int argc, const VALUE *argv, int priv);
static const struct rb_callcache vm_empty_cc;
static const struct rb_callcache vm_empty_cc_for_super;
/* control stack frame */
static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
/* Return a fresh object of the same real class as EXC with EXC's ivars
 * copied over. Callers use it to duplicate the VM's shared "special"
 * exception objects (see special_exceptions[] uses below) so per-raise
 * state such as the backtrace can be attached without mutating the
 * shared instance. */
VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}
NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
/* Raise the VM's pre-allocated stack-overflow exception on EC. When
 * SETUP is true, attach the current backtrace to a fresh copy of the
 * shared exception; otherwise raise the bare shared object (presumably
 * for cases where little stack remains — confirm). Never returns. */
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
NORETURN(static void vm_stackoverflow(void));
/* Convenience wrapper: raise the stack-overflow exception on the
 * current EC, with a backtrace attached. */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
/* Public entry for machine-stack overflow. Aborts outright during GC
 * (no safe way to raise), raises the pre-built "fatal" exception for
 * critical levels, and otherwise raises the regular stack-overflow
 * exception — with a backtrace only below the signal level. */
void
rb_ec_stack_overflow(rb_execution_context_t *ec, ruby_stack_overflow_critical_level crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit >= rb_stack_overflow_fatal) {
        /* Too critical to run Ruby-level machinery: raise the shared
         * fatal exception object directly. */
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
    ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
}
static inline void stack_check(rb_execution_context_t *ec);
#if VM_CHECK_MODE > 0
/* Under VM_CHECK_MODE >= 2, verify that KLASS may legally carry a
 * callable method entry: a module, an ICLASS whose super is a module,
 * or any class reachable up the super chain to BasicObject. In cheaper
 * check modes it only requires KLASS to be non-zero. */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        /* Deliberate fallthrough to T_MODULE when super is a module. */
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
      case T_MODULE:
        return TRUE;
    }
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
/* A method entry is callable when it is NULL, or when it is a ment
 * imemo whose defined_class passes callable_class_p(). */
static int
callable_method_entry_p(const rb_callable_method_entry_t *cme)
{
    if (cme == NULL) {
        return TRUE;
    }
    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment), "imemo_type:%s", rb_imemo_name(imemo_type((VALUE)cme)));
    return callable_class_p(cme->defined_class) ? TRUE : FALSE;
}
/* Validate one pushed frame against the per-magic requirement table in
 * vm_check_frame(): whether SPECVAL must (not) be a block pointer,
 * whether CREF_OR_ME must be a method entry or a CREF, and whether the
 * frame must be a C frame. Calls rb_bug() on any violation. Compiled
 * only when VM_CHECK_MODE > 0. */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    /* bmethod frames always carry their method entry. */
    if (type & VM_FRAME_FLAG_BMETHOD) {
        req_me = TRUE;
    }

    /* VM_ENV_FLAG_LOCAL marks frames whose specval is a block handler. */
    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RBASIC_CLASS((VALUE)iseq) == 0 || // dummy frame for loading
                  RUBY_VM_NORMAL_ISEQ_P(iseq) //argument error
            );
    }
    else {
        /* Non-dummy frames: C-frame-ness must match the iseq's absence. */
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
/* Dispatch table mapping each frame magic to its invariants; the four
 * CHECK columns (BLK, ME, CREF, CFRAME) feed vm_check_frame_detail(),
 * which reports violations via rb_bug(). */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;
// Return the index of the instruction immediately preceding PC by
// walking the encoded iseq from the start (insn_entry advances PC
// before the instruction body runs, so PC points past the insn).
// Aborts via rb_bug() when PC does not fall on an insn boundary.
static unsigned int
previous_insn_index(const rb_iseq_t *iseq, const VALUE *pc)
{
    for (unsigned int pos = 0; pos < ISEQ_BODY(iseq)->iseq_size; ) {
        const int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        const unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
            return pos;
        }
        pos = next_pos;
    }
    rb_bug("failed to find the previous insn");
}
/* Debug aid (VM_CHECK_MODE > 0): detect that an instruction body has
 * overwritten the canary value placed above the stack top, print which
 * instruction did it (name, iseq, pc) and abort. No-op when the canary
 * was never initialized, there is no frame yet, no iseq, or the canary
 * is intact. */
void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch?  */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

    /* rb_bug() is not capable of outputting this large contents.  It
       is designed to run form a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
#if USE_DEBUG_COUNTER
/* Bump debug counters for a frame push: records Ruby<->C transition kind
 * (R2R/R2C/C2R/C2C) and the frame's magic type. Debug builds only. */
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);
RB_DEBUG_COUNTER_INC(frame_push);
if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
/* classify the transition by whether each side is a Ruby frame */
const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
if (prev) {
if (curr) {
RB_DEBUG_COUNTER_INC(frame_R2R);
}
else {
RB_DEBUG_COUNTER_INC(frame_R2C);
}
}
else {
if (curr) {
RB_DEBUG_COUNTER_INC(frame_C2R);
}
else {
RB_DEBUG_COUNTER_INC(frame_C2C);
}
}
}
/* one counter per frame magic type */
switch (type & VM_FRAME_MAGIC_MASK) {
case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block); return;
case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class); return;
case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top); return;
case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc); return;
case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc); return;
case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval); return;
case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy); return;
}
rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
#endif
// Return a poison value to be set above the stack top to verify leafness.
// Returns 0 when canary checking is compiled out (VM_CHECK_MODE == 0).
VALUE
rb_vm_stack_canary(void)
{
#if VM_CHECK_MODE > 0
    return vm_stack_canary;
#else
    return 0;
#endif
}
/* The ep[] layout below vm_push_frame() depends on these exact offsets. */
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
/* Push a new control frame.
 *
 * Lays out the value stack as: local_size Qnil slots, then the three env
 * data words (me/cref, specval, flags), then initializes the new cfp and
 * publishes it to ec->cfp. The asserted ep layout is:
 *   ep[-2] = cref_or_me, ep[-1] = specval, ep[0] = type/flags.
 */
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);
vm_check_frame(type, specval, cref_or_me, iseq);
VM_ASSERT(local_size >= 0);
/* check stack overflow */
CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
vm_check_canary(ec, sp);
/* setup vm value stack */
/* initialize local variables */
for (int i=0; i < local_size; i++) {
*sp++ = Qnil;
}
/* setup ep with managing data */
*sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
*sp++ = specval /* ep[-1] / block handler or prev env ptr */;
*sp++ = type; /* ep[-0] / ENV_FLAGS */
/* setup new frame */
*cfp = (const struct rb_control_frame_struct) {
.pc = pc,
.sp = sp,
.iseq = iseq,
.self = self,
.ep = sp - 1,
.block_code = NULL,
#if VM_DEBUG_BP_CHECK
.bp_check = sp,
#endif
.jit_return = NULL,
};
/* Ensure the initialization of `*cfp` above never gets reordered with the update of `ec->cfp` below.
 This is a no-op in all cases we've looked at (https://godbolt.org/z/3oxd1446K), but should guarantee it for all
 future/untested compilers/platforms. */
#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
atomic_signal_fence(memory_order_seq_cst);
#endif
ec->cfp = cfp;
if (VMDEBUG == 2) {
SDR();
}
vm_push_frame_debug_counter_inc(ec, cfp, type);
}
/* Pop the current control frame without running the pending-interrupt
 * check (contrast with vm_pop_frame(), which calls RUBY_VM_CHECK_INTS). */
void
rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
{
    if (VMDEBUG == 2) {
        SDR();
    }
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(ec->cfp);
}
/* return TRUE if the frame is finished */
/* Pops *cfp*, checking pending interrupts first; the FINISH flag is read
 * from ep before the pop so the caller learns whether to leave the VM loop. */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
if (VMDEBUG == 2) SDR();
RUBY_VM_CHECK_INTS(ec);
ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
return flags & VM_FRAME_FLAG_FINISH;
}
/* Public wrapper: pop the execution context's current frame. */
void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
// it pushes pseudo-frame with fname filename.
// Returns the dummy iseq (as a VALUE) so the caller can keep it alive /
// identify the pushed pseudo-frame.
VALUE
rb_vm_push_frame_fname(rb_execution_context_t *ec, VALUE fname)
{
/* local forward declaration; defined elsewhere in the VM */
rb_iseq_t *rb_iseq_alloc_with_dummy_path(VALUE fname);
rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
vm_push_frame(ec,
              dmy_iseq, //const rb_iseq_t *iseq,
              VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH, // VALUE type,
              ec->cfp->self, // VALUE self,
              VM_BLOCK_HANDLER_NONE, // VALUE specval,
              Qfalse, // VALUE cref_or_me,
              NULL, // const VALUE *pc,
              ec->cfp->sp, // VALUE *sp,
              0, // int local_size,
              0); // int stack_max
return (VALUE)dmy_iseq;
}
/* method dispatch */

/* Build (but do not raise) an ArgumentError describing an arity mismatch:
 * "given N, expected M", "expected M+" (unlimited), or "expected M..K". */
static inline VALUE
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
    if (min != max) {
        /* exact arity needs no suffix; otherwise spell out the range */
        if (max == UNLIMITED_ARGUMENTS) {
            rb_str_cat_cstr(err_mess, "+");
        }
        else {
            rb_str_catf(err_mess, "..%d", max);
        }
    }
    rb_str_cat_cstr(err_mess, ")");
    return rb_exc_new3(rb_eArgError, err_mess);
}
void
rb_error_arity(int argc, int min, int max)
{
rb_exc_raise(rb_arity_error_new(argc, min, max));
}
/* lvar */
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));
/* Slow path for writing into an escaped env: register the env object with
 * the GC write barrier, force the write, then clear WB_REQUIRED so
 * subsequent writes can take the fast path again. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
/* remember env value forcibly */
rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
VM_FORCE_WRITE(&ep[index], v);
VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
// YJIT assumes this function never runs GC
/* Write *v* into ep[index]: plain stack write when no write barrier is
 * required, otherwise go through the write-barrier slow path. */
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    const VALUE env_flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (UNLIKELY(env_flags & VM_ENV_FLAG_WB_REQUIRED)) {
        vm_env_write_slowpath(ep, index, v);
    }
    else {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
}
/* Public wrapper around vm_env_write() for code outside this file. */
void
rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
vm_env_write(ep, index, v);
}
/* Convert a block handler into a Proc VALUE:
 * none -> nil, captured iseq/ifunc block -> new Proc, Symbol -> sym.to_proc,
 * Proc handler -> the Proc itself. */
VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
if (block_handler == VM_BLOCK_HANDLER_NONE) {
return Qnil;
}
else {
switch (vm_block_handler_type(block_handler)) {
case block_handler_type_iseq:
case block_handler_type_ifunc:
return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
case block_handler_type_symbol:
return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
case block_handler_type_proc:
return VM_BH_TO_PROC(block_handler);
default:
VM_UNREACHABLE(rb_vm_bh_to_procval);
}
}
}
/* svar */
#if VM_CHECK_MODE > 0
/* Debug-only sanity check: an svar slot may legally hold an imemo of type
 * svar, cref, or ment; anything else aborts the process. */
static int
vm_svar_valid_p(VALUE svar)
{
if (RB_TYPE_P((VALUE)svar, T_IMEMO)) {
switch (imemo_type(svar)) {
case imemo_svar:
case imemo_cref:
case imemo_ment:
return TRUE;
default:
break;
}
}
rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
return FALSE;
}
#endif
/* Fetch the svar slot for a local ep: from ep[-2] for an ordinary local env,
 * or from ec->root_svar when *lep* is the root (or lep is NULL).
 * May return Qfalse (cast) when no svar has been created yet. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
VALUE svar;
if (lep && (ec == NULL || ec->root_lep != lep)) {
svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
}
else {
svar = ec->root_svar;
}
VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));
return (struct vm_svar *)svar;
}
/* Store *svar* into the slot that lep_svar() reads, using the appropriate
 * write barrier for env vs. root storage. */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
VM_ASSERT(vm_svar_valid_p((VALUE)svar));
if (lep && (ec == NULL || ec->root_lep != lep)) {
vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
}
else {
RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
}
}
/* Read a special variable ($_, $~, or an extra slot) from the svar of the
 * given local ep. Returns Qnil when no svar exists or the slot is unset. */
static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);
    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        return Qnil;
    }
    if (key == VM_SVAR_LASTLINE) {
        return svar->lastline;
    }
    if (key == VM_SVAR_BACKREF) {
        return svar->backref;
    }
    /* flip-flop & friends live in the `others` array */
    const VALUE extras = svar->others;
    if (NIL_P(extras)) {
        return Qnil;
    }
    return rb_ary_entry(extras, key - VM_SVAR_EXTRA_START);
}
/* Allocate a fresh svar imemo with all three slots nil. The casts strip
 * const so the freshly-allocated fields can be initialized in place. */
static struct vm_svar *
svar_new(VALUE obj)
{
struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);
*((VALUE *)&svar->lastline) = Qnil;
*((VALUE *)&svar->backref) = Qnil;
*((VALUE *)&svar->others) = Qnil;
return svar;
}
/* Write a special variable into the svar of the given local ep, creating
 * the svar (and, for extra keys, its backing array) on demand. */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
struct vm_svar *svar = lep_svar(ec, lep);
if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
/* slot held Qfalse or a cref/ment: wrap it in a new svar */
lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
}
switch (key) {
case VM_SVAR_LASTLINE:
RB_OBJ_WRITE(svar, &svar->lastline, val);
return;
case VM_SVAR_BACKREF:
RB_OBJ_WRITE(svar, &svar->backref, val);
return;
default: {
VALUE ary = svar->others;
if (NIL_P(ary)) {
RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
}
rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
}
}
}
/* Read a special variable for the `getspecial` instruction.
 * type == 0      : plain svar read by key.
 * type & 1       : named back-ref ($&, $`, $', $+), selector in type >> 1.
 * otherwise      : numbered back-ref $N, with N = type >> 1. */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
VALUE val;
if (type == 0) {
val = lep_svar_get(ec, lep, key);
}
else {
VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
if (type & 0x01) {
switch (type >> 1) {
case '&':
val = rb_reg_last_match(backref);
break;
case '`':
val = rb_reg_match_pre(backref);
break;
case '\'':
val = rb_reg_match_post(backref);
break;
case '+':
val = rb_reg_match_last(backref);
break;
default:
rb_bug("unexpected back-ref");
}
}
else {
val = rb_reg_nth_match((int)(type >> 1), backref);
}
}
return val;
}
/* `defined?` support for back-references: same type encoding as
 * vm_getspecial(). $&, $`, $' reduce to "is group 0 defined", $+ has its
 * own check, numbered refs check group N. */
static inline VALUE
vm_backref_defined(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t type)
{
VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
int nth = 0;
if (type & 0x01) {
switch (type >> 1) {
case '&':
case '`':
case '\'':
break;
case '+':
return rb_reg_last_defined(backref);
default:
rb_bug("unexpected back-ref");
}
}
else {
nth = (int)(type >> 1);
}
return rb_reg_nth_defined(nth, backref);
}
PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
/* Extract a method entry from an ep's me/cref slot.
 * Returns the ment if present, NULL for Qfalse/cref; when can_be_svar, an
 * svar wrapper is unwrapped one level (its cref_or_me is re-checked). */
static rb_callable_method_entry_t *
check_method_entry(VALUE obj, int can_be_svar)
{
if (obj == Qfalse) return NULL;
#if VM_CHECK_MODE > 0
if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
#endif
switch (imemo_type(obj)) {
case imemo_ment:
return (rb_callable_method_entry_t *)obj;
case imemo_cref:
return NULL;
case imemo_svar:
if (can_be_svar) {
return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
}
/* !can_be_svar falls through to the bug report below */
default:
#if VM_CHECK_MODE > 0
rb_bug("check_method_entry: svar should not be there:");
#endif
return NULL;
}
}
/* Like check_method_entry() but with no debug assertions: unexpected imemo
 * types silently yield NULL (the imemo_svar case deliberately falls through
 * to default when !can_be_svar). */
static rb_callable_method_entry_t *
env_method_entry_unchecked(VALUE obj, int can_be_svar)
{
if (obj == Qfalse) return NULL;
switch (imemo_type(obj)) {
case imemo_ment:
return (rb_callable_method_entry_t *)obj;
case imemo_cref:
return NULL;
case imemo_svar:
if (can_be_svar) {
return env_method_entry_unchecked(((struct vm_svar *)obj)->cref_or_me, FALSE);
}
default:
return NULL;
}
}
/* Walk the env chain from cfp->ep outward to the local env and return the
 * frame's method entry (NULL for non-method frames). An svar wrapper is
 * only possible at the local env, hence can_be_svar = TRUE there. */
const rb_callable_method_entry_t *
rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
{
const VALUE *ep = cfp->ep;
rb_callable_method_entry_t *me;
while (!VM_ENV_LOCAL_P(ep)) {
if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
ep = VM_ENV_PREV_EP(ep);
}
return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
/* Assertion-free variant of rb_vm_frame_method_entry() using the
 * _UNCHECKED env walkers; safe to call from contexts where the debug
 * checks themselves could recurse or abort. */
const rb_callable_method_entry_t *
rb_vm_frame_method_entry_unchecked(const rb_control_frame_t *cfp)
{
const VALUE *ep = cfp->ep;
rb_callable_method_entry_t *me;
while (!VM_ENV_LOCAL_P_UNCHECKED(ep)) {
if ((me = env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
ep = VM_ENV_PREV_EP_UNCHECKED(ep);
}
return env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
/* Return the iseq of an ISEQ-backed method entry, NULL for any other
 * method definition type (cfunc, attr, ...). */
static const rb_iseq_t *
method_entry_iseqptr(const rb_callable_method_entry_t *me)
{
    if (me->def->type == VM_METHOD_TYPE_ISEQ) {
        return me->def->body.iseq.iseqptr;
    }
    return NULL;
}
/* Return the cref captured by an ISEQ-backed method entry, NULL for any
 * other method definition type. */
static rb_cref_t *
method_entry_cref(const rb_callable_method_entry_t *me)
{
    if (me->def->type == VM_METHOD_TYPE_ISEQ) {
        return me->def->body.iseq.cref;
    }
    return NULL;
}
#if VM_CHECK_MODE == 0
PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
#endif
/* Extract a cref from an ep's me/cref slot (the cref companion of
 * check_method_entry()): a ment yields its captured cref, a cref is
 * returned directly, Qfalse yields NULL; when can_be_svar, an svar wrapper
 * is unwrapped one level. Any other imemo aborts in debug builds. */
static rb_cref_t *
check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;
#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
#endif
    switch (imemo_type(obj)) {
      case imemo_ment:
        return method_entry_cref((rb_callable_method_entry_t *)obj);
      case imemo_cref:
        return (rb_cref_t *)obj;
      case imemo_svar:
        if (can_be_svar) {
            return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
#if VM_CHECK_MODE > 0
        /* was "check_method_entry: ..." — a copy-paste from the sibling
         * function that misattributed the failure site in bug reports */
        rb_bug("check_cref: svar should not be there:");
#endif
        return NULL;
    }
}
/* Walk the env chain from *ep* to the local env and return the first cref
 * found (via a ment's captured cref or a direct cref imemo). */
static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
rb_cref_t *cref;
while (!VM_ENV_LOCAL_P(ep)) {
if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
ep = VM_ENV_PREV_EP(ep);
}
return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
/* TRUE iff *v* is a cref imemo itself (not one captured inside a ment);
 * when can_be_svar, look inside an svar wrapper one level. */
static int
is_cref(const VALUE v, int can_be_svar)
{
if (RB_TYPE_P(v, T_IMEMO)) {
switch (imemo_type(v)) {
case imemo_cref:
return TRUE;
case imemo_svar:
if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
default:
break;
}
}
return FALSE;
}
/* TRUE iff the env chain of *ep* carries a direct cref imemo (as opposed
 * to reaching a cref only through a method entry). */
static int
vm_env_cref_by_cref(const VALUE *ep)
{
while (!VM_ENV_LOCAL_P(ep)) {
if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
ep = VM_ENV_PREV_EP(ep);
}
return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
/* If *vptr holds a direct cref imemo, replace it in place with a duplicate
 * and return the duplicate; unwraps one svar level when can_be_svar.
 * *parent* (the owning object, or 0/Qfalse) selects write-barrier vs.
 * forced write. Returns NULL when no direct cref is here. */
static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
const VALUE v = *vptr;
rb_cref_t *cref, *new_cref;
if (RB_TYPE_P(v, T_IMEMO)) {
switch (imemo_type(v)) {
case imemo_cref:
cref = (rb_cref_t *)v;
new_cref = vm_cref_dup(cref);
if (parent) {
RB_OBJ_WRITE(parent, vptr, new_cref);
}
else {
VM_FORCE_WRITE(vptr, (VALUE)new_cref);
}
return (rb_cref_t *)new_cref;
case imemo_svar:
if (can_be_svar) {
return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
}
/* fall through */
case imemo_ment:
rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
default:
break;
}
}
return NULL;
}
/* Duplicate the cref visible from *ep* and store the duplicate back into
 * the env slot it came from, so the caller can mutate it without affecting
 * other frames sharing the original. Requires that the chain holds a
 * direct cref (vm_env_cref_by_cref). */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
if (vm_env_cref_by_cref(ep)) {
rb_cref_t *cref;
VALUE envval;
while (!VM_ENV_LOCAL_P(ep)) {
/* pass the heap env as WB parent only when the env escaped */
envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
return cref;
}
ep = VM_ENV_PREV_EP(ep);
}
envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
}
else {
rb_bug("vm_cref_dup: unreachable");
}
}
/* Return the cref visible from *ep*; every Ruby frame must have one, so a
 * missing cref is a VM bug. */
static rb_cref_t *
vm_get_cref(const VALUE *ep)
{
    rb_cref_t *cref = vm_env_cref(ep);
    if (cref == NULL) {
        rb_bug("vm_get_cref: unreachable");
    }
    return cref;
}
/* Public wrapper around vm_get_cref(). */
rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
return vm_get_cref(ep);
}
/* Cref of the nearest Ruby-level frame of *ec*, or NULL when there is no
 * Ruby frame (e.g. pure C call stack). */
static rb_cref_t *
vm_ec_cref(const rb_execution_context_t *ec)
{
const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
if (cfp == NULL) {
return NULL;
}
return vm_get_cref(cfp->ep);
}
/* For constant caching: return the cref only if the lexical scope involves
 * a singleton or cloned class (which make the cref part of the cache key);
 * NULL means the cache may be shared across crefs. */
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
const rb_cref_t *cref = vm_get_cref(ep);
const rb_cref_t *key_cref = cref;
while (cref) {
if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
    RCLASS_CLONED_P(CREF_CLASS(cref)) ) {
return key_cref;
}
cref = CREF_NEXT(cref);
}
/* does not include singleton class */
return NULL;
}
/* Rebuild a cref chain with *old_klass* replaced by *new_klass*.
 * Copies nodes up to and including the first match, reusing the original
 * tail via vm_cref_new_use_prev; returns the new chain head (or the copied
 * chain when no node matches). */
rb_cref_t *
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass)
{
rb_cref_t *new_cref_head = NULL;
rb_cref_t *new_cref_tail = NULL;
/* append new_cref to the copied chain, WB-safe */
#define ADD_NEW_CREF(new_cref) \
 if (new_cref_tail) { \
 RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
 } \
 else { \
 new_cref_head = new_cref; \
 } \
 new_cref_tail = new_cref;
while (cref) {
rb_cref_t *new_cref;
if (CREF_CLASS(cref) == old_klass) {
new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
ADD_NEW_CREF(new_cref);
return new_cref_head;
}
new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
cref = CREF_NEXT(cref);
ADD_NEW_CREF(new_cref);
}
#undef ADD_NEW_CREF
// Could we just reuse the original cref?
return new_cref_head;
}
/* Push a new cref for *klass* on top of the scope visible from *ep* (or,
 * when ep is NULL, from the Ruby-level caller frame of *ec*). Used when
 * entering class/module/sclass bodies and eval. */
static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
rb_cref_t *prev_cref = NULL;
if (ep) {
prev_cref = vm_env_cref(ep);
}
else {
rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);
if (cfp) {
prev_cref = vm_env_cref(cfp->ep);
}
}
return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}
/* Innermost lexical class/module for definitions (cbase). */
static inline VALUE
vm_get_cbase(const VALUE *ep)
{
const rb_cref_t *cref = vm_get_cref(ep);
return CREF_CLASS_FOR_DEFINITION(cref);
}
/* Like vm_get_cbase() but skipping cref nodes pushed by eval; Qundef when
 * every node was pushed by eval. */
static inline VALUE
vm_get_const_base(const VALUE *ep)
{
const rb_cref_t *cref = vm_get_cref(ep);
while (cref) {
if (!CREF_PUSHED_BY_EVAL(cref)) {
return CREF_CLASS_FOR_DEFINITION(cref);
}
cref = CREF_NEXT(cref);
}
return Qundef;
}
/* Raise TypeError unless *klass* can act as a constant namespace
 * (i.e. it is a Class or a Module). */
static inline void
vm_check_if_namespace(VALUE klass)
{
    const bool is_namespace = RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_MODULE);
    if (!is_namespace) {
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
    }
}
/* Warn when defining inside a refinement module: the definition lands on
 * the outer class/module, which is usually not what the user intended. */
static inline void
vm_ensure_not_refinement_module(VALUE self)
{
if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
rb_warn("not defined at the refinement, but at the outer class/module");
}
}
/* Identity today; kept as a hook where an iclass-aware lookup could map
 * klass relative to the frame. */
static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
return klass;
}
/* Core constant lookup for the VM.
 *
 * orig_klass nil + allow_nil: lexical lookup — walk the cref chain (skipping
 * eval-pushed nodes), triggering autoload as needed, then fall back to an
 * inheritance search from the cbase (or self's class). Otherwise: public
 * constant lookup scoped to orig_klass.
 * is_defined != 0 turns the call into a `defined?` probe returning a
 * boolean-ish VALUE instead of the constant's value. */
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
VALUE val;
if (NIL_P(orig_klass) && allow_nil) {
/* in current lexical scope */
const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
const rb_cref_t *cref;
VALUE klass = Qnil;
while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
root_cref = CREF_NEXT(root_cref);
}
cref = root_cref;
while (cref && CREF_NEXT(cref)) {
if (CREF_PUSHED_BY_EVAL(cref)) {
klass = Qnil;
}
else {
klass = CREF_CLASS(cref);
}
cref = CREF_NEXT(cref);
if (!NIL_P(klass)) {
VALUE av, am = 0;
rb_const_entry_t *ce;
search_continue:
if ((ce = rb_const_lookup(klass, id))) {
rb_const_warn_if_deprecated(ce, klass, id);
val = ce->value;
if (UNDEF_P(val)) {
/* Qundef entry marks an autoload; `am` guards against
 * retrying the same class forever. */
if (am == klass) break;
am = klass;
if (is_defined) return 1;
if (rb_autoloading_value(klass, id, &av, NULL)) return av;
rb_autoload_load(klass, id);
goto search_continue;
}
else {
if (is_defined) {
return 1;
}
else {
if (UNLIKELY(!rb_ractor_main_p())) {
/* non-main ractors may only read shareable constants */
if (!rb_ractor_shareable_p(val)) {
rb_raise(rb_eRactorIsolationError,
         "can not access non-shareable objects in constant %"PRIsVALUE"::%"PRIsVALUE" by non-main ractor.", rb_class_path(klass), rb_id2str(id));
}
}
return val;
}
}
}
}
}
/* search self */
if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
}
else {
klass = CLASS_OF(ec->cfp->self);
}
if (is_defined) {
return rb_const_defined(klass, id);
}
else {
return rb_const_get(klass, id);
}
}
else {
vm_check_if_namespace(orig_klass);
if (is_defined) {
return rb_public_const_defined_from(orig_klass, id);
}
else {
return rb_public_const_get_from(orig_klass, id);
}
}
}
/* Public wrapper: allow_nil is a Ruby boolean (only Qtrue enables the
 * lexical-scope path); is_defined is fixed to 0. */
VALUE
rb_vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, VALUE allow_nil)
{
return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
}
/* Resolve a constant path A::B::C given as an idNULL-terminated... actually
 * zero-terminated segment array; a leading idNULL means the path is
 * absolute (::A) and starts at Object. */
static inline VALUE
vm_get_ev_const_chain(rb_execution_context_t *ec, const ID *segments)
{
VALUE val = Qnil;
int idx = 0;
int allow_nil = TRUE;
if (segments[0] == idNULL) {
val = rb_cObject;
idx++;
allow_nil = FALSE;
}
while (segments[idx]) {
ID id = segments[idx++];
val = vm_get_ev_const(ec, val, id, allow_nil, 0);
allow_nil = FALSE;
}
return val;
}
/* Find the class that owns class-variable access for the current cref:
 * skip nil/singleton/eval-pushed/singleton-flagged cref nodes, raising on
 * toplevel access (when top_level_raise) or a nil class. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
VALUE klass;
if (!cref) {
rb_bug("vm_get_cvar_base: no cref");
}
while (CREF_NEXT(cref) &&
       (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
        CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
cref = CREF_NEXT(cref);
}
if (top_level_raise && !CREF_NEXT(cref)) {
rb_raise(rb_eRuntimeError, "class variable access from toplevel");
}
klass = vm_get_iclass(cfp, CREF_CLASS(cref));
if (NIL_P(klass)) {
rb_raise(rb_eTypeError, "no class variables available");
}
return klass;
}
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
/* Store a (shape, field index) pair in the appropriate inline cache:
 * the call cache for attr_reader-style access, the iseq's IVC otherwise. */
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
if (is_attr) {
vm_cc_attr_index_set(cc, index, shape_id);
}
else {
vm_ic_attr_index_set(iseq, ic, index, shape_id);
}
}
/* "incidental" shareability: val only needs to be shareable when cond
 * (typically: the holder itself is shareable). */
#define ractor_incidental_shareable_p(cond, val) \
 (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int, VALUE));
/* Shape-cached instance-variable read.
 *
 * Resolves the object's fields storage (the object itself for T_OBJECT,
 * the class fields object for T_CLASS/T_MODULE, a generic fields imemo
 * otherwise), then consults the inline cache: on a shape match the field
 * index is used directly; on a miss the shape tree (or the too-complex
 * st_table) is searched and the cache refilled. Returns default_value
 * when the ivar is unset (Qundef default_value encodes `defined?`). */
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, VALUE default_value)
{
VALUE fields_obj;
#if OPT_IC_FOR_IVAR
if (SPECIAL_CONST_P(obj)) {
return default_value;
}
switch (BUILTIN_TYPE(obj)) {
case T_OBJECT:
fields_obj = obj;
break;
case T_CLASS:
case T_MODULE:
{
if (UNLIKELY(!rb_ractor_main_p())) {
// For two reasons we can only use the fast path on the main
// ractor.
// First, only the main ractor is allowed to set ivars on classes
// and modules. So we can skip locking.
// Second, other ractors need to check the shareability of the
// values returned from the class ivars.
if (default_value == Qundef) { // defined?
return rb_ivar_defined(obj, id) ? Qtrue : Qundef;
}
else {
goto general_path;
}
}
fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
break;
}
default:
fields_obj = rb_obj_fields(obj, id);
}
if (!fields_obj) {
return default_value;
}
VALUE val = Qundef;
shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
VALUE *ivar_list = rb_imemo_fields_ptr(fields_obj);
shape_id_t cached_id;
attr_index_t index;
if (is_attr) {
vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
}
else {
vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
}
if (LIKELY(cached_id == shape_id)) {
/* cache hit: index is valid for this shape */
RUBY_ASSERT(!rb_shape_too_complex_p(cached_id));
if (index == ATTR_INDEX_NOT_SET) {
return default_value;
}
val = ivar_list[index];
#if USE_DEBUG_COUNTER
RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
if (RB_TYPE_P(obj, T_OBJECT)) {
RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
}
#endif
RUBY_ASSERT(!UNDEF_P(val));
}
else { // cache miss case
#if USE_DEBUG_COUNTER
if (is_attr) {
if (cached_id != INVALID_SHAPE_ID) {
RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
}
else {
RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
}
}
else {
if (cached_id != INVALID_SHAPE_ID) {
RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
}
else {
RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
}
}
RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
if (RB_TYPE_P(obj, T_OBJECT)) {
RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
}
#endif
if (UNLIKELY(rb_shape_too_complex_p(shape_id))) {
/* too-complex shapes store fields in an st_table keyed by ID */
st_table *table = (st_table *)ivar_list;
RUBY_ASSERT(table);
RUBY_ASSERT(table == rb_imemo_fields_complex_tbl(fields_obj));
if (!st_lookup(table, id, &val)) {
val = default_value;
}
}
else {
shape_id_t previous_cached_id = cached_id;
if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {
// This fills in the cache with the shared cache object.
// "ent" is the shared cache object
if (cached_id != previous_cached_id) {
fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
}
if (index == ATTR_INDEX_NOT_SET) {
val = default_value;
}
else {
// We fetched the ivar list above
val = ivar_list[index];
RUBY_ASSERT(!UNDEF_P(val));
}
}
else {
/* ivar absent from this shape: cache the negative result */
if (is_attr) {
vm_cc_attr_index_initialize(cc, shape_id);
}
else {
vm_ic_attr_index_initialize(ic, shape_id);
}
val = default_value;
}
}
}
if (!UNDEF_P(default_value)) {
RUBY_ASSERT(!UNDEF_P(val));
}
return val;
general_path:
#endif /* OPT_IC_FOR_IVAR */
RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
if (is_attr) {
return rb_attr_get(obj, id);
}
else {
return rb_ivar_get(obj, id);
}
}
/* Write-side counterpart of fill_ivar_cache(): record (index, next shape)
 * in the call cache or the iseq IVC after a successful ivar set. */
static void
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));
// Cache population code
if (is_attr) {
vm_cc_attr_index_set(cc, index, next_shape_id);
}
else {
vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
}
}
ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
/* Cache-miss path for an ivar write: perform the full set (which may grow
 * the object's shape), then refill the inline cache with the new shape
 * unless the object transitioned to a too-complex shape. Returns val. */
static VALUE
vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
rb_check_frozen(obj);
attr_index_t index = rb_ivar_set_index(obj, id, val);
shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
if (!rb_shape_too_complex_p(next_shape_id)) {
populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
}
RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
return val;
#else
return rb_ivar_set(obj, id, val);
#endif
}
/* NOINLINE entry point for `setinstancevariable` (iseq IVC). */
static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}
/* NOINLINE entry point for attr_writer dispatch (call cache). */
static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
NOINLINE(static VALUE vm_setivar_class(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
/* Shape-cached ivar write for T_CLASS/T_MODULE receivers. Returns Qundef
 * to signal "take the slow path" (non-main ractor, no fields object yet,
 * or a cache/shape mismatch); otherwise writes into the class's fields
 * object and commits the shape transition when one was cached. */
static VALUE
vm_setivar_class(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
if (UNLIKELY(!rb_ractor_main_p())) {
/* only the main ractor may set class-level ivars */
return Qundef;
}
VALUE fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
if (UNLIKELY(!fields_obj)) {
return Qundef;
}
shape_id_t shape_id = RBASIC_SHAPE_ID(fields_obj);
// Cache hit case
if (shape_id == dest_shape_id) {
RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
}
else if (dest_shape_id != INVALID_SHAPE_ID) {
/* cached transition: valid only for the exact parent shape/edge */
if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
}
else {
return Qundef;
}
}
else {
return Qundef;
}
RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
if (shape_id != dest_shape_id) {
RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
}
RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
return val;
}
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
/* Shape-cached ivar write for receivers that keep fields in a generic
 * fields imemo (anything that is not T_OBJECT/T_CLASS/T_MODULE). Same
 * cache-validation protocol as vm_setivar_class(); Qundef = slow path. */
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
// Cache hit case
if (shape_id == dest_shape_id) {
RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
}
else if (dest_shape_id != INVALID_SHAPE_ID) {
/* cached transition: valid only for the exact parent shape/edge */
if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
}
else {
return Qundef;
}
}
else {
return Qundef;
}
VALUE fields_obj = rb_obj_fields(obj, id);
RUBY_ASSERT(fields_obj);
RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
if (shape_id != dest_shape_id) {
RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
}
RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
return val;
}
/* Fast-path ivar write for T_OBJECT receivers using the cached
 * (dest shape, index). Returns Qundef when the receiver is not a plain
 * object or the cache does not apply, telling the caller to dispatch to a
 * type-specific or slow path.
 * NOTE(review): when OPT_IC_FOR_IVAR is 0 this function body is empty and
 * falls off the end — presumably that configuration is never built. */
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if OPT_IC_FOR_IVAR
switch (BUILTIN_TYPE(obj)) {
case T_OBJECT:
{
VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));
shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));
if (LIKELY(shape_id == dest_shape_id)) {
RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
VM_ASSERT(!rb_ractor_shareable_p(obj));
}
else if (dest_shape_id != INVALID_SHAPE_ID) {
/* cached transition: commit the new shape if it matches */
if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
}
else {
break;
}
}
else {
break;
}
VALUE *ptr = ROBJECT_FIELDS(obj);
RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
RB_OBJ_WRITE(obj, &ptr[index], val);
RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
return val;
}
break;
case T_CLASS:
case T_MODULE:
RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
default:
break;
}
return Qundef;
#endif /* OPT_IC_FOR_IVAR */
}
/* Refill the class-variable inline cache after a lookup: find the defining
 * class, fetch its cvc-table entry for *id*, stamp it with the current
 * global cvar state and cref, and point the IC at it. Returns the cvar's
 * value. */
static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, const rb_cref_t * cref, ICVARC ic)
{
VALUE defined_class = 0;
VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);
if (RB_TYPE_P(defined_class, T_ICLASS)) {
/* module via include: attribute the cvar to the module itself */
defined_class = RBASIC(defined_class)->klass;
}
struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
if (!rb_cvc_tbl) {
rb_bug("the cvc table should be set");
}
VALUE ent_data;
if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
rb_bug("should have cvar cache entry");
}
struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;
ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
ent->cref = cref;
ic->entry = ent;
RUBY_ASSERT(BUILTIN_TYPE((VALUE)cref) == T_IMEMO && IMEMO_TYPE_P(cref, imemo_cref));
/* keep GC aware of the references now reachable from iseq/class */
RB_OBJ_WRITTEN(iseq, Qundef, ent->cref);
RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
RB_OBJ_WRITTEN(ent->class_value, Qundef, ent->cref);
return cvar_value;
}
/* Read a class variable: fast path via the inline cache when the global
 * cvar state and cref still match (main ractor only), otherwise resolve
 * the base class and refill the cache. */
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
{
const rb_cref_t *cref;
cref = vm_get_cref(GET_EP());
if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
RUBY_ASSERT(!UNDEF_P(v));
return v;
}
VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
return update_classvariable_cache(iseq, klass, id, cref, ic);
}
/* Public wrapper around vm_getclassvariable(). */
VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
return vm_getclassvariable(iseq, cfp, id, ic);
}
/* Write a class variable: fast path via the inline cache when still valid
 * (main ractor only), otherwise set through the resolved base class and
 * refill the cache. */
static inline void
vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
{
const rb_cref_t *cref;
cref = vm_get_cref(GET_EP());
if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
rb_class_ivar_set(ic->entry->class_value, id, val);
return;
}
VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
rb_cvar_set(klass, id, val);
update_classvariable_cache(iseq, klass, id, cref, ic);
}
/* Public wrapper around vm_setclassvariable(). */
void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
vm_setclassvariable(iseq, cfp, id, val, ic);
}
ALWAYS_INLINE(static VALUE vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic));
/* Read instance variable `id` of `obj` through the per-site cache `ic`. */
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    VALUE result = vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
    return result;
}
/* Write instance variable `id` of `obj` through the per-site cache `ic`.
 * Tries the cached (shape, index) fast path first; on a miss, dispatches by
 * receiver type before falling back to the generic slow path. */
static inline void
vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    if (RB_SPECIAL_CONST_P(obj)) {
        /* special constants (immediates) cannot take an ivar: frozen error */
        rb_error_frozen_object(obj);
        return;
    }
    shape_id_t dest_shape_id;
    attr_index_t index;
    /* fetch the cached shape/index pair atomically */
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
        /* fast path failed (Qundef): pick a type-specific path first */
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
            break;
          case T_CLASS:
          case T_MODULE:
            if (!UNDEF_P(vm_setivar_class(obj, id, val, dest_shape_id, index))) {
                return;
            }
            break;
          default:
            if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
                return;
            }
        }
        /* generic slow path; also responsible for updating `ic` */
        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
    }
}
/* Exported entry point; forwards to the inlined vm_setinstancevariable(). */
void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
    return;
}
/* Exported entry point; forwards to the inlined vm_getinstancevariable(). */
VALUE
rb_vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    VALUE result = vm_getinstancevariable(iseq, obj, id, ic);
    return result;
}
/* Continue an in-flight throw: classify the error object to decide which
 * tag state propagation should continue with, then return it unchanged. */
static VALUE
vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
{
    enum ruby_tag_type state;

    if (FIXNUM_P(err)) {
        state = RUBY_TAG_FATAL;
    }
    else if (SYMBOL_P(err)) {
        state = TAG_THROW;
    }
    else if (THROW_DATA_P(err)) {
        state = THROW_DATA_STATE((struct vm_throw_data *)err);
    }
    else {
        state = TAG_RAISE;
    }
    ec->tag->state = state;
    return err;
}
/* Start a throw with a non-zero state (TAG_BREAK / TAG_RETRY / TAG_RETURN).
 *
 * Walks the control-frame stack to locate the frame the jump escapes to and
 * validates that the target still exists; an orphaned break/return raises a
 * local-jump error instead. Returns the throw data object consumed by the
 * tag-unwinding machinery; `escape_cfp` is embedded in it.
 *
 * flag: VM_THROW_NO_ESCAPE_FLAG bits; when non-zero no target search is done. */
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */
    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;
        /* climb to the block iseq this `break` belongs to */
        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }
        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            /* search the frame owning the block and check its catch table
             * for a live CATCH_TYPE_BREAK entry covering the current pc */
            ep = VM_ENV_PREV_EP(ep);
            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;
                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);
                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }
        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;
        // find target_lep, target_ep
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;
        /* walk frames toward the stack bottom to find a valid return target */
        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);
            if (!target_lep) {
                target_lep = lep;
            }
            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }
            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;
                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL: {
                        /* look through rescue/ensure/eval wrappers for the real scope */
                        const rb_iseq_t *is = escape_cfp->iseq;
                        enum rb_iseq_type t = ISEQ_BODY(is)->type;
                        while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                            if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                            t = ISEQ_BODY(is)->type;
                        }
                        toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
                        break;
                      }
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }
            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }
            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }
    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
/* throw instruction body: split the packed throw_state into state and
 * no-escape flag, then dispatch to start or continue handling. */
static VALUE
vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

    return state
        ? vm_throw_start(ec, reg_cfp, state, flag, throwobj)
        : vm_throw_continue(ec, throwobj);
}
/* Exported entry point; forwards to vm_throw(). */
VALUE
rb_vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t throw_state, VALUE throwobj)
{
    VALUE result = vm_throw(ec, reg_cfp, throw_state, throwobj);
    return result;
}
/* expandarray: push `num` elements of `ary` onto the VM stack (cfp->sp).
 *   flag & 0x01 (is_splat): additionally push the remaining elements as one
 *                           new array.
 *   flag & 0x02 (post):     push in reverse order, padding with nil first
 *                           when the array is shorter than `num`.
 * A non-array value that does not convert via rb_check_array_type() is
 * treated as a one-element array containing itself. */
static inline void
vm_expandarray(struct rb_control_frame_struct *cfp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;
    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        /* conversion failed: treat the original object as [obj] */
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }
    if (num + is_splat == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;
        if (len < num) {
            /* too short: pad the missing leading slots with nil */
            for (i = 0; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }
        }
        /* note: i continues from the padding loop; j counts taken elements */
        for (j = 0; i < num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *cfp->sp++ = v;
        }
        if (is_splat) {
            *cfp->sp++ = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        if (is_splat) {
            if (num > len) {
                *cfp->sp++ = rb_ary_new();
            }
            else {
                *cfp->sp++ = rb_ary_new4(len - num, ptr + num);
            }
        }
        if (num > len) {
            rb_num_t i = 0;
            for (; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }
            for (rb_num_t j = 0; i < num; i++, j++) {
                *cfp->sp++ = ptr[len - j - 1];
            }
        }
        else {
            for (rb_num_t j = 0; j < num; j++) {
                *cfp->sp++ = ptr[num - j - 1];
            }
        }
    }
    /* keep `ary` alive while we read from its element pointer */
    RB_GC_GUARD(ary);
}
static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);
/* Allocate an empty call-cache entry list (ccs) for method `mid` with an
 * initial capacity of 2, register it under `mid` in `cc_tbl`, and mark the
 * method entry as cached so later invalidation can be observed. */
static struct rb_class_cc_entries *
vm_ccs_create(VALUE klass, VALUE cc_tbl, ID mid, const rb_callable_method_entry_t *cme)
{
    int initial_capa = 2;
    struct rb_class_cc_entries *ccs = ruby_xmalloc(vm_ccs_alloc_size(initial_capa));
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;  /* debug-build integrity signature */
#endif
    ccs->capa = initial_capa;
    ccs->len = 0;
    ccs->cme = cme;
    METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
    rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    /* write barrier: cc_tbl now references cme (through ccs) */
    RB_OBJ_WRITTEN(cc_tbl, Qundef, cme);
    return ccs;
}
/* Append call cache `cc`, keyed by the call-info's (argc, flag), to `ccs`.
 * Unmarkable caches are not stored. When full, the entry array grows
 * geometrically; the realloc'ed ccs is re-inserted under the same key so
 * `cc_tbl` no longer points at the stale allocation. */
static void
vm_ccs_push(VALUE cc_tbl, ID mid, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    if (! vm_cc_markable(cc)) {
        return;
    }
    if (UNLIKELY(ccs->len == ccs->capa)) {
        RUBY_ASSERT(ccs->capa > 0);
        ccs->capa *= 2;
        ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
#if VM_CHECK_MODE > 0
        ccs->debug_sig = ~(VALUE)ccs;  /* re-sign: the address changed */
#endif
        // GC?
        rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    }
    VM_ASSERT(ccs->len < ccs->capa);
    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);
    /* write barrier: cc_tbl keeps cc alive through ccs->entries */
    RB_OBJ_WRITE(cc_tbl, &ccs->entries[pos].cc, cc);
    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
        // for tuning
        // vm_mtbl_dump(klass, 0);
    }
}
#if VM_CHECK_MODE > 0
/* Debug helper: dump every (flag, argc, cc) entry of a ccs to stderr. */
void
rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
{
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);
    }
}

/* Debug helper: assert structural invariants of every cc stored in `ccs`
 * (correct imemo type, class, cme, and no super/refinement caches). */
static int
vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
{
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        const struct rb_callcache *cc = ccs->entries[i].cc;
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    }
    return TRUE;
}
#endif
const rb_callable_method_entry_t *rb_check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci);
/* Remove the (invalidated) ccs for `mid` from the class's cc table.
 * Must be called with the VM lock held. Under multi-ractor execution the
 * table is replaced copy-on-write so concurrent readers never observe a
 * partially mutated table; both retry conditions detect another ractor
 * having raced ahead of us while we waited for the lock. */
static void
vm_evict_cc(VALUE klass, VALUE cc_tbl, ID mid)
{
    ASSERT_vm_locking();

    if (rb_multi_ractor_p()) {
        if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {
            // Another ractor updated the CC table while we were waiting on the VM lock.
            // We have to retry.
            return;
        }

        VALUE ccs_obj = 0;
        rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj);
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_obj;
        if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {
            // Another ractor replaced that entry while we were waiting on the VM lock.
            return;
        }

        /* copy-on-write: delete from a duplicate, then publish it atomically */
        VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
        rb_vm_cc_table_delete(new_table, mid);
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);
    }
    else {
        rb_vm_cc_table_delete(cc_tbl, mid);
    }
}
/* Slow-path cache fill: resolve `mid` on `klass` and install a new call
 * cache in the class's cc table. Must be called with the VM lock held.
 * Returns &vm_empty_cc when the method does not exist (not cacheable). */
static const struct rb_callcache *
vm_populate_cc(VALUE klass, const struct rb_callinfo * const ci, ID mid)
{
    ASSERT_vm_locking();

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        // undef or not found: can't cache the information
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    const VALUE original_cc_table = cc_tbl;

    if (!cc_tbl) {
        // Is this possible after rb_callable_method_entry ?
        cc_tbl = rb_vm_cc_table_create(1);
    }
    else if (rb_multi_ractor_p()) {
        /* copy-on-write under ractors: mutate a duplicate, publish below */
        cc_tbl = rb_vm_cc_table_dup(cc_tbl);
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    VM_ASSERT(cc_tbl);

    /* find or create the per-method ccs list */
    struct rb_class_cc_entries *ccs = NULL;
    {
        VALUE ccs_obj;
        if (UNLIKELY(rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj))) {
            ccs = (struct rb_class_cc_entries *)ccs_obj;
        }
        else {
            // TODO: required?
            ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
        }
    }

    cme = rb_check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(cc_tbl, mid, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    if (original_cc_table != cc_tbl) {
        /* publish the new/duplicated table atomically */
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
    }

    return cc;
}
/* Look up an existing call cache for (klass, ci) without taking the VM
 * lock. Returns NULL when not cached. If the cached cme has been
 * invalidated, evict it under the VM lock and retry from the top. */
static const struct rb_callcache *
vm_lookup_cc(const VALUE klass, const struct rb_callinfo * const ci, ID mid)
{
    VALUE cc_tbl;
    struct rb_class_cc_entries *ccs;

  retry:
    cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
    ccs = NULL;

    if (cc_tbl) {
        // CCS data is keyed on method id, so we don't need the method id
        // for doing comparisons in the `for` loop below.
        VALUE ccs_obj;
        if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj)) {
            ccs = (struct rb_class_cc_entries *)ccs_obj;
            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                /* stale entry: evict under the lock, then retry the load */
                RB_VM_LOCKING() {
                    vm_evict_cc(klass, cc_tbl, mid);
                }
                goto retry;
            }
            else {
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                // We already know the method id is correct because we had
                // to look up the ccs_data by method id. All we need to
                // compare is argc and flag
                unsigned int argc = vm_ci_argc(ci);
                unsigned int flag = vm_ci_flag(ci);

                for (int i=0; i<ccs_len; i++) {
                    unsigned int ccs_ci_argc = ccs->entries[i].argc;
                    unsigned int ccs_ci_flag = ccs->entries[i].flag;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    /* keep the table alive until we are done reading from ccs */
    RB_GC_GUARD(cc_tbl);
    return NULL;
}
/* Find or create the call cache for (klass, ci): lock-free lookup first,
 * then, under the VM lock, a second lookup (another ractor may have filled
 * it while we waited) followed by a populate when still missing. */
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
{
    const ID mid = vm_ci_mid(ci);

    const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);
    if (cc) {
        return cc;
    }

    RB_VM_LOCKING() {
        if (rb_multi_ractor_p()) {
            // The CC may have been populated by another ractor while we were waiting on the lock,
            // so we must lookup a second time.
            cc = vm_lookup_cc(klass, ci, mid);
        }

        if (!cc) {
            cc = vm_populate_cc(klass, ci, mid);
        }
    }

    return cc;
}
/* Public slow-path method search: return a call cache for invoking `ci`'s
 * method on `klass`. Never returns NULL; a missing method yields the empty
 * cache (vm_cc_empty()). The asserts document the returned cache's
 * invariants in debug builds. */
const struct rb_callcache *
rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
{
    const struct rb_callcache *cc;

    VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);

    cc = vm_search_cc(klass, ci);

    VM_ASSERT(cc);
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
    VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));

    return cc;
}
/* Inline-cache miss handler: run the slow-path search and (when inline
 * method caching is enabled) store the result back into `cd->cc`, with a
 * write barrier against the owning object. The debug counters classify why
 * the previous inline cache missed. */
static const struct rb_callcache *
vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc = &vm_empty_cc;

    /* write barrier only for real (non-empty) caches with a known owner */
    if (cd_owner && cc != empty_cc) {
        RB_OBJ_WRITTEN(cd_owner, Qundef, cc);
    }

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {
        // empty
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif
#endif // OPT_INLINE_METHOD_CACHE

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

    return cc;
}
ALWAYS_INLINE(static const struct rb_callcache *vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass));
/* Method search with the per-call-site inline cache: hit when `cd->cc`
 * matches `klass` and its method entry has not been invalidated; otherwise
 * fall through to vm_search_method_slowpath0(). */
static const struct rb_callcache *
vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
    const struct rb_callcache *cc = cd->cc;

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        // not found
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||         // search_super w/ define_method
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci)); // cme->called_id == ci->mid

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
}
/* Search the method for `cd` on the receiver's class; returns the callable
 * method entry (NULL when the method is not found). */
static const struct rb_callable_method_entry_struct *
vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
{
    const VALUE klass = CLASS_OF(recv);

    VM_ASSERT(klass != Qfalse);
    VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));

    return vm_cc_cme(vm_search_method_fastpath(cd_owner, cd, klass));
}
/* ZJIT entry point; forwards to vm_search_method(). */
const struct rb_callable_method_entry_struct *
rb_zjit_vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
{
    const struct rb_callable_method_entry_struct *cme = vm_search_method(cd_owner, cd, recv);
    return cme;
}
/* Unified type for C-method function pointers. When the compiler supports
 * transparent_union, any of the listed arities is accepted without a cast;
 * otherwise fall back to the classic ANYARGS function-pointer type.
 * make_cfunc_type() wraps a raw function pointer into this type. */
#if __has_attribute(transparent_union)
typedef union {
    VALUE (*anyargs)(ANYARGS);
    VALUE (*f00)(VALUE);
    VALUE (*f01)(VALUE, VALUE);
    VALUE (*f02)(VALUE, VALUE, VALUE);
    VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
    VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f08)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f09)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    /* -1 arity: (argc, argv, self) */
    VALUE (*fm1)(int, union { VALUE *x; const VALUE *y; } __attribute__((__transparent_union__)), VALUE);
} __attribute__((__transparent_union__)) cfunc_type;
# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
#else
typedef VALUE (*cfunc_type)(ANYARGS);
# define make_cfunc_type(f) (cfunc_type)(f)
#endif
/* Return whether method entry `me` is a C function implemented by `func`. */
static inline int
check_cfunc(const rb_callable_method_entry_t *me, cfunc_type func)
{
    if (!me) {
        return false;
    }

    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));
    VM_ASSERT(me->def);

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {
        return false;
    }

#if __has_attribute(transparent_union)
    return me->def->body.cfunc.func == func.anyargs;
#else
    return me->def->body.cfunc.func == func;
#endif
}
static inline int
check_method_basic_definition(const rb_callable_method_entry_t *me)
{
return me && METHOD_ENTRY_BASIC(me);
}
/* Does the method `cd` resolves to on `recv` implement C function `func`? */
static inline int
vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
{
    VM_ASSERT(iseq != NULL);

    const struct rb_callable_method_entry_struct *me = vm_search_method((VALUE)iseq, cd, recv);
    return check_cfunc(me, func);
}
bool
rb_zjit_cme_is_cfunc(const rb_callable_method_entry_t *me, const cfunc_type func)
{
return check_cfunc(me, func);
}
/* Exported entry point; forwards to the inlined vm_method_cfunc_is(). */
int
rb_vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
{
    int result = vm_method_cfunc_is(iseq, cd, recv, func);
    return result;
}
/* From here on, calls to check_cfunc()/vm_method_cfunc_is() go through these
 * macros so a plain function pointer argument is wrapped via make_cfunc_type().
 * The macros deliberately shadow the function names defined above. */
#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))

/* True when BOP_EQ for type `t` has not been redefined by user code. */
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
/* True when both `a` and `b` are Fixnums. */
static inline bool
FIXNUM_2_P(VALUE a, VALUE b)
{
    /* FIXNUM_P(a) && FIXNUM_P(b)
     * == ((a & 1) && (b & 1))
     * == a & b & 1 */
    return (((SIGNED_VALUE)a & (SIGNED_VALUE)b) & 1) == 1;
}
/* True when both `a` and `b` are Flonums (immediate floats). */
static inline bool
FLONUM_2_P(VALUE a, VALUE b)
{
#if USE_FLONUM
    /* FLONUM_P(v) is ((v & 3) == 2), so both hold exactly when
     * (((a ^ 2) | (b ^ 2)) & 3) == 0. */
    const SIGNED_VALUE residue = (((SIGNED_VALUE)a ^ 2) | ((SIGNED_VALUE)b ^ 2)) & 3;
    return residue == 0;
#else
    return false;
#endif
}
/* Type-specialized `==` for receivers whose #== is known (via the
 * redefinition flags) to still be the builtin definition. Returns Qundef
 * when no specialization applies and a real method call is required. */
static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        /* other immediates: no specialization */
        //
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
/* Optimized `==`: try the type-specialized comparison first; otherwise, if
 * the receiver's #== is the builtin rb_obj_equal C function, compare by
 * identity. Returns Qundef when the generic call path must be taken. */
static VALUE
opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
{
    VM_ASSERT(cd_owner != NULL);

    const VALUE fast = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(fast)) {
        return fast;
    }
    if (vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
        return RBOOL(recv == obj);
    }
    return Qundef;
}
#undef EQ_UNREDEFINED_P
static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, const struct rb_callinfo *ci); // vm_eval.c
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
/* Slow path for opt_equality_by_mid(): consult the global call cache for
 * `mid` on `recv` using a stack-allocated call info; if the resolved method
 * is the builtin rb_obj_equal C function, compare by identity, otherwise
 * return Qundef to request a real method call. */
static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }
    else {
        return Qundef;
    }
}
/* Optimized equality by method id: specialized comparison first, then the
 * global-call-cache slow path. Qundef means "needs a real method call". */
static VALUE
opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
{
    const VALUE fast = opt_equality_specialized(recv, obj);
    return UNDEF_P(fast) ? opt_equality_by_mid_slowpath(recv, obj, mid) : fast;
}
/* Optimized #== check; Qundef when it cannot be decided without a call. */
VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    const ID mid = idEq;
    return opt_equality_by_mid(obj1, obj2, mid);
}
/* Optimized #eql? check; Qundef when it cannot be decided without a call. */
VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    const ID mid = idEqlP;
    return opt_equality_by_mid(obj1, obj2, mid);
}
extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *, VALUE, ID, int, const VALUE *, int);
/* checkmatch instruction body: evaluate `pattern` against `target`.
 *   WHEN:   the pattern itself is the result (truthiness decided by caller).
 *   RESCUE: pattern must be a class/module, then falls through to CASE.
 *   CASE:   dispatch pattern === target (with refinements). */
static VALUE
check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
{
    switch (type) {
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
    }
}
static inline VALUE
double_cmp_lt(double a, double b)
{
return RBOOL(a < b);
}
static inline VALUE
double_cmp_le(double a, double b)
{
return RBOOL(a <= b);
}
static inline VALUE
double_cmp_gt(double a, double b)
{
return RBOOL(a > b);
}
static inline VALUE
double_cmp_ge(double a, double b)
{
return RBOOL(a >= b);
}
// Copied by vm_dump.c
/* Compute the base pointer (start of the local/argument area) of a Ruby
 * frame: previous frame's sp + locals + env data, adjusted for forwarded
 * call info and for the `self` slot of method/bmethod frames. Returns NULL
 * for non-Ruby (cfunc) frames. */
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;

            CALL_INFO ci = (CALL_INFO)cfp->ep[-(VM_ENV_DATA_SIZE + (lts - params))]; // skip EP stuff, CI should be last local
            bp += vm_ci_argc(ci);
        }

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
}
/* Exported entry point; forwards to the inlined vm_base_ptr(). */
VALUE *
rb_vm_base_ptr(const rb_control_frame_t *cfp)
{
    VALUE *bp = vm_base_ptr(cfp);
    return bp;
}
/* method call processes with call_info */
#include "vm_args.c"
static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc, int param_size, int local_size);
ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc);
static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
/* Tailcall frame setup starting at pc offset 0. */
static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    const int opt_pc = 0;
    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
}
/* Normal frame setup starting at pc offset 0, with parameter and local
 * sizes taken from the callee iseq. */
static VALUE
vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int param_size = ISEQ_BODY(iseq)->param.size;
    const int local_size = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
}
/* An iseq is "simple" when it accepts only mandatory positional parameters:
 * no optionals, rest, post, keywords, kwrest, **nil, forwarding, or block. */
bool
rb_simple_iseq_p(const rb_iseq_t *iseq)
{
    return !(ISEQ_BODY(iseq)->param.flags.has_opt ||
             ISEQ_BODY(iseq)->param.flags.has_rest ||
             ISEQ_BODY(iseq)->param.flags.has_post ||
             ISEQ_BODY(iseq)->param.flags.has_kw ||
             ISEQ_BODY(iseq)->param.flags.has_kwrest ||
             ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg ||
             ISEQ_BODY(iseq)->param.flags.forwardable ||
             ISEQ_BODY(iseq)->param.flags.has_block);
}
bool
rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
{
return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
bool
rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
{
return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
/* Expand splatted array `ary` onto the VM stack. When the expansion would
 * exceed VM_ARGC_STACK_MAX and the caller allows it (max_args <=
 * ALLOW_HEAP_ARGV), the arguments are instead collected into a hidden heap
 * array stored in calling->heap_argv. Returns true when arguments were
 * truncated to `max_args` (non-lambda block semantics). */
static inline bool
vm_caller_setup_arg_splat(rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE ary, int max_args)
{
    vm_check_canary(GET_EC(), cfp->sp);
    bool ret = false;

    if (!NIL_P(ary)) {
        const VALUE *ptr = RARRAY_CONST_PTR(ary);
        long len = RARRAY_LEN(ary);
        int argc = calling->argc;

        if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
            /* Avoid SystemStackError when splatting large arrays by storing arguments in
             * a temporary array, instead of trying to keeping arguments on the VM stack.
             */
            VALUE *argv = cfp->sp - argc;
            VALUE argv_ary = rb_ary_hidden_new(len + argc + 1);
            rb_ary_cat(argv_ary, argv, argc);
            rb_ary_cat(argv_ary, ptr, len);
            /* replace the argc stack slots with the single hidden array */
            cfp->sp -= argc - 1;
            cfp->sp[-1] = argv_ary;
            calling->argc = 1;
            calling->heap_argv = argv_ary;
            RB_GC_GUARD(ary);
        }
        else {
            long i;

            if (max_args >= 0 && len + argc > max_args) {
                /* If only a given max_args is allowed, copy up to max args.
                 * Used by vm_callee_setup_block_arg for non-lambda blocks,
                 * where additional arguments are ignored.
                 *
                 * Also, copy up to one more argument than the maximum,
                 * in case it is an empty keyword hash that will be removed.
                 */
                calling->argc += len - (max_args - argc + 1);
                len = max_args - argc + 1;
                ret = true;
            }
            else {
                /* Unset heap_argv if set originally. Can happen when
                 * forwarding modified arguments, where heap_argv was used
                 * originally, but heap_argv not supported by the forwarded
                 * method in all cases.
                 */
                calling->heap_argv = 0;
            }
            CHECK_VM_STACK_OVERFLOW(cfp, len);

            for (i = 0; i < len; i++) {
                *cfp->sp++ = ptr[i];
            }
            calling->argc += i;
        }
    }

    return ret;
}
/* Collapse the kw_len literal keyword arguments currently on the stack into
 * a single hash, leaving that hash as the one remaining keyword-splat
 * argument (calling->kw_splat is set). */
static inline void
vm_caller_setup_arg_kw(rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci)
{
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;
    int i;

    /* keyword values are the top kw_len stack slots, in keyword order */
    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }
    /* overwrite the first keyword slot with the hash and drop the rest */
    (sp-kw_len)[0] = h;

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;
}
/* Normalize a keyword-splat argument: convert non-hashes via to_hash and
 * duplicate non-mutable, non-empty hashes so callee mutation is safe.
 * nil (absent splat) is passed through unchanged. */
static inline VALUE
vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
{
    if (UNLIKELY(!RB_TYPE_P(keyword_hash, T_HASH))) {
        if (keyword_hash != Qnil) {
            /* Convert a non-hash keyword splat to a new hash */
            keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
        }
    }
    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {
        /* Convert a hash keyword splat to a new hash unless
         * a mutable keyword splat was passed.
         * Skip allocating new hash for empty keyword splat, as empty
         * keyword splat will be ignored by both callers.
         */
        keyword_hash = rb_hash_dup(keyword_hash);
    }
    return keyword_hash;
}
/* Normalize caller-side arguments for splat (*a), keyword-splat (**kw) and
 * literal keyword (k: v) call sites before callee argument setup. May
 * rewrite the stack and update calling->argc, calling->kw_splat and
 * calling->heap_argv. `max_args` limits splat expansion (see
 * vm_caller_setup_arg_splat / ALLOW_HEAP_ARGV*). */
static inline void
CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
                 struct rb_calling_info *restrict calling,
                 const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {
            // f(*a, **kw)
            VM_ASSERT(calling->kw_splat == 1);

            /* pop the array and the keyword hash from the stack */
            cfp->sp -= 2;
            calling->argc -= 2;
            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            // splat a
            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            // put kw
            if (kwh != Qnil && !RHASH_EMPTY_P(kwh)) {
                if (UNLIKELY(calling->heap_argv)) {
                    rb_ary_push(calling->heap_argv, kwh);
                    ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                    if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                        calling->kw_splat = 0;
                    }
                }
                else {
                    cfp->sp[0] = kwh;
                    cfp->sp++;
                    calling->argc++;

                    VM_ASSERT(calling->kw_splat == 1);
                }
            }
            else {
                /* empty or nil keyword splat: drop it entirely */
                calling->kw_splat = 0;
            }
        }
        else {
            // f(*a)
            VM_ASSERT(calling->kw_splat == 0);

            cfp->sp -= 1;
            calling->argc -= 1;
            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
                goto check_keyword;
            }

            // check the last argument
            VALUE last_hash, argv_ary;
            if (UNLIKELY(argv_ary = calling->heap_argv)) {
                if (!IS_ARGS_KEYWORD(ci) &&
                    RARRAY_LEN(argv_ary) > 0 &&
                    RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)), T_HASH) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    rb_ary_pop(argv_ary);
                    if (!RHASH_EMPTY_P(last_hash)) {
                        rb_ary_push(argv_ary, rb_hash_dup(last_hash));
                        calling->kw_splat = 1;
                    }
                }
            }
            else {
              check_keyword:
                if (!IS_ARGS_KEYWORD(ci) &&
                    calling->argc > 0 &&
                    RB_TYPE_P((last_hash = cfp->sp[-1]), T_HASH) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    if (RHASH_EMPTY_P(last_hash)) {
                        calling->argc--;
                        cfp->sp -= 1;
                    }
                    else {
                        cfp->sp[-1] = rb_hash_dup(last_hash);
                        calling->kw_splat = 1;
                    }
                }
            }
        }
    }
    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
        // f(**kw)
        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

        if (kwh == Qnil || RHASH_EMPTY_P(kwh)) {
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
        else {
            cfp->sp[-1] = kwh;
        }
    }
    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
        // f(k1:1, k2:2)
        VM_ASSERT(calling->kw_splat == 0);

        /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
         * by creating a keyword hash.
         * So, vm_ci_flag(ci) & VM_CALL_KWARG is now inconsistent.
         */
        vm_caller_setup_arg_kw(cfp, calling, ci);
    }
}
/* Optional instrumentation: histogram of how many optional parameters were
 * supplied at optimized call sites.  Disabled by default; flip USE_OPT_HIST
 * to 1 to collect and dump counts at process exit. */
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

__attribute__((destructor))
static void
opt_hist_show_results_at_exit(void)
{
    /* NOTE(review): prints buckets 0..OPT_HIST_MAX-1 only; the overflow
     * bucket opt_hist[OPT_HIST_MAX] is collected but never dumped — confirm
     * whether that is intended. */
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif
/* Fast path for calling an iseq whose parameters are leading positionals
 * plus optional parameters only.  The argument count was already validated
 * when this fastpath was installed, so we only need to pick the entry PC
 * from the opt_table and shrink the parameter area by the number of
 * optional parameters that were not supplied. */
static VALUE
vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                    struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead = ISEQ_BODY(iseq)->param.lead_num;
    const int given_opts = calling->argc - lead;
    const int missing_opts = ISEQ_BODY(iseq)->param.opt_num - given_opts;
    const int pc_offset = (int)ISEQ_BODY(iseq)->param.opt_table[given_opts];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    opt_hist[pc_offset < OPT_HIST_MAX ? given_opts : OPT_HIST_MAX]++;
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), pc_offset,
                                     ISEQ_BODY(iseq)->param.size - missing_opts,
                                     ISEQ_BODY(iseq)->local_table_size);
}
/* Tailcall variant of the optional-parameter fast path: pick the entry PC
 * from the opt_table based on how many optional arguments were supplied,
 * then reuse the current frame via the tailcall setup. */
static VALUE
vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                      struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int given_opts = calling->argc - ISEQ_BODY(iseq)->param.lead_num;
    const int pc_offset = (int)ISEQ_BODY(iseq)->param.opt_table[given_opts];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    opt_hist[pc_offset < OPT_HIST_MAX ? given_opts : OPT_HIST_MAX]++;
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, pc_offset);
}
static void
args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq, const rb_callable_method_entry_t *cme,
VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
VALUE *const locals);
/* Call an iseq whose parameter list is `...` (forwardable).  The callee's
 * frame is widened by the caller's argument count, and the caller's call
 * info is stored on the stack so the callee's `...` local can forward it. */
static VALUE
vm_call_iseq_forwardable(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                         struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int fwd_argc = vm_ci_argc(calling->cd->ci);

    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    /* Forwarded arguments enlarge both the parameter area and the locals. */
    int param_size = ISEQ_BODY(iseq)->param.size + fwd_argc;
    int local_size = ISEQ_BODY(iseq)->local_table_size + fwd_argc;

    /* Stash the caller's CI on the stack where `...` expects it. */
    cfp->sp[0] = (VALUE)calling->cd->ci;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
}
/* Fast path: callee has only keyword parameters and the call site passes
 * literal keywords (VM_CALL_KWARG).  Bind the supplied keywords into the
 * callee's keyword locals, then push a normal frame. */
static VALUE
vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE * const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    // keyword locals live just below the kw bits slot in the parameter area
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    // copy the keyword values aside: args_setup_kw_parameters writes klocals,
    // which may overlap the region the values were read from
    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
/* Fast path: callee has only keyword parameters and the call site passes no
 * keywords at all — fill every keyword local from its default value. */
static VALUE
vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE * const argv = cfp->sp - calling->argc;
    // keyword locals live just below the kw bits slot in the parameter area
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }
    klocals[i] = INT2FIX(0); // kw specify flag
    // NOTE:
    //   nobody checks this value, but it should be cleared because it can
    //   point at an invalid VALUE (T_NONE objects, raw pointers and so on).

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
static VALUE builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
/* Invoke a zero-argument leaf builtin directly, without pushing a frame.
 * Installed as a fastpath only for iseqs flagged BUILTIN_ATTR_SINGLE_NOARG_LEAF. */
static VALUE
vm_call_single_noarg_leaf_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    rb_insn_func_t invoker = (rb_insn_func_t)(uintptr_t)calling->cc->aux_.bf->func_ptr;

    /* Drop receiver + arguments from the stack before the call. */
    cfp->sp -= calling->argc + 1;

    return builtin_invoker0(ec, calling->recv, NULL, invoker);
}
VALUE rb_gen_method_name(VALUE owner, VALUE name); // in vm_backtrace.c
/* Emit (at most once per call site) a warning that a block was passed to a
 * method whose body never uses one.  In relaxed mode the dedup key is the
 * method id; in strict mode it is a mix of the caller's pc and the method
 * definition pointer, so each distinct call site warns separately. */
static void
warn_unused_block(const rb_callable_method_entry_t *cme, const rb_iseq_t *iseq, void *pc)
{
    rb_vm_t *vm = GET_VM();
    set_table *dup_check_table = vm->unused_block_warning_table;
    st_data_t key;
    bool strict_unused_block = rb_warning_category_enabled_p(RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK);

    // byte-level views of pc and cme->def, used to build the strict dedup key
    union {
        VALUE v;
        unsigned char b[SIZEOF_VALUE];
    } k1 = {
        .v = (VALUE)pc,
    }, k2 = {
        .v = (VALUE)cme->def,
    };

    // relax check
    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (set_table_lookup(dup_check_table, key)) {
            return;
        }
    }

    // strict check
    // make unique key from pc and me->def pointer
    key = 0;
    for (int i=0; i<SIZEOF_VALUE; i++) {
        // fprintf(stderr, "k1:%3d k2:%3d\n", k1.b[i], k2.b[SIZEOF_VALUE-1-i]);
        // XOR pc bytes against reversed def bytes to spread both into the key
        key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);
    }

    if (0) {
        // debug dump, compiled out
        fprintf(stderr, "SIZEOF_VALUE:%d\n", SIZEOF_VALUE);
        fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
        fprintf(stderr, "key:%p\n", (void *)key);
    }

    // duplication check
    if (set_insert(dup_check_table, key)) {
        // already shown
    }
    else if (RTEST(ruby_verbose) || strict_unused_block) {
        VALUE m_loc = rb_method_entry_location((const rb_method_entry_t *)cme);
        VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);

        if (!NIL_P(m_loc)) {
            rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",
                    name, RARRAY_AREF(m_loc, 0), RARRAY_AREF(m_loc, 1));
        }
        else {
            rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
        }
    }
}
/* Bind the caller's arguments to the callee iseq's parameters and, where
 * possible, install a specialized fastpath on the call cache.
 *
 * Fast cases handled inline: positional-only ("simple") iseqs,
 * optional-positional-only iseqs, keyword-only iseqs without a splat, and
 * forwardable (`...`) iseqs.  Everything else falls through to
 * setup_parameters_complex().
 *
 * argv points at the first argument on the VM stack; param_size/local_size
 * describe the callee's frame layout.  Returns the opt_pc byte offset at
 * which execution of the callee should start (0 unless optional parameters
 * were skipped). */
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    // comma expression — always asserts true; presumably kept only to
    // reference ci under VM_CHECK_MODE. TODO(review): confirm intent.
    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    // Warn when a block is passed to a method whose body never uses one
    // (skipped for send/super, where the target is not statically known).
    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);
    }

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            // positional-only callee: argc must equal lead_num exactly
            rb_control_frame_t *cfp = ec->cfp;
            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
            }

            //VM_ASSERT(ci == calling->cd->ci);
            VM_ASSERT(cc == calling->cc);

            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
                    ruby_vm_c_events_enabled == 0) {
                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    // cache the builtin's function pointer for direct dispatch
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);
                }
                else {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
                }
            }
            return 0;
        }
        else if (rb_iseq_only_optparam_p(iseq)) {
            rb_control_frame_t *cfp = ec->cfp;

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE * const ci_keywords = kw_arg->keywords;
                    // copy keyword values aside before klocals is written,
                    // since the two regions may overlap on the stack
                    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }

    // Called iseq is using ... param
    // def foo(...) # <- iseq for foo will have "forwardable"
    //
    // We want to set the `...` local to the caller's CI
    // foo(1, 2) # <- the ci for this should end up as `...`
    //
    // So hopefully the stack looks like:
    //
    //   => 1
    //   => 2
    //   => *
    //   => **
    //   => &
    //   => ... # <- points at `foo`s CI
    //   => cref_or_me
    //   => specval
    //   => type
    //
    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
            struct rb_forwarding_call_data * forward_cd = (struct rb_forwarding_call_data *)calling->cd;
            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(
                        vm_ci_mid(ci),
                        vm_ci_flag(ci),
                        vm_ci_argc(ci),
                        vm_ci_kwarg(ci));
            }
            else {
                ci = forward_cd->caller_ci;
            }
            can_fastpath = false;
        }
        // C functions calling iseqs will stack allocate a CI,
        // so we need to convert it to heap allocated
        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(
                    vm_ci_mid(ci),
                    vm_ci_flag(ci),
                    vm_ci_argc(ci),
                    vm_ci_kwarg(ci));
            can_fastpath = false;
        }
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
        return 0;
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
/* Rearrange the stack for a forwarded call `bar(...)` made from a method
 * that itself took `...`: copy the argc forwarded arguments (and optionally
 * the elements of a splat array) from the caller's local area down over the
 * CI slot at the top of the stack, leaving sp past the copied args. */
static void
vm_adjust_stack_forwarding(const struct rb_execution_context_struct *ec, struct rb_control_frame_struct *cfp, int argc, VALUE splat)
{
    // This case is when the caller is using a ... parameter.
    // For example `bar(...)`. The call info will have VM_CALL_FORWARDING
    // In this case the caller's caller's CI will be on the stack.
    //
    // For example:
    //
    // def bar(a, b); a + b; end
    // def foo(...); bar(...); end
    // foo(1, 2) # <- this CI will be on the stack when we call `bar(...)`
    //
    // Stack layout will be:
    //
    // > 1
    // > 2
    // > CI for foo(1, 2)
    // > cref_or_me
    // > specval
    // > type
    // > receiver
    // > CI for foo(1, 2), via `getlocal ...`
    // > ( SP points here )
    const VALUE * lep = VM_CF_LEP(cfp);

    const rb_iseq_t *iseq;

    // If we're in an escaped environment (lambda for example), get the iseq
    // from the captured env.
    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
        rb_env_t * env = (rb_env_t *)lep[VM_ENV_DATA_INDEX_ENV];
        iseq = env->iseq;
    }
    else { // Otherwise use the lep to find the caller
        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
    }

    // Our local storage is below the args we need to copy
    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;

    const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1); // 2 for EP values
    VALUE * to = cfp->sp - 1; // clobber the CI

    if (RTEST(splat)) {
        to -= 1; // clobber the splat array
        CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));
        MEMCPY(to, RARRAY_CONST_PTR(splat), VALUE, RARRAY_LEN(splat));
        to += RARRAY_LEN(splat);
    }

    CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
    MEMCPY(to, from, VALUE, argc);
    cfp->sp = to + argc;

    // Stack layout should now be:
    //
    // > 1
    // > 2
    // > CI for foo(1, 2)
    // > cref_or_me
    // > specval
    // > type
    // > receiver
    // > 1
    // > 2
    // > ( SP points here )
}
/* Generic entry point for calling an ISeq-defined method whose parameter
 * list is not forwardable: bind the arguments, then push the frame. */
static VALUE
vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(calling->cc)->def);
    const int param_size = ISEQ_BODY(iseq)->param.size;
    const int local_size = ISEQ_BODY(iseq)->local_table_size;
    VALUE *argv = cfp->sp - calling->argc;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, argv, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
}
/* Entry point for calling an ISeq whose parameter list is `...`
 * (forwardable): the frame is widened by the caller's argument count
 * before the arguments are bound. */
static VALUE
vm_call_iseq_fwd_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(calling->cc)->def);
    const int fwd_argc = vm_ci_argc(calling->cd->ci);

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    /* Forwarded arguments enlarge both the parameter area and the locals. */
    const int param_size = ISEQ_BODY(iseq)->param.size + fwd_argc;
    const int local_size = ISEQ_BODY(iseq)->local_table_size + fwd_argc;

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
}
/* Dispatch between the normal frame push and tailcall frame reuse,
 * depending on the call site's VM_CALL_TAILCALL flag. */
static inline VALUE
vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                     int opt_pc, int param_size, int local_size)
{
    if (UNLIKELY(vm_ci_flag(calling->cd->ci) & VM_CALL_TAILCALL)) {
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
    }
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(calling->cc),
                                     opt_pc, param_size, local_size);
}
/* Push a method frame for an iseq call.  The arguments are already arranged
 * on the stack; sp for the new frame is placed just past the parameter area,
 * and the caller's sp is rewound below the receiver slot.  Returns Qundef to
 * signal the interpreter to continue in the new frame. */
static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    return Qundef;
}
/* Tailcall frame setup: pop the current frame and reuse its stack space for
 * the callee.  Arguments are copied from their current position (src_argv)
 * down to the reclaimed area after self is pushed.  A block handler captured
 * from the frame being popped must be re-pointed at the previous frame so it
 * survives the pop.  Returns Qundef to continue in the new frame. */
static inline VALUE
vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc)
{
    const struct rb_callcache *cc = calling->cc;
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    // preserve the FINISH flag so the interpreter loop still unwinds correctly
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        // the block handler points into the frame we are about to pop;
        // re-capture it against the previous control frame
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);

    cfp->sp = sp_orig;

    return Qundef;
}
/* Ractor-unsafe C methods may only run on the main ractor; raise otherwise. */
static void
ractor_unsafe_check(void)
{
    if (rb_ractor_main_p()) return;
    rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
}
/* call_cfunc_<N>: invoker stubs bridging the VM's (recv, argc, argv) calling
 * convention to C methods of fixed arity N (0..15).  Arity -2 receives
 * (recv, args_array); arity -1 receives (argc, argv, recv).  These variants
 * first check that we are on the main ractor, because the target method was
 * not registered as ractor-safe. */
static VALUE
call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, rb_ary_new4(argc, argv));
}

static VALUE
call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}

static VALUE
call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}

static VALUE
call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}

static VALUE
call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}

static VALUE
call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}

static VALUE
call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
/* ractor_safe_call_cfunc_<N>: identical bridging stubs for C methods that
 * were registered as ractor-safe, so no main-ractor check is performed. */
static VALUE
ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, rb_ary_new4(argc, argv));
}

static VALUE
ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}

static VALUE
ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}

static VALUE
ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}

static VALUE
ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}

static VALUE
ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}

static VALUE
ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
/* After a cfunc returns, the frame stack must sit exactly one frame below
 * reg_cfp.  The only tolerated mismatch is a stack-overflow unwind, whose
 * raised flag is consumed here. */
static inline int
vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
{
    if (LIKELY(reg_cfp == ec->cfp + 1)) {
        return TRUE;
    }
    else if (rb_ec_raised_p(ec, RAISED_STACKOVERFLOW)) {
        rb_ec_raised_reset(ec, RAISED_STACKOVERFLOW);
        return TRUE;
    }
    else {
        return FALSE;
    }
}
/* Abort with rb_bug if a cfunc left the control-frame stack inconsistent.
 * `func` is a string literal naming the caller for the bug message. */
#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
/* Return the cfunc body of a callable method entry.  Under
 * VM_DEBUG_VERIFY_METHOD_CACHE, hard-fail if the entry is not actually a
 * CFUNC/NOTIMPLEMENTED method (i.e. a stale method cache was used). */
static inline
const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
/* Invoke a C-implemented method with a full CFUNC frame.
 * argv points at the arguments; stack_bottom is where sp is rewound before
 * the invocation and where it must be again after the frame is popped.
 * Fires c-call/c-return events and dtrace hooks around the call. */
static VALUE
vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                          int argc, VALUE *argv, VALUE *stack_bottom)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

    if (UNLIKELY(calling->kw_splat)) {
        // record that the last argument is a keyword hash
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    VM_ASSERT(reg_cfp == ec->cfp);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    int len = cfunc->argc;
    // non-negative cfunc->argc means fixed arity; negative means variadic
    if (len >= 0) rb_check_arity(argc, len, len);

    reg_cfp->sp = stack_bottom;
    val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    VM_ASSERT(ec->cfp->sp == stack_bottom);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
// Push a C method frame for a given cme. This is called when JIT code skipped
// pushing a frame but the C method reached a point where a frame is needed.
// recv_idx: distance (in VALUE slots) from sp-1 down to the receiver slot.
void
rb_vm_push_cfunc_frame(const rb_callable_method_entry_t *cme, int recv_idx)
{
    VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);

    rb_execution_context_t *ec = GET_EC();
    VALUE *sp = ec->cfp->sp;
    VALUE recv = *(sp - recv_idx - 1);
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    VALUE block_handler = VM_BLOCK_HANDLER_NONE;
#if VM_CHECK_MODE > 0
    // Clean up the stack canary since we're about to satisfy the "leaf or lazy push" assumption
    *(GET_EC()->cfp->sp) = Qfalse;
#endif
    vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
}
// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. can't be skipped in fastpath)
bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}
/* Thin wrapper for the common case: the arguments sit on the VM stack
 * directly above the receiver slot. */
static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const int nargs = calling->argc;
    VALUE *recv_slot = reg_cfp->sp - (nargs + 1);  /* receiver, then nargs args */
    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, nargs, recv_slot + 1, recv_slot);
}
/* Generic cfunc call path for non-simple argument forms.
 * CALLER_SETUP_ARG may spill the arguments into a hidden heap array
 * (calling->heap_argv); in that case pass the array contents directly to
 * the frame call.  Otherwise, install vm_call_cfunc_with_frame as the
 * fastpath when the call shape permits skipping caller setup next time. */
static VALUE
vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    VALUE argv_ary;
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        VM_ASSERT(!IS_ARGS_KEYWORD(ci));
        int argc = RARRAY_LENINT(argv_ary);
        VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
        /* stack holds receiver + the spill array */
        VALUE *stack_bottom = reg_cfp->sp - 2;

        VM_ASSERT(calling->argc == 1);
        VM_ASSERT(RB_TYPE_P(argv_ary, T_ARRAY));
        VM_ASSERT(RBASIC_CLASS(argv_ary) == 0); // hidden ary

        return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    }
    else {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));

        return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    }
}
/* Expand a splatted array (located at sp[-1 - stack_offset]) onto the VM
 * stack and call the cfunc with a frame.  argc_offset trims trailing
 * elements (used to drop an empty keyword hash).  Falls back to the
 * generic path when the expansion would exceed VM_ARGC_STACK_MAX. */
static inline VALUE
vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int stack_offset, int argc_offset)
{
    VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
    int argc = RARRAY_LENINT(argv_ary) - argc_offset;

    if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }

    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    calling->kw_splat = 0;
    int i;
    /* overwrite the splat array (and any skipped slots) in place:
     * stack_bottom is the receiver slot */
    VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
    VALUE *sp = stack_bottom;
    CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
    for(i = 0; i < argc; i++) {
        *++sp = argv[i];
    }
    reg_cfp->sp = sp+1;

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
}
/* Fast path for f(*a) into a cfunc: the splatted array sits on stack top.
 * If its last element is a hash flagged RHASH_PASS_AS_KEYWORDS, a
 * non-empty hash forces the generic path; an empty one is dropped via
 * argc_offset. */
static inline VALUE
vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
    VALUE argv_ary = reg_cfp->sp[-1];
    int argc = RARRAY_LENINT(argv_ary);
    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    VALUE last_hash;
    int argc_offset = 0;

    if (UNLIKELY(argc > 0 &&
                 RB_TYPE_P((last_hash = argv[argc-1]), T_HASH) &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
        if (!RHASH_EMPTY_P(last_hash)) {
            return vm_call_cfunc_other(ec, reg_cfp, calling);
        }
        argc_offset++;
    }
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
}
/* Fast path for f(*a, **kw) into a cfunc: usable only when the keyword
 * argument on stack top is nil or an empty hash, in which case it is
 * simply skipped (stack_offset 1); otherwise take the generic path. */
static inline VALUE
vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
    VALUE kw = reg_cfp->sp[-1];
    bool droppable = (kw == Qnil) || (RB_TYPE_P(kw, T_HASH) && RHASH_EMPTY_P(kw));
    if (!droppable) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
}
/* Entry point for calling a cfunc method: pick a specialized splat fast
 * path when the call shape allows it and record the choice in the call
 * cache, otherwise use the generic path. */
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
            // f(*a)
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
            return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
        }
        if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
            // f(*a, **kw)
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
            return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
        }
    }

    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
    return vm_call_cfunc_other(ec, reg_cfp, calling);
}
/* Attribute reader (attr_reader-style IVAR method): pop the receiver and
 * fetch the instance variable named by the cached method definition. */
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    const struct rb_callcache *cache = calling->cc;
    cfp->sp -= 1; /* pop the receiver */
    return vm_getivar(calling->recv, vm_cc_cme(cache)->def->body.attr.id, NULL, NULL, cache, TRUE, Qnil);
}
/* Attribute writer (attr_writer-style ATTRSET method) on obj: pop value
 * and receiver, then try the shape-cached fast write.  On a miss (Qundef)
 * dispatch by receiver type, and only if that also misses fall through to
 * the generic slow path. */
static VALUE
vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2; /* pop value and receiver */
    attr_index_t index;
    shape_id_t dest_shape_id;
    vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
    ID id = vm_cc_cme(cc)->def->body.attr.id;
    rb_check_frozen(obj);
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    if (UNDEF_P(res)) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
            /* plain object: go straight to the slow path below */
            break;
          case T_CLASS:
          case T_MODULE:
            {
                res = vm_setivar_class(obj, id, val, dest_shape_id, index);
                if (!UNDEF_P(res)) {
                    return res;
                }
            }
            break;
          default:
            {
                res = vm_setivar_default(obj, id, val, dest_shape_id, index);
                if (!UNDEF_P(res)) {
                    return res;
                }
            }
        }
        res = vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    return res;
}
/* Cached-fastpath entry for attribute writers: delegate to the direct
 * implementation with the receiver and call cache from `calling`. */
static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cache = calling->cc;
    VALUE receiver = calling->recv;
    return vm_call_attrset_direct(ec, cfp, cache, receiver);
}
/* Invoke a bmethod (method defined from a Proc) through the generic proc
 * invocation path.  A proc defined in a different Ractor is rejected
 * unless the proc object itself is shareable. */
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor_id != rb_ec_ractor_id(ec)) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}
static int vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type);

/* Call a bmethod whose underlying block is an iseq: set up the arguments
 * in place on the VM stack and push a BLOCK|BMETHOD|LAMBDA frame directly
 * instead of going through generic proc invocation.  Returns Qundef with
 * the new frame pushed on top. */
static VALUE
vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    /* same Ractor-shareability rule as vm_call_bmethod_body */
    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor_id != rb_ec_ractor_id(ec)) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    /* peel nested proc wrappers down to the captured iseq block */
    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    VM_ASSERT(vm_block_type(block) == block_type_iseq);

    const struct rb_captured_block *captured = &block->as.captured;
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE * const argv = cfp->sp - calling->argc;
    const int arg_size = ISEQ_BODY(iseq)->param.size;

    int opt_pc;
    if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
        opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
    }
    else {
        opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
    }

    cfp->sp = argv - 1; // -1 for the receiver

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
                  calling->recv,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)cme,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  argv + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size,
                  ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
/* Call a bmethod whose underlying block is not an iseq.  Arguments are
 * taken from the heap-argv spill when present, otherwise copied off the
 * VM stack into an alloca buffer before the stack is popped. */
static VALUE
vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

    VALUE *argv;
    int argc;
    CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(calling->heap_argv)) {
        argv = RARRAY_PTR(calling->heap_argv);
        /* pop receiver and spill array */
        cfp->sp -= 2;
    }
    else {
        argc = calling->argc;
        argv = ALLOCA_N(VALUE, argc);
        MEMCPY(argv, cfp->sp - argc, VALUE, argc);
        /* pop arguments and receiver */
        cfp->sp += - argc - 1;
    }

    return vm_call_bmethod_body(ec, calling, argv);
}
/* Dispatch a bmethod: peel nested proc wrappers to find the underlying
 * block, then route to the iseq or non-iseq variant, caching that choice
 * as the call fastpath. */
static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    rb_proc_t *proc;
    GetProcPtr(vm_cc_cme(calling->cc)->def->body.bmethod.proc, proc);

    const struct rb_block *blk = &proc->block;
    while (vm_block_type(blk) == block_type_proc) {
        blk = vm_proc_block(blk->as.proc);
    }

    if (vm_block_type(blk) == block_type_iseq) {
        CC_SET_FASTPATH(calling->cc, vm_call_iseq_bmethod, TRUE);
        return vm_call_iseq_bmethod(ec, cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_noniseq_bmethod, TRUE);
    return vm_call_noniseq_bmethod(ec, cfp, calling);
}
/* Walk the ancestry of current_class to find the entry whose owner is
 * target_owner; for a prepended module's origin iclass, start the walk
 * from its attached class.  Falls back to current_class when no ancestor
 * matches (maybe module function). */
VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE k = current_class;

    /* for prepended Module, then start from cover class */
    if (RB_TYPE_P(k, T_ICLASS) && RICLASS_IS_ORIGIN_P(k) &&
        RB_TYPE_P(RBASIC_CLASS(k), T_CLASS)) {
        k = RBASIC_CLASS(k);
    }

    for (; RTEST(k); k = RCLASS_SUPER(k)) {
        VALUE owner = RB_TYPE_P(k, T_ICLASS) ? RBASIC_CLASS(k) : k;
        if (owner == target_owner) {
            return k;
        }
    }

    return current_class; /* maybe module function */
}
/* Resolve the callable method entry behind an ALIAS definition.  When the
 * original entry has no defined_class, complement it against the class
 * hierarchy and memoize the result back into the alias: in place when this
 * def has a single reference, otherwise via a fresh definition so other
 * holders of the shared def are unaffected. */
static const rb_callable_method_entry_t *
aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->reference_count == 1) {
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
        }
        else {
            rb_method_definition_t *def =
                rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
            rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
        }
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));
    return cme;
}
/* Public wrapper around the static alias resolver. */
const rb_callable_method_entry_t *
rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    const rb_callable_method_entry_t *resolved = aliased_callable_method_entry(me);
    return resolved;
}
/* Call an aliased method: swap the call cache for an on-stack cache that
 * points at the resolved original entry, then re-dispatch by method type.
 * NOTE(review): the replacement cc lives on this C stack frame —
 * presumably only valid for the duration of this call; confirm. */
static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
/* Derive the method_missing reason bits from the call-info flags:
 * VCALL (unless forwarding), FCALL, and SUPER each contribute a bit. */
static enum method_missing_reason
ci_missing_reason(const struct rb_callinfo *ci)
{
    const unsigned int flags = vm_ci_flag(ci);
    enum method_missing_reason stat = MISSING_NOENTRY;

    if ((flags & VM_CALL_VCALL) && !(flags & VM_CALL_FORWARDING)) {
        stat |= MISSING_VCALL;
    }
    if (flags & VM_CALL_FCALL) {
        stat |= MISSING_FCALL;
    }
    if (flags & VM_CALL_SUPER) {
        stat |= MISSING_SUPER;
    }
    return stat;
}
static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);

/* Dispatch a call by symbol (the send family): resolve `symbol` to an ID
 * and re-enter method dispatch with a fresh on-stack CI/CC.  When the
 * symbol has no existing ID, rewrite the call into method_missing —
 * raising NoMethodError eagerly when method_missing is still the basic
 * definition, so no symbol is ever interned for it.  Non-FCALL sends also
 * check visibility here and route private/protected hits to
 * method_missing with the matching reason. */
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    ASSUME(calling->argc >= 0);

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol);
    flags |= VM_CALL_OPT_SEND;

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        VALUE argv_ary;
        if (UNLIKELY(argv_ary = calling->heap_argv)) {
            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                rb_ary_unshift(argv_ary, symbol);

                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                VALUE exc = rb_make_no_method_exception(
                    rb_eNoMethodError, 0, recv, RARRAY_LENINT(argv_ary), RARRAY_CONST_PTR(argv_ary), priv);

                rb_exc_raise(exc);
            }
            rb_ary_unshift(argv_ary, rb_str_intern(symbol));
        }
        else {
            /* E.g. when argc == 2
             *
             *   |      |          |      |  TOPN
             *   |      |    +---> |      |
             *   |      |    |     +------+
             *   |      |    +---> | arg1 |    0
             *   +------+    |     +------+
             *   | arg1 | -+ +->   | arg0 |    1
             *   +------+  |       +------+
             *   | arg0 | ---+     | sym  |    2
             *   +------+          +------+
             *   | recv |          | recv |    3
             * --+------+--------+------+------
             */
            int i = argc;
            CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
            INC_SP(1);
            MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
            argc = ++calling->argc;

            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                TOPN(i) = symbol;
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
                VALUE exc = rb_make_no_method_exception(
                    rb_eNoMethodError, 0, recv, argc, argv, priv);

                rb_exc_raise(exc);
            }
            else {
                TOPN(i) = rb_str_intern(symbol);
            }
        }
    }

    /* build fresh call data; for forwarding calls, preserve the caller CI */
    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        /* NOTE(review): the comma expression always yields 1, so this
         * assertion is a no-op — confirm the intended condition. */
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
/* send/__send__ with arguments on the VM stack: the deepest argument is
 * the method-name symbol; shift the remaining arguments down one slot
 * (see diagram) and dispatch by symbol.  Raises ArgumentError when no
 * name was given. */
static VALUE
vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int flags)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    int i;
    VALUE sym;

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }

    sym = TOPN(i);
    /* E.g. when i == 2
     *
     *   |      |          |      |  TOPN
     *   +------+          |      |
     *   | arg1 | ---+     |      |    0
     *   +------+    |     +------+
     *   | arg0 | -+ +->   | arg1 |    1
     *   +------+  |       +------+
     *   | sym  |  +--->   | arg0 |    2
     *   +------+          +------+
     *   | recv |          | recv |    3
     * --+------+--------+------+------
     */
    /* shift arguments */
    if (i > 0) {
        MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
    }
    calling->argc -= 1;
    DEC_SP(1);

    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
}
/* send with non-simple argument forms.  With a heap-argv spill, the method
 * name is shifted off the spilled array and a trailing kw-splat hash is
 * flagged RHASH_PASS_AS_KEYWORDS so it survives re-dispatch; otherwise
 * fall through to the stack-based path with KW_SPLAT carried in flags. */
static VALUE
vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = VM_CALL_FCALL;
    VALUE sym;

    VALUE argv_ary;
    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        sym = rb_ary_shift(argv_ary);
        flags |= VM_CALL_ARGS_SPLAT;
        if (calling->kw_splat) {
            VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
            calling->kw_splat = 0;
        }
        return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    }

    if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
    return vm_call_opt_send0(ec, reg_cfp, calling, flags);
}
/* send with a simple argument shape: forward the caller's CI flags,
 * forcing the FCALL bit. */
static VALUE
vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
    const int fwd_flags = vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL;
    return vm_call_opt_send0(ec, reg_cfp, calling, fwd_flags);
}
/* Dispatcher for the optimized send method.  The complex path is taken
 * for forwarding calls or non-simple argument shapes: a lone splat (with
 * or without kw-splat), splat plus kw-splat, or keyword args covering
 * every argument.  The chosen path is cached as the fastpath. */
static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = vm_ci_flag(ci);

    if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
        CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
        return vm_call_opt_send_complex(ec, reg_cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
    return vm_call_opt_send_simple(ec, reg_cfp, calling);
}
/* Rewrite the current call into recv.method_missing(:mid, args...):
 * prepend the original method-name symbol to the arguments on the stack,
 * record `reason` in the EC, and re-enter vm_call_method targeting
 * idMethodMissing with fresh on-stack call data. */
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(flag & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        /* NOTE(review): the comma expression always yields 1, so this
         * assertion is a no-op — confirm the intended condition. */
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}
/* Route a call to method_missing using the reason recorded in the call
 * cache. */
static VALUE
vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    enum method_missing_reason reason = vm_cc_cmethod_missing_reason(calling->cc);
    return vm_call_method_missing_body(ec, reg_cfp, calling, ci, reason);
}
static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);

/* Dispatch a ZSUPER method definition: look the method up starting from
 * the superclass of `klass`, unwrap a refined entry to its original, and
 * re-dispatch by type with an on-stack call cache.  Missing entries go to
 * the no-method-entry path. */
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
/* Look up klass in a refinements hash; Qnil when there is no hash or no
 * entry for klass. */
static inline VALUE
find_refinement(VALUE refinements, VALUE klass)
{
    return NIL_P(refinements) ? Qnil : rb_hash_lookup(refinements, klass);
}
PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
/* If cfp is a block frame, walk up to the frame whose iseq is the block's
 * local (enclosing method) iseq.  Returns the starting cfp when the walk
 * runs off the stack (orphan block). */
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
/* Strip the refinement wrapper from a refined method entry, yielding the
 * original (pre-refinement) callable entry.  Raises rb_notimplement()
 * when the original entry has no defined class; returns NULL when the
 * original entry is undefined. */
static const rb_callable_method_entry_t *
refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        cme = NULL;
        rb_notimplement();
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));

    if (UNDEFINED_METHOD_ENTRY_P(cme)) {
        cme = NULL;
    }

    return cme;
}
/* Find the method entry to invoke for a refined method call: scan the
 * cref chain for an active refinement of the method's owner and prefer a
 * refining definition.  For super calls, a refinement whose definition is
 * the one currently executing is skipped (presumably to avoid re-entering
 * it).  With no applicable refinement, fall back to the original method,
 * or to a superclass lookup when no original is recorded. */
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->cd->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                /* super call: skip the refinement we are already in */
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
        return cme;
    }
}
/* Call a refined method.  When a concrete entry is found, install it into
 * the call cache: the real (heap) call data when present, otherwise a
 * temporary on-stack cache.  With no entry, treat the call as missing. */
static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);

    if (ref_cme) {
        if (calling->cd->cc) {
            const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
            /* write barrier: the cc is now reachable from the iseq */
            RB_OBJ_WRITE(cfp->iseq, &calling->cd->cc, cc);
            return vm_call_method(ec, cfp, calling);
        }
        else {
            struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
            calling->cc= ref_cc;
            return vm_call_method(ec, cfp, calling);
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);

NOINLINE(static VALUE
         vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                                  struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));

/* Shared body for optimized Proc#call-style invocation: drop the receiver
 * slot from the stack and invoke the block handler as a non-lambda block. */
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}
/* Optimized Proc#call: the receiver is the Proc itself; invoke it as a
 * block handler. */
static VALUE
vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_call);
    return vm_invoke_block_opt_call(ec, reg_cfp, calling, calling->cd->ci,
                                    VM_BH_FROM_PROC(calling->recv));
}
/* Optimized invocation of the frame's block.  When BOP_CALL on Proc is
 * unredefined, invoke the block handler directly; otherwise materialize
 * the handler as a Proc and go through full method dispatch. */
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->cd->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}
/* Core of the optimized Struct member reader: fetch the member at the
 * index recorded in the optimized method definition. */
static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE self = calling->recv;

    VM_ASSERT(RB_TYPE_P(self, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int idx = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return RSTRUCT_GET_RAW(self, idx);
}
/* Optimized Struct member reader: read first, then pop the receiver. */
static VALUE
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE result = vm_call_opt_struct_aref0(ec, calling);
    reg_cfp->sp -= 1;
    return result;
}
/* Core of the optimized Struct member writer: store `val` at the index
 * recorded in the optimized method definition and return it.  Frozen
 * receivers are rejected. */
static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE self = calling->recv;

    VM_ASSERT(RB_TYPE_P(self, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(self);

    const unsigned int idx = vm_cc_cme(calling->cc)->def->body.optimized.index;
    RSTRUCT_SET_RAW(self, idx, val);
    return val;
}
/* Optimized Struct member writer: the value to store is on stack top;
 * after the write, pop both value and receiver. */
static VALUE
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE stored = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
    reg_cfp->sp -= 2;
    return stored;
}
NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                                        const struct rb_callinfo *ci, const struct rb_callcache *cc));

/* Evaluate `func` into `var`.  When any C-call event hooks are enabled,
 * fire C_CALL before and C_RETURN after; otherwise run `nohook` first
 * (used by callers for cache setup) and call `func` uninstrumented. */
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_c_events_enabled > 0)) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }
/* Dispatch an OPTIMIZED method definition (send, Proc#call, block call,
 * Struct accessors) to its specialized implementation, recording the
 * choice as the call-cache fastpath.  Struct accessors check arity here
 * and go through VM_CALL_METHOD_ATTR for C-call event hooks. */
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aref(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        rb_check_arity(calling->argc, 1, 1);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aset(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
/* Central dispatch over the method definition type.  For most types the
 * specialized handler is also installed as the call-cache fastpath; for
 * ATTRSET with an unmarkable cc, a temporary on-stack cc is built so the
 * shape/index can still be tracked. */
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
            CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
            return vm_call_iseq_fwd_setup(ec, cfp, calling);
        }
        else {
            CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
            return vm_call_iseq_setup(ec, cfp, calling);
        }

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci, 1);

        rb_check_arity(calling->argc, 1, 1);

        /* argument shapes that prevent the attrset fastpath */
        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            /* unmarkable cc: build a temporary on-stack cc carrying the
             * shape/index slot */
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        /* argument shapes that prevent the ivar fastpath */
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        // CC_SET_FASTPATH(cc, vm_call_refined, TRUE);
        // should not set FASTPATH since vm_call_refined assumes cc->call is vm_call_super_method on invokesuper.
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
/* Slow path taken when method search found no entry: raise NoMethodError
 * directly when method_missing itself is the missing method, otherwise
 * dispatch to #method_missing. */
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->cd->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        /* vm_raise_method_missing is NORETURN, so neither branch falls
         * through here */
        if (UNLIKELY(calling->heap_argv)) {
            vm_raise_method_missing(ec, RARRAY_LENINT(calling->heap_argv), RARRAY_CONST_PTR(calling->heap_argv), calling->recv, stat);
        }
        else {
            /* reg_cfp is required by the STACK_ADDR_FROM_TOP macro */
            rb_control_frame_t *reg_cfp = cfp;
            VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
            vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
        }
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}
/* Protected method calls and super invocations must check that the receiver
 * (self for super) inherits the module defining the method.  For a method
 * defined in a refinement, the check has to use the refined class, not the
 * refinement module itself. */
static VALUE
vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
{
    VALUE klass = me->defined_class;
    VALUE refined = RCLASS_REFINED_CLASS(klass);

    if (!NIL_P(refined)) {
        klass = refined;
    }
    return klass;
}
/* Main call entry after method search: apply Ruby's visibility rules
 * (public/private/protected) and then dispatch on the method type, or fall
 * back to the method_missing path when no entry exists. */
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                /* private method called with an explicit receiver */
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info to dummy cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);

                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
/* Generic call-cache handler: runs the full visibility and method-type
 * dispatch on every call. */
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}
/* Reset CC's handler back to the generic slow path.  The cast deliberately
 * drops const on the cached handler slot to overwrite it in place. */
void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}
/* Call handler used for `super` dispatch.  Behaviorally identical to
 * vm_call_general, but it must stay a distinct function: its address is
 * compared elsewhere (see the comments below). */
static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    // This line is introduced to make different from `vm_call_general` because some compilers (VC we found)
    // can merge the function and the address of the function becomes same.
    // The address of `vm_call_super_method` is used in `search_refined_method`, so it should be different.
    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish with other functions. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
/* super */

/* Compute the class where `super` continues the method lookup from KLASS:
 * unwrap a refinement iclass back to the refinement module, skip origin
 * iclasses, then take the superclass. */
static inline VALUE
vm_search_normal_superclass(VALUE klass)
{
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
        RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
        FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}
NORETURN(static void vm_super_outside(void));

/* Raise NoMethodError for `super` used outside of a method body. */
static void
vm_super_outside(void)
{
    rb_raise(rb_eNoMethodError, "super called outside of method");
}
/* Shared, GC-unmarkable empty call cache used when `super` finds no
 * method entry. */
static const struct rb_callcache *
empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
}
/* Resolve the callee for super/zsuper: validate the calling context, rewrite
 * the call info to the original method id, search the superclass chain, and
 * cache the result in CD. */
static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = vm_defined_class_for_protected_call(me);

    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
            rb_raise(rb_eTypeError,
                     "self has wrong type to call super in this context: "
                     "%"PRIsVALUE" (expected %"PRIsVALUE")",
                     rb_obj_class(recv), m);
        }
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    /* super always continues the lookup under the ORIGINAL method name,
     * even when the current method was reached through an alias */
    ID mid = me->def->original_id;

    if (!vm_ci_markable(cd->ci)) {
        VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
    }
    else {
        // update iseq. really? (TODO)
        cd->ci = vm_ci_new_runtime(mid,
                                   vm_ci_flag(cd->ci),
                                   vm_ci_argc(cd->ci),
                                   vm_ci_kwarg(cd->ci));

        RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);
    }

    const struct rb_callcache *cc;

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of module */
        cc = vm_cc_new(Qundef, NULL, vm_call_method_missing, cc_type_super);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
    }
    else {
        cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
        const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

        // define_method can cache for different method id
        if (cached_cme == NULL) {
            // empty_cc_for_super is not markable object
            cd->cc = empty_cc_for_super();
        }
        else if (cached_cme->called_id != mid) {
            const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
            if (cme) {
                cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
                RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
            }
            else {
                cd->cc = cc = empty_cc_for_super();
            }
        }
        else {
            switch (cached_cme->def->type) {
              // vm_call_refined (search_refined_method) assumes cc->call is vm_call_super_method on invokesuper
              case VM_METHOD_TYPE_REFINED:
              // cc->klass is superclass of receiver class. Checking cc->klass is not enough to invalidate IVC for the receiver class.
              case VM_METHOD_TYPE_ATTRSET:
              case VM_METHOD_TYPE_IVAR:
                vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
                break;
              default:
                break; // use fastpath
            }
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));

    return cc;
}
/* yield */

/* Return nonzero iff PROCVAL wraps a lambda; PROCVAL may be 0 meaning
 * "no proc", which is never a lambda. */
static inline int
block_proc_is_lambda(const VALUE procval)
{
    rb_proc_t *proc;

    if (!procval) return 0;

    GetProcPtr(procval, proc);
    return proc->is_lambda;
}
/* Invoke an ifunc (C-function) block.  Non-lambda blocks receive only
 * argv[0] (or nil when argc == 0) as `arg`; the full argc/argv is still
 * passed to the ifunc.  ME, when non-NULL, marks a bmethod frame. */
static VALUE
vm_yield_with_cfunc(rb_execution_context_t *ec,
                    const struct rb_captured_block *captured,
                    VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
                    const rb_callable_method_entry_t *me)
{
    int is_lambda = FALSE; /* TODO */
    VALUE val, arg, blockarg;
    int frame_flag;
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    if (is_lambda) {
        arg = rb_ary_new4(argc, argv);
    }
    else if (argc == 0) {
        arg = Qnil;
    }
    else {
        arg = argv[0];
    }

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
    if (kw_splat) {
        frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    }

    /* the frame is pushed around the call so backtraces and ec state see
     * the ifunc frame, then popped regardless of the result */
    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  frame_flag,
                  self,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;
}
/* Public wrapper: yield to CAPTURED's ifunc with its own self, no keyword
 * splat, no block argument, and no method entry. */
VALUE
rb_vm_yield_with_cfunc(rb_execution_context_t *ec, const struct rb_captured_block *captured, int argc, const VALUE *argv)
{
    return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
}
/* Yield to a Symbol block handler (&:sym) by invoking the symbol's proc
 * semantics on the given arguments. */
static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}
/* Splat array ARY across the leading block parameters of ISEQ, writing into
 * ARGV.  Returns how many slots were filled (at most lead_num, at most the
 * array length). */
static inline int
vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
{
    const long len = RARRAY_LEN(ary);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    int n = 0;

    CHECK_VM_STACK_OVERFLOW(cfp, lead_num);

    while (n < len && n < lead_num) {
        argv[n] = RARRAY_AREF(ary, n);
        n++;
    }

    return n;
}
/* Check whether argv[0] can be treated as an Array for arg0 splatting.
 * Returns the converted array or nil.  rb_check_array_type must not replace
 * argv[0]; the disabled branch documents that invariant. */
static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    argv[0] = arg0;
#else
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}
/* Prepare block/lambda arguments in ARGV for ISEQ.  For a simple iseq this
 * applies block-argument semantics (arg0 splat, nil padding, truncation);
 * lambdas instead enforce strict arity.  Non-simple iseqs fall back to the
 * full parameter setup.  Returns the opt_pc offset. */
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            /* a single array argument is splatted across the block params */
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                /* lambdas check arity strictly, like methods */
                argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
/* Set up yield arguments with a dummy on-stack call info/calling frame and
 * delegate to vm_callee_setup_block_arg.  Returns the opt_pc offset. */
static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int flags, VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
    calling->recv = Qundef;
    calling->heap_argv = 0;
    calling->cc = NULL;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
/* ruby iseq -> ruby block */

/* Invoke an iseq-backed block: set up arguments in place on the VM stack,
 * push the block frame, and return Qundef so the caller's VM loop executes
 * the pushed frame. */
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    /* rsp points at the first argument; it becomes the new frame's base */
    VALUE * const rsp = GET_SP() - calling->argc;
    VALUE * const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
    int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  frame_flag,
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
/* Invoke a Symbol block handler (&:sym): the first argument becomes the
 * receiver and the rest are passed to the named method. */
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        /* complex argument passing: normalize first, possibly spilling
         * arguments to a heap array */
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
        flags = 0;
        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0
            if (RARRAY_LEN(calling->heap_argv) < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
#endif
            calling->recv = rb_ary_shift(calling->heap_argv);
            // Modify stack to avoid cfp consistency error
            reg_cfp->sp++;
            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;
        }
        else {
            if (calling->argc < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
            calling->recv = TOPN(--calling->argc);
        }
        if (calling->kw_splat) {
            flags |= VM_CALL_KW_SPLAT;
        }
    }
    else {
        if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
        }
        calling->recv = TOPN(--calling->argc);
    }

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
}
/* Invoke an ifunc (C-function) block through vm_yield_with_cfunc, popping
 * the stack-passed arguments afterwards. */
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    /* arguments may live either on the heap (heap_argv) or on the VM stack */
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}
/* Convert a Proc object into the VM's tagged block-handler representation
 * according to the kind of block it wraps. */
static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}
/* Unwrap nested Proc block handlers until a non-proc handler remains, then
 * invoke it.  Lambda-ness follows the innermost proc that was unwrapped. */
static VALUE
vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    for (;;) {
        if (vm_block_handler_type(block_handler) != block_handler_type_proc) {
            break;
        }
        VALUE procval = VM_BH_TO_PROC(block_handler);
        is_lambda = block_proc_is_lambda(procval);
        block_handler = vm_proc_to_block_handler(procval);
    }
    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
/* Dispatch a block invocation on the handler's type
 * (iseq / ifunc / proc / symbol). */
static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_callinfo *ci,
                bool is_lambda, VALUE block_handler)
{
    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
        return vm_invoke_iseq_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
      case block_handler_type_ifunc:
        return vm_invoke_ifunc_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
      case block_handler_type_proc:
        return vm_invoke_proc_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
      case block_handler_type_symbol:
        return vm_invoke_symbol_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
      default:
        rb_bug("vm_invoke_block: unreachable");
    }
}
/* Create a Proc from BLOCKISEQ, captured against the nearest Ruby-level
 * control frame of the current execution context. */
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}
/* `once` insn body: wrap ISEQ in a proc and call it with no arguments. */
static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}
/* Cleanup for the `once` insn: clear the running-thread marker in the
 * inline storage so the `once` region can be retried. */
static VALUE
vm_once_clear(VALUE data)
{
    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
    is->once.running_thread = NULL;
    return Qnil;
}
/* defined insn */

/* True iff `v.respond_to_missing?(obj, false)` is defined and returns a
 * truthy value (rb_check_funcall yields Qundef when it is not defined). */
static bool
check_respond_to_missing(VALUE obj, VALUE v)
{
    VALUE args[2];

    args[0] = obj;
    args[1] = Qfalse;

    VALUE r = rb_check_funcall(v, idRespond_to_missing, 2, args);
    return !UNDEF_P(r) && RTEST(r);
}
/* `defined?` implementation: return whether the expression described by
 * OP_TYPE/OBJ/V is defined, per the semantics of each defined_type. */
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
        break;
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
        break;
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
        break;
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
        break;
      }
      case DEFINED_FUNC:
        /* NOTE: this assignment to klass is a dead store (the return uses
         * v directly); kept as-is */
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
        break;
      case DEFINED_METHOD:{
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* fall through: protected and self is kind_of the definer */
              case METHOD_VISI_PUBLIC:
                return true;
                break;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER:
        {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                if (!klass) return false;

                ID id = me->def->original_id;

                return rb_method_boundp(klass, id, 0);
            }
        }
        break;
      case DEFINED_REF:
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}
/* Non-static wrapper around vm_defined for use from JIT-generated code. */
bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
/* Walk LV levels up the environment-pointer chain starting at REG_EP and
 * return the resulting ep. */
static const VALUE *
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
{
    const VALUE *ep = reg_ep;

    while (lv--) {
        ep = GET_PREV_EP(ep);
    }
    return ep;
}
/* putspecialobject insn: return the requested VM-special object
 * (frozen core, cbase, or constant definition base). */
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
// ZJIT implementation is using the C function
// and needs to call a non-static function
VALUE
rb_vm_get_special_object(const VALUE *reg_ep, enum vm_special_object_type type)
{
    return vm_get_special_object(reg_ep, type);
}
/* concatarray insn: return a fresh array of ARY1's elements followed by
 * ARY2ST's.  Neither operand is mutated: ARY1 is duplicated when it is
 * already an Array, or wrapped as [ary1] otherwise; a non-convertible ARY2
 * is pushed as a single element. */
static VALUE
vm_concat_array(VALUE ary1, VALUE ary2st)
{
    const VALUE ary2 = ary2st;
    VALUE tmp1 = rb_check_to_array(ary1);
    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp1)) {
        /* not convertible to Array: wrap as a one-element array */
        tmp1 = rb_ary_new3(1, ary1);
    }
    if (tmp1 == ary1) {
        /* avoid mutating the operand in place */
        tmp1 = rb_ary_dup(ary1);
    }

    if (NIL_P(tmp2)) {
        return rb_ary_push(tmp1, ary2);
    }
    else {
        return rb_ary_concat(tmp1, tmp2);
    }
}
/* Append ARY2ST to ARY1 in place.  ARY1 must be a freshly created array,
 * which is why mutating it (unlike vm_concat_array) is safe. */
static VALUE
vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    /* ary1 must be a newly created array */
    const VALUE ary2 = ary2st;

    if (NIL_P(ary2)) return ary1;

    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp2)) {
        return rb_ary_push(ary1, ary2);
    }
    else {
        return rb_ary_concat(ary1, tmp2);
    }
}
// YJIT implementation is using the C function
// and needs to call a non-static function
VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}
/* Non-static wrapper around vm_concat_to_array for JIT use. */
VALUE
rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_to_array(ary1, ary2st);
}
/* splatarray insn: convert ARY to an Array for splatting.  FLAG requests a
 * private (dup'ed) copy; otherwise the conversion result may be shared
 * (including the global frozen empty array for nil). */
static VALUE
vm_splat_array(VALUE flag, VALUE ary)
{
    if (NIL_P(ary)) {
        return RTEST(flag) ? rb_ary_new() : rb_cArray_empty_frozen;
    }
    VALUE tmp = rb_check_to_array(ary);
    if (NIL_P(tmp)) {
        /* not convertible: splat as a single element */
        return rb_ary_new3(1, ary);
    }
    else if (RTEST(flag)) {
        return rb_ary_dup(tmp);
    }
    else {
        return tmp;
    }
}
// YJIT implementation is using the C function
// and needs to call a non-static function
VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}
/* checkmatch insn: run case/when (or rescue) matching of TARGET against
 * PATTERN.  With VM_CHECKMATCH_ARRAY set, PATTERN is an array and the first
 * truthy match is returned (Qfalse when none matches). */
static VALUE
vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        long i;
        const long n = RARRAY_LEN(pattern);

        for (i = 0; i < n; i++) {
            VALUE v = RARRAY_AREF(pattern, i);
            VALUE c = check_match(ec, v, target, type);

            if (RTEST(c)) {
                return c;
            }
        }
        return Qfalse;
    }
    else {
        return check_match(ec, pattern, target, type);
    }
}
/* Non-static wrapper around vm_check_match for JIT use. */
VALUE
rb_vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    return vm_check_match(ec, target, pattern, flag);
}
/* checkkeyword insn: Qtrue iff keyword IDX was NOT supplied by the caller
 * (so the default-value expression must run).  kw_bits at *(ep - bits) is
 * either a Fixnum bitmap of supplied keywords or a Hash of supplied keys. */
static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < VM_KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
    }
    return Qtrue;
}
/* Fire the DTrace probe matching FLAG, if any DTrace method probes are
 * enabled at all. */
static void
vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
{
    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

        switch (flag) {
          case RUBY_EVENT_CALL:
            RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_CALL:
            RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_RETURN:
            RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_RETURN:
            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
            return;
        }
    }
}
/* Fetch constant ID defined directly under CBASE, or 0 when it is not
 * defined there.  A scoped definition (Foo::Bar) must respect constant
 * visibility, hence the public-only lookup. */
static VALUE
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{
    if (!rb_const_defined_at(cbase, id)) {
        return 0;
    }
    if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
    }
    return rb_const_get_at(cbase, id);
}
/* Validate an existing constant KLASS for a class redefinition: return
 * KLASS if it is a Class with a matching superclass, 0 if it is not a
 * Class, and raise TypeError on superclass mismatch (rb_raise does not
 * return). */
static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}
/* Return MOD when it is a Module, otherwise 0.  The id parameter is unused
 * but kept for signature symmetry with vm_check_if_class. */
static VALUE
vm_check_if_module(ID id, VALUE mod)
{
    return RB_TYPE_P(mod, T_MODULE) ? mod : 0;
}
/* Bind the new class/module C to constant ID under CBASE, setting its
 * class path first so the name is available during rb_const_set hooks. */
static VALUE
declare_under(ID id, VALUE cbase, VALUE c)
{
    rb_set_class_path_string(c, cbase, rb_id2str(id));
    rb_const_set(cbase, id, c);
    return c;
}
/* Create and register a brand-new class ID under CBASE, defaulting the
 * superclass to Object, inheriting the allocator, and firing `inherited`. */
static VALUE
vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    /* new class declaration */
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
    VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
    rb_define_alloc_func(c, rb_get_alloc_func(c));
    rb_class_inherited(s, c);
    return c;
}
/* Create and register a brand-new module ID under CBASE. */
static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}
NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));

/* Raise TypeError for redefining constant ID as a TYPE ("class"/"module")
 * when the existing value is not one, pointing at the previous definition's
 * source location when known. */
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
}
/* defineclass insn (class branch): reopen an existing, compatible class or
 * declare a new one under CBASE. */
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    rb_autoload_load(cbase, id);

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}
/* defineclass insn (module branch): reopen an existing module or declare a
 * new one under CBASE. */
static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);

    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}
/* defineclass insn entry: dispatch on the declaration kind encoded in
 * FLAGS (class, singleton class, or module). */
static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
/* Default visibility for a method defined in the current Ruby-level scope;
 * public when no cref-based scope information is available. */
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}
/* Nonzero iff the current Ruby-level scope is in `module_function` mode;
 * FALSE when no cref-based scope information is available. */
static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *frame = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (vm_env_cref_by_cref(frame->ep)) {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
    return FALSE;
}
/* definemethod/definesmethod insn body: add the iseq as a method on the
 * appropriate class (or singleton class), applying scope visibility and
 * `module_function` duplication. */
static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    // Set max_iv_count on klasses based on number of ivar sets that are in the initialize method
    if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) && (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {
        RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        /* module_function: also define a public singleton-class copy */
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
// Return the untagged block handler:
// * If it's VM_BLOCK_HANDLER_NONE, return nil
// * If it's an ISEQ or an IFUNC, fetch it from its rb_captured_block
// * If it's a PROC or SYMBOL, return it as is
VALUE
rb_vm_untag_block_handler(VALUE block_handler)
{
    if (VM_BLOCK_HANDLER_NONE == block_handler) return Qnil;

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
      case block_handler_type_ifunc: {
        /* strip the low tag bits to recover the captured-block pointer */
        struct rb_captured_block *captured = VM_TAGGED_PTR_REF(block_handler, 0x03);
        return captured->code.val;
      }
      case block_handler_type_proc:
      case block_handler_type_symbol:
        return block_handler;
      default:
        rb_bug("rb_vm_untag_block_handler: unreachable");
    }
}
/* Convenience: untag the block handler of the given control frame. */
VALUE
rb_vm_get_untagged_block_handler(rb_control_frame_t *reg_cfp)
{
    return rb_vm_untag_block_handler(VM_CF_BLOCK_HANDLER(reg_cfp));
}
/* invokeblock insn body: yield to the frame's block handler, raising a
 * LocalJumpError (no return) when no block was given. */
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}
/* How vm_sendish should resolve its callee. */
enum method_explorer_type {
    mexp_search_method,      /* normal method search on the receiver */
    mexp_search_invokeblock, /* yield to the current frame's block */
    mexp_search_super,       /* super dispatch */
};
/* Common tail of the send-family instructions: resolve the callee according
 * to METHOD_EXPLORER and invoke the call-cache handler.  May return Qundef
 * when a Ruby frame was pushed for the caller's VM loop to execute. */
static inline VALUE
vm_sendish(
    struct rb_execution_context_struct *ec,
    struct rb_control_frame_struct *reg_cfp,
    struct rb_call_data *cd,
    VALUE block_handler,
    enum method_explorer_type method_explorer
) {
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    /* the receiver sits just below the arguments on the VM stack */
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .cd = cd,
    };

    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
    return val;
}
/* `send` instruction body: set up the block handler, perform the call, and
 * run any pushed Ruby frame to completion. */
VALUE
rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    stack_check(ec);
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VM_EXEC(ec, val);
    return val;
}
/* `sendforward` instruction body: like rb_vm_send but rewrites the call
 * data for `...` argument forwarding, copying a markable cc back into the
 * original call data afterwards. */
VALUE
rb_vm_sendforward(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    stack_check(ec);

    struct rb_forwarding_call_data adjusted_cd;
    struct rb_callinfo adjusted_ci;

    VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);
    VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);

    /* keep the cached cc when the adjusted call produced a new markable one */
    if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
        RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
    }

    VM_EXEC(ec, val);
    return val;
}
VALUE
rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
stack_check(ec);
VALUE bh = VM_BLOCK_HANDLER_NONE;
VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
VM_EXEC(ec, val);
return val;
}
VALUE
rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
stack_check(ec);
VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
VM_EXEC(ec, val);
return val;
}
VALUE
rb_vm_invokesuperforward(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
stack_check(ec);
struct rb_forwarding_call_data adjusted_cd;
struct rb_callinfo adjusted_ci;
VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);
VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
}
VM_EXEC(ec, val);
return val;
}
VALUE
rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
stack_check(ec);
VALUE bh = VM_BLOCK_HANDLER_NONE;
VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
VM_EXEC(ec, val);
return val;
}
/* object.c */
VALUE rb_nil_to_s(VALUE);
VALUE rb_true_to_s(VALUE);
VALUE rb_false_to_s(VALUE);

/* numeric.c */
VALUE rb_int_to_s(int argc, VALUE *argv, VALUE x);
VALUE rb_fix_to_s(VALUE);

/* variable.c */
VALUE rb_mod_to_s(VALUE);
VALUE rb_mod_name(VALUE);

/* Fast path for the `objtostring` instruction (string interpolation).
 * Returns the stringified receiver when #to_s is known to be the default
 * definition for the receiver's type, or Qundef to fall back to a full
 * method call.  Strings are returned as-is without checking #to_s. */
static VALUE
vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    int type = TYPE(recv);

    if (type == T_STRING) {
        return recv;
    }

    const struct rb_callable_method_entry_struct *cme = vm_search_method((VALUE)iseq, cd, recv);

    switch (type) {
      case T_SYMBOL:
        if (check_method_basic_definition(cme)) {
            // rb_sym_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(cme, rb_mod_to_s)) {
            // rb_mod_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            VALUE val = rb_mod_name(recv);
            if (NIL_P(val)) {
                /* Anonymous module/class: fall back to the allocating form. */
                val = rb_mod_to_s(recv);
            }
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(cme, rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(cme, rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(cme, rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(cme, rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}

// ZJIT implementation is using the C function
// and needs to call a non-static function
VALUE
rb_vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    return vm_objtostring(iseq, recv, cd);
}
/* Fast path for `opt_ary_freeze`: a frozen array literal can be returned
 * as-is while Array's basic operation is unredefined; Qundef defers to
 * the generic call path. */
static VALUE
vm_opt_ary_freeze(VALUE ary, int bop, ID id)
{
    return BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG) ? ary : Qundef;
}
/* Fast path for `opt_hash_freeze`: hand back the hash literal unchanged
 * while Hash's basic operation is unredefined; Qundef defers to the
 * generic call path. */
static VALUE
vm_opt_hash_freeze(VALUE hash, int bop, ID id)
{
    return BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG) ? hash : Qundef;
}
/* Fast path for `opt_str_freeze`: hand back the string literal unchanged
 * while String's basic operation is unredefined; Qundef defers to the
 * generic call path. */
static VALUE
vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    return BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG) ? str : Qundef;
}
/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
#define id_cmp idCmp

/* `[lit, ...].include?(target)` on a duplicated array literal.  When
 * Array#include? is unredefined, test against the shared literal without
 * duplicating; otherwise materialize the dup and do a real call. */
static VALUE
vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};

        // duparray
        RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
        VALUE dupary = rb_ary_resurrect(ary);

        return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

/* Non-static wrapper for JIT use. */
VALUE
rb_vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    return vm_opt_duparray_include_p(ec, ary, target);
}

/* `[a, b, ...].max` without allocating the array, using OPTIMIZED_CMP,
 * as long as Array#max is unredefined.  Empty input yields nil. */
static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (array_len == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = array_len - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        /* Redefined: build the real array and make a real call. */
        return rb_vm_call_with_refinements(ec, rb_ary_new4(array_len, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}

/* Non-static wrapper for JIT use. */
VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, array_len, ptr);
}

/* `[a, b, ...].min` counterpart of vm_opt_newarray_max(). */
static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (array_len == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = array_len - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(array_len, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}

/* Non-static wrapper for JIT use. */
VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, array_len, ptr);
}

/* `[a, b, ...].hash` without allocating the array when possible. */
static VALUE
vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr)
{
    // If Array#hash is _not_ monkeypatched, use the optimized call
    if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_hash_values(array_len, ptr);
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(array_len, ptr), idHash, 0, NULL, RB_NO_KEYWORDS);
    }
}

/* Non-static wrapper for JIT use. */
VALUE
rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr)
{
    return vm_opt_newarray_hash(ec, array_len, ptr);
}
VALUE rb_setup_fake_ary(struct RArray *fake_ary, const VALUE *list, long len);
VALUE rb_ec_pack_ary(rb_execution_context_t *ec, VALUE ary, VALUE fmt, VALUE buffer);

/* `[a, b, ...].include?(target)` without allocating the array: wraps the
 * stack slice in a stack-allocated fake RArray when Array#include? is
 * unredefined; otherwise allocates and makes a real call. */
static VALUE
vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary = {RBASIC_INIT};
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, array_len);
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};
        return rb_vm_call_with_refinements(ec, rb_ary_new4(array_len, ptr), idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

/* Non-static wrapper for JIT use. */
VALUE
rb_vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr, VALUE target)
{
    return vm_opt_newarray_include_p(ec, array_len, ptr, target);
}

/* `[a, b, ...].pack(fmt, buffer: buf)` fast path.  `buffer` is Qundef
 * when the caller passed no buffer keyword. */
static VALUE
vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary = {RBASIC_INIT};
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, array_len);
        return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
    }
    else {
        // The opt_newarray_send insn drops the keyword args so we need to rebuild them.
        // Setup an array with room for keyword hash.
        VALUE args[2];
        args[0] = fmt;
        int kw_splat = RB_NO_KEYWORDS;
        int argc = 1;

        if (!UNDEF_P(buffer)) {
            args[1] = rb_hash_new_with_size(1);
            rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
            kw_splat = RB_PASS_KEYWORDS;
            argc++;
        }

        return rb_vm_call_with_refinements(ec, rb_ary_new4(array_len, ptr), idPack, argc, args, kw_splat);
    }
}

/* Non-static wrapper for JIT use. */
VALUE
rb_vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    return vm_opt_newarray_pack_buffer(ec, array_len, ptr, fmt, buffer);
}

/* pack without a buffer keyword: Qundef marks "no buffer given". */
VALUE
rb_vm_opt_newarray_pack(rb_execution_context_t *ec, rb_num_t array_len, const VALUE *ptr, VALUE fmt)
{
    return vm_opt_newarray_pack_buffer(ec, array_len, ptr, fmt, Qundef);
}
#undef id_cmp

/* Register inline constant cache `ic` as depending on constant name `id`,
 * so that redefinition of the constant can invalidate the cache.  The
 * per-ID set of ICs lives in vm->constant_cache, created lazily here. */
static void
vm_track_constant_cache(ID id, void *ic)
{
    rb_vm_t *vm = GET_VM();
    struct rb_id_table *const_cache = vm->constant_cache;
    VALUE lookup_result;
    set_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (set_table *)lookup_result;
    }
    else {
        ics = set_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    /* The call below to set_insert could allocate which could trigger a GC.
     * If it triggers a GC, it may free an iseq that also holds a cache to this
     * constant. If that iseq is the last iseq with a cache to this constant, then
     * it will free this set table, which would cause an use-after-free during this
     * set_insert.
     *
     * So to fix this issue, we store the ID that is currently being inserted
     * and, in remove_from_constant_cache, we don't free the set table for ID
     * equal to this one.
     *
     * See [Bug #20921].
     */
    vm->inserting_constant_cache_id = id;

    set_insert(ics, (st_data_t)ic);

    vm->inserting_constant_cache_id = (ID)0;
}

/* Register `ic` for every constant name segment in the NULL-terminated
 * `segments` list (e.g. each part of Foo::Bar::Baz).  idNULL segments
 * (relative-lookup markers) are skipped.  Runs under the VM lock. */
static void
vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
{
    RB_VM_LOCKING() {
        for (int i = 0; segments[i]; i++) {
            ID id = segments[i];
            if (id == idNULL) continue;
            vm_track_constant_cache(id, ic);
        }
    }
}
// For JIT inlining
/* Core validity test for an inline constant cache entry: the cached value
 * must be usable from this Ractor (shareable, or we are on the main
 * Ractor) and the cached CREF (if any) must match the current one. */
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || // no need to check CREF
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}

/* Entry-level wrapper over vm_inlined_ic_hit_p(). */
static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}

// YJIT needs this function to never allocate and never raise
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}

/* Install a fresh cache entry for `ic` holding `val`, or clear the cache
 * when const_missing was triggered during the lookup (such results must
 * not be cached).  Also notifies YJIT of the updated cache. */
static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice = SHAREABLE_IMEMO_NEW(struct iseq_inline_constant_cache_entry, imemo_constcache, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);

    if (rb_ractor_shareable_p(val)) {
        RUBY_ASSERT((rb_gc_verify_shareable(val), 1));
        ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    }

    /* Write barrier: ice is reachable from the iseq. */
    RB_OBJ_WRITE(iseq, &ic->entry, ice);

    RUBY_ASSERT(pc >= ISEQ_BODY(iseq)->iseq_encoded);
    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
}
/* `opt_getconstant_path` instruction: return the cached constant value on
 * a cache hit; otherwise resolve the constant chain, register the cache
 * for invalidation, and refill it. */
VALUE
rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, IC ic)
{
    VALUE val;
    const ID *segments = ic->segments;
    struct iseq_inline_constant_cache_entry *ice = ic->entry;
    if (ice && vm_ic_hit_p(ice, GET_EP())) {
        val = ice->value;

        /* Cached value must agree with a fresh lookup. */
        VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
    }
    else {
        ruby_vm_constant_cache_misses++;
        val = vm_get_ev_const_chain(ec, segments);
        vm_ic_track_const_chain(GET_CFP(), ic, segments);
        // Undo the PC increment to get the address to this instruction
        // INSN_ATTR(width) == 2
        vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
    }
    return val;
}
/* Run a `once` iseq (e.g. /.../o regexp literal) exactly one time and
 * cache its value in `is`.  is->once.running_thread encodes the state:
 * NULL = not started, a thread pointer = running on that thread,
 * RUNNING_THREAD_ONCE_DONE = completed.  Other threads spin (yielding)
 * until the first finishes; the running thread re-entering executes the
 * body again to avoid self-deadlock. */
static VALUE
vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
{
    rb_thread_t *th = rb_ec_thread_ptr(ec);
    rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);

  again:
    if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
        return is->once.value;
    }
    else if (is->once.running_thread == NULL) {
        VALUE val;
        is->once.running_thread = th;
        val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
        // TODO: confirm that it is shareable
        if (RB_FL_ABLE(val)) {
            RB_OBJ_SET_SHAREABLE(val);
        }
        RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
        /* is->once.running_thread is cleared by vm_once_clear() */
        is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
        return val;
    }
    else if (is->once.running_thread == th) {
        /* recursive once */
        return vm_once_exec((VALUE)iseq);
    }
    else {
        /* waiting for finish */
        RUBY_VM_CHECK_INTS(ec);
        rb_thread_schedule();

        goto again;
    }
}
/* `opt_case_dispatch`: jump-table lookup for case/when over literal keys.
 * Returns the branch offset for `key`, `else_offset` when the key is not
 * in the table, or 0 to fall back to sequential `when` tests (key type
 * not optimizable, or a relevant #=== was redefined). */
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1: /* special const (Fixnum, nil, true, false, Symbol, ...) */
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                /* Integral floats compare == to Integers, so normalize
                 * them to an Integer key before the table lookup. */
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
NORETURN(static void
         vm_stack_consistency_error(const rb_execution_context_t *ec,
                                    const rb_control_frame_t *,
                                    const VALUE *));
/* Abort on a VM stack pointer / base pointer mismatch.  Development
 * builds attach the iseq disassembly to the fatal error for debugging;
 * release builds just rb_bug(). */
static void
vm_stack_consistency_error(const rb_execution_context_t *ec,
                           const rb_control_frame_t *cfp,
                           const VALUE *bp)
{
    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    rb_str_cat_cstr(mesg, "\n");
    rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
    rb_exc_fatal(rb_exc_new3(rb_eFatal, mesg));
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
}
/* Fast paths for the arithmetic opt_* instructions.  Each returns the
 * result when both operands have a known type and the corresponding
 * basic operation is unredefined, or Qundef to fall back to a full
 * method call.  NOTE: the SPECIAL_CONST_P guard must stay before the
 * RBASIC_CLASS checks — RBASIC_CLASS is only valid for heap objects. */

/* `a + b` for Fixnum, Float (flonum or boxed), String and Array. */
static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             RBASIC_CLASS(obj) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             RBASIC_CLASS(obj) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}

/* `a - b` for Fixnum and Float. */
static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

/* `a * b` for Fixnum and Float. */
static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

/* `a / b` for Fixnum and Float.  Integer division by zero falls back to
 * the slow path (Qundef) so the proper ZeroDivisionError is raised. */
static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}

/* `a % b` for Fixnum and Float; integer modulo by zero defers like div. */
static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}
/* `a != b` fast path: only valid when #!= is the default (negated #==)
 * implementation; delegates the equality test to opt_equality. */
static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }

    return Qundef;
}
/* Comparison fast paths (<, <=, >, >=) for Fixnum and Float.  Tagged
 * Fixnums preserve ordering under their raw signed bit patterns, so the
 * Fixnum branch compares the SIGNED_VALUEs directly.  As with the
 * arithmetic helpers, SPECIAL_CONST_P must be checked before
 * RBASIC_CLASS. */

static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_gt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ge(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
/* `a << b` fast path for String append and Array push. */
static VALUE
vm_opt_ltlt(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        else {
            /* Non-string rhs (e.g. Integer codepoint) takes the generic path. */
            return rb_str_concat(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    else {
        return Qundef;
    }
}

/* `a & b` fast path for Fixnum. */
static VALUE
vm_opt_and(VALUE recv, VALUE obj)
{
    // If recv and obj are both fixnums, then the bottom tag bit
    // will be 1 on both. 1 & 1 == 1, so the result value will also
    // be a fixnum. If either side is *not* a fixnum, then the tag bit
    // will be 0, and we return Qundef.
    VALUE ret = ((SIGNED_VALUE) recv) & ((SIGNED_VALUE) obj);

    if (FIXNUM_P(ret) &&
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        return ret;
    }
    else {
        return Qundef;
    }
}

/* `a | b` fast path for Fixnum.  OR-ing the tagged representations is
 * correct directly: both tag bits are 1 and 1|1 == 1. */
static VALUE
vm_opt_or(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
        return recv | obj;
    }
    else {
        return Qundef;
    }
}
/* `recv[obj]` fast path for Integer#[], Array#[] and Hash#[]. */
static VALUE
vm_opt_aref(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        if (FIXNUM_2_P(recv, obj) &&
            BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
            return rb_fix_aref(recv, obj);
        }
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
        if (FIXNUM_P(obj)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
        }
        else {
            /* Range/float/etc. index goes through the generic Array#[]. */
            return rb_ary_aref1(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
        return rb_hash_aref(recv, obj);
    }
    else {
        return Qundef;
    }
}

/* `recv[obj] = set` fast path for Array and Hash element assignment.
 * Returns `set` (the assigned value) on success, Qundef to defer. */
static VALUE
vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
             FIXNUM_P(obj)) {
        rb_ary_store(recv, FIX2LONG(obj), set);
        return set;
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
        rb_hash_aset(recv, obj, set);
        return set;
    }
    else {
        return Qundef;
    }
}
/* Shared fast path for length/size/empty? on String, Array and Hash.
 * `bop` selects which basic operation's redefinition flag to honor. */
static VALUE
vm_opt_length(VALUE recv, int bop)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        if (bop == BOP_EMPTY_P) {
            /* empty? only needs "is it zero?", so the cheap byte length
             * suffices; String#length proper must count characters. */
            return LONG2NUM(RSTRING_LEN(recv));
        }
        else {
            return rb_str_length(recv);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return LONG2NUM(RARRAY_LEN(recv));
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return INT2FIX(RHASH_SIZE(recv));
    }
    else {
        return Qundef;
    }
}

/* `recv.empty?` fast path: true iff the optimized length is zero;
 * Qundef propagates the fallback. */
static VALUE
vm_opt_empty_p(VALUE recv)
{
    switch (vm_opt_length(recv, BOP_EMPTY_P)) {
      case Qundef: return Qundef;
      case INT2FIX(0): return Qtrue;
      default: return Qfalse;
    }
}
VALUE rb_false(VALUE obj);

/* `recv.nil?` fast path: true for nil (when NilClass#nil? is intact),
 * false when the receiver's #nil? is the default rb_false cfunc. */
static VALUE
vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (NIL_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
        return Qtrue;
    }
    else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
        return Qfalse;
    }
    else {
        return Qundef;
    }
}

/* Increment a tagged Fixnum in place, handling the two edge encodings
 * (-1 wrapping to 0, and overflow into a Bignum). */
static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* 0xFFFF_FFFF == INT2FIX(-1)
         * `-1.succ` is of course 0. */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
         * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
         * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
         * == lx*2 + ly*2 + 1
         * == (lx*2+1) + (ly*2+1) - 1
         * == x + y - 1
         *
         * Here, if we put y := INT2FIX(1):
         *
         * == x + INT2FIX(1) - 1
         * == x + 2 .
         */
        return x + 2;
    }
}

/* `recv.succ` fast path for Fixnum and String. */
static VALUE
vm_opt_succ(VALUE recv)
{
    if (FIXNUM_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
        return fix_succ(recv);
    }
    else if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_succ(recv);
    }
    else {
        return Qundef;
    }
}

/* `!recv` fast path when #! is the default rb_obj_not cfunc. */
static VALUE
vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
        return RBOOL(!RTEST(recv));
    }
    else {
        return Qundef;
    }
}
/* `recv =~ obj` fast path.  Handles both operand orders: String =~
 * Regexp (rb_reg_match is called with the regexp first) and
 * Regexp =~ anything. */
static VALUE
vm_opt_regexpmatch2(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             CLASS_OF(obj) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
        return rb_reg_match(obj, recv);
    }
    else if (RBASIC_CLASS(recv) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
        return rb_reg_match(recv, obj);
    }
    else {
        return Qundef;
    }
}
rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);

NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));

/* Fire one trace event (exactly one bit of `target_event`) against the
 * global hooks and, if present, the iseq/bmethod-local hooks.  The PC is
 * temporarily bumped because the source line is derived from PC-1. */
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *local_hooks, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (local_hooks) local_hooks->running++; // make sure they don't get deleted while global hooks run
    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }
    if (local_hooks) local_hooks->running--;

    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}

/* Fire `target_event` only if it is both pending at this PC and enabled. */
#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
    } \
} while (0)

/* Fetch the errinfo local slot of a rescue-type frame (the exception
 * being handled), used as the value for RUBY_EVENT_RESCUE. */
static VALUE
rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
    VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
    return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
}
/* Dispatch all trace events pending at the current PC: collect the
 * enabled ractor-global, iseq-local and bmethod-local hook sets, then
 * fire each event class in a fixed order.  Returns early when nothing is
 * enabled or when we are already inside a trace callback. */
static void
vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
{
    const VALUE *pc = reg_cfp->pc;
    rb_ractor_t *r = rb_ec_ractor_ptr(ec);
    rb_event_flag_t enabled_flags = r->pub.hooks.events & ISEQ_TRACE_EVENTS;
    rb_event_flag_t ractor_events = enabled_flags;

    if (enabled_flags == 0 && rb_ractor_targeted_hooks_cnt(r) == 0) {
        return;
    }
    else {
        const rb_iseq_t *iseq = reg_cfp->iseq;
        size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
        rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
        unsigned int local_hooks_cnt = iseq->aux.exec.local_hooks_cnt;
        rb_hook_list_t *local_hooks = NULL;
        if (RB_UNLIKELY(local_hooks_cnt > 0)) {
            /* TracePoint#enable(target:) hooks are keyed by iseq. */
            st_data_t val;
            if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)iseq, &val)) {
                local_hooks = (rb_hook_list_t*)val;
            }
        }
        rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
        rb_hook_list_t *bmethod_local_hooks = NULL;
        rb_event_flag_t bmethod_local_events = 0;
        const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
        enabled_flags |= iseq_local_events;

        VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);

        if (bmethod_frame) {
            /* A block running as a method (define_method) also honors
             * hooks targeted at the bmethod's method definition. */
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
            VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
            unsigned int bmethod_hooks_cnt = me->def->body.bmethod.local_hooks_cnt;
            if (RB_UNLIKELY(bmethod_hooks_cnt > 0)) {
                st_data_t val;
                if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)me->def, &val)) {
                    bmethod_local_hooks = (rb_hook_list_t*)val;
                }
                if (bmethod_local_hooks) {
                    bmethod_local_events = bmethod_local_hooks->events;
                }
            }
        }

        if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
#if 0
            /* disable trace */
            /* TODO: incomplete */
            rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
#else
            /* do not disable trace because of performance problem
             * (re-enable overhead)
             */
#endif
            return;
        }
        else if (ec->trace_arg != NULL) {
            /* already tracing */
            return;
        }
        else {
            rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
            /* Note, not considering iseq local events here since the same
             * iseq could be used in multiple bmethods. */
            rb_event_flag_t bmethod_events = ractor_events | bmethod_local_events;

            if (0) {
                ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
                                  (int)pos,
                                  (int)pc_events,
                                  RSTRING_PTR(rb_iseq_path(iseq)),
                                  (int)rb_iseq_line_no(iseq, pos),
                                  RSTRING_PTR(rb_iseq_label(iseq)));
            }
            VM_ASSERT(reg_cfp->pc == pc);
            VM_ASSERT(pc_events != 0);

            /* check traces */
            if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
                /* b_call instruction running as a method. Fire call event. */
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks, Qundef);
            }
            VM_TRACE_HOOK(RUBY_EVENT_CLASS | RUBY_EVENT_CALL | RUBY_EVENT_B_CALL,   Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_RESCUE,                                        rescue_errinfo(ec, reg_cfp));
            VM_TRACE_HOOK(RUBY_EVENT_LINE,                                          Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,                                 Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,                               Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
            if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
                /* b_return instruction running as a method. Fire return event. */
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks, TOPN(0));
            }
        }
    }
}
#undef VM_TRACE_HOOK

#if VM_CHECK_MODE > 0

NORETURN( NOINLINE( COLDFUNC
void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

/* Seed the stack canary with random bytes.  The low bit is forced on so
 * the canary is a valid Fixnum VALUE and never mistaken for a pointer. */
void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ our PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01; // valid VALUE (Fixnum)
    vm_stack_canary_was_born = true;
    VM_ASSERT(n == 0);
}

/* Report a clobbered stack canary: which instruction saw it and what
 * value overwrote it.  Never returns. */
void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* Because a method has already been called, why not call
     * another one. */
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str  = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}

#else
void Init_vm_stack_canary(void) { /* nothing to do */ }
#endif
/* a part of the following code is generated by this ruby script:

16.times{|i|
  typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
  typedef_args.prepend(", ") if i != 0
  call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
  call_args.prepend(", ") if i != 0
  puts %Q{
static VALUE
builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
    return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
}}
}

puts
puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
16.times{|i|
  puts "    builtin_invoker#{i},"
}
puts "};"
*/

/* Generated trampolines: builtin_invokerN casts the opaque funcptr to a
 * function taking N VALUE arguments and spreads argv into it.  Do not
 * hand-edit; regenerate with the script above. */

static VALUE
builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
}

static VALUE
builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
}

static VALUE
builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
}

static VALUE
builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
}

static VALUE
builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}
static VALUE
builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}
static VALUE
builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}
static VALUE
builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}
static VALUE
builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}
static VALUE
builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}
static VALUE
builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}
static VALUE
builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}
static VALUE
builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}
static VALUE
builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
/* Maps an arity (0..15) to the matching builtin_invoker trampoline.
 * NOTE(review): `argc` is not range-checked here — presumably the
 * compiler guarantees 0 <= argc <= 15; confirm before reusing. */
static builtin_invoker
lookup_builtin_invoker(int argc)
{
    static const builtin_invoker table[] = {
        [0]  = builtin_invoker0,
        [1]  = builtin_invoker1,
        [2]  = builtin_invoker2,
        [3]  = builtin_invoker3,
        [4]  = builtin_invoker4,
        [5]  = builtin_invoker5,
        [6]  = builtin_invoker6,
        [7]  = builtin_invoker7,
        [8]  = builtin_invoker8,
        [9]  = builtin_invoker9,
        [10] = builtin_invoker10,
        [11] = builtin_invoker11,
        [12] = builtin_invoker12,
        [13] = builtin_invoker13,
        [14] = builtin_invoker14,
        [15] = builtin_invoker15,
    };
    return table[argc];
}
/* Invokes the builtin function `bf` with arguments `argv` on behalf of
 * the current frame.  When the enclosing iseq is marked leaf, plants a
 * stack canary around the call so a violated leaf assumption is caught. */
static inline VALUE
invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function* bf, const VALUE *argv)
{
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF; // Verify an assumption of `Primitive.attr! :leaf`
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin)); /* dies via rb_vm_canary_is_found_dead if smashed */
    return ret;
}
/* Thin wrapper around invoke_bf() for the invokebuiltin instruction:
 * arguments are already gathered in `argv`. */
static VALUE
vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
{
    return invoke_bf(ec, cfp, bf, argv);
}
/* Invokes builtin `bf`, delegating the current frame's locals (starting
 * at `start_index`) as its arguments instead of taking them from the
 * value stack. */
static VALUE
vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
{
    if (0) { // debug print
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int idx = 0; idx < bf->argc; idx++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[idx+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
                          (void *)(uintptr_t)bf->func_ptr);
    }
    const VALUE *delegated = NULL;
    if (bf->argc != 0) {
        /* Locate the first delegated local relative to the frame's EP. */
        delegated = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
    }
    return invoke_bf(ec, cfp, bf, delegated);
}
// for __builtin_inline!()
/* Exposes the current frame's local variable at EP-relative `index` to
 * inlined builtin code.  Fix: stripped extraction residue that was fused
 * onto the closing brace and the stray non-code lines that followed it,
 * which made the translation unit unparseable. */
VALUE
rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
{
    const rb_control_frame_t *cfp = ec->cfp;
    return cfp->ep[index];
}