code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""@package pySEIMS
Python APIs for SEIMS
Preprocess, postprocess, parameters sensitivity, calibration, and scenario_analysis
-------------------
author : Liangjun Zhu, Junzhi Liu
copyright : (C) 2018-2020 by Lreis, IGSNRR, CAS
email : zlj@lreis.ac.cn
******************************************************************************
* *
* SEIMS is distributed for Research and/or Education only, any commercial *
* purpose will be FORBIDDEN. SEIMS is an open-source project, but without *
* ANY WARRANTY, WITHOUT even the implied warranty of MERCHANTABILITY or *
* FITNESS for A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* *
******************************************************************************/
"""
from __future__ import absolute_import

__author__ = "SEIMS Team"
__version__ = "2.2"
__revision__ = "2.2.0"
# Public subpackages re-exported on `from pySEIMS import *`.
# (Removed dataset-table residue that had been fused onto this line and
# made the module unparseable.)
__all__ = ["preprocess", "postprocess", "calibration", "scenario_analysis",
           "parameters_sensitivity", "test"]
"""
naivefit.py
A NaiveFit follows the approach described in Crundall et al. (2019).
NaiveFit begins with an initial guess provided by user of an N component fit.
If no guess is provided, all provided stars are assumed to be members of one
component.
NaiveFit will perform an Expectation Maximisation on this N component fit until
converged.
Then NaiveFit will test increasing the compoennt count to N+1. This is done by
for each component out of the N existing, substituting it for 2 similar
components with slight age offsets, and running an EM fit. The result
is N separate "N+1 component" fits. The best one will be compared to the
"N component" fit using the Bayesian Information Criterion (BIC). If the
BIC has improved, this "N+1 component fit" will be taken as the best fit so far.
This process iterates until adding a component fails to yield a better fit.
"""
import numpy as np
import os
import sys
import logging
from distutils.dir_util import mkpath
import random
import uuid
#~ from emcee.utils import MPIPool
from multiprocessing import Pool
from multiprocessing import cpu_count
sys.path.insert(0, os.path.abspath('..'))
from . import expectmax
from . import readparam
from . import tabletool
from . import component
from . import traceorbit
from chronostar.parentfit import ParentFit
# python3 throws FileNotFoundError that is essentially the same as IOError
try:  # Probe for the builtin; exists on Python 3, raises NameError on Python 2.
    FileNotFoundError
except NameError:
    # Python 2 fallback: alias to IOError so `except FileNotFoundError`
    # clauses behave the same on both interpreters.
    FileNotFoundError = IOError
#ACW: put these into a helper module /start
def dummy_trace_orbit_func(loc, times=None):
    """Stand-in orbit tracer used only by the test suite.

    Skips the real (expensive) orbit integration. To keep age from being a
    completely free parameter, the location is shifted by a large constant
    whenever every requested time exceeds 1.

    Parameters
    ----------
    loc : array-like
        Starting location (returned unchanged unless the shift applies).
    times : array-like, optional
        Times to "trace" to; triggers the shift when all are > 1.
    """
    should_shift = times is not None and bool(np.all(times > 1.))
    return loc + 1000. if should_shift else loc
def log_message(msg, symbol='.', surround=False):
    """Log *msg* centred in a 40-char field, flanked by runs of *symbol*.

    When ``surround`` is True the line is additionally boxed between two
    50-symbol border lines.
    """
    line = '{sym}{txt:^40}{sym}'.format(sym=symbol * 5, txt=msg)
    if surround:
        border = symbol * 50
        line = '\n{}\n{}\n{}'.format(border, line, border)
    logging.info(line)
#ACW: /end
class NaiveFit(ParentFit):
    """Grow the component count one at a time (Crundall et al. 2019).

    Starting from an N-component EM fit, each existing component is trialled
    as a split into two age-offset components; the best resulting
    (N+1)-component fit replaces the current best only if its BIC improves.

    NOTE(review): data loading and the helper methods used below
    (run_em_unless_loadable, calc_score, build_init_comps, iter_end_log,
    log_score_comparison, write_results_to_file) are defined on ParentFit --
    confirm their contracts there.
    """

    def __init__(self, fit_pars):
        """
        Parameters
        ----------
        fit_pars : str -or- dictionary
            If a string, `fit_pars` should be a path to a parameter file which
            can be parsed by readparam.readParam, to construct a dictionary.
            Alternatively, an actual dictionary can be passed in. See README.md
            for a description of parameters.
        """
        super(NaiveFit, self).__init__(fit_pars)

    def run_fit(self):
        """
        Perform a fit (as described in Paper I) to a set of prepared data.

        Returns
        -------
        results : dict
            {'comps': best fit (list of components),
             'med_and_spans': median and spans of model parameters,
             'memb_probs': membership probability array (the standard one)}
        scores : dict
            {'bic': the bic,
             'lnlike': log likelihood of that run,
             'lnpost': log posterior of that run}
        """
        log_message('Beginning Chronostar run',
                    symbol='_', surround=True)

        # ------------------------------------------------------------
        # -----  EXECUTE RUN  ----------------------------------------
        # ------------------------------------------------------------
        if self.fit_pars['store_burnin_chains']:
            log_message(msg='Storing burnin chains', symbol='-')

        # ACW: Make this a function (~50 lines)
        # ------------------------------------------------------------
        # -----  STAGE 1: ESTABLISHING INITIAL FIT  ------------------
        # ------------------------------------------------------------
        # Handle special case of very first run, either by fitting one
        # component (default) or by using `init_comps` / `init_memb_probs`
        # to initialise the EM fit.
        if (self.fit_pars['init_comps'] is None) and (self.fit_pars['init_memb_probs'] is None):
            # NaiveFit doesn't know how to blindly intiialise runs with ncomps > 1
            assert self.ncomps == 1, 'If no initialisation set, can only accept ncomp==1'

            # If no init conditions provided, assume all stars are members and
            # begin fit with 1 component. The extra column (when
            # 'use_background' is truthy) holds background membership, seeded
            # at near-zero so component 0 starts with near-certain membership.
            init_memb_probs = np.zeros((len(self.data_dict['means']),
                                        self.ncomps + self.fit_pars[
                                            'use_background']))
            init_memb_probs[:, 0] = 1. - 1.e-10
            init_memb_probs[:, 1] = 1.e-10
            self.fit_pars['init_memb_probs'] = init_memb_probs

            log_message(msg='No initial information provided', symbol='-')
            log_message(msg='Assuming all stars are members', symbol='-')
        # Otherwise, we must have been given an init_comps, or an
        # init_memb_probs to start things with
        else:
            log_message(msg='Initialising with init_comps or init_memb_probs with'
                        '%i components'%self.ncomps, symbol='*', surround=True)
            pass

        # MZ: just testing. Delete after if works
        print("self.fit_pars['init_memb_probs']", self.fit_pars['init_memb_probs'])
        print("self.fit_pars['init_comps']", self.fit_pars['init_comps'])

        log_message(msg='FITTING {} COMPONENT'.format(self.ncomps),
                    symbol='*', surround=True)
        run_dir = self.rdir + '{}/'.format(self.ncomps)

        # `prev_result`/`prev_score` track the best fit found so far.
        prev_result = self.run_em_unless_loadable(run_dir)
        prev_score = self.calc_score(
            prev_result['comps'], prev_result['memb_probs'],
            use_box_background=self.fit_pars['use_box_background']
        )

        self.ncomps += 1

        # ------------------------------------------------------------
        # -----  STAGE 2: EXPLORE EXTRA COMPONENT BY DECOMPOSITION  --
        # ------------------------------------------------------------
        # Calculate global score of fit for comparison with future fits with
        # different component counts.
        # Begin iterative loop, each time trialing the incorporation of a new
        # component.
        #
        # `prev_result` tracks the previous fit, which is taken to be
        # the best fit so far.
        #
        # As new fits are acquired, we call them `new_result`.
        # The new fits are compared against the previous fit, and if
        # determined to be an improvement, they are taken as the best fit,
        # and are renamed to `prev_result`.
        stage_2_ncomps = 2
        while stage_2_ncomps <= self.fit_pars['max_comp_count']:
            log_message(msg='FITTING {} COMPONENT'.format(stage_2_ncomps),
                        symbol='*', surround=True)

            all_results = []
            all_scores = []

            # Iteratively try subdividing each previous component.
            # target_comp is the component we will split into two.
            # This will make a total of ncomps (the target comp split into 2,
            # plus the remaining components from prev_result['comps']).
            for i, target_comp in enumerate(prev_result['comps']):
                # Label each trial A, B, C, ... to name its run directory.
                div_label = chr(ord('A') + i)
                run_dir = self.rdir + '{}/{}/'.format(stage_2_ncomps, div_label)
                log_message(msg='Subdividing stage {}'.format(div_label),
                            symbol='+', surround=True)
                mkpath(run_dir)

                self.fit_pars['init_comps'] = self.build_init_comps(
                    prev_result['comps'], split_comp_ix=i,
                    prev_med_and_spans=prev_result['med_and_spans'],
                    memb_probs = prev_result['memb_probs'])
                self.ncomps = len(self.fit_pars['init_comps'])

                result = self.run_em_unless_loadable(run_dir)
                all_results.append(result)

                score = self.calc_score(
                    result['comps'], result['memb_probs'],
                    use_box_background=self.fit_pars['use_box_background']
                )
                all_scores.append(score)

                logging.info(
                    'Decomposition {} finished with \nBIC: {}\nlnlike: {}\n'
                    'lnpost: {}'.format(
                        div_label, all_scores[-1]['bic'],
                        all_scores[-1]['lnlike'], all_scores[-1]['lnpost'],
                    ))

            # ------------------------------------------------------------
            # -----  STAGE 2a: COMBINE RESULTS OF EACH GOOD SPLIT  -------
            # ------------------------------------------------------------
            # Identify the best improving split; nanargmin ignores NaN BICs
            # from trials whose score could not be evaluated.
            all_bics = np.array([score['bic'] for score in all_scores])
            best_split_ix = np.nanargmin(all_bics)
            new_result = all_results[best_split_ix]
            new_score = all_scores[best_split_ix]

            self.iter_end_log(best_split_ix, prev_result=prev_result, new_result=new_result)

            # Check if the fit has improved (lower BIC is better).
            self.log_score_comparison(new=new_score,
                                      prev=prev_score)
            if new_score['bic'] < prev_score['bic']:
                prev_score = new_score
                prev_result = new_result
                stage_2_ncomps += 1
                log_message(msg="Commencing {} component fit on {}{}".format(
                    self.ncomps, self.ncomps - 1,
                    chr(ord('A') + best_split_ix)), symbol='+'
                )
            else:
                # WRITING THE FINAL RESULTS INTO FILES
                self.write_results_to_file(prev_result, prev_score)
                break

            logging.info("Best fit:\n{}".format(
                [group.get_pars() for group in prev_result['comps']]))

        if stage_2_ncomps >= self.fit_pars['max_comp_count']:
            log_message(msg='REACHED MAX COMP LIMIT', symbol='+',
                        surround=True)

        return prev_result, prev_score
# frozen_string_literal: true
# :markup: markdown
require "action_dispatch/journey/nfa/dot"
module ActionDispatch
  module Journey # :nodoc:
    module GTG # :nodoc:
      # Deterministic transition table driving route recognition. States are
      # integers; transitions are keyed either by literal path segments
      # (@string_states), by the "default" dynamic-segment regexp
      # (@stdparam_states), or by custom constraint regexps (@regexp_states).
      class TransitionTable # :nodoc:
        include Journey::NFA::Dot

        attr_reader :memos

        # Matches one path segment: any run of chars other than '.', '/', '?'.
        DEFAULT_EXP = /[^.\/?]+/

        def initialize
          @stdparam_states = {}
          @regexp_states = {}
          @string_states = {}
          @accepting = {}
          # Route memos attached to accepting states, keyed by state index.
          @memos = Hash.new { |h, k| h[k] = [] }
        end

        def add_accepting(state)
          @accepting[state] = true
        end

        def accepting_states
          @accepting.keys
        end

        def accepting?(state)
          @accepting[state]
        end

        def add_memo(idx, memo)
          @memos[idx] << memo
        end

        def memo(idx)
          @memos[idx]
        end

        # Epsilon closure is trivial for a DFA: just the state(s) themselves.
        def eclosure(t)
          Array(t)
        end

        # Advances the state list `t` by one token. `t` is a flat array of
        # [state, previous_start, state, previous_start, ...] pairs, where
        # previous_start is nil except for in-progress multi-token regexp
        # matches (it records where that regexp match began in full_string).
        def move(t, full_string, token, start_index, token_matches_default)
          return [] if t.empty?

          next_states = []

          transitions_count = t.size
          i = 0
          while i < transitions_count
            s = t[i]
            previous_start = t[i + 1]

            if previous_start.nil?
              # In the simple case of a "default" param regex do this fast-path and add all
              # next states.
              if token_matches_default && std_state = @stdparam_states[s]
                next_states << std_state << nil
              end

              # When we have a literal string, we can just pull the next state
              if states = @string_states[s]
                state = states[token]
                next_states << state << nil unless state.nil?
              end
            end

            # For regexes that aren't the "default" style, they may potentially not be
            # terminated by the first "token" [./?], so we need to continue to attempt to
            # match this regexp as well as any successful paths that continue out of it.
            # both paths could be valid.
            if states = @regexp_states[s]
              slice_start = if previous_start.nil?
                start_index
              else
                previous_start
              end

              slice_length = start_index + token.length - slice_start
              curr_slice = full_string.slice(slice_start, slice_length)

              states.each { |re, v|
                # if we match, we can try moving past this
                next_states << v << nil if !v.nil? && re.match?(curr_slice)
              }

              # and regardless, we must continue accepting tokens and retrying this regexp. we
              # need to remember where we started as well so we can take bigger slices.
              next_states << s << slice_start
            end

            i += 2
          end

          next_states
        end

        # Serializable form of the table; regexps are reduced to their source.
        def as_json(options = nil)
          simple_regexp = Hash.new { |h, k| h[k] = {} }

          @regexp_states.each do |from, hash|
            hash.each do |re, to|
              simple_regexp[from][re.source] = to
            end
          end

          {
            regexp_states: simple_regexp.stringify_keys,
            string_states: @string_states.stringify_keys,
            stdparam_states: @stdparam_states.stringify_keys,
            accepting: @accepting.stringify_keys
          }
        end

        # Renders the FSM via Graphviz `dot`; strips the XML prologue (first
        # three lines) and the fixed width/height so the SVG scales.
        def to_svg
          svg = IO.popen("dot -Tsvg", "w+") { |f|
            f.write(to_dot)
            f.close_write
            f.readlines
          }
          3.times { svg.shift }
          svg.join.sub(/width="[^"]*"/, "").sub(/height="[^"]*"/, "")
        end

        # Builds the interactive HTML visualizer page for this FSM, with a
        # few sample routes filled with plausible parameter values.
        def visualizer(paths, title = "FSM")
          viz_dir = File.join __dir__, "..", "visualizer"
          fsm_js = File.read File.join(viz_dir, "fsm.js")
          fsm_css = File.read File.join(viz_dir, "fsm.css")
          erb = File.read File.join(viz_dir, "index.html.erb")
          states = "function tt() { return #{to_json}; }"

          fun_routes = paths.sample(3).map do |ast|
            ast.filter_map { |n|
              case n
              when Nodes::Symbol
                case n.left
                when ":id" then rand(100).to_s
                when ":format" then %w{ xml json }.sample
                else
                  "omg"
                end
              when Nodes::Terminal then n.symbol
              else
                nil
              end
            }.join
          end

          stylesheets = [fsm_css]
          svg = to_svg
          javascripts = [states, fsm_js]

          # Self-assignments keep these locals referenced so they are exposed
          # to the ERB template through `binding` without lint warnings.
          fun_routes = fun_routes
          stylesheets = stylesheets
          svg = svg
          javascripts = javascripts

          require "erb"
          template = ERB.new erb
          template.result(binding)
        end

        # Registers a transition from state `from` to state `to` on `sym`,
        # routing it to the appropriate lookup table by symbol type.
        def []=(from, to, sym)
          case sym
          when String, Symbol
            to_mapping = @string_states[from] ||= {}
            # account for symbols in the constraints the same as strings
            to_mapping[sym.to_s] = to
          when Regexp
            if sym == DEFAULT_EXP
              @stdparam_states[from] = to
            else
              to_mapping = @regexp_states[from] ||= {}
              # we must match the whole string to a token boundary
              to_mapping[/\A#{sym}\Z/] = to
            end
          else
            raise ArgumentError, "unknown symbol: %s" % sym.class
          end
        end

        # All states reachable as a source or target of any transition.
        def states
          ss = @string_states.keys + @string_states.values.flat_map(&:values)
          ps = @stdparam_states.keys + @stdparam_states.values
          rs = @regexp_states.keys + @regexp_states.values.flat_map(&:values)
          (ss + ps + rs).uniq
        end

        # Flattened [from, label, to] triples for rendering with `dot`.
        def transitions
          # double escaped because dot evaluates escapes
          default_exp_anchored = "\\\\A#{DEFAULT_EXP.source}\\\\Z"
          @string_states.flat_map { |from, hash|
            hash.map { |s, to| [from, s, to] }
          } + @stdparam_states.map { |from, to|
            [from, default_exp_anchored, to]
          } + @regexp_states.flat_map { |from, hash|
            hash.map { |r, to| [from, r.source.gsub("\\") { "\\\\" }, to] }
          }
        end
      end
    end
  end
end
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This module contains functions and methods to authenticate with OpenID
providers.
"""
__revision__ = \
"$Id$"
from invenio.config import CFG_SITE_SECURE_URL
from invenio.external_authentication import ExternalAuth
from invenio.session import get_session
class ExternalOpenID(ExternalAuth):
    """
    Contains methods for authenticate with an OpenID provider.

    All OpenID state (provider name, last response, status message code) is
    exchanged with the rest of Invenio through the per-request ``req.g``
    dictionary, initialised by ``__init_req``.
    """
    @staticmethod
    def __init_req(req):
        # Reset every OpenID-related slot so state from a previous attempt
        # cannot leak into the current request.
        req.g['openid_provider_name'] = ''
        req.g['openid_debug'] = 0
        req.g['openid_msg'] = ''
        req.g['openid_debug_msg'] = ''
        req.g['openid_response'] = None

    def auth_user(self, username, password, req=None):
        """
        Tries to find email and OpenID identity of the user. If it
        doesn't find any of them, returns (None, None)

        @param username: Isn't used in this function
        @type username: str

        @param password: Isn't used in this function
        @type password: str

        @param req: request
        @type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest

        @rtype: str|NoneType, str|NoneType
        """
        from openid.consumer import consumer
        self._get_response(req)
        response = req.g['openid_response']
        identity = None
        email = None
        if response.status == consumer.SUCCESS:
            # In the first login of the user, fetches his/her email
            # from OpenID provider.
            email = self._get_email_from_success_response(req)
            identity = response.getDisplayIdentifier()
        elif response.status == consumer.CANCEL:
            # If user cancels the verification, set corresponding message.
            # BUGFIX: was `req.openid_msg = 21`, writing an attribute that is
            # never read; get_msg() reads `req.g['openid_msg']` (see
            # __init_req which initialises that slot).
            req.g['openid_msg'] = 21
        elif response.status == consumer.FAILURE:
            # If verification fails, set corresponding message.
            # BUGFIX: was `req.openid_msg.msg = 22`, which raised
            # AttributeError at runtime; use the `req.g` store like the
            # rest of the class.
            req.g['openid_msg'] = 22
        return email, identity

    @staticmethod
    def get_msg(req):
        # Status message code set by auth_user (cleared by __init_req).
        return req.g['openid_msg']

    @staticmethod
    def _get_ax_value(response, type_uri):
        """
        Scans an Attribute Exchange (ax) response for the first attribute
        whose `type.extN` matches `type_uri` and returns its `value.extN.1`.

        Only the `ext0` and `ext1` aliases are checked, mirroring the
        layouts produced by the providers this module supports.

        @param response: a successful response from the OpenID consumer
        @param type_uri: the axschema type URI to look for
        @type type_uri: str

        @rtype: str|NoneType
        """
        from openid.extensions import ax
        ax_resp = ax.FetchResponse.fromSuccessResponse(response)
        if not ax_resp:
            return None
        extensions = ax_resp.getExtensionArgs()
        value = None
        for ext in ('ext0', 'ext1'):
            type_key = 'type.%s' % ext
            value_key = 'value.%s.1' % ext
            if value is None and type_key in extensions and \
                    value_key in extensions:
                if extensions[type_key] == type_uri:
                    value = extensions[value_key]
        return value

    def fetch_user_nickname(self, username, password=None, req=None):
        """
        Fetches the OpenID provider for nickname of the user. If it doesn't
        find any, returns None.

        This function doesn't need username, password or req. They are exist
        just because this class is derived from ExternalAuth

        @param username: Isn't used in this function
        @type username: str

        @param password: Isn't used in this function
        @type password: str

        @param req: request
        @type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest

        @rtype: str|NoneType
        """
        from openid.extensions import sreg
        nickname = None
        response = req.g['openid_response']
        # May be either Simple Registration (sreg) response or
        # Attribute Exchange (ax) response; prefer sreg, fall back to ax.
        sreg_resp = sreg.SRegResponse.fromSuccessResponse(response)
        if sreg_resp:
            if 'nickname' in sreg_resp.getExtensionArgs():
                nickname = sreg_resp.getExtensionArgs()['nickname']
        if not nickname:
            nickname = self._get_ax_value(
                response, 'http://axschema.org/namePerson/friendly')
        return nickname

    @staticmethod
    def _get_email_from_success_response(req):
        """
        Fetches the email from consumer.SuccessResponse. If it doesn't find any
        returns None.

        @rtype: str|NoneType
        """
        response = req.g['openid_response']
        return ExternalOpenID._get_ax_value(
            response, 'http://axschema.org/contact/email')

    @staticmethod
    def _get_response(req):
        """
        Constructs the response returned from the OpenID provider and stores
        it in `req.g['openid_response']`.

        @param req: request
        @type req: invenio.webinterface_handler_wsgi.SimulatedModPythonRequest
        """
        from invenio.webinterface_handler import wash_urlargd
        from openid.consumer import consumer
        content = {}
        # Accept every submitted form field as a plain string.
        for key in req.form.keys():
            content[key] = (str, '')
        args = wash_urlargd(req.form, content)
        # Strip Invenio-internal arguments the OpenID consumer must not see.
        if 'ln' in args:
            del args['ln']
        if 'referer' in args:
            if not args['referer']:
                del args['referer']
        oidconsumer = consumer.Consumer({"id": get_session(req)}, None)
        url = CFG_SITE_SECURE_URL + "/youraccount/login"
        req.g['openid_provider_name'] = args['provider']
        req.g['openid_response'] = oidconsumer.complete(args, url)
# frozen_string_literal: true
# :markup: markdown
require "uri"
require "active_support/core_ext/hash/indifferent_access"
require "active_support/core_ext/string/access"
require "active_support/core_ext/module/redefine_method"
require "action_controller/metal/exceptions"
module ActionDispatch
  module Assertions
    # Suite of assertions to test routes generated by Rails and the handling of
    # requests made to them.
    module RoutingAssertions
      extend ActiveSupport::Concern

      # Variant of with_routing for integration tests: besides swapping the
      # route set it also rebuilds the integration session so URL helpers in
      # the session reflect the temporary routes.
      module WithIntegrationRouting # :nodoc:
        extend ActiveSupport::Concern

        module ClassMethods
          # Class-level form: installs the temporary routes in a `setup`
          # block and restores everything in `teardown`.
          def with_routing(&block)
            old_routes = nil
            old_routes_call_method = nil
            old_integration_session = nil
            setup do
              old_routes = initialize_lazy_routes(app.routes)
              old_routes_call_method = old_routes.method(:call)
              old_integration_session = integration_session
              create_routes(&block)
            end
            teardown do
              reset_routes(old_routes, old_routes_call_method, old_integration_session)
            end
          end
        end

        # Instance-level form: swaps routes for the duration of the block
        # and always restores the originals, even if the block raises.
        def with_routing(&block)
          old_routes = initialize_lazy_routes(app.routes)
          old_routes_call_method = old_routes.method(:call)
          old_integration_session = integration_session
          create_routes(&block)
        ensure
          reset_routes(old_routes, old_routes_call_method, old_integration_session)
        end

        private
          # Forces a LazyRouteSet to materialise its routes before we capture
          # its #call method, so the captured method is the real one.
          def initialize_lazy_routes(routes)
            if defined?(Rails::Engine::LazyRouteSet) && routes.is_a?(Rails::Engine::LazyRouteSet)
              routes.tap(&:routes)
            else
              routes
            end
          end

          # Builds a fresh RouteSet, patches the app's routes to dispatch to
          # it, and rebuilds the integration session (preserving scheme and
          # host) so its URL helpers come from the new routes.
          def create_routes
            app = self.app
            routes = ActionDispatch::Routing::RouteSet.new
            @original_routes ||= app.routes
            @original_routes.singleton_class.redefine_method(:call, &routes.method(:call))

            https = integration_session.https?
            host = integration_session.host

            app.instance_variable_set(:@routes, routes)
            @integration_session = Class.new(ActionDispatch::Integration::Session) do
              include app.routes.url_helpers
              include app.routes.mounted_helpers
            end.new(app)
            @integration_session.https! https
            @integration_session.host! host
            @routes = routes

            yield routes
          end

          # Undoes create_routes: restores the original route set, its #call
          # method, and the previous integration session.
          def reset_routes(old_routes, old_routes_call_method, old_integration_session)
            app.instance_variable_set(:@routes, old_routes)
            @original_routes.singleton_class.redefine_method(:call, &old_routes_call_method)
            @integration_session = old_integration_session
            @routes = old_routes
          end
      end

      module ClassMethods
        # A helper to make it easier to test different route configurations. This method
        # temporarily replaces @routes with a new RouteSet instance before each test.
        #
        # The new instance is yielded to the passed block. Typically the block will
        # create some routes using `set.draw { match ... }`:
        #
        #     with_routing do |set|
        #       set.draw do
        #         resources :users
        #       end
        #     end
        #
        def with_routing(&block)
          old_routes, old_controller = nil
          setup do
            old_routes, old_controller = @routes, @controller
            create_routes(&block)
          end
          teardown do
            reset_routes(old_routes, old_controller)
          end
        end
      end

      def setup # :nodoc:
        # Ensure @routes exists before super runs, so later code can rely on
        # the ivar being defined (possibly nil).
        @routes ||= nil
        super
      end

      # A helper to make it easier to test different route configurations. This method
      # temporarily replaces @routes with a new RouteSet instance.
      #
      # The new instance is yielded to the passed block. Typically the block will
      # create some routes using `set.draw { match ... }`:
      #
      #     with_routing do |set|
      #       set.draw do
      #         resources :users
      #       end
      #       assert_equal "/users", users_path
      #     end
      #
      def with_routing(config = nil, &block)
        old_routes, old_controller = @routes, @controller
        create_routes(config, &block)
      ensure
        reset_routes(old_routes, old_controller)
      end

      # Asserts that the routing of the given `path` was handled correctly and that
      # the parsed options (given in the `expected_options` hash) match `path`.
      # Basically, it asserts that Rails recognizes the route given by
      # `expected_options`.
      #
      # Pass a hash in the second argument (`path`) to specify the request method.
      # This is useful for routes requiring a specific HTTP method. The hash should
      # contain a `:path` with the incoming request path and a `:method` containing
      # the required HTTP verb.
      #
      #     # Asserts that POSTing to /items will call the create action on ItemsController
      #     assert_recognizes({controller: 'items', action: 'create'}, {path: 'items', method: :post})
      #
      # You can also pass in `extras` with a hash containing URL parameters that would
      # normally be in the query string. This can be used to assert that values in the
      # query string will end up in the params hash correctly. To test query strings
      # you must use the extras argument because appending the query string on the
      # path directly will not work. For example:
      #
      #     # Asserts that a path of '/items/list/1?view=print' returns the correct options
      #     assert_recognizes({controller: 'items', action: 'list', id: '1', view: 'print'}, 'items/list/1', { view: "print" })
      #
      # The `message` parameter allows you to pass in an error message that is
      # displayed upon failure.
      #
      #     # Check the default route (i.e., the index action)
      #     assert_recognizes({controller: 'items', action: 'index'}, 'items')
      #
      #     # Test a specific action
      #     assert_recognizes({controller: 'items', action: 'list'}, 'items/list')
      #
      #     # Test an action with a parameter
      #     assert_recognizes({controller: 'items', action: 'destroy', id: '1'}, 'items/destroy/1')
      #
      #     # Test a custom route
      #     assert_recognizes({controller: 'items', action: 'show', id: '1'}, 'view/item1')
      def assert_recognizes(expected_options, path, extras = {}, msg = nil)
        if path.is_a?(Hash) && path[:method].to_s == "all"
          # `method: :all` expands to one recognition assertion per verb.
          [:get, :post, :put, :delete].each do |method|
            assert_recognizes(expected_options, path.merge(method: method), extras, msg)
          end
        else
          request = recognized_request_for(path, extras, msg)

          expected_options = expected_options.clone

          expected_options.stringify_keys!

          msg = message(msg, "") {
            sprintf("The recognized options <%s> did not match <%s>, difference:",
                    request.path_parameters, expected_options)
          }

          assert_equal(expected_options, request.path_parameters, msg)
        end
      end

      # Asserts that the provided options can be used to generate the provided path.
      # This is the inverse of `assert_recognizes`. The `extras` parameter is used to
      # tell the request the names and values of additional request parameters that
      # would be in a query string. The `message` parameter allows you to specify a
      # custom error message for assertion failures.
      #
      # The `defaults` parameter is unused.
      #
      #     # Asserts that the default action is generated for a route with no action
      #     assert_generates "/items", controller: "items", action: "index"
      #
      #     # Tests that the list action is properly routed
      #     assert_generates "/items/list", controller: "items", action: "list"
      #
      #     # Tests the generation of a route with a parameter
      #     assert_generates "/items/list/1", { controller: "items", action: "list", id: "1" }
      #
      #     # Asserts that the generated route gives us our custom route
      #     assert_generates "changesets/12", { controller: 'scm', action: 'show_diff', revision: "12" }
      def assert_generates(expected_path, options, defaults = {}, extras = {}, message = nil)
        if expected_path.include?("://")
          fail_on(URI::InvalidURIError, message) do
            uri = URI.parse(expected_path)
            expected_path = uri.path.to_s.empty? ? "/" : uri.path
          end
        else
          expected_path = "/#{expected_path}" unless expected_path.start_with?("/")
        end

        options = options.clone
        # Any option that ends up in the query string is an "extra".
        generated_path, query_string_keys = @routes.generate_extras(options, defaults)
        found_extras = options.reject { |k, _| ! query_string_keys.include? k }

        msg = message || sprintf("found extras <%s>, not <%s>", found_extras, extras)
        assert_equal(extras, found_extras, msg)

        msg = message || sprintf("The generated path <%s> did not match <%s>", generated_path,
            expected_path)
        assert_equal(expected_path, generated_path, msg)
      end

      # Asserts that path and options match both ways; in other words, it verifies
      # that `path` generates `options` and then that `options` generates `path`. This
      # essentially combines `assert_recognizes` and `assert_generates` into one step.
      #
      # The `extras` hash allows you to specify options that would normally be
      # provided as a query string to the action. The `message` parameter allows you
      # to specify a custom error message to display upon failure.
      #
      #     # Asserts a basic route: a controller with the default action (index)
      #     assert_routing '/home', controller: 'home', action: 'index'
      #
      #     # Test a route generated with a specific controller, action, and parameter (id)
      #     assert_routing '/entries/show/23', controller: 'entries', action: 'show', id: 23
      #
      #     # Asserts a basic route (controller + default action), with an error message if it fails
      #     assert_routing '/store', { controller: 'store', action: 'index' }, {}, {}, 'Route for store index not generated properly'
      #
      #     # Tests a route, providing a defaults hash
      #     assert_routing 'controller/action/9', {id: "9", item: "square"}, {controller: "controller", action: "action"}, {}, {item: "square"}
      #
      #     # Tests a route with an HTTP method
      #     assert_routing({ method: 'put', path: '/product/321' }, { controller: "product", action: "update", id: "321" })
      def assert_routing(path, options, defaults = {}, extras = {}, message = nil)
        assert_recognizes(options, path, extras, message)

        controller, default_controller = options[:controller], defaults[:controller]
        if controller && controller.include?(?/) && default_controller && default_controller.include?(?/)
          options[:controller] = "/#{controller}"
        end

        generate_options = options.dup.delete_if { |k, _| defaults.key?(k) }
        assert_generates(path.is_a?(Hash) ? path[:path] : path, generate_options, defaults, extras, message)
      end

      private
        # Builds a fresh RouteSet and, when a controller is present, clones it
        # and mixes the new URL helpers into the clone (and its view context)
        # so helper calls inside the test resolve against the temporary routes.
        def create_routes(config = nil)
          @routes = ActionDispatch::Routing::RouteSet.new(config || ActionDispatch::Routing::RouteSet::DEFAULT_CONFIG)
          if @controller
            @controller = @controller.clone
            _routes = @routes

            @controller.singleton_class.include(_routes.url_helpers)

            if @controller.respond_to? :view_context_class
              view_context_class = Class.new(@controller.view_context_class) do
                include _routes.url_helpers
              end

              custom_view_context = Module.new {
                define_method(:view_context_class) do
                  view_context_class
                end
              }
              @controller.extend(custom_view_context)
            end
          end
          yield @routes
        end

        # Restores the route set and controller captured before create_routes.
        def reset_routes(old_routes, old_controller)
          @routes = old_routes
          if @controller
            @controller = old_controller
          end
        end

        # Recognizes the route for a given path.
        def recognized_request_for(path, extras = {}, msg)
          if path.is_a?(Hash)
            method = path[:method]
            path = path[:path]
          else
            method = :get
          end

          controller = @controller if defined?(@controller)
          request = ActionController::TestRequest.create controller&.class

          if path.include?("://")
            fail_on(URI::InvalidURIError, msg) do
              uri = URI.parse(path)
              request.env["rack.url_scheme"] = uri.scheme || "http"
              request.host = uri.host if uri.host
              request.port = uri.port if uri.port
              request.path = uri.path.to_s.empty? ? "/" : uri.path
            end
          else
            path = "/#{path}" unless path.start_with?("/")
            request.path = path
          end

          request.request_method = method if method

          params = fail_on(ActionController::RoutingError, msg) do
            @routes.recognize_path(path, method: method, extras: extras)
          end
          request.path_parameters = params.with_indifferent_access

          request
        end

        # Runs the block, converting the given exception class into a test
        # failure with `message` (or the exception's own message).
        def fail_on(exception_class, message)
          yield
        rescue exception_class => e
          flunk(message || e.message)
        end
    end
  end
end
/*
* Copyright 2010-2024 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.fe10.test.cases.generated.cases.components.resolver;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.util.KtTestUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.analysis.api.fe10.test.configurator.AnalysisApiFe10TestConfiguratorFactory;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfiguratorFactoryData;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfigurator;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.TestModuleKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.FrontendKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisSessionMode;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiMode;
import org.jetbrains.kotlin.analysis.api.impl.base.test.cases.components.resolver.AbstractResolveReferenceTest;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.regex.Pattern;
/** This class is generated by {@link org.jetbrains.kotlin.generators.tests.analysis.api.GenerateAnalysisApiTestsKt}. DO NOT MODIFY MANUALLY */
@SuppressWarnings("all")
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi")
@TestDataPath("$PROJECT_ROOT")
public class Fe10IdeNormalAnalysisSourceModuleResolveReferenceTestGenerated extends AbstractResolveReferenceTest {
/**
 * Supplies the analysis-API configurator shared by every test in this generated class:
 * FE10 frontend, a plain source module, normal analysis session, IDE mode.
 * NOTE(review): this file is generated (see class-level comment) — regenerate via
 * GenerateAnalysisApiTestsKt rather than editing by hand.
 */
@NotNull
@Override
public AnalysisApiTestConfigurator getConfigurator() {
return AnalysisApiFe10TestConfiguratorFactory.INSTANCE.createConfigurator(
new AnalysisApiTestConfiguratorFactoryData(
FrontendKind.Fe10,
TestModuleKind.Source,
AnalysisSessionMode.Normal,
AnalysisApiMode.Ide
)
);
}
@Test
public void testAllFilesPresentInSingleByPsi() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("annotationEntry.kt")
public void testAnnotationEntry() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationEntry.kt");
}
@Test
@TestMetadata("AnnotationForClass.kt")
public void testAnnotationForClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/AnnotationForClass.kt");
}
@Test
@TestMetadata("AnnotationFromJava.kt")
public void testAnnotationFromJava() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/AnnotationFromJava.kt");
}
@Test
@TestMetadata("annotationInAnnotation_arrayOf.kt")
public void testAnnotationInAnnotation_arrayOf() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationInAnnotation_arrayOf.kt");
}
@Test
@TestMetadata("annotationInAnnotation_collectionLiteral.kt")
public void testAnnotationInAnnotation_collectionLiteral() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationInAnnotation_collectionLiteral.kt");
}
@Test
@TestMetadata("annotationInAnnotation_collectionLiteral_badSpread.kt")
public void testAnnotationInAnnotation_collectionLiteral_badSpread() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationInAnnotation_collectionLiteral_badSpread.kt");
}
@Test
@TestMetadata("annotationInAnnotation_multipleAnnotations_arrayOf.kt")
public void testAnnotationInAnnotation_multipleAnnotations_arrayOf() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationInAnnotation_multipleAnnotations_arrayOf.kt");
}
@Test
@TestMetadata("annotationInAnnotation_multipleAnnotations_collectionLiteral.kt")
public void testAnnotationInAnnotation_multipleAnnotations_collectionLiteral() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationInAnnotation_multipleAnnotations_collectionLiteral.kt");
}
@Test
@TestMetadata("annotationInAnnotation_noarg.kt")
public void testAnnotationInAnnotation_noarg() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationInAnnotation_noarg.kt");
}
@Test
@TestMetadata("annotationInAnnotation_vararg.kt")
public void testAnnotationInAnnotation_vararg() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationInAnnotation_vararg.kt");
}
@Test
@TestMetadata("AnnotationInsideFunction.kt")
public void testAnnotationInsideFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/AnnotationInsideFunction.kt");
}
@Test
@TestMetadata("AnnotationOnCallSite.kt")
public void testAnnotationOnCallSite() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/AnnotationOnCallSite.kt");
}
@Test
@TestMetadata("annotationOnDelegate.kt")
public void testAnnotationOnDelegate() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnDelegate.kt");
}
@Test
@TestMetadata("annotationOnExpression_if.kt")
public void testAnnotationOnExpression_if() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnExpression_if.kt");
}
@Test
@TestMetadata("annotationOnExpression_whenBranch.kt")
public void testAnnotationOnExpression_whenBranch() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnExpression_whenBranch.kt");
}
@Test
@TestMetadata("annotationOnFile.kt")
public void testAnnotationOnFile() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnFile.kt");
}
@Test
@TestMetadata("AnnotationOnFile2.kt")
public void testAnnotationOnFile2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/AnnotationOnFile2.kt");
}
@Test
@TestMetadata("AnnotationOnFileWithImport.kt")
public void testAnnotationOnFileWithImport() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/AnnotationOnFileWithImport.kt");
}
@Test
@TestMetadata("annotationOnParameter_param.kt")
public void testAnnotationOnParameter_param() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnParameter_param.kt");
}
@Test
@TestMetadata("annotationOnParameter_parameterProperty.kt")
public void testAnnotationOnParameter_parameterProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnParameter_parameterProperty.kt");
}
@Test
@TestMetadata("annotationOnParameter_reified.kt")
public void testAnnotationOnParameter_reified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnParameter_reified.kt");
}
@Test
@TestMetadata("annotationOnParameter_setparam.kt")
public void testAnnotationOnParameter_setparam() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnParameter_setparam.kt");
}
@Test
@TestMetadata("annotationOnProperty_field.kt")
public void testAnnotationOnProperty_field() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnProperty_field.kt");
}
@Test
@TestMetadata("annotationOnProperty_get.kt")
public void testAnnotationOnProperty_get() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnProperty_get.kt");
}
@Test
@TestMetadata("annotationOnProperty_property.kt")
public void testAnnotationOnProperty_property() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnProperty_property.kt");
}
@Test
@TestMetadata("annotationOnProperty_set.kt")
public void testAnnotationOnProperty_set() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/annotationOnProperty_set.kt");
}
@Test
@TestMetadata("AnnotationTypeParameter.kt")
public void testAnnotationTypeParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/AnnotationTypeParameter.kt");
}
@Test
@TestMetadata("arrayIncWithDotQualifier.kt")
public void testArrayIncWithDotQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayIncWithDotQualifier.kt");
}
@Test
@TestMetadata("callableReference_genericQualifier1.kt")
public void testCallableReference_genericQualifier1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReference_genericQualifier1.kt");
}
@Test
@TestMetadata("callableReference_genericQualifier2.kt")
public void testCallableReference_genericQualifier2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReference_genericQualifier2.kt");
}
@Test
@TestMetadata("calleeExpressionOfImplicitInvoke.kt")
public void testCalleeExpressionOfImplicitInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/calleeExpressionOfImplicitInvoke.kt");
}
@Test
@TestMetadata("checkNotNullCall.kt")
public void testCheckNotNullCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/checkNotNullCall.kt");
}
@Test
@TestMetadata("checkNotNullCallAsCallee.kt")
public void testCheckNotNullCallAsCallee() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/checkNotNullCallAsCallee.kt");
}
@Test
@TestMetadata("classCollectionLiteral.kt")
public void testClassCollectionLiteral() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/classCollectionLiteral.kt");
}
@Test
@TestMetadata("ClassInTypeConstraint.kt")
public void testClassInTypeConstraint() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ClassInTypeConstraint.kt");
}
@Test
@TestMetadata("ClassObjectClassLiteralReference.kt")
public void testClassObjectClassLiteralReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ClassObjectClassLiteralReference.kt");
}
@Test
@TestMetadata("ClassObjectClassLiteralReferenceWithField.kt")
public void testClassObjectClassLiteralReferenceWithField() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ClassObjectClassLiteralReferenceWithField.kt");
}
@Test
@TestMetadata("ClassQualifierForNestedClassConstructorCall.kt")
public void testClassQualifierForNestedClassConstructorCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ClassQualifierForNestedClassConstructorCall.kt");
}
@Test
@TestMetadata("ClassReferenceInImport.kt")
public void testClassReferenceInImport() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ClassReferenceInImport.kt");
}
@Test
@TestMetadata("CollectionStream.kt")
public void testCollectionStream() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/CollectionStream.kt");
}
@Test
@TestMetadata("companionObjectReference.kt")
public void testCompanionObjectReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/companionObjectReference.kt");
}
@Test
@TestMetadata("CompanionObjectWithName1.kt")
public void testCompanionObjectWithName1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/CompanionObjectWithName1.kt");
}
@Test
@TestMetadata("CompanionObjectWithName2.kt")
public void testCompanionObjectWithName2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/CompanionObjectWithName2.kt");
}
@Test
@TestMetadata("comparisonCall.kt")
public void testComparisonCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/comparisonCall.kt");
}
@Test
@TestMetadata("consecutiveImplicitInvoke1.kt")
public void testConsecutiveImplicitInvoke1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/consecutiveImplicitInvoke1.kt");
}
@Test
@TestMetadata("consecutiveImplicitInvoke2.kt")
public void testConsecutiveImplicitInvoke2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/consecutiveImplicitInvoke2.kt");
}
@Test
@TestMetadata("consecutiveImplicitInvoke3.kt")
public void testConsecutiveImplicitInvoke3() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/consecutiveImplicitInvoke3.kt");
}
@Test
@TestMetadata("consecutiveImplicitInvoke_callee.kt")
public void testConsecutiveImplicitInvoke_callee() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/consecutiveImplicitInvoke_callee.kt");
}
@Test
@TestMetadata("constructorCallWithSubstitution.kt")
public void testConstructorCallWithSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorCallWithSubstitution.kt");
}
@Test
@TestMetadata("CtrlClickResolve.kt")
public void testCtrlClickResolve() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/CtrlClickResolve.kt");
}
@Test
@TestMetadata("dataClassCopy.kt")
public void testDataClassCopy() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/dataClassCopy.kt");
}
@Test
@TestMetadata("DefaultObjectAsExtensionReceiverForFunction.kt")
public void testDefaultObjectAsExtensionReceiverForFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/DefaultObjectAsExtensionReceiverForFunction.kt");
}
@Test
@TestMetadata("DefaultObjectAsExtensionReceiverForProperty.kt")
public void testDefaultObjectAsExtensionReceiverForProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/DefaultObjectAsExtensionReceiverForProperty.kt");
}
@Test
@TestMetadata("DefaultObjectAsReceiverForExtensionFunctionOnSuperType.kt")
public void testDefaultObjectAsReceiverForExtensionFunctionOnSuperType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/DefaultObjectAsReceiverForExtensionFunctionOnSuperType.kt");
}
@Test
@TestMetadata("DefaultObjectAsReceiverForMemberPropertyInSuperType.kt")
public void testDefaultObjectAsReceiverForMemberPropertyInSuperType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/DefaultObjectAsReceiverForMemberPropertyInSuperType.kt");
}
@Test
@TestMetadata("DefaultObjectInShortReferenceFormCall.kt")
public void testDefaultObjectInShortReferenceFormCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/DefaultObjectInShortReferenceFormCall.kt");
}
@Test
@TestMetadata("defaultStarImportsInLibrarySource.kt")
public void testDefaultStarImportsInLibrarySource() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/defaultStarImportsInLibrarySource.kt");
}
@Test
@TestMetadata("delegatedConstructor.kt")
public void testDelegatedConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedConstructor.kt");
}
@Test
@TestMetadata("delegatedConstructorApplicable.kt")
public void testDelegatedConstructorApplicable() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedConstructorApplicable.kt");
}
@Test
@TestMetadata("delegatedConstructorCallWithSubstitution.kt")
public void testDelegatedConstructorCallWithSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedConstructorCallWithSubstitution.kt");
}
@Test
@TestMetadata("delegatedConstructorWithSubstitution.kt")
public void testDelegatedConstructorWithSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedConstructorWithSubstitution.kt");
}
@Test
@TestMetadata("Deprecated.kt")
public void testDeprecated() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/Deprecated.kt");
}
@Test
@TestMetadata("DestructuringDeclarationInInit.kt")
public void testDestructuringDeclarationInInit() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/DestructuringDeclarationInInit.kt");
}
@Test
@TestMetadata("enumAsAnnotationValue.kt")
public void testEnumAsAnnotationValue() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/enumAsAnnotationValue.kt");
}
@Test
@TestMetadata("enumEntrySuperclassEntry.kt")
public void testEnumEntrySuperclassEntry() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/enumEntrySuperclassEntry.kt");
}
@Test
@TestMetadata("enumEntrySuperclassReference.kt")
public void testEnumEntrySuperclassReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/enumEntrySuperclassReference.kt");
}
@Test
@TestMetadata("EnumValues.kt")
public void testEnumValues() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/EnumValues.kt");
}
@Test
@TestMetadata("eqEqCall_fromAny.kt")
public void testEqEqCall_fromAny() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/eqEqCall_fromAny.kt");
}
@Test
@TestMetadata("eqEqCall_fromDataClass.kt")
public void testEqEqCall_fromDataClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/eqEqCall_fromDataClass.kt");
}
@Test
@TestMetadata("eqEqCall_fromDataClass_overridden.kt")
public void testEqEqCall_fromDataClass_overridden() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/eqEqCall_fromDataClass_overridden.kt");
}
@Test
@TestMetadata("eqEqCall_fromSealedClass.kt")
public void testEqEqCall_fromSealedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/eqEqCall_fromSealedClass.kt");
}
@Test
@TestMetadata("eqEqCall_fromSealedClass_genericUpperBound.kt")
public void testEqEqCall_fromSealedClass_genericUpperBound() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/eqEqCall_fromSealedClass_genericUpperBound.kt");
}
@Test
@TestMetadata("eqEqCall_fromSuperType.kt")
public void testEqEqCall_fromSuperType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/eqEqCall_fromSuperType.kt");
}
@Test
@TestMetadata("eqEqCall_overridden.kt")
public void testEqEqCall_overridden() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/eqEqCall_overridden.kt");
}
@Test
@TestMetadata("EqualsOperator.kt")
public void testEqualsOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/EqualsOperator.kt");
}
@Test
@TestMetadata("EqualsOperatorNoInfix.kt")
public void testEqualsOperatorNoInfix() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/EqualsOperatorNoInfix.kt");
}
@Test
@TestMetadata("explicitFunctionalInterfaceInvoke_globalVal.kt")
public void testExplicitFunctionalInterfaceInvoke_globalVal() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/explicitFunctionalInterfaceInvoke_globalVal.kt");
}
@Test
@TestMetadata("explicitFunctionalInterfaceInvoke_parameter.kt")
public void testExplicitFunctionalInterfaceInvoke_parameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/explicitFunctionalInterfaceInvoke_parameter.kt");
}
@Test
@TestMetadata("explicitLambdaParameter.kt")
public void testExplicitLambdaParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/explicitLambdaParameter.kt");
}
@Test
@TestMetadata("ExternalCompanionObject.kt")
public void testExternalCompanionObject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ExternalCompanionObject.kt");
}
@Test
@TestMetadata("forEach.kt")
public void testForEach() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forEach.kt");
}
@Test
@TestMetadata("functionCall.kt")
public void testFunctionCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCall.kt");
}
@Test
@TestMetadata("functionCallInTheSameFile.kt")
public void testFunctionCallInTheSameFile() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallInTheSameFile.kt");
}
@Test
@TestMetadata("functionCallWithExtensionReceiverAndTypeArgument.kt")
public void testFunctionCallWithExtensionReceiverAndTypeArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithExtensionReceiverAndTypeArgument.kt");
}
@Test
@TestMetadata("functionCallWithLambdaArgument.kt")
public void testFunctionCallWithLambdaArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithLambdaArgument.kt");
}
@Test
@TestMetadata("functionCallWithNamedArgument.kt")
public void testFunctionCallWithNamedArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithNamedArgument.kt");
}
@Test
@TestMetadata("functionCallWithNonTrailingLambdaArgument.kt")
public void testFunctionCallWithNonTrailingLambdaArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithNonTrailingLambdaArgument.kt");
}
@Test
@TestMetadata("functionCallWithNonTrailingLambdaArgument2.kt")
public void testFunctionCallWithNonTrailingLambdaArgument2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithNonTrailingLambdaArgument2.kt");
}
@Test
@TestMetadata("functionCallWithSpreadArgument.kt")
public void testFunctionCallWithSpreadArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithSpreadArgument.kt");
}
@Test
@TestMetadata("functionCallWithTypeArgument.kt")
public void testFunctionCallWithTypeArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithTypeArgument.kt");
}
@Test
@TestMetadata("functionCallWithVarargArgument.kt")
public void testFunctionCallWithVarargArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionCallWithVarargArgument.kt");
}
@Test
@TestMetadata("functionTypeVariableCall_dispatchReceiver.kt")
public void testFunctionTypeVariableCall_dispatchReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionTypeVariableCall_dispatchReceiver.kt");
}
@Test
@TestMetadata("functionWithPostDecPropertyArgument.kt")
public void testFunctionWithPostDecPropertyArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithPostDecPropertyArgument.kt");
}
@Test
@TestMetadata("functionWithPostIncPropertyArgument.kt")
public void testFunctionWithPostIncPropertyArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithPostIncPropertyArgument.kt");
}
@Test
@TestMetadata("functionWithPreDecArrayAccessArgument.kt")
public void testFunctionWithPreDecArrayAccessArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithPreDecArrayAccessArgument.kt");
}
@Test
@TestMetadata("functionWithPreDecPropertyArgument.kt")
public void testFunctionWithPreDecPropertyArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithPreDecPropertyArgument.kt");
}
@Test
@TestMetadata("functionWithPreIncArrayAccessArgument.kt")
public void testFunctionWithPreIncArrayAccessArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithPreIncArrayAccessArgument.kt");
}
@Test
@TestMetadata("functionWithPreIncPropertyArgument.kt")
public void testFunctionWithPreIncPropertyArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithPreIncPropertyArgument.kt");
}
@Test
@TestMetadata("functionWithPropertyArgument.kt")
public void testFunctionWithPropertyArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithPropertyArgument.kt");
}
@Test
@TestMetadata("functionWithReceiverCall.kt")
public void testFunctionWithReceiverCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithReceiverCall.kt");
}
@Test
@TestMetadata("functionWithReceiverSafeCall.kt")
public void testFunctionWithReceiverSafeCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/functionWithReceiverSafeCall.kt");
}
@Test
@TestMetadata("GenericFunctionParameter.kt")
public void testGenericFunctionParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/GenericFunctionParameter.kt");
}
@Test
@TestMetadata("GenericTypeInFunctionParameter.kt")
public void testGenericTypeInFunctionParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/GenericTypeInFunctionParameter.kt");
}
@Test
@TestMetadata("groupBy.kt")
public void testGroupBy() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/groupBy.kt");
}
@Test
@TestMetadata("implicitConstructorDelegationCall.kt")
public void testImplicitConstructorDelegationCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitConstructorDelegationCall.kt");
}
@Test
@TestMetadata("implicitConstuctorCall.kt")
public void testImplicitConstuctorCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitConstuctorCall.kt");
}
@Test
@TestMetadata("implicitExtensionInvoke.kt")
public void testImplicitExtensionInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitExtensionInvoke.kt");
}
@Test
@TestMetadata("implicitFunctionInvoke.kt")
public void testImplicitFunctionInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitFunctionInvoke.kt");
}
@Test
@TestMetadata("implicitFunctionalInterfaceInvoke.kt")
public void testImplicitFunctionalInterfaceInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitFunctionalInterfaceInvoke.kt");
}
@Test
@TestMetadata("implicitInvoke.kt")
public void testImplicitInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitInvoke.kt");
}
@Test
@TestMetadata("implicitInvokeWithReceiver.kt")
public void testImplicitInvokeWithReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitInvokeWithReceiver.kt");
}
@Test
@TestMetadata("implicitJavaConstuctorCall.kt")
public void testImplicitJavaConstuctorCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitJavaConstuctorCall.kt");
}
@Test
@TestMetadata("implicitLambdaParameter.kt")
public void testImplicitLambdaParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/implicitLambdaParameter.kt");
}
@Test
@TestMetadata("importAlias.kt")
public void testImportAlias() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/importAlias.kt");
}
@Test
@TestMetadata("ImportFromRootScope.kt")
public void testImportFromRootScope() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ImportFromRootScope.kt");
}
@Test
@TestMetadata("InClassParameter.kt")
public void testInClassParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InClassParameter.kt");
}
@Test
@TestMetadata("InClassParameterField.kt")
public void testInClassParameterField() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InClassParameterField.kt");
}
@Test
@TestMetadata("InEnumEntry.kt")
public void testInEnumEntry() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InEnumEntry.kt");
}
@Test
@TestMetadata("InFunctionParameterType.kt")
public void testInFunctionParameterType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InFunctionParameterType.kt");
}
@Test
@TestMetadata("InMethodParameter.kt")
public void testInMethodParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InMethodParameter.kt");
}
@Test
@TestMetadata("InMethodVarargParameter.kt")
public void testInMethodVarargParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InMethodVarargParameter.kt");
}
@Test
@TestMetadata("InOperator.kt")
public void testInOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InOperator.kt");
}
@Test
@TestMetadata("InVaragReferenceInFunctionBody.kt")
public void testInVaragReferenceInFunctionBody() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InVaragReferenceInFunctionBody.kt");
}
@Test
@TestMetadata("InVaragReferenceInNamedParameter.kt")
public void testInVaragReferenceInNamedParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/InVaragReferenceInNamedParameter.kt");
}
@Test
@TestMetadata("incWithDotQualifier.kt")
public void testIncWithDotQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/incWithDotQualifier.kt");
}
@Test
@TestMetadata("indexedGet.kt")
public void testIndexedGet() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/indexedGet.kt");
}
@Test
@TestMetadata("indexedSet.kt")
public void testIndexedSet() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/indexedSet.kt");
}
@Test
@TestMetadata("intArrayOfInAnnotation.kt")
public void testIntArrayOfInAnnotation() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/intArrayOfInAnnotation.kt");
}
@Test
@TestMetadata("intCollectionLiteral.kt")
public void testIntCollectionLiteral() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/intCollectionLiteral.kt");
}
@Test
@TestMetadata("javaClassReference.kt")
public void testJavaClassReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/javaClassReference.kt");
}
@Test
@TestMetadata("javaFunctionCall.kt")
public void testJavaFunctionCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/javaFunctionCall.kt");
}
@Test
@TestMetadata("javaPropertyGetter.kt")
public void testJavaPropertyGetter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/javaPropertyGetter.kt");
}
@Test
@TestMetadata("javaPropertyNestedGetter.kt")
public void testJavaPropertyNestedGetter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/javaPropertyNestedGetter.kt");
}
@Test
@TestMetadata("javaPropertySetter.kt")
public void testJavaPropertySetter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/javaPropertySetter.kt");
}
@Test
@TestMetadata("javaSetterBuilderCall.kt")
public void testJavaSetterBuilderCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/javaSetterBuilderCall.kt");
}
@Test
@TestMetadata("javaSetterCall.kt")
public void testJavaSetterCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/javaSetterCall.kt");
}
@Test
@TestMetadata("JsAsDynamic.kt")
public void testJsAsDynamic() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/JsAsDynamic.kt");
}
@Test
@TestMetadata("KotlinPropertyAssignment.kt")
public void testKotlinPropertyAssignment() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/KotlinPropertyAssignment.kt");
}
@Test
@TestMetadata("kotlinPropertyGetter_unqualified.kt")
public void testKotlinPropertyGetter_unqualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPropertyGetter_unqualified.kt");
}
@Test
@TestMetadata("kotlinPropertyNestedGetter.kt")
public void testKotlinPropertyNestedGetter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPropertyNestedGetter.kt");
}
@Test
@TestMetadata("kotlinPropertySetter_unqualified.kt")
public void testKotlinPropertySetter_unqualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPropertySetter_unqualified.kt");
}
// NOTE(review): auto-generated JUnit 5 test methods — one per .kt file under
// analysis/analysis-api/testData/components/resolver/singleByPsi. Each method only
// delegates to runTest(testDataFilePath); this file appears machine-generated, so
// prefer regenerating over manual edits (additions/removals belong in testData).
@Test
@TestMetadata("KotlinPropertyWithGetterAndSetterAssignment.kt")
public void testKotlinPropertyWithGetterAndSetterAssignment() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/KotlinPropertyWithGetterAndSetterAssignment.kt");
}
@Test
@TestMetadata("labeledLambda.kt")
public void testLabeledLambda() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/labeledLambda.kt");
}
@Test
@TestMetadata("labeledLambdaInsideParentheses.kt")
public void testLabeledLambdaInsideParentheses() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/labeledLambdaInsideParentheses.kt");
}
@Test
@TestMetadata("LabelsReturn.kt")
public void testLabelsReturn() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/LabelsReturn.kt");
}
@Test
@TestMetadata("memberFunctionCallWithTypeArgument.kt")
public void testMemberFunctionCallWithTypeArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/memberFunctionCallWithTypeArgument.kt");
}
@Test
@TestMetadata("memberFunctionVsTopLevel.kt")
public void testMemberFunctionVsTopLevel() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/memberFunctionVsTopLevel.kt");
}
@Test
@TestMetadata("MultiDeclarationExtension.kt")
public void testMultiDeclarationExtension() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/MultiDeclarationExtension.kt");
}
@Test
@TestMetadata("MultiDeclarationMember.kt")
public void testMultiDeclarationMember() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/MultiDeclarationMember.kt");
}
@Test
@TestMetadata("NamedClassObject.kt")
public void testNamedClassObject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/NamedClassObject.kt");
}
@Test
@TestMetadata("noBuiltIns.kt")
public void testNoBuiltIns() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/noBuiltIns.kt");
}
@Test
@TestMetadata("NotEqualsOperator.kt")
public void testNotEqualsOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/NotEqualsOperator.kt");
}
@Test
@TestMetadata("NotInOperator.kt")
public void testNotInOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/NotInOperator.kt");
}
@Test
@TestMetadata("overloadBetweenLambdas.kt")
public void testOverloadBetweenLambdas() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/overloadBetweenLambdas.kt");
}
@Test
@TestMetadata("overloadWithReceiver.kt")
public void testOverloadWithReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/overloadWithReceiver.kt");
}
@Test
@TestMetadata("PackageFromAnnotationOnFile.kt")
public void testPackageFromAnnotationOnFile() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/PackageFromAnnotationOnFile.kt");
}
@Test
@TestMetadata("PackageFromAnnotationOnFunction.kt")
public void testPackageFromAnnotationOnFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/PackageFromAnnotationOnFunction.kt");
}
@Test
@TestMetadata("PackageReference.kt")
public void testPackageReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/PackageReference.kt");
}
@Test
@TestMetadata("PackageReferenceInImport.kt")
public void testPackageReferenceInImport() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/PackageReferenceInImport.kt");
}
@Test
@TestMetadata("parameterByName.kt")
public void testParameterByName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/parameterByName.kt");
}
@Test
@TestMetadata("parameterByNameInSafeCall.kt")
public void testParameterByNameInSafeCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/parameterByNameInSafeCall.kt");
}
@Test
@TestMetadata("PlusAssignByHand.kt")
public void testPlusAssignByHand() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/PlusAssignByHand.kt");
}
@Test
@TestMetadata("PlusAssignOperator.kt")
public void testPlusAssignOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/PlusAssignOperator.kt");
}
@Test
@TestMetadata("PlusAssignViaPlusOperator.kt")
public void testPlusAssignViaPlusOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/PlusAssignViaPlusOperator.kt");
}
@Test
@TestMetadata("postfixUnaryOperatorOnVar.kt")
public void testPostfixUnaryOperatorOnVar() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/postfixUnaryOperatorOnVar.kt");
}
@Test
@TestMetadata("postfixUnaryOperatorOnVar_base.kt")
public void testPostfixUnaryOperatorOnVar_base() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/postfixUnaryOperatorOnVar_base.kt");
}
@Test
@TestMetadata("postfixUnaryOperatorWithArrayAccessConvention.kt")
public void testPostfixUnaryOperatorWithArrayAccessConvention() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/postfixUnaryOperatorWithArrayAccessConvention.kt");
}
@Test
@TestMetadata("postfixUnaryOperatorWithArrayAccessConvention_base.kt")
public void testPostfixUnaryOperatorWithArrayAccessConvention_base() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/postfixUnaryOperatorWithArrayAccessConvention_base.kt");
}
@Test
@TestMetadata("postfixUnaryOperatorWithArrayAccessConvention_complexDispatcher.kt")
public void testPostfixUnaryOperatorWithArrayAccessConvention_complexDispatcher() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/postfixUnaryOperatorWithArrayAccessConvention_complexDispatcher.kt");
}
@Test
@TestMetadata("prefixUnaryOperatorOnVar.kt")
public void testPrefixUnaryOperatorOnVar() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/prefixUnaryOperatorOnVar.kt");
}
@Test
@TestMetadata("prefixUnaryOperatorOnVar_base.kt")
public void testPrefixUnaryOperatorOnVar_base() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/prefixUnaryOperatorOnVar_base.kt");
}
@Test
@TestMetadata("prefixUnaryOperatorOnVar_complex.kt")
public void testPrefixUnaryOperatorOnVar_complex() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/prefixUnaryOperatorOnVar_complex.kt");
}
@Test
@TestMetadata("prefixUnaryOperatorWithArrayAccessConvention.kt")
public void testPrefixUnaryOperatorWithArrayAccessConvention() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/prefixUnaryOperatorWithArrayAccessConvention.kt");
}
@Test
@TestMetadata("prefixUnaryOperatorWithArrayAccessConvention_base.kt")
public void testPrefixUnaryOperatorWithArrayAccessConvention_base() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/prefixUnaryOperatorWithArrayAccessConvention_base.kt");
}
@Test
@TestMetadata("prefixUnaryOperatorWithArrayAccessConvention_complexDispatcher.kt")
public void testPrefixUnaryOperatorWithArrayAccessConvention_complexDispatcher() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/prefixUnaryOperatorWithArrayAccessConvention_complexDispatcher.kt");
}
@Test
@TestMetadata("qualifiedCallSelector.kt")
public void testQualifiedCallSelector() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedCallSelector.kt");
}
@Test
@TestMetadata("qualifiedCalleeExpressionOfImplicitInvoke.kt")
public void testQualifiedCalleeExpressionOfImplicitInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedCalleeExpressionOfImplicitInvoke.kt");
}
@Test
@TestMetadata("qualifiedReferenceSelector.kt")
public void testQualifiedReferenceSelector() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedReferenceSelector.kt");
}
@Test
@TestMetadata("qualifiedWholeCall.kt")
public void testQualifiedWholeCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedWholeCall.kt");
}
@Test
@TestMetadata("qualifiedWholeReferenceSelector.kt")
public void testQualifiedWholeReferenceSelector() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedWholeReferenceSelector.kt");
}
@Test
@TestMetadata("ReferenceInClassWhereConstraint.kt")
public void testReferenceInClassWhereConstraint() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ReferenceInClassWhereConstraint.kt");
}
@Test
@TestMetadata("ReferenceInFunWhereConstraint.kt")
public void testReferenceInFunWhereConstraint() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ReferenceInFunWhereConstraint.kt");
}
@Test
@TestMetadata("ResolveClass.kt")
public void testResolveClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ResolveClass.kt");
}
@Test
@TestMetadata("ResolvePackageInProperty.kt")
public void testResolvePackageInProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ResolvePackageInProperty.kt");
}
@Test
@TestMetadata("ResolvePackageInTheEndInProperty.kt")
public void testResolvePackageInTheEndInProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ResolvePackageInTheEndInProperty.kt");
}
@Test
@TestMetadata("ResolvePackageInTheMiddleInProperty.kt")
public void testResolvePackageInTheMiddleInProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ResolvePackageInTheMiddleInProperty.kt");
}
@Test
@TestMetadata("ResolvePackageInTheTypeNameInProperty.kt")
public void testResolvePackageInTheTypeNameInProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ResolvePackageInTheTypeNameInProperty.kt");
}
@Test
@TestMetadata("RetentionValue.kt")
public void testRetentionValue() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/RetentionValue.kt");
}
@Test
@TestMetadata("safeQualifiedCallSelector.kt")
public void testSafeQualifiedCallSelector() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/safeQualifiedCallSelector.kt");
}
@Test
@TestMetadata("safeQualifiedReferenceSelector.kt")
public void testSafeQualifiedReferenceSelector() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/safeQualifiedReferenceSelector.kt");
}
@Test
@TestMetadata("safeQualifiedWholeCall.kt")
public void testSafeQualifiedWholeCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/safeQualifiedWholeCall.kt");
}
@Test
@TestMetadata("safeQualifiedWholeReferenceSelector.kt")
public void testSafeQualifiedWholeReferenceSelector() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/safeQualifiedWholeReferenceSelector.kt");
}
@Test
@TestMetadata("SamAdapter.kt")
public void testSamAdapter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/SamAdapter.kt");
}
@Test
@TestMetadata("SamConstructor.kt")
public void testSamConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/SamConstructor.kt");
}
@Test
@TestMetadata("samConstructorCall.kt")
public void testSamConstructorCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/samConstructorCall.kt");
}
@Test
@TestMetadata("SamConstructorTypeArguments.kt")
public void testSamConstructorTypeArguments() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/SamConstructorTypeArguments.kt");
}
@Test
@TestMetadata("samLambda.kt")
public void testSamLambda() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/samLambda.kt");
}
@Test
@TestMetadata("samMethodReference.kt")
public void testSamMethodReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/samMethodReference.kt");
}
@Test
@TestMetadata("SeveralOverrides.kt")
public void testSeveralOverrides() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/SeveralOverrides.kt");
}
@Test
@TestMetadata("shadowedProperty.kt")
public void testShadowedProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/shadowedProperty.kt");
}
@Test
@TestMetadata("smartCastExplicitDispatchReceiver.kt")
public void testSmartCastExplicitDispatchReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastExplicitDispatchReceiver.kt");
}
@Test
@TestMetadata("smartCastExplicitExtensionReceiver.kt")
public void testSmartCastExplicitExtensionReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastExplicitExtensionReceiver.kt");
}
@Test
@TestMetadata("smartCastExpression.kt")
public void testSmartCastExpression() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastExpression.kt");
}
@Test
@TestMetadata("smartCastImplicitDispatchReceiver.kt")
public void testSmartCastImplicitDispatchReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastImplicitDispatchReceiver.kt");
}
@Test
@TestMetadata("smartCastImplicitExtensionReceiver.kt")
public void testSmartCastImplicitExtensionReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastImplicitExtensionReceiver.kt");
}
@Test
@TestMetadata("smartCastInvokeReceiver.kt")
public void testSmartCastInvokeReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastInvokeReceiver.kt");
}
@Test
@TestMetadata("smartCastedArg.kt")
public void testSmartCastedArg() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastedArg.kt");
}
@Test
@TestMetadata("smartCastedNonNullArg.kt")
public void testSmartCastedNonNullArg() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/smartCastedNonNullArg.kt");
}
@Test
@TestMetadata("staticFieldFromJavaClassFromSuper.kt")
public void testStaticFieldFromJavaClassFromSuper() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticFieldFromJavaClassFromSuper.kt");
}
@Test
@TestMetadata("staticImportFunction.kt")
public void testStaticImportFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportFunction.kt");
}
@Test
@TestMetadata("staticImportFunctionFromSuper.kt")
public void testStaticImportFunctionFromSuper() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportFunctionFromSuper.kt");
}
@Test
@TestMetadata("staticImportNestedFunction.kt")
public void testStaticImportNestedFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportNestedFunction.kt");
}
@Test
@TestMetadata("staticImportNestedJavaField.kt")
public void testStaticImportNestedJavaField() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportNestedJavaField.kt");
}
@Test
@TestMetadata("staticImportNestedJavaFunction.kt")
public void testStaticImportNestedJavaFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportNestedJavaFunction.kt");
}
@Test
@TestMetadata("staticImportNestedProperty.kt")
public void testStaticImportNestedProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportNestedProperty.kt");
}
@Test
@TestMetadata("staticImportProperty.kt")
public void testStaticImportProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportProperty.kt");
}
@Test
@TestMetadata("staticImportPropertyFromSuper.kt")
public void testStaticImportPropertyFromSuper() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticImportPropertyFromSuper.kt");
}
@Test
@TestMetadata("staticMethodFromJavaClass.kt")
public void testStaticMethodFromJavaClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticMethodFromJavaClass.kt");
}
@Test
@TestMetadata("staticMethodFromJavaClassFromSuper.kt")
public void testStaticMethodFromJavaClassFromSuper() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/staticMethodFromJavaClassFromSuper.kt");
}
@Test
@TestMetadata("stringBuilderLength.kt")
public void testStringBuilderLength() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/stringBuilderLength.kt");
}
@Test
@TestMetadata("SuperTypePrimaryConstructor.kt")
public void testSuperTypePrimaryConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/SuperTypePrimaryConstructor.kt");
}
@Test
@TestMetadata("SuperTypeSecondaryConstructor.kt")
public void testSuperTypeSecondaryConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/SuperTypeSecondaryConstructor.kt");
}
@Test
@TestMetadata("superWithLabel_caretAtLabel.kt")
public void testSuperWithLabel_caretAtLabel() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/superWithLabel_caretAtLabel.kt");
}
@Test
@TestMetadata("superWithLabel_caretAtSuper.kt")
public void testSuperWithLabel_caretAtSuper() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/superWithLabel_caretAtSuper.kt");
}
@Test
@TestMetadata("suspendLambda.kt")
public void testSuspendLambda() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/suspendLambda.kt");
}
@Test
@TestMetadata("thisWithLabelToFunction_caretAtLabel.kt")
public void testThisWithLabelToFunction_caretAtLabel() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/thisWithLabelToFunction_caretAtLabel.kt");
}
@Test
@TestMetadata("thisWithLabelToFunction_caretAtThis.kt")
public void testThisWithLabelToFunction_caretAtThis() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/thisWithLabelToFunction_caretAtThis.kt");
}
@Test
@TestMetadata("thisWithLabelToProperty_caretAtLabel.kt")
public void testThisWithLabelToProperty_caretAtLabel() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/thisWithLabelToProperty_caretAtLabel.kt");
}
@Test
@TestMetadata("thisWithLabelToProperty_caretAtThis.kt")
public void testThisWithLabelToProperty_caretAtThis() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/thisWithLabelToProperty_caretAtThis.kt");
}
@Test
@TestMetadata("todo.kt")
public void testTodo() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/todo.kt");
}
@Test
@TestMetadata("TopLevelClassVsLocalClassConstructor.kt")
public void testTopLevelClassVsLocalClassConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelClassVsLocalClassConstructor.kt");
}
@Test
@TestMetadata("TopLevelClassVsLocalClassConstructor2.kt")
public void testTopLevelClassVsLocalClassConstructor2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelClassVsLocalClassConstructor2.kt");
}
@Test
@TestMetadata("TopLevelCompanionObjectVsLocalClassConstructor.kt")
public void testTopLevelCompanionObjectVsLocalClassConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelCompanionObjectVsLocalClassConstructor.kt");
}
@Test
@TestMetadata("TopLevelCompanionObjectVsLocalClassConstructor2.kt")
public void testTopLevelCompanionObjectVsLocalClassConstructor2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelCompanionObjectVsLocalClassConstructor2.kt");
}
@Test
@TestMetadata("TopLevelCompanionObjectVsLocalClassQualifier.kt")
public void testTopLevelCompanionObjectVsLocalClassQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelCompanionObjectVsLocalClassQualifier.kt");
}
@Test
@TestMetadata("TopLevelObjectVsLocalClassConstructor.kt")
public void testTopLevelObjectVsLocalClassConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelObjectVsLocalClassConstructor.kt");
}
@Test
@TestMetadata("TopLevelObjectVsLocalClassConstructor2.kt")
public void testTopLevelObjectVsLocalClassConstructor2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelObjectVsLocalClassConstructor2.kt");
}
@Test
@TestMetadata("TopLevelObjectVsLocalClassConstructor3.kt")
public void testTopLevelObjectVsLocalClassConstructor3() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelObjectVsLocalClassConstructor3.kt");
}
@Test
@TestMetadata("TopLevelObjectVsLocalClassConstructor4.kt")
public void testTopLevelObjectVsLocalClassConstructor4() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelObjectVsLocalClassConstructor4.kt");
}
@Test
@TestMetadata("TopLevelObjectVsLocalClassQualifier.kt")
public void testTopLevelObjectVsLocalClassQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/TopLevelObjectVsLocalClassQualifier.kt");
}
@Test
@TestMetadata("typealiasOfExpectTypeWithActualTypealias.kt")
public void testTypealiasOfExpectTypeWithActualTypealias() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typealiasOfExpectTypeWithActualTypealias.kt");
}
@Test
@TestMetadata("unresolvableOperator_elvis_1.kt")
public void testUnresolvableOperator_elvis_1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/unresolvableOperator_elvis_1.kt");
}
@Test
@TestMetadata("unresolvableOperator_elvis_2.kt")
public void testUnresolvableOperator_elvis_2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/unresolvableOperator_elvis_2.kt");
}
@Test
@TestMetadata("unresolvableOperator_eqeqeq_1.kt")
public void testUnresolvableOperator_eqeqeq_1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/unresolvableOperator_eqeqeq_1.kt");
}
@Test
@TestMetadata("unresolvableOperator_eqeqeq_2.kt")
public void testUnresolvableOperator_eqeqeq_2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/unresolvableOperator_eqeqeq_2.kt");
}
@Test
@TestMetadata("unresolvableOperator_excleqeq_1.kt")
public void testUnresolvableOperator_excleqeq_1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/unresolvableOperator_excleqeq_1.kt");
}
@Test
@TestMetadata("unresolvableOperator_excleqeq_2.kt")
public void testUnresolvableOperator_excleqeq_2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/unresolvableOperator_excleqeq_2.kt");
}
@Test
@TestMetadata("UnresolvedCallArgument.kt")
public void testUnresolvedCallArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/UnresolvedCallArgument.kt");
}
@Test
@TestMetadata("unsignedArrayOf.kt")
public void testUnsignedArrayOf() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/unsignedArrayOf.kt");
}
@Test
@TestMetadata("ValueParameter.kt")
public void testValueParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/ValueParameter.kt");
}
@Test
@TestMetadata("variableAsFunction.kt")
public void testVariableAsFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunction.kt");
}
@Test
@TestMetadata("variableAsFunctionWithKeywordParameterName.kt")
public void testVariableAsFunctionWithKeywordParameterName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunctionWithKeywordParameterName.kt");
}
@Test
@TestMetadata("variableAsFunctionWithParameterName.kt")
public void testVariableAsFunctionWithParameterName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunctionWithParameterName.kt");
}
@Test
@TestMetadata("variableAsFunctionWithParameterNameAnnotation.kt")
public void testVariableAsFunctionWithParameterNameAnnotation() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunctionWithParameterNameAnnotation.kt");
}
@Test
@TestMetadata("variableAsFunctionWithParameterNameAnnotationConflict.kt")
public void testVariableAsFunctionWithParameterNameAnnotationConflict() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunctionWithParameterNameAnnotationConflict.kt");
}
@Test
@TestMetadata("variableAsFunctionWithParameterNameGeneric.kt")
public void testVariableAsFunctionWithParameterNameGeneric() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunctionWithParameterNameGeneric.kt");
}
@Test
@TestMetadata("variableAsFunctionWithParameterNameInNonFunctionType.kt")
public void testVariableAsFunctionWithParameterNameInNonFunctionType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunctionWithParameterNameInNonFunctionType.kt");
}
@Test
@TestMetadata("variableAsFunctionWithParameterNameMixed.kt")
public void testVariableAsFunctionWithParameterNameMixed() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableAsFunctionWithParameterNameMixed.kt");
}
@Test
@TestMetadata("variableWithExtensionInvoke.kt")
public void testVariableWithExtensionInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableWithExtensionInvoke.kt");
}
@Test
@TestMetadata("variableWithInvokeFunctionCall_dispatchReceiver.kt")
public void testVariableWithInvokeFunctionCall_dispatchReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableWithInvokeFunctionCall_dispatchReceiver.kt");
}
@Test
@TestMetadata("variableWithMemberInvoke.kt")
public void testVariableWithMemberInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/variableWithMemberInvoke.kt");
}
@Test
@TestMetadata("whenConditionInRange.kt")
public void testWhenConditionInRange() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/whenConditionInRange.kt");
}
@Test
@TestMetadata("whenConditionInRangeInverted.kt")
public void testWhenConditionInRangeInverted() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/whenConditionInRangeInverted.kt");
}
@Test
@TestMetadata("whenSelectorSmartCast.kt")
public void testWhenSelectorSmartCast() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/whenSelectorSmartCast.kt");
}
// NOTE(review): generated nested suite mirroring the arrayAccess testData directory.
// testAllFilesPresentInArrayAccess guards against test methods going stale: it fails
// if a .kt file exists in the directory without a corresponding generated method
// (the "withTestCompilerPluginEnabled" directory is excluded from the check).
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess")
@TestDataPath("$PROJECT_ROOT")
public class ArrayAccess {
@Test
public void testAllFilesPresentInArrayAccess() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("get.kt")
public void testGet() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/get.kt");
}
@Test
@TestMetadata("getOperator.kt")
public void testGetOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/getOperator.kt");
}
@Test
@TestMetadata("set.kt")
public void testSet() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/set.kt");
}
@Test
@TestMetadata("setOperator.kt")
public void testSetOperator() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/setOperator.kt");
}
@Test
@TestMetadata("SetOperatorInc.kt")
public void testSetOperatorInc() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/SetOperatorInc.kt");
}
// Sub-suite for the arrayAccess/withErrors testData subdirectory (error-recovery cases).
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/withErrors")
@TestDataPath("$PROJECT_ROOT")
public class WithErrors {
@Test
public void testAllFilesPresentInWithErrors() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("getFunction.kt")
public void testGetFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/withErrors/getFunction.kt");
}
@Test
@TestMetadata("setFunction.kt")
public void testSetFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/withErrors/setFunction.kt");
}
@Test
@TestMetadata("SetOperatorInc.kt")
public void testSetOperatorInc() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/arrayAccess/withErrors/SetOperatorInc.kt");
}
}
}
// NOTE(review): generated nested suite mirroring the builtins testData directory;
// the AllFilesPresent check keeps generated methods in sync with the .kt files on disk.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/builtins")
@TestDataPath("$PROJECT_ROOT")
public class Builtins {
@Test
public void testAllFilesPresentInBuiltins() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/builtins"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("emptyArray.kt")
public void testEmptyArray() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/builtins/emptyArray.kt");
}
@Test
@TestMetadata("emptyArrayWithStdlib.kt")
public void testEmptyArrayWithStdlib() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/builtins/emptyArrayWithStdlib.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences")
@TestDataPath("$PROJECT_ROOT")
public class CallableReferences {
@Test
public void testAllFilesPresentInCallableReferences() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("annotationConstructor.kt")
public void testAnnotationConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/annotationConstructor.kt");
}
@Test
@TestMetadata("boundMemberExtensionFunction.kt")
public void testBoundMemberExtensionFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/boundMemberExtensionFunction.kt");
}
@Test
@TestMetadata("boundMemberFunction.kt")
public void testBoundMemberFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/boundMemberFunction.kt");
}
@Test
@TestMetadata("boundMemberFunction_lhs.kt")
public void testBoundMemberFunction_lhs() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/boundMemberFunction_lhs.kt");
}
@Test
@TestMetadata("boundMemberFunction_rhs.kt")
public void testBoundMemberFunction_rhs() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/boundMemberFunction_rhs.kt");
}
@Test
@TestMetadata("boundMemberProperty.kt")
public void testBoundMemberProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/boundMemberProperty.kt");
}
@Test
@TestMetadata("boundTopLevelExtensionFunction.kt")
public void testBoundTopLevelExtensionFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/boundTopLevelExtensionFunction.kt");
}
@Test
@TestMetadata("boundTopLevelExtensionProperty.kt")
public void testBoundTopLevelExtensionProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/boundTopLevelExtensionProperty.kt");
}
@Test
@TestMetadata("buildList_implicit.kt")
public void testBuildList_implicit() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/buildList_implicit.kt");
}
@Test
@TestMetadata("buildList_let.kt")
public void testBuildList_let() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/buildList_let.kt");
}
@Test
@TestMetadata("buildList_otherList.kt")
public void testBuildList_otherList() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/buildList_otherList.kt");
}
@Test
@TestMetadata("buildList_this.kt")
public void testBuildList_this() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/buildList_this.kt");
}
@Test
@TestMetadata("constructor.kt")
public void testConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/constructor.kt");
}
@Test
@TestMetadata("genericParameterType.kt")
public void testGenericParameterType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/genericParameterType.kt");
}
@Test
@TestMetadata("genericParameterTypeGenericConsumer.kt")
public void testGenericParameterTypeGenericConsumer() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/genericParameterTypeGenericConsumer.kt");
}
@Test
@TestMetadata("genericReturnType.kt")
public void testGenericReturnType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/genericReturnType.kt");
}
@Test
@TestMetadata("genericReturnTypeGenericConsumer.kt")
public void testGenericReturnTypeGenericConsumer() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/genericReturnTypeGenericConsumer.kt");
}
@Test
@TestMetadata("memberExtensionProperty.kt")
public void testMemberExtensionProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/memberExtensionProperty.kt");
}
    // Generated test: delegates to runTest with the memberFunction.kt data file.
    @Test
    @TestMetadata("memberFunction.kt")
    public void testMemberFunction() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/memberFunction.kt");
    }
    // Generated test: delegates to runTest with the memberProperty.kt data file.
    @Test
    @TestMetadata("memberProperty.kt")
    public void testMemberProperty() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/memberProperty.kt");
    }
    // Generated test: delegates to runTest with the simple.kt data file.
    @Test
    @TestMetadata("simple.kt")
    public void testSimple() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/simple.kt");
    }
    // Generated test: delegates to runTest with the topLevelExtensionFunction.kt data file.
    @Test
    @TestMetadata("topLevelExtensionFunction.kt")
    public void testTopLevelExtensionFunction() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/topLevelExtensionFunction.kt");
    }
    // Generated test: delegates to runTest with the topLevelExtensionProperty.kt data file.
    @Test
    @TestMetadata("topLevelExtensionProperty.kt")
    public void testTopLevelExtensionProperty() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/topLevelExtensionProperty.kt");
    }
    // Generated test: delegates to runTest with the topLevelFunction.kt data file.
    @Test
    @TestMetadata("topLevelFunction.kt")
    public void testTopLevelFunction() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/topLevelFunction.kt");
    }
    // Generated test: delegates to runTest with the topLevelProperty.kt data file.
    @Test
    @TestMetadata("topLevelProperty.kt")
    public void testTopLevelProperty() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/callableReferences/topLevelProperty.kt");
    }
}
  /**
   * Generated tests over the {@code .../singleByPsi/cloneable} test-data directory.
   * Each test method delegates to {@code runTest} (defined in the enclosing base
   * class — not visible here) with one data file; the AllFilesPresent test checks
   * every matching .kt data file has a corresponding generated method.
   * NOTE(review): auto-generated — regenerate rather than hand-edit.
   */
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/cloneable")
  @TestDataPath("$PROJECT_ROOT")
  public class Cloneable {
    // Coverage check: fails if a .kt data file exists without a generated test
    // (files under "withTestCompilerPluginEnabled" are excluded).
    @Test
    public void testAllFilesPresentInCloneable() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/cloneable"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("cloneableCommon.kt")
    public void testCloneableCommon() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/cloneable/cloneableCommon.kt");
    }
    @Test
    @TestMetadata("cloneableJs.kt")
    public void testCloneableJs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/cloneable/cloneableJs.kt");
    }
    @Test
    @TestMetadata("cloneableJvm.kt")
    public void testCloneableJvm() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/cloneable/cloneableJvm.kt");
    }
    @Test
    @TestMetadata("cloneableNative.kt")
    public void testCloneableNative() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/cloneable/cloneableNative.kt");
    }
    @Test
    @TestMetadata("cloneableWasm.kt")
    public void testCloneableWasm() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/cloneable/cloneableWasm.kt");
    }
  }
  /**
   * Generated tests over the {@code .../singleByPsi/codeFragment} test-data tree.
   * One nested class per subdirectory (block / expression / type code fragments);
   * each leaf test delegates to {@code runTest} with a single data file.
   * NOTE(review): auto-generated — regenerate rather than hand-edit.
   */
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment")
  @TestDataPath("$PROJECT_ROOT")
  public class CodeFragment {
    // Coverage check for the directory itself (subdirectory files are checked
    // by the corresponding nested classes below).
    @Test
    public void testAllFilesPresentInCodeFragment() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    // Tests for code fragments that are whole statement blocks.
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment")
    @TestDataPath("$PROJECT_ROOT")
    public class BlockCodeFragment {
      @Test
      public void testAllFilesPresentInBlockCodeFragment() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("BinaryExpression.kt")
      public void testBinaryExpression() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/BinaryExpression.kt");
      }
      @Test
      @TestMetadata("CompanionCall.kt")
      public void testCompanionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/CompanionCall.kt");
      }
      @Test
      @TestMetadata("ContextFunctionCall.kt")
      public void testContextFunctionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/ContextFunctionCall.kt");
      }
      @Test
      @TestMetadata("FunctionCall.kt")
      public void testFunctionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/FunctionCall.kt");
      }
      @Test
      @TestMetadata("LocalFunctionCall.kt")
      public void testLocalFunctionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/LocalFunctionCall.kt");
      }
      @Test
      @TestMetadata("NestedCodeFragment.kt")
      public void testNestedCodeFragment() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/NestedCodeFragment.kt");
      }
      @Test
      @TestMetadata("NestedCodeFragmentClass.kt")
      public void testNestedCodeFragmentClass() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/NestedCodeFragmentClass.kt");
      }
      @Test
      @TestMetadata("NestedCodeFragmentClassMember.kt")
      public void testNestedCodeFragmentClassMember() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/NestedCodeFragmentClassMember.kt");
      }
      @Test
      @TestMetadata("NestedCodeFragmentFunction.kt")
      public void testNestedCodeFragmentFunction() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/NestedCodeFragmentFunction.kt");
      }
      @Test
      @TestMetadata("PropertyCall.kt")
      public void testPropertyCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/PropertyCall.kt");
      }
      @Test
      @TestMetadata("SecondStatement.kt")
      public void testSecondStatement() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/SecondStatement.kt");
      }
      @Test
      @TestMetadata("UnsafeCastInLoop.kt")
      public void testUnsafeCastInLoop() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/blockCodeFragment/UnsafeCastInLoop.kt");
      }
    }
    // Tests for code fragments that are single expressions.
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment")
    @TestDataPath("$PROJECT_ROOT")
    public class ExpressionCodeFragment {
      @Test
      public void testAllFilesPresentInExpressionCodeFragment() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("ArrayAssignmentOperator.kt")
      public void testArrayAssignmentOperator() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/ArrayAssignmentOperator.kt");
      }
      @Test
      @TestMetadata("ArrayPlusAssignmentOperator.kt")
      public void testArrayPlusAssignmentOperator() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/ArrayPlusAssignmentOperator.kt");
      }
      @Test
      @TestMetadata("BinaryExpression.kt")
      public void testBinaryExpression() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/BinaryExpression.kt");
      }
      @Test
      @TestMetadata("CompanionCall.kt")
      public void testCompanionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/CompanionCall.kt");
      }
      @Test
      @TestMetadata("ContextFunctionCall.kt")
      public void testContextFunctionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/ContextFunctionCall.kt");
      }
      @Test
      @TestMetadata("FunctionCall.kt")
      public void testFunctionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/FunctionCall.kt");
      }
      @Test
      @TestMetadata("LocalFunctionCall.kt")
      public void testLocalFunctionCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/LocalFunctionCall.kt");
      }
      @Test
      @TestMetadata("NestedCodeFragment.kt")
      public void testNestedCodeFragment() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/NestedCodeFragment.kt");
      }
      @Test
      @TestMetadata("NestedCodeFragmentUsage.kt")
      public void testNestedCodeFragmentUsage() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/NestedCodeFragmentUsage.kt");
      }
      @Test
      @TestMetadata("PropertyCall.kt")
      public void testPropertyCall() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/expressionCodeFragment/PropertyCall.kt");
      }
    }
    // Tests for code fragments that are type references.
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment")
    @TestDataPath("$PROJECT_ROOT")
    public class TypeCodeFragment {
      @Test
      public void testAllFilesPresentInTypeCodeFragment() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("FromContextFile.kt")
      public void testFromContextFile() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/FromContextFile.kt");
      }
      @Test
      @TestMetadata("FromContextModule.kt")
      public void testFromContextModule() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/FromContextModule.kt");
      }
      @Test
      @TestMetadata("GenericType.kt")
      public void testGenericType() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/GenericType.kt");
      }
      @Test
      @TestMetadata("GenericTypeArgument.kt")
      public void testGenericTypeArgument() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/GenericTypeArgument.kt");
      }
      @Test
      @TestMetadata("LocalClass.kt")
      public void testLocalClass() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/LocalClass.kt");
      }
      @Test
      @TestMetadata("Simple.kt")
      public void testSimple() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/Simple.kt");
      }
      @Test
      @TestMetadata("TypeAnnotation.kt")
      public void testTypeAnnotation() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/TypeAnnotation.kt");
      }
      @Test
      @TestMetadata("TypeParameter.kt")
      public void testTypeParameter() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/codeFragment/typeCodeFragment/TypeParameter.kt");
      }
    }
  }
  /**
   * Generated tests over the {@code .../singleByPsi/collectionLiterals} test-data
   * directory; each test method delegates to {@code runTest} with one data file.
   * NOTE(review): auto-generated — regenerate rather than hand-edit.
   */
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals")
  @TestDataPath("$PROJECT_ROOT")
  public class CollectionLiterals {
    // Coverage check: every .kt data file must have a generated test method.
    @Test
    public void testAllFilesPresentInCollectionLiterals() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("array.kt")
    public void testArray() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/array.kt");
    }
    @Test
    @TestMetadata("customGenericCollection.kt")
    public void testCustomGenericCollection() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/customGenericCollection.kt");
    }
    @Test
    @TestMetadata("customGenericCollectionAssignments.kt")
    public void testCustomGenericCollectionAssignments() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/customGenericCollectionAssignments.kt");
    }
    @Test
    @TestMetadata("customNonGenericCollection.kt")
    public void testCustomNonGenericCollection() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/customNonGenericCollection.kt");
    }
    @Test
    @TestMetadata("customNonGenericCollectionAssignments.kt")
    public void testCustomNonGenericCollectionAssignments() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/customNonGenericCollectionAssignments.kt");
    }
    @Test
    @TestMetadata("customNonGenericCollectionOverload.kt")
    public void testCustomNonGenericCollectionOverload() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/customNonGenericCollectionOverload.kt");
    }
    @Test
    @TestMetadata("intArray.kt")
    public void testIntArray() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/intArray.kt");
    }
    @Test
    @TestMetadata("list.kt")
    public void testList() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/list.kt");
    }
    @Test
    @TestMetadata("mutableList.kt")
    public void testMutableList() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/mutableList.kt");
    }
    @Test
    @TestMetadata("mutableSet.kt")
    public void testMutableSet() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/mutableSet.kt");
    }
    @Test
    @TestMetadata("noOf.kt")
    public void testNoOf() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/noOf.kt");
    }
    @Test
    @TestMetadata("sequence.kt")
    public void testSequence() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/sequence.kt");
    }
    @Test
    @TestMetadata("set.kt")
    public void testSet() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/set.kt");
    }
    @Test
    @TestMetadata("uintArray.kt")
    public void testUintArray() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/uintArray.kt");
    }
    @Test
    @TestMetadata("uintArrayAnnotation.kt")
    public void testUintArrayAnnotation() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/collectionLiterals/uintArrayAnnotation.kt");
    }
  }
  /**
   * Generated tests over the {@code .../singleByPsi/constructorDelegatingReference}
   * test-data directory; each test delegates to {@code runTest} with one data file.
   * NOTE(review): auto-generated — regenerate rather than hand-edit.
   */
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorDelegatingReference")
  @TestDataPath("$PROJECT_ROOT")
  public class ConstructorDelegatingReference {
    // Coverage check: every .kt data file must have a generated test method.
    @Test
    public void testAllFilesPresentInConstructorDelegatingReference() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorDelegatingReference"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("constructorDelegationReferenceSuper.kt")
    public void testConstructorDelegationReferenceSuper() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorDelegatingReference/constructorDelegationReferenceSuper.kt");
    }
    @Test
    @TestMetadata("constructorDelegationReferenceThis.kt")
    public void testConstructorDelegationReferenceThis() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorDelegatingReference/constructorDelegationReferenceThis.kt");
    }
    @Test
    @TestMetadata("constructorDelegationReferenceThisImplicit.kt")
    public void testConstructorDelegationReferenceThisImplicit() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorDelegatingReference/constructorDelegationReferenceThisImplicit.kt");
    }
    @Test
    @TestMetadata("toPrimary.kt")
    public void testToPrimary() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorDelegatingReference/toPrimary.kt");
    }
    @Test
    @TestMetadata("toSecondary.kt")
    public void testToSecondary() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/constructorDelegatingReference/toSecondary.kt");
    }
  }
  /**
   * Generated tests over the {@code .../singleByPsi/contextParameters} test-data
   * directory; each test method delegates to {@code runTest} with one data file.
   * NOTE(review): auto-generated — regenerate rather than hand-edit.
   */
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters")
  @TestDataPath("$PROJECT_ROOT")
  public class ContextParameters {
    // Coverage check: every .kt data file must have a generated test method.
    @Test
    public void testAllFilesPresentInContextParameters() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("callableReferenceOnFunctionWithContextParameters.kt")
    public void testCallableReferenceOnFunctionWithContextParameters() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnFunctionWithContextParameters.kt");
    }
    @Test
    @TestMetadata("callableReferenceOnFunctionWithContextParameters_rhs.kt")
    public void testCallableReferenceOnFunctionWithContextParameters_rhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnFunctionWithContextParameters_rhs.kt");
    }
    @Test
    @TestMetadata("callableReferenceOnFunctionWithContextReceivers.kt")
    public void testCallableReferenceOnFunctionWithContextReceivers() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnFunctionWithContextReceivers.kt");
    }
    @Test
    @TestMetadata("callableReferenceOnFunctionWithContextReceivers_rhs.kt")
    public void testCallableReferenceOnFunctionWithContextReceivers_rhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnFunctionWithContextReceivers_rhs.kt");
    }
    @Test
    @TestMetadata("callableReferenceOnPropertyWithContextParameters.kt")
    public void testCallableReferenceOnPropertyWithContextParameters() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnPropertyWithContextParameters.kt");
    }
    @Test
    @TestMetadata("callableReferenceOnPropertyWithContextParameters_rhs.kt")
    public void testCallableReferenceOnPropertyWithContextParameters_rhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnPropertyWithContextParameters_rhs.kt");
    }
    @Test
    @TestMetadata("callableReferenceOnPropertyWithContextReceivers.kt")
    public void testCallableReferenceOnPropertyWithContextReceivers() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnPropertyWithContextReceivers.kt");
    }
    @Test
    @TestMetadata("callableReferenceOnPropertyWithContextReceivers_rhs.kt")
    public void testCallableReferenceOnPropertyWithContextReceivers_rhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/callableReferenceOnPropertyWithContextReceivers_rhs.kt");
    }
    @Test
    @TestMetadata("functionDeclarationSiteSubstitution.kt")
    public void testFunctionDeclarationSiteSubstitution() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionDeclarationSiteSubstitution.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromClass.kt")
    public void testFunctionFromContextReceiverFromClass() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromClass.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromComplexCase.kt")
    public void testFunctionFromContextReceiverFromComplexCase() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromComplexCase.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromComplexCase2.kt")
    public void testFunctionFromContextReceiverFromComplexCase2() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromComplexCase2.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromFunction.kt")
    public void testFunctionFromContextReceiverFromFunction() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromFunction.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromFunctionComplexCase.kt")
    public void testFunctionFromContextReceiverFromFunctionComplexCase() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromFunctionComplexCase.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromFunctionComplexCaseWithReceiver.kt")
    public void testFunctionFromContextReceiverFromFunctionComplexCaseWithReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromFunctionComplexCaseWithReceiver.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromProperty.kt")
    public void testFunctionFromContextReceiverFromProperty() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromProperty.kt");
    }
    @Test
    @TestMetadata("functionFromContextReceiverFromProperty2.kt")
    public void testFunctionFromContextReceiverFromProperty2() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromContextReceiverFromProperty2.kt");
    }
    @Test
    @TestMetadata("functionFromFunctionExplicitly.kt")
    public void testFunctionFromFunctionExplicitly() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromFunctionExplicitly.kt");
    }
    @Test
    @TestMetadata("functionFromPropertyExplicitly.kt")
    public void testFunctionFromPropertyExplicitly() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionFromPropertyExplicitly.kt");
    }
    @Test
    @TestMetadata("functionNotEnoughContextWithContextParameter.kt")
    public void testFunctionNotEnoughContextWithContextParameter() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionNotEnoughContextWithContextParameter.kt");
    }
    @Test
    @TestMetadata("functionNotEnoughContextWithContextParameterAndUnrelatedReceiver.kt")
    public void testFunctionNotEnoughContextWithContextParameterAndUnrelatedReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionNotEnoughContextWithContextParameterAndUnrelatedReceiver.kt");
    }
    @Test
    @TestMetadata("functionNotEnoughContextWithContextReceiver.kt")
    public void testFunctionNotEnoughContextWithContextReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionNotEnoughContextWithContextReceiver.kt");
    }
    @Test
    @TestMetadata("functionNotEnoughContextWithReceiver.kt")
    public void testFunctionNotEnoughContextWithReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionNotEnoughContextWithReceiver.kt");
    }
    @Test
    @TestMetadata("functionOutsideClass.kt")
    public void testFunctionOutsideClass() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionOutsideClass.kt");
    }
    @Test
    @TestMetadata("functionParameter.kt")
    public void testFunctionParameter() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionParameter.kt");
    }
    @Test
    @TestMetadata("functionUsageInsideClass.kt")
    public void testFunctionUsageInsideClass() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionUsageInsideClass.kt");
    }
    @Test
    @TestMetadata("functionUseSiteSubstitution.kt")
    public void testFunctionUseSiteSubstitution() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionUseSiteSubstitution.kt");
    }
    @Test
    @TestMetadata("functionUseSiteSubstitution2.kt")
    public void testFunctionUseSiteSubstitution2() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionUseSiteSubstitution2.kt");
    }
    @Test
    @TestMetadata("functionWithReceiverAndContext.kt")
    public void testFunctionWithReceiverAndContext() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionWithReceiverAndContext.kt");
    }
    @Test
    @TestMetadata("functionWithUnresolvedReceiverAndContext.kt")
    public void testFunctionWithUnresolvedReceiverAndContext() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionWithUnresolvedReceiverAndContext.kt");
    }
    @Test
    @TestMetadata("functionWithoutContext.kt")
    public void testFunctionWithoutContext() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/functionWithoutContext.kt");
    }
    @Test
    @TestMetadata("implicitInvoke.kt")
    public void testImplicitInvoke() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/implicitInvoke.kt");
    }
    @Test
    @TestMetadata("implicitInvoke2.kt")
    public void testImplicitInvoke2() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/implicitInvoke2.kt");
    }
    @Test
    @TestMetadata("propertyDeclarationSiteSubstitution.kt")
    public void testPropertyDeclarationSiteSubstitution() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyDeclarationSiteSubstitution.kt");
    }
    @Test
    @TestMetadata("propertyNotEnoughContextWithContextParameter.kt")
    public void testPropertyNotEnoughContextWithContextParameter() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyNotEnoughContextWithContextParameter.kt");
    }
    @Test
    @TestMetadata("propertyNotEnoughContextWithContextParameterAndUnrelatedReceiver.kt")
    public void testPropertyNotEnoughContextWithContextParameterAndUnrelatedReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyNotEnoughContextWithContextParameterAndUnrelatedReceiver.kt");
    }
    @Test
    @TestMetadata("propertyNotEnoughContextWithContextReceiver.kt")
    public void testPropertyNotEnoughContextWithContextReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyNotEnoughContextWithContextReceiver.kt");
    }
    @Test
    @TestMetadata("propertyNotEnoughContextWithReceiver.kt")
    public void testPropertyNotEnoughContextWithReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyNotEnoughContextWithReceiver.kt");
    }
    @Test
    @TestMetadata("propertyOutsideClass.kt")
    public void testPropertyOutsideClass() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyOutsideClass.kt");
    }
    @Test
    @TestMetadata("propertyParameter.kt")
    public void testPropertyParameter() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyParameter.kt");
    }
    @Test
    @TestMetadata("propertyUsageInsideClass.kt")
    public void testPropertyUsageInsideClass() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyUsageInsideClass.kt");
    }
    @Test
    @TestMetadata("propertyUseSiteSubstitution.kt")
    public void testPropertyUseSiteSubstitution() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyUseSiteSubstitution.kt");
    }
    @Test
    @TestMetadata("propertyUseSiteSubstitution2.kt")
    public void testPropertyUseSiteSubstitution2() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyUseSiteSubstitution2.kt");
    }
    @Test
    @TestMetadata("propertyWithReceiverAndContext.kt")
    public void testPropertyWithReceiverAndContext() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyWithReceiverAndContext.kt");
    }
    @Test
    @TestMetadata("propertyWithUnresolvedReceiverAndContext.kt")
    public void testPropertyWithUnresolvedReceiverAndContext() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyWithUnresolvedReceiverAndContext.kt");
    }
    @Test
    @TestMetadata("propertyWithoutContext.kt")
    public void testPropertyWithoutContext() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/propertyWithoutContext.kt");
    }
    @Test
    @TestMetadata("smartCastedContextParameterFromFunction.kt")
    public void testSmartCastedContextParameterFromFunction() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/smartCastedContextParameterFromFunction.kt");
    }
    @Test
    @TestMetadata("smartCastedContextParameterFromFunction2.kt")
    public void testSmartCastedContextParameterFromFunction2() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/smartCastedContextParameterFromFunction2.kt");
    }
    @Test
    @TestMetadata("smartCastedContextParameterWithImplicitInvoke.kt")
    public void testSmartCastedContextParameterWithImplicitInvoke() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/contextParameters/smartCastedContextParameterWithImplicitInvoke.kt");
    }
  }
  /**
   * Generated tests over the {@code .../singleByPsi/danglingAnnotations} test-data
   * directory; each test method delegates to {@code runTest} with one data file.
   * NOTE(review): auto-generated — regenerate rather than hand-edit.
   */
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations")
  @TestDataPath("$PROJECT_ROOT")
  public class DanglingAnnotations {
    // Coverage check: every .kt data file must have a generated test method.
    @Test
    public void testAllFilesPresentInDanglingAnnotations() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("DanglingAnnotationsResolvedAnonymous.kt")
    public void testDanglingAnnotationsResolvedAnonymous() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations/DanglingAnnotationsResolvedAnonymous.kt");
    }
    @Test
    @TestMetadata("DanglingAnnotationsResolvedClass.kt")
    public void testDanglingAnnotationsResolvedClass() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations/DanglingAnnotationsResolvedClass.kt");
    }
    @Test
    @TestMetadata("DanglingAnnotationsResolvedFile.kt")
    public void testDanglingAnnotationsResolvedFile() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations/DanglingAnnotationsResolvedFile.kt");
    }
    @Test
    @TestMetadata("DanglingAnnotationsResolvedLocal.kt")
    public void testDanglingAnnotationsResolvedLocal() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations/DanglingAnnotationsResolvedLocal.kt");
    }
    @Test
    @TestMetadata("DanglingAnnotationsResolvedPackageFile.kt")
    public void testDanglingAnnotationsResolvedPackageFile() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations/DanglingAnnotationsResolvedPackageFile.kt");
    }
    @Test
    @TestMetadata("DanglingAnnotationsResolvedTopLevel.kt")
    public void testDanglingAnnotationsResolvedTopLevel() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/danglingAnnotations/DanglingAnnotationsResolvedTopLevel.kt");
    }
  }
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors")
@TestDataPath("$PROJECT_ROOT")
public class DelegatedPropertyAccessors {
    // Coverage check: fails if a .kt data file under delegatedPropertyAccessors
    // lacks a generated test method (the "withTestCompilerPluginEnabled" subset
    // is excluded). Generated code — regenerate rather than hand-edit.
    @Test
    public void testAllFilesPresentInDelegatedPropertyAccessors() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    /**
     * Generated tests over {@code .../delegatedPropertyAccessors/inSource};
     * each test method delegates to {@code runTest} with one data file.
     * NOTE(review): auto-generated — regenerate rather than hand-edit.
     */
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource")
    @TestDataPath("$PROJECT_ROOT")
    public class InSource {
      // Coverage check: every .kt data file must have a generated test method.
      @Test
      public void testAllFilesPresentInInSource() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("getExtension.kt")
      public void testGetExtension() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource/getExtension.kt");
      }
      @Test
      @TestMetadata("getMember.kt")
      public void testGetMember() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource/getMember.kt");
      }
      @Test
      @TestMetadata("getMultipleDeclarations.kt")
      public void testGetMultipleDeclarations() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource/getMultipleDeclarations.kt");
      }
      @Test
      @TestMetadata("getOneFakeOverride.kt")
      public void testGetOneFakeOverride() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource/getOneFakeOverride.kt");
      }
      @Test
      @TestMetadata("provideDelegate.kt")
      public void testProvideDelegate() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource/provideDelegate.kt");
      }
      @Test
      @TestMetadata("provideDelegate_explicit.kt")
      public void testProvideDelegate_explicit() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inSource/provideDelegate_explicit.kt");
      }
    }
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inStandardLibrary")
@TestDataPath("$PROJECT_ROOT")
public class InStandardLibrary {
@Test
public void testAllFilesPresentInInStandardLibrary() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inStandardLibrary"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("lazy.kt")
public void testLazy() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inStandardLibrary/lazy.kt");
}
@Test
@TestMetadata("notNull.kt")
public void testNotNull() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/inStandardLibrary/notNull.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/withErrors")
@TestDataPath("$PROJECT_ROOT")
public class WithErrors {
@Test
public void testAllFilesPresentInWithErrors() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("provideDelegate_badDelegate.kt")
public void testProvideDelegate_badDelegate() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/withErrors/provideDelegate_badDelegate.kt");
}
@Test
@TestMetadata("unresolved.kt")
public void testUnresolved() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/delegatedPropertyAccessors/withErrors/unresolved.kt");
}
}
}
  // Generated test group: one test per .kt data file under
  // testData/components/resolver/singleByPsi/destructuringDeclaration; each delegates to runTest(path).
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration")
  @TestDataPath("$PROJECT_ROOT")
  public class DestructuringDeclaration {
    // Fails if a test data file exists without a corresponding generated test method (keeps generator output in sync).
    @Test
    public void testAllFilesPresentInDestructuringDeclaration() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("dataClassAnotherName.kt")
    public void testDataClassAnotherName() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration/dataClassAnotherName.kt");
    }
    @Test
    @TestMetadata("dataClassLocalDefinition.kt")
    public void testDataClassLocalDefinition() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration/dataClassLocalDefinition.kt");
    }
    @Test
    @TestMetadata("namedFull.kt")
    public void testNamedFull() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration/namedFull.kt");
    }
    @Test
    @TestMetadata("namedShort.kt")
    public void testNamedShort() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration/namedShort.kt");
    }
    @Test
    @TestMetadata("positionalFull.kt")
    public void testPositionalFull() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration/positionalFull.kt");
    }
    @Test
    @TestMetadata("positionalShort.kt")
    public void testPositionalShort() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/destructuringDeclaration/positionalShort.kt");
    }
  }
  // Generated test group: one test per .kt data file under
  // testData/components/resolver/singleByPsi/expressions; each delegates to runTest(path).
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions")
  @TestDataPath("$PROJECT_ROOT")
  public class Expressions {
    // Fails if a test data file exists without a corresponding generated test method (keeps generator output in sync).
    @Test
    public void testAllFilesPresentInExpressions() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("arrayAccessConvention.kt")
    public void testArrayAccessConvention() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConvention.kt");
    }
    @Test
    @TestMetadata("arrayAccessConventionAsClass.kt")
    public void testArrayAccessConventionAsClass() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConventionAsClass.kt");
    }
    @Test
    @TestMetadata("arrayAccessConventionAsClass_lhs.kt")
    public void testArrayAccessConventionAsClass_lhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConventionAsClass_lhs.kt");
    }
    @Test
    @TestMetadata("arrayAccessConventionAsClass_op.kt")
    public void testArrayAccessConventionAsClass_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConventionAsClass_op.kt");
    }
    @Test
    @TestMetadata("arrayAccessConvention_complexReceivers.kt")
    public void testArrayAccessConvention_complexReceivers() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConvention_complexReceivers.kt");
    }
    @Test
    @TestMetadata("arrayAccessConvention_complexReceivers_lhs.kt")
    public void testArrayAccessConvention_complexReceivers_lhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConvention_complexReceivers_lhs.kt");
    }
    @Test
    @TestMetadata("arrayAccessConvention_complexReceivers_op.kt")
    public void testArrayAccessConvention_complexReceivers_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConvention_complexReceivers_op.kt");
    }
    @Test
    @TestMetadata("arrayAccessConvention_lhs.kt")
    public void testArrayAccessConvention_lhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConvention_lhs.kt");
    }
    @Test
    @TestMetadata("arrayAccessConvention_op.kt")
    public void testArrayAccessConvention_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/arrayAccessConvention_op.kt");
    }
    @Test
    @TestMetadata("candidateWithImplicitReturnType.kt")
    public void testCandidateWithImplicitReturnType() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/candidateWithImplicitReturnType.kt");
    }
    @Test
    @TestMetadata("infixFunction.kt")
    public void testInfixFunction() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/infixFunction.kt");
    }
    @Test
    @TestMetadata("infixFunction_op.kt")
    public void testInfixFunction_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/infixFunction_op.kt");
    }
    @Test
    @TestMetadata("plusAssignOnVal.kt")
    public void testPlusAssignOnVal() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/plusAssignOnVal.kt");
    }
    @Test
    @TestMetadata("plusAssignOnVal_lhs.kt")
    public void testPlusAssignOnVal_lhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/plusAssignOnVal_lhs.kt");
    }
    @Test
    @TestMetadata("plusAssignOnVal_operator.kt")
    public void testPlusAssignOnVal_operator() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/plusAssignOnVal_operator.kt");
    }
    @Test
    @TestMetadata("plusAssignWithArrayGetConvention.kt")
    public void testPlusAssignWithArrayGetConvention() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/plusAssignWithArrayGetConvention.kt");
    }
    @Test
    @TestMetadata("plusAssignWithArrayGetConvention_lhs.kt")
    public void testPlusAssignWithArrayGetConvention_lhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/plusAssignWithArrayGetConvention_lhs.kt");
    }
    @Test
    @TestMetadata("plusAssignWithArrayGetConvention_op.kt")
    public void testPlusAssignWithArrayGetConvention_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/plusAssignWithArrayGetConvention_op.kt");
    }
    @Test
    @TestMetadata("postfixInc.kt")
    public void testPostfixInc() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/postfixInc.kt");
    }
    @Test
    @TestMetadata("postfixInc_base.kt")
    public void testPostfixInc_base() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/postfixInc_base.kt");
    }
    @Test
    @TestMetadata("postfixInc_op.kt")
    public void testPostfixInc_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/postfixInc_op.kt");
    }
    @Test
    @TestMetadata("prefixDec.kt")
    public void testPrefixDec() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/prefixDec.kt");
    }
    @Test
    @TestMetadata("prefixDec_base.kt")
    public void testPrefixDec_base() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/prefixDec_base.kt");
    }
    @Test
    @TestMetadata("prefixDec_op.kt")
    public void testPrefixDec_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/prefixDec_op.kt");
    }
    @Test
    @TestMetadata("setOperator.kt")
    public void testSetOperator() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/setOperator.kt");
    }
    @Test
    @TestMetadata("setOperator_lhs.kt")
    public void testSetOperator_lhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/setOperator_lhs.kt");
    }
    @Test
    @TestMetadata("setOperator_op.kt")
    public void testSetOperator_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/setOperator_op.kt");
    }
    @Test
    @TestMetadata("stringConcatenation.kt")
    public void testStringConcatenation() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/stringConcatenation.kt");
    }
    @Test
    @TestMetadata("unaryMinus.kt")
    public void testUnaryMinus() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/unaryMinus.kt");
    }
    @Test
    @TestMetadata("unaryMinus_base.kt")
    public void testUnaryMinus_base() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/unaryMinus_base.kt");
    }
    @Test
    @TestMetadata("unaryMinus_op.kt")
    public void testUnaryMinus_op() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/unaryMinus_op.kt");
    }
    @Test
    @TestMetadata("varConvention.kt")
    public void testVarConvention() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/varConvention.kt");
    }
    @Test
    @TestMetadata("varConvention_lhs.kt")
    public void testVarConvention_lhs() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/varConvention_lhs.kt");
    }
    @Test
    @TestMetadata("varConvention_operator.kt")
    public void testVarConvention_operator() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/expressions/varConvention_operator.kt");
    }
  }
  // Generated test group: one test per .kt data file under
  // testData/components/resolver/singleByPsi/forLoopIn; each delegates to runTest(path).
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn")
  @TestDataPath("$PROJECT_ROOT")
  public class ForLoopIn {
    // Fails if a test data file exists without a corresponding generated test method (keeps generator output in sync).
    @Test
    public void testAllFilesPresentInForLoopIn() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inBuiltIns")
    @TestDataPath("$PROJECT_ROOT")
    public class InBuiltIns {
      @Test
      public void testAllFilesPresentInInBuiltIns() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inBuiltIns"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("extension.kt")
      public void testExtension() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inBuiltIns/extension.kt");
      }
      @Test
      @TestMetadata("member.kt")
      public void testMember() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inBuiltIns/member.kt");
      }
    }
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inLibrary")
    @TestDataPath("$PROJECT_ROOT")
    public class InLibrary {
      @Test
      public void testAllFilesPresentInInLibrary() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inLibrary"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("extension.kt")
      public void testExtension() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inLibrary/extension.kt");
      }
    }
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inSource")
    @TestDataPath("$PROJECT_ROOT")
    public class InSource {
      @Test
      @TestMetadata("allExtensions.kt")
      public void testAllExtensions() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inSource/allExtensions.kt");
      }
      @Test
      public void testAllFilesPresentInInSource() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inSource"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("allMembers.kt")
      public void testAllMembers() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inSource/allMembers.kt");
      }
      @Nested
      @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inSource/withErrors")
      @TestDataPath("$PROJECT_ROOT")
      public class WithErrors {
        @Test
        public void testAllFilesPresentInWithErrors() {
          KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inSource/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
        }
        @Test
        @TestMetadata("nextMissing.kt")
        public void testNextMissing() {
          runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/inSource/withErrors/nextMissing.kt");
        }
      }
    }
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/withErrors")
    @TestDataPath("$PROJECT_ROOT")
    public class WithErrors {
      @Test
      public void testAllFilesPresentInWithErrors() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("unresolvedIterator.kt")
      public void testUnresolvedIterator() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/forLoopIn/withErrors/unresolvedIterator.kt");
      }
    }
  }
  // Generated test group: one test per .kt data file under
  // testData/components/resolver/singleByPsi/globallyDuplicateLibraries; each delegates to runTest(path).
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/globallyDuplicateLibraries")
  @TestDataPath("$PROJECT_ROOT")
  public class GloballyDuplicateLibraries {
    // Fails if a test data file exists without a corresponding generated test method (keeps generator output in sync).
    @Test
    public void testAllFilesPresentInGloballyDuplicateLibraries() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/globallyDuplicateLibraries"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("functionCallsInIndependentModulesWithGloballyDuplicateJavaLibrary.kt")
    public void testFunctionCallsInIndependentModulesWithGloballyDuplicateJavaLibrary() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/globallyDuplicateLibraries/functionCallsInIndependentModulesWithGloballyDuplicateJavaLibrary.kt");
    }
    @Test
    @TestMetadata("functionCallsInIndependentModulesWithGloballyDuplicateLibrary.kt")
    public void testFunctionCallsInIndependentModulesWithGloballyDuplicateLibrary() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/globallyDuplicateLibraries/functionCallsInIndependentModulesWithGloballyDuplicateLibrary.kt");
    }
  }
  // Generated test group: one test per .kt data file under
  // testData/components/resolver/singleByPsi/invoke; each delegates to runTest(path).
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke")
  @TestDataPath("$PROJECT_ROOT")
  public class Invoke {
    // Fails if a test data file exists without a corresponding generated test method (keeps generator output in sync).
    @Test
    public void testAllFilesPresentInInvoke() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("functionClash.kt")
    public void testFunctionClash() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/functionClash.kt");
    }
    @Test
    @TestMetadata("functionClashWithParenthesis.kt")
    public void testFunctionClashWithParenthesis() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/functionClashWithParenthesis.kt");
    }
    @Test
    @TestMetadata("lambdaAndParens.kt")
    public void testLambdaAndParens() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaAndParens.kt");
    }
    @Test
    @TestMetadata("lambdaNoPar.kt")
    public void testLambdaNoPar() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaNoPar.kt");
    }
    @Test
    @TestMetadata("lambdaNoParLabel.kt")
    public void testLambdaNoParLabel() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaNoParLabel.kt");
    }
    @Test
    @TestMetadata("lambdaNoParRCurly.kt")
    public void testLambdaNoParRCurly() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaNoParRCurly.kt");
    }
    @Test
    @TestMetadata("lambdaWithExtensionParameter_extensionProperty_regularCall.kt")
    public void testLambdaWithExtensionParameter_extensionProperty_regularCall() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaWithExtensionParameter_extensionProperty_regularCall.kt");
    }
    @Test
    @TestMetadata("lambdaWithExtensionParameter_extensionProperty_safeCall.kt")
    public void testLambdaWithExtensionParameter_extensionProperty_safeCall() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaWithExtensionParameter_extensionProperty_safeCall.kt");
    }
    @Test
    @TestMetadata("lambdaWithExtensionParameter_parameter_regularCall.kt")
    public void testLambdaWithExtensionParameter_parameter_regularCall() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaWithExtensionParameter_parameter_regularCall.kt");
    }
    @Test
    @TestMetadata("lambdaWithExtensionParameter_parameter_safeCall.kt")
    public void testLambdaWithExtensionParameter_parameter_safeCall() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaWithExtensionParameter_parameter_safeCall.kt");
    }
    @Test
    @TestMetadata("lambdaWithExtensionParameter_property_regularCall.kt")
    public void testLambdaWithExtensionParameter_property_regularCall() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaWithExtensionParameter_property_regularCall.kt");
    }
    @Test
    @TestMetadata("lambdaWithExtensionParameter_property_safeCall.kt")
    public void testLambdaWithExtensionParameter_property_safeCall() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/lambdaWithExtensionParameter_property_safeCall.kt");
    }
    @Test
    @TestMetadata("noParams.kt")
    public void testNoParams() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/noParams.kt");
    }
    @Test
    @TestMetadata("noParamsRPar.kt")
    public void testNoParamsRPar() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/noParamsRPar.kt");
    }
    @Test
    @TestMetadata("nonemptyLambdaRPar.kt")
    public void testNonemptyLambdaRPar() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/nonemptyLambdaRPar.kt");
    }
    @Test
    @TestMetadata("oneParam.kt")
    public void testOneParam() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/oneParam.kt");
    }
    @Test
    @TestMetadata("oneParamRPar.kt")
    public void testOneParamRPar() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/oneParamRPar.kt");
    }
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects")
    @TestDataPath("$PROJECT_ROOT")
    public class OnObjects {
      @Test
      public void testAllFilesPresentInOnObjects() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("companionObject1.kt")
      public void testCompanionObject1() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/companionObject1.kt");
      }
      @Test
      @TestMetadata("companionObject2.kt")
      public void testCompanionObject2() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/companionObject2.kt");
      }
      @Test
      @TestMetadata("companionObjectWithName1.kt")
      public void testCompanionObjectWithName1() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/companionObjectWithName1.kt");
      }
      @Test
      @TestMetadata("companionObjectWithName2.kt")
      public void testCompanionObjectWithName2() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/companionObjectWithName2.kt");
      }
      @Test
      @TestMetadata("companionObject_typeAliased.kt")
      public void testCompanionObject_typeAliased() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/companionObject_typeAliased.kt");
      }
      @Test
      @TestMetadata("nestedObject1.kt")
      public void testNestedObject1() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/nestedObject1.kt");
      }
      @Test
      @TestMetadata("nestedObject2.kt")
      public void testNestedObject2() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/nestedObject2.kt");
      }
      @Test
      @TestMetadata("nestedObject3.kt")
      public void testNestedObject3() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/nestedObject3.kt");
      }
      @Test
      @TestMetadata("nestedObject4.kt")
      public void testNestedObject4() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/nestedObject4.kt");
      }
      @Test
      @TestMetadata("simpleObject1.kt")
      public void testSimpleObject1() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/simpleObject1.kt");
      }
      @Test
      @TestMetadata("simpleObject2.kt")
      public void testSimpleObject2() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/simpleObject2.kt");
      }
      @Test
      @TestMetadata("simpleObject_typeAliased.kt")
      public void testSimpleObject_typeAliased() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/onObjects/simpleObject_typeAliased.kt");
      }
    }
    @Nested
    @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/withErrors")
    @TestDataPath("$PROJECT_ROOT")
    public class WithErrors {
      @Test
      public void testAllFilesPresentInWithErrors() {
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
      }
      @Test
      @TestMetadata("lambdaAndParensIncorrectVararg.kt")
      public void testLambdaAndParensIncorrectVararg() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/withErrors/lambdaAndParensIncorrectVararg.kt");
      }
      @Test
      @TestMetadata("lambdaNoParIncorrectVararg.kt")
      public void testLambdaNoParIncorrectVararg() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/withErrors/lambdaNoParIncorrectVararg.kt");
      }
      @Test
      @TestMetadata("lambdaNoParLabelIncorrectVararg.kt")
      public void testLambdaNoParLabelIncorrectVararg() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/withErrors/lambdaNoParLabelIncorrectVararg.kt");
      }
      @Test
      @TestMetadata("lambdaNoParRCurlyIncorrectVararg.kt")
      public void testLambdaNoParRCurlyIncorrectVararg() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/withErrors/lambdaNoParRCurlyIncorrectVararg.kt");
      }
      @Test
      @TestMetadata("nonemptyLambdaRParIncorrectVararg.kt")
      public void testNonemptyLambdaRParIncorrectVararg() {
        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/invoke/withErrors/nonemptyLambdaRParIncorrectVararg.kt");
      }
    }
  }
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion")
@TestDataPath("$PROJECT_ROOT")
public class IsImplicitReferenceToCompanion {
@Test
public void testAllFilesPresentInIsImplicitReferenceToCompanion() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("callableReference.kt")
public void testCallableReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/callableReference.kt");
}
@Test
@TestMetadata("callableReference2.kt")
public void testCallableReference2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/callableReference2.kt");
}
@Test
@TestMetadata("classAccessWithExplicitReferenceToCompanion.kt")
public void testClassAccessWithExplicitReferenceToCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/classAccessWithExplicitReferenceToCompanion.kt");
}
@Test
@TestMetadata("classAccessWithExplicitReferenceToNamedCompanion.kt")
public void testClassAccessWithExplicitReferenceToNamedCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/classAccessWithExplicitReferenceToNamedCompanion.kt");
}
@Test
@TestMetadata("constructorCall.kt")
public void testConstructorCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/constructorCall.kt");
}
@Test
@TestMetadata("constructorCallWithInvokeInCompanion.kt")
public void testConstructorCallWithInvokeInCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/constructorCallWithInvokeInCompanion.kt");
}
@Test
@TestMetadata("explicitReferenceToCompanion.kt")
public void testExplicitReferenceToCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/explicitReferenceToCompanion.kt");
}
@Test
@TestMetadata("explicitReferenceToNamedCompanion.kt")
public void testExplicitReferenceToNamedCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/explicitReferenceToNamedCompanion.kt");
}
@Test
@TestMetadata("impicitReferenceToCompanion.kt")
public void testImpicitReferenceToCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/impicitReferenceToCompanion.kt");
}
@Test
@TestMetadata("invokeInCompanion.kt")
public void testInvokeInCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/invokeInCompanion.kt");
}
@Test
@TestMetadata("invokeInCompanion_explicitCompanion.kt")
public void testInvokeInCompanion_explicitCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/invokeInCompanion_explicitCompanion.kt");
}
@Test
@TestMetadata("nestedClassWithCompanion.kt")
public void testNestedClassWithCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/nestedClassWithCompanion.kt");
}
@Test
@TestMetadata("nestedClassWithCompanionInCompanion.kt")
public void testNestedClassWithCompanionInCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/nestedClassWithCompanionInCompanion.kt");
}
@Test
@TestMetadata("nestedClassWithCompanionInCompanion_companionQualifier.kt")
public void testNestedClassWithCompanionInCompanion_companionQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/nestedClassWithCompanionInCompanion_companionQualifier.kt");
}
@Test
@TestMetadata("nestedClassWithCompanion_qualifier.kt")
public void testNestedClassWithCompanion_qualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/nestedClassWithCompanion_qualifier.kt");
}
@Test
@TestMetadata("nestedClassWithCompanion_withInvoke.kt")
public void testNestedClassWithCompanion_withInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/nestedClassWithCompanion_withInvoke.kt");
}
@Test
@TestMetadata("nestedClassWithCompanion_withInvoke_qualifier.kt")
public void testNestedClassWithCompanion_withInvoke_qualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/nestedClassWithCompanion_withInvoke_qualifier.kt");
}
@Test
@TestMetadata("qualifierReferenceToClassWithCompanion.kt")
public void testQualifierReferenceToClassWithCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/qualifierReferenceToClassWithCompanion.kt");
}
@Test
@TestMetadata("typealiasInvokeInCompanion.kt")
public void testTypealiasInvokeInCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/typealiasInvokeInCompanion.kt");
}
@Test
@TestMetadata("typealiasReferenceToCompanion.kt")
public void testTypealiasReferenceToCompanion() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/typealiasReferenceToCompanion.kt");
}
@Test
@TestMetadata("typealiasReferenceToCompanionReceiver.kt")
public void testTypealiasReferenceToCompanionReceiver() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/isImplicitReferenceToCompanion/typealiasReferenceToCompanionReceiver.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/java")
@TestDataPath("$PROJECT_ROOT")
public class Java {
// Resolver tests over Java-interop test data. Each test method delegates to the
// shared runTest driver with one checked-in .kt test-data file. This class
// appears to be auto-generated from the test-data directory — prefer regenerating
// over manual edits (TODO confirm which generator owns this file).
@Test
public void testAllFilesPresentInJava() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/java"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("defaultJavaConstructorWithTypeParameter.kt")
public void testDefaultJavaConstructorWithTypeParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/defaultJavaConstructorWithTypeParameter.kt");
}
@Test
@TestMetadata("JavaAnnotationParameter.kt")
public void testJavaAnnotationParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaAnnotationParameter.kt");
}
@Test
@TestMetadata("JavaCallWithGenerics.kt")
public void testJavaCallWithGenerics() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaCallWithGenerics.kt");
}
@Test
@TestMetadata("JavaConstructorNotNullParameter.kt")
public void testJavaConstructorNotNullParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaConstructorNotNullParameter.kt");
}
@Test
@TestMetadata("JavaEnumEntry.kt")
public void testJavaEnumEntry() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaEnumEntry.kt");
}
@Test
@TestMetadata("JavaEnumValueOf.kt")
public void testJavaEnumValueOf() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaEnumValueOf.kt");
}
@Test
@TestMetadata("JavaReference.kt")
public void testJavaReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaReference.kt");
}
@Test
@TestMetadata("JavaSameSourceLibrary.kt")
public void testJavaSameSourceLibrary() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaSameSourceLibrary.kt");
}
@Test
@TestMetadata("JavaStaticMethod.kt")
public void testJavaStaticMethod() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/JavaStaticMethod.kt");
}
@Test
@TestMetadata("ReferenceToSam.kt")
public void testReferenceToSam() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/ReferenceToSam.kt");
}
@Test
@TestMetadata("SyntheticProperty.kt")
public void testSyntheticProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/SyntheticProperty.kt");
}
@Test
@TestMetadata("SyntheticPropertyJavaOverride.kt")
public void testSyntheticPropertyJavaOverride() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/SyntheticPropertyJavaOverride.kt");
}
@Test
@TestMetadata("SyntheticPropertyJavaOverrideGeneric.kt")
public void testSyntheticPropertyJavaOverrideGeneric() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/SyntheticPropertyJavaOverrideGeneric.kt");
}
@Test
@TestMetadata("SyntheticPropertyKotlinOverride.kt")
public void testSyntheticPropertyKotlinOverride() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/SyntheticPropertyKotlinOverride.kt");
}
@Test
@TestMetadata("SyntheticPropertyKotlinOverrideGeneric.kt")
public void testSyntheticPropertyKotlinOverrideGeneric() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/SyntheticPropertyKotlinOverrideGeneric.kt");
}
// Sub-suite for the enumWithCustomGetName test-data subdirectory.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/java/enumWithCustomGetName")
@TestDataPath("$PROJECT_ROOT")
public class EnumWithCustomGetName {
@Test
public void testAllFilesPresentInEnumWithCustomGetName() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/java/enumWithCustomGetName"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("FunctionCallOnConstant.kt")
public void testFunctionCallOnConstant() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/enumWithCustomGetName/FunctionCallOnConstant.kt");
}
@Test
@TestMetadata("FunctionCallOnVariable.kt")
public void testFunctionCallOnVariable() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/enumWithCustomGetName/FunctionCallOnVariable.kt");
}
@Test
@TestMetadata("PropertyAccessOnConstant.kt")
public void testPropertyAccessOnConstant() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/enumWithCustomGetName/PropertyAccessOnConstant.kt");
}
@Test
@TestMetadata("PropertyAccessOnVariable.kt")
public void testPropertyAccessOnVariable() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/java/enumWithCustomGetName/PropertyAccessOnVariable.kt");
}
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc")
@TestDataPath("$PROJECT_ROOT")
public class KDoc {
@Test
public void testAllFilesPresentInKDoc() {
// Consistency check: every matching .kt file in the kDoc directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags")
@TestDataPath("$PROJECT_ROOT")
public class BlockTags {
// KDoc block-tag resolution tests (@param, @throws, @constructor, ...). Each test
// delegates to the shared runTest driver with one checked-in .kt test-data file.
// Appears auto-generated from the test-data directory — prefer regenerating over
// manual edits (TODO confirm generator).
@Test
@TestMetadata("actualConstructor.kt")
public void testActualConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/actualConstructor.kt");
}
@Test
public void testAllFilesPresentInBlockTags() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("allTagSectionsRequiringSubjects.kt")
public void testAllTagSectionsRequiringSubjects() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/allTagSectionsRequiringSubjects.kt");
}
@Test
@TestMetadata("constructorBlockTag.kt")
public void testConstructorBlockTag() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/constructorBlockTag.kt");
}
@Test
@TestMetadata("constructorParameterInDifferentPlaces.kt")
public void testConstructorParameterInDifferentPlaces() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/constructorParameterInDifferentPlaces.kt");
}
@Test
@TestMetadata("deprecatedProperties.kt")
public void testDeprecatedProperties() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/deprecatedProperties.kt");
}
@Test
@TestMetadata("longNameInTagSections.kt")
public void testLongNameInTagSections() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/longNameInTagSections.kt");
}
@Test
@TestMetadata("overridePropertyInPrimaryConstructor.kt")
public void testOverridePropertyInPrimaryConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/overridePropertyInPrimaryConstructor.kt");
}
@Test
@TestMetadata("paramBlockOnFunction.kt")
public void testParamBlockOnFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/paramBlockOnFunction.kt");
}
@Test
@TestMetadata("paramBlockTag.kt")
public void testParamBlockTag() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/paramBlockTag.kt");
}
@Test
@TestMetadata("paramTagWithRegularParameter.kt")
public void testParamTagWithRegularParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/paramTagWithRegularParameter.kt");
}
@Test
@TestMetadata("prioritiesWithSameNames.kt")
public void testPrioritiesWithSameNames() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/prioritiesWithSameNames.kt");
}
@Test
@TestMetadata("receiverReferenceAsSubject.kt")
public void testReceiverReferenceAsSubject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/receiverReferenceAsSubject.kt");
}
@Test
@TestMetadata("throwsAndException.kt")
public void testThrowsAndException() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/throwsAndException.kt");
}
@Test
@TestMetadata("throwsTag.kt")
public void testThrowsTag() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/throwsTag.kt");
}
@Test
@TestMetadata("typeParameters.kt")
public void testTypeParameters() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/blockTags/typeParameters.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject")
@TestDataPath("$PROJECT_ROOT")
public class CompanionObject {
// KDoc resolution tests for references into companion objects. Each test delegates
// to the shared runTest driver with one checked-in .kt test-data file. Appears
// auto-generated from the test-data directory — prefer regenerating over manual
// edits (TODO confirm generator).
@Test
public void testAllFilesPresentInCompanionObject() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("childQualifiedFunctionFromSuperClassCompanion.kt")
public void testChildQualifiedFunctionFromSuperClassCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/childQualifiedFunctionFromSuperClassCompanion.kt");
}
@Test
@TestMetadata("qualifiedClassNestedInClassNestedInCompanion.kt")
public void testQualifiedClassNestedInClassNestedInCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedClassNestedInClassNestedInCompanion.kt");
}
@Test
@TestMetadata("qualifiedFunctionFromCompanion.kt")
public void testQualifiedFunctionFromCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedFunctionFromCompanion.kt");
}
@Test
@TestMetadata("qualifiedFunctionFromCompanionInOtherFile.kt")
public void testQualifiedFunctionFromCompanionInOtherFile() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedFunctionFromCompanionInOtherFile.kt");
}
@Test
@TestMetadata("qualifiedFunctionFromCompanionWithImport.kt")
public void testQualifiedFunctionFromCompanionWithImport() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedFunctionFromCompanionWithImport.kt");
}
@Test
@TestMetadata("qualifiedFunctionFromOwnCompanion.kt")
public void testQualifiedFunctionFromOwnCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedFunctionFromOwnCompanion.kt");
}
@Test
@TestMetadata("qualifiedFunctionFromSuperClassCompanion.kt")
public void testQualifiedFunctionFromSuperClassCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedFunctionFromSuperClassCompanion.kt");
}
@Test
@TestMetadata("qualifiedFunctionFromSuperClassCompanionInOtherFile.kt")
public void testQualifiedFunctionFromSuperClassCompanionInOtherFile() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedFunctionFromSuperClassCompanionInOtherFile.kt");
}
@Test
@TestMetadata("qualifiedFunctionOfClassNestedInCompanion.kt")
public void testQualifiedFunctionOfClassNestedInCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/qualifiedFunctionOfClassNestedInCompanion.kt");
}
@Test
@TestMetadata("unqualifiedFunctionFromOwnCompanion.kt")
public void testUnqualifiedFunctionFromOwnCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/unqualifiedFunctionFromOwnCompanion.kt");
}
@Test
@TestMetadata("unqualifiedFunctionFromSuperClassCompanion.kt")
public void testUnqualifiedFunctionFromSuperClassCompanion() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/companionObject/unqualifiedFunctionFromSuperClassCompanion.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions")
@TestDataPath("$PROJECT_ROOT")
public class Extensions {
// KDoc resolution tests for extension callables (including a nested Generics
// sub-suite). Each test delegates to the shared runTest driver with one checked-in
// .kt test-data file. Appears auto-generated from the test-data directory —
// prefer regenerating over manual edits (TODO confirm generator).
@Test
public void testAllFilesPresentInExtensions() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("callablesWithSameName.kt")
public void testCallablesWithSameName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/callablesWithSameName.kt");
}
@Test
@TestMetadata("deprecatedExtensions.kt")
public void testDeprecatedExtensions() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/deprecatedExtensions.kt");
}
@Test
@TestMetadata("extensionFromOuterClass.kt")
public void testExtensionFromOuterClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/extensionFromOuterClass.kt");
}
@Test
@TestMetadata("fromBaseClass.kt")
public void testFromBaseClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/fromBaseClass.kt");
}
@Test
@TestMetadata("functionOverloads.kt")
public void testFunctionOverloads() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/functionOverloads.kt");
}
@Test
@TestMetadata("KT-69047.kt")
public void testKT_69047() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/KT-69047.kt");
}
@Test
@TestMetadata("nonExtensions.kt")
public void testNonExtensions() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/nonExtensions.kt");
}
@Test
@TestMetadata("qualifiers.kt")
public void testQualifiers() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/qualifiers.kt");
}
@Test
@TestMetadata("receiverFromOuterClass.kt")
public void testReceiverFromOuterClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/receiverFromOuterClass.kt");
}
@Test
@TestMetadata("receiverTypesWithSameName_nestedScopes.kt")
public void testReceiverTypesWithSameName_nestedScopes() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/receiverTypesWithSameName_nestedScopes.kt");
}
@Test
@TestMetadata("receiverTypesWithSameName_starImports.kt")
public void testReceiverTypesWithSameName_starImports() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/receiverTypesWithSameName_starImports.kt");
}
@Test
@TestMetadata("topLevelFunction.kt")
public void testTopLevelFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/topLevelFunction.kt");
}
@Test
@TestMetadata("topLevelProperty.kt")
public void testTopLevelProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/topLevelProperty.kt");
}
@Test
@TestMetadata("topLevelTypeVsMemberFunctionAsReceiver.kt")
public void testTopLevelTypeVsMemberFunctionAsReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/topLevelTypeVsMemberFunctionAsReceiver.kt");
}
@Test
@TestMetadata("typeAliasReceiver.kt")
public void testTypeAliasReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/typeAliasReceiver.kt");
}
@Test
@TestMetadata("typeCompatibility.kt")
public void testTypeCompatibility() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/typeCompatibility.kt");
}
// Sub-suite for generic-receiver extension resolution test data.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics")
@TestDataPath("$PROJECT_ROOT")
public class Generics {
@Test
public void testAllFilesPresentInGenerics() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("candidateReceiverWithBoundedGeneric.kt")
public void testCandidateReceiverWithBoundedGeneric() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/candidateReceiverWithBoundedGeneric.kt");
}
@Test
@TestMetadata("extensionFunctionWithBoundedGeneric.kt")
public void testExtensionFunctionWithBoundedGeneric() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionFunctionWithBoundedGeneric.kt");
}
@Test
@TestMetadata("extensionFunctionWithVariance.kt")
public void testExtensionFunctionWithVariance() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionFunctionWithVariance.kt");
}
@Test
@TestMetadata("extensionOnBoundedTypeParameter.kt")
public void testExtensionOnBoundedTypeParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionOnBoundedTypeParameter.kt");
}
@Test
@TestMetadata("extensionOnTypeParameter.kt")
public void testExtensionOnTypeParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionOnTypeParameter.kt");
}
@Test
@TestMetadata("extensionsOnIterable.kt")
public void testExtensionsOnIterable() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionsOnIterable.kt");
}
@Test
@TestMetadata("extensionsRecursiveGenericsWithVariousProjections.kt")
public void testExtensionsRecursiveGenericsWithVariousProjections() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionsRecursiveGenericsWithVariousProjections.kt");
}
@Test
@TestMetadata("extensionsSimpleMultipleBounds.kt")
public void testExtensionsSimpleMultipleBounds() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionsSimpleMultipleBounds.kt");
}
@Test
@TestMetadata("extensionsSimpleVarianceCases.kt")
public void testExtensionsSimpleVarianceCases() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/extensionsSimpleVarianceCases.kt");
}
@Test
@TestMetadata("genericParameterReceiver.kt")
public void testGenericParameterReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/genericParameterReceiver.kt");
}
@Test
@TestMetadata("genericsCompatibility.kt")
public void testGenericsCompatibility() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/genericsCompatibility.kt");
}
@Test
@TestMetadata("inVarianceReceiverType.kt")
public void testInVarianceReceiverType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/inVarianceReceiverType.kt");
}
@Test
@TestMetadata("multipleBoundsGenericParameterReceiver.kt")
public void testMultipleBoundsGenericParameterReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/multipleBoundsGenericParameterReceiver.kt");
}
@Test
@TestMetadata("outVarianceReceiverType.kt")
public void testOutVarianceReceiverType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/outVarianceReceiverType.kt");
}
@Test
@TestMetadata("recursiveTypeParametersInExtensionNotUsedInReceivers.kt")
public void testRecursiveTypeParametersInExtensionNotUsedInReceivers() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/recursiveTypeParametersInExtensionNotUsedInReceivers.kt");
}
@Test
@TestMetadata("recursiveTypealiasedBounds.kt")
public void testRecursiveTypealiasedBounds() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/recursiveTypealiasedBounds.kt");
}
@Test
@TestMetadata("simpleCasesWithTypeAliasedActualReceivers.kt")
public void testSimpleCasesWithTypeAliasedActualReceivers() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/simpleCasesWithTypeAliasedActualReceivers.kt");
}
@Test
@TestMetadata("simpleCasesWithTypeAliasedExtensionReceivers.kt")
public void testSimpleCasesWithTypeAliasedExtensionReceivers() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/simpleCasesWithTypeAliasedExtensionReceivers.kt");
}
@Test
@TestMetadata("typeAliasedReceiversWithStarProjectionsAndBounds.kt")
public void testTypeAliasedReceiversWithStarProjectionsAndBounds() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/typeAliasedReceiversWithStarProjectionsAndBounds.kt");
}
@Test
@TestMetadata("typeParameterSubstitution.kt")
public void testTypeParameterSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/typeParameterSubstitution.kt");
}
@Test
@TestMetadata("typeWithGenericsReceiver.kt")
public void testTypeWithGenericsReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/typeWithGenericsReceiver.kt");
}
@Test
@TestMetadata("typeWithStarProjectionReceiver.kt")
public void testTypeWithStarProjectionReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/typeWithStarProjectionReceiver.kt");
}
@Test
@TestMetadata("typealiasedActualAndExpectedReceiversWithDifferentArguments.kt")
public void testTypealiasedActualAndExpectedReceiversWithDifferentArguments() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/typealiasedActualAndExpectedReceiversWithDifferentArguments.kt");
}
@Test
@TestMetadata("typealiasedGenericReceiver.kt")
public void testTypealiasedGenericReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/typealiasedGenericReceiver.kt");
}
@Test
@TestMetadata("typealiasedReceiversWithOutProjection.kt")
public void testTypealiasedReceiversWithOutProjection() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/extensions/generics/typealiasedReceiversWithOutProjection.kt");
}
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/imports")
@TestDataPath("$PROJECT_ROOT")
public class Imports {
// KDoc resolution tests involving import handling. Each test delegates to the
// shared runTest driver with one checked-in .kt test-data file. Appears
// auto-generated — prefer regenerating over manual edits (TODO confirm generator).
@Test
public void testAllFilesPresentInImports() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/imports"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("KT-64190.kt")
public void testKT_64190() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/imports/KT-64190.kt");
}
@Test
@TestMetadata("SameNameClassesFromStarImports.kt")
public void testSameNameClassesFromStarImports() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/imports/SameNameClassesFromStarImports.kt");
}
@Test
@TestMetadata("TypeAliasedImport.kt")
public void testTypeAliasedImport() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/imports/TypeAliasedImport.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations")
@TestDataPath("$PROJECT_ROOT")
public class JavaDeclarations {
// KDoc resolution tests for references to Java declarations. Each test delegates
// to the shared runTest driver with one checked-in .kt test-data file. Appears
// auto-generated — prefer regenerating over manual edits (TODO confirm generator).
@Test
public void testAllFilesPresentInJavaDeclarations() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("FunctionQualifiedWithJavaSubclass.kt")
public void testFunctionQualifiedWithJavaSubclass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/FunctionQualifiedWithJavaSubclass.kt");
}
@Test
@TestMetadata("FunctionQualifiedWithKotlinSubclass.kt")
public void testFunctionQualifiedWithKotlinSubclass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/FunctionQualifiedWithKotlinSubclass.kt");
}
@Test
@TestMetadata("GetLikeJavaMethodWithArguments.kt")
public void testGetLikeJavaMethodWithArguments() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/GetLikeJavaMethodWithArguments.kt");
}
@Test
@TestMetadata("inheritedJavaMembersVsMembersOfOuterClass.kt")
public void testInheritedJavaMembersVsMembersOfOuterClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/inheritedJavaMembersVsMembersOfOuterClass.kt");
}
@Test
@TestMetadata("KT-69128.kt")
public void testKT_69128() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/KT-69128.kt");
}
@Test
@TestMetadata("KT-69736.kt")
public void testKT_69736() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/KT-69736.kt");
}
@Test
@TestMetadata("NoSyntheticFieldInClass.kt")
public void testNoSyntheticFieldInClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/NoSyntheticFieldInClass.kt");
}
@Test
@TestMetadata("StaticFieldQualified.kt")
public void testStaticFieldQualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/StaticFieldQualified.kt");
}
@Test
@TestMetadata("StaticFunctionFromBaseClass.kt")
public void testStaticFunctionFromBaseClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/StaticFunctionFromBaseClass.kt");
}
@Test
@TestMetadata("StaticFunctionFullyQualified.kt")
public void testStaticFunctionFullyQualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/StaticFunctionFullyQualified.kt");
}
@Test
@TestMetadata("StaticFunctionQualified.kt")
public void testStaticFunctionQualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/StaticFunctionQualified.kt");
}
@Test
@TestMetadata("StaticFunctionQualifiedWithJavaSubclass.kt")
public void testStaticFunctionQualifiedWithJavaSubclass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/StaticFunctionQualifiedWithJavaSubclass.kt");
}
@Test
@TestMetadata("StaticFunctionQualifiedWithKotlinSubclass.kt")
public void testStaticFunctionQualifiedWithKotlinSubclass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/StaticFunctionQualifiedWithKotlinSubclass.kt");
}
@Test
@TestMetadata("SyntheticFieldInAnnotationInterface.kt")
public void testSyntheticFieldInAnnotationInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/SyntheticFieldInAnnotationInterface.kt");
}
@Test
@TestMetadata("SyntheticFieldInClass.kt")
public void testSyntheticFieldInClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/SyntheticFieldInClass.kt");
}
@Test
@TestMetadata("SyntheticPropertyInKotlinSubclass.kt")
public void testSyntheticPropertyInKotlinSubclass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/javaDeclarations/SyntheticPropertyInKotlinSubclass.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext")
@TestDataPath("$PROJECT_ROOT")
public class LocalContext {
@Test
public void testAllFilesPresentInLocalContext() {
// Consistency check: every matching .kt file in the directory must have a corresponding test method.
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("classPropertyVsOuterClass.kt")
public void testClassPropertyVsOuterClass() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/classPropertyVsOuterClass.kt");
}
@Test
@TestMetadata("classTypeParameterVsOuterClassProperty.kt")
public void testClassTypeParameterVsOuterClassProperty() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/classTypeParameterVsOuterClassProperty.kt");
}
@Test
@TestMetadata("danglingReference.kt")
public void testDanglingReference() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/danglingReference.kt");
}
@Test
@TestMetadata("deprecatedContextDeclarations.kt")
public void testDeprecatedContextDeclarations() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/deprecatedContextDeclarations.kt");
}
@Test
@TestMetadata("deprecatedDeclarations.kt")
public void testDeprecatedDeclarations() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/deprecatedDeclarations.kt");
}
@Test
@TestMetadata("docInsidePrimaryConstructor.kt")
public void testDocInsidePrimaryConstructor() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/docInsidePrimaryConstructor.kt");
}
@Test
@TestMetadata("functionAndLocalPropertyWithSameName.kt")
public void testFunctionAndLocalPropertyWithSameName() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/functionAndLocalPropertyWithSameName.kt");
}
@Test
@TestMetadata("localFunctionVariableVsOuterProperty.kt")
public void testLocalFunctionVariableVsOuterProperty() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/localFunctionVariableVsOuterProperty.kt");
}
@Test
@TestMetadata("LocalSemiFullQualifiedCallableName.kt")
public void testLocalSemiFullQualifiedCallableName() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/LocalSemiFullQualifiedCallableName.kt");
}
@Test
@TestMetadata("LocalSemiFullQualifiedClassName.kt")
public void testLocalSemiFullQualifiedClassName() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/LocalSemiFullQualifiedClassName.kt");
}
@Test
@TestMetadata("nestedClasses.kt")
public void testNestedClasses() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/nestedClasses.kt");
}
@Test
@TestMetadata("NonLocalSemiFullQualifiedCallableName.kt")
public void testNonLocalSemiFullQualifiedCallableName() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/NonLocalSemiFullQualifiedCallableName.kt");
}
@Test
@TestMetadata("NonLocalSemiFullQualifiedClassName.kt")
public void testNonLocalSemiFullQualifiedClassName() {
// Delegates to the shared runTest driver with this case's checked-in test-data file.
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/NonLocalSemiFullQualifiedClassName.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/overloadedCallablesPriorities.kt.
@Test
@TestMetadata("overloadedCallablesPriorities.kt")
public void testOverloadedCallablesPriorities() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/overloadedCallablesPriorities.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/referenceToMemberFromParameter.kt.
@Test
@TestMetadata("referenceToMemberFromParameter.kt")
public void testReferenceToMemberFromParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/referenceToMemberFromParameter.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/referencesToEnumValuesFromEnumClass.kt.
@Test
@TestMetadata("referencesToEnumValuesFromEnumClass.kt")
public void testReferencesToEnumValuesFromEnumClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/referencesToEnumValuesFromEnumClass.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/referencesToMembersOfOuterClass.kt.
@Test
@TestMetadata("referencesToMembersOfOuterClass.kt")
public void testReferencesToMembersOfOuterClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/referencesToMembersOfOuterClass.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SelfLocalMethod.kt.
@Test
@TestMetadata("SelfLocalMethod.kt")
public void testSelfLocalMethod() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SelfLocalMethod.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SelfLocalProperty.kt.
@Test
@TestMetadata("SelfLocalProperty.kt")
public void testSelfLocalProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SelfLocalProperty.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SelfNonLocalEnumEntry.kt.
@Test
@TestMetadata("SelfNonLocalEnumEntry.kt")
public void testSelfNonLocalEnumEntry() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SelfNonLocalEnumEntry.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SelfNonLocalMethod.kt.
@Test
@TestMetadata("SelfNonLocalMethod.kt")
public void testSelfNonLocalMethod() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SelfNonLocalMethod.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SelfNonLocalNestedClass.kt.
@Test
@TestMetadata("SelfNonLocalNestedClass.kt")
public void testSelfNonLocalNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SelfNonLocalNestedClass.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SelfNonLocalProperty.kt.
@Test
@TestMetadata("SelfNonLocalProperty.kt")
public void testSelfNonLocalProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SelfNonLocalProperty.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/shortNamesPriorities.kt.
@Test
@TestMetadata("shortNamesPriorities.kt")
public void testShortNamesPriorities() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/shortNamesPriorities.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SiblingLocalMethod.kt.
@Test
@TestMetadata("SiblingLocalMethod.kt")
public void testSiblingLocalMethod() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SiblingLocalMethod.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SiblingLocalProperty.kt.
@Test
@TestMetadata("SiblingLocalProperty.kt")
public void testSiblingLocalProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SiblingLocalProperty.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/SiblingNonLocalProperty.kt.
@Test
@TestMetadata("SiblingNonLocalProperty.kt")
public void testSiblingNonLocalProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/SiblingNonLocalProperty.kt");
}
// Generated test: KDoc resolution in kDoc/localContext/typeParameterOfOuterClass.kt.
@Test
@TestMetadata("typeParameterOfOuterClass.kt")
public void testTypeParameterOfOuterClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/localContext/typeParameterOfOuterClass.kt");
}
}
// Generated suite: KDoc references that resolve against package names (kDoc/packages test data).
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/packages")
@TestDataPath("$PROJECT_ROOT")
public class Packages {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInPackages() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/packages"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("longPackage.kt")
public void testLongPackage() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/packages/longPackage.kt");
}
@Test
@TestMetadata("packageVsTopLevelDeclarations.kt")
public void testPackageVsTopLevelDeclarations() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/packages/packageVsTopLevelDeclarations.kt");
}
}
// Generated suite: KDoc references to value/type/context parameters (kDoc/parameters test data).
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters")
@TestDataPath("$PROJECT_ROOT")
public class Parameters {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInParameters() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("ClassPrimaryConstructorParameter.kt")
public void testClassPrimaryConstructorParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters/ClassPrimaryConstructorParameter.kt");
}
@Test
@TestMetadata("ClassPrimaryConstructorValParameter.kt")
public void testClassPrimaryConstructorValParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters/ClassPrimaryConstructorValParameter.kt");
}
@Test
@TestMetadata("ClassTypeParameter.kt")
public void testClassTypeParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters/ClassTypeParameter.kt");
}
@Test
@TestMetadata("ConstructorValueParameter.kt")
public void testConstructorValueParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters/ConstructorValueParameter.kt");
}
@Test
@TestMetadata("FunctionContextParameter.kt")
public void testFunctionContextParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters/FunctionContextParameter.kt");
}
@Test
@TestMetadata("FunctionTypeParameter.kt")
public void testFunctionTypeParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters/FunctionTypeParameter.kt");
}
@Test
@TestMetadata("FunctionValueParameter.kt")
public void testFunctionValueParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/parameters/FunctionValueParameter.kt");
}
}
// Generated suite: qualified (dot-separated) KDoc references (kDoc/qualified test data),
// with sub-suites for callables, conflict resolution, cross-file, and stdlib targets.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified")
@TestDataPath("$PROJECT_ROOT")
public class Qualified {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInQualified() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("backtickedIdentifiers.kt")
public void testBacktickedIdentifiers() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/backtickedIdentifiers.kt");
}
@Test
@TestMetadata("fqnPriorities.kt")
public void testFqnPriorities() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fqnPriorities.kt");
}
@Test
@TestMetadata("multiSegmentWithMoreLocalConflictingClass.kt")
public void testMultiSegmentWithMoreLocalConflictingClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/multiSegmentWithMoreLocalConflictingClass.kt");
}
@Test
@TestMetadata("multiSegmentWithMoreLocalConflictingProperty.kt")
public void testMultiSegmentWithMoreLocalConflictingProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/multiSegmentWithMoreLocalConflictingProperty.kt");
}
// Generated sub-suite: qualified KDoc references to callables (functions and properties).
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables")
@TestDataPath("$PROJECT_ROOT")
public class Callables {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInCallables() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("callableFromTypealias.kt")
public void testCallableFromTypealias() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/callableFromTypealias.kt");
}
@Test
@TestMetadata("functionFromClass.kt")
public void testFunctionFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromClass.kt");
}
@Test
@TestMetadata("functionFromInnerClass.kt")
public void testFunctionFromInnerClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromInnerClass.kt");
}
@Test
@TestMetadata("functionFromInterface.kt")
public void testFunctionFromInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromInterface.kt");
}
@Test
@TestMetadata("functionFromNestedClass.kt")
public void testFunctionFromNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromNestedClass.kt");
}
@Test
@TestMetadata("functionFromPrivateNestedClass.kt")
public void testFunctionFromPrivateNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromPrivateNestedClass.kt");
}
@Test
@TestMetadata("functionFromSuperClass.kt")
public void testFunctionFromSuperClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromSuperClass.kt");
}
@Test
@TestMetadata("functionFromSuperInterface.kt")
public void testFunctionFromSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromSuperInterface.kt");
}
@Test
@TestMetadata("functionFromSuperSuperInterface.kt")
public void testFunctionFromSuperSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/functionFromSuperSuperInterface.kt");
}
@Test
@TestMetadata("overloadedFunctionFromClass.kt")
public void testOverloadedFunctionFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/overloadedFunctionFromClass.kt");
}
@Test
@TestMetadata("overloadedFunctionFromClassWithPrivateOverload.kt")
public void testOverloadedFunctionFromClassWithPrivateOverload() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/overloadedFunctionFromClassWithPrivateOverload.kt");
}
@Test
@TestMetadata("privateFunctionFromClass.kt")
public void testPrivateFunctionFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/privateFunctionFromClass.kt");
}
@Test
@TestMetadata("privatePropertyFromClass.kt")
public void testPrivatePropertyFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/privatePropertyFromClass.kt");
}
@Test
@TestMetadata("propertyFromClass.kt")
public void testPropertyFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromClass.kt");
}
@Test
@TestMetadata("propertyFromInnerClass.kt")
public void testPropertyFromInnerClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromInnerClass.kt");
}
@Test
@TestMetadata("propertyFromInterface.kt")
public void testPropertyFromInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromInterface.kt");
}
@Test
@TestMetadata("propertyFromNestedClass.kt")
public void testPropertyFromNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromNestedClass.kt");
}
@Test
@TestMetadata("propertyFromPrivateNestedClass.kt")
public void testPropertyFromPrivateNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromPrivateNestedClass.kt");
}
@Test
@TestMetadata("propertyFromSuperClass.kt")
public void testPropertyFromSuperClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromSuperClass.kt");
}
@Test
@TestMetadata("propertyFromSuperInterface.kt")
public void testPropertyFromSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromSuperInterface.kt");
}
@Test
@TestMetadata("propertyFromSuperSuperInterface.kt")
public void testPropertyFromSuperSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/propertyFromSuperSuperInterface.kt");
}
// Generated sub-suite: same callable cases, but the containing declaration is not imported.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported")
@TestDataPath("$PROJECT_ROOT")
public class NotImported {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInNotImported() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("functionFromClass.kt")
public void testFunctionFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromClass.kt");
}
@Test
@TestMetadata("functionFromInnerClass.kt")
public void testFunctionFromInnerClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromInnerClass.kt");
}
@Test
@TestMetadata("functionFromInterface.kt")
public void testFunctionFromInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromInterface.kt");
}
@Test
@TestMetadata("functionFromNestedClass.kt")
public void testFunctionFromNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromNestedClass.kt");
}
@Test
@TestMetadata("functionFromPrivateNestedClass.kt")
public void testFunctionFromPrivateNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromPrivateNestedClass.kt");
}
@Test
@TestMetadata("functionFromSuperClass.kt")
public void testFunctionFromSuperClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromSuperClass.kt");
}
@Test
@TestMetadata("functionFromSuperInterface.kt")
public void testFunctionFromSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromSuperInterface.kt");
}
@Test
@TestMetadata("functionFromSuperSuperInterface.kt")
public void testFunctionFromSuperSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/functionFromSuperSuperInterface.kt");
}
@Test
@TestMetadata("overloadedFunctionFromClass.kt")
public void testOverloadedFunctionFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/overloadedFunctionFromClass.kt");
}
@Test
@TestMetadata("overloadedFunctionFromClassWithPrivateOverload.kt")
public void testOverloadedFunctionFromClassWithPrivateOverload() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/overloadedFunctionFromClassWithPrivateOverload.kt");
}
@Test
@TestMetadata("privateFunctionFromClass.kt")
public void testPrivateFunctionFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/privateFunctionFromClass.kt");
}
@Test
@TestMetadata("privatePropertyFromClass.kt")
public void testPrivatePropertyFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/privatePropertyFromClass.kt");
}
@Test
@TestMetadata("propertyFromClass.kt")
public void testPropertyFromClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromClass.kt");
}
@Test
@TestMetadata("propertyFromInnerClass.kt")
public void testPropertyFromInnerClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromInnerClass.kt");
}
@Test
@TestMetadata("propertyFromInterface.kt")
public void testPropertyFromInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromInterface.kt");
}
@Test
@TestMetadata("propertyFromNestedClass.kt")
public void testPropertyFromNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromNestedClass.kt");
}
@Test
@TestMetadata("propertyFromPrivateNestedClass.kt")
public void testPropertyFromPrivateNestedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromPrivateNestedClass.kt");
}
@Test
@TestMetadata("propertyFromSuperClass.kt")
public void testPropertyFromSuperClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromSuperClass.kt");
}
@Test
@TestMetadata("propertyFromSuperInterface.kt")
public void testPropertyFromSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromSuperInterface.kt");
}
@Test
@TestMetadata("propertyFromSuperSuperInterface.kt")
public void testPropertyFromSuperSuperInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/callables/notImported/propertyFromSuperSuperInterface.kt");
}
}
}
// Generated sub-suite: priority between packages and declarations for qualified KDoc links.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/conflictResolution")
@TestDataPath("$PROJECT_ROOT")
public class ConflictResolution {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInConflictResolution() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/conflictResolution"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("resolveToPackage.kt")
public void testResolveToPackage() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/conflictResolution/resolveToPackage.kt");
}
}
// Generated sub-suite: qualified KDoc references to declarations located in another file.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile")
@TestDataPath("$PROJECT_ROOT")
public class FromOtherFile {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInFromOtherFile() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("EnumEntryFromOtherByShortName.kt")
public void testEnumEntryFromOtherByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/EnumEntryFromOtherByShortName.kt");
}
@Test
@TestMetadata("EnumEntryFromOtherFileByFullName.kt")
public void testEnumEntryFromOtherFileByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/EnumEntryFromOtherFileByFullName.kt");
}
@Test
@TestMetadata("NestedClassFromOtherFileByFullName.kt")
public void testNestedClassFromOtherFileByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/NestedClassFromOtherFileByFullName.kt");
}
@Test
@TestMetadata("NestedClassFromOtherFileByShortName.kt")
public void testNestedClassFromOtherFileByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/NestedClassFromOtherFileByShortName.kt");
}
@Test
@TestMetadata("TopLevelClassFromOtherFileByFullName.kt")
public void testTopLevelClassFromOtherFileByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/TopLevelClassFromOtherFileByFullName.kt");
}
@Test
@TestMetadata("TopLevelClassFromOtherFileByShortName.kt")
public void testTopLevelClassFromOtherFileByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/TopLevelClassFromOtherFileByShortName.kt");
}
@Test
@TestMetadata("TopLevelFunctionFromOtherFileByFullName.kt")
public void testTopLevelFunctionFromOtherFileByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/TopLevelFunctionFromOtherFileByFullName.kt");
}
@Test
@TestMetadata("TopLevelFunctionFromStdlibByShortName.kt")
public void testTopLevelFunctionFromStdlibByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/TopLevelFunctionFromStdlibByShortName.kt");
}
@Test
@TestMetadata("TypealiasFromOtherFileByFullName.kt")
public void testTypealiasFromOtherFileByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/TypealiasFromOtherFileByFullName.kt");
}
@Test
@TestMetadata("TypealiasFromOtherFileByShortName.kt")
public void testTypealiasFromOtherFileByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/fromOtherFile/TypealiasFromOtherFileByShortName.kt");
}
}
// Generated sub-suite: qualified KDoc references to Kotlin standard-library declarations.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib")
@TestDataPath("$PROJECT_ROOT")
public class Stdlib {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInStdlib() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("EnumEntryFromStdlibByFullName.kt")
public void testEnumEntryFromStdlibByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/EnumEntryFromStdlibByFullName.kt");
}
@Test
@TestMetadata("EnumEntryFromStdlibByShortName.kt")
public void testEnumEntryFromStdlibByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/EnumEntryFromStdlibByShortName.kt");
}
@Test
@TestMetadata("TopLevelClassFromStdlibByFullName.kt")
public void testTopLevelClassFromStdlibByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/TopLevelClassFromStdlibByFullName.kt");
}
@Test
@TestMetadata("TopLevelClassFromStdlibByShortName.kt")
public void testTopLevelClassFromStdlibByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/TopLevelClassFromStdlibByShortName.kt");
}
@Test
@TestMetadata("TopLevelFunctionFromStdlibByFullName.kt")
public void testTopLevelFunctionFromStdlibByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/TopLevelFunctionFromStdlibByFullName.kt");
}
@Test
@TestMetadata("TopLevelFunctionFromStdlibByShortName.kt")
public void testTopLevelFunctionFromStdlibByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/TopLevelFunctionFromStdlibByShortName.kt");
}
@Test
@TestMetadata("TypealiasFromStdlibByFullName.kt")
public void testTypealiasFromStdlibByFullName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/TypealiasFromStdlibByFullName.kt");
}
@Test
@TestMetadata("TypealiasFromStdlibByShortName.kt")
public void testTypealiasFromStdlibByShortName() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/qualified/stdlib/TypealiasFromStdlibByShortName.kt");
}
}
}
// Generated suite: KDoc references using the `this` qualifier (kDoc/thisQualifier test data).
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier")
@TestDataPath("$PROJECT_ROOT")
public class ThisQualifier {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInThisQualifier() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("thisAndSuperQualifier.kt")
public void testThisAndSuperQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier/thisAndSuperQualifier.kt");
}
@Test
@TestMetadata("thisQualifierOnClass.kt")
public void testThisQualifierOnClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier/thisQualifierOnClass.kt");
}
@Test
@TestMetadata("thisQualifierOnFunction.kt")
public void testThisQualifierOnFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier/thisQualifierOnFunction.kt");
}
@Test
@TestMetadata("thisQualifierOnFunctionNoExtensionReceiver.kt")
public void testThisQualifierOnFunctionNoExtensionReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier/thisQualifierOnFunctionNoExtensionReceiver.kt");
}
@Test
@TestMetadata("thisQualifierOnProperty.kt")
public void testThisQualifierOnProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier/thisQualifierOnProperty.kt");
}
@Test
@TestMetadata("thisQualifierOnPropertyNoExtensionReceiver.kt")
public void testThisQualifierOnPropertyNoExtensionReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/thisQualifier/thisQualifierOnPropertyNoExtensionReceiver.kt");
}
}
// Generated suite: KDoc resolution in code containing errors (kDoc/withErrors test data).
// NOTE: "errorInLatestQualifer" spelling matches the actual test-data file name; do not "fix" it here.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors")
@TestDataPath("$PROJECT_ROOT")
public class WithErrors {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInWithErrors() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("duplicatedContextParameters.kt")
public void testDuplicatedContextParameters() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors/duplicatedContextParameters.kt");
}
@Test
@TestMetadata("errorInLatestQualifer.kt")
public void testErrorInLatestQualifer() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors/errorInLatestQualifer.kt");
}
@Test
@TestMetadata("invalidPropertyAccessOnFunction.kt")
public void testInvalidPropertyAccessOnFunction() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors/invalidPropertyAccessOnFunction.kt");
}
@Test
@TestMetadata("invalidPropertyAccessOnFunctionFromCompanionObject.kt")
public void testInvalidPropertyAccessOnFunctionFromCompanionObject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors/invalidPropertyAccessOnFunctionFromCompanionObject.kt");
}
@Test
@TestMetadata("invalidPropertyAccessOnFunctionFromNonImportedCompanionObject.kt")
public void testInvalidPropertyAccessOnFunctionFromNonImportedCompanionObject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors/invalidPropertyAccessOnFunctionFromNonImportedCompanionObject.kt");
}
@Test
@TestMetadata("invalidPropertyAccessOnFunctionFromSuperClass.kt")
public void testInvalidPropertyAccessOnFunctionFromSuperClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kDoc/withErrors/invalidPropertyAccessOnFunctionFromSuperClass.kt");
}
}
}
// Generated suite: resolution of declarations in the kotlin/kotlinx packages (kotlinPackage test data).
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPackage")
@TestDataPath("$PROJECT_ROOT")
public class KotlinPackage {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInKotlinPackage() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPackage"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("kotlinPackageAllowed.kt")
public void testKotlinPackageAllowed() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPackage/kotlinPackageAllowed.kt");
}
@Test
@TestMetadata("kotlinPackageNotAllowed.kt")
public void testKotlinPackageNotAllowed() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPackage/kotlinPackageNotAllowed.kt");
}
@Test
@TestMetadata("kotlinxPackage.kt")
public void testKotlinxPackage() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/kotlinPackage/kotlinxPackage.kt");
}
}
// Generated suite: resolution of calls into library dependencies (libraryDependency test data),
// including a sub-suite for fallback-dependency configurations.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency")
@TestDataPath("$PROJECT_ROOT")
public class LibraryDependency {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInLibraryDependency() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("memberFunctionCall.kt")
public void testMemberFunctionCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency/memberFunctionCall.kt");
}
@Test
@TestMetadata("topLevelFunctionCall.kt")
public void testTopLevelFunctionCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency/topLevelFunctionCall.kt");
}
// Generated sub-suite: same calls resolved through fallback dependencies.
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency/fallbackDependencies")
@TestDataPath("$PROJECT_ROOT")
public class FallbackDependencies {
// Guard: fails if a .kt file in the test-data directory has no generated test method in this class.
@Test
public void testAllFilesPresentInFallbackDependencies() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency/fallbackDependencies"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("memberFunctionCall.kt")
public void testMemberFunctionCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency/fallbackDependencies/memberFunctionCall.kt");
}
@Test
@TestMetadata("topLevelFunctionCall.kt")
public void testTopLevelFunctionCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/libraryDependency/fallbackDependencies/topLevelFunctionCall.kt");
}
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes")
@TestDataPath("$PROJECT_ROOT")
public class NestedTypes {
// Guard: fails if a .kt file in the nestedTypes test-data directory has no generated test method here.
@Test
public void testAllFilesPresentInNestedTypes() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
// Generated test: type-position resolution in nestedTypes/ResolveCompanionInCompanionType.kt.
@Test
@TestMetadata("ResolveCompanionInCompanionType.kt")
public void testResolveCompanionInCompanionType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveCompanionInCompanionType.kt");
}
@Test
@TestMetadata("ResolveEndOfPackageInType.kt")
public void testResolveEndOfPackageInType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveEndOfPackageInType.kt");
}
@Test
@TestMetadata("ResolveMiddleOfPackageInType.kt")
public void testResolveMiddleOfPackageInType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveMiddleOfPackageInType.kt");
}
@Test
@TestMetadata("ResolveNamedCompanionInCompanionType.kt")
public void testResolveNamedCompanionInCompanionType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveNamedCompanionInCompanionType.kt");
}
@Test
@TestMetadata("ResolveStartOfPackageInType.kt")
public void testResolveStartOfPackageInType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveStartOfPackageInType.kt");
}
@Test
@TestMetadata("ResolveTypeInTheEndOfType.kt")
public void testResolveTypeInTheEndOfType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveTypeInTheEndOfType.kt");
}
@Test
@TestMetadata("ResolveTypeInTheMiddleOfCompanionType.kt")
public void testResolveTypeInTheMiddleOfCompanionType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveTypeInTheMiddleOfCompanionType.kt");
}
@Test
@TestMetadata("ResolveTypeInTheMiddleOfFunctionalType.kt")
public void testResolveTypeInTheMiddleOfFunctionalType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveTypeInTheMiddleOfFunctionalType.kt");
}
@Test
@TestMetadata("ResolveTypeInTheMiddleOfNullableType.kt")
public void testResolveTypeInTheMiddleOfNullableType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveTypeInTheMiddleOfNullableType.kt");
}
@Test
@TestMetadata("ResolveTypeInTheMiddleOfType.kt")
public void testResolveTypeInTheMiddleOfType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveTypeInTheMiddleOfType.kt");
}
@Test
@TestMetadata("ResolveTypeInTheStartOfCompanionType.kt")
public void testResolveTypeInTheStartOfCompanionType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveTypeInTheStartOfCompanionType.kt");
}
@Test
@TestMetadata("ResolveTypeInTheStartOfType.kt")
public void testResolveTypeInTheStartOfType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nestedTypes/ResolveTypeInTheStartOfType.kt");
}
}
        /**
         * Generated test group for the "singleByPsi/nonCalls" test-data directory
         * (expressions that are not calls: if/when/literals).
         * NOTE(review): generator-produced code — prefer regenerating over hand-editing (confirm).
         */
        @Nested
        @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/nonCalls")
        @TestDataPath("$PROJECT_ROOT")
        public class NonCalls {
            @Test
            public void testAllFilesPresentInNonCalls() {
                KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/nonCalls"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
            }
            @Test
            @TestMetadata("ifExpression.kt")
            public void testIfExpression() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nonCalls/ifExpression.kt");
            }
            @Test
            @TestMetadata("literalExpression.kt")
            public void testLiteralExpression() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nonCalls/literalExpression.kt");
            }
            @Test
            @TestMetadata("whenExpression.kt")
            public void testWhenExpression() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/nonCalls/whenExpression.kt");
            }
        }
        /**
         * Generated test group for the "singleByPsi/operators" test-data directory.
         * Sub-groups mirror the sub-directories (assignment, compareTo, contains, equals);
         * each test method delegates to {@code runTest} with one .kt test-data file.
         * NOTE(review): generator-produced code — prefer regenerating over hand-editing (confirm).
         */
        @Nested
        @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/operators")
        @TestDataPath("$PROJECT_ROOT")
        public class Operators {
            @Test
            public void testAllFilesPresentInOperators() {
                KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/operators"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
            }
            // Sub-group: operators/assignment test data.
            @Nested
            @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment")
            @TestDataPath("$PROJECT_ROOT")
            public class Assignment {
                @Test
                public void testAllFilesPresentInAssignment() {
                    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                }
                @Test
                @TestMetadata("insidePlusAssignTarget.kt")
                public void testInsidePlusAssignTarget() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment/insidePlusAssignTarget.kt");
                }
                @Test
                @TestMetadata("listAssignPlus.kt")
                public void testListAssignPlus() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment/listAssignPlus.kt");
                }
                @Test
                @TestMetadata("listAssignPlusExplicit.kt")
                public void testListAssignPlusExplicit() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment/listAssignPlusExplicit.kt");
                }
                @Test
                @TestMetadata("listAssignValue.kt")
                public void testListAssignValue() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment/listAssignValue.kt");
                }
                @Test
                @TestMetadata("listAssignValue_complex.kt")
                public void testListAssignValue_complex() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment/listAssignValue_complex.kt");
                }
                @Test
                @TestMetadata("listSet.kt")
                public void testListSet() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment/listSet.kt");
                }
                @Test
                @TestMetadata("listSetExplicit.kt")
                public void testListSetExplicit() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/assignment/listSetExplicit.kt");
                }
            }
            // Sub-group: operators/compareTo test data.
            @Nested
            @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo")
            @TestDataPath("$PROJECT_ROOT")
            public class CompareTo {
                @Test
                public void testAllFilesPresentInCompareTo() {
                    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                }
                @Test
                @TestMetadata("compareTo.kt")
                public void testCompareTo() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/compareTo.kt");
                }
                @Test
                @TestMetadata("compareToAndGeneric.kt")
                public void testCompareToAndGeneric() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/compareToAndGeneric.kt");
                }
                @Test
                @TestMetadata("gt.kt")
                public void testGt() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/gt.kt");
                }
                @Test
                @TestMetadata("gtAndGeneric.kt")
                public void testGtAndGeneric() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/gtAndGeneric.kt");
                }
                @Test
                @TestMetadata("gtAndGenericAmbiguity.kt")
                public void testGtAndGenericAmbiguity() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/gtAndGenericAmbiguity.kt");
                }
                @Test
                @TestMetadata("gtEq.kt")
                public void testGtEq() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/gtEq.kt");
                }
                @Test
                @TestMetadata("lt.kt")
                public void testLt() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/lt.kt");
                }
                @Test
                @TestMetadata("ltEq.kt")
                public void testLtEq() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/ltEq.kt");
                }
                @Test
                @TestMetadata("numberCompareTo.kt")
                public void testNumberCompareTo() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/numberCompareTo.kt");
                }
                @Test
                @TestMetadata("numberGt.kt")
                public void testNumberGt() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/numberGt.kt");
                }
                @Test
                @TestMetadata("numberGtEq.kt")
                public void testNumberGtEq() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/numberGtEq.kt");
                }
                @Test
                @TestMetadata("numberLt.kt")
                public void testNumberLt() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/numberLt.kt");
                }
                @Test
                @TestMetadata("numberLtEq.kt")
                public void testNumberLtEq() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/compareTo/numberLtEq.kt");
                }
            }
            // Sub-group: operators/contains test data.
            @Nested
            @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/contains")
            @TestDataPath("$PROJECT_ROOT")
            public class Contains {
                @Test
                public void testAllFilesPresentInContains() {
                    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/contains"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                }
                @Test
                @TestMetadata("whenConditionInRange.kt")
                public void testWhenConditionInRange() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/contains/whenConditionInRange.kt");
                }
                @Test
                @TestMetadata("whenConditionInRangeInverted.kt")
                public void testWhenConditionInRangeInverted() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/contains/whenConditionInRangeInverted.kt");
                }
            }
            // Sub-group: operators/equals test data.
            @Nested
            @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals")
            @TestDataPath("$PROJECT_ROOT")
            public class Equals {
                @Test
                public void testAllFilesPresentInEquals() {
                    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                }
                @Test
                @TestMetadata("equals.kt")
                public void testEquals() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/equals.kt");
                }
                @Test
                @TestMetadata("equalsExplicit.kt")
                public void testEqualsExplicit() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/equalsExplicit.kt");
                }
                @Test
                @TestMetadata("equalsExplicitWithExplicitImplementation.kt")
                public void testEqualsExplicitWithExplicitImplementation() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/equalsExplicitWithExplicitImplementation.kt");
                }
                @Test
                @TestMetadata("equalsNot.kt")
                public void testEqualsNot() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/equalsNot.kt");
                }
                @Test
                @TestMetadata("equalsNotWithExplicitImplementation.kt")
                public void testEqualsNotWithExplicitImplementation() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/equalsNotWithExplicitImplementation.kt");
                }
                @Test
                @TestMetadata("equalsWithExplicitImplementation.kt")
                public void testEqualsWithExplicitImplementation() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/equalsWithExplicitImplementation.kt");
                }
                @Test
                @TestMetadata("identityEquals.kt")
                public void testIdentityEquals() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/identityEquals.kt");
                }
                @Test
                @TestMetadata("identityEqualsWithExplicitImplementation.kt")
                public void testIdentityEqualsWithExplicitImplementation() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/identityEqualsWithExplicitImplementation.kt");
                }
                @Test
                @TestMetadata("identityNotEquals.kt")
                public void testIdentityNotEquals() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/identityNotEquals.kt");
                }
                @Test
                @TestMetadata("identityNotEqualsWithExplicitImplementation.kt")
                public void testIdentityNotEqualsWithExplicitImplementation() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/identityNotEqualsWithExplicitImplementation.kt");
                }
                @Test
                @TestMetadata("withSmartCast.kt")
                public void testWithSmartCast() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/operators/equals/withSmartCast.kt");
                }
            }
        }
        /**
         * Generated test group for the "singleByPsi/packageReference" test-data directory
         * (resolution of package-name qualifiers).
         * NOTE(review): generator-produced code — prefer regenerating over hand-editing (confirm).
         */
        @Nested
        @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/packageReference")
        @TestDataPath("$PROJECT_ROOT")
        public class PackageReference {
            @Test
            public void testAllFilesPresentInPackageReference() {
                KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/packageReference"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
            }
            @Test
            @TestMetadata("javaPackageFirstQualifier.kt")
            public void testJavaPackageFirstQualifier() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/packageReference/javaPackageFirstQualifier.kt");
            }
            @Test
            @TestMetadata("javaPackageSecondQualifier.kt")
            public void testJavaPackageSecondQualifier() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/packageReference/javaPackageSecondQualifier.kt");
            }
            @Test
            @TestMetadata("kotlinPackageFirstQualifier.kt")
            public void testKotlinPackageFirstQualifier() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/packageReference/kotlinPackageFirstQualifier.kt");
            }
            @Test
            @TestMetadata("kotlinPackageSecondQualifier.kt")
            public void testKotlinPackageSecondQualifier() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/packageReference/kotlinPackageSecondQualifier.kt");
            }
        }
        /**
         * Generated test group for the "singleByPsi/qualifiedAccess" test-data directory
         * (callable references, safe calls, fully-qualified references).
         * NOTE(review): generator-produced code — prefer regenerating over hand-editing (confirm).
         */
        @Nested
        @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess")
        @TestDataPath("$PROJECT_ROOT")
        public class QualifiedAccess {
            @Test
            public void testAllFilesPresentInQualifiedAccess() {
                KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
            }
            @Test
            @TestMetadata("callableReference1.kt")
            public void testCallableReference1() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/callableReference1.kt");
            }
            @Test
            @TestMetadata("callableReference2.kt")
            public void testCallableReference2() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/callableReference2.kt");
            }
            @Test
            @TestMetadata("callableReference3.kt")
            public void testCallableReference3() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/callableReference3.kt");
            }
            @Test
            @TestMetadata("nullSafeCallExpression.kt")
            public void testNullSafeCallExpression() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/nullSafeCallExpression.kt");
            }
            @Test
            @TestMetadata("ResolveFirstPackageOfFullyQualifiedReference.kt")
            public void testResolveFirstPackageOfFullyQualifiedReference() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/ResolveFirstPackageOfFullyQualifiedReference.kt");
            }
            @Test
            @TestMetadata("ResolveFullyQualifiedCompanionObject.kt")
            public void testResolveFullyQualifiedCompanionObject() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/ResolveFullyQualifiedCompanionObject.kt");
            }
            @Test
            @TestMetadata("ResolveOuterClassOfFullyQualifiedReference.kt")
            public void testResolveOuterClassOfFullyQualifiedReference() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/ResolveOuterClassOfFullyQualifiedReference.kt");
            }
            @Test
            @TestMetadata("ResolvePackageOfFullyQualifiedReference.kt")
            public void testResolvePackageOfFullyQualifiedReference() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/qualifiedAccess/ResolvePackageOfFullyQualifiedReference.kt");
            }
        }
        /**
         * Generated test group for the "singleByPsi/typeAlias" test-data directory.
         * Sub-groups mirror the sub-directories (missingDependency/libraries,
         * missingDependency/sources, withErrors); each test method delegates to
         * {@code runTest} with one .kt test-data file.
         * NOTE(review): generator-produced code — prefer regenerating over hand-editing (confirm).
         */
        @Nested
        @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias")
        @TestDataPath("$PROJECT_ROOT")
        public class TypeAlias {
            @Test
            public void testAllFilesPresentInTypeAlias() {
                KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
            }
            @Test
            @TestMetadata("TypeAlias.kt")
            public void testTypeAlias() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAlias.kt");
            }
            @Test
            @TestMetadata("TypeAliasAsCallableReference.kt")
            public void testTypeAliasAsCallableReference() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasAsCallableReference.kt");
            }
            @Test
            @TestMetadata("TypeAliasAsCallableReference_withTypeParameters.kt")
            public void testTypeAliasAsCallableReference_withTypeParameters() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasAsCallableReference_withTypeParameters.kt");
            }
            @Test
            @TestMetadata("TypeAliasAsConstructor.kt")
            public void testTypeAliasAsConstructor() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasAsConstructor.kt");
            }
            @Test
            @TestMetadata("TypeAliasAsConstructorParameter.kt")
            public void testTypeAliasAsConstructorParameter() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasAsConstructorParameter.kt");
            }
            @Test
            @TestMetadata("TypeAliasAsConstructor_fixedTypeParameters.kt")
            public void testTypeAliasAsConstructor_fixedTypeParameters() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasAsConstructor_fixedTypeParameters.kt");
            }
            @Test
            @TestMetadata("TypeAliasAsConstructor_withTypeParameters.kt")
            public void testTypeAliasAsConstructor_withTypeParameters() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasAsConstructor_withTypeParameters.kt");
            }
            @Test
            @TestMetadata("TypeAliasAsSupertypeConstructor.kt")
            public void testTypeAliasAsSupertypeConstructor() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasAsSupertypeConstructor.kt");
            }
            @Test
            @TestMetadata("TypeAliasRHS.kt")
            public void testTypeAliasRHS() {
                runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/TypeAliasRHS.kt");
            }
            // Sub-group: typeAlias/missingDependency test data (aliases whose dependency is absent).
            @Nested
            @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency")
            @TestDataPath("$PROJECT_ROOT")
            public class MissingDependency {
                @Test
                public void testAllFilesPresentInMissingDependency() {
                    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                }
                // Sub-group: missingDependency/libraries test data.
                @Nested
                @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries")
                @TestDataPath("$PROJECT_ROOT")
                public class Libraries {
                    @Test
                    public void testAllFilesPresentInLibraries() {
                        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                    }
                    @Test
                    @TestMetadata("FunctionListAliasUnresolvedFunctionAlias.kt")
                    public void testFunctionListAliasUnresolvedFunctionAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/FunctionListAliasUnresolvedFunctionAlias.kt");
                    }
                    @Test
                    @TestMetadata("FunctionListAliasUnresolvedListAlias.kt")
                    public void testFunctionListAliasUnresolvedListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/FunctionListAliasUnresolvedListAlias.kt");
                    }
                    @Test
                    @TestMetadata("ListAliasUnresolvedListAlias.kt")
                    public void testListAliasUnresolvedListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/ListAliasUnresolvedListAlias.kt");
                    }
                    @Test
                    @TestMetadata("ListAliasUnresolvedStringAlias.kt")
                    public void testListAliasUnresolvedStringAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/ListAliasUnresolvedStringAlias.kt");
                    }
                    @Test
                    @TestMetadata("MyString.kt")
                    public void testMyString() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/MyString.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasOnlyResolveAsymmetricAlias.kt")
                    public void testNestedAsymmetricAliasOnlyResolveAsymmetricAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedAsymmetricAliasOnlyResolveAsymmetricAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasOnlyResolveAsymmetricAlias2.kt")
                    public void testNestedAsymmetricAliasOnlyResolveAsymmetricAlias2() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedAsymmetricAliasOnlyResolveAsymmetricAlias2.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasOnlyResolveBaseAliases.kt")
                    public void testNestedAsymmetricAliasOnlyResolveBaseAliases() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedAsymmetricAliasOnlyResolveBaseAliases.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasUnresolvedAsymmetricAlias.kt")
                    public void testNestedAsymmetricAliasUnresolvedAsymmetricAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedAsymmetricAliasUnresolvedAsymmetricAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasUnresolvedAsymmetricAlias2.kt")
                    public void testNestedAsymmetricAliasUnresolvedAsymmetricAlias2() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedAsymmetricAliasUnresolvedAsymmetricAlias2.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasUnresolvedBaseAliases.kt")
                    public void testNestedAsymmetricAliasUnresolvedBaseAliases() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedAsymmetricAliasUnresolvedBaseAliases.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesOnlyResolveListAlias.kt")
                    public void testNestedCollectionAliasesOnlyResolveListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedCollectionAliasesOnlyResolveListAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesOnlyResolveSetAlias.kt")
                    public void testNestedCollectionAliasesOnlyResolveSetAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedCollectionAliasesOnlyResolveSetAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesOnlyResolveStringAlias.kt")
                    public void testNestedCollectionAliasesOnlyResolveStringAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedCollectionAliasesOnlyResolveStringAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesUnresolvedListAlias.kt")
                    public void testNestedCollectionAliasesUnresolvedListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedCollectionAliasesUnresolvedListAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesUnresolvedSetAlias.kt")
                    public void testNestedCollectionAliasesUnresolvedSetAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedCollectionAliasesUnresolvedSetAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesUnresolvedStringAlias.kt")
                    public void testNestedCollectionAliasesUnresolvedStringAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/libraries/NestedCollectionAliasesUnresolvedStringAlias.kt");
                    }
                }
                // Sub-group: missingDependency/sources test data (same cases, source-module variant).
                @Nested
                @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources")
                @TestDataPath("$PROJECT_ROOT")
                public class Sources {
                    @Test
                    public void testAllFilesPresentInSources() {
                        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                    }
                    @Test
                    @TestMetadata("FunctionListAliasUnresolvedFunctionAlias.kt")
                    public void testFunctionListAliasUnresolvedFunctionAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/FunctionListAliasUnresolvedFunctionAlias.kt");
                    }
                    @Test
                    @TestMetadata("FunctionListAliasUnresolvedListAlias.kt")
                    public void testFunctionListAliasUnresolvedListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/FunctionListAliasUnresolvedListAlias.kt");
                    }
                    @Test
                    @TestMetadata("ListAliasUnresolvedListAlias.kt")
                    public void testListAliasUnresolvedListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/ListAliasUnresolvedListAlias.kt");
                    }
                    @Test
                    @TestMetadata("ListAliasUnresolvedStringAlias.kt")
                    public void testListAliasUnresolvedStringAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/ListAliasUnresolvedStringAlias.kt");
                    }
                    @Test
                    @TestMetadata("MyString.kt")
                    public void testMyString() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/MyString.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasOnlyResolveAsymmetricAlias.kt")
                    public void testNestedAsymmetricAliasOnlyResolveAsymmetricAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedAsymmetricAliasOnlyResolveAsymmetricAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasOnlyResolveAsymmetricAlias2.kt")
                    public void testNestedAsymmetricAliasOnlyResolveAsymmetricAlias2() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedAsymmetricAliasOnlyResolveAsymmetricAlias2.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasOnlyResolveBaseAliases.kt")
                    public void testNestedAsymmetricAliasOnlyResolveBaseAliases() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedAsymmetricAliasOnlyResolveBaseAliases.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasUnresolvedAsymmetricAlias.kt")
                    public void testNestedAsymmetricAliasUnresolvedAsymmetricAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedAsymmetricAliasUnresolvedAsymmetricAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasUnresolvedAsymmetricAlias2.kt")
                    public void testNestedAsymmetricAliasUnresolvedAsymmetricAlias2() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedAsymmetricAliasUnresolvedAsymmetricAlias2.kt");
                    }
                    @Test
                    @TestMetadata("NestedAsymmetricAliasUnresolvedBaseAliases.kt")
                    public void testNestedAsymmetricAliasUnresolvedBaseAliases() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedAsymmetricAliasUnresolvedBaseAliases.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesOnlyResolveListAlias.kt")
                    public void testNestedCollectionAliasesOnlyResolveListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedCollectionAliasesOnlyResolveListAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesOnlyResolveSetAlias.kt")
                    public void testNestedCollectionAliasesOnlyResolveSetAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedCollectionAliasesOnlyResolveSetAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesOnlyResolveStringAlias.kt")
                    public void testNestedCollectionAliasesOnlyResolveStringAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedCollectionAliasesOnlyResolveStringAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesUnresolvedListAlias.kt")
                    public void testNestedCollectionAliasesUnresolvedListAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedCollectionAliasesUnresolvedListAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesUnresolvedSetAlias.kt")
                    public void testNestedCollectionAliasesUnresolvedSetAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedCollectionAliasesUnresolvedSetAlias.kt");
                    }
                    @Test
                    @TestMetadata("NestedCollectionAliasesUnresolvedStringAlias.kt")
                    public void testNestedCollectionAliasesUnresolvedStringAlias() {
                        runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/missingDependency/sources/NestedCollectionAliasesUnresolvedStringAlias.kt");
                    }
                }
            }
            // Sub-group: typeAlias/withErrors test data (aliases with resolution errors).
            @Nested
            @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/withErrors")
            @TestDataPath("$PROJECT_ROOT")
            public class WithErrors {
                @Test
                public void testAllFilesPresentInWithErrors() {
                    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                }
                @Test
                @TestMetadata("UnresolvedExpandedType.kt")
                public void testUnresolvedExpandedType() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeAlias/withErrors/UnresolvedExpandedType.kt");
                }
            }
        }
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument")
@TestDataPath("$PROJECT_ROOT")
public class TypeArgument {
            // Asserts the generated suite covers every .kt file in the typeArgument test-data directory.
            @Test
            public void testAllFilesPresentInTypeArgument() {
                KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
            }
            /**
             * Generated test group for the "typeArgument/constant" test-data directory.
             * NOTE(review): generator-produced code — prefer regenerating over hand-editing (confirm).
             */
            @Nested
            @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/constant")
            @TestDataPath("$PROJECT_ROOT")
            public class Constant {
                @Test
                public void testAllFilesPresentInConstant() {
                    KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/constant"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
                }
                @Test
                @TestMetadata("false.kt")
                public void testFalse() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/constant/false.kt");
                }
                @Test
                @TestMetadata("true.kt")
                public void testTrue() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/constant/true.kt");
                }
                @Test
                @TestMetadata("trueCall.kt")
                public void testTrueCall() {
                    runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/constant/trueCall.kt");
                }
            }
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall")
@TestDataPath("$PROJECT_ROOT")
public class FunctionCall {
@Test
public void testAllFilesPresentInFunctionCall() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("tooFewTypeArguments1.kt")
public void testTooFewTypeArguments1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/tooFewTypeArguments1.kt");
}
@Test
@TestMetadata("tooFewTypeArguments1a.kt")
public void testTooFewTypeArguments1a() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/tooFewTypeArguments1a.kt");
}
@Test
@TestMetadata("tooFewTypeArguments2.kt")
public void testTooFewTypeArguments2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/tooFewTypeArguments2.kt");
}
@Test
@TestMetadata("tooManyTypeArguments1.kt")
public void testTooManyTypeArguments1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/tooManyTypeArguments1.kt");
}
@Test
@TestMetadata("tooManyTypeArguments1a.kt")
public void testTooManyTypeArguments1a() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/tooManyTypeArguments1a.kt");
}
@Test
@TestMetadata("tooManyTypeArguments2.kt")
public void testTooManyTypeArguments2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/tooManyTypeArguments2.kt");
}
@Test
@TestMetadata("tooManyTypeArguments2a.kt")
public void testTooManyTypeArguments2a() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/tooManyTypeArguments2a.kt");
}
@Test
@TestMetadata("unresolvedCallee1.kt")
public void testUnresolvedCallee1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/unresolvedCallee1.kt");
}
@Test
@TestMetadata("unresolvedCallee2.kt")
public void testUnresolvedCallee2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/unresolvedCallee2.kt");
}
@Test
@TestMetadata("unresolvedCalleeWithArguments1.kt")
public void testUnresolvedCalleeWithArguments1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/unresolvedCalleeWithArguments1.kt");
}
@Test
@TestMetadata("unresolvedCalleeWithArguments2.kt")
public void testUnresolvedCalleeWithArguments2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/unresolvedCalleeWithArguments2.kt");
}
@Test
@TestMetadata("upperBound1.kt")
public void testUpperBound1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/upperBound1.kt");
}
@Test
@TestMetadata("upperBound2.kt")
public void testUpperBound2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/functionCall/upperBound2.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess")
@TestDataPath("$PROJECT_ROOT")
public class PropertyAccess {
@Test
public void testAllFilesPresentInPropertyAccess() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("localVariable1.kt")
public void testLocalVariable1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/localVariable1.kt");
}
@Test
@TestMetadata("localVariable2.kt")
public void testLocalVariable2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/localVariable2.kt");
}
@Test
@TestMetadata("parameter1.kt")
public void testParameter1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/parameter1.kt");
}
@Test
@TestMetadata("parameter2.kt")
public void testParameter2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/parameter2.kt");
}
@Test
@TestMetadata("property1.kt")
public void testProperty1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/property1.kt");
}
@Test
@TestMetadata("property2.kt")
public void testProperty2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/property2.kt");
}
@Test
@TestMetadata("propertyWithGetter1.kt")
public void testPropertyWithGetter1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/propertyWithGetter1.kt");
}
@Test
@TestMetadata("propertyWithGetter2.kt")
public void testPropertyWithGetter2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/propertyWithGetter2.kt");
}
@Test
@TestMetadata("unresolvedProperty.kt")
public void testUnresolvedProperty() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/propertyAccess/unresolvedProperty.kt");
}
}
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/type")
@TestDataPath("$PROJECT_ROOT")
public class Type {
@Test
public void testAllFilesPresentInType() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/type"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("beforeDot.kt")
public void testBeforeDot() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeArgument/type/beforeDot.kt");
}
}
}
  /**
   * Test registration for resolver test data under {@code singleByPsi/typeParameter}
   * (type-parameter declarations: bounds, {@code reified} usages, {@code where} clauses).
   *
   * <p>Each {@code test*} method forwards one {@code .kt} test-data path to {@code runTest};
   * {@code testAllFilesPresentInTypeParameter} cross-checks the test-data directory against
   * the registered methods via {@code KtTestUtil.assertAllTestsPresentByMetadataWithExcluded}
   * (helper defined elsewhere — exact failure semantics not visible here).
   *
   * <p>NOTE(review): appears to be generator-produced boilerplate, like its sibling suites —
   * confirm with the file header before editing manually.
   */
  @Nested
  @TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter")
  @TestDataPath("$PROJECT_ROOT")
  public class TypeParameter {
    @Test
    public void testAllFilesPresentInTypeParameter() {
      KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
    }
    @Test
    @TestMetadata("multipleBounds.kt")
    public void testMultipleBounds() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/multipleBounds.kt");
    }
    @Test
    @TestMetadata("reifiedTypeParameterInBody.kt")
    public void testReifiedTypeParameterInBody() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/reifiedTypeParameterInBody.kt");
    }
    @Test
    @TestMetadata("reified_argumentInFunctionalType.kt")
    public void testReified_argumentInFunctionalType() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/reified_argumentInFunctionalType.kt");
    }
    @Test
    @TestMetadata("reified_extensionReceiver.kt")
    public void testReified_extensionReceiver() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/reified_extensionReceiver.kt");
    }
    @Test
    @TestMetadata("reified_returnType.kt")
    public void testReified_returnType() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/reified_returnType.kt");
    }
    @Test
    @TestMetadata("reified_valueParameter.kt")
    public void testReified_valueParameter() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/reified_valueParameter.kt");
    }
    @Test
    @TestMetadata("reified_valueParameter_vararg.kt")
    public void testReified_valueParameter_vararg() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/reified_valueParameter_vararg.kt");
    }
    @Test
    @TestMetadata("TypeParameterInAnonymousObject.kt")
    public void testTypeParameterInAnonymousObject() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/TypeParameterInAnonymousObject.kt");
    }
    @Test
    @TestMetadata("TypeParameterInFunctionLiteral.kt")
    public void testTypeParameterInFunctionLiteral() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/TypeParameterInFunctionLiteral.kt");
    }
    @Test
    @TestMetadata("whereClause1.kt")
    public void testWhereClause1() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/whereClause1.kt");
    }
    @Test
    @TestMetadata("whereClause2.kt")
    public void testWhereClause2() {
      runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/typeParameter/whereClause2.kt");
    }
  }
@Nested
@TestMetadata("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors")
@TestDataPath("$PROJECT_ROOT")
public class WithErrors {
@Test
public void testAllFilesPresentInWithErrors() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors"), Pattern.compile("^(.+)\\.kt$"), null, true, "withTestCompilerPluginEnabled");
}
@Test
@TestMetadata("ambiguityByReturnType.kt")
public void testAmbiguityByReturnType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ambiguityByReturnType.kt");
}
@Test
@TestMetadata("ambiguous.kt")
public void testAmbiguous() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ambiguous.kt");
}
@Test
@TestMetadata("ambiguousImplicitInvoke.kt")
public void testAmbiguousImplicitInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ambiguousImplicitInvoke.kt");
}
@Test
@TestMetadata("ambiguousWithExplicitTypeParameters.kt")
public void testAmbiguousWithExplicitTypeParameters() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ambiguousWithExplicitTypeParameters.kt");
}
@Test
@TestMetadata("ambiguousWithInferredTypeParameters.kt")
public void testAmbiguousWithInferredTypeParameters() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ambiguousWithInferredTypeParameters.kt");
}
@Test
@TestMetadata("annotationOnExpression_asT.kt")
public void testAnnotationOnExpression_asT() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/annotationOnExpression_asT.kt");
}
@Test
@TestMetadata("annotationOnExpression_destructuring.kt")
public void testAnnotationOnExpression_destructuring() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/annotationOnExpression_destructuring.kt");
}
@Test
@TestMetadata("annotationOnReceiver.kt")
public void testAnnotationOnReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/annotationOnReceiver.kt");
}
@Test
@TestMetadata("AnnotationParameter.kt")
public void testAnnotationParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/AnnotationParameter.kt");
}
@Test
@TestMetadata("arrayOfInAnnotation.kt")
public void testArrayOfInAnnotation() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/arrayOfInAnnotation.kt");
}
@Test
@TestMetadata("assignementInExpression.kt")
public void testAssignementInExpression() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/assignementInExpression.kt");
}
@Test
@TestMetadata("binaryExpressionWithDeclarationRHS.kt")
public void testBinaryExpressionWithDeclarationRHS() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/binaryExpressionWithDeclarationRHS.kt");
}
@Test
@TestMetadata("builderInference.kt")
public void testBuilderInference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/builderInference.kt");
}
@Test
@TestMetadata("BuiltInImportsNoRuntime.kt")
public void testBuiltInImportsNoRuntime() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/BuiltInImportsNoRuntime.kt");
}
@Test
@TestMetadata("ByReturnExpression.kt")
public void testByReturnExpression() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ByReturnExpression.kt");
}
@Test
@TestMetadata("callWithUnresolvedReturnType.kt")
public void testCallWithUnresolvedReturnType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/callWithUnresolvedReturnType.kt");
}
@Test
@TestMetadata("callWithUnresolvedTypeBound.kt")
public void testCallWithUnresolvedTypeBound() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/callWithUnresolvedTypeBound.kt");
}
@Test
@TestMetadata("ClassNameBeforeDot.kt")
public void testClassNameBeforeDot() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ClassNameBeforeDot.kt");
}
@Test
@TestMetadata("ClassReferenceInIncorrectWhenClause.kt")
public void testClassReferenceInIncorrectWhenClause() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ClassReferenceInIncorrectWhenClause.kt");
}
@Test
@TestMetadata("ClassWithMultipleSuperTypeCalls.kt")
public void testClassWithMultipleSuperTypeCalls() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ClassWithMultipleSuperTypeCalls.kt");
}
@Test
@TestMetadata("CollectionLiteralLeft.kt")
public void testCollectionLiteralLeft() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/CollectionLiteralLeft.kt");
}
@Test
@TestMetadata("CollectionLiteralRight.kt")
public void testCollectionLiteralRight() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/CollectionLiteralRight.kt");
}
@Test
@TestMetadata("compoundAssignOnGenericVar.kt")
public void testCompoundAssignOnGenericVar() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/compoundAssignOnGenericVar.kt");
}
@Test
@TestMetadata("constructorCallWithSubstitution.kt")
public void testConstructorCallWithSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/constructorCallWithSubstitution.kt");
}
@Test
@TestMetadata("constructorCallWithSubstitutionMissedType.kt")
public void testConstructorCallWithSubstitutionMissedType() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/constructorCallWithSubstitutionMissedType.kt");
}
@Test
@TestMetadata("constructorCallWithSubstitutionMissedTypeWithBound.kt")
public void testConstructorCallWithSubstitutionMissedTypeWithBound() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/constructorCallWithSubstitutionMissedTypeWithBound.kt");
}
@Test
@TestMetadata("constructorCallWithSubstitutionWrongBound.kt")
public void testConstructorCallWithSubstitutionWrongBound() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/constructorCallWithSubstitutionWrongBound.kt");
}
@Test
@TestMetadata("CoroutineSuspensionPoint.kt")
public void testCoroutineSuspensionPoint() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/CoroutineSuspensionPoint.kt");
}
@Test
@TestMetadata("DanglingAnnotations.kt")
public void testDanglingAnnotations() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/DanglingAnnotations.kt");
}
@Test
@TestMetadata("defaultValueThatCallsPreviousParameter.kt")
public void testDefaultValueThatCallsPreviousParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/defaultValueThatCallsPreviousParameter.kt");
}
@Test
@TestMetadata("delegatedConstructorCallWithSubstitution.kt")
public void testDelegatedConstructorCallWithSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorCallWithSubstitution.kt");
}
@Test
@TestMetadata("delegatedConstructorCall_super.kt")
public void testDelegatedConstructorCall_super() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorCall_super.kt");
}
@Test
@TestMetadata("delegatedConstructorCall_super_unresolved.kt")
public void testDelegatedConstructorCall_super_unresolved() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorCall_super_unresolved.kt");
}
@Test
@TestMetadata("delegatedConstructorCall_this.kt")
public void testDelegatedConstructorCall_this() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorCall_this.kt");
}
@Test
@TestMetadata("delegatedConstructorCall_this_unresolved.kt")
public void testDelegatedConstructorCall_this_unresolved() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorCall_this_unresolved.kt");
}
@Test
@TestMetadata("delegatedConstructorInapplicable.kt")
public void testDelegatedConstructorInapplicable() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorInapplicable.kt");
}
@Test
@TestMetadata("delegatedConstructorInapplicableDifferentParametersCount.kt")
public void testDelegatedConstructorInapplicableDifferentParametersCount() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorInapplicableDifferentParametersCount.kt");
}
@Test
@TestMetadata("delegatedConstructorWithSubstitutionExtraTypeArgument.kt")
public void testDelegatedConstructorWithSubstitutionExtraTypeArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorWithSubstitutionExtraTypeArgument.kt");
}
@Test
@TestMetadata("delegatedConstructorWithSubstitutionMissedTypeArgument.kt")
public void testDelegatedConstructorWithSubstitutionMissedTypeArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/delegatedConstructorWithSubstitutionMissedTypeArgument.kt");
}
@Test
@TestMetadata("DeprecatedHidden.kt")
public void testDeprecatedHidden() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/DeprecatedHidden.kt");
}
@Test
@TestMetadata("enumSuperTypeInterface.kt")
public void testEnumSuperTypeInterface() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/enumSuperTypeInterface.kt");
}
@Test
@TestMetadata("ExpectSuperClassConstructorArgument.kt")
public void testExpectSuperClassConstructorArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ExpectSuperClassConstructorArgument.kt");
}
@Test
@TestMetadata("ExpectSuperClassConstructorCall.kt")
public void testExpectSuperClassConstructorCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ExpectSuperClassConstructorCall.kt");
}
@Test
@TestMetadata("FakeJavaLang1.kt")
public void testFakeJavaLang1() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/FakeJavaLang1.kt");
}
@Test
@TestMetadata("FakeJavaLang2.kt")
public void testFakeJavaLang2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/FakeJavaLang2.kt");
}
@Test
@TestMetadata("FakeJavaLang3.kt")
public void testFakeJavaLang3() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/FakeJavaLang3.kt");
}
@Test
@TestMetadata("FakeJavaLang4.kt")
public void testFakeJavaLang4() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/FakeJavaLang4.kt");
}
@Test
@TestMetadata("functionCallWithNonTrailingLambdaArgument.kt")
public void testFunctionCallWithNonTrailingLambdaArgument() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionCallWithNonTrailingLambdaArgument.kt");
}
@Test
@TestMetadata("functionCallWithTooFewTypeArguments.kt")
public void testFunctionCallWithTooFewTypeArguments() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionCallWithTooFewTypeArguments.kt");
}
@Test
@TestMetadata("functionCallWithTooFewTypeArguments2.kt")
public void testFunctionCallWithTooFewTypeArguments2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionCallWithTooFewTypeArguments2.kt");
}
@Test
@TestMetadata("functionCallWithTooManyTypeArguments.kt")
public void testFunctionCallWithTooManyTypeArguments() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionCallWithTooManyTypeArguments.kt");
}
@Test
@TestMetadata("functionCallWithTooManyTypeArguments2.kt")
public void testFunctionCallWithTooManyTypeArguments2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionCallWithTooManyTypeArguments2.kt");
}
@Test
@TestMetadata("functionReceiverSubstitution.kt")
public void testFunctionReceiverSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionReceiverSubstitution.kt");
}
@Test
@TestMetadata("functionReceiverSubstitutionWithConflict.kt")
public void testFunctionReceiverSubstitutionWithConflict() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionReceiverSubstitutionWithConflict.kt");
}
@Test
@TestMetadata("functionTypeVariableCall_extensionReceiver.kt")
public void testFunctionTypeVariableCall_extensionReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/functionTypeVariableCall_extensionReceiver.kt");
}
@Test
@TestMetadata("getterAssignment.kt")
public void testGetterAssignment() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/getterAssignment.kt");
}
@Test
@TestMetadata("hiddenConstructor.kt")
public void testHiddenConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/hiddenConstructor.kt");
}
@Test
@TestMetadata("hiddenDeprecated.kt")
public void testHiddenDeprecated() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/hiddenDeprecated.kt");
}
@Test
@TestMetadata("implicitTypeSubstituteOverrideFromOtherModule.kt")
public void testImplicitTypeSubstituteOverrideFromOtherModule() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/implicitTypeSubstituteOverrideFromOtherModule.kt");
}
@Test
@TestMetadata("InObjectClassObject.kt")
public void testInObjectClassObject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/InObjectClassObject.kt");
}
@Test
@TestMetadata("InSecondClassObject.kt")
public void testInSecondClassObject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/InSecondClassObject.kt");
}
@Test
@TestMetadata("incompleteBinaryExpression.kt")
public void testIncompleteBinaryExpression() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/incompleteBinaryExpression.kt");
}
@Test
@TestMetadata("incompleteCodeNoParenthesis.kt")
public void testIncompleteCodeNoParenthesis() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/incompleteCodeNoParenthesis.kt");
}
@Test
@TestMetadata("incompleteCodeNoParenthesisDotQualifier.kt")
public void testIncompleteCodeNoParenthesisDotQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/incompleteCodeNoParenthesisDotQualifier.kt");
}
@Test
@TestMetadata("incompleteCodeWithAmbiguity.kt")
public void testIncompleteCodeWithAmbiguity() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/incompleteCodeWithAmbiguity.kt");
}
@Test
@TestMetadata("incorrectCodeJavaDeclaration.kt")
public void testIncorrectCodeJavaDeclaration() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/incorrectCodeJavaDeclaration.kt");
}
@Test
@TestMetadata("indexedGetWithNotEnoughArgs.kt")
public void testIndexedGetWithNotEnoughArgs() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/indexedGetWithNotEnoughArgs.kt");
}
@Test
@TestMetadata("indexedGetWithTooManyArgs.kt")
public void testIndexedGetWithTooManyArgs() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/indexedGetWithTooManyArgs.kt");
}
@Test
@TestMetadata("indexedSetWithNotEnoughArgs.kt")
public void testIndexedSetWithNotEnoughArgs() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/indexedSetWithNotEnoughArgs.kt");
}
@Test
@TestMetadata("indexedSetWithTooManyArgs.kt")
public void testIndexedSetWithTooManyArgs() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/indexedSetWithTooManyArgs.kt");
}
@Test
@TestMetadata("invalidImplicitInvoke.kt")
public void testInvalidImplicitInvoke() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/invalidImplicitInvoke.kt");
}
@Test
@TestMetadata("InvisibleMember.kt")
public void testInvisibleMember() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/InvisibleMember.kt");
}
@Test
@TestMetadata("javaPropertyGetter_unqualified.kt")
public void testJavaPropertyGetter_unqualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/javaPropertyGetter_unqualified.kt");
}
@Test
@TestMetadata("javaPropertySetterIncomplete.kt")
public void testJavaPropertySetterIncomplete() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/javaPropertySetterIncomplete.kt");
}
@Test
@TestMetadata("javaPropertySetter_unqualified.kt")
public void testJavaPropertySetter_unqualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/javaPropertySetter_unqualified.kt");
}
@Test
@TestMetadata("KotlinPropertyAssignmentWithErrors.kt")
public void testKotlinPropertyAssignmentWithErrors() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/KotlinPropertyAssignmentWithErrors.kt");
}
@Test
@TestMetadata("kotlinPropertyGetter.kt")
public void testKotlinPropertyGetter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/kotlinPropertyGetter.kt");
}
@Test
@TestMetadata("kotlinPropertySetter.kt")
public void testKotlinPropertySetter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/kotlinPropertySetter.kt");
}
@Test
@TestMetadata("kotlinPropertySetterGeneric.kt")
public void testKotlinPropertySetterGeneric() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/kotlinPropertySetterGeneric.kt");
}
@Test
@TestMetadata("KotlinPropertyWithGetterAndSetterAssignmentWithErrors.kt")
public void testKotlinPropertyWithGetterAndSetterAssignmentWithErrors() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/KotlinPropertyWithGetterAndSetterAssignmentWithErrors.kt");
}
@Test
@TestMetadata("MissingTypeArgumentBeforeDot.kt")
public void testMissingTypeArgumentBeforeDot() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/MissingTypeArgumentBeforeDot.kt");
}
@Test
@TestMetadata("MissingTypeArgumentBeforeDotQualified.kt")
public void testMissingTypeArgumentBeforeDotQualified() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/MissingTypeArgumentBeforeDotQualified.kt");
}
@Test
@TestMetadata("NoSelectorInDotQualifiedCall.kt")
public void testNoSelectorInDotQualifiedCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/NoSelectorInDotQualifiedCall.kt");
}
@Test
@TestMetadata("NoSelectorInDotQualifiedCall_ResolveInsideLambda.kt")
public void testNoSelectorInDotQualifiedCall_ResolveInsideLambda() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/NoSelectorInDotQualifiedCall_ResolveInsideLambda.kt");
}
@Test
@TestMetadata("NoSelectorInSafeQualifiedCall.kt")
public void testNoSelectorInSafeQualifiedCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/NoSelectorInSafeQualifiedCall.kt");
}
@Test
@TestMetadata("PackageFromAnnotationOnFileWithUnresolvedReference.kt")
public void testPackageFromAnnotationOnFileWithUnresolvedReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/PackageFromAnnotationOnFileWithUnresolvedReference.kt");
}
@Test
@TestMetadata("PackageFromAnnotationOnFunctionWithUnresolvedReference.kt")
public void testPackageFromAnnotationOnFunctionWithUnresolvedReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/PackageFromAnnotationOnFunctionWithUnresolvedReference.kt");
}
@Test
@TestMetadata("prefixUnaryOperatorOnGenericVar.kt")
public void testPrefixUnaryOperatorOnGenericVar() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/prefixUnaryOperatorOnGenericVar.kt");
}
@Test
@TestMetadata("privateMember.kt")
public void testPrivateMember() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/privateMember.kt");
}
@Test
@TestMetadata("PropertyPlaceInClassObjectInObject.kt")
public void testPropertyPlaceInClassObjectInObject() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/PropertyPlaceInClassObjectInObject.kt");
}
@Test
@TestMetadata("propertyReceiverSubstitution.kt")
public void testPropertyReceiverSubstitution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/propertyReceiverSubstitution.kt");
}
@Test
@TestMetadata("propertyReceiverSubstitutionWithConflict.kt")
public void testPropertyReceiverSubstitutionWithConflict() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/propertyReceiverSubstitutionWithConflict.kt");
}
@Test
@TestMetadata("recursiveTypeParameter.kt")
public void testRecursiveTypeParameter() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/recursiveTypeParameter.kt");
}
@Test
@TestMetadata("resolveCallInSuperConstructorParam.kt")
public void testResolveCallInSuperConstructorParam() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/resolveCallInSuperConstructorParam.kt");
}
@Test
@TestMetadata("ResolveClassNameInCallExpression.kt")
public void testResolveClassNameInCallExpression() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/ResolveClassNameInCallExpression.kt");
}
@Test
@TestMetadata("samFromJava_lambda.kt")
public void testSamFromJava_lambda() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/samFromJava_lambda.kt");
}
@Test
@TestMetadata("samFromJava_methodReference.kt")
public void testSamFromJava_methodReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/samFromJava_methodReference.kt");
}
@Test
@TestMetadata("simpleCallWithNonMatchingArgs.kt")
public void testSimpleCallWithNonMatchingArgs() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/simpleCallWithNonMatchingArgs.kt");
}
@Test
@TestMetadata("substituteOverrideResolution.kt")
public void testSubstituteOverrideResolution() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/substituteOverrideResolution.kt");
}
@Test
@TestMetadata("superTypeCallEntryResolveToCallInfo.kt")
public void testSuperTypeCallEntryResolveToCallInfo() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/superTypeCallEntryResolveToCallInfo.kt");
}
@Test
@TestMetadata("SyntheticPropertyWithErrors.kt")
public void testSyntheticPropertyWithErrors() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/SyntheticPropertyWithErrors.kt");
}
@Test
@TestMetadata("TopLevelClassVsLocalClassQualifier.kt")
public void testTopLevelClassVsLocalClassQualifier() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/TopLevelClassVsLocalClassQualifier.kt");
}
@Test
@TestMetadata("TopLevelDestructingDeclaration.kt")
public void testTopLevelDestructingDeclaration() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/TopLevelDestructingDeclaration.kt");
}
@Test
@TestMetadata("TypeArgumentBeforeDot2.kt")
public void testTypeArgumentBeforeDot2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/TypeArgumentBeforeDot2.kt");
}
@Test
@TestMetadata("TypeArgumentUnresolvedClass.kt")
public void testTypeArgumentUnresolvedClass() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/TypeArgumentUnresolvedClass.kt");
}
@Test
@TestMetadata("TypeArgumentUnresolvedConstructor.kt")
public void testTypeArgumentUnresolvedConstructor() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/TypeArgumentUnresolvedConstructor.kt");
}
@Test
@TestMetadata("TypeArgumentWrongNumber.kt")
public void testTypeArgumentWrongNumber() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/TypeArgumentWrongNumber.kt");
}
@Test
@TestMetadata("typeParameterAsValue.kt")
public void testTypeParameterAsValue() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/typeParameterAsValue.kt");
}
@Test
@TestMetadata("typecheckerRecursiveError.kt")
public void testTypecheckerRecursiveError() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/typecheckerRecursiveError.kt");
}
@Test
@TestMetadata("unitTypeFromOtherModule.kt")
public void testUnitTypeFromOtherModule() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/unitTypeFromOtherModule.kt");
}
@Test
@TestMetadata("unresolvedReference.kt")
public void testUnresolvedReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/unresolvedReference.kt");
}
@Test
@TestMetadata("unresolvedSuperReference.kt")
public void testUnresolvedSuperReference() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/unresolvedSuperReference.kt");
}
@Test
@TestMetadata("variableAsFunctionLikeCall.kt")
public void testVariableAsFunctionLikeCall() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/variableAsFunctionLikeCall.kt");
}
@Test
@TestMetadata("variableAsFunctionWithParameterNameAnnotationConflict2.kt")
public void testVariableAsFunctionWithParameterNameAnnotationConflict2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/variableAsFunctionWithParameterNameAnnotationConflict2.kt");
}
@Test
@TestMetadata("variableWithInvokeFunctionCall_extensionReceiver.kt")
public void testVariableWithInvokeFunctionCall_extensionReceiver() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/variableWithInvokeFunctionCall_extensionReceiver.kt");
}
@Test
@TestMetadata("WrongNumberOfTypeArguments.kt")
public void testWrongNumberOfTypeArguments() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/WrongNumberOfTypeArguments.kt");
}
@Test
@TestMetadata("WrongNumberOfTypeArguments2.kt")
public void testWrongNumberOfTypeArguments2() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/WrongNumberOfTypeArguments2.kt");
}
@Test
@TestMetadata("WrongNumberOfTypeArguments3.kt")
public void testWrongNumberOfTypeArguments3() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/WrongNumberOfTypeArguments3.kt");
}
@Test
@TestMetadata("WrongNumberOfTypeArguments4.kt")
public void testWrongNumberOfTypeArguments4() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/WrongNumberOfTypeArguments4.kt");
}
@Test
@TestMetadata("WrongNumberOfTypeArgumentsInSupertype.kt")
public void testWrongNumberOfTypeArgumentsInSupertype() {
runTest("analysis/analysis-api/testData/components/resolver/singleByPsi/withErrors/WrongNumberOfTypeArgumentsInSupertype.kt");
}
}
} | java | github | https://github.com/JetBrains/kotlin | analysis/analysis-api-fe10/tests-gen/org/jetbrains/kotlin/analysis/api/fe10/test/cases/generated/cases/components/resolver/Fe10IdeNormalAnalysisSourceModuleResolveReferenceTestGenerated.java |
# initializations
import logging
import numpy
import theano
logging.basicConfig(level=logging.INFO)  # import-time side effect: configures the root logger
logger = logging.getLogger(__name__)  # module-level logger
rng = numpy.random.RandomState(1234)  # fixed seed so weight inits are reproducible
def constant_weight(shape, value=0., name=None):
    """Return a theano shared variable of `shape` filled with `value`."""
    filled = numpy.full(shape, value, dtype=theano.config.floatX)
    return theano.shared(value=filled, borrow=True, name=name)
# uniform initialization for weights
def uniform_weight(rng, shape, name=None):
    """Sample weights uniformly from [-6/sum(shape), +6/sum(shape)].

    NOTE(review): classic Glorot/Xavier init uses sqrt(6./sum(shape)) as the
    bound; this omits the sqrt -- confirm that is intentional before changing.
    """
    bound = 6. / sum(shape)
    draws = rng.uniform(low=-bound, high=bound, size=shape)
    param = numpy.asarray(draws, dtype=theano.config.floatX)
    return theano.shared(value=param, borrow=True, name=name)
# orthogonal initialization for weights
# orthogonal initialization for weights
def ortho_weight(rng, shape, scale=1., name=None):
    """Build a (semi-)orthogonal weight matrix of `shape`, scaled by `scale`.

    Square shapes get Q from a QR decomposition of a Gaussian matrix;
    rectangular shapes compose two square orthogonal factors and slice.
    """
    #W = numpy.random.randn(ndim, ndim)
    #u, s, v = numpy.linalg.svd(W)
    #param = u.astype(theano.config.floatX)
    #return theano.shared(value=param, borrow=True, name=name)
    if len(shape) != 2:
        raise ValueError  # orthogonal init is only defined for 2-D matrices
    if shape[0] == shape[1]:
        # Square case: QR of a Gaussian matrix yields an orthogonal Q.
        M = rng.randn(*shape).astype(theano.config.floatX)
        Q, R = numpy.linalg.qr(M)
        # Sign fix: multiply columns by sign(diag(R)) to make the
        # decomposition unique (diag(R) effectively positive).
        Q = Q * numpy.sign(numpy.diag(R))
        param = Q*scale
        return theano.shared(value=param, borrow=True, name=name)
    # Rectangular case: build two square orthogonal matrices (one per
    # dimension), sign-fix both, and take a shape[0] x shape[1] product
    # so rows/columns remain orthonormal.
    M1 = rng.randn(shape[0], shape[0]).astype(theano.config.floatX)
    M2 = rng.randn(shape[1], shape[1]).astype(theano.config.floatX)
    Q1, R1 = numpy.linalg.qr(M1)
    Q2, R2 = numpy.linalg.qr(M2)
    Q1 = Q1 * numpy.sign(numpy.diag(R1))
    Q2 = Q2 * numpy.sign(numpy.diag(R2))
    n_min = min(shape[0], shape[1])
    param = numpy.dot(Q1[:, :n_min], Q2[:n_min, :]) * scale
    return theano.shared(value=param, borrow=True, name=name)
# weight initializer, normal by default
def norm_weight(rng, shape, loc=0, scale=0.01, name=None):
    """Gaussian-initialized shared weights (mean `loc`, std-dev `scale`)."""
    draws = rng.normal(loc=loc, scale=scale, size=shape)
    param = numpy.asarray(draws, dtype=theano.config.floatX)
    return theano.shared(value=param, borrow=True, name=name)
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\DependencyInjection\Loader\Configurator;
use Symfony\Component\Messenger\DataCollector\MessengerDataCollector;
// Registers the Messenger data collector that feeds the web profiler's
// "messenger" panel (template + panel id are declared via the tag).
return static function (ContainerConfigurator $container) {
    $container->services()
        ->set('data_collector.messenger', MessengerDataCollector::class)
            ->tag('data_collector', [
                'template' => '@WebProfiler/Collector/messenger.html.twig',
                'id' => 'messenger',
                'priority' => 100,
            ])
    ;
};
#!/bin/bash
# Install the JNI header: copy jni.h into the system include path so that
# native builds can `#include <jni.h>`.
set -ex  # -e: abort on the first failing command; -x: echo commands for CI logs
mkdir -p /usr/local/include
cp jni.h /usr/local/include
from __future__ import absolute_import
from __future__ import unicode_literals
from .. import unittest
from compose.utils import split_buffer
class SplitBufferTest(unittest.TestCase):
    """Unit tests for compose.utils.split_buffer: splitting a stream of
    byte chunks into decoded lines on '\\n' boundaries."""

    def test_single_line_chunks(self):
        def reader():
            yield b'abc\n'
            yield b'def\n'
            yield b'ghi\n'
        self.assert_produces(reader, ['abc\n', 'def\n', 'ghi\n'])

    def test_no_end_separator(self):
        def reader():
            yield b'abc\n'
            yield b'def\n'
            yield b'ghi'
        self.assert_produces(reader, ['abc\n', 'def\n', 'ghi'])

    def test_multiple_line_chunk(self):
        def reader():
            yield b'abc\ndef\nghi'
        self.assert_produces(reader, ['abc\n', 'def\n', 'ghi'])

    def test_chunked_line(self):
        def reader():
            yield b'a'
            yield b'b'
            yield b'c'
            yield b'\n'
            yield b'd'
        self.assert_produces(reader, ['abc\n', 'd'])

    def test_preserves_unicode_sequences_within_lines(self):
        string = u"a\u2022c\n"

        def reader():
            yield string.encode('utf-8')

        self.assert_produces(reader, [string])

    def assert_produces(self, reader, expectations):
        """Assert split_buffer(reader()) yields exactly `expectations`.

        Materializes the generator and compares lengths first: the previous
        zip()-based check silently passed when split_buffer produced fewer
        or more chunks than expected.
        """
        split = list(split_buffer(reader()))
        self.assertEqual(len(split), len(expectations))
        for (actual, expected) in zip(split, expectations):
            self.assertEqual(type(actual), type(expected))
            self.assertEqual(actual, expected)
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, filter_strip_join
from frappe.website.website_generator import WebsiteGenerator
from erpnext.utilities.address_and_contact import load_address_and_contact
class SalesPartner(WebsiteGenerator):
    """Sales Partner master document, also published as a website page
    via frappe's WebsiteGenerator."""

    # WebsiteGenerator configuration: page title field, publication flag,
    # and the Jinja template used to render the page.
    page_title_field = "partner_name"
    condition_field = "show_in_website"
    template = "templates/generators/sales_partner.html"

    def onload(self):
        """Load address and contacts in `__onload`"""
        load_address_and_contact(self, "sales_partner")

    def autoname(self):
        # The document name is simply the partner name.
        self.name = self.partner_name

    def validate(self):
        # All partner pages are published under the /partners route.
        self.parent_website_route = "partners"
        super(SalesPartner, self).validate()
        if self.partner_website and not self.partner_website.startswith("http"):
            # Normalize bare domains so the link is clickable on the website.
            self.partner_website = "http://" + self.partner_website

    def get_contacts(self, nm):
        """Return contact rows (name, full name, contact_no, email_id)
        linked to sales partner `nm`, or '' when `nm` is falsy."""
        if nm:
            return frappe.db.convert_to_lists(frappe.db.sql("""
                select name, CONCAT(IFNULL(first_name,''),
                ' ',IFNULL(last_name,'')),contact_no,email_id
                from `tabContact` where sales_partner = %s""", nm))
        else:
            return ''

    def get_context(self, context):
        """Build the website template context from the partner's primary
        address (email, formatted address block, phone list)."""
        address = frappe.db.get_value("Address",
            {"sales_partner": self.name, "is_primary_address": 1},
            "*", as_dict=True)
        if address:
            city_state = ", ".join(filter(None, [address.city, address.state]))
            address_rows = [address.address_line1, address.address_line2,
                city_state, address.pincode, address.country]
            context.update({
                "email": address.email_id,
                "partner_address": filter_strip_join(address_rows, "\n<br>"),
                "phone": filter_strip_join(cstr(address.phone).split(","), "\n<br>")
            })
        return context
#### Note: this error code is no longer emitted by the compiler.
A `while let` loop attempts to match its pattern on each iteration, and enters
the body only when the match succeeds. If the pattern is irrefutable (it can
never fail to match), the loop condition is meaningless, so use a regular
`let`-binding inside a `loop` instead. For instance:
```no_run
struct Irrefutable(i32);
let irr = Irrefutable(0);
// This fails to compile because the match is irrefutable.
while let Irrefutable(x) = irr {
// ...
}
```
Try this instead:
```no_run
struct Irrefutable(i32);
let irr = Irrefutable(0);
loop {
let Irrefutable(x) = irr;
// ...
}
``` | unknown | github | https://github.com/rust-lang/rust | compiler/rustc_error_codes/src/error_codes/E0165.md |
from __future__ import unicode_literals
# Minimal CloudFormation template fixture: a Route53 hosted zone, an HTTP
# health check, and an A record set that references both via {"Ref": ...}.
template = {
    "Resources": {
        # The DNS zone the record below lives in.
        "HostedZone": {
            "Type": "AWS::Route53::HostedZone",
            "Properties": {
                "Name": "my_zone"
            }
        },
        # HTTP health check polled every 10s; 3 consecutive failures mark
        # the endpoint unhealthy.
        "my_health_check": {
            "Type": "AWS::Route53::HealthCheck",
            "Properties": {
                "HealthCheckConfig": {
                    "FailureThreshold": 3,
                    "IPAddress": "10.0.0.4",
                    "Port": 80,
                    "RequestInterval": 10,
                    "ResourcePath": "/",
                    "Type": "HTTP",
                }
            }
        },
        # A record tied to the zone and gated on the health check above.
        "myDNSRecord": {
            "Type": "AWS::Route53::RecordSet",
            "Properties": {
                "HostedZoneId": {"Ref": "HostedZone"},
                "Comment": "DNS name for my instance.",
                "Name": "my_record_set",
                "Type": "A",
                "TTL": "900",
                "ResourceRecords": ["my.example.com"],
                "HealthCheckId": {"Ref": "my_health_check"},
            }
        }
    },
}
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from glance.common import crypt
from glance.common import exception
import glance.context
import glance.db
from glance.openstack.common import uuidutils
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils
CONF = cfg.CONF
CONF.import_opt('metadata_encryption_key', 'glance.common.config')
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
UUID2 = 'a85abd86-55b3-4d5b-b0b4-5d0a6e6042fc'
UUID3 = '971ec09a-8067-4bc8-a91f-ae3557f1c4c7'
UUID4 = '6bbe7cc2-eae7-4c0f-b50d-a7160b0c6a86'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
TENANT3 = '5a3e60e8-cfa9-4a9e-a90a-62b42cea92b8'
TENANT4 = 'c6c87f25-8a94-47ed-8c83-053c25f42df4'
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
UUID1_LOCATION = 'file:///path/to/image'
UUID1_LOCATION_METADATA = {'key': 'value'}
UUID3_LOCATION = 'http://somehost.com/place'
CHECKSUM = '93264c3edf5972c9f1cb309543d38a5c'
CHCKSUM1 = '43264c3edf4972c9f1cb309543d38a55'
def _db_fixture(id, **kwargs):
obj = {
'id': id,
'name': None,
'is_public': False,
'properties': {},
'checksum': None,
'owner': None,
'status': 'queued',
'tags': [],
'size': None,
'locations': [],
'protected': False,
'disk_format': None,
'container_format': None,
'deleted': False,
'min_ram': None,
'min_disk': None,
}
obj.update(kwargs)
return obj
def _db_image_member_fixture(image_id, member_id, **kwargs):
obj = {
'image_id': image_id,
'member': member_id,
}
obj.update(kwargs)
return obj
class TestImageRepo(test_utils.BaseTestCase):
    """Tests for glance.db.ImageRepo: get/list/filter/sort/add/save/remove
    against the fake DB fixtures created in setUp.

    Fixes applied: deprecated assertEquals aliases replaced by assertEqual
    (the alias was removed in Python 3.12), side-effect-only list
    comprehensions replaced by plain loops, redundant list() wrappers
    around list comprehensions dropped.
    """

    def setUp(self):
        super(TestImageRepo, self).setUp()
        self.db = unit_test_utils.FakeDB()
        self.db.reset()
        self.context = glance.context.RequestContext(
            user=USER1, tenant=TENANT1)
        self.image_repo = glance.db.ImageRepo(self.context, self.db)
        self.image_factory = glance.domain.ImageFactory()
        self._create_images()
        self._create_image_members()

    def _create_images(self):
        self.db.reset()
        self.images = [
            _db_fixture(UUID1, owner=TENANT1, checksum=CHECKSUM,
                        name='1', size=256,
                        is_public=True, status='active',
                        locations=[{'url': UUID1_LOCATION,
                                    'metadata': UUID1_LOCATION_METADATA}]),
            _db_fixture(UUID2, owner=TENANT1, checksum=CHCKSUM1,
                        name='2', size=512, is_public=False),
            _db_fixture(UUID3, owner=TENANT3, checksum=CHCKSUM1,
                        name='3', size=1024, is_public=True,
                        locations=[{'url': UUID3_LOCATION,
                                    'metadata': {}}]),
            _db_fixture(UUID4, owner=TENANT4, name='4', size=2048),
        ]
        # Plain loop (not a discarded list comprehension) for side effects.
        for image in self.images:
            self.db.image_create(None, image)
        self.db.image_tag_set_all(None, UUID1, ['ping', 'pong'])

    def _create_image_members(self):
        self.image_members = [
            _db_image_member_fixture(UUID2, TENANT2),
            _db_image_member_fixture(UUID2, TENANT3, status='accepted'),
        ]
        for image_member in self.image_members:
            self.db.image_member_create(None, image_member)

    def test_get(self):
        image = self.image_repo.get(UUID1)
        self.assertEqual(image.image_id, UUID1)
        self.assertEqual(image.name, '1')
        self.assertEqual(image.tags, set(['ping', 'pong']))
        self.assertEqual(image.visibility, 'public')
        self.assertEqual(image.status, 'active')
        self.assertEqual(image.size, 256)
        self.assertEqual(image.owner, TENANT1)

    def test_location_value(self):
        image = self.image_repo.get(UUID3)
        self.assertEqual(image.locations[0]['url'], UUID3_LOCATION)

    def test_location_data_value(self):
        image = self.image_repo.get(UUID1)
        self.assertEqual(image.locations[0]['url'], UUID1_LOCATION)
        self.assertEqual(image.locations[0]['metadata'],
                         UUID1_LOCATION_METADATA)

    def test_location_data_exists(self):
        image = self.image_repo.get(UUID2)
        self.assertEqual(image.locations, [])

    def test_get_not_found(self):
        self.assertRaises(exception.NotFound, self.image_repo.get,
                          uuidutils.generate_uuid())

    def test_get_forbidden(self):
        # UUID4 belongs to another tenant and is private; repo hides it.
        self.assertRaises(exception.NotFound, self.image_repo.get, UUID4)

    def test_list(self):
        images = self.image_repo.list()
        image_ids = set([i.image_id for i in images])
        self.assertEqual(set([UUID1, UUID2, UUID3]), image_ids)

    def _do_test_list_status(self, status, expected):
        """List as TENANT3 with a membership-status filter and check count."""
        self.context = glance.context.RequestContext(
            user=USER1, tenant=TENANT3)
        self.image_repo = glance.db.ImageRepo(self.context, self.db)
        images = self.image_repo.list(member_status=status)
        self.assertEqual(expected, len(images))

    def test_list_status(self):
        self._do_test_list_status(None, 3)

    def test_list_status_pending(self):
        self._do_test_list_status('pending', 2)

    def test_list_status_rejected(self):
        self._do_test_list_status('rejected', 2)

    def test_list_status_all(self):
        self._do_test_list_status('all', 3)

    def test_list_with_marker(self):
        full_images = self.image_repo.list()
        full_ids = [i.image_id for i in full_images]
        marked_images = self.image_repo.list(marker=full_ids[0])
        actual_ids = [i.image_id for i in marked_images]
        self.assertEqual(actual_ids, full_ids[1:])

    def test_list_with_last_marker(self):
        images = self.image_repo.list()
        marked_images = self.image_repo.list(marker=images[-1].image_id)
        self.assertEqual(len(marked_images), 0)

    def test_limited_list(self):
        limited_images = self.image_repo.list(limit=2)
        self.assertEqual(len(limited_images), 2)

    def test_list_with_marker_and_limit(self):
        full_images = self.image_repo.list()
        full_ids = [i.image_id for i in full_images]
        marked_images = self.image_repo.list(marker=full_ids[0], limit=1)
        actual_ids = [i.image_id for i in marked_images]
        self.assertEqual(actual_ids, full_ids[1:2])

    def test_list_private_images(self):
        filters = {'visibility': 'private'}
        images = self.image_repo.list(filters=filters)
        image_ids = set([i.image_id for i in images])
        self.assertEqual(set([UUID2]), image_ids)

    def test_list_with_checksum_filter_single_image(self):
        filters = {'checksum': CHECKSUM}
        images = self.image_repo.list(filters=filters)
        image_ids = [i.image_id for i in images]
        self.assertEqual(1, len(image_ids))
        self.assertEqual([UUID1], image_ids)

    def test_list_with_checksum_filter_multiple_images(self):
        filters = {'checksum': CHCKSUM1}
        images = self.image_repo.list(filters=filters)
        image_ids = [i.image_id for i in images]
        self.assertEqual(2, len(image_ids))
        self.assertEqual([UUID3, UUID2], image_ids)

    def test_list_with_wrong_checksum(self):
        WRONG_CHKSUM = 'd2fd42f979e1ed1aafadc7eb9354bff839c858cd'
        filters = {'checksum': WRONG_CHKSUM}
        images = self.image_repo.list(filters=filters)
        self.assertEqual(0, len(images))

    def test_list_with_tags_filter_single_tag(self):
        filters = {'tags': ['ping']}
        images = self.image_repo.list(filters=filters)
        image_ids = [i.image_id for i in images]
        self.assertEqual(1, len(image_ids))
        self.assertEqual([UUID1], image_ids)

    def test_list_with_tags_filter_multiple_tags(self):
        filters = {'tags': ['ping', 'pong']}
        images = self.image_repo.list(filters=filters)
        image_ids = [i.image_id for i in images]
        self.assertEqual(1, len(image_ids))
        self.assertEqual([UUID1], image_ids)

    def test_list_with_tags_filter_multiple_tags_and_nonexistent(self):
        filters = {'tags': ['ping', 'fake']}
        images = self.image_repo.list(filters=filters)
        image_ids = [i.image_id for i in images]
        self.assertEqual(0, len(image_ids))

    def test_list_with_wrong_tags(self):
        filters = {'tags': ['fake']}
        images = self.image_repo.list(filters=filters)
        self.assertEqual(0, len(images))

    def test_list_public_images(self):
        filters = {'visibility': 'public'}
        images = self.image_repo.list(filters=filters)
        image_ids = set([i.image_id for i in images])
        self.assertEqual(set([UUID1, UUID3]), image_ids)

    def test_sorted_list(self):
        images = self.image_repo.list(sort_key='size', sort_dir='asc')
        image_ids = [i.image_id for i in images]
        self.assertEqual([UUID1, UUID2, UUID3], image_ids)

    def test_add_image(self):
        image = self.image_factory.new_image(name='added image')
        self.assertEqual(image.updated_at, image.created_at)
        self.image_repo.add(image)
        retrieved_image = self.image_repo.get(image.image_id)
        self.assertEqual(retrieved_image.name, 'added image')
        self.assertEqual(retrieved_image.updated_at, image.updated_at)

    def test_save_image(self):
        image = self.image_repo.get(UUID1)
        original_update_time = image.updated_at
        image.name = 'foo'
        image.tags = ['king', 'kong']
        self.image_repo.save(image)
        current_update_time = image.updated_at
        self.assertGreater(current_update_time, original_update_time)
        image = self.image_repo.get(UUID1)
        self.assertEqual(image.name, 'foo')
        self.assertEqual(image.tags, set(['king', 'kong']))
        self.assertEqual(image.updated_at, current_update_time)

    def test_remove_image(self):
        image = self.image_repo.get(UUID1)
        previous_update_time = image.updated_at
        self.image_repo.remove(image)
        self.assertGreater(image.updated_at, previous_update_time)
        self.assertRaises(exception.NotFound, self.image_repo.get, UUID1)
class TestEncryptedLocations(test_utils.BaseTestCase):
    """Verify that image location URLs are encrypted with
    metadata_encryption_key before reaching the DB and decrypted on read."""

    def setUp(self):
        super(TestEncryptedLocations, self).setUp()
        self.db = unit_test_utils.FakeDB()
        self.db.reset()
        self.context = glance.context.RequestContext(
            user=USER1, tenant=TENANT1)
        self.image_repo = glance.db.ImageRepo(self.context, self.db)
        self.image_factory = glance.domain.ImageFactory()
        # 16-character key used by the crypt helpers for this test run.
        self.crypt_key = '0123456789abcdef'
        self.config(metadata_encryption_key=self.crypt_key)
        self.foo_bar_location = [{'url': 'foo', 'metadata': {}},
                                 {'url': 'bar', 'metadata': {}}]

    def test_encrypt_locations_on_add(self):
        image = self.image_factory.new_image(UUID1)
        image.locations = self.foo_bar_location
        self.image_repo.add(image)
        db_data = self.db.image_get(self.context, UUID1)
        # Stored URLs must not be the plaintext values...
        self.assertNotEqual(db_data['locations'], ['foo', 'bar'])
        # ...but must decrypt back to them.
        decrypted_locations = [crypt.urlsafe_decrypt(self.crypt_key, l['url'])
                               for l in db_data['locations']]
        self.assertEqual(decrypted_locations,
                         [l['url'] for l in self.foo_bar_location])

    def test_encrypt_locations_on_save(self):
        image = self.image_factory.new_image(UUID1)
        self.image_repo.add(image)
        image.locations = self.foo_bar_location
        self.image_repo.save(image)
        db_data = self.db.image_get(self.context, UUID1)
        self.assertNotEqual(db_data['locations'], ['foo', 'bar'])
        decrypted_locations = [crypt.urlsafe_decrypt(self.crypt_key, l['url'])
                               for l in db_data['locations']]
        self.assertEqual(decrypted_locations,
                         [l['url'] for l in self.foo_bar_location])

    def test_decrypt_locations_on_get(self):
        # Seed the DB with pre-encrypted URLs and check get() decrypts them.
        url_loc = ['ping', 'pong']
        orig_locations = [{'url': l, 'metadata': {}} for l in url_loc]
        encrypted_locs = [crypt.urlsafe_encrypt(self.crypt_key, l)
                          for l in url_loc]
        encrypted_locations = [{'url': l, 'metadata': {}}
                               for l in encrypted_locs]
        self.assertNotEqual(encrypted_locations, orig_locations)
        db_data = _db_fixture(UUID1, owner=TENANT1,
                              locations=encrypted_locations)
        self.db.image_create(None, db_data)
        image = self.image_repo.get(UUID1)
        self.assertEqual(image.locations, orig_locations)

    def test_decrypt_locations_on_list(self):
        # Same as above, but through the list() path.
        url_loc = ['ping', 'pong']
        orig_locations = [{'url': l, 'metadata': {}} for l in url_loc]
        encrypted_locs = [crypt.urlsafe_encrypt(self.crypt_key, l)
                          for l in url_loc]
        encrypted_locations = [{'url': l, 'metadata': {}}
                               for l in encrypted_locs]
        self.assertNotEqual(encrypted_locations, orig_locations)
        db_data = _db_fixture(UUID1, owner=TENANT1,
                              locations=encrypted_locations)
        self.db.image_create(None, db_data)
        image = self.image_repo.list()[0]
        self.assertEqual(image.locations, orig_locations)
class TestImageMemberRepo(test_utils.BaseTestCase):
    """Tests for glance.db.ImageMemberRepo (image membership CRUD)."""

    def setUp(self):
        super(TestImageMemberRepo, self).setUp()
        self.db = unit_test_utils.FakeDB()
        self.db.reset()
        self.context = glance.context.RequestContext(
            user=USER1, tenant=TENANT1)
        self.image_repo = glance.db.ImageRepo(self.context, self.db)
        self.image_member_factory = glance.domain.ImageMemberFactory()
        self._create_images()
        self._create_image_members()
        image = self.image_repo.get(UUID1)
        self.image_member_repo = glance.db.ImageMemberRepo(self.context,
                                                           self.db, image)

    def _create_images(self):
        self.images = [
            _db_fixture(UUID1, owner=TENANT1, name='1', size=256,
                        status='active'),
            _db_fixture(UUID2, owner=TENANT1, name='2',
                        size=512, is_public=False),
        ]
        # Plain loop (not a discarded list comprehension) for side effects.
        for image in self.images:
            self.db.image_create(None, image)
        self.db.image_tag_set_all(None, UUID1, ['ping', 'pong'])

    def _create_image_members(self):
        self.image_members = [
            _db_image_member_fixture(UUID1, TENANT2),
            _db_image_member_fixture(UUID1, TENANT3),
        ]
        for image_member in self.image_members:
            self.db.image_member_create(None, image_member)

    def test_list(self):
        image_members = self.image_member_repo.list()
        image_member_ids = set([i.member_id for i in image_members])
        self.assertEqual(set([TENANT2, TENANT3]), image_member_ids)

    def test_list_no_members(self):
        image = self.image_repo.get(UUID2)
        self.image_member_repo_uuid2 = glance.db.ImageMemberRepo(
            self.context, self.db, image)
        image_members = self.image_member_repo_uuid2.list()
        image_member_ids = set([i.member_id for i in image_members])
        self.assertEqual(set([]), image_member_ids)

    def test_save_image_member(self):
        image_member = self.image_member_repo.get(TENANT2)
        image_member.status = 'accepted'
        image_member_updated = self.image_member_repo.save(image_member)
        # BUG FIX: the original called assertTrue(a, b), which only checks
        # the truthiness of `a` and uses `b` as the failure message -- the
        # two ids were never actually compared.
        self.assertEqual(image_member.id, image_member_updated.id)
        self.assertEqual(image_member_updated.status, 'accepted')

    def test_add_image_member(self):
        image = self.image_repo.get(UUID1)
        image_member = self.image_member_factory.new_image_member(image,
                                                                  TENANT4)
        self.assertIsNone(image_member.id)
        retrieved_image_member = self.image_member_repo.add(image_member)
        self.assertEqual(retrieved_image_member.id, image_member.id)
        self.assertEqual(retrieved_image_member.image_id,
                         image_member.image_id)
        self.assertEqual(retrieved_image_member.member_id,
                         image_member.member_id)
        self.assertEqual(retrieved_image_member.status,
                         'pending')

    def test_remove_image_member(self):
        image_member = self.image_member_repo.get(TENANT2)
        self.image_member_repo.remove(image_member)
        self.assertRaises(exception.NotFound, self.image_member_repo.get,
                          TENANT2)

    def test_remove_image_member_does_not_exist(self):
        image = self.image_repo.get(UUID2)
        fake_member = glance.domain.ImageMemberFactory().new_image_member(
            image, TENANT4)
        self.assertRaises(exception.NotFound, self.image_member_repo.remove,
                          fake_member)
#!/usr/bin/env python
# Copyright (C) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# functions that don't fit in well elsewhere
import codecs
import locale
def wrap_stream(stream, encoding='utf-8'):
    """Return a binary stream that accepts data encoded as `encoding`.

    If `stream` (or its underlying binary buffer) already uses `encoding`,
    it is returned directly; otherwise it is wrapped in a
    codecs.EncodedFile that transcodes written data from `encoding` to the
    stream's own encoding.

    :param stream: file-like object, optionally with .encoding/.buffer
    :param encoding: the encoding the caller will write data in
    """
    try:
        stream_enc = stream.encoding
    except AttributeError:
        # Raw binary streams have no .encoding; assume the locale default.
        stream_enc = locale.getpreferredencoding()
    if hasattr(stream, 'buffer'):
        # Text wrappers expose their raw binary buffer; write bytes there.
        stream = stream.buffer
    try:
        # BUG FIX: compare canonical codec names so aliases such as
        # 'UTF8', 'utf_8' and 'utf-8' are recognised as the same encoding
        # instead of triggering a pointless re-encoding wrapper.
        same = codecs.lookup(stream_enc).name == codecs.lookup(encoding).name
    except (LookupError, TypeError):
        # Unknown or None encoding: fall back to the lenient comparison.
        same = str(stream_enc).lower() == str(encoding).lower()
    if same:
        return stream
    return codecs.EncodedFile(stream, encoding, stream_enc)
#from https://gist.github.com/andersx/6061586
#runas run()
#bench run()
#pythran export run()
# A simple energy minimization program that uses steepest descent
# and a force field to minimize the energy of water in internal coordinates.
# Written by Jan H. Jensen, 2013
def Eandg(rOH, thetaHOH):
    """Harmonic force-field energy and gradient for water in internal coords.

    Args:
        rOH: O-H bond distance.
        thetaHOH: H-O-H bond angle (degrees).

    Returns:
        (E, grOH, grthetaHOH): force-field energy, O-H stretch gradient,
        and H-O-H bend gradient.
    """
    # Force-field parameters: harmonic force constants and equilibrium
    # geometry (two O-H bonds, one H-O-H angle).
    kOH = 50.0
    rOHe = 0.95
    kHOH = 50.0
    thetaHOHe = 104.5

    dr = rOH - rOHe
    dtheta = thetaHOH - thetaHOHe
    energy = 2 * kOH * dr ** 2 + kHOH * dtheta ** 2
    return (energy, 2 * kOH * dr, 2 * kHOH * dtheta)
def run():
    """Steepest-descent minimization of the water force field.

    Returns:
        (converged, E, rOH, thetaHOH): convergence flag, last evaluated
        energy, and the final internal coordinates.
    """
    c = 0.005          # steepest-descent step size
    n_steps = 1000000  # fixed iteration count (kept: this is a benchmark loop)
    # starting geometry, deliberately far from equilibrium
    rOH = 10.0
    thetaHOH = 180.0
    for i in range(n_steps):
        (E, grOH, gthetaHOH) = Eandg(rOH, thetaHOH)
        if (abs(grOH) > 0.001 / c or abs(gthetaHOH) > 0.01 / c):
            rOH = rOH - c * grOH
            thetaHOH = thetaHOH - c * gthetaHOH
    # BUG FIX: the original returned the *not yet converged* condition as
    # `converged`, i.e. True meant the gradients were still too large.
    converged = not (abs(grOH) > 0.001 / c or abs(gthetaHOH) > 0.01 / c)
    return converged, E, rOH, thetaHOH
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.context.properties.source;
import java.util.Arrays;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiPredicate;
import java.util.stream.Stream;
import org.jspecify.annotations.Nullable;
import org.springframework.boot.env.PropertySourceInfo;
import org.springframework.boot.origin.Origin;
import org.springframework.boot.origin.PropertySourceOrigin;
import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.env.SystemEnvironmentPropertySource;
import org.springframework.util.Assert;
import org.springframework.util.ConcurrentReferenceHashMap;
/**
* {@link ConfigurationPropertySource} backed by an {@link EnumerablePropertySource}.
* Extends {@link SpringConfigurationPropertySource} with full "relaxed" mapping support.
* In order to use this adapter the underlying {@link PropertySource} must be fully
* enumerable. A security restricted {@link SystemEnvironmentPropertySource} cannot be
* adapted.
*
* @author Phillip Webb
* @author Madhura Bhave
* @see PropertyMapper
*/
class SpringIterableConfigurationPropertySource extends SpringConfigurationPropertySource
implements IterableConfigurationPropertySource, CachingConfigurationPropertySource {
private final BiPredicate<ConfigurationPropertyName, ConfigurationPropertyName> ancestorOfCheck;
private final SoftReferenceConfigurationPropertyCache<Cache> cache;
private volatile @Nullable ConfigurationPropertyName @Nullable [] configurationPropertyNames;
private final @Nullable Map<ConfigurationPropertyName, ConfigurationPropertyState> containsDescendantOfCache;
	/**
	 * Create an adapter over a fully enumerable property source.
	 * @param propertySource the underlying enumerable source
	 * @param systemEnvironmentSource whether the source is the system environment
	 * @param mappers the relaxed-binding name mappers (at least one)
	 */
	SpringIterableConfigurationPropertySource(EnumerablePropertySource<?> propertySource,
			boolean systemEnvironmentSource, PropertyMapper... mappers) {
		super(propertySource, systemEnvironmentSource, mappers);
		assertEnumerablePropertySource();
		// An immutable source never changes, so its cache can live indefinitely.
		boolean immutable = isImmutablePropertySource();
		this.ancestorOfCheck = getAncestorOfCheck(mappers);
		this.cache = new SoftReferenceConfigurationPropertyCache<>(immutable);
		// The descendant-of cache is only kept for the system environment source.
		this.containsDescendantOfCache = (!systemEnvironmentSource) ? null : new ConcurrentReferenceHashMap<>();
	}
private BiPredicate<ConfigurationPropertyName, ConfigurationPropertyName> getAncestorOfCheck(
PropertyMapper[] mappers) {
BiPredicate<ConfigurationPropertyName, ConfigurationPropertyName> ancestorOfCheck = mappers[0]
.getAncestorOfCheck();
for (int i = 1; i < mappers.length; i++) {
ancestorOfCheck = ancestorOfCheck.or(mappers[i].getAncestorOfCheck());
}
return ancestorOfCheck;
}
private void assertEnumerablePropertySource() {
if (getPropertySource() instanceof MapPropertySource mapSource) {
try {
mapSource.getSource().size();
}
catch (UnsupportedOperationException ex) {
throw new IllegalArgumentException("PropertySource must be fully enumerable");
}
}
}
@Override
public ConfigurationPropertyCaching getCaching() {
return this.cache;
}
@Override
public @Nullable ConfigurationProperty getConfigurationProperty(@Nullable ConfigurationPropertyName name) {
if (name == null) {
return null;
}
ConfigurationProperty configurationProperty = super.getConfigurationProperty(name);
if (configurationProperty != null) {
return configurationProperty;
}
for (String candidate : getCache().getMapped(name)) {
Object value = getPropertySourceProperty(candidate);
if (value != null) {
Origin origin = PropertySourceOrigin.get(getPropertySource(), candidate);
return ConfigurationProperty.of(this, name, value, origin);
}
}
return null;
}
@Override
protected @Nullable Object getSystemEnvironmentProperty(Map<String, Object> systemEnvironment, String name) {
return getCache().getSystemEnvironmentProperty(name);
}
@Override
public Stream<ConfigurationPropertyName> stream() {
@Nullable ConfigurationPropertyName[] names = getConfigurationPropertyNames();
return Arrays.stream(names).filter(Objects::nonNull);
}
@Override
public Iterator<ConfigurationPropertyName> iterator() {
return new ConfigurationPropertyNamesIterator(getConfigurationPropertyNames());
}
@Override
public ConfigurationPropertyState containsDescendantOf(ConfigurationPropertyName name) {
ConfigurationPropertyState result = super.containsDescendantOf(name);
if (result != ConfigurationPropertyState.UNKNOWN) {
return result;
}
if (this.ancestorOfCheck == PropertyMapper.DEFAULT_ANCESTOR_OF_CHECK) {
Set<ConfigurationPropertyName> descendants = getCache().getDescendants();
if (descendants != null) {
if (name.isEmpty() && !descendants.isEmpty()) {
return ConfigurationPropertyState.PRESENT;
}
return !descendants.contains(name) ? ConfigurationPropertyState.ABSENT
: ConfigurationPropertyState.PRESENT;
}
}
result = (this.containsDescendantOfCache != null) ? this.containsDescendantOfCache.get(name) : null;
if (result == null) {
result = (!ancestorOfCheck(name)) ? ConfigurationPropertyState.ABSENT : ConfigurationPropertyState.PRESENT;
if (this.containsDescendantOfCache != null) {
this.containsDescendantOfCache.put(name, result);
}
}
return result;
}
private boolean ancestorOfCheck(ConfigurationPropertyName name) {
@Nullable ConfigurationPropertyName[] candidates = getConfigurationPropertyNames();
for (ConfigurationPropertyName candidate : candidates) {
if (candidate != null && this.ancestorOfCheck.test(name, candidate)) {
return true;
}
}
return false;
}
@Nullable ConfigurationPropertyName[] getConfigurationPropertyNames() {
if (!isImmutablePropertySource()) {
return getCache().getConfigurationPropertyNames(getPropertySource().getPropertyNames());
}
@Nullable ConfigurationPropertyName[] configurationPropertyNames = this.configurationPropertyNames;
if (configurationPropertyNames == null) {
configurationPropertyNames = getCache()
.getConfigurationPropertyNames(getPropertySource().getPropertyNames());
this.configurationPropertyNames = configurationPropertyNames;
}
return configurationPropertyNames;
}
private Cache getCache() {
return this.cache.get(this::createCache, this::updateCache);
}
private Cache createCache() {
boolean immutable = isImmutablePropertySource();
boolean captureDescendants = this.ancestorOfCheck == PropertyMapper.DEFAULT_ANCESTOR_OF_CHECK;
return new Cache(getMappers(), immutable, captureDescendants, isSystemEnvironmentSource());
}
private Cache updateCache(Cache cache) {
cache.update(getPropertySource());
return cache;
}
boolean isImmutablePropertySource() {
EnumerablePropertySource<?> source = getPropertySource();
if (source instanceof PropertySourceInfo propertySourceInfo) {
return propertySourceInfo.isImmutable();
}
if (StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME.equals(source.getName())) {
return source.getSource() == System.getenv();
}
return false;
}
@Override
protected EnumerablePropertySource<?> getPropertySource() {
return (EnumerablePropertySource<?>) super.getPropertySource();
}
private static class Cache {
private static final ConfigurationPropertyName[] EMPTY_NAMES_ARRAY = {};
private final PropertyMapper[] mappers;
private final boolean immutable;
private final boolean captureDescendants;
private final boolean systemEnvironmentSource;
private volatile @Nullable Data data;
Cache(PropertyMapper[] mappers, boolean immutable, boolean captureDescendants,
boolean systemEnvironmentSource) {
this.mappers = mappers;
this.immutable = immutable;
this.captureDescendants = captureDescendants;
this.systemEnvironmentSource = systemEnvironmentSource;
}
void update(EnumerablePropertySource<?> propertySource) {
if (this.data == null || !this.immutable) {
int count = 0;
while (true) {
try {
tryUpdate(propertySource);
return;
}
catch (ConcurrentModificationException ex) {
if (count++ > 10) {
throw ex;
}
}
}
}
}
private void tryUpdate(EnumerablePropertySource<?> propertySource) {
Data data = this.data;
String[] lastUpdated = (data != null) ? data.lastUpdated() : null;
String[] propertyNames = propertySource.getPropertyNames();
if (lastUpdated != null && Arrays.equals(lastUpdated, propertyNames)) {
return;
}
int size = propertyNames.length;
Map<ConfigurationPropertyName, Set<String>> mappings = cloneOrCreate(
(data != null) ? data.mappings() : null, size);
Map<String, ConfigurationPropertyName> reverseMappings = cloneOrCreate(
(data != null) ? data.reverseMappings() : null, size);
Set<ConfigurationPropertyName> descendants = (!this.captureDescendants) ? null : new HashSet<>();
Map<String, Object> systemEnvironmentCopy = (!this.systemEnvironmentSource) ? null
: copySource(propertySource);
for (PropertyMapper propertyMapper : this.mappers) {
for (String propertyName : propertyNames) {
if (!reverseMappings.containsKey(propertyName)) {
ConfigurationPropertyName configurationPropertyName = propertyMapper.map(propertyName);
if (configurationPropertyName != null && !configurationPropertyName.isEmpty()) {
add(mappings, configurationPropertyName, propertyName);
reverseMappings.put(propertyName, configurationPropertyName);
}
}
}
}
for (String propertyName : propertyNames) {
addParents(descendants, reverseMappings.get(propertyName));
}
ConfigurationPropertyName[] configurationPropertyNames = this.immutable
? reverseMappings.values().toArray(new ConfigurationPropertyName[0]) : null;
lastUpdated = this.immutable ? null : propertyNames;
this.data = new Data(mappings, reverseMappings, descendants, configurationPropertyNames,
systemEnvironmentCopy, lastUpdated);
}
@SuppressWarnings("unchecked")
private HashMap<String, Object> copySource(EnumerablePropertySource<?> propertySource) {
return new HashMap<>((Map<String, Object>) propertySource.getSource());
}
private <K, V> Map<K, V> cloneOrCreate(@Nullable Map<K, V> source, int size) {
return (source != null) ? new LinkedHashMap<>(source) : new LinkedHashMap<>(size);
}
private void addParents(@Nullable Set<ConfigurationPropertyName> descendants,
@Nullable ConfigurationPropertyName name) {
if (descendants == null || name == null || name.isEmpty()) {
return;
}
ConfigurationPropertyName parent = name.getParent();
while (!parent.isEmpty()) {
if (!descendants.add(parent)) {
return;
}
parent = parent.getParent();
}
}
private <K, T> void add(Map<K, Set<T>> map, K key, T value) {
map.computeIfAbsent(key, (k) -> new HashSet<>()).add(value);
}
Set<String> getMapped(ConfigurationPropertyName configurationPropertyName) {
Data data = this.data;
Assert.state(data != null, "'data' must not be null");
return data.mappings().getOrDefault(configurationPropertyName, Collections.emptySet());
}
@Nullable ConfigurationPropertyName[] getConfigurationPropertyNames(String[] propertyNames) {
Data data = this.data;
Assert.state(data != null, "'data' must not be null");
@Nullable ConfigurationPropertyName[] names = data.configurationPropertyNames();
if (names != null) {
return names;
}
Map<String, ConfigurationPropertyName> reverseMappings = data.reverseMappings();
if (reverseMappings == null || reverseMappings.isEmpty()) {
return EMPTY_NAMES_ARRAY;
}
names = new ConfigurationPropertyName[propertyNames.length];
for (int i = 0; i < propertyNames.length; i++) {
names[i] = reverseMappings.get(propertyNames[i]);
}
return names;
}
@Nullable Set<ConfigurationPropertyName> getDescendants() {
Data data = this.data;
Assert.state(data != null, "'data' must not be null");
return data.descendants();
}
@Nullable Object getSystemEnvironmentProperty(String name) {
Data data = this.data;
Assert.state(data != null, "'data' must not be null");
Map<String, Object> systemEnvironmentCopy = data.systemEnvironmentCopy();
Assert.state(systemEnvironmentCopy != null, "'systemEnvironmentCopy' must not be null");
return systemEnvironmentCopy.get(name);
}
private record Data(Map<ConfigurationPropertyName, Set<String>> mappings,
Map<String, ConfigurationPropertyName> reverseMappings,
@Nullable Set<ConfigurationPropertyName> descendants,
ConfigurationPropertyName @Nullable [] configurationPropertyNames,
@Nullable Map<String, Object> systemEnvironmentCopy, String @Nullable [] lastUpdated) {
}
}
/**
* ConfigurationPropertyNames iterator backed by an array.
*/
private static class ConfigurationPropertyNamesIterator implements Iterator<ConfigurationPropertyName> {
private final @Nullable ConfigurationPropertyName[] names;
private int index;
ConfigurationPropertyNamesIterator(@Nullable ConfigurationPropertyName[] names) {
this.names = names;
}
@Override
public boolean hasNext() {
skipNulls();
return this.index < this.names.length;
}
@Override
public @Nullable ConfigurationPropertyName next() {
skipNulls();
if (this.index >= this.names.length) {
throw new NoSuchElementException();
}
return this.names[this.index++];
}
private void skipNulls() {
while (this.index < this.names.length) {
if (this.names[this.index] != null) {
return;
}
this.index++;
}
}
}
}
# torch.onnx.testing
```{eval-rst}
.. automodule:: torch.onnx.testing
```
```{eval-rst}
.. autofunction:: torch.onnx.testing.assert_onnx_program
```
package n1

// X is this package's exported marker string; its value matches the package name.
var X = "n1"
__all__ = ["LoggingCallbackHandler"]
import logging
from typing import Any
from uuid import UUID
from langchain_core.exceptions import TracerException
from langchain_core.tracers.stdout import FunctionCallbackHandler
from langchain_core.utils.input import get_bolded_text, get_colored_text
from typing_extensions import override
class LoggingCallbackHandler(FunctionCallbackHandler):
    """Tracer that logs via the input Logger."""

    name: str = "logging_callback_handler"

    def __init__(
        self,
        logger: logging.Logger,
        log_level: int = logging.INFO,
        extra: dict | None = None,
        **kwargs: Any,
    ) -> None:
        """Initialize the LoggingCallbackHandler.

        Args:
            logger: the logger to use for logging
            log_level: the logging level (default: logging.INFO)
            extra: the extra context to log (default: None)
            **kwargs: additional keyword arguments.
        """
        # Resolve the level once, e.g. logging.INFO -> logger.info.
        level_name = logging.getLevelName(log_level).lower()
        emit = getattr(logger, level_name)

        def _write(text: str) -> None:
            emit(text, extra=extra)

        super().__init__(function=_write, **kwargs)

    @override
    def on_text(
        self,
        text: str,
        *,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        **kwargs: Any,
    ) -> None:
        # Breadcrumbs are best-effort: an unknown run just yields an empty prefix.
        try:
            run = self._get_run(run_id=run_id)
            crumbs = f"[{self.get_breadcrumbs(run=run)}] "
        except TracerException:
            crumbs = ""
        header = get_colored_text("[text]", color="blue")
        label = get_bolded_text(f"{crumbs}New text:")
        self.function_callback(f"{header} {label}\n{text}")
import math
from sverchok.utils.csg_geom import *
class CSG(object):
    """
    Constructive Solid Geometry (CSG) solid, represented as a list of polygons.

    ## License
    Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
    Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
    """

    def __init__(self):
        # Polygons making up this solid.
        self.polygons = []

    @classmethod
    def fromPolygons(cls, polygons):
        """Wrap an existing list of polygons in a CSG solid (no copying)."""
        csg = CSG()
        csg.polygons = polygons
        return csg

    def clone(self):
        """Return a copy of this solid with every polygon cloned.

        Bug fix: the original assigned a lazy ``map`` object; on Python 3 that
        is not a list and is exhausted after a single traversal. Materialize a
        real list instead.
        """
        csg = CSG()
        csg.polygons = [p.clone() for p in self.polygons]
        return csg

    def toPolygons(self):
        """Return the underlying polygon list."""
        return self.polygons

    def union(self, csg):
        """Return a new solid covering space in either this solid or *csg*."""
        a = CSGNode(self.clone().polygons)
        b = CSGNode(csg.clone().polygons)
        a.clipTo(b)
        b.clipTo(a)
        b.invert()
        b.clipTo(a)
        b.invert()
        a.build(b.allPolygons())
        return CSG.fromPolygons(a.allPolygons())

    def subtract(self, csg):
        """Return a new solid covering space in this solid but not in *csg*."""
        a = CSGNode(self.clone().polygons)
        b = CSGNode(csg.clone().polygons)
        a.invert()
        a.clipTo(b)
        b.clipTo(a)
        b.invert()
        b.clipTo(a)
        b.invert()
        a.build(b.allPolygons())
        a.invert()
        return CSG.fromPolygons(a.allPolygons())

    def intersect(self, csg):
        """Return a new solid covering space common to this solid and *csg*."""
        a = CSGNode(self.clone().polygons)
        b = CSGNode(csg.clone().polygons)
        a.invert()
        b.clipTo(a)
        b.invert()
        a.clipTo(b)
        b.clipTo(a)
        a.build(b.allPolygons())
        a.invert()
        return CSG.fromPolygons(a.allPolygons())

    def inverse(self):
        """
        Return a new CSG solid with solid and empty space switched. This solid is
        not modified.

        Bug fix: the original used ``map`` purely for its side effect; on
        Python 3 the lazy map object was never consumed, so no polygon was
        actually flipped. Use an explicit loop.
        """
        csg = self.clone()
        for p in csg.polygons:
            p.flip()
        return csg

    @classmethod
    def Obj_from_pydata(cls, verts, faces):
        """Build a CSG solid from pydata: vertex coordinates plus index faces."""
        polygons = []
        for face in faces:
            polyg = [CSGVertex(verts[idx]) for idx in face]
            polygons.append(CSGPolygon(polyg))
        return CSG.fromPolygons(polygons)
# -*- coding: utf-8 -*-
import struct
import numpy as np
from aston.trace.Trace import AstonFrame
from aston.tracefile.TraceFile import TraceFile
from aston.tracefile.Common import find_offset
class InficonHapsite(TraceFile):
    # File-type registration for the TraceFile machinery.
    ext = 'HPS'        # file extension handled by this reader
    mgc = '0403'       # magic number identifying the format
    traces = ['#ms']   # trace types this file provides

    def _ions(self, f):
        """
        This is a generator that returns the mzs being measured during
        each time segment, one segment at a time.

        ``f`` is an open binary file handle; the generator carefully saves and
        restores the file position around each ``yield`` so the caller can
        interleave its own reads on the same handle.
        """
        outside_pos = f.tell()
        doff = find_offset(f, 4 * b'\xff' + 'HapsSearch'.encode('ascii'))
        # actual end of prev section is 34 bytes before, but assume 1 rec
        f.seek(doff - 62)
        # seek backwards to find the FFFFFFFF header
        while True:
            f.seek(f.tell() - 8)
            if f.read(4) == 4 * b'\xff':
                break
        f.seek(f.tell() + 64)
        nsegments = struct.unpack('<I', f.read(4))[0]
        for _ in range(nsegments):
            # first 32 bytes are segment name, rest are something else?
            f.seek(f.tell() + 96)
            nions = struct.unpack('<I', f.read(4))[0]
            ions = []
            for _ in range(nions):
                # TODO: check that itype is actually a SIM/full scan switch
                i1, i2, _, _, _, _, itype, _ = \
                    struct.unpack('<' + 8 * 'I', f.read(32))
                # NOTE(review): m/z values appear to be stored as ints * 100 —
                # hence the / 100. scaling; confirm against format docs.
                if itype == 0:  # SIM
                    ions.append(i1 / 100.)
                else:  # full scan
                    # TODO: this might be a little hacky?
                    # ideally we would need to know n for this, e.g.:
                    # ions += np.linspace(i1 / 100, i2 / 100, n).tolist()
                    ions += np.arange(i1 / 100., i2 / 100. + 1, 1).tolist()
            # save the file position and load the position
            # that we were at before we started this code
            inside_pos = f.tell()
            f.seek(outside_pos)
            yield ions
            outside_pos = f.tell()
            f.seek(inside_pos)
        f.seek(outside_pos)

    @property
    def data(self):
        """Parse the scan records into an AstonFrame (times x mzs matrix)."""
        # TODO: handle skip mass ranges
        with open(self.filename, 'rb') as f:
            # read in the time segments/mz ranges for the run
            # read in the data itself
            doff = find_offset(f, 4 * b'\xff' + 'HapsScan'.encode('ascii'))
            if doff is None:
                # no scan section found; property yields None
                return
            f.seek(doff - 20)
            data_end = doff + struct.unpack('<I', f.read(4))[0] + 55
            f.seek(doff + 56)
            times, abns, mzs = [], [], []
            cur_seg = None
            mz_reader = self._ions(f)
            while f.tell() <= data_end:
                # record info looks like a standard format
                n, t, _, recs, _, seg = struct.unpack('<IiHHHH', f.read(16))
                if cur_seg != seg:
                    # if we've switched segments, update the list of mzs
                    try:
                        cur_mzs = next(mz_reader)
                    except StopIteration:
                        break
                    # extend the global mz list with any new mzs, and map this
                    # segment's mzs onto column indexes of the global list
                    mzs += set(cur_mzs).difference(mzs)
                    mz_idx = [mzs.index(i) for i in cur_mzs]
                    cur_seg = seg
                # just add the new time in
                times.append(t)
                # read the list of abundances
                cur_abns = struct.unpack('<' + 'f' * recs, f.read(4 * recs))
                # convert this list into an array that matches up with
                # whatever mzs we currently have
                empty_row = np.zeros(len(mzs))
                empty_row[mz_idx] = cur_abns
                # add that row into the list
                abns.append(empty_row)
            # convert the time from milliseconds to minutes
            times = np.array(times, dtype=float) / 60000
            # create the data array and populate it
            data = np.zeros((len(times), len(mzs)))
            for i, r in enumerate(abns):
                data[i, 0:len(r)] = r
            return AstonFrame(data, times, mzs)
# Copyright (c) 2009-2010, Michael Gorven
# Released under terms of the MIT/X/Expat Licence. See COPYING for details.
import logging
from twisted.spread import pb
from twisted.application import internet
from twisted.internet import reactor
import ibid
from ibid.source import IbidSourceFactory
from ibid.config import IntOption
from ibid.event import Event
class IbidRoot(pb.Root):
    """Twisted Perspective Broker root object exposing the bot to PB clients."""

    def __init__(self, name):
        # Source name; also reused as the synthetic sender/channel id below.
        self.name = name
        self.log = logging.getLogger('sources.%s' % name)

    def respond(self, event):
        # Extract just the reply text from each response dict.
        return [response['reply'] for response in event.responses]

    def remote_message(self, message):
        # Build a synthetic, addressed, private message event and dispatch it;
        # returns a Deferred that fires with the list of reply strings.
        event = Event(self.name, u'message')
        event.sender['connection'] = event.sender['id'] = event.sender['nick'] = event.channel = self.name
        event.addressed = True
        event.public = False
        # Python 2: decode incoming bytes to unicode, replacing bad sequences.
        event.message = unicode(message, 'utf-8', 'replace')
        self.log.debug(u'message("%s")' % event.message)
        return ibid.dispatcher.dispatch(event).addCallback(self.respond)

    def remote_get_plugin(self, plugin):
        # Expose a plugin's RPC object to the remote PB caller.
        self.log.debug(u'get_plugin("%s")' % plugin)
        return ibid.rpc[plugin]
class SourceFactory(IbidSourceFactory):
    """Ibid source that serves the bot over Perspective Broker on a TCP port."""

    supports = ('multiline',)

    # Port number to listen on (default 8789).
    port = IntOption('port', 'Port number to listen on', 8789)

    def setServiceParent(self, service):
        root = pb.PBServerFactory(IbidRoot(self.name))
        if service:
            # Running under a service hierarchy (e.g. twistd): attach as a child.
            return internet.TCPServer(self.port, root).setServiceParent(service)
        else:
            # Standalone: bind the listener directly on the reactor.
            reactor.listenTCP(self.port, root)
# vi: set et sta sw=4 ts=4:
/*
* NFA utilities.
* This file is #included by regcomp.c.
*
* Copyright (c) 1998, 1999 Henry Spencer. All rights reserved.
*
* Development of this software was funded, in part, by Cray Research Inc.,
* UUNET Communications Services Inc., Sun Microsystems Inc., and Scriptics
* Corporation, none of whom are responsible for the results. The author
* thanks all of them.
*
* Redistribution and use in source and binary forms -- with or without
* modification -- are permitted for any purpose, provided that
* redistributions in source form retain this entire copyright notice and
* indicate the origin and nature of any modifications.
*
* I'd appreciate being given credit for this package in the documentation
* of software which uses it, but that is not a requirement.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* HENRY SPENCER BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* src/backend/regex/regc_nfa.c
*
*
* One or two things that technically ought to be in here
* are actually in color.c, thanks to some incestuous relationships in
* the color chains.
*/
#define NISERR() VISERR(nfa->v)
#define NERR(e) VERR(nfa->v, (e))
/*
 * newnfa - set up an NFA
 */
static struct nfa *				/* the NFA, or NULL */
newnfa(struct vars *v,
	   struct colormap *cm,
	   struct nfa *parent)		/* NULL if primary NFA */
{
	struct nfa *nfa;

	nfa = (struct nfa *) MALLOC(sizeof(struct nfa));
	if (nfa == NULL)
	{
		ERR(REG_ESPACE);
		return NULL;
	}

	/* Make the NFA minimally valid, so freenfa() will behave sanely */
	nfa->states = NULL;
	nfa->slast = NULL;
	nfa->freestates = NULL;
	nfa->freearcs = NULL;
	nfa->lastsb = NULL;
	nfa->lastab = NULL;
	nfa->lastsbused = 0;
	nfa->lastabused = 0;
	nfa->nstates = 0;
	nfa->cm = cm;
	nfa->v = v;
	nfa->bos[0] = nfa->bos[1] = COLORLESS;
	nfa->eos[0] = nfa->eos[1] = COLORLESS;
	nfa->flags = 0;
	nfa->minmatchall = nfa->maxmatchall = -1;
	nfa->parent = parent;		/* Precedes newfstate so parent is valid. */

	/* Create required infrastructure */
	nfa->post = newfstate(nfa, '@');	/* number 0 */
	nfa->pre = newfstate(nfa, '>');		/* number 1 */
	nfa->init = newstate(nfa);	/* may become invalid later */
	nfa->final = newstate(nfa);
	if (ISERR())
	{
		freenfa(nfa);
		return NULL;
	}
	/* pre reaches init via a rainbow of all colors plus two '^' arcs */
	rainbow(nfa, nfa->cm, PLAIN, COLORLESS, nfa->pre, nfa->init);
	newarc(nfa, '^', 1, nfa->pre, nfa->init);
	newarc(nfa, '^', 0, nfa->pre, nfa->init);
	/* final reaches post likewise, via a rainbow plus two '$' arcs */
	rainbow(nfa, nfa->cm, PLAIN, COLORLESS, nfa->final, nfa->post);
	newarc(nfa, '$', 1, nfa->final, nfa->post);
	newarc(nfa, '$', 0, nfa->final, nfa->post);
	if (ISERR())
	{
		freenfa(nfa);
		return NULL;
	}
	return nfa;
}
/*
 * freenfa - free an entire NFA
 *
 * All states and arcs live inside the batch blocks, so releasing the two
 * batch chains (and the nfa struct itself) releases everything.
 */
static void
freenfa(struct nfa *nfa)
{
	while (nfa->lastsb != NULL)
	{
		struct statebatch *nextsb = nfa->lastsb->next;

		nfa->v->spaceused -= STATEBATCHSIZE(nfa->lastsb->nstates);
		FREE(nfa->lastsb);
		nfa->lastsb = nextsb;
	}
	while (nfa->lastab != NULL)
	{
		struct arcbatch *nextab = nfa->lastab->next;

		nfa->v->spaceused -= ARCBATCHSIZE(nfa->lastab->narcs);
		FREE(nfa->lastab);
		nfa->lastab = nextab;
	}
	nfa->nstates = -1;			/* poison the struct before freeing it */
	FREE(nfa);
}
/*
 * newstate - allocate an NFA state, with zero flag value
 */
static struct state *			/* NULL on error */
newstate(struct nfa *nfa)
{
	struct state *s;

	/*
	 * This is a handy place to check for operation cancel during regex
	 * compilation, since no code path will go very long without making a new
	 * state or arc.
	 */
	INTERRUPT(nfa->v->re);

	/* first, recycle anything that's on the freelist */
	if (nfa->freestates != NULL)
	{
		s = nfa->freestates;
		nfa->freestates = s->next;
	}
	/* otherwise, is there anything left in the last statebatch? */
	else if (nfa->lastsb != NULL && nfa->lastsbused < nfa->lastsb->nstates)
	{
		s = &nfa->lastsb->s[nfa->lastsbused++];
	}
	/* otherwise, need to allocate a new statebatch */
	else
	{
		struct statebatch *newSb;
		size_t		nstates;

		/* enforce the overall compile-time memory budget */
		if (nfa->v->spaceused >= REG_MAX_COMPILE_SPACE)
		{
			NERR(REG_ETOOBIG);
			return NULL;
		}
		/* batches double in size, capped at MAXSBSIZE */
		nstates = (nfa->lastsb != NULL) ? nfa->lastsb->nstates * 2 : FIRSTSBSIZE;
		if (nstates > MAXSBSIZE)
			nstates = MAXSBSIZE;
		newSb = (struct statebatch *) MALLOC(STATEBATCHSIZE(nstates));
		if (newSb == NULL)
		{
			NERR(REG_ESPACE);
			return NULL;
		}
		nfa->v->spaceused += STATEBATCHSIZE(nstates);
		newSb->nstates = nstates;
		newSb->next = nfa->lastsb;
		nfa->lastsb = newSb;
		nfa->lastsbused = 1;
		s = &newSb->s[0];
	}

	assert(nfa->nstates >= 0);
	s->no = nfa->nstates++;
	s->flag = 0;
	if (nfa->states == NULL)
		nfa->states = s;
	s->nins = 0;
	s->ins = NULL;
	s->nouts = 0;
	s->outs = NULL;
	s->tmp = NULL;
	s->next = NULL;
	/* append to the tail of the NFA's doubly-linked list of live states */
	if (nfa->slast != NULL)
	{
		assert(nfa->slast->next == NULL);
		nfa->slast->next = s;
	}
	s->prev = nfa->slast;
	nfa->slast = s;
	return s;
}
/*
 * newfstate - allocate an NFA state with a specified flag value
 */
static struct state *			/* NULL on error */
newfstate(struct nfa *nfa, int flag)
{
	struct state *result = newstate(nfa);

	/* newstate() reports any error; just decorate the state on success */
	if (result != NULL)
		result->flag = (char) flag;
	return result;
}
/*
 * dropstate - delete a state's inarcs and outarcs and free it
 */
static void
dropstate(struct nfa *nfa,
		  struct state *s)
{
	/* detach and free every incoming arc, then every outgoing arc */
	while (s->ins != NULL)
		freearc(nfa, s->ins);
	while (s->outs != NULL)
		freearc(nfa, s->outs);
	freestate(nfa, s);
}
/*
 * freestate - free a state, which has no in-arcs or out-arcs
 */
static void
freestate(struct nfa *nfa,
		  struct state *s)
{
	assert(s != NULL);
	assert(s->nins == 0 && s->nouts == 0);

	s->no = FREESTATE;			/* mark as no longer in use */
	s->flag = 0;
	/* unlink from the NFA's doubly-linked list of live states */
	if (s->next != NULL)
		s->next->prev = s->prev;
	else
	{
		assert(s == nfa->slast);
		nfa->slast = s->prev;
	}
	if (s->prev != NULL)
		s->prev->next = s->next;
	else
	{
		assert(s == nfa->states);
		nfa->states = s->next;
	}
	s->prev = NULL;
	s->next = nfa->freestates;	/* don't delete it, put it on the free list */
	nfa->freestates = s;
}
/*
 * newarc - set up a new arc within an NFA
 *
 * This function checks to make sure that no duplicate arcs are created.
 * In general we never want duplicates.
 *
 * However: in principle, a RAINBOW arc is redundant with any plain arc
 * (unless that arc is for a pseudocolor).  But we don't try to recognize
 * that redundancy, either here or in allied operations such as moveins().
 * The pseudocolor consideration makes that more costly than it seems worth.
 */
static void
newarc(struct nfa *nfa,
	   int t,
	   color co,
	   struct state *from,
	   struct state *to)
{
	struct arc *scan;

	assert(from != NULL && to != NULL);

	/*
	 * Handy spot for a cancel check: every compilation path soon makes a
	 * new state or arc.
	 */
	INTERRUPT(nfa->v->re);

	/* look for an identical arc, walking whichever chain is shorter */
	if (from->nouts <= to->nins)
	{
		for (scan = from->outs; scan != NULL; scan = scan->outchain)
		{
			if (scan->to == to && scan->co == co && scan->type == t)
				return;			/* duplicate: nothing to do */
		}
	}
	else
	{
		for (scan = to->ins; scan != NULL; scan = scan->inchain)
		{
			if (scan->from == from && scan->co == co && scan->type == t)
				return;			/* duplicate: nothing to do */
		}
	}

	/* no duplicate found, so create the arc */
	createarc(nfa, t, co, from, to);
}
/*
 * createarc - create a new arc within an NFA
 *
 * This function must *only* be used after verifying that there is no existing
 * identical arc (same type/color/from/to).
 */
static void
createarc(struct nfa *nfa,
		  int t,
		  color co,
		  struct state *from,
		  struct state *to)
{
	struct arc *a;

	a = allocarc(nfa);
	if (NISERR())
		return;
	assert(a != NULL);

	a->type = t;
	a->co = co;
	a->to = to;
	a->from = from;

	/*
	 * Put the new arc on the beginning, not the end, of the chains; it's
	 * simpler here, and freearc() is the same cost either way.  See also the
	 * logic in moveins() and its cohorts, as well as fixempties().
	 */
	a->inchain = to->ins;
	a->inchainRev = NULL;
	if (to->ins)
		to->ins->inchainRev = a;
	to->ins = a;
	a->outchain = from->outs;
	a->outchainRev = NULL;
	if (from->outs)
		from->outs->outchainRev = a;
	from->outs = a;

	from->nouts++;
	to->nins++;

	/* only the primary (parent == NULL) NFA maintains color chains */
	if (COLORED(a) && nfa->parent == NULL)
		colorchain(nfa->cm, a);
}
/*
 * allocarc - allocate a new arc within an NFA
 */
static struct arc *				/* NULL for failure */
allocarc(struct nfa *nfa)
{
	struct arc *a;

	/* first, recycle anything that's on the freelist */
	if (nfa->freearcs != NULL)
	{
		a = nfa->freearcs;
		nfa->freearcs = a->freechain;
	}
	/* otherwise, is there anything left in the last arcbatch? */
	else if (nfa->lastab != NULL && nfa->lastabused < nfa->lastab->narcs)
	{
		a = &nfa->lastab->a[nfa->lastabused++];
	}
	/* otherwise, need to allocate a new arcbatch */
	else
	{
		struct arcbatch *newAb;
		size_t		narcs;

		/* enforce the overall compile-time memory budget */
		if (nfa->v->spaceused >= REG_MAX_COMPILE_SPACE)
		{
			NERR(REG_ETOOBIG);
			return NULL;
		}
		/* batches double in size, capped at MAXABSIZE */
		narcs = (nfa->lastab != NULL) ? nfa->lastab->narcs * 2 : FIRSTABSIZE;
		if (narcs > MAXABSIZE)
			narcs = MAXABSIZE;
		newAb = (struct arcbatch *) MALLOC(ARCBATCHSIZE(narcs));
		if (newAb == NULL)
		{
			NERR(REG_ESPACE);
			return NULL;
		}
		nfa->v->spaceused += ARCBATCHSIZE(narcs);
		newAb->narcs = narcs;
		newAb->next = nfa->lastab;
		nfa->lastab = newAb;
		nfa->lastabused = 1;
		a = &newAb->a[0];
	}

	return a;
}
/*
 * freearc - free an arc
 */
static void
freearc(struct nfa *nfa,
		struct arc *victim)
{
	struct state *from = victim->from;
	struct state *to = victim->to;
	struct arc *predecessor;

	assert(victim->type != 0);

	/* take it off color chain if necessary */
	if (COLORED(victim) && nfa->parent == NULL)
		uncolorchain(nfa->cm, victim);

	/* take it off source's out-chain */
	assert(from != NULL);
	predecessor = victim->outchainRev;
	if (predecessor == NULL)
	{
		/* victim is the chain head */
		assert(from->outs == victim);
		from->outs = victim->outchain;
	}
	else
	{
		assert(predecessor->outchain == victim);
		predecessor->outchain = victim->outchain;
	}
	if (victim->outchain != NULL)
	{
		assert(victim->outchain->outchainRev == victim);
		victim->outchain->outchainRev = predecessor;
	}
	from->nouts--;

	/* take it off target's in-chain */
	assert(to != NULL);
	predecessor = victim->inchainRev;
	if (predecessor == NULL)
	{
		/* victim is the chain head */
		assert(to->ins == victim);
		to->ins = victim->inchain;
	}
	else
	{
		assert(predecessor->inchain == victim);
		predecessor->inchain = victim->inchain;
	}
	if (victim->inchain != NULL)
	{
		assert(victim->inchain->inchainRev == victim);
		victim->inchain->inchainRev = predecessor;
	}
	to->nins--;

	/* clean up and place on NFA's free list */
	victim->type = 0;
	victim->from = NULL;		/* precautions... */
	victim->to = NULL;
	victim->inchain = NULL;
	victim->inchainRev = NULL;
	victim->outchain = NULL;
	victim->outchainRev = NULL;
	victim->freechain = nfa->freearcs;
	nfa->freearcs = victim;
}
/*
 * changearcsource - flip an arc to have a different from state
 *
 * Caller must have verified that there is no pre-existing duplicate arc.
 */
static void
changearcsource(struct arc *a, struct state *newfrom)
{
	struct state *oldfrom = a->from;
	struct arc *predecessor;

	assert(oldfrom != newfrom);

	/* take it off old source's out-chain */
	assert(oldfrom != NULL);
	predecessor = a->outchainRev;
	if (predecessor == NULL)
	{
		/* arc was the head of the old chain */
		assert(oldfrom->outs == a);
		oldfrom->outs = a->outchain;
	}
	else
	{
		assert(predecessor->outchain == a);
		predecessor->outchain = a->outchain;
	}
	if (a->outchain != NULL)
	{
		assert(a->outchain->outchainRev == a);
		a->outchain->outchainRev = predecessor;
	}
	oldfrom->nouts--;

	a->from = newfrom;

	/* prepend it to new source's out-chain */
	a->outchain = newfrom->outs;
	a->outchainRev = NULL;
	if (newfrom->outs)
		newfrom->outs->outchainRev = a;
	newfrom->outs = a;
	newfrom->nouts++;
}
/*
 * changearctarget - flip an arc to have a different to state
 *
 * Caller must have verified that there is no pre-existing duplicate arc.
 */
static void
changearctarget(struct arc *a, struct state *newto)
{
	struct state *oldto = a->to;
	struct arc *predecessor;

	assert(oldto != newto);

	/* take it off old target's in-chain */
	assert(oldto != NULL);
	predecessor = a->inchainRev;
	if (predecessor == NULL)
	{
		/* arc was the head of the old chain */
		assert(oldto->ins == a);
		oldto->ins = a->inchain;
	}
	else
	{
		assert(predecessor->inchain == a);
		predecessor->inchain = a->inchain;
	}
	if (a->inchain != NULL)
	{
		assert(a->inchain->inchainRev == a);
		a->inchain->inchainRev = predecessor;
	}
	oldto->nins--;

	a->to = newto;

	/* prepend it to new target's in-chain */
	a->inchain = newto->ins;
	a->inchainRev = NULL;
	if (newto->ins)
		newto->ins->inchainRev = a;
	newto->ins = a;
	newto->nins++;
}
/*
 * hasnonemptyout - Does state have a non-EMPTY out arc?
 */
static int
hasnonemptyout(struct state *s)
{
	struct arc *scan = s->outs;

	/* walk the out-chain until a non-EMPTY arc turns up */
	while (scan != NULL)
	{
		if (scan->type != EMPTY)
			return 1;
		scan = scan->outchain;
	}
	return 0;
}
/*
 * findarc - find arc, if any, from given source with given type and color
 * If there is more than one such arc, the result is random.
 */
static struct arc *
findarc(struct state *s,
		int type,
		color co)
{
	struct arc *candidate = s->outs;

	/* linear scan of the out-chain; NULL falls out naturally on miss */
	while (candidate != NULL)
	{
		if (candidate->type == type && candidate->co == co)
			break;
		candidate = candidate->outchain;
	}
	return candidate;
}
/*
 * cparc - allocate a new arc within an NFA, copying details from old one
 */
static void
cparc(struct nfa *nfa,
	  struct arc *oa,
	  struct state *from,
	  struct state *to)
{
	/* newarc() handles duplicate suppression and error reporting */
	newarc(nfa, oa->type, oa->co, from, to);
}
/*
 * sortins - sort the in arcs of a state by from/color/type
 */
static void
sortins(struct nfa *nfa,
		struct state *s)
{
	struct arc **sortarray;
	struct arc *a;
	int			n = s->nins;
	int			i;

	if (n <= 1)
		return;					/* nothing to do */
	/* make an array of arc pointers ... */
	sortarray = (struct arc **) MALLOC(n * sizeof(struct arc *));
	if (sortarray == NULL)
	{
		NERR(REG_ESPACE);
		return;
	}
	i = 0;
	for (a = s->ins; a != NULL; a = a->inchain)
		sortarray[i++] = a;
	assert(i == n);
	/* ... sort the array */
	qsort(sortarray, n, sizeof(struct arc *), sortins_cmp);
	/* ... and rebuild arc list in order */
	/* it seems worth special-casing first and last items to simplify loop */
	a = sortarray[0];
	s->ins = a;
	a->inchain = sortarray[1];
	a->inchainRev = NULL;
	for (i = 1; i < n - 1; i++)
	{
		a = sortarray[i];
		a->inchain = sortarray[i + 1];
		a->inchainRev = sortarray[i - 1];
	}
	/* last element terminates the chain (i == n - 1 here) */
	a = sortarray[i];
	a->inchain = NULL;
	a->inchainRev = sortarray[i - 1];
	FREE(sortarray);
}
/*
 * sortins_cmp - qsort comparator for sortins: orders arcs by source
 * state number, then color, then type
 */
static int
sortins_cmp(const void *a, const void *b)
{
	const struct arc *left = *((const struct arc *const *) a);
	const struct arc *right = *((const struct arc *const *) b);

	/* compare fields in decreasing order of likelihood of difference */
	if (left->from->no != right->from->no)
		return (left->from->no < right->from->no) ? -1 : 1;
	if (left->co != right->co)
		return (left->co < right->co) ? -1 : 1;
	if (left->type != right->type)
		return (left->type < right->type) ? -1 : 1;
	return 0;
}
/*
 * sortouts - sort the out arcs of a state by to/color/type
 *
 * Mirror image of sortins: rebuilds the out-chain in sorted order for use
 * by the sort-merge code paths (see moveouts/copyouts).  On out-of-memory
 * we record an error and leave the chain unsorted.
 */
static void
sortouts(struct nfa *nfa,
		 struct state *s)
{
	struct arc **sortarray;
	struct arc *a;
	int			n = s->nouts;
	int			i;

	if (n <= 1)
		return;					/* nothing to do */
	/* make an array of arc pointers ... */
	sortarray = (struct arc **) MALLOC(n * sizeof(struct arc *));
	if (sortarray == NULL)
	{
		NERR(REG_ESPACE);
		return;
	}
	i = 0;
	for (a = s->outs; a != NULL; a = a->outchain)
		sortarray[i++] = a;
	assert(i == n);
	/* ... sort the array */
	qsort(sortarray, n, sizeof(struct arc *), sortouts_cmp);
	/* ... and rebuild arc list in order */
	/* it seems worth special-casing first and last items to simplify loop */
	/* note n >= 2 is guaranteed here, so sortarray[1] is valid */
	a = sortarray[0];
	s->outs = a;
	a->outchain = sortarray[1];
	a->outchainRev = NULL;
	for (i = 1; i < n - 1; i++)
	{
		a = sortarray[i];
		a->outchain = sortarray[i + 1];
		a->outchainRev = sortarray[i - 1];
	}
	/* i == n - 1 after the loop, even when the loop body never ran */
	a = sortarray[i];
	a->outchain = NULL;
	a->outchainRev = sortarray[i - 1];
	FREE(sortarray);
}
/*
 * sortouts_cmp - qsort comparator for sortouts: orders arcs by target
 * state number, then color, then type
 */
static int
sortouts_cmp(const void *a, const void *b)
{
	const struct arc *left = *((const struct arc *const *) a);
	const struct arc *right = *((const struct arc *const *) b);

	/* compare fields in decreasing order of likelihood of difference */
	if (left->to->no != right->to->no)
		return (left->to->no < right->to->no) ? -1 : 1;
	if (left->co != right->co)
		return (left->co < right->co) ? -1 : 1;
	if (left->type != right->type)
		return (left->type < right->type) ? -1 : 1;
	return 0;
}
/*
 * Common decision logic about whether to use arc-by-arc operations or
 * sort/merge.  If there's just a few source arcs we cannot recoup the
 * cost of sorting the destination arc list, no matter how large it is.
 * Otherwise, limit the number of arc-by-arc comparisons to about 1000
 * (a somewhat arbitrary choice, but the breakeven point would probably
 * be machine dependent anyway).
 *
 * Evaluates to nonzero when the sort/merge path should be used; note the
 * 32 * 32 product matches the ~1000-comparison budget mentioned above.
 */
#define BULK_ARC_OP_USE_SORT(nsrcarcs, ndestarcs) \
	((nsrcarcs) < 4 ? 0 : ((nsrcarcs) > 32 || (ndestarcs) > 32))
/*
 * moveins - move all in arcs of a state to another state
 *
 * You might think this could be done better by just updating the
 * existing arcs, and you would be right if it weren't for the need
 * for duplicate suppression, which makes it easier to just make new
 * ones to exploit the suppression built into newarc.
 *
 * However, if we have a whole lot of arcs to deal with, retail duplicate
 * checks become too slow.  In that case we proceed by sorting and merging
 * the arc lists, and then we can indeed just update the arcs in-place.
 *
 * On the other hand, it's also true that this is frequently called with
 * a brand-new newState that has no existing in-arcs.  In that case,
 * de-duplication is unnecessary, so we can just blindly move all the arcs.
 *
 * On exit, oldState has no in arcs at all.
 */
static void
moveins(struct nfa *nfa,
		struct state *oldState,
		struct state *newState)
{
	assert(oldState != newState);

	if (newState->nins == 0)
	{
		/* No need for de-duplication */
		struct arc *a;

		while ((a = oldState->ins) != NULL)
		{
			createarc(nfa, a->type, a->co, a->from, newState);
			freearc(nfa, a);
		}
	}
	else if (!BULK_ARC_OP_USE_SORT(oldState->nins, newState->nins))
	{
		/* With not too many arcs, just do them one at a time */
		struct arc *a;

		while ((a = oldState->ins) != NULL)
		{
			/* cparc goes through newarc, which suppresses duplicates */
			cparc(nfa, a, a->from, newState);
			freearc(nfa, a);
		}
	}
	else
	{
		/*
		 * With many arcs, use a sort-merge approach.  Note changearctarget()
		 * will put the arc onto the front of newState's chain, so it does not
		 * break our walk through the sorted part of the chain.
		 */
		struct arc *oa;
		struct arc *na;

		/*
		 * Because we bypass newarc() in this code path, we'd better include a
		 * cancel check.
		 */
		INTERRUPT(nfa->v->re);

		sortins(nfa, oldState);
		sortins(nfa, newState);
		if (NISERR())
			return;				/* might have failed to sort */
		oa = oldState->ins;
		na = newState->ins;
		while (oa != NULL && na != NULL)
		{
			struct arc *a = oa;

			switch (sortins_cmp(&oa, &na))
			{
				case -1:
					/* newState does not have anything matching oa */
					oa = oa->inchain;

					/*
					 * Rather than doing createarc+freearc, we can just unlink
					 * and relink the existing arc struct.
					 */
					changearctarget(a, newState);
					break;
				case 0:
					/* match, advance in both lists */
					oa = oa->inchain;
					na = na->inchain;
					/* ... and drop duplicate arc from oldState */
					freearc(nfa, a);
					break;
				case +1:
					/* advance only na; oa might have a match later */
					na = na->inchain;
					break;
				default:
					assert(NOTREACHED);
			}
		}
		while (oa != NULL)
		{
			/* newState does not have anything matching oa */
			struct arc *a = oa;

			oa = oa->inchain;
			changearctarget(a, newState);
		}
	}

	assert(oldState->nins == 0);
	assert(oldState->ins == NULL);
}
/*
 * copyins - copy in arcs of a state to another state
 *
 * Unlike moveins, the original arcs are left in place on oldState.
 *
 * The comments for moveins() apply here as well.  However, in current
 * usage, this is *only* called with brand-new target states, so that
 * only the "no need for de-duplication" code path is ever reached.
 * We keep the rest #ifdef'd out in case it's needed in the future.
 */
static void
copyins(struct nfa *nfa,
		struct state *oldState,
		struct state *newState)
{
	assert(oldState != newState);
	assert(newState->nins == 0);	/* see comment above */

	if (newState->nins == 0)
	{
		/* No need for de-duplication */
		struct arc *a;

		for (a = oldState->ins; a != NULL; a = a->inchain)
			createarc(nfa, a->type, a->co, a->from, newState);
	}
#ifdef NOT_USED					/* see comment above */
	else if (!BULK_ARC_OP_USE_SORT(oldState->nins, newState->nins))
	{
		/* With not too many arcs, just do them one at a time */
		struct arc *a;

		for (a = oldState->ins; a != NULL; a = a->inchain)
			cparc(nfa, a, a->from, newState);
	}
	else
	{
		/*
		 * With many arcs, use a sort-merge approach.  Note that createarc()
		 * will put new arcs onto the front of newState's chain, so it does
		 * not break our walk through the sorted part of the chain.
		 */
		struct arc *oa;
		struct arc *na;

		/*
		 * Because we bypass newarc() in this code path, we'd better include a
		 * cancel check.
		 */
		INTERRUPT(nfa->v->re);

		sortins(nfa, oldState);
		sortins(nfa, newState);
		if (NISERR())
			return;				/* might have failed to sort */
		oa = oldState->ins;
		na = newState->ins;
		while (oa != NULL && na != NULL)
		{
			struct arc *a = oa;

			switch (sortins_cmp(&oa, &na))
			{
				case -1:
					/* newState does not have anything matching oa */
					oa = oa->inchain;
					createarc(nfa, a->type, a->co, a->from, newState);
					break;
				case 0:
					/* match, advance in both lists */
					oa = oa->inchain;
					na = na->inchain;
					break;
				case +1:
					/* advance only na; oa might have a match later */
					na = na->inchain;
					break;
				default:
					assert(NOTREACHED);
			}
		}
		while (oa != NULL)
		{
			/* newState does not have anything matching oa */
			struct arc *a = oa;

			oa = oa->inchain;
			createarc(nfa, a->type, a->co, a->from, newState);
		}
	}
#endif							/* NOT_USED */
}
/*
 * mergeins - merge a list of inarcs into a state
 *
 * This is much like copyins, but the source arcs are listed in an array,
 * and are not guaranteed unique.  It's okay to clobber the array contents.
 */
static void
mergeins(struct nfa *nfa,
		 struct state *s,
		 struct arc **arcarray,
		 int arccount)
{
	struct arc *na;
	int			i;
	int			j;

	if (arccount <= 0)
		return;

	/*
	 * Because we bypass newarc() in this code path, we'd better include a
	 * cancel check.
	 */
	INTERRUPT(nfa->v->re);

	/* Sort existing inarcs as well as proposed new ones */
	sortins(nfa, s);
	if (NISERR())
		return;					/* might have failed to sort */

	qsort(arcarray, arccount, sizeof(struct arc *), sortins_cmp);

	/*
	 * arcarray very likely includes dups, so we must eliminate them.  (This
	 * could be folded into the next loop, but it's not worth the trouble.)
	 * Here, j is the index of the last retained (unique) entry.
	 */
	j = 0;
	for (i = 1; i < arccount; i++)
	{
		switch (sortins_cmp(&arcarray[j], &arcarray[i]))
		{
			case -1:
				/* non-dup */
				arcarray[++j] = arcarray[i];
				break;
			case 0:
				/* dup */
				break;
			default:
				/* trouble: array was supposed to be sorted */
				assert(NOTREACHED);
		}
	}
	arccount = j + 1;

	/*
	 * Now merge into s' inchain.  Note that createarc() will put new arcs
	 * onto the front of s's chain, so it does not break our walk through the
	 * sorted part of the chain.
	 */
	i = 0;
	na = s->ins;
	while (i < arccount && na != NULL)
	{
		struct arc *a = arcarray[i];

		switch (sortins_cmp(&a, &na))
		{
			case -1:
				/* s does not have anything matching a */
				createarc(nfa, a->type, a->co, a->from, s);
				i++;
				break;
			case 0:
				/* match, advance in both lists */
				i++;
				na = na->inchain;
				break;
			case +1:
				/* advance only na; array might have a match later */
				na = na->inchain;
				break;
			default:
				assert(NOTREACHED);
		}
	}
	while (i < arccount)
	{
		/* s does not have anything matching a */
		struct arc *a = arcarray[i];

		createarc(nfa, a->type, a->co, a->from, s);
		i++;
	}
}
/*
 * moveouts - move all out arcs of a state to another state
 *
 * See comments for moveins(); this is its mirror image, operating on
 * out-chains via sortouts/changearcsource.  On exit, oldState has no
 * out arcs at all.
 */
static void
moveouts(struct nfa *nfa,
		 struct state *oldState,
		 struct state *newState)
{
	assert(oldState != newState);

	if (newState->nouts == 0)
	{
		/* No need for de-duplication */
		struct arc *a;

		while ((a = oldState->outs) != NULL)
		{
			createarc(nfa, a->type, a->co, newState, a->to);
			freearc(nfa, a);
		}
	}
	else if (!BULK_ARC_OP_USE_SORT(oldState->nouts, newState->nouts))
	{
		/* With not too many arcs, just do them one at a time */
		struct arc *a;

		while ((a = oldState->outs) != NULL)
		{
			/* cparc goes through newarc, which suppresses duplicates */
			cparc(nfa, a, newState, a->to);
			freearc(nfa, a);
		}
	}
	else
	{
		/*
		 * With many arcs, use a sort-merge approach.  Note changearcsource()
		 * will put the arc onto the front of newState's chain, so it does not
		 * break our walk through the sorted part of the chain.
		 */
		struct arc *oa;
		struct arc *na;

		/*
		 * Because we bypass newarc() in this code path, we'd better include a
		 * cancel check.
		 */
		INTERRUPT(nfa->v->re);

		sortouts(nfa, oldState);
		sortouts(nfa, newState);
		if (NISERR())
			return;				/* might have failed to sort */
		oa = oldState->outs;
		na = newState->outs;
		while (oa != NULL && na != NULL)
		{
			struct arc *a = oa;

			switch (sortouts_cmp(&oa, &na))
			{
				case -1:
					/* newState does not have anything matching oa */
					oa = oa->outchain;

					/*
					 * Rather than doing createarc+freearc, we can just unlink
					 * and relink the existing arc struct.
					 */
					changearcsource(a, newState);
					break;
				case 0:
					/* match, advance in both lists */
					oa = oa->outchain;
					na = na->outchain;
					/* ... and drop duplicate arc from oldState */
					freearc(nfa, a);
					break;
				case +1:
					/* advance only na; oa might have a match later */
					na = na->outchain;
					break;
				default:
					assert(NOTREACHED);
			}
		}
		while (oa != NULL)
		{
			/* newState does not have anything matching oa */
			struct arc *a = oa;

			oa = oa->outchain;
			changearcsource(a, newState);
		}
	}

	assert(oldState->nouts == 0);
	assert(oldState->outs == NULL);
}
/*
 * copyouts - copy out arcs of a state to another state
 *
 * Unlike moveouts, the original arcs are left in place on oldState.
 *
 * See comments for copyins(): in current usage newState is always
 * brand-new, so only the first code path is ever reached.
 */
static void
copyouts(struct nfa *nfa,
		 struct state *oldState,
		 struct state *newState)
{
	assert(oldState != newState);
	assert(newState->nouts == 0);	/* see comment above */

	if (newState->nouts == 0)
	{
		/* No need for de-duplication */
		struct arc *a;

		for (a = oldState->outs; a != NULL; a = a->outchain)
			createarc(nfa, a->type, a->co, newState, a->to);
	}
#ifdef NOT_USED					/* see comment above */
	else if (!BULK_ARC_OP_USE_SORT(oldState->nouts, newState->nouts))
	{
		/* With not too many arcs, just do them one at a time */
		struct arc *a;

		for (a = oldState->outs; a != NULL; a = a->outchain)
			cparc(nfa, a, newState, a->to);
	}
	else
	{
		/*
		 * With many arcs, use a sort-merge approach.  Note that createarc()
		 * will put new arcs onto the front of newState's chain, so it does
		 * not break our walk through the sorted part of the chain.
		 */
		struct arc *oa;
		struct arc *na;

		/*
		 * Because we bypass newarc() in this code path, we'd better include a
		 * cancel check.
		 */
		INTERRUPT(nfa->v->re);

		sortouts(nfa, oldState);
		sortouts(nfa, newState);
		if (NISERR())
			return;				/* might have failed to sort */
		oa = oldState->outs;
		na = newState->outs;
		while (oa != NULL && na != NULL)
		{
			struct arc *a = oa;

			switch (sortouts_cmp(&oa, &na))
			{
				case -1:
					/* newState does not have anything matching oa */
					oa = oa->outchain;
					createarc(nfa, a->type, a->co, newState, a->to);
					break;
				case 0:
					/* match, advance in both lists */
					oa = oa->outchain;
					na = na->outchain;
					break;
				case +1:
					/* advance only na; oa might have a match later */
					na = na->outchain;
					break;
				default:
					assert(NOTREACHED);
			}
		}
		while (oa != NULL)
		{
			/* newState does not have anything matching oa */
			struct arc *a = oa;

			oa = oa->outchain;
			createarc(nfa, a->type, a->co, newState, a->to);
		}
	}
#endif							/* NOT_USED */
}
/*
 * cloneouts - copy out arcs of a state to another state pair, modifying type
 *
 * This is only used to convert PLAIN arcs to AHEAD/BEHIND arcs, which share
 * the same interpretation of "co".  It wouldn't be sensible with LACONs.
 */
static void
cloneouts(struct nfa *nfa,
		  struct state *old,
		  struct state *from,
		  struct state *to,
		  int type)
{
	struct arc *scan = old->outs;

	assert(old != from);
	assert(type == AHEAD || type == BEHIND);

	while (scan != NULL)
	{
		/* only PLAIN arcs carry a color reusable by AHEAD/BEHIND */
		assert(scan->type == PLAIN);
		newarc(nfa, type, scan->co, from, to);
		scan = scan->outchain;
	}
}
/*
 * delsub - delete a sub-NFA, updating subre pointers if necessary
 *
 * This uses a recursive traversal of the sub-NFA, marking already-seen
 * states using their tmp pointer.  The endpoint states lp and rp
 * themselves are not freed, only everything strictly in between.
 */
static void
delsub(struct nfa *nfa,
	   struct state *lp,		/* the sub-NFA goes from here... */
	   struct state *rp)		/* ...to here, *not* inclusive */
{
	assert(lp != rp);

	rp->tmp = rp;				/* mark end */

	deltraverse(nfa, lp, lp);
	if (NISERR())
		return;					/* asserts might not hold after failure */
	assert(lp->nouts == 0 && rp->nins == 0);	/* did the job */
	assert(lp->no != FREESTATE && rp->no != FREESTATE); /* no more */

	rp->tmp = NULL;				/* unmark end */
	lp->tmp = NULL;				/* and begin, marked by deltraverse */
}
/*
 * deltraverse - the recursive heart of delsub
 * This routine's basic job is to destroy all out-arcs of the state.
 *
 * States that become unreachable (no in arcs) and are not marked via tmp
 * are freed along the way; the tmp field marks states whose processing is
 * in progress, which also breaks cycles.
 */
static void
deltraverse(struct nfa *nfa,
			struct state *leftend,
			struct state *s)
{
	struct arc *a;
	struct state *to;

	/* Since this is recursive, it could be driven to stack overflow */
	if (STACK_TOO_DEEP(nfa->v->re))
	{
		NERR(REG_ETOOBIG);
		return;
	}

	if (s->nouts == 0)
		return;					/* nothing to do */
	if (s->tmp != NULL)
		return;					/* already in progress */

	s->tmp = s;					/* mark as in progress */

	while ((a = s->outs) != NULL)
	{
		to = a->to;
		deltraverse(nfa, leftend, to);
		if (NISERR())
			return;				/* asserts might not hold after failure */
		assert(to->nouts == 0 || to->tmp != NULL);
		freearc(nfa, a);
		if (to->nins == 0 && to->tmp == NULL)
		{
			/* to is now orphaned and fully processed; reclaim it */
			assert(to->nouts == 0);
			freestate(nfa, to);
		}
	}

	assert(s->no != FREESTATE); /* we're still here */
	assert(s == leftend || s->nins != 0);	/* and still reachable */
	assert(s->nouts == 0);		/* but have no outarcs */

	s->tmp = NULL;				/* we're done here */
}
/*
 * dupnfa - duplicate sub-NFA
 *
 * Another recursive traversal, this time using tmp to point to duplicates
 * as well as mark already-seen states.  (You knew there was a reason why
 * it's a state pointer, didn't you? :-))
 */
static void
dupnfa(struct nfa *nfa,
	   struct state *start,		/* duplicate of subNFA starting here */
	   struct state *stop,		/* and stopping here */
	   struct state *from,		/* stringing duplicate from here */
	   struct state *to)		/* to here */
{
	/* an empty sub-NFA duplicates as a single no-op arc */
	if (start == stop)
	{
		newarc(nfa, EMPTY, 0, from, to);
		return;
	}

	/* pre-map the stop state to its duplicate, then walk the sub-NFA */
	stop->tmp = to;
	duptraverse(nfa, start, from);

	/* done, except for clearing out the tmp pointers */
	stop->tmp = NULL;
	cleartraverse(nfa, start);
}
/*
 * duptraverse - recursive heart of dupnfa
 *
 * Each visited state's tmp field points at its duplicate; a non-NULL tmp
 * also marks the state as already processed (and breaks cycles).
 */
static void
duptraverse(struct nfa *nfa,
			struct state *s,
			struct state *stmp) /* s's duplicate, or NULL */
{
	struct arc *a;

	/* Since this is recursive, it could be driven to stack overflow */
	if (STACK_TOO_DEEP(nfa->v->re))
	{
		NERR(REG_ETOOBIG);
		return;
	}

	if (s->tmp != NULL)
		return;					/* already done */

	/* use caller-supplied duplicate if any, else make a fresh state */
	s->tmp = (stmp == NULL) ? newstate(nfa) : stmp;
	if (s->tmp == NULL)
	{
		assert(NISERR());
		return;
	}

	for (a = s->outs; a != NULL && !NISERR(); a = a->outchain)
	{
		duptraverse(nfa, a->to, (struct state *) NULL);
		if (NISERR())
			break;
		assert(a->to->tmp != NULL);
		cparc(nfa, a, s->tmp, a->to->tmp);
	}
}
/*
 * removeconstraints - remove any constraints in an NFA
 *
 * Constraint arcs are replaced by empty arcs, essentially treating all
 * constraints as automatically satisfied.
 */
static void
removeconstraints(struct nfa *nfa,
				  struct state *start,	/* process subNFA starting here */
				  struct state *stop)	/* and stopping here */
{
	/* an empty sub-NFA has no arcs to fix up */
	if (start == stop)
		return;

	/* mark the endpoint so the traversal halts there */
	stop->tmp = stop;
	removetraverse(nfa, start);

	/* erase the traversal markings */
	stop->tmp = NULL;
	cleartraverse(nfa, start);
}
/*
 * removetraverse - recursive heart of removeconstraints
 *
 * The tmp field marks states already visited (and breaks cycles).
 * Constraint arcs are replaced one-for-one by EMPTY arcs.
 */
static void
removetraverse(struct nfa *nfa,
			   struct state *s)
{
	struct arc *a;
	struct arc *oa;

	/* Since this is recursive, it could be driven to stack overflow */
	if (STACK_TOO_DEEP(nfa->v->re))
	{
		NERR(REG_ETOOBIG);
		return;
	}

	if (s->tmp != NULL)
		return;					/* already done */

	s->tmp = s;

	/* capture outchain in oa before freearc can invalidate a */
	for (a = s->outs; a != NULL && !NISERR(); a = oa)
	{
		removetraverse(nfa, a->to);
		if (NISERR())
			break;
		oa = a->outchain;
		switch (a->type)
		{
			case PLAIN:
			case EMPTY:
			case CANTMATCH:
				/* nothing to do */
				break;
			case AHEAD:
			case BEHIND:
			case '^':
			case '$':
			case LACON:
				/* replace it */
				newarc(nfa, EMPTY, 0, s, a->to);
				freearc(nfa, a);
				break;
			default:
				NERR(REG_ASSERT);
				break;
		}
	}
}
/*
 * cleartraverse - recursive cleanup for algorithms that leave tmp ptrs set
 *
 * Resets tmp to NULL throughout the reachable sub-NFA; a NULL tmp also
 * serves as the "already cleaned" marker, so cycles terminate.
 */
static void
cleartraverse(struct nfa *nfa,
			  struct state *s)
{
	struct arc *scan;

	/* Since this is recursive, it could be driven to stack overflow */
	if (STACK_TOO_DEEP(nfa->v->re))
	{
		NERR(REG_ETOOBIG);
		return;
	}

	if (s->tmp == NULL)
		return;					/* already cleaned (or never marked) */
	s->tmp = NULL;

	scan = s->outs;
	while (scan != NULL)
	{
		cleartraverse(nfa, scan->to);
		scan = scan->outchain;
	}
}
/*
 * single_color_transition - does getting from s1 to s2 cross one PLAIN arc?
 *
 * If traversing from s1 to s2 requires a single PLAIN match (possibly of any
 * of a set of colors), return a state whose outarc list contains only PLAIN
 * arcs of those color(s).  Otherwise return NULL.
 *
 * This is used before optimizing the NFA, so there may be EMPTY arcs, which
 * we should ignore; the possibility of an EMPTY is why the result state could
 * be different from s1.
 *
 * It's worth troubling to handle multiple parallel PLAIN arcs here because a
 * bracket construct such as [abc] might yield either one or several parallel
 * PLAIN arcs depending on earlier atoms in the expression.  We'd rather that
 * that implementation detail not create user-visible performance differences.
 */
static struct state *
single_color_transition(struct state *s1, struct state *s2)
{
	struct arc *scan;

	/* step over a single leading EMPTY arc out of s1, if present */
	if (s1->nouts == 1 && s1->outs->type == EMPTY)
		s1 = s1->outs->to;
	/* likewise step back over a single trailing EMPTY arc into s2 */
	if (s2->nins == 1 && s2->ins->type == EMPTY)
		s2 = s2->ins->from;

	/* a single-state loop in between disqualifies the pair */
	if (s1 == s2)
		return NULL;

	/* s1 must have at least one outarc... */
	if (s1->outs == NULL)
		return NULL;

	/* ... and every one of them must be a PLAIN arc leading to s2 */
	for (scan = s1->outs; scan != NULL; scan = scan->outchain)
	{
		if (scan->type != PLAIN)
			return NULL;
		if (scan->to != s2)
			return NULL;
	}

	/* OK, s1 is the possessor of the relevant outarcs */
	return s1;
}
/*
 * specialcolors - fill in special colors for an NFA
 *
 * A top-level NFA gets freshly invented pseudocolors for BOS, BOL, EOS,
 * and EOL; a sub-NFA inherits its parent's.
 */
static void
specialcolors(struct nfa *nfa)
{
	int			i;

	if (nfa->parent == NULL)
	{
		/* outermost NFA: invent false colors for BOS, BOL, EOS, EOL */
		for (i = 0; i < 2; i++)
			nfa->bos[i] = pseudocolor(nfa->cm);
		for (i = 0; i < 2; i++)
			nfa->eos[i] = pseudocolor(nfa->cm);
	}
	else
	{
		/* sub-NFA: the parent must already have its colors assigned */
		for (i = 0; i < 2; i++)
		{
			assert(nfa->parent->bos[i] != COLORLESS);
			nfa->bos[i] = nfa->parent->bos[i];
		}
		for (i = 0; i < 2; i++)
		{
			assert(nfa->parent->eos[i] != COLORLESS);
			nfa->eos[i] = nfa->parent->eos[i];
		}
	}
}
/*
 * optimize - optimize an NFA
 *
 * The main goal of this function is not so much "optimization" (though it
 * does try to get rid of useless NFA states) as reducing the NFA to a form
 * the regex executor can handle.  The executor, and indeed the cNFA format
 * that is its input, can only handle PLAIN and LACON arcs.  The output of
 * the regex parser also includes EMPTY (do-nothing) arcs, as well as
 * ^, $, AHEAD, and BEHIND constraint arcs, which we must get rid of here.
 * We first get rid of EMPTY arcs and then deal with the constraint arcs.
 * The hardest part of either job is to get rid of circular loops of the
 * target arc type.  We would have to do that in any case, though, as such a
 * loop would otherwise allow the executor to cycle through the loop endlessly
 * without making any progress in the input string.
 *
 * Returns the re_info bits computed by analyze().
 */
static long						/* re_info bits */
optimize(struct nfa *nfa,
		 FILE *f)				/* for debug output; NULL none */
{
#ifdef REG_DEBUG
	int			verbose = (f != NULL) ? 1 : 0;

	if (verbose)
		fprintf(f, "\ninitial cleanup:\n");
#endif

	/* If we have any CANTMATCH arcs, drop them; but this is uncommon */
	if (nfa->flags & HASCANTMATCH)
	{
		removecantmatch(nfa);
		nfa->flags &= ~HASCANTMATCH;
	}

	cleanup(nfa);				/* may simplify situation */
#ifdef REG_DEBUG
	if (verbose)
		dumpnfa(nfa, f);
	if (verbose)
		fprintf(f, "\nempties:\n");
#endif
	fixempties(nfa, f);			/* get rid of EMPTY arcs */
#ifdef REG_DEBUG
	if (verbose)
		fprintf(f, "\nconstraints:\n");
#endif
	fixconstraintloops(nfa, f); /* get rid of constraint loops */
	pullback(nfa, f);			/* pull back constraints backward */
	pushfwd(nfa, f);			/* push fwd constraints forward */
#ifdef REG_DEBUG
	if (verbose)
		fprintf(f, "\nfinal cleanup:\n");
#endif
	cleanup(nfa);				/* final tidying */
#ifdef REG_DEBUG
	if (verbose)
		dumpnfa(nfa, f);
#endif
	return analyze(nfa);		/* and analysis */
}
/*
 * pullback - pull back constraints backward to eliminate them
 *
 * Repeatedly applies pull() to every '^' and BEHIND arc until a full pass
 * makes no progress, then converts any '^' arcs that reached the start
 * state into PLAIN arcs on the BOS/BOL pseudocolors.
 */
static void
pullback(struct nfa *nfa,
		 FILE *f)				/* for debug output; NULL none */
{
	struct state *s;
	struct state *nexts;
	struct arc *a;
	struct arc *nexta;
	struct state *intermediates;
	int			progress;

	/* find and pull until there are no more */
	do
	{
		progress = 0;
		for (s = nfa->states; s != NULL && !NISERR(); s = nexts)
		{
			/* capture next pointers early: the loop body may delete s or a */
			nexts = s->next;
			intermediates = NULL;
			for (a = s->outs; a != NULL && !NISERR(); a = nexta)
			{
				nexta = a->outchain;
				if (a->type == '^' || a->type == BEHIND)
					if (pull(nfa, a, &intermediates))
						progress = 1;
			}
			/* clear tmp fields of intermediate states created here */
			while (intermediates != NULL)
			{
				struct state *ns = intermediates->tmp;

				intermediates->tmp = NULL;
				intermediates = ns;
			}
			/* if s is now useless, get rid of it */
			if ((s->nins == 0 || s->nouts == 0) && !s->flag)
				dropstate(nfa, s);
		}
		if (progress && f != NULL)
			dumpnfa(nfa, f);
	} while (progress && !NISERR());
	if (NISERR())
		return;

	/*
	 * Any ^ constraints we were able to pull to the start state can now be
	 * replaced by PLAIN arcs referencing the BOS or BOL colors.  There should
	 * be no other ^ or BEHIND arcs left in the NFA, though we do not check
	 * that here (compact() will fail if so).
	 */
	for (a = nfa->pre->outs; a != NULL; a = nexta)
	{
		nexta = a->outchain;
		if (a->type == '^')
		{
			assert(a->co == 0 || a->co == 1);	/* selects bos[0] vs bos[1] */
			newarc(nfa, PLAIN, nfa->bos[a->co], a->from, a->to);
			freearc(nfa, a);
		}
	}
}
/*
 * pull - pull a back constraint backward past its source state
 *
 * Returns 1 if successful (which it always is unless the source is the
 * start state or we have an internal error), 0 if nothing happened.
 *
 * A significant property of this function is that it deletes no pre-existing
 * states, and no outarcs of the constraint's from state other than the given
 * constraint arc.  This makes the loops in pullback() safe, at the cost that
 * we may leave useless states behind.  Therefore, we leave it to pullback()
 * to delete such states.
 *
 * If the from state has multiple back-constraint outarcs, and/or multiple
 * compatible constraint inarcs, we only need to create one new intermediate
 * state per combination of predecessor and successor states.  *intermediates
 * points to a list of such intermediate states for this from state (chained
 * through their tmp fields).
 */
static int
pull(struct nfa *nfa,
	 struct arc *con,
	 struct state **intermediates)
{
	struct state *from = con->from;
	struct state *to = con->to;
	struct arc *a;
	struct arc *nexta;
	struct state *s;

	assert(from != to);			/* should have gotten rid of this earlier */
	if (from->flag)				/* can't pull back beyond start */
		return 0;
	if (from->nins == 0)
	{							/* unreachable */
		freearc(nfa, con);
		return 1;
	}

	/*
	 * First, clone from state if necessary to avoid other outarcs.  This may
	 * seem wasteful, but it simplifies the logic, and we'll get rid of the
	 * clone state again at the bottom.
	 */
	if (from->nouts > 1)
	{
		s = newstate(nfa);
		if (NISERR())
			return 0;
		copyins(nfa, from, s);	/* duplicate inarcs */
		cparc(nfa, con, s, to); /* move constraint arc */
		freearc(nfa, con);
		if (NISERR())
			return 0;
		/* the clone's single outarc is the relocated constraint */
		from = s;
		con = from->outs;
	}
	assert(from->nouts == 1);

	/* propagate the constraint into the from state's inarcs */
	for (a = from->ins; a != NULL && !NISERR(); a = nexta)
	{
		/* capture next pointer early: combine() cases may free a */
		nexta = a->inchain;
		switch (combine(nfa, con, a))
		{
			case INCOMPATIBLE:	/* destroy the arc */
				freearc(nfa, a);
				break;
			case SATISFIED:		/* no action needed */
				break;
			case COMPATIBLE:	/* swap the two arcs, more or less */
				/* need an intermediate state, but might have one already */
				for (s = *intermediates; s != NULL; s = s->tmp)
				{
					assert(s->nins > 0 && s->nouts > 0);
					if (s->ins->from == a->from && s->outs->to == to)
						break;
				}
				if (s == NULL)
				{
					s = newstate(nfa);
					if (NISERR())
						return 0;
					s->tmp = *intermediates;
					*intermediates = s;
				}
				cparc(nfa, con, a->from, s);
				cparc(nfa, a, s, to);
				freearc(nfa, a);
				break;
			case REPLACEARC:	/* replace arc's color */
				newarc(nfa, a->type, con->co, a->from, to);
				freearc(nfa, a);
				break;
			default:
				assert(NOTREACHED);
				break;
		}
	}

	/* remaining inarcs, if any, incorporate the constraint */
	moveins(nfa, from, to);
	freearc(nfa, con);
	/* from state is now useless, but we leave it to pullback() to clean up */
	return 1;
}
/*
 * pushfwd - push forward constraints forward to eliminate them
 *
 * Mirror image of pullback(): repeatedly applies push() to every '$' and
 * AHEAD arc until a full pass makes no progress, then converts any '$'
 * arcs that reached the post state into PLAIN arcs on the EOS/EOL
 * pseudocolors.
 */
static void
pushfwd(struct nfa *nfa,
		FILE *f)				/* for debug output; NULL none */
{
	struct state *s;
	struct state *nexts;
	struct arc *a;
	struct arc *nexta;
	struct state *intermediates;
	int			progress;

	/* find and push until there are no more */
	do
	{
		progress = 0;
		for (s = nfa->states; s != NULL && !NISERR(); s = nexts)
		{
			/* capture next pointers early: the loop body may delete s or a */
			nexts = s->next;
			intermediates = NULL;
			for (a = s->ins; a != NULL && !NISERR(); a = nexta)
			{
				nexta = a->inchain;
				if (a->type == '$' || a->type == AHEAD)
					if (push(nfa, a, &intermediates))
						progress = 1;
			}
			/* clear tmp fields of intermediate states created here */
			while (intermediates != NULL)
			{
				struct state *ns = intermediates->tmp;

				intermediates->tmp = NULL;
				intermediates = ns;
			}
			/* if s is now useless, get rid of it */
			if ((s->nins == 0 || s->nouts == 0) && !s->flag)
				dropstate(nfa, s);
		}
		if (progress && f != NULL)
			dumpnfa(nfa, f);
	} while (progress && !NISERR());
	if (NISERR())
		return;

	/*
	 * Any $ constraints we were able to push to the post state can now be
	 * replaced by PLAIN arcs referencing the EOS or EOL colors.  There should
	 * be no other $ or AHEAD arcs left in the NFA, though we do not check
	 * that here (compact() will fail if so).
	 */
	for (a = nfa->post->ins; a != NULL; a = nexta)
	{
		nexta = a->inchain;
		if (a->type == '$')
		{
			assert(a->co == 0 || a->co == 1);	/* selects eos[0] vs eos[1] */
			newarc(nfa, PLAIN, nfa->eos[a->co], a->from, a->to);
			freearc(nfa, a);
		}
	}
}
/*
 * push - push a forward constraint forward past its destination state
 *
 * Returns 1 if successful (which it always is unless the destination is the
 * post state or we have an internal error), 0 if nothing happened.
 *
 * A significant property of this function is that it deletes no pre-existing
 * states, and no inarcs of the constraint's to state other than the given
 * constraint arc.  This makes the loops in pushfwd() safe, at the cost that
 * we may leave useless states behind.  Therefore, we leave it to pushfwd()
 * to delete such states.
 *
 * If the to state has multiple forward-constraint inarcs, and/or multiple
 * compatible constraint outarcs, we only need to create one new intermediate
 * state per combination of predecessor and successor states.  *intermediates
 * points to a list of such intermediate states for this to state (chained
 * through their tmp fields).
 */
static int
push(struct nfa *nfa,
	 struct arc *con,
	 struct state **intermediates)
{
	struct state *from = con->from;
	struct state *to = con->to;
	struct arc *a;
	struct arc *nexta;
	struct state *s;

	assert(to != from);			/* should have gotten rid of this earlier */
	if (to->flag)				/* can't push forward beyond end */
		return 0;
	if (to->nouts == 0)
	{							/* dead end */
		freearc(nfa, con);
		return 1;
	}

	/*
	 * First, clone to state if necessary to avoid other inarcs.  This may
	 * seem wasteful, but it simplifies the logic, and we'll get rid of the
	 * clone state again at the bottom.
	 */
	if (to->nins > 1)
	{
		s = newstate(nfa);
		if (NISERR())
			return 0;
		copyouts(nfa, to, s);	/* duplicate outarcs */
		cparc(nfa, con, from, s);	/* move constraint arc */
		freearc(nfa, con);
		if (NISERR())
			return 0;
		/* the clone's single inarc is the relocated constraint */
		to = s;
		con = to->ins;
	}
	assert(to->nins == 1);

	/* propagate the constraint into the to state's outarcs */
	for (a = to->outs; a != NULL && !NISERR(); a = nexta)
	{
		/* capture next pointer early: combine() cases may free a */
		nexta = a->outchain;
		switch (combine(nfa, con, a))
		{
			case INCOMPATIBLE:	/* destroy the arc */
				freearc(nfa, a);
				break;
			case SATISFIED:		/* no action needed */
				break;
			case COMPATIBLE:	/* swap the two arcs, more or less */
				/* need an intermediate state, but might have one already */
				for (s = *intermediates; s != NULL; s = s->tmp)
				{
					assert(s->nins > 0 && s->nouts > 0);
					if (s->ins->from == from && s->outs->to == a->to)
						break;
				}
				if (s == NULL)
				{
					s = newstate(nfa);
					if (NISERR())
						return 0;
					s->tmp = *intermediates;
					*intermediates = s;
				}
				cparc(nfa, con, s, a->to);
				cparc(nfa, a, from, s);
				freearc(nfa, a);
				break;
			case REPLACEARC:	/* replace arc's color */
				newarc(nfa, a->type, con->co, from, a->to);
				freearc(nfa, a);
				break;
			default:
				assert(NOTREACHED);
				break;
		}
	}

	/* remaining outarcs, if any, incorporate the constraint */
	moveouts(nfa, to, from);
	freearc(nfa, con);
	/* to state is now useless, but we leave it to pushfwd() to clean up */
	return 1;
}
/*
 * combine - constraint lands on an arc, what happens?
 *
 * The return value tells the caller how the constraint arc "con" interacts
 * with the pre-existing arc "a" that it has been pushed onto:
 *
 * #def INCOMPATIBLE 1 // destroys arc
 * #def SATISFIED 2 // constraint satisfied
 * #def COMPATIBLE 3 // compatible but not satisfied yet
 * #def REPLACEARC 4 // replace arc's color with constraint color
 */
static int
combine(struct nfa *nfa,
        struct arc *con,
        struct arc *a)
{
/*
 * Pack the two arc-type codes into one value so a single switch can
 * dispatch on the (constraint-type, arc-type) pair.  Arc types are
 * char-sized codes, so shifting by CHAR_BIT cannot make pairs collide.
 */
#define CA(ct,at) (((ct)<<CHAR_BIT) | (at))
    switch (CA(con->type, a->type))
    {
        case CA('^', PLAIN):    /* newlines are handled separately */
        case CA('$', PLAIN):
            return INCOMPATIBLE;
            break;
        case CA(AHEAD, PLAIN):  /* color constraints meet colors */
        case CA(BEHIND, PLAIN):
            if (con->co == a->co)
                return SATISFIED;
            if (con->co == RAINBOW)
            {
                /* con is satisfied unless arc's color is a pseudocolor */
                if (!(nfa->cm->cd[a->co].flags & PSEUDO))
                    return SATISFIED;
            }
            else if (a->co == RAINBOW)
            {
                /* con is incompatible if it's for a pseudocolor */
                /* (this is hypothetical; we make no such constraints today) */
                if (nfa->cm->cd[con->co].flags & PSEUDO)
                    return INCOMPATIBLE;
                /* otherwise, constraint constrains arc to be only its color */
                return REPLACEARC;
            }
            return INCOMPATIBLE;
            break;
        case CA('^', '^'):      /* collision, similar constraints */
        case CA('$', '$'):
            if (con->co == a->co)   /* true duplication */
                return SATISFIED;
            return INCOMPATIBLE;
            break;
        case CA(AHEAD, AHEAD):  /* collision, similar constraints */
        case CA(BEHIND, BEHIND):
            /* Same color-matching logic as the constraint-meets-color case */
            if (con->co == a->co)   /* true duplication */
                return SATISFIED;
            if (con->co == RAINBOW)
            {
                /* con is satisfied unless arc's color is a pseudocolor */
                if (!(nfa->cm->cd[a->co].flags & PSEUDO))
                    return SATISFIED;
            }
            else if (a->co == RAINBOW)
            {
                /* con is incompatible if it's for a pseudocolor */
                /* (this is hypothetical; we make no such constraints today) */
                if (nfa->cm->cd[con->co].flags & PSEUDO)
                    return INCOMPATIBLE;
                /* otherwise, constraint constrains arc to be only its color */
                return REPLACEARC;
            }
            return INCOMPATIBLE;
            break;
        case CA('^', BEHIND):   /* collision, dissimilar constraints */
        case CA(BEHIND, '^'):
        case CA('$', AHEAD):
        case CA(AHEAD, '$'):
            return INCOMPATIBLE;
            break;
        case CA('^', '$'):      /* constraints passing each other */
        case CA('^', AHEAD):
        case CA(BEHIND, '$'):
        case CA(BEHIND, AHEAD):
        case CA('$', '^'):
        case CA('$', BEHIND):
        case CA(AHEAD, '^'):
        case CA(AHEAD, BEHIND):
        case CA('^', LACON):
        case CA(BEHIND, LACON):
        case CA('$', LACON):
        case CA(AHEAD, LACON):
            return COMPATIBLE;
            break;
    }
    /* Any type pair not listed above should never be presented to us */
    assert(NOTREACHED);
    return INCOMPATIBLE;        /* for benefit of blind compilers */
}
/*
 * fixempties - get rid of EMPTY arcs
 *
 * On completion, no EMPTY arcs remain in the NFA (barring error exit).
 * The overall graph language is unchanged: wherever a chain of EMPTY arcs
 * allowed movement between states, equivalent non-EMPTY arcs are added.
 */
static void
fixempties(struct nfa *nfa,
           FILE *f)             /* for debug output; NULL none */
{
    struct state *s;
    struct state *s2;
    struct state *nexts;
    struct arc *a;
    struct arc *nexta;
    int totalinarcs;
    struct arc **inarcsorig;    /* per-state pointer to first original inarc */
    struct arc **arcarray;      /* scratch array of candidate inarcs */
    int arccount;
    int prevnins;
    int nskip;

    /*
     * First, get rid of any states whose sole out-arc is an EMPTY, since
     * they're basically just aliases for their successor. The parsing
     * algorithm creates enough of these that it's worth special-casing this.
     */
    for (s = nfa->states; s != NULL && !NISERR(); s = nexts)
    {
        nexts = s->next;
        if (s->flag || s->nouts != 1)
            continue;
        a = s->outs;
        assert(a != NULL && a->outchain == NULL);
        if (a->type != EMPTY)
            continue;
        if (s != a->to)
            moveins(nfa, s, a->to);
        dropstate(nfa, s);
    }

    /*
     * Similarly, get rid of any state with a single EMPTY in-arc, by folding
     * it into its predecessor.
     */
    for (s = nfa->states; s != NULL && !NISERR(); s = nexts)
    {
        nexts = s->next;
        /* while we're at it, ensure tmp fields are clear for next step */
        assert(s->tmp == NULL);
        if (s->flag || s->nins != 1)
            continue;
        a = s->ins;
        assert(a != NULL && a->inchain == NULL);
        if (a->type != EMPTY)
            continue;
        if (s != a->from)
            moveouts(nfa, s, a->from);
        dropstate(nfa, s);
    }

    if (NISERR())
        return;

    /*
     * For each remaining NFA state, find all other states from which it is
     * reachable by a chain of one or more EMPTY arcs. Then generate new arcs
     * that eliminate the need for each such chain.
     *
     * We could replace a chain of EMPTY arcs that leads from a "from" state
     * to a "to" state either by pushing non-EMPTY arcs forward (linking
     * directly from "from"'s predecessors to "to") or by pulling them back
     * (linking directly from "from" to "to"'s successors). We choose to
     * always do the former; this choice is somewhat arbitrary, but the
     * approach below requires that we uniformly do one or the other.
     *
     * Suppose we have a chain of N successive EMPTY arcs (where N can easily
     * approach the size of the NFA). All of the intermediate states must
     * have additional inarcs and outarcs, else they'd have been removed by
     * the steps above. Assuming their inarcs are mostly not empties, we will
     * add O(N^2) arcs to the NFA, since a non-EMPTY inarc leading to any one
     * state in the chain must be duplicated to lead to all its successor
     * states as well. So there is no hope of doing less than O(N^2) work;
     * however, we should endeavor to keep the big-O cost from being even
     * worse than that, which it can easily become without care. In
     * particular, suppose we were to copy all S1's inarcs forward to S2, and
     * then also to S3, and then later we consider pushing S2's inarcs forward
     * to S3. If we include the arcs already copied from S1 in that, we'd be
     * doing O(N^3) work. (The duplicate-arc elimination built into newarc()
     * and its cohorts would get rid of the extra arcs, but not without cost.)
     *
     * We can avoid this cost by treating only arcs that existed at the start
     * of this phase as candidates to be pushed forward. To identify those,
     * we remember the first inarc each state had to start with. We rely on
     * the fact that newarc() and friends put new arcs on the front of their
     * to-states' inchains, and that this phase never deletes arcs, so that
     * the original arcs must be the last arcs in their to-states' inchains.
     *
     * So the process here is that, for each state in the NFA, we gather up
     * all non-EMPTY inarcs of states that can reach the target state via
     * EMPTY arcs. We then sort, de-duplicate, and merge these arcs into the
     * target state's inchain. (We can safely use sort-merge for this as long
     * as we update each state's original-arcs pointer after we add arcs to
     * it; the sort step of mergeins probably changed the order of the old
     * arcs.)
     *
     * Another refinement worth making is that, because we only add non-EMPTY
     * arcs during this phase, and all added arcs have the same from-state as
     * the non-EMPTY arc they were cloned from, we know ahead of time that any
     * states having only EMPTY outarcs will be useless for lack of outarcs
     * after we drop the EMPTY arcs. (They cannot gain non-EMPTY outarcs if
     * they had none to start with.) So we need not bother to update the
     * inchains of such states at all.
     */

    /* Remember the states' first original inarcs */
    /* ... and while at it, count how many old inarcs there are altogether */
    inarcsorig = (struct arc **) MALLOC(nfa->nstates * sizeof(struct arc *));
    if (inarcsorig == NULL)
    {
        NERR(REG_ESPACE);
        return;
    }
    totalinarcs = 0;
    for (s = nfa->states; s != NULL; s = s->next)
    {
        inarcsorig[s->no] = s->ins;
        totalinarcs += s->nins;
    }

    /*
     * Create a workspace for accumulating the inarcs to be added to the
     * current target state. totalinarcs is probably a considerable
     * overestimate of the space needed, but the NFA is unlikely to be large
     * enough at this point to make it worth being smarter.
     */
    arcarray = (struct arc **) MALLOC(totalinarcs * sizeof(struct arc *));
    if (arcarray == NULL)
    {
        NERR(REG_ESPACE);
        FREE(inarcsorig);
        return;
    }

    /* And iterate over the target states */
    for (s = nfa->states; s != NULL && !NISERR(); s = s->next)
    {
        /* Ignore target states without non-EMPTY outarcs, per note above */
        if (!s->flag && !hasnonemptyout(s))
            continue;

        /* Find predecessor states and accumulate their original inarcs */
        arccount = 0;
        for (s2 = emptyreachable(nfa, s, s, inarcsorig); s2 != s; s2 = nexts)
        {
            /* Add s2's original inarcs to arcarray[], but ignore empties */
            for (a = inarcsorig[s2->no]; a != NULL; a = a->inchain)
            {
                if (a->type != EMPTY)
                    arcarray[arccount++] = a;
            }

            /* Reset the tmp fields as we walk back */
            nexts = s2->tmp;
            s2->tmp = NULL;
        }
        s->tmp = NULL;
        assert(arccount <= totalinarcs);

        /* Remember how many original inarcs this state has */
        prevnins = s->nins;

        /* Add non-duplicate inarcs to target state */
        mergeins(nfa, s, arcarray, arccount);

        /* Now we must update the state's inarcsorig pointer */
        /* (mergeins prepends new arcs, so skip past the ones just added) */
        nskip = s->nins - prevnins;
        a = s->ins;
        while (nskip-- > 0)
            a = a->inchain;
        inarcsorig[s->no] = a;
    }

    FREE(arcarray);
    FREE(inarcsorig);

    if (NISERR())
        return;

    /*
     * Now remove all the EMPTY arcs, since we don't need them anymore.
     */
    for (s = nfa->states; s != NULL; s = s->next)
    {
        for (a = s->outs; a != NULL; a = nexta)
        {
            nexta = a->outchain;
            if (a->type == EMPTY)
                freearc(nfa, a);
        }
    }

    /*
     * And remove any states that have become useless. (This cleanup is not
     * very thorough, and would be even less so if we tried to combine it with
     * the previous step; but cleanup() will take care of anything we miss.)
     */
    for (s = nfa->states; s != NULL; s = nexts)
    {
        nexts = s->next;
        if ((s->nins == 0 || s->nouts == 0) && !s->flag)
            dropstate(nfa, s);
    }

    if (f != NULL)
        dumpnfa(nfa, f);
}
/*
 * emptyreachable - recursively find all states that can reach s by EMPTY arcs
 *
 * Returns the last state discovered.  Each discovered state's tmp field
 * points at the state discovered just before it, forming a chain that leads
 * back to s itself; so the caller can visit every found state without
 * rescanning the whole NFA.
 *
 * We consult the caller-supplied inarcsorig[] array (maintained by
 * fixempties()) instead of each state's live inchain, which lets us skip
 * arcs added during this phase — those are never EMPTY anyway.
 *
 * Recursion depth is bounded by the longest loop-free EMPTY-arc chain,
 * which can be as large as the NFA itself, so we guard the stack.
 */
static struct state *
emptyreachable(struct nfa *nfa,
               struct state *s,
               struct state *lastfound,
               struct arc **inarcsorig)
{
    struct arc *inarc;

    /* Recursive, so protect against running the machine stack out */
    if (STACK_TOO_DEEP(nfa->v->re))
    {
        NERR(REG_ETOOBIG);
        return lastfound;
    }

    /* Thread s onto the front of the found-states chain */
    s->tmp = lastfound;
    lastfound = s;

    /* Pursue any not-yet-visited predecessors linked by EMPTY arcs */
    for (inarc = inarcsorig[s->no]; inarc != NULL; inarc = inarc->inchain)
    {
        if (inarc->type != EMPTY)
            continue;
        if (inarc->from->tmp != NULL)
            continue;           /* already on the chain */
        lastfound = emptyreachable(nfa, inarc->from, lastfound, inarcsorig);
    }
    return lastfound;
}
/*
 * isconstraintarc - detect whether an arc is of a constraint type
 *
 * Constraint arcs are the zero-width types: the '^' and '$' anchors,
 * BEHIND/AHEAD color constraints, and LACON lookaround constraints.
 */
static inline int
isconstraintarc(struct arc *a)
{
    int at = a->type;

    return (at == '^' ||
            at == '$' ||
            at == BEHIND ||
            at == AHEAD ||
            at == LACON) ? 1 : 0;
}
/*
 * hasconstraintout - does state have a constraint out arc?
 *
 * Scans the state's outchain and reports whether any arc on it is of a
 * constraint type (per isconstraintarc()).
 */
static int
hasconstraintout(struct state *s)
{
    struct arc *outarc = s->outs;

    while (outarc != NULL)
    {
        if (isconstraintarc(outarc))
            return 1;
        outarc = outarc->outchain;
    }
    return 0;
}
/*
 * fixconstraintloops - get rid of loops containing only constraint arcs
 *
 * A loop of states that contains only constraint arcs is useless, since
 * passing around the loop represents no forward progress. Moreover, it
 * would cause infinite looping in pullback/pushfwd, so we need to get rid
 * of such loops before doing that.
 */
static void
fixconstraintloops(struct nfa *nfa,
                   FILE *f)     /* for debug output; NULL none */
{
    struct state *s;
    struct state *nexts;
    struct arc *a;
    struct arc *nexta;
    int hasconstraints;         /* any constraint arcs left after 1st pass? */

    /*
     * In the trivial case of a state that loops to itself, we can just drop
     * the constraint arc altogether. This is worth special-casing because
     * such loops are far more common than loops containing multiple states.
     * While we're at it, note whether any constraint arcs survive.
     */
    hasconstraints = 0;
    for (s = nfa->states; s != NULL && !NISERR(); s = nexts)
    {
        nexts = s->next;
        /* while we're at it, ensure tmp fields are clear for next step */
        assert(s->tmp == NULL);
        for (a = s->outs; a != NULL && !NISERR(); a = nexta)
        {
            nexta = a->outchain;
            if (isconstraintarc(a))
            {
                if (a->to == s)
                    freearc(nfa, a);    /* self-loop: just delete it */
                else
                    hasconstraints = 1;
            }
        }
        /* If we removed all the outarcs, the state is useless. */
        if (s->nouts == 0 && !s->flag)
            dropstate(nfa, s);
    }

    /* Nothing to do if no remaining constraint arcs */
    if (NISERR() || !hasconstraints)
        return;

    /*
     * Starting from each remaining NFA state, search outwards for a
     * constraint loop. If we find a loop, break the loop, then start the
     * search over. (We could possibly retain some state from the first scan,
     * but it would complicate things greatly, and multi-state constraint
     * loops are rare enough that it's not worth optimizing the case.)
     */
restart:
    for (s = nfa->states; s != NULL && !NISERR(); s = s->next)
    {
        if (findconstraintloop(nfa, s))
            goto restart;       /* graph changed; re-scan from scratch */
    }

    if (NISERR())
        return;

    /*
     * Now remove any states that have become useless. (This cleanup is not
     * very thorough, and would be even less so if we tried to combine it with
     * the previous step; but cleanup() will take care of anything we miss.)
     *
     * Because findconstraintloop intentionally doesn't reset all tmp fields,
     * we have to clear them after it's done. This is a convenient place to
     * do that, too.
     */
    for (s = nfa->states; s != NULL; s = nexts)
    {
        nexts = s->next;
        s->tmp = NULL;
        if ((s->nins == 0 || s->nouts == 0) && !s->flag)
            dropstate(nfa, s);
    }

    if (f != NULL)
        dumpnfa(nfa, f);
}
/*
 * findconstraintloop - recursively find a loop of constraint arcs
 *
 * If we find a loop, break it by calling breakconstraintloop(), then
 * return 1; otherwise return 0.
 *
 * State tmp fields are guaranteed all NULL on a success return, because
 * breakconstraintloop does that. After a failure return, any state that
 * is known not to be part of a loop is marked with s->tmp == s; this allows
 * us not to have to re-prove that fact on later calls. (This convention is
 * workable because we already eliminated single-state loops.)
 *
 * Note that the found loop doesn't necessarily include the first state we
 * are called on. Any loop reachable from that state will do.
 *
 * The maximum recursion depth here is one more than the length of the longest
 * loop-free chain of constraint arcs, which is surely no more than the size
 * of the NFA ... but that could still be enough to cause trouble.
 */
static int
findconstraintloop(struct nfa *nfa, struct state *s)
{
    struct arc *a;

    /* Since this is recursive, it could be driven to stack overflow */
    if (STACK_TOO_DEEP(nfa->v->re))
    {
        NERR(REG_ETOOBIG);
        return 1;               /* to exit as quickly as possible */
    }

    if (s->tmp != NULL)
    {
        /* Already proven uninteresting? */
        if (s->tmp == s)
            return 0;
        /* Found a loop involving s */
        /* (tmp links from here trace the loop: each member points at its
         * loop successor, as set up by the recursion below) */
        breakconstraintloop(nfa, s);
        /* The tmp fields have been cleaned up by breakconstraintloop */
        return 1;
    }
    for (a = s->outs; a != NULL; a = a->outchain)
    {
        if (isconstraintarc(a))
        {
            struct state *sto = a->to;

            assert(sto != s);   /* self-loops were removed earlier */
            s->tmp = sto;       /* record tentative loop-successor link */
            if (findconstraintloop(nfa, sto))
                return 1;
        }
    }

    /*
     * If we get here, no constraint loop exists leading out from s. Mark it
     * with s->tmp == s so we need not rediscover that fact again later.
     */
    s->tmp = s;
    return 0;
}
/*
 * breakconstraintloop - break a loop of constraint arcs
 *
 * sinitial is any one member state of the loop. Each loop member's tmp
 * field links to its successor within the loop. (Note that this function
 * will reset all the tmp fields to NULL.)
 *
 * We can break the loop by, for any one state S1 in the loop, cloning its
 * loop successor state S2 (and possibly following states), and then moving
 * all S1->S2 constraint arcs to point to the cloned S2. The cloned S2 should
 * copy any non-constraint outarcs of S2. Constraint outarcs should be
 * dropped if they point back to S1, else they need to be copied as arcs to
 * similarly cloned states S3, S4, etc. In general, each cloned state copies
 * non-constraint outarcs, drops constraint outarcs that would lead to itself
 * or any earlier cloned state, and sends other constraint outarcs to newly
 * cloned states. No cloned state will have any inarcs that aren't constraint
 * arcs or do not lead from S1 or earlier-cloned states. It's okay to drop
 * constraint back-arcs since they would not take us to any state we've not
 * already been in; therefore, no new constraint loop is created. In this way
 * we generate a modified NFA that can still represent every useful state
 * sequence, but not sequences that represent state loops with no consumption
 * of input data. Note that the set of cloned states will certainly include
 * all of the loop member states other than S1, and it may also include
 * non-loop states that are reachable from S2 via constraint arcs. This is
 * important because there is no guarantee that findconstraintloop found a
 * maximal loop (and searching for one would be NP-hard, so don't try).
 * Frequently the "non-loop states" are actually part of a larger loop that
 * we didn't notice, and indeed there may be several overlapping loops.
 * This technique ensures convergence in such cases, while considering only
 * the originally-found loop does not.
 *
 * If there is only one S1->S2 constraint arc, then that constraint is
 * certainly satisfied when we enter any of the clone states. This means that
 * in the common case where many of the constraint arcs are identically
 * labeled, we can merge together clone states linked by a similarly-labeled
 * constraint: if we can get to the first one we can certainly get to the
 * second, so there's no need to distinguish. This greatly reduces the number
 * of new states needed, so we preferentially break the given loop at a state
 * pair where this is true.
 *
 * Furthermore, it's fairly common to find that a cloned successor state has
 * no outarcs, especially if we're a bit aggressive about removing unnecessary
 * outarcs. If that happens, then there is simply not any interesting state
 * that can be reached through the predecessor's loop arcs, which means we can
 * break the loop just by removing those loop arcs, with no new states added.
 */
static void
breakconstraintloop(struct nfa *nfa, struct state *sinitial)
{
    struct state *s;
    struct state *shead;        /* S1: break the loop just after this state */
    struct state *stail;        /* S2: loop successor of shead */
    struct state *sclone;       /* clone of stail */
    struct state *nexts;
    struct arc *refarc;         /* lone S1->S2 constraint arc, if any */
    struct arc *a;
    struct arc *nexta;

    /*
     * Start by identifying which loop step we want to break at.
     * Preferentially this is one with only one constraint arc. (XXX are
     * there any other secondary heuristics we want to use here?) Set refarc
     * to point to the selected lone constraint arc, if there is one.
     */
    refarc = NULL;
    s = sinitial;
    do
    {
        nexts = s->tmp;
        assert(nexts != s);     /* should not see any one-element loops */
        if (refarc == NULL)
        {
            int narcs = 0;

            for (a = s->outs; a != NULL; a = a->outchain)
            {
                if (a->to == nexts && isconstraintarc(a))
                {
                    refarc = a;
                    narcs++;
                }
            }
            assert(narcs > 0);
            if (narcs > 1)
                refarc = NULL;  /* multiple constraint arcs here, no good */
        }
        s = nexts;
    } while (s != sinitial);

    if (refarc)
    {
        /* break at the refarc */
        shead = refarc->from;
        stail = refarc->to;
        assert(stail == shead->tmp);
    }
    else
    {
        /* for lack of a better idea, break after sinitial */
        shead = sinitial;
        stail = sinitial->tmp;
    }

    /*
     * Reset the tmp fields so that we can use them for local storage in
     * clonesuccessorstates. (findconstraintloop won't mind, since it's just
     * going to abandon its search anyway.)
     */
    for (s = nfa->states; s != NULL; s = s->next)
        s->tmp = NULL;

    /*
     * Recursively build clone state(s) as needed.
     */
    sclone = newstate(nfa);
    if (sclone == NULL)
    {
        assert(NISERR());
        return;
    }
    clonesuccessorstates(nfa, stail, sclone, shead, refarc,
                         NULL, NULL, nfa->nstates);
    if (NISERR())
        return;

    /*
     * It's possible that sclone has no outarcs at all, in which case it's
     * useless. (We don't try extremely hard to get rid of useless states
     * here, but this is an easy and fairly common case.)
     */
    if (sclone->nouts == 0)
    {
        freestate(nfa, sclone);
        sclone = NULL;
    }

    /*
     * Move shead's constraint-loop arcs to point to sclone, or just drop them
     * if we discovered we don't need sclone.
     */
    for (a = shead->outs; a != NULL; a = nexta)
    {
        nexta = a->outchain;
        if (a->to == stail && isconstraintarc(a))
        {
            if (sclone)
                cparc(nfa, a, shead, sclone);
            freearc(nfa, a);
            if (NISERR())
                break;
        }
    }
}
/*
 * clonesuccessorstates - create a tree of constraint-arc successor states
 *
 * ssource is the state to be cloned, and sclone is the state to copy its
 * outarcs into. sclone's inarcs, if any, should already be set up.
 *
 * spredecessor is the original predecessor state that we are trying to build
 * successors for (it may not be the immediate predecessor of ssource).
 * refarc, if not NULL, is the original constraint arc that is known to have
 * been traversed out of spredecessor to reach the successor(s).
 *
 * For each cloned successor state, we transiently create a "donemap" that is
 * a boolean array showing which source states we've already visited for this
 * clone state. This prevents infinite recursion as well as useless repeat
 * visits to the same state subtree (which can add up fast, since typical NFAs
 * have multiple redundant arc pathways). Each donemap is a char array
 * indexed by state number. The donemaps are all of the same size "nstates",
 * which is nfa->nstates as of the start of the recursion. This is enough to
 * have entries for all pre-existing states, but *not* entries for clone
 * states created during the recursion. That's okay since we have no need to
 * mark those.
 *
 * curdonemap is NULL when recursing to a new sclone state, or sclone's
 * donemap when we are recursing without having created a new state (which we
 * do when we decide we can merge a successor state into the current clone
 * state). outerdonemap is NULL at the top level and otherwise the parent
 * clone state's donemap.
 *
 * The successor states we create and fill here form a strict tree structure,
 * with each state having exactly one predecessor, except that the toplevel
 * state has no inarcs as yet (breakconstraintloop will add its inarcs from
 * spredecessor after we're done). Thus, we can examine sclone's inarcs back
 * to the root, plus refarc if any, to identify the set of constraints already
 * known valid at the current point. This allows us to avoid generating extra
 * successor states.
 */
static void
clonesuccessorstates(struct nfa *nfa,
                     struct state *ssource,
                     struct state *sclone,
                     struct state *spredecessor,
                     struct arc *refarc,
                     char *curdonemap,
                     char *outerdonemap,
                     int nstates)
{
    char *donemap;
    struct arc *a;

    /* Since this is recursive, it could be driven to stack overflow */
    if (STACK_TOO_DEEP(nfa->v->re))
    {
        NERR(REG_ETOOBIG);
        return;
    }

    /* If this state hasn't already got a donemap, create one */
    donemap = curdonemap;
    if (donemap == NULL)
    {
        donemap = (char *) MALLOC(nstates * sizeof(char));
        if (donemap == NULL)
        {
            NERR(REG_ESPACE);
            return;
        }

        if (outerdonemap != NULL)
        {
            /*
             * Not at outermost recursion level, so copy the outer level's
             * donemap; this ensures that we see states in process of being
             * visited at outer levels, or already merged into predecessor
             * states, as ones we shouldn't traverse back to.
             */
            memcpy(donemap, outerdonemap, nstates * sizeof(char));
        }
        else
        {
            /* At outermost level, only spredecessor is off-limits */
            memset(donemap, 0, nstates * sizeof(char));
            assert(spredecessor->no < nstates);
            donemap[spredecessor->no] = 1;
        }
    }

    /* Mark ssource as visited in the donemap */
    assert(ssource->no < nstates);
    assert(donemap[ssource->no] == 0);
    donemap[ssource->no] = 1;

    /*
     * We proceed by first cloning all of ssource's outarcs, creating new
     * clone states as needed but not doing more with them than that. Then in
     * a second pass, recurse to process the child clone states. This allows
     * us to have only one child clone state per reachable source state, even
     * when there are multiple outarcs leading to the same state. Also, when
     * we do visit a child state, its set of inarcs is known exactly, which
     * makes it safe to apply the constraint-is-already-checked optimization.
     * Also, this ensures that we've merged all the states we can into the
     * current clone before we recurse to any children, thus possibly saving
     * them from making extra images of those states.
     *
     * While this function runs, child clone states of the current state are
     * marked by setting their tmp fields to point to the original state they
     * were cloned from. This makes it possible to detect multiple outarcs
     * leading to the same state, and also makes it easy to distinguish clone
     * states from original states (which will have tmp == NULL).
     */
    for (a = ssource->outs; a != NULL && !NISERR(); a = a->outchain)
    {
        struct state *sto = a->to;

        /*
         * We do not consider cloning successor states that have no constraint
         * outarcs; just link to them as-is. They cannot be part of a
         * constraint loop so there is no need to make copies. In particular,
         * this rule keeps us from trying to clone the post state, which would
         * be a bad idea.
         */
        if (isconstraintarc(a) && hasconstraintout(sto))
        {
            struct state *prevclone;
            int canmerge;
            struct arc *a2;

            /*
             * Back-link constraint arcs must not be followed. Nor is there a
             * need to revisit states previously merged into this clone.
             */
            assert(sto->no < nstates);
            if (donemap[sto->no] != 0)
                continue;

            /*
             * Check whether we already have a child clone state for this
             * source state.
             */
            prevclone = NULL;
            for (a2 = sclone->outs; a2 != NULL; a2 = a2->outchain)
            {
                if (a2->to->tmp == sto)
                {
                    prevclone = a2->to;
                    break;
                }
            }

            /*
             * If this arc is labeled the same as refarc, or the same as any
             * arc we must have traversed to get to sclone, then no additional
             * constraints need to be met to get to sto, so we should just
             * merge its outarcs into sclone.
             */
            if (refarc && a->type == refarc->type && a->co == refarc->co)
                canmerge = 1;
            else
            {
                struct state *s;

                canmerge = 0;
                /* walk the clone tree back toward the root via inarcs */
                for (s = sclone; s->ins; s = s->ins->from)
                {
                    if (s->nins == 1 &&
                        a->type == s->ins->type && a->co == s->ins->co)
                    {
                        canmerge = 1;
                        break;
                    }
                }
            }

            if (canmerge)
            {
                /*
                 * We can merge into sclone. If we previously made a child
                 * clone state, drop it; there's no need to visit it. (This
                 * can happen if ssource has multiple pathways to sto, and we
                 * only just now found one that is provably a no-op.)
                 */
                if (prevclone)
                    dropstate(nfa, prevclone);  /* kills our outarc, too */

                /* Recurse to merge sto's outarcs into sclone */
                clonesuccessorstates(nfa,
                                     sto,
                                     sclone,
                                     spredecessor,
                                     refarc,
                                     donemap,
                                     outerdonemap,
                                     nstates);
                /* sto should now be marked as previously visited */
                assert(NISERR() || donemap[sto->no] == 1);
            }
            else if (prevclone)
            {
                /*
                 * We already have a clone state for this successor, so just
                 * make another arc to it.
                 */
                cparc(nfa, a, sclone, prevclone);
            }
            else
            {
                /*
                 * We need to create a new successor clone state.
                 */
                struct state *stoclone;

                stoclone = newstate(nfa);
                if (stoclone == NULL)
                {
                    assert(NISERR());
                    break;
                }
                /* Mark it as to what it's a clone of */
                stoclone->tmp = sto;
                /* ... and add the outarc leading to it */
                cparc(nfa, a, sclone, stoclone);
            }
        }
        else
        {
            /*
             * Non-constraint outarcs just get copied to sclone, as do outarcs
             * leading to states with no constraint outarc.
             */
            cparc(nfa, a, sclone, sto);
        }
    }

    /*
     * If we are at outer level for this clone state, recurse to all its child
     * clone states, clearing their tmp fields as we go. (If we're not
     * outermost for sclone, leave this to be done by the outer call level.)
     * Note that if we have multiple outarcs leading to the same clone state,
     * it will only be recursed-to once.
     */
    if (curdonemap == NULL)
    {
        for (a = sclone->outs; a != NULL && !NISERR(); a = a->outchain)
        {
            struct state *stoclone = a->to;
            struct state *sto = stoclone->tmp;

            if (sto != NULL)
            {
                stoclone->tmp = NULL;
                clonesuccessorstates(nfa,
                                     sto,
                                     stoclone,
                                     spredecessor,
                                     refarc,
                                     NULL,
                                     donemap,
                                     nstates);
            }
        }

        /* Don't forget to free sclone's donemap when done with it */
        FREE(donemap);
    }
}
/*
 * removecantmatch - remove CANTMATCH arcs, which are no longer useful
 * once we are done with the parsing phase. (We need them only to
 * preserve connectedness of NFA subgraphs during parsing.)
 */
static void
removecantmatch(struct nfa *nfa)
{
    struct state *s;

    for (s = nfa->states; s != NULL; s = s->next)
    {
        struct arc *a = s->outs;

        while (a != NULL)
        {
            /* grab the chain link first; freearc() unlinks a */
            struct arc *nexta = a->outchain;

            if (a->type == CANTMATCH)
            {
                freearc(nfa, a);
                if (NISERR())
                    return;
            }
            a = nexta;
        }
    }
}
/*
 * cleanup - clean up NFA after optimizations
 *
 * Discards states that are unreachable from pre or cannot reach post,
 * then renumbers the survivors consecutively from zero.
 */
static void
cleanup(struct nfa *nfa)
{
    struct state *st;
    struct state *nextst;
    int newno;

    if (NISERR())
        return;

    /*
     * Clear out unreachable or dead-end states: first mark everything
     * reachable from pre (using pre itself as the mark), then re-mark, with
     * post, those marked states that can also reach post.
     */
    markreachable(nfa, nfa->pre, (struct state *) NULL, nfa->pre);
    markcanreach(nfa, nfa->post, nfa->pre, nfa->post);
    for (st = nfa->states; st != NULL && !NISERR(); st = nextst)
    {
        nextst = st->next;
        if (st->tmp != nfa->post && !st->flag)
            dropstate(nfa, st);
    }
    assert(NISERR() || nfa->post->nins == 0 || nfa->post->tmp == nfa->post);
    cleartraverse(nfa, nfa->pre);
    assert(NISERR() || nfa->post->nins == 0 || nfa->post->tmp == NULL);
    /* the nins==0 (final unreachable) case will be caught later */

    /* Renumber the surviving states consecutively */
    newno = 0;
    for (st = nfa->states; st != NULL; st = st->next)
        st->no = newno++;
    nfa->nstates = newno;
}
/*
 * markreachable - recursive marking of reachable states
 *
 * Follows out-arcs from s, re-marking every state whose tmp field
 * currently equals "okay" with "mark".  States carrying any other tmp
 * value act as traversal barriers.
 */
static void
markreachable(struct nfa *nfa,
              struct state *s,
              struct state *okay,   /* consider only states with this mark */
              struct state *mark)   /* the value to mark with */
{
    struct arc *outarc;

    /* Recursive, so protect against machine-stack exhaustion */
    if (STACK_TOO_DEEP(nfa->v->re))
    {
        NERR(REG_ETOOBIG);
        return;
    }

    if (s->tmp != okay)
        return;                 /* wrong prior mark — don't traverse */
    s->tmp = mark;

    for (outarc = s->outs; outarc != NULL; outarc = outarc->outchain)
        markreachable(nfa, outarc->to, okay, mark);
}
/*
 * markcanreach - recursive marking of states which can reach here
 *
 * Mirror image of markreachable(): walks in-arcs backwards from s,
 * re-marking every state whose tmp field currently equals "okay" with
 * "mark".  States with any other tmp value stop the traversal.
 */
static void
markcanreach(struct nfa *nfa,
             struct state *s,
             struct state *okay,    /* consider only states with this mark */
             struct state *mark)    /* the value to mark with */
{
    struct arc *inarc;

    /* Recursive, so protect against machine-stack exhaustion */
    if (STACK_TOO_DEEP(nfa->v->re))
    {
        NERR(REG_ETOOBIG);
        return;
    }

    if (s->tmp != okay)
        return;                 /* wrong prior mark — don't traverse */
    s->tmp = mark;

    for (inarc = s->ins; inarc != NULL; inarc = inarc->inchain)
        markcanreach(nfa, inarc->from, okay, mark);
}
/*
 * analyze - ascertain potentially-useful facts about an optimized NFA
 *
 * Returns re_info bits to be ORed into the regex's flags.
 */
static long
analyze(struct nfa *nfa)
{
    struct arc *outarc;
    struct arc *outarc2;

    if (NISERR())
        return 0;

    /* No way out of the pre state at all means nothing can ever match */
    if (nfa->pre->outs == NULL)
        return REG_UIMPOSSIBLE;

    /* Detect whether NFA matches all strings (possibly with length bounds) */
    checkmatchall(nfa);

    /*
     * A zero-length match is possible if post is exactly two arcs away
     * from pre (one hop out of pre, one hop into post).
     */
    for (outarc = nfa->pre->outs; outarc != NULL; outarc = outarc->outchain)
    {
        for (outarc2 = outarc->to->outs;
             outarc2 != NULL;
             outarc2 = outarc2->outchain)
        {
            if (outarc2->to == nfa->post)
                return REG_UEMPTYMATCH;
        }
    }
    return 0;
}
/*
* checkmatchall - does the NFA represent no more than a string length test?
*
* If so, set nfa->minmatchall and nfa->maxmatchall correctly (they are -1
* to begin with) and set the MATCHALL bit in nfa->flags.
*
* To succeed, we require all arcs to be PLAIN RAINBOW arcs, except for those
* for pseudocolors (i.e., BOS/BOL/EOS/EOL). We must be able to reach the
* post state via RAINBOW arcs, and if there are any loops in the graph, they
* must be loop-to-self arcs, ensuring that each loop iteration consumes
* exactly one character. (Longer loops are problematic because they create
* non-consecutive possible match lengths; we have no good way to represent
* that situation for lengths beyond the DUPINF limit.)
*
* Pseudocolor arcs complicate things a little. We know that they can only
* appear as pre-state outarcs (for BOS/BOL) or post-state inarcs (for
* EOS/EOL). There, they must exactly replicate the parallel RAINBOW arcs,
* e.g. if the pre state has one RAINBOW outarc to state 2, it must have BOS
* and BOL outarcs to state 2, and no others. Missing or extra pseudocolor
* arcs can occur, meaning that the NFA involves some constraint on the
* adjacent characters, which makes it not a matchall NFA.
*/
static void
checkmatchall(struct nfa *nfa)
{
	bool	  **haspaths;		/* per-state path-length maps, indexed by
								 * state number; entries owned by this fn */
	struct state *s;
	int			i;

	/*
	 * If there are too many states, don't bother trying to detect matchall.
	 * This limit serves to bound the time and memory we could consume below.
	 * Note that even if the graph is all-RAINBOW, if there are significantly
	 * more than DUPINF states then it's likely that there are paths of length
	 * more than DUPINF, which would force us to fail anyhow.  In practice,
	 * plausible ways of writing a matchall regex with maximum finite path
	 * length K tend not to have very many more than K states.
	 */
	if (nfa->nstates > DUPINF * 2)
		return;

	/*
	 * First, scan all the states to verify that only RAINBOW arcs appear,
	 * plus pseudocolor arcs adjacent to the pre and post states.  This lets
	 * us quickly eliminate most cases that aren't matchall NFAs.
	 */
	for (s = nfa->states; s != NULL; s = s->next)
	{
		struct arc *a;

		for (a = s->outs; a != NULL; a = a->outchain)
		{
			if (a->type != PLAIN)
				return;			/* any LACONs make it non-matchall */
			if (a->co != RAINBOW)
			{
				if (nfa->cm->cd[a->co].flags & PSEUDO)
				{
					/*
					 * Pseudocolor arc: verify it's in a valid place (this
					 * seems quite unlikely to fail, but let's be sure).
					 */
					if (s == nfa->pre &&
						(a->co == nfa->bos[0] || a->co == nfa->bos[1]))
						 /* okay BOS/BOL arc */ ;
					else if (a->to == nfa->post &&
							 (a->co == nfa->eos[0] || a->co == nfa->eos[1]))
						 /* okay EOS/EOL arc */ ;
					else
						return; /* unexpected pseudocolor arc */
					/* We'll check these arcs some more below. */
				}
				else
					return;		/* any other color makes it non-matchall */
			}
		}
		/* Also, assert that the tmp fields are available for use. */
		assert(s->tmp == NULL);
	}

	/*
	 * The next cheapest check we can make is to verify that the BOS/BOL
	 * outarcs of the pre state reach the same states as its RAINBOW outarcs.
	 * If they don't, the NFA expresses some constraints on the character
	 * before the matched string, making it non-matchall.  Likewise, the
	 * EOS/EOL inarcs of the post state must match its RAINBOW inarcs.
	 */
	if (!check_out_colors_match(nfa->pre, RAINBOW, nfa->bos[0]) ||
		!check_out_colors_match(nfa->pre, RAINBOW, nfa->bos[1]) ||
		!check_in_colors_match(nfa->post, RAINBOW, nfa->eos[0]) ||
		!check_in_colors_match(nfa->post, RAINBOW, nfa->eos[1]))
		return;

	/*
	 * Initialize an array of path-length arrays, in which
	 * checkmatchall_recurse will return per-state results.  This lets us
	 * memo-ize the recursive search and avoid exponential time consumption.
	 */
	haspaths = (bool **) MALLOC(nfa->nstates * sizeof(bool *));
	if (haspaths == NULL)
		return;					/* fail quietly */
	memset(haspaths, 0, nfa->nstates * sizeof(bool *));

	/*
	 * Recursively search the graph for all-RAINBOW paths to the "post" state,
	 * starting at the "pre" state, and computing the lengths of the paths.
	 * (Given the preceding checks, there should be at least one such path.
	 * However we could get back a false result anyway, in case there are
	 * multi-state loops, paths exceeding DUPINF+1 length, or non-algorithmic
	 * failures such as ENOMEM.)
	 */
	if (checkmatchall_recurse(nfa, nfa->pre, haspaths))
	{
		/* The useful result is the path length array for the pre state */
		bool	   *haspath = haspaths[nfa->pre->no];
		int			minmatch,
					maxmatch,
					morematch;

		assert(haspath != NULL);

		/*
		 * haspath[] now represents the set of possible path lengths; but we
		 * want to reduce that to a min and max value, because it doesn't seem
		 * worth complicating regexec.c to deal with nonconsecutive possible
		 * match lengths.  Find min and max of first run of lengths, then
		 * verify there are no nonconsecutive lengths.
		 */
		/* Scan up to the first possible length; that's the minimum. */
		for (minmatch = 0; minmatch <= DUPINF + 1; minmatch++)
		{
			if (haspath[minmatch])
				break;
		}
		assert(minmatch <= DUPINF + 1); /* else checkmatchall_recurse lied */
		/* Extend through the first consecutive run; its end is the maximum. */
		for (maxmatch = minmatch; maxmatch < DUPINF + 1; maxmatch++)
		{
			if (!haspath[maxmatch + 1])
				break;
		}
		/* Any possible length beyond the run would be nonconsecutive. */
		for (morematch = maxmatch + 1; morematch <= DUPINF + 1; morematch++)
		{
			if (haspath[morematch])
			{
				haspath = NULL; /* fail, there are nonconsecutive lengths */
				break;
			}
		}
		if (haspath != NULL)
		{
			/*
			 * Success, so record the info.  Here we have a fine point: the
			 * path length from the pre state includes the pre-to-initial
			 * transition, so it's one more than the actually matched string
			 * length.  (We avoided counting the final-to-post transition
			 * within checkmatchall_recurse, but not this one.)  This is why
			 * checkmatchall_recurse allows one more level of path length than
			 * might seem necessary.  This decrement also takes care of
			 * converting checkmatchall_recurse's definition of "infinity" as
			 * "DUPINF+1" to our normal representation as "DUPINF".
			 */
			assert(minmatch > 0);	/* else pre and post states were adjacent */
			nfa->minmatchall = minmatch - 1;
			nfa->maxmatchall = maxmatch - 1;
			nfa->flags |= MATCHALL;
		}
	}

	/* Clean up: free all per-state arrays as well as the pointer array */
	for (i = 0; i < nfa->nstates; i++)
	{
		if (haspaths[i] != NULL)
			FREE(haspaths[i]);
	}
	FREE(haspaths);
}
/*
* checkmatchall_recurse - recursive search for checkmatchall
*
* s is the state to be examined in this recursion level.
* haspaths[] is an array of per-state exit path length arrays.
*
* We return true if the search was performed successfully, false if
* we had to fail because of multi-state loops or other internal reasons.
* (Because "dead" states that can't reach the post state have been
* eliminated, and we already verified that only RAINBOW and matching
* pseudocolor arcs exist, every state should have RAINBOW path(s) to
* the post state. Hence we take a false result from recursive calls
* as meaning that we'd better fail altogether, not just that that
* particular state can't reach the post state.)
*
* On success, we store a malloc'd result array in haspaths[s->no],
* showing the possible path lengths from s to the post state.
* Each state's haspath[] array is of length DUPINF+2. The entries from
* k = 0 to DUPINF are true if there is an all-RAINBOW path of length k
* from this state to the string end. haspath[DUPINF+1] is true if all
* path lengths >= DUPINF+1 are possible. (Situations that cannot be
* represented under these rules cause failure.)
*
* checkmatchall is responsible for eventually freeing the haspath[] arrays.
*/
static bool
checkmatchall_recurse(struct nfa *nfa, struct state *s, bool **haspaths)
{
	bool		result = false;
	bool		foundloop = false;
	bool	   *haspath;
	struct arc *a;

	/*
	 * Since this is recursive, it could be driven to stack overflow.  But we
	 * need not treat that as a hard failure; just deem the NFA non-matchall.
	 */
	if (STACK_TOO_DEEP(nfa->v->re))
		return false;

	/* In case the search takes a long time, check for cancel */
	INTERRUPT(nfa->v->re);

	/* Create a haspath array for this state */
	haspath = (bool *) MALLOC((DUPINF + 2) * sizeof(bool));
	if (haspath == NULL)
		return false;			/* again, treat as non-matchall */
	memset(haspath, 0, (DUPINF + 2) * sizeof(bool));

	/*
	 * Mark this state as being visited; a non-NULL tmp means "on the current
	 * DFS path", which is how multi-state cycles are detected below.
	 */
	assert(s->tmp == NULL);
	s->tmp = s;

	for (a = s->outs; a != NULL; a = a->outchain)
	{
		if (a->co != RAINBOW)
			continue;			/* ignore pseudocolor arcs */
		if (a->to == nfa->post)
		{
			/* We found an all-RAINBOW path to the post state */
			result = true;

			/*
			 * Mark this state as being zero steps away from the string end
			 * (the transition to the post state isn't counted).
			 */
			haspath[0] = true;
		}
		else if (a->to == s)
		{
			/* We found a cycle of length 1, which we'll deal with below. */
			foundloop = true;
		}
		else if (a->to->tmp != NULL)
		{
			/* It's busy, so we found a cycle of length > 1, so fail. */
			result = false;
			break;
		}
		else
		{
			/* Consider paths forward through this to-state. */
			bool	   *nexthaspath;
			int			i;

			/* If to-state was not already visited, recurse */
			if (haspaths[a->to->no] == NULL)
			{
				result = checkmatchall_recurse(nfa, a->to, haspaths);
				/* Fail if any recursive path fails */
				if (!result)
					break;
			}
			else
			{
				/* The previous visit must have found path(s) to the end */
				result = true;
			}
			assert(a->to->tmp == NULL);
			nexthaspath = haspaths[a->to->no];
			assert(nexthaspath != NULL);

			/*
			 * Now, for every path of length i from a->to to the string end,
			 * there is a path of length i + 1 from s to the string end.
			 */
			if (nexthaspath[DUPINF] != nexthaspath[DUPINF + 1])
			{
				/*
				 * a->to has a path of length exactly DUPINF, but not longer;
				 * or it has paths of all lengths > DUPINF but not one of
				 * exactly that length.  In either case, we cannot represent
				 * the possible path lengths from s correctly, so fail.
				 */
				result = false;
				break;
			}
			/* Merge knowledge of these path lengths into what we have */
			for (i = 0; i < DUPINF; i++)
				haspath[i + 1] |= nexthaspath[i];
			/* Infinity + 1 is still infinity */
			haspath[DUPINF + 1] |= nexthaspath[DUPINF + 1];
		}
	}

	if (result && foundloop)
	{
		/*
		 * If there is a length-1 loop at this state, then find the shortest
		 * known path length to the end.  The loop means that every larger
		 * path length is possible, too.  (It doesn't matter whether any of
		 * the longer lengths were already known possible.)
		 */
		int			i;

		for (i = 0; i <= DUPINF; i++)
		{
			if (haspath[i])
				break;
		}
		for (i++; i <= DUPINF + 1; i++)
			haspath[i] = true;
	}

	/* Report out the completed path length map (ownership passes to caller) */
	assert(s->no < nfa->nstates);
	assert(haspaths[s->no] == NULL);
	haspaths[s->no] = haspath;

	/* Mark state no longer busy */
	s->tmp = NULL;

	return result;
}
/*
* check_out_colors_match - subroutine for checkmatchall
*
* Check whether the set of states reachable from s by arcs of color co1
* is equivalent to the set reachable by arcs of color co2.
* checkmatchall already verified that all of the NFA's arcs are PLAIN,
* so we need not examine arc types here.
*/
static bool
check_out_colors_match(struct state *s, color co1, color co2)
{
	bool		ok = true;
	struct arc *arc;

	/*
	 * Linear-time set comparison, relying on the NFA containing no duplicate
	 * arcs.  Pass 1: flag (via the tmp field) every state reachable by a co1
	 * arc.  Pass 2: unflag each state reachable by a co2 arc; a co2 arc that
	 * lands on an unflagged state has no co1 counterpart.  Pass 3: a state
	 * still flagged means a co1 arc had no co2 counterpart; this pass also
	 * guarantees every tmp field ends up NULL again.
	 */
	for (arc = s->outs; arc != NULL; arc = arc->outchain)
	{
		if (arc->co == co1)
		{
			assert(arc->to->tmp == NULL);
			arc->to->tmp = arc->to;
		}
	}
	for (arc = s->outs; arc != NULL; arc = arc->outchain)
	{
		if (arc->co != co2)
			continue;
		if (arc->to->tmp == NULL)
			ok = false;			/* co2 arc lacking a co1 partner */
		else
			arc->to->tmp = NULL;
	}
	for (arc = s->outs; arc != NULL; arc = arc->outchain)
	{
		if (arc->co == co1 && arc->to->tmp != NULL)
		{
			ok = false;			/* co1 arc lacking a co2 partner */
			arc->to->tmp = NULL;
		}
	}
	return ok;
}
/*
* check_in_colors_match - subroutine for checkmatchall
*
* Check whether the set of states that can reach s by arcs of color co1
* is equivalent to the set that can reach s by arcs of color co2.
* checkmatchall already verified that all of the NFA's arcs are PLAIN,
* so we need not examine arc types here.
*/
static bool
check_in_colors_match(struct state *s, color co1, color co2)
{
	bool		ok = true;
	struct arc *arc;

	/*
	 * Same three-pass marking scheme as check_out_colors_match, but applied
	 * to the from-states of s' in-arcs: mark states reaching s via co1,
	 * unmark those reaching it via co2, then sweep for leftovers (restoring
	 * all tmp fields to NULL along the way).
	 */
	for (arc = s->ins; arc != NULL; arc = arc->inchain)
	{
		if (arc->co == co1)
		{
			assert(arc->from->tmp == NULL);
			arc->from->tmp = arc->from;
		}
	}
	for (arc = s->ins; arc != NULL; arc = arc->inchain)
	{
		if (arc->co != co2)
			continue;
		if (arc->from->tmp == NULL)
			ok = false;			/* co2 arc lacking a co1 partner */
		else
			arc->from->tmp = NULL;
	}
	for (arc = s->ins; arc != NULL; arc = arc->inchain)
	{
		if (arc->co == co1 && arc->from->tmp != NULL)
		{
			ok = false;			/* co1 arc lacking a co2 partner */
			arc->from->tmp = NULL;
		}
	}
	return ok;
}
/*
* compact - construct the compact representation of an NFA
*/
static void
compact(struct nfa *nfa,
		struct cnfa *cnfa)
{
	struct state *s;
	struct arc *a;
	size_t		nstates;
	size_t		narcs;
	struct carc *ca;
	struct carc *first;

	assert(!NISERR());

	/* First pass: count states and arcs to size the flat arrays. */
	nstates = 0;
	narcs = 0;
	for (s = nfa->states; s != NULL; s = s->next)
	{
		nstates++;
		narcs += s->nouts + 1;	/* need one extra for endmarker */
	}

	cnfa->stflags = (char *) MALLOC(nstates * sizeof(char));
	cnfa->states = (struct carc **) MALLOC(nstates * sizeof(struct carc *));
	cnfa->arcs = (struct carc *) MALLOC(narcs * sizeof(struct carc));
	if (cnfa->stflags == NULL || cnfa->states == NULL || cnfa->arcs == NULL)
	{
		/* Out of memory: release whatever did get allocated, report error. */
		if (cnfa->stflags != NULL)
			FREE(cnfa->stflags);
		if (cnfa->states != NULL)
			FREE(cnfa->states);
		if (cnfa->arcs != NULL)
			FREE(cnfa->arcs);
		NERR(REG_ESPACE);
		return;
	}
	/* Copy over the NFA's scalar properties. */
	cnfa->nstates = nstates;
	cnfa->pre = nfa->pre->no;
	cnfa->post = nfa->post->no;
	cnfa->bos[0] = nfa->bos[0];
	cnfa->bos[1] = nfa->bos[1];
	cnfa->eos[0] = nfa->eos[0];
	cnfa->eos[1] = nfa->eos[1];
	cnfa->ncolors = maxcolor(nfa->cm) + 1;
	cnfa->flags = nfa->flags;
	cnfa->minmatchall = nfa->minmatchall;
	cnfa->maxmatchall = nfa->maxmatchall;

	/*
	 * Second pass: emit each state's out-arcs consecutively into the single
	 * arcs array, recording in cnfa->states[] where each state's run begins.
	 */
	ca = cnfa->arcs;
	for (s = nfa->states; s != NULL; s = s->next)
	{
		assert((size_t) s->no < nstates);
		cnfa->stflags[s->no] = 0;
		cnfa->states[s->no] = ca;
		first = ca;
		for (a = s->outs; a != NULL; a = a->outchain)
			switch (a->type)
			{
				case PLAIN:
					ca->co = a->co;
					ca->to = a->to->no;
					ca++;
					break;
				case LACON:
					assert(s->no != cnfa->pre);
					assert(a->co >= 0);
					/* LACON arcs are encoded as color values >= ncolors */
					ca->co = (color) (cnfa->ncolors + a->co);
					ca->to = a->to->no;
					ca++;
					cnfa->flags |= HASLACONS;
					break;
				default:
					NERR(REG_ASSERT);
					return;
			}
		carcsort(first, ca - first);
		/* Terminate this state's arc run with a COLORLESS endmarker. */
		ca->co = COLORLESS;
		ca->to = 0;
		ca++;
	}
	assert(ca == &cnfa->arcs[narcs]);
	assert(cnfa->nstates != 0);

	/* mark no-progress states (the pre state and its direct successors) */
	for (a = nfa->pre->outs; a != NULL; a = a->outchain)
		cnfa->stflags[a->to->no] = CNFA_NOPROGRESS;
	cnfa->stflags[nfa->pre->no] = CNFA_NOPROGRESS;
}
/*
* carcsort - sort compacted-NFA arcs by color
*/
static void
carcsort(struct carc *first, size_t n)
{
	/* Zero or one arc is trivially sorted; skip the qsort call entirely. */
	if (n <= 1)
		return;
	qsort(first, n, sizeof(struct carc), carc_cmp);
}
static int
carc_cmp(const void *a, const void *b)
{
	const struct carc *lhs = (const struct carc *) a;
	const struct carc *rhs = (const struct carc *) b;

	/* Order primarily by color, secondarily by target state number. */
	if (lhs->co != rhs->co)
		return (lhs->co < rhs->co) ? -1 : +1;
	if (lhs->to != rhs->to)
		return (lhs->to < rhs->to) ? -1 : +1;
	/* Unreached in practice, since duplicate arcs shouldn't exist here: */
	return 0;
}
/*
* freecnfa - free a compacted NFA
*/
static void
freecnfa(struct cnfa *cnfa)
{
	/* Must not be called on a cnfa that was already freed/zapped. */
	assert(!NULLCNFA(*cnfa));	/* not empty already */
	FREE(cnfa->stflags);
	FREE(cnfa->states);
	FREE(cnfa->arcs);
	/* Reset to the "empty" representation so double-frees are detectable. */
	ZAPCNFA(*cnfa);
}
/*
* dumpnfa - dump an NFA in human-readable form
*/
static void
dumpnfa(struct nfa *nfa,
		FILE *f)
{
#ifdef REG_DEBUG
	struct state *s;
	int			nstates = 0;
	int			narcs = 0;

	/* Header line: key state numbers, pseudocolors, and NFA-level flags. */
	fprintf(f, "pre %d, post %d", nfa->pre->no, nfa->post->no);
	if (nfa->bos[0] != COLORLESS)
		fprintf(f, ", bos [%ld]", (long) nfa->bos[0]);
	if (nfa->bos[1] != COLORLESS)
		fprintf(f, ", bol [%ld]", (long) nfa->bos[1]);
	if (nfa->eos[0] != COLORLESS)
		fprintf(f, ", eos [%ld]", (long) nfa->eos[0]);
	if (nfa->eos[1] != COLORLESS)
		fprintf(f, ", eol [%ld]", (long) nfa->eos[1]);
	if (nfa->flags & HASLACONS)
		fprintf(f, ", haslacons");
	if (nfa->flags & HASCANTMATCH)
		fprintf(f, ", hascantmatch");
	if (nfa->flags & MATCHALL)
	{
		fprintf(f, ", minmatchall %d", nfa->minmatchall);
		if (nfa->maxmatchall == DUPINF)
			fprintf(f, ", maxmatchall inf");
		else
			fprintf(f, ", maxmatchall %d", nfa->maxmatchall);
	}
	fprintf(f, "\n");
	/* Dump every state, tallying totals as we go. */
	for (s = nfa->states; s != NULL; s = s->next)
	{
		dumpstate(s, f);
		nstates++;
		narcs += s->nouts;
	}
	fprintf(f, "total of %d states, %d arcs\n", nstates, narcs);
	/* Only a top-level NFA owns the color map; don't dump it for subNFAs. */
	if (nfa->parent == NULL)
		dumpcolors(nfa->cm, f);
	fflush(f);
#endif
}
#ifdef REG_DEBUG /* subordinates of dumpnfa */
/*
* dumpstate - dump an NFA state in human-readable form
*/
static void
dumpstate(struct state *s,
		  FILE *f)
{
	struct arc *a;

	/* State number, "T" if its tmp field is set, then its flag (or '.'). */
	fprintf(f, "%d%s%c", s->no, (s->tmp != NULL) ? "T" : "",
			(s->flag) ? s->flag : '.');
	/* Sanity-check the doubly-linked state chain. */
	if (s->prev != NULL && s->prev->next != s)
		fprintf(f, "\tstate chain bad\n");
	if (s->nouts == 0)
		fprintf(f, "\tno out arcs\n");
	else
		dumparcs(s, f);
	/* Cross-check the in-chain: every arc on it must point at s. */
	for (a = s->ins; a != NULL; a = a->inchain)
	{
		if (a->to != s)
			fprintf(f, "\tlink from %d to %d on %d's in-chain\n",
					a->from->no, a->to->no, s->no);
	}
	fflush(f);
}
/*
* dumparcs - dump out-arcs in human-readable form
*/
static void
dumparcs(struct state *s,
		 FILE *f)
{
	int			pos;
	struct arc *a;

	/* printing oldest arcs first is usually clearer */
	/* Walk forward to the tail of the out-chain (the oldest arc) ... */
	a = s->outs;
	assert(a != NULL);
	while (a->outchain != NULL)
		a = a->outchain;
	/* ... then print back toward the head via the reverse links,
	 * at most five arcs per output line. */
	pos = 1;
	do
	{
		dumparc(a, s, f);
		if (pos == 5)
		{
			fprintf(f, "\n");
			pos = 1;
		}
		else
			pos++;
		a = a->outchainRev;
	} while (a != NULL);
	/* Terminate a partially filled final line. */
	if (pos != 1)
		fprintf(f, "\n");
}
/*
* dumparc - dump one outarc in readable form, including prefixing tab
*/
static void
dumparc(struct arc *a,
		struct state *s,
		FILE *f)
{
	struct arc *aa;

	fprintf(f, "\t");
	/* Render the arc's type and color in a type-specific notation. */
	switch (a->type)
	{
		case PLAIN:
			/* ordinary color-consuming arc: [color], [*] for RAINBOW */
			if (a->co == RAINBOW)
				fprintf(f, "[*]");
			else
				fprintf(f, "[%ld]", (long) a->co);
			break;
		case AHEAD:
			/* lookahead constraint: >color> */
			if (a->co == RAINBOW)
				fprintf(f, ">*>");
			else
				fprintf(f, ">%ld>", (long) a->co);
			break;
		case BEHIND:
			/* lookbehind constraint: <color< */
			if (a->co == RAINBOW)
				fprintf(f, "<*<");
			else
				fprintf(f, "<%ld<", (long) a->co);
			break;
		case LACON:
			/* lookaround-constraint arc: :subno: */
			fprintf(f, ":%ld:", (long) a->co);
			break;
		case '^':
		case '$':
			/* line/string anchor arcs */
			fprintf(f, "%c%d", a->type, (int) a->co);
			break;
		case EMPTY:
			break;
		case CANTMATCH:
			fprintf(f, "X");
			break;
		default:
			/* unknown arc type: dump raw type and color values */
			fprintf(f, "0x%x/0%lo", a->type, (long) a->co);
			break;
	}
	/* Flag an arc that claims a different from-state than expected. */
	if (a->from != s)
		fprintf(f, "?%d?", a->from->no);
	/* Verify the arc really appears on its from-state's out-chain. */
	for (aa = a->from->outs; aa != NULL; aa = aa->outchain)
		if (aa == a)
			break;				/* NOTE BREAK OUT */
	if (aa == NULL)
		fprintf(f, "?!?");		/* missing from out-chain */
	fprintf(f, "->");
	if (a->to == NULL)
	{
		fprintf(f, "NULL");
		return;
	}
	fprintf(f, "%d", a->to->no);
	/* Likewise verify membership on the to-state's in-chain. */
	for (aa = a->to->ins; aa != NULL; aa = aa->inchain)
		if (aa == a)
			break;				/* NOTE BREAK OUT */
	if (aa == NULL)
		fprintf(f, "?!?");		/* missing from in-chain */
}
#endif /* REG_DEBUG */
/*
* dumpcnfa - dump a compacted NFA in human-readable form
*/
#ifdef REG_DEBUG
static void
dumpcnfa(struct cnfa *cnfa,
		 FILE *f)
{
	int			st;

	/* Header line: key state numbers, pseudocolors, and flags. */
	fprintf(f, "pre %d, post %d", cnfa->pre, cnfa->post);
	if (cnfa->bos[0] != COLORLESS)
		fprintf(f, ", bos [%ld]", (long) cnfa->bos[0]);
	if (cnfa->bos[1] != COLORLESS)
		fprintf(f, ", bol [%ld]", (long) cnfa->bos[1]);
	if (cnfa->eos[0] != COLORLESS)
		fprintf(f, ", eos [%ld]", (long) cnfa->eos[0]);
	if (cnfa->eos[1] != COLORLESS)
		fprintf(f, ", eol [%ld]", (long) cnfa->eos[1]);
	if (cnfa->flags & HASLACONS)
		fprintf(f, ", haslacons");
	if (cnfa->flags & MATCHALL)
	{
		fprintf(f, ", minmatchall %d", cnfa->minmatchall);
		if (cnfa->maxmatchall == DUPINF)
			fprintf(f, ", maxmatchall inf");
		else
			fprintf(f, ", maxmatchall %d", cnfa->maxmatchall);
	}
	fprintf(f, "\n");
	/* Dump every state in numeric order. */
	for (st = 0; st < cnfa->nstates; st++)
		dumpcstate(st, cnfa, f);
	fflush(f);
}
#endif
#ifdef REG_DEBUG /* subordinates of dumpcnfa */
/*
* dumpcstate - dump a compacted-NFA state in human-readable form
*/
static void
dumpcstate(int st,
		   struct cnfa *cnfa,
		   FILE *f)
{
	struct carc *ca;
	int			pos;

	/* ":" marks a no-progress state, "." an ordinary one. */
	fprintf(f, "%d%s", st, (cnfa->stflags[st] & CNFA_NOPROGRESS) ? ":" : ".");
	pos = 1;
	/* The state's arc run ends at the COLORLESS endmarker. */
	for (ca = cnfa->states[st]; ca->co != COLORLESS; ca++)
	{
		if (ca->co == RAINBOW)
			fprintf(f, "\t[*]->%d", ca->to);
		else if (ca->co < cnfa->ncolors)
			fprintf(f, "\t[%ld]->%d", (long) ca->co, ca->to);
		else
			/* colors >= ncolors encode LACON references (see compact) */
			fprintf(f, "\t:%ld:->%d", (long) (ca->co - cnfa->ncolors), ca->to);
		if (pos == 5)
		{
			fprintf(f, "\n");
			pos = 1;
		}
		else
			pos++;
	}
	/* Terminate the line for an empty arc list or a partial final row. */
	if (ca == cnfa->states[st] || pos != 1)
		fprintf(f, "\n");
	fflush(f);
}
#endif /* REG_DEBUG */ | c | github | https://github.com/postgres/postgres | src/backend/regex/regc_nfa.c |
# Copyright (c) 2001-2006 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for epoll wrapper.
"""
import socket, errno, time
from twisted.trial import unittest
from twisted.python.util import untilConcludes
try:
from twisted.python import _epoll
except ImportError:
_epoll = None
class EPoll(unittest.TestCase):
    """
    Tests for the low-level epoll bindings.

    Note: the ``except E as e`` form (Python 2.6+) replaces the
    Python-2-only ``except E, e`` syntax, and socket payloads are bytes
    literals, so this module at least parses and sends correctly on
    Python 3 as well.
    """
    def setUp(self):
        """
        Create a listening server port and a list with which to keep track
        of created sockets.
        """
        self.serverSocket = socket.socket()
        # Port 0 lets the OS pick a free port; getsockname() recovers it.
        self.serverSocket.bind(('127.0.0.1', 0))
        self.serverSocket.listen(1)
        self.connections = [self.serverSocket]

    def tearDown(self):
        """
        Close any sockets which were opened by the test.
        """
        for skt in self.connections:
            skt.close()

    def _connectedPair(self):
        """
        Return the two sockets which make up a new TCP connection.
        """
        client = socket.socket()
        client.setblocking(False)
        try:
            client.connect(('127.0.0.1', self.serverSocket.getsockname()[1]))
        except socket.error as e:
            # A non-blocking connect cannot complete synchronously; it must
            # report EINPROGRESS rather than any other error.
            self.assertEquals(e.args[0], errno.EINPROGRESS)
        else:
            raise unittest.FailTest("Connect should have raised EINPROGRESS")
        server, addr = self.serverSocket.accept()
        self.connections.extend((client, server))
        return client, server

    def test_create(self):
        """
        Test the creation of an epoll object.
        """
        try:
            p = _epoll.epoll(16)
        except OSError as e:
            raise unittest.FailTest(str(e))
        else:
            p.close()

    def test_badCreate(self):
        """
        Test that attempting to create an epoll object with some random
        objects raises a TypeError.
        """
        self.assertRaises(TypeError, _epoll.epoll, 1, 2, 3)
        self.assertRaises(TypeError, _epoll.epoll, 'foo')
        self.assertRaises(TypeError, _epoll.epoll, None)
        self.assertRaises(TypeError, _epoll.epoll, ())
        self.assertRaises(TypeError, _epoll.epoll, ['foo'])
        self.assertRaises(TypeError, _epoll.epoll, {})
        self.assertRaises(TypeError, _epoll.epoll)

    def test_add(self):
        """
        Test adding a socket to an epoll object.
        """
        server, client = self._connectedPair()
        p = _epoll.epoll(2)
        try:
            p._control(_epoll.CTL_ADD, server.fileno(), _epoll.IN | _epoll.OUT)
            p._control(_epoll.CTL_ADD, client.fileno(), _epoll.IN | _epoll.OUT)
        finally:
            # Always close the epoll fd, even if registration failed.
            p.close()

    def test_controlAndWait(self):
        """
        Test waiting on an epoll object which has had some sockets added to
        it.
        """
        client, server = self._connectedPair()

        p = _epoll.epoll(16)
        p._control(_epoll.CTL_ADD, client.fileno(), _epoll.IN | _epoll.OUT |
                   _epoll.ET)
        p._control(_epoll.CTL_ADD, server.fileno(), _epoll.IN | _epoll.OUT |
                   _epoll.ET)

        # Both ends of a fresh connection are immediately writable, so the
        # wait must return (nearly) instantly with OUT for each.
        now = time.time()
        events = untilConcludes(p.wait, 4, 1000)
        then = time.time()
        self.failIf(then - now > 0.01)

        events.sort()
        expected = [(client.fileno(), _epoll.OUT),
                    (server.fileno(), _epoll.OUT)]
        expected.sort()
        self.assertEquals(events, expected)

        # Edge-triggered mode: with no new activity, the wait must actually
        # block for the full 200ms timeout and return nothing.
        now = time.time()
        events = untilConcludes(p.wait, 4, 200)
        then = time.time()
        self.failUnless(then - now > 0.1)
        self.failIf(events)

        # bytes payloads: required on Python 3, equivalent on Python 2.
        client.send(b"Hello!")
        server.send(b"world!!!")

        # Each side now has data pending, so IN is reported alongside OUT.
        now = time.time()
        events = untilConcludes(p.wait, 4, 1000)
        then = time.time()
        self.failIf(then - now > 0.01)

        events.sort()
        expected = [(client.fileno(), _epoll.IN | _epoll.OUT),
                    (server.fileno(), _epoll.IN | _epoll.OUT)]
        expected.sort()
        self.assertEquals(events, expected)
# Decide at import time whether the EPoll tests can run at all: skip when
# the _epoll extension failed to import, or when the kernel lacks epoll
# support (ENOSYS from epoll_create).  The ``as exc`` form replaces the
# Python-2-only ``except E, exc`` syntax and is valid on Python 2.6+ and 3.
if _epoll is None:
    EPoll.skip = "_epoll module unavailable"
else:
    try:
        e = _epoll.epoll(16)
    except IOError as exc:
        if exc.errno == errno.ENOSYS:
            # Drop the reference to avoid keeping a traceback cycle alive.
            del exc
            EPoll.skip = "epoll support missing from platform"
        else:
            raise
    else:
        e.close()
        del e
"""
The GeometryColumns and SpatialRefSys models for the SpatiaLite backend.
"""
from django.db import connection, models
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class GeometryColumns(models.Model):
    """
    The 'geometry_columns' table from SpatiaLite.

    Read-only (``managed = False``) mapping onto SpatiaLite's geometry
    metadata table; Django never creates or migrates it.
    """
    # Name of the feature table that holds the geometry column.
    f_table_name = models.CharField(max_length=256)
    # Name of the geometry column within that table.
    f_geometry_column = models.CharField(max_length=256)
    # Geometry type of the column (rendered as the field type in __str__).
    type = models.CharField(max_length=30)
    # Number of coordinate dimensions (e.g. 2 or 3).
    coord_dimension = models.IntegerField()
    # Spatial Reference ID; declared primary key so Django can address rows.
    srid = models.IntegerField(primary_key=True)
    # Nonzero when a spatial index exists for the column.
    spatial_index_enabled = models.IntegerField()

    class Meta:
        db_table = 'geometry_columns'
        managed = False

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the feature table
        name.
        """
        return 'f_table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the feature
        geometry column.
        """
        return 'f_geometry_column'

    def __str__(self):
        # e.g. "mytable.geom - 2D POINT field (SRID: 4326)"
        return "%s.%s - %dD %s field (SRID: %d)" % \
               (self.f_table_name, self.f_geometry_column,
                self.coord_dimension, self.type, self.srid)
class SpatialRefSys(models.Model, SpatialRefSysMixin):
    """
    The 'spatial_ref_sys' table from SpatiaLite.

    Unmanaged mapping onto SpatiaLite's spatial reference system catalog.
    """
    srid = models.IntegerField(primary_key=True)
    # Authority that defined the SRS (e.g. 'epsg') and its id in that scheme.
    auth_name = models.CharField(max_length=256)
    auth_srid = models.IntegerField()
    ref_sys_name = models.CharField(max_length=256)
    # PROJ.4 definition string for the SRS.
    proj4text = models.CharField(max_length=2048)
    # SpatiaLite 4+ adds an 'srtext' column; only declare the field when the
    # connected database version has it (checked at class-definition time).
    if connection.ops.spatial_version[0] >= 4:
        srtext = models.CharField(max_length=2048)

    @property
    def wkt(self):
        """
        Return the WKT for this SRS: the stored 'srtext' when the column
        exists, otherwise derived from the PROJ.4 string via GDAL.
        """
        if hasattr(self, 'srtext'):
            return self.srtext
        # Imported lazily so GDAL is only required when actually needed.
        from django.contrib.gis.gdal import SpatialReference
        return SpatialReference(self.proj4text).wkt

    class Meta:
        db_table = 'spatial_ref_sys'
        managed = False
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry.core import discover
from telemetry.core import util
class DiscoverTest(unittest.TestCase):
    """Tests for discover.DiscoverClasses over the unittest fixture classes."""

    def setUp(self):
        # Fixture modules live under <unittest data dir>/discoverable_classes,
        # each defining one or more Exception subclasses to be discovered.
        self._base_dir = util.GetUnittestDataDir()
        self._start_dir = os.path.join(self._base_dir, 'discoverable_classes')
        self._base_class = Exception

    def testDiscoverClassesBasic(self):
        """By default, classes are indexed by their defining module name."""
        classes = discover.DiscoverClasses(
            self._start_dir, self._base_dir, self._base_class)
        # dict.items() (not the Python-2-only iteritems()) keeps this test
        # runnable on both Python 2 and Python 3.
        actual_classes = dict(
            (name, cls.__name__) for name, cls in classes.items())
        expected_classes = {
            'discover_dummyclass': 'DummyException',
            'another_discover_dummyclass': 'DummyExceptionImpl2',
        }
        self.assertEqual(actual_classes, expected_classes)

    def testDiscoverClassesWithPattern(self):
        """A filename pattern restricts which modules are searched."""
        classes = discover.DiscoverClasses(
            self._start_dir, self._base_dir, self._base_class,
            pattern='another*')
        actual_classes = dict(
            (name, cls.__name__) for name, cls in classes.items())
        expected_classes = {
            'another_discover_dummyclass': 'DummyExceptionImpl2',
        }
        self.assertEqual(actual_classes, expected_classes)

    def testDiscoverClassesByClassName(self):
        """index_by_class_name keys results on the class name instead."""
        classes = discover.DiscoverClasses(
            self._start_dir, self._base_dir, self._base_class,
            index_by_class_name=True)
        actual_classes = dict(
            (name, cls.__name__) for name, cls in classes.items())
        expected_classes = {
            'dummy_exception': 'DummyException',
            'dummy_exception_impl1': 'DummyExceptionImpl1',
            'dummy_exception_impl2': 'DummyExceptionImpl2',
        }
        self.assertEqual(actual_classes, expected_classes)
/*-------------------------------------------------------------------------
*
* md5.c
* Implements the MD5 Message-Digest Algorithm
*
* Fallback implementation of MD5, as specified in RFC 1321. This
* implementation is a simple one, in that it needs every input byte
* to be buffered before doing any calculations.
*
* Portions Copyright (c) 1996-2026, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* IDENTIFICATION
* src/common/md5.c
*
*-------------------------------------------------------------------------
*/
/* $KAME: md5.c,v 1.3 2000/02/22 14:01:17 itojun Exp $ */
/*
* Copyright (C) 1995, 1996, 1997, and 1998 WIDE Project.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE PROJECT OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef FRONTEND
#include "postgres.h"
#else
#include "postgres_fe.h"
#endif
#include "md5_int.h"
/* Rotate the 32-bit value X left by s bits. */
#define SHIFT(X, s) (((X) << (s)) | ((X) >> (32 - (s))))

/* The four auxiliary bitwise functions of RFC 1321 section 3.4. */
#define F(X, Y, Z) (((X) & (Y)) | ((~X) & (Z)))
#define G(X, Y, Z) (((X) & (Z)) | ((Y) & (~Z)))
#define H(X, Y, Z) ((X) ^ (Y) ^ (Z))
#define I(X, Y, Z) ((Y) ^ ((X) | (~Z)))

/*
 * One operation of each round:
 *   a = b + ((a + func(b,c,d) + X[k] + T[i]) <<< s)
 * where func is F, G, H, or I for rounds 1-4 respectively.
 */
#define ROUND1(a, b, c, d, k, s, i) \
do { \
	(a) = (a) + F((b), (c), (d)) + X[(k)] + T[(i)]; \
	(a) = SHIFT((a), (s)); \
	(a) = (b) + (a); \
} while (0)
#define ROUND2(a, b, c, d, k, s, i) \
do { \
	(a) = (a) + G((b), (c), (d)) + X[(k)] + T[(i)]; \
	(a) = SHIFT((a), (s)); \
	(a) = (b) + (a); \
} while (0)
#define ROUND3(a, b, c, d, k, s, i) \
do { \
	(a) = (a) + H((b), (c), (d)) + X[(k)] + T[(i)]; \
	(a) = SHIFT((a), (s)); \
	(a) = (b) + (a); \
} while (0)
#define ROUND4(a, b, c, d, k, s, i) \
do { \
	(a) = (a) + I((b), (c), (d)) + X[(k)] + T[(i)]; \
	(a) = SHIFT((a), (s)); \
	(a) = (b) + (a); \
} while (0)

/* Per-round left-rotation amounts, four per round (RFC 1321 section 3.4). */
#define Sa	 7
#define Sb	12
#define Sc	17
#define Sd	22
#define Se	 5
#define Sf	 9
#define Sg	14
#define Sh	20
#define Si	 4
#define Sj	11
#define Sk	16
#define Sl	23
#define Sm	 6
#define Sn	10
#define So	15
#define Sp	21

/* Initial register values A, B, C, D (RFC 1321 section 3.3). */
#define MD5_A0	0x67452301
#define MD5_B0	0xefcdab89
#define MD5_C0	0x98badcfe
#define MD5_D0	0x10325476
/*
 * Round-constant table (RFC 1321 section 3.4): T[i] is the integer part of
 * 4294967296 times abs(sin(i)), where i is in radians.
 */
static const uint32 T[65] = {
	0,							/* T[0] unused; the ROUNDn macros index from 1 */
	0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee,
	0xf57c0faf, 0x4787c62a, 0xa8304613, 0xfd469501,
	0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be,
	0x6b901122, 0xfd987193, 0xa679438e, 0x49b40821,
	0xf61e2562, 0xc040b340, 0x265e5a51, 0xe9b6c7aa,
	0xd62f105d, 0x2441453, 0xd8a1e681, 0xe7d3fbc8,
	0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed,
	0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a,
	0xfffa3942, 0x8771f681, 0x6d9d6122, 0xfde5380c,
	0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70,
	0x289b7ec6, 0xeaa127fa, 0xd4ef3085, 0x4881d05,
	0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665,
	0xf4292244, 0x432aff97, 0xab9423a7, 0xfc93a039,
	0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1,
	0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1,
	0xf7537e82, 0xbd3af235, 0x2ad7d2bb, 0xeb86d391,
};
/*
 * Message padding bytes (RFC 1321 section 3.1): a single 0x80 bit-marker
 * byte followed by zeroes.
 */
static const uint8 md5_paddat[MD5_BUFLEN] = {
	0x80, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
};
/*
 * md5_calc --- MD5 compression function (RFC 1321).
 *
 * Folds one 64-byte input block "b64" into the running state words
 * (md5_sta..md5_std) of "ctx" via the four 16-step rounds.  The
 * ROUND1..ROUND4 and Sa..Sp shift macros are defined earlier in this file.
 */
static void
md5_calc(const uint8 *b64, pg_md5_ctx *ctx)
{
	uint32 A = ctx->md5_sta;
	uint32 B = ctx->md5_stb;
	uint32 C = ctx->md5_stc;
	uint32 D = ctx->md5_std;

#ifndef WORDS_BIGENDIAN
	/* Little-endian host: the block can be read directly as 32-bit words. */
	const uint32 *X = (const uint32 *) b64;
#else
	/* 4 byte words */
	/* what a brute force but fast! */
	/* Big-endian host: byte-swap each 4-byte group of the block into X. */
	uint32 X[16];
	uint8 *y = (uint8 *) X;

	y[0] = b64[3];
	y[1] = b64[2];
	y[2] = b64[1];
	y[3] = b64[0];
	y[4] = b64[7];
	y[5] = b64[6];
	y[6] = b64[5];
	y[7] = b64[4];
	y[8] = b64[11];
	y[9] = b64[10];
	y[10] = b64[9];
	y[11] = b64[8];
	y[12] = b64[15];
	y[13] = b64[14];
	y[14] = b64[13];
	y[15] = b64[12];
	y[16] = b64[19];
	y[17] = b64[18];
	y[18] = b64[17];
	y[19] = b64[16];
	y[20] = b64[23];
	y[21] = b64[22];
	y[22] = b64[21];
	y[23] = b64[20];
	y[24] = b64[27];
	y[25] = b64[26];
	y[26] = b64[25];
	y[27] = b64[24];
	y[28] = b64[31];
	y[29] = b64[30];
	y[30] = b64[29];
	y[31] = b64[28];
	y[32] = b64[35];
	y[33] = b64[34];
	y[34] = b64[33];
	y[35] = b64[32];
	y[36] = b64[39];
	y[37] = b64[38];
	y[38] = b64[37];
	y[39] = b64[36];
	y[40] = b64[43];
	y[41] = b64[42];
	y[42] = b64[41];
	y[43] = b64[40];
	y[44] = b64[47];
	y[45] = b64[46];
	y[46] = b64[45];
	y[47] = b64[44];
	y[48] = b64[51];
	y[49] = b64[50];
	y[50] = b64[49];
	y[51] = b64[48];
	y[52] = b64[55];
	y[53] = b64[54];
	y[54] = b64[53];
	y[55] = b64[52];
	y[56] = b64[59];
	y[57] = b64[58];
	y[58] = b64[57];
	y[59] = b64[56];
	y[60] = b64[63];
	y[61] = b64[62];
	y[62] = b64[61];
	y[63] = b64[60];
#endif

	/* Round 1 */
	ROUND1(A, B, C, D, 0, Sa, 1);
	ROUND1(D, A, B, C, 1, Sb, 2);
	ROUND1(C, D, A, B, 2, Sc, 3);
	ROUND1(B, C, D, A, 3, Sd, 4);
	ROUND1(A, B, C, D, 4, Sa, 5);
	ROUND1(D, A, B, C, 5, Sb, 6);
	ROUND1(C, D, A, B, 6, Sc, 7);
	ROUND1(B, C, D, A, 7, Sd, 8);
	ROUND1(A, B, C, D, 8, Sa, 9);
	ROUND1(D, A, B, C, 9, Sb, 10);
	ROUND1(C, D, A, B, 10, Sc, 11);
	ROUND1(B, C, D, A, 11, Sd, 12);
	ROUND1(A, B, C, D, 12, Sa, 13);
	ROUND1(D, A, B, C, 13, Sb, 14);
	ROUND1(C, D, A, B, 14, Sc, 15);
	ROUND1(B, C, D, A, 15, Sd, 16);

	/* Round 2 */
	ROUND2(A, B, C, D, 1, Se, 17);
	ROUND2(D, A, B, C, 6, Sf, 18);
	ROUND2(C, D, A, B, 11, Sg, 19);
	ROUND2(B, C, D, A, 0, Sh, 20);
	ROUND2(A, B, C, D, 5, Se, 21);
	ROUND2(D, A, B, C, 10, Sf, 22);
	ROUND2(C, D, A, B, 15, Sg, 23);
	ROUND2(B, C, D, A, 4, Sh, 24);
	ROUND2(A, B, C, D, 9, Se, 25);
	ROUND2(D, A, B, C, 14, Sf, 26);
	ROUND2(C, D, A, B, 3, Sg, 27);
	ROUND2(B, C, D, A, 8, Sh, 28);
	ROUND2(A, B, C, D, 13, Se, 29);
	ROUND2(D, A, B, C, 2, Sf, 30);
	ROUND2(C, D, A, B, 7, Sg, 31);
	ROUND2(B, C, D, A, 12, Sh, 32);

	/* Round 3 */
	ROUND3(A, B, C, D, 5, Si, 33);
	ROUND3(D, A, B, C, 8, Sj, 34);
	ROUND3(C, D, A, B, 11, Sk, 35);
	ROUND3(B, C, D, A, 14, Sl, 36);
	ROUND3(A, B, C, D, 1, Si, 37);
	ROUND3(D, A, B, C, 4, Sj, 38);
	ROUND3(C, D, A, B, 7, Sk, 39);
	ROUND3(B, C, D, A, 10, Sl, 40);
	ROUND3(A, B, C, D, 13, Si, 41);
	ROUND3(D, A, B, C, 0, Sj, 42);
	ROUND3(C, D, A, B, 3, Sk, 43);
	ROUND3(B, C, D, A, 6, Sl, 44);
	ROUND3(A, B, C, D, 9, Si, 45);
	ROUND3(D, A, B, C, 12, Sj, 46);
	ROUND3(C, D, A, B, 15, Sk, 47);
	ROUND3(B, C, D, A, 2, Sl, 48);

	/* Round 4 */
	ROUND4(A, B, C, D, 0, Sm, 49);
	ROUND4(D, A, B, C, 7, Sn, 50);
	ROUND4(C, D, A, B, 14, So, 51);
	ROUND4(B, C, D, A, 5, Sp, 52);
	ROUND4(A, B, C, D, 12, Sm, 53);
	ROUND4(D, A, B, C, 3, Sn, 54);
	ROUND4(C, D, A, B, 10, So, 55);
	ROUND4(B, C, D, A, 1, Sp, 56);
	ROUND4(A, B, C, D, 8, Sm, 57);
	ROUND4(D, A, B, C, 15, Sn, 58);
	ROUND4(C, D, A, B, 6, So, 59);
	ROUND4(B, C, D, A, 13, Sp, 60);
	ROUND4(A, B, C, D, 4, Sm, 61);
	ROUND4(D, A, B, C, 11, Sn, 62);
	ROUND4(C, D, A, B, 2, So, 63);
	ROUND4(B, C, D, A, 9, Sp, 64);

	/* Add this block's result into the running state. */
	ctx->md5_sta += A;
	ctx->md5_stb += B;
	ctx->md5_stc += C;
	ctx->md5_std += D;
}
/*
 * md5_pad --- append MD5 padding plus the 64-bit length trailer
 * (RFC 1321 steps 3.1/3.2) and flush the final block(s) through
 * md5_calc().
 */
static void
md5_pad(pg_md5_ctx *ctx)
{
	unsigned int gap;

	/* Don't count up padding. Keep md5_n. */
	gap = MD5_BUFLEN - ctx->md5_i;
	if (gap > 8)
	{
		/* Padding and the 8-byte length trailer fit in this block. */
		memmove(ctx->md5_buf + ctx->md5_i, md5_paddat,
				gap - sizeof(ctx->md5_n));
	}
	else
	{
		/* including gap == 8 */
		/* No room for the trailer: finish this block, pad a second one. */
		memmove(ctx->md5_buf + ctx->md5_i, md5_paddat, gap);
		md5_calc(ctx->md5_buf, ctx);
		memmove(ctx->md5_buf, md5_paddat + gap,
				MD5_BUFLEN - sizeof(ctx->md5_n));
	}

	/* 8 byte word */
	/* Store the message bit count little-endian in the last 8 bytes. */
#ifndef WORDS_BIGENDIAN
	memmove(&ctx->md5_buf[56], &ctx->md5_n8[0], 8);
#else
	ctx->md5_buf[56] = ctx->md5_n8[7];
	ctx->md5_buf[57] = ctx->md5_n8[6];
	ctx->md5_buf[58] = ctx->md5_n8[5];
	ctx->md5_buf[59] = ctx->md5_n8[4];
	ctx->md5_buf[60] = ctx->md5_n8[3];
	ctx->md5_buf[61] = ctx->md5_n8[2];
	ctx->md5_buf[62] = ctx->md5_n8[1];
	ctx->md5_buf[63] = ctx->md5_n8[0];
#endif

	md5_calc(ctx->md5_buf, ctx);
}
/*
 * md5_result --- copy the final 16-byte digest out of the context.
 *
 * The digest is emitted little-endian, so big-endian hosts byte-swap
 * each 32-bit state word on the way out.
 */
static void
md5_result(uint8 *digest, pg_md5_ctx *ctx)
{
	/* 4 byte words */
#ifndef WORDS_BIGENDIAN
	memmove(digest, &ctx->md5_st8[0], 16);
#else
	digest[0] = ctx->md5_st8[3];
	digest[1] = ctx->md5_st8[2];
	digest[2] = ctx->md5_st8[1];
	digest[3] = ctx->md5_st8[0];
	digest[4] = ctx->md5_st8[7];
	digest[5] = ctx->md5_st8[6];
	digest[6] = ctx->md5_st8[5];
	digest[7] = ctx->md5_st8[4];
	digest[8] = ctx->md5_st8[11];
	digest[9] = ctx->md5_st8[10];
	digest[10] = ctx->md5_st8[9];
	digest[11] = ctx->md5_st8[8];
	digest[12] = ctx->md5_st8[15];
	digest[13] = ctx->md5_st8[14];
	digest[14] = ctx->md5_st8[13];
	digest[15] = ctx->md5_st8[12];
#endif
}
/* External routines for this MD5 implementation */

/*
 * pg_md5_init
 *
 * Initialize a MD5 context.
 */
void
pg_md5_init(pg_md5_ctx *ctx)
{
	ctx->md5_n = 0;		/* total message length in bits */
	ctx->md5_i = 0;		/* bytes currently buffered */
	/* RFC 1321 initial state constants */
	ctx->md5_sta = MD5_A0;
	ctx->md5_stb = MD5_B0;
	ctx->md5_stc = MD5_C0;
	ctx->md5_std = MD5_D0;
	memset(ctx->md5_buf, 0, sizeof(ctx->md5_buf));
}
/*
 * pg_md5_update
 *
 * Update a MD5 context.
 */
void
pg_md5_update(pg_md5_ctx *ctx, const uint8 *data, size_t len)
{
	unsigned int gap,
				i;

	ctx->md5_n += len * 8;		/* byte to bit */
	gap = MD5_BUFLEN - ctx->md5_i;	/* free space left in the buffer */

	if (len >= gap)
	{
		/* Fill and flush the partially-filled buffer first ... */
		memmove(ctx->md5_buf + ctx->md5_i, data, gap);
		md5_calc(ctx->md5_buf, ctx);

		/* ... then hash whole 64-byte blocks straight from "data" ... */
		for (i = gap; i + MD5_BUFLEN <= len; i += MD5_BUFLEN)
			md5_calc(data + i, ctx);

		/* ... and buffer the remaining tail for the next call. */
		ctx->md5_i = len - i;
		memmove(ctx->md5_buf, data + i, ctx->md5_i);
	}
	else
	{
		/* Not enough data for a full block: just accumulate it. */
		memmove(ctx->md5_buf + ctx->md5_i, data, len);
		ctx->md5_i += len;
	}
}
/*
 * pg_md5_final
 *
 * Finalize a MD5 context.
 */
void
pg_md5_final(pg_md5_ctx *ctx, uint8 *dest)
{
	/* Pad/flush any buffered input, then copy out the 16-byte digest. */
	md5_pad(ctx);
	md5_result(dest, ctx);
}
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
'''
Blocks and utilities for COMEDI devices
'''
# The presence of this file turns this directory into a Python package
import os

try:
    # Installed layout: the SWIG-generated bindings sit next to this package.
    from comedi_swig import *
except ImportError:
    # Build-tree fallback: extend the package search path to the sibling
    # "swig" output directory and retry the import.
    dirname, filename = os.path.split(os.path.abspath(__file__))
    __path__.append(os.path.join(dirname, "..", "..", "swig"))
    from comedi_swig import *
#! /bin/bash
#
# Verify a Django release: download the signed checksum file for $VERSION,
# check its PGP signature, download every artifact it lists, and verify the
# published MD5/SHA1/SHA256 hashes against the downloads.
#
# Environment: VERSION (required), GPG_KEY (optional key to fetch first).
set -xue

CHECKSUM_FILE="Django-${VERSION}.checksum.txt"
MEDIA_URL_PREFIX="https://media.djangoproject.com"
RELEASE_URL_PREFIX="https://www.djangoproject.com/m/releases/"
DOWNLOAD_PREFIX="https://www.djangoproject.com/download"

# Final releases (X.Y, X.Y.Z) and pre-releases (aN, bN, rcN) are accepted.
if [[ ! "${VERSION}" =~ ^[0-9]+\.[0-9]+(\.[0-9]+|a[0-9]+|b[0-9]+|rc[0-9]+)?$ ]] ; then
    echo "Not a valid version"
    # Bug fix: previously the script only echoed and then carried on with
    # the invalid version; abort instead.
    exit 1
fi

# Work inside a scratch directory named after the version; it is removed
# again when the script exits (success or failure).
rm -rf "${VERSION}"
mkdir "${VERSION}"
cd "${VERSION}"
function cleanup {
    cd ..
    rm -rf "${VERSION}"
}
trap cleanup EXIT

echo "Download checksum file ..."
curl --fail --output "$CHECKSUM_FILE" "${MEDIA_URL_PREFIX}/pgp/${CHECKSUM_FILE}"

echo "Verify checksum file ..."
if [ -n "${GPG_KEY:-}" ] ; then
    gpg --recv-keys "${GPG_KEY}"
fi
gpg --verify "${CHECKSUM_FILE}"

echo "Finding release artifacts ..."
mapfile -t RELEASE_ARTIFACTS < <(grep "${DOWNLOAD_PREFIX}" "${CHECKSUM_FILE}")
echo "Found these release artifacts: "
for ARTIFACT_URL in "${RELEASE_ARTIFACTS[@]}" ; do
    echo "- $ARTIFACT_URL"
done

echo "Downloading artifacts ..."
for ARTIFACT_URL in "${RELEASE_ARTIFACTS[@]}" ; do
    # The download URL redirects; resolve it first so the file is saved
    # under the real artifact name.
    ARTIFACT_ACTUAL_URL=$(curl --head --write-out '%{redirect_url}' --output /dev/null --silent "${ARTIFACT_URL}")
    curl --location --fail --output "$(basename "${ARTIFACT_ACTUAL_URL}")" "${ARTIFACT_ACTUAL_URL}"
done

echo "Verifying artifact hashes ..."
# The `2> /dev/null` moves notes like "sha256sum: WARNING: 60 lines are improperly formatted"
# to /dev/null. That's fine because the return code of the script is still set on error and a
# wrong checksum will still show up as `FAILED`
echo "- MD5 checksums"
md5sum --check "${CHECKSUM_FILE}" 2> /dev/null
echo "- SHA1 checksums"
sha1sum --check "${CHECKSUM_FILE}" 2> /dev/null
echo "- SHA256 checksums"
sha256sum --check "${CHECKSUM_FILE}" 2> /dev/null
"""Module progress tests"""
import unittest
from mock import Mock
from xblock.field_data import DictFieldData
from xmodule.progress import Progress
from xmodule import x_module
from . import get_test_system
class ProgressTest(unittest.TestCase):
    ''' Test that basic Progress objects work.  A Progress represents a
    fraction between 0 and 1.
    '''
    # Shared fixtures covering the interesting states of a Progress.
    not_started = Progress(0, 17)
    part_done = Progress(2, 6)
    half_done = Progress(3, 6)
    also_half_done = Progress(1, 2)
    done = Progress(7, 7)

    def test_create_object(self):
        '''Constructor accepts sane fractions and rejects bad ones.'''
        # These should work (results intentionally discarded -- only
        # successful construction is being checked):
        Progress(0, 2)
        Progress(1, 2)
        Progress(2, 2)
        Progress(2.5, 5.0)
        Progress(3.7, 12.3333)

        # These shouldn't
        self.assertRaises(ValueError, Progress, 0, 0)
        self.assertRaises(ValueError, Progress, 2, 0)
        self.assertRaises(ValueError, Progress, 1, -2)

        self.assertRaises(TypeError, Progress, 0, "all")
        # check complex numbers just for the heck of it :)
        self.assertRaises(TypeError, Progress, 2j, 3)

    def test_clamp(self):
        '''Fractions outside [0, total] are clamped into range.'''
        self.assertEqual((2, 2), Progress(3, 2).frac())
        self.assertEqual((0, 2), Progress(-2, 2).frac())

    def test_frac(self):
        '''frac() returns the (done, total) pair.'''
        p = Progress(1, 2)
        (a, b) = p.frac()
        self.assertEqual(a, 1)
        self.assertEqual(b, 2)

    def test_percent(self):
        self.assertEqual(self.not_started.percent(), 0)
        self.assertAlmostEqual(self.part_done.percent(), 33.33333333333333)
        self.assertEqual(self.half_done.percent(), 50)
        self.assertEqual(self.done.percent(), 100)
        self.assertEqual(self.half_done.percent(), self.also_half_done.percent())

    def test_started(self):
        self.assertFalse(self.not_started.started())
        self.assertTrue(self.part_done.started())
        self.assertTrue(self.half_done.started())
        self.assertTrue(self.done.started())

    def test_inprogress(self):
        # only true if working on it
        self.assertFalse(self.done.inprogress())
        self.assertFalse(self.not_started.inprogress())
        self.assertTrue(self.part_done.inprogress())
        self.assertTrue(self.half_done.inprogress())

    def test_done(self):
        self.assertTrue(self.done.done())
        self.assertFalse(self.half_done.done())
        self.assertFalse(self.not_started.done())

    def test_str(self):
        self.assertEqual(str(self.not_started), "0/17")
        self.assertEqual(str(self.part_done), "2/6")
        self.assertEqual(str(self.done), "7/7")

    def test_ternary_str(self):
        self.assertEqual(self.not_started.ternary_str(), "none")
        self.assertEqual(self.half_done.ternary_str(), "in_progress")
        self.assertEqual(self.done.ternary_str(), "done")

    def test_to_js_status(self):
        '''Test the Progress.to_js_status_str() method'''
        self.assertEqual(Progress.to_js_status_str(self.not_started), "none")
        self.assertEqual(Progress.to_js_status_str(self.half_done), "in_progress")
        self.assertEqual(Progress.to_js_status_str(self.done), "done")
        self.assertEqual(Progress.to_js_status_str(None), "0")

    def test_to_js_detail_str(self):
        '''Test the Progress.to_js_detail_str() method'''
        f = Progress.to_js_detail_str
        for p in (self.not_started, self.half_done, self.done):
            self.assertEqual(f(p), str(p))
        # But None should be encoded as 0
        self.assertEqual(f(None), "0")

    def test_add(self):
        '''Test the Progress.add_counts() method'''
        p = Progress(0, 2)
        p2 = Progress(1, 3)
        p3 = Progress(2, 5)
        p_none = None

        def add(a, b):
            '''Add two progresses and return the resulting fraction.'''
            return Progress.add_counts(a, b).frac()

        self.assertEqual(add(p, p), (0, 4))
        self.assertEqual(add(p, p2), (1, 5))
        self.assertEqual(add(p2, p3), (3, 8))
        self.assertEqual(add(p2, p_none), p2.frac())
        self.assertEqual(add(p_none, p2), p2.frac())

    def test_equality(self):
        '''Test that comparing Progress objects for equality
        works correctly.'''
        p = Progress(1, 2)
        p2 = Progress(2, 4)
        p3 = Progress(1, 2)
        self.assertTrue(p == p3)
        self.assertFalse(p == p2)

        # Check != while we're at it
        self.assertTrue(p != p2)
        self.assertFalse(p != p3)
class ModuleProgressTest(unittest.TestCase):
    '''Check that get_progress() behaves sensibly on the XModule base class.'''

    def test_xmodule_default(self):
        '''The stock XModule.get_progress() implementation returns None.'''
        field_data = DictFieldData({'location': 'a://b/c/d/e'})
        module = x_module.XModule(Mock(), get_test_system(), field_data, Mock())
        self.assertIsNone(module.get_progress())
An inherent implementation was marked unsafe.
Erroneous code example:
```compile_fail,E0197
struct Foo;
unsafe impl Foo { } // error!
```
Inherent implementations (ones that do not implement a trait but provide
methods associated with a type) are always safe because they do not
implement an unsafe trait. Removing the `unsafe` keyword from the inherent
implementation will resolve this error.
```
struct Foo;
impl Foo { } // ok!
``` | unknown | github | https://github.com/rust-lang/rust | compiler/rustc_error_codes/src/error_codes/E0197.md |
#!/usr/bin/env python
"""This modules contains tests for artifact API handler."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import io
import os
from absl import app
from grr_response_core import config
from grr_response_core.lib.rdfvalues import artifacts as rdf_artifacts
from grr_response_server import artifact
from grr_response_server.gui import api_test_lib
from grr_response_server.gui.api_plugins import artifact as artifact_plugin
from grr.test_lib import artifact_test_lib
from grr.test_lib import db_test_lib
from grr.test_lib import flow_test_lib
from grr.test_lib import test_lib
@db_test_lib.TestDatabases()
class ApiListArtifactsHandlerTest(flow_test_lib.FlowTestsBaseclass):
  """Test for ApiListArtifactsHandler."""

  def setUp(self):
    super(ApiListArtifactsHandlerTest, self).setUp()
    self.handler = artifact_plugin.ApiListArtifactsHandler()

  @artifact_test_lib.PatchCleanArtifactRegistry
  def testNoArtifacts(self, _):
    """An empty registry yields an empty result."""
    result = self.handler.Handle(self.handler.args_type(), token=self.token)
    self.assertEqual(result.total_count, 0)
    self.assertEqual(result.items, [])

  @artifact_test_lib.PatchDefaultArtifactRegistry
  def testPrepackagedArtifacts(self, registry):
    """Artifacts loaded from the test definitions file are listed."""
    test_artifacts_file = os.path.join(config.CONFIG["Test.data_dir"],
                                       "artifacts", "test_artifacts.json")
    registry.AddFileSource(test_artifacts_file)

    result = self.handler.Handle(self.handler.args_type(), token=self.token)

    # Some artifacts are guaranteed to be returned, as they're defined in
    # the test_data/artifacts/test_artifacts.json.
    self.assertTrue(result.total_count)

    # Check that FakeArtifact artifact exists. It's guaranteed to exist, since
    # it's defined in test_data/artifacts/test_artifacts.json.
    # Bug fix: initialize before the loop (and break on a match) so that a
    # missing artifact fails the assertion below instead of raising NameError.
    fake_artifact = None
    for item in result.items:
      if item.artifact.name == "FakeArtifact":
        fake_artifact = item
        break

    self.assertTrue(fake_artifact)
    self.assertTrue(fake_artifact.HasField("is_custom"))
    self.assertFalse(fake_artifact.is_custom)

    self.assertTrue(fake_artifact.artifact.doc)
    self.assertTrue(fake_artifact.artifact.labels)
    self.assertTrue(fake_artifact.artifact.supported_os)
@db_test_lib.TestDatabases()
class ApiUploadArtifactHandlerTest(api_test_lib.ApiCallHandlerTest):
  """Tests for ApiUploadArtifactHandler."""

  def setUp(self):
    super(ApiUploadArtifactHandlerTest, self).setUp()
    self.handler = artifact_plugin.ApiUploadArtifactHandler()

  @artifact_test_lib.PatchCleanArtifactRegistry
  def testUpload(self, registry):
    """Uploading an artifact definition registers it in the registry."""
    test_artifacts_file = os.path.join(config.CONFIG["Test.data_dir"],
                                       "artifacts", "test_artifact.json")
    with open(test_artifacts_file, "rb") as fd:
      args = self.handler.args_type(artifact=fd.read())

    # Sanity check: the artifact must not exist before the upload.
    with self.assertRaises(rdf_artifacts.ArtifactNotRegisteredError):
      registry.GetArtifact("TestDrivers")

    self.handler.Handle(args, token=self.token)

    # Raises ArtifactNotRegisteredError if the upload did not register it.
    registry.GetArtifact("TestDrivers")
@db_test_lib.TestDatabases()
@artifact_test_lib.PatchDefaultArtifactRegistry
class ApiDeleteArtifactsHandlerTest(api_test_lib.ApiCallHandlerTest):
  """Tests for ApiDeleteArtifactsHandler.

  Each test method receives the patched artifact registry as its extra
  `registry` argument (injected by PatchDefaultArtifactRegistry).
  """

  def setUp(self):
    super(ApiDeleteArtifactsHandlerTest, self).setUp()
    self.handler = artifact_plugin.ApiDeleteArtifactsHandler()

  def UploadTestArtifacts(self):
    # Load the shared test artifact definitions into the data store.
    test_artifacts_file = os.path.join(config.CONFIG["Test.data_dir"],
                                       "artifacts", "test_artifacts.json")
    with io.open(test_artifacts_file, mode="r", encoding="utf-8") as fd:
      artifact.UploadArtifactYamlFile(fd.read())

  def testDeletesArtifactsWithSpecifiedNames(self, registry):
    self.UploadTestArtifacts()
    count = len(registry.GetArtifacts(reload_datastore_artifacts=True))

    args = self.handler.args_type(
        names=["TestFilesArtifact", "WMIActiveScriptEventConsumer"])
    self.handler.Handle(args, token=self.token)

    new_count = len(registry.GetArtifacts())
    # Check that we deleted exactly 2 artifacts.
    self.assertEqual(new_count, count - 2)

  def testDeleteDependency(self, registry):
    # Deleting an artifact another artifact depends on must be rejected.
    self.UploadTestArtifacts()
    args = self.handler.args_type(names=["TestAggregationArtifact"])
    with self.assertRaises(ValueError):
      self.handler.Handle(args, token=self.token)

  def testDeleteNonExistentArtifact(self, registry):
    self.UploadTestArtifacts()
    args = self.handler.args_type(names=["NonExistentArtifact"])
    # assertRaises() without a callable returns a context manager; creating
    # it first keeps the raised exception inspectable afterwards.
    e = self.assertRaises(ValueError)
    with e:
      self.handler.Handle(args, token=self.token)
    self.assertEqual(
        str(e.exception),
        "Artifact(s) to delete (NonExistentArtifact) not found.")
def main(argv):
  """Entry point: delegate to the GRR test runner."""
  test_lib.main(argv)


if __name__ == "__main__":
  app.run(main)
use serde_derive::Serialize;

#[derive(Serialize)]
enum Enum {
    #[serde(serialize_with = "serialize_some_newtype_variant")]
    Newtype(#[serde(skip_serializing)] String),
}

fn main() {}
// NOTE(review): compile-fail UI test exercising `skip_serializing` on the
// field of a newtype variant that also carries a variant-level
// `serialize_with`; the expected diagnostic lives in the companion .stderr
// file.  Comments are appended (not prepended) so the line numbers recorded
// there stay valid.
# Copyright 1999-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from _emerge.DepPriority import DepPriority
class DepPriorityNormalRange(object):
    """
    DepPriority properties   Index   Category

    buildtime                         HARD
    runtime                    3      MEDIUM
    runtime_post               2      MEDIUM_SOFT
    optional                   1      SOFT
    (none of the above)        0      NONE

    Maps softness categories to ignore-predicates: each predicate returns
    True when the given dependency priority may be ignored at that level.
    """

    MEDIUM = 3
    MEDIUM_SOFT = 2
    SOFT = 1
    NONE = 0

    @classmethod
    def _ignore_optional(cls, priority):
        # Anything that is not a plain DepPriority is never ignorable.
        return priority.__class__ is DepPriority and bool(priority.optional)

    @classmethod
    def _ignore_runtime_post(cls, priority):
        return priority.__class__ is DepPriority and \
            bool(priority.optional or priority.runtime_post)

    @classmethod
    def _ignore_runtime(cls, priority):
        return priority.__class__ is DepPriority and \
            bool(priority.optional or not priority.buildtime)

    # Public aliases: each softness level ignores everything at or below it.
    ignore_medium = _ignore_runtime
    ignore_medium_soft = _ignore_runtime_post
    ignore_soft = _ignore_optional
# Indexed by softness level (NONE..MEDIUM): the predicate to use when
# ignoring dependencies at that level; None means "ignore nothing".
DepPriorityNormalRange.ignore_priority = (
    None,
    DepPriorityNormalRange._ignore_optional,
    DepPriorityNormalRange._ignore_runtime_post,
    DepPriorityNormalRange._ignore_runtime
)
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.plugins
import io.ktor.client.*
import io.ktor.client.request.*
import io.ktor.http.*
import io.ktor.http.content.*
/**
 * This target contributes no platform-specific request body transformation:
 * always returns null so only the common default transformations apply.
 */
internal actual fun platformRequestDefaultTransform(
    contentType: ContentType?,
    context: HttpRequestBuilder,
    body: Any
): OutgoingContent? = null
internal actual fun HttpClient.platformResponseDefaultTransformers() {} | kotlin | github | https://github.com/ktorio/ktor | ktor-client/ktor-client-core/web/src/io/ktor/client/plugins/DefaultTransform.web.kt |
#!/usr/bin/env python
import sys, os, platform, xml, re, tempfile, glob, datetime, getpass, shutil
from optparse import OptionParser
from subprocess import Popen, PIPE
# Host platform identification, used throughout to pick tool names and paths.
hostos = os.name # 'nt', 'posix'
hostmachine = platform.machine() # 'x86', 'AMD64', 'x86_64'
errorCode = 0

# C source whose compilation deliberately *fails* with a diagnostic naming the
# highest SIMD level the compiler flags enable; the #error text is the answer.
SIMD_DETECTION_PROGRAM="""
#if __SSE5__
# error SSE5
#endif
#if __AVX2__
# error AVX2
#endif
#if __AVX__
# error AVX
#endif
#if __SSE4_2__
# error SSE4.2
#endif
#if __SSE4_1__
# error SSE4.1
#endif
#if __SSSE3__
# error SSSE3
#endif
#if __SSE3__
# error SSE3
#endif
#if __AES__
# error AES
#endif
#if __SSE2__
# error SSE2
#endif
#if __SSE__
# error SSE
#endif
#if __3dNOW__
# error 3dNOW
#endif
#if __MMX__
# error MMX
#endif
#if __ARM_NEON__
# error NEON
#endif
#error NOSIMD
"""

# One entry per CMakeCache.txt variable of interest: the TestSuite attribute
# name it is stored under, its default value, and the regex extracting it.
parse_patterns = (
  {'name': "has_perf_tests", 'default': "OFF", 'pattern': re.compile("^BUILD_PERF_TESTS:BOOL=(ON)$")},
  {'name': "has_accuracy_tests", 'default': "OFF", 'pattern': re.compile("^BUILD_TESTS:BOOL=(ON)$")},
  {'name': "cmake_home", 'default': None, 'pattern': re.compile("^CMAKE_HOME_DIRECTORY:INTERNAL=(.+)$")},
  {'name': "opencv_home", 'default': None, 'pattern': re.compile("^OpenCV_SOURCE_DIR:STATIC=(.+)$")},
  {'name': "tests_dir", 'default': None, 'pattern': re.compile("^EXECUTABLE_OUTPUT_PATH:PATH=(.+)$")},
  {'name': "build_type", 'default': "Release", 'pattern': re.compile("^CMAKE_BUILD_TYPE:STRING=(.*)$")},
  {'name': "svnversion_path", 'default': None, 'pattern': re.compile("^SVNVERSION_PATH:FILEPATH=(.*)$")},
  {'name': "git_executable", 'default': None, 'pattern': re.compile("^GIT_EXECUTABLE:FILEPATH=(.*)$")},
  {'name': "cxx_flags", 'default': "", 'pattern': re.compile("^CMAKE_CXX_FLAGS:STRING=(.*)$")},
  {'name': "cxx_flags_debug", 'default': "", 'pattern': re.compile("^CMAKE_CXX_FLAGS_DEBUG:STRING=(.*)$")},
  {'name': "cxx_flags_release", 'default': "", 'pattern': re.compile("^CMAKE_CXX_FLAGS_RELEASE:STRING=(.*)$")},
  {'name': "opencv_cxx_flags", 'default': "", 'pattern': re.compile("^OPENCV_EXTRA_C_FLAGS:INTERNAL=(.*)$")},
  {'name': "opencv_cxx_flags_debug", 'default': "", 'pattern': re.compile("^OPENCV_EXTRA_C_FLAGS_DEBUG:INTERNAL=(.*)$")},
  {'name': "opencv_cxx_flags_release", 'default': "", 'pattern': re.compile("^OPENCV_EXTRA_C_FLAGS_RELEASE:INTERNAL=(.*)$")},
  {'name': "cxx_flags_android", 'default': None, 'pattern': re.compile("^ANDROID_CXX_FLAGS:INTERNAL=(.*)$")},
  {'name': "ndk_path", 'default': None, 'pattern': re.compile("^(?:ANDROID_NDK|ANDROID_STANDALONE_TOOLCHAIN)?:PATH=(.*)$")},
  {'name': "android_abi", 'default': None, 'pattern': re.compile("^ANDROID_ABI:STRING=(.*)$")},
  {'name': "android_executable", 'default': None, 'pattern': re.compile("^ANDROID_EXECUTABLE:FILEPATH=(.*android.*)$")},
  {'name': "ant_executable", 'default': None, 'pattern': re.compile("^ANT_EXECUTABLE:FILEPATH=(.*ant.*)$")},
  {'name': "java_test_binary_dir", 'default': None, 'pattern': re.compile("^opencv_test_java_BINARY_DIR:STATIC=(.*)$")},
  {'name': "is_x64", 'default': "OFF", 'pattern': re.compile("^CUDA_64_BIT_DEVICE_CODE:BOOL=(ON)$")},#ugly(
  {'name': "cmake_generator", 'default': None, 'pattern': re.compile("^CMAKE_GENERATOR:INTERNAL=(.+)$")},
  {'name': "cxx_compiler", 'default': None, 'pattern': re.compile("^CMAKE_CXX_COMPILER:FILEPATH=(.+)$")},
  {'name': "cxx_compiler_arg1", 'default': None, 'pattern': re.compile("^CMAKE_CXX_COMPILER_ARG1:[A-Z]+=(.+)$")},
  {'name': "with_cuda", 'default': "OFF", 'pattern': re.compile("^WITH_CUDA:BOOL=(ON)$")},
  {'name': "cuda_library", 'default': None, 'pattern': re.compile("^CUDA_CUDA_LIBRARY:FILEPATH=(.+)$")},
  {'name': "core_dependencies", 'default': None, 'pattern': re.compile("^opencv_core_LIB_DEPENDS:STATIC=(.+)$")},
)
def query_yes_no(stdout, question, default="yes"):
    """Ask *question* on *stdout* and read a yes/no answer interactively.

    default -- the answer assumed when the user just presses Enter: "yes",
               "no", or None (no default; keep asking).
    Returns True for "yes", False for "no".
    Raises ValueError for an unrecognized *default*.
    """
    valid = {"yes":True, "y":True, "ye":True, "no":False, "n":False}
    # Bug fix: compare against None with "is", not "==".
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)

    while True:
        stdout.write(os.linesep + question + prompt)
        choice = raw_input().lower()
        if default is not None and choice == '':
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            stdout.write("Please respond with 'yes' or 'no' "
                         "(or 'y' or 'n').\n")
def getRunningProcessExePathByName_win32(name):
    """Win32: return the executable path of a running process named *name*.

    Walks the Toolhelp32 process snapshot looking for "<name>.exe"; on a
    match, takes a module snapshot of that process — the first module entry
    is the main executable, whose szExePath is returned.  Returns None when
    no matching process is found.
    """
    from ctypes import windll, POINTER, pointer, Structure, sizeof
    from ctypes import c_long , c_int , c_uint , c_char , c_ubyte , c_char_p , c_void_p

    class PROCESSENTRY32(Structure):
        # Mirrors the Win32 PROCESSENTRY32 struct (tlhelp32.h).
        _fields_ = [ ( 'dwSize' , c_uint ) ,
                     ( 'cntUsage' , c_uint) ,
                     ( 'th32ProcessID' , c_uint) ,
                     ( 'th32DefaultHeapID' , c_uint) ,
                     ( 'th32ModuleID' , c_uint) ,
                     ( 'cntThreads' , c_uint) ,
                     ( 'th32ParentProcessID' , c_uint) ,
                     ( 'pcPriClassBase' , c_long) ,
                     ( 'dwFlags' , c_uint) ,
                     ( 'szExeFile' , c_char * 260 ) ,
                     ( 'th32MemoryBase' , c_long) ,
                     ( 'th32AccessKey' , c_long ) ]

    class MODULEENTRY32(Structure):
        # Mirrors the Win32 MODULEENTRY32 struct (tlhelp32.h).
        _fields_ = [ ( 'dwSize' , c_long ) ,
                     ( 'th32ModuleID' , c_long ),
                     ( 'th32ProcessID' , c_long ),
                     ( 'GlblcntUsage' , c_long ),
                     ( 'ProccntUsage' , c_long ) ,
                     ( 'modBaseAddr' , c_long ) ,
                     ( 'modBaseSize' , c_long ) ,
                     ( 'hModule' , c_void_p ) ,
                     ( 'szModule' , c_char * 256 ),
                     ( 'szExePath' , c_char * 260 ) ]

    TH32CS_SNAPPROCESS = 2
    TH32CS_SNAPMODULE = 0x00000008

    ## CreateToolhelp32Snapshot
    CreateToolhelp32Snapshot= windll.kernel32.CreateToolhelp32Snapshot
    CreateToolhelp32Snapshot.reltype = c_long
    CreateToolhelp32Snapshot.argtypes = [ c_int , c_int ]
    ## Process32First
    Process32First = windll.kernel32.Process32First
    Process32First.argtypes = [ c_void_p , POINTER( PROCESSENTRY32 ) ]
    Process32First.rettype = c_int
    ## Process32Next
    Process32Next = windll.kernel32.Process32Next
    Process32Next.argtypes = [ c_void_p , POINTER(PROCESSENTRY32) ]
    Process32Next.rettype = c_int
    ## CloseHandle
    CloseHandle = windll.kernel32.CloseHandle
    CloseHandle.argtypes = [ c_void_p ]
    CloseHandle.rettype = c_int
    ## Module32First
    Module32First = windll.kernel32.Module32First
    Module32First.argtypes = [ c_void_p , POINTER(MODULEENTRY32) ]
    Module32First.rettype = c_int

    hProcessSnap = c_void_p(0)
    hProcessSnap = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS , 0 )
    pe32 = PROCESSENTRY32()
    pe32.dwSize = sizeof( PROCESSENTRY32 )
    ret = Process32First( hProcessSnap , pointer( pe32 ) )
    path = None

    while ret :
        if name + ".exe" == pe32.szExeFile:
            # Found the process: its first module is the main executable.
            hModuleSnap = c_void_p(0)
            me32 = MODULEENTRY32()
            me32.dwSize = sizeof( MODULEENTRY32 )
            hModuleSnap = CreateToolhelp32Snapshot( TH32CS_SNAPMODULE, pe32.th32ProcessID )
            ret = Module32First( hModuleSnap, pointer(me32) )
            path = me32.szExePath
            CloseHandle( hModuleSnap )
            if path:
                break
        ret = Process32Next( hProcessSnap, pointer(pe32) )

    CloseHandle( hProcessSnap )
    return path
def getRunningProcessExePathByName_posix(name):
    """POSIX: return the executable path of a running process whose binary
    name ends with *name*, scanning /proc; None when nothing matches."""
    for entry in os.listdir('/proc'):
        if not entry.isdigit():
            continue
        try:
            exe_path = os.readlink(os.path.join('/proc', entry, 'exe'))
        except:
            # Unreadable entry (permissions, process exited): skip it.
            continue
        if exe_path and exe_path.endswith(name):
            return exe_path
def getRunningProcessExePathByName(name):
    """Best-effort lookup of a running process's executable path by *name*;
    returns None on unsupported platforms or on any failure."""
    try:
        if hostos == "nt":
            return getRunningProcessExePathByName_win32(name)
        if hostos == "posix":
            return getRunningProcessExePathByName_posix(name)
        return None
    except:
        # Deliberately best-effort: any error is treated as "not found".
        return None
class TestSuite(object):
def __init__(self, options, path = None):
    """Build a test-suite descriptor from a CMake build directory.

    options -- parsed command-line options (mode, adb_serial, ...).
    path    -- build directory containing CMakeCache.txt, or None.

    Every entry of parse_patterns becomes an attribute, first set to its
    default and then overridden by the matching CMakeCache.txt line.
    """
    self.options = options
    self.path = path
    self.error = None
    self.setUp = None
    self.tearDown = None
    self.adb = None
    self.targetos = None
    self.nameprefix = "opencv_" + self.options.mode + "_"

    # Seed all known attributes with their defaults.
    for p in parse_patterns:
        setattr(self, p["name"], p["default"])
    if self.path:
        cachefile = open(os.path.join(self.path, "CMakeCache.txt"), "rt")
        try:
            for l in cachefile.readlines():
                ll = l.strip()
                if not ll or ll.startswith("#"):
                    continue
                for p in parse_patterns:
                    match = p["pattern"].match(ll)
                    if match:
                        value = match.groups()[0]
                        # CMake encodes missing values as "...-NOTFOUND".
                        if value and not value.endswith("-NOTFOUND"):
                            setattr(self, p["name"], value)
        except:
            pass
        cachefile.close()

    # detect target platform: any Android cache variable implies an
    # Android cross-build, otherwise we target the host OS.
    if self.android_executable or self.android_abi or self.ndk_path:
        self.targetos = "android"
    else:
        self.targetos = hostos
    self.initialize()
def initialize(self):
    """Post-process the raw CMakeCache values: locate adb/aapt for Android
    targets, normalize boolean flags, resolve the test binary directory,
    detect the target architecture and CUDA availability, and enumerate
    the available test applications."""
    # fix empty tests dir
    if not self.tests_dir:
        self.tests_dir = self.path
    self.tests_dir = os.path.normpath(self.tests_dir)

    # compute path to adb (relative to the SDK's "android" tool)
    if self.android_executable:
        self.adb = os.path.join(os.path.dirname(os.path.dirname(self.android_executable)), ("platform-tools/adb","platform-tools/adb.exe")[hostos == 'nt'])
        if not os.path.isfile(self.adb) or not os.access(self.adb, os.X_OK):
            self.adb = None
    else:
        self.adb = None

    if self.targetos == "android":
        # fix adb tool location: fall back to a running adb daemon, then PATH
        if not self.adb:
            self.adb = getRunningProcessExePathByName("adb")
        if not self.adb:
            self.adb = "adb"
        # From here on self.adb is the command *list* used to invoke adb.
        if self.options.adb_serial:
            self.adb = [self.adb, "-s", self.options.adb_serial]
        else:
            self.adb = [self.adb]
        try:
            output = Popen(self.adb + ["shell", "ls"], stdout=PIPE, stderr=PIPE).communicate()
        except OSError:
            self.adb = []
        # remember current device serial. Needed if another device is connected while this script runs
        if self.adb and not self.options.adb_serial:
            adb_res = self.runAdb("devices")
            if not adb_res:
                self.error = "Could not run adb command: %s (for %s)" % (self.error, self.path)
                self.adb = []
            else:
                # assume here that device name may consists of any characters except newline
                connected_devices = re.findall(r"^[^\n]+[ \t]+device\r?$", adb_res, re.MULTILINE)
                if not connected_devices:
                    self.error = "Android device not found"
                    self.adb = []
                elif len(connected_devices) != 1:
                    self.error = "Too many (%s) devices are connected. Please specify single device using --serial option:\n\n" % (len(connected_devices)) + adb_res
                    self.adb = []
                else:
                    self.options.adb_serial = connected_devices[0].split("\t")[0]
                    self.adb = self.adb + ["-s", self.options.adb_serial]
        if self.adb:
            # construct name for aapt tool
            self.aapt = [os.path.join(os.path.dirname(self.adb[0]), ("aapt","aapt.exe")[hostos == 'nt'])]
            if not os.path.isfile(self.aapt[0]):
                # it's moved in SDK r22
                sdk_dir = os.path.dirname( os.path.dirname(self.adb[0]) )
                aapt_fn = ("aapt", "aapt.exe")[hostos == 'nt']
                for r, ds, fs in os.walk( os.path.join(sdk_dir, 'build-tools') ):
                    if aapt_fn in fs:
                        self.aapt = [ os.path.join(r, aapt_fn) ]
                        break
                else:
                    self.error = "Can't find '%s' tool!" % aapt_fn

    # fix has_perf_tests param: convert "ON"/"OFF" strings to booleans
    self.has_perf_tests = self.has_perf_tests == "ON"
    self.has_accuracy_tests = self.has_accuracy_tests == "ON"
    # fix is_x64 flag
    self.is_x64 = self.is_x64 == "ON"
    if not self.is_x64 and ("X64" in "%s %s %s" % (self.cxx_flags, self.cxx_flags_release, self.cxx_flags_debug) or "Win64" in self.cmake_generator):
        self.is_x64 = True

    # fix test path: MSVC builds put binaries in per-configuration subdirs
    if "Visual Studio" in self.cmake_generator:
        if self.options.configuration:
            self.tests_dir = os.path.join(self.tests_dir, self.options.configuration)
        else:
            self.tests_dir = os.path.join(self.tests_dir, self.build_type)
    elif not self.is_x64 and self.cxx_compiler:
        #one more attempt to detect x64 compiler
        try:
            compiler = [self.cxx_compiler]
            if self.cxx_compiler_arg1:
                compiler.append(self.cxx_compiler_arg1)
            output = Popen(compiler + ["-v"], stdout=PIPE, stderr=PIPE).communicate()
            if not output[0] and "x86_64" in output[1]:
                self.is_x64 = True
        except OSError:
            pass

    # detect target arch
    if self.targetos == "android":
        if "armeabi-v7a" in self.android_abi:
            self.targetarch = "armv7a"
        elif "armeabi-v6" in self.android_abi:
            self.targetarch = "armv6"
        elif "armeabi" in self.android_abi:
            self.targetarch = "armv5te"
        elif "x86" in self.android_abi:
            self.targetarch = "x86"
        elif "mips" in self.android_abi:
            self.targetarch = "mips"
        else:
            self.targetarch = "ARM"
    elif self.is_x64 and hostmachine in ["AMD64", "x86_64"]:
        self.targetarch = "x64"
    elif hostmachine in ["x86", "AMD64", "x86_64"]:
        self.targetarch = "x86"
    else:
        self.targetarch = "unknown"

    # fix CUDA attributes; CUDA test runs need the driver library and x86/x64
    self.with_cuda = self.with_cuda == "ON"
    if self.cuda_library and self.cuda_library.endswith("-NOTFOUND"):
        self.cuda_library = None
    self.has_cuda = self.with_cuda and self.cuda_library and self.targetarch in ["x86", "x64"]

    self.hardware = None

    # Record the VCS revisions of the build and (possibly different) OpenCV
    # source trees, then enumerate the available test binaries.
    self.cmake_home_vcver = self.getVCVersion(self.cmake_home)
    if self.opencv_home == self.cmake_home:
        self.opencv_home_vcver = self.cmake_home_vcver
    else:
        self.opencv_home_vcver = self.getVCVersion(self.opencv_home)

    self.tests = self.getAvailableTestApps()
def getVCVersion(self, root_path):
if not root_path:
return None
if os.path.isdir(os.path.join(root_path, ".svn")):
return self.getSvnVersion(root_path)
elif os.path.isdir(os.path.join(root_path, ".git")):
return self.getGitHash(root_path)
return None
def getGitHash(self, path):
if not path or not self.git_executable:
return None
try:
output = Popen([self.git_executable, "rev-parse", "--short", "HEAD"], stdout=PIPE, stderr=PIPE, cwd = path).communicate()
if not output[1]:
return output[0].strip()
else:
return None
except OSError:
return None
def getSvnVersion(self, path):
if not path:
val = None
elif not self.svnversion_path and hostos == 'nt':
val = self.tryGetSvnVersionWithTortoise(path)
else:
svnversion = self.svnversion_path
if not svnversion:
svnversion = "svnversion"
try:
output = Popen([svnversion, "-n", path], stdout=PIPE, stderr=PIPE).communicate()
if not output[1]:
val = output[0]
else:
val = None
except OSError:
val = None
if val:
val = val.replace(" ", "_")
return val
def tryGetSvnVersionWithTortoise(self, path):
    """Query the SVN revision of *path* using TortoiseSVN's SubWCRev.exe.

    Writes a one-line template file, lets SubWCRev expand the
    $WCRANGE$/$WCMODS$ keywords into a second file and returns its
    contents ("exported" for non-working-copies). Returns None on any
    error.
    """
    # Bug fix: the temp dir variable must exist (and not shadow the
    # builtin ``dir``) before the try block -- previously, a failure in
    # mkdtemp() made the finally clause see the builtin ``dir`` function
    # (truthy) and crash in shutil.rmtree, masking the return value.
    tmp_dir = None
    try:
        wcrev = "SubWCRev.exe"
        tmp_dir = tempfile.mkdtemp()
        tmpfilename = os.path.join(tmp_dir, "svn.tmp")
        tmpfilename2 = os.path.join(tmp_dir, "svn_out.tmp")
        tmpfile = open(tmpfilename, "w")
        tmpfile.write("$WCRANGE$$WCMODS?M:$")
        tmpfile.close()
        output = Popen([wcrev, path, tmpfilename, tmpfilename2, "-f"], stdout=PIPE, stderr=PIPE).communicate()
        if "is not a working copy" in output[0]:
            version = "exported"
        else:
            tmpfile = open(tmpfilename2, "r")
            version = tmpfile.read()
            tmpfile.close()
        return version
    except:
        # SubWCRev missing / not a working copy dump / IO failure:
        # version detection is best-effort, so swallow and report None
        return None
    finally:
        if tmp_dir:
            # cleanup must never mask the value being returned
            shutil.rmtree(tmp_dir, ignore_errors=True)
def isTest(self, fullpath):
    """Decide whether *fullpath* looks like a runnable test binary."""
    if not os.path.isfile(fullpath):
        return False
    # on Windows targets only .exe files qualify
    if self.targetos == "nt" and not fullpath.endswith(".exe"):
        return False
    if hostos == self.targetos:
        # native binary: it must actually be executable here
        return os.access(fullpath, os.X_OK)
    # cross-target binaries (e.g. Android .apk or ELF executables)
    # cannot be probed on this host, so assume they are tests
    return True
def getAvailableTestApps(self):
    """List test executables found in the configured tests directory.

    Appends the pseudo-test "java" when an Ant-based Java test build is
    available; returns an empty list when the directory is missing.
    """
    if not (self.tests_dir and os.path.isdir(self.tests_dir)):
        return []
    pattern = os.path.join(self.tests_dir, self.nameprefix + "*")
    apps = [f for f in glob.glob(pattern) if self.isTest(f)]
    if self.ant_executable and self.java_test_binary_dir:
        apps.append("java")
    return apps
def getLogName(self, app, timestamp):
    """Build the XML log-file name for a test run of *app* at *timestamp*.

    Two naming schemes exist: the default short
    "<app>_<os>_<arch>_<hw><rev>_<tstamp>.xml" form, and (with
    --longname) a long form that also probes the device/compiler for
    feature tags (OS version, TBB, CUDA, SIMD level).
    """
    # strip directory, extension and the library name prefix from the app name
    app = os.path.basename(app)
    if app.endswith(".exe"):
        if app.endswith("d.exe"):
            # Debug-configuration binaries carry a trailing 'd'
            app = app[:-5]
        else:
            app = app[:-4]
    if app.startswith(self.nameprefix):
        app = app[len(self.nameprefix):]
    # combine the cmake and opencv checkout revisions (when they differ)
    if self.cmake_home_vcver:
        if self.cmake_home_vcver == self.opencv_home_vcver:
            rev = self.cmake_home_vcver
        elif self.opencv_home_vcver:
            rev = self.cmake_home_vcver + "-" + self.opencv_home_vcver
        else:
            rev = self.cmake_home_vcver
    else:
        rev = None
    if rev:
        # ':' appears in svnversion ranges and is not file-name safe everywhere
        rev = rev.replace(":","to")
    else:
        rev = ""
    if self.options.useLongNames:
        if not rev:
            rev = "unknown"
        tstamp = timestamp.strftime("%Y%m%d-%H%M%S")
        features = []
        #OS
        _os = ""
        if self.targetos == "android":
            _os = "Android" + self.runAdb("shell", "getprop ro.build.version.release").strip()
        else:
            mv = platform.mac_ver()
            if mv[0]:
                _os = "Darwin" + mv[0]
            else:
                wv = platform.win32_ver()
                if wv[0]:
                    _os = "Windows" + wv[0]
                else:
                    # NOTE(review): platform.linux_distribution was removed in
                    # Python 3.8 -- this file is Python 2 era code
                    lv = platform.linux_distribution()
                    if lv[0]:
                        _os = lv[0] + lv[1]
                    else:
                        _os = self.targetos
        features.append(_os)
        #HW(x86, x64, ARMv7a)
        if self.targetarch:
            features.append(self.targetarch)
        #TBB
        if ";tbb;" in self.core_dependencies:
            features.append("TBB")
        #CUDA
        if self.has_cuda:
            #TODO: determine compute capability
            features.append("CUDA")
        #SIMD - compile a tiny probe program whose #error message names the
        # highest SIMD level enabled by the build's compiler flags
        compiler_output = ""
        try:
            tmpfile = tempfile.mkstemp(suffix=".cpp", text = True)
            fd = os.fdopen(tmpfile[0], "w+b")
            fd.write(SIMD_DETECTION_PROGRAM)
            fd.close();
            options = [self.cxx_compiler]
            if self.cxx_compiler_arg1:
                options.append(self.cxx_compiler_arg1)
            cxx_flags = self.cxx_flags + " " + self.cxx_flags_release + " " + self.opencv_cxx_flags + " " + self.opencv_cxx_flags_release
            if self.targetos == "android" and self.cxx_flags_android:
                cxx_flags = self.cxx_flags_android + " " + cxx_flags
            # re-join flag fragments that were split inside quoted values
            prev_option = None
            for opt in cxx_flags.split(" "):
                if opt.count('\"') % 2 == 1:
                    if prev_option is None:
                        prev_option = opt
                    else:
                        options.append(prev_option + " " + opt)
                        prev_option = None
                elif prev_option is None:
                    options.append(opt)
                else:
                    prev_option = prev_option + " " + opt
            options.append(tmpfile[1])
            output = Popen(options, stdout=PIPE, stderr=PIPE).communicate()
            # the probe always fails to compile; the SIMD tag is in stderr
            compiler_output = output[1]
            os.remove(tmpfile[1])
        except OSError:
            pass
        if compiler_output:
            m = re.search("#error\W+(\w+)", compiler_output)
            if m:
                features.append(m.group(1))
        #fin
        return "%s__%s__%s__%s.xml" % (app, rev, tstamp, "_".join(features))
    else:
        if rev:
            rev = rev + "_"
        if self.hardware:
            hw = str(self.hardware).replace(" ", "_") + "_"
        elif self.has_cuda:
            hw = "CUDA_"
        else:
            hw = ""
        tstamp = timestamp.strftime("%Y%m%d-%H%M%S")
        lname = "%s_%s_%s_%s%s%s.xml" % (app, self.targetos, self.targetarch, hw, rev, tstamp)
        # parentheses (e.g. from CPU model names) are not shell-friendly
        lname = str.replace(lname, '(', '_')
        lname = str.replace(lname, ')', '_')
        return lname
def getTest(self, name):
    """Resolve *name* to a concrete test binary of this suite.

    *name* may be a full path, a file name inside the tests directory
    (with or without extension), or a short OpenCV test name (the file
    name without prefix/extension, optionally with a Debug 'd' suffix).
    Returns the resolved path, or None when nothing matches.
    """
    # full path
    if self.isTest(name):
        return name
    # name only
    fullname = os.path.join(self.tests_dir, name)
    if self.isTest(fullname):
        return fullname
    # name without extension
    if self.isTest(fullname + ".exe"):
        return fullname + ".exe"
    if self.targetos == "android" and self.isTest(fullname + ".apk"):
        # bug fix: ".apk" was previously appended AFTER ".exe" had already
        # been appended, so this probe looked for "<name>.exe.apk" and
        # could never match a real package
        return fullname + ".apk"
    # short name for OpenCV tests
    for t in self.tests:
        if t == name:
            return t
        fname = os.path.basename(t)
        if fname == name:
            return t
        if fname.endswith(".exe") or (self.targetos == "android" and fname.endswith(".apk")):
            fname = fname[:-4]
            if fname == name:
                return t
            # Debug builds carry a trailing 'd' before the extension
            if self.options.configuration == "Debug" and fname == name + 'd':
                return t
        if fname.startswith(self.nameprefix):
            fname = fname[len(self.nameprefix):]
            if fname == name:
                return t
            if self.options.configuration == "Debug" and fname == name + 'd':
                return t
    return None
def runAdb(self, *args):
    """Run the configured adb command with *args* appended.

    Returns adb's stdout on success. On failure, stores adb's stderr in
    ``self.error`` (when adb produced any) and returns None.
    """
    cmd = list(self.adb) + list(args)
    try:
        out, err = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()
    except OSError:
        # adb binary missing or not launchable
        return None
    if err:
        self.error = err
        return None
    return out
def isRunnable(self):
    """Check whether this test suite can run on the current host.

    Returns False (after recording a reason in ``self.error``) when an
    earlier error was recorded, when a 64-bit build is on a 32-bit host,
    or when the Android target/device is unusable. As a side effect,
    fills ``self.hardware`` from the device's /proc/cpuinfo when
    available.
    """
    if self.error:
        return False
    if self.targetarch == "x64" and hostmachine == "x86":
        self.error = "Target architecture is incompatible with current platform (at %s)" % self.path
        return False
    if self.targetos == "android":
        if not self.adb:
            self.error = "Could not find adb executable (for %s)" % self.path
            return False
        if "armeabi-v7a" in self.android_abi:
            # verify the attached device actually supports the ABI the
            # tests were built for
            adb_res = self.runAdb("shell", "cat /proc/cpuinfo")
            if not adb_res:
                self.error = "Could not get info about Android platform: %s (for %s)" % (self.error, self.path)
                return False
            if "ARMv7" not in adb_res:
                self.error = "Android device does not support ARMv7 commands, but tests are built for armeabi-v7a (for %s)" % self.path
                return False
            if "NEON" in self.android_abi and "neon" not in adb_res:
                self.error = "Android device has no NEON, but tests are built for %s (for %s)" % (self.android_abi, self.path)
                return False
            # remember the device's hardware name for log-file naming
            hw = re.search(r"^Hardware[ \t]*:[ \t]*(.*?)$", adb_res, re.MULTILINE)
            if hw:
                self.hardware = hw.groups()[0].strip()
    return True
def runTest(self, path, workingDir, _stdout, _stderr, args = []):
    """Run a single test binary and return the path of its XML log.

    Four execution modes, chosen from *path* and the target OS:
    Android .apk (jUnit via adb instrument), Android native binary
    (pushed and run via adb shell), the "java" pseudo-test (Ant), and a
    plain local executable. Returns the host-side log path when one was
    produced, otherwise None.
    """
    global errorCode
    if self.error:
        return
    # do not mutate the caller's argument list
    args = args[:]
    timestamp = datetime.datetime.now()
    logfile = self.getLogName(path, timestamp)
    exe = os.path.abspath(path)
    # honour a user-supplied --gtest_output, else direct it to our log name
    userlog = [a for a in args if a.startswith("--gtest_output=")]
    if len(userlog) == 0:
        args.append("--gtest_output=xml:" + logfile)
    else:
        logfile = userlog[0][userlog[0].find(":")+1:]
    if self.targetos == "android" and exe.endswith(".apk"):
        print "Run java tests:", exe
        try:
            # get package info
            output = Popen(self.aapt + ["dump", "xmltree", exe, "AndroidManifest.xml"], stdout=PIPE, stderr=_stderr).communicate()
            if not output[0]:
                print >> _stderr, "fail to dump manifest from", exe
                return
            tags = re.split(r"[ ]+E: ", output[0])
            # get package name
            manifest_tag = [t for t in tags if t.startswith("manifest ")]
            if not manifest_tag:
                print >> _stderr, "fail to read package name from", exe
                return
            pkg_name = re.search(r"^[ ]+A: package=\"(?P<pkg>.*?)\" \(Raw: \"(?P=pkg)\"\)\r?$", manifest_tag[0], flags=re.MULTILINE).group("pkg")
            # get test instrumentation info
            instrumentation_tag = [t for t in tags if t.startswith("instrumentation ")]
            if not instrumentation_tag:
                print >> _stderr, "can not find instrumentation detials in", exe
                return
            pkg_runner = re.search(r"^[ ]+A: android:name\(0x[0-9a-f]{8}\)=\"(?P<runner>.*?)\" \(Raw: \"(?P=runner)\"\)\r?$", instrumentation_tag[0], flags=re.MULTILINE).group("runner")
            pkg_target = re.search(r"^[ ]+A: android:targetPackage\(0x[0-9a-f]{8}\)=\"(?P<pkg>.*?)\" \(Raw: \"(?P=pkg)\"\)\r?$", instrumentation_tag[0], flags=re.MULTILINE).group("pkg")
            if not pkg_name or not pkg_runner or not pkg_target:
                print >> _stderr, "can not find instrumentation detials in", exe
                return
            # --package: either narrow to a sub-package (leading '.') or
            # replace the target package entirely
            if self.options.junit_package:
                if self.options.junit_package.startswith("."):
                    pkg_target += self.options.junit_package
                else:
                    pkg_target = self.options.junit_package
            # uninstall previously installed package
            print >> _stderr, "Uninstalling old", pkg_name, "from device..."
            Popen(self.adb + ["uninstall", pkg_name], stdout=PIPE, stderr=_stderr).communicate()
            print >> _stderr, "Installing new", exe, "to device...",
            output = Popen(self.adb + ["install", exe], stdout=PIPE, stderr=PIPE).communicate()
            if output[0] and output[0].strip().endswith("Success"):
                print >> _stderr, "Success"
            else:
                print >> _stderr, "Failure"
                print >> _stderr, "Failed to install", exe, "to device"
                return
            print >> _stderr, "Running jUnit tests for ", pkg_target
            if self.setUp:
                self.setUp()
            Popen(self.adb + ["shell", "am instrument -w -e package " + pkg_target + " " + pkg_name + "/" + pkg_runner], stdout=_stdout, stderr=_stderr).wait()
            if self.tearDown:
                self.tearDown()
        except OSError:
            pass
        return
    elif self.targetos == "android":
        hostlogpath = ""
        usercolor = [a for a in args if a.startswith("--gtest_color=")]
        if len(usercolor) == 0 and _stdout.isatty() and hostos != "nt":
            args.append("--gtest_color=yes")
        try:
            tempdir = "/data/local/tmp/"
            andoidcwd = tempdir + getpass.getuser().replace(" ","") + "_" + self.options.mode +"/"
            exename = os.path.basename(exe)
            androidexe = andoidcwd + exename
            # upload
            _stderr.write("Uploading... ")
            output = Popen(self.adb + ["push", exe, androidexe], stdout=_stdout, stderr=_stderr).wait()
            if output != 0:
                print >> _stderr, "adb finishes unexpectedly with error code", output
                return
            # chmod
            output = Popen(self.adb + ["shell", "chmod 777 " + androidexe], stdout=_stdout, stderr=_stderr).wait()
            if output != 0:
                print >> _stderr, "adb finishes unexpectedly with error code", output
                return
            # run
            if self.options.help:
                command = exename + " --help"
            else:
                command = exename + " " + " ".join(args)
            print >> _stderr, "Run command:", command
            if self.setUp:
                self.setUp()
            # propagate test environment to the device through 'export'
            # statements prefixed to the shell command
            env = self.options.android_env.copy()
            env['OPENCV_TEST_DATA_PATH'] = self.options.test_data_path
            if self.options.android_propagate_opencv_env:
                for k, v in os.environ.items():
                    if k.startswith('OPENCV') and not k in env:
                        env[k] = v
            print >> _stderr, "Android environment variables: \n", '\n'.join([' %s=%s' % (k, v) for k, v in env.items()])
            commandPrefix = ''.join(['export %s=%s && ' % (k, v) for k, v in env.items()])
            Popen(self.adb + ["shell", commandPrefix + "cd " + andoidcwd + "&& ./" + command], stdout=_stdout, stderr=_stderr).wait()
            if self.tearDown:
                self.tearDown()
            # try get log
            if not self.options.help:
                #_stderr.write("Pull log... ")
                hostlogpath = os.path.join(workingDir, logfile)
                output = Popen(self.adb + ["pull", andoidcwd + logfile, hostlogpath], stdout=_stdout, stderr=PIPE).wait()
                if output != 0:
                    print >> _stderr, "adb finishes unexpectedly with error code", output
                    return
                #rm log
                Popen(self.adb + ["shell", "rm " + andoidcwd + logfile], stdout=PIPE, stderr=PIPE).wait()
            # clean temporary files
            Popen(self.adb + ["shell", "rm " + tempdir + "__opencv_temp.*"], stdout=PIPE, stderr=PIPE).wait()
        except OSError:
            pass
        if os.path.isfile(hostlogpath):
            return hostlogpath
        return None
    elif path == "java":
        # "java" pseudo-test: delegate to the Ant build's buildAndTest target
        cmd = [self.ant_executable,
               "-Dopencv.build.type="
               + (self.options.configuration if self.options.configuration else self.build_type),
               "buildAndTest"]
        print >> _stderr, "Run command:", " ".join(cmd)
        try:
            errorCode = Popen(cmd, stdout=_stdout, stderr=_stderr, cwd = self.java_test_binary_dir + "/.build").wait()
        except:
            print "Unexpected error:", sys.exc_info()[0]
        return None
    else:
        cmd = [exe]
        if self.options.help:
            cmd.append("--help")
        else:
            cmd.extend(args)
        # run the binary with a private OPENCV_TEMP_PATH so its leftovers
        # can be removed afterwards
        orig_temp_path = os.environ.get('OPENCV_TEMP_PATH')
        temp_path = tempfile.mkdtemp(prefix="__opencv_temp.", dir=orig_temp_path or None)
        os.environ['OPENCV_TEMP_PATH'] = temp_path
        print >> _stderr, "Run command:", " ".join(cmd)
        try:
            errorCode = Popen(cmd, stdout=_stdout, stderr=_stderr, cwd = workingDir).wait()
        except:
            print "Unexpected error:", sys.exc_info()[0]
        # clean temporary files
        if orig_temp_path:
            os.environ['OPENCV_TEMP_PATH'] = orig_temp_path
        else:
            del os.environ['OPENCV_TEMP_PATH']
        try:
            shutil.rmtree(temp_path)
            pass
        except:
            pass
        logpath = os.path.join(workingDir, logfile)
        if os.path.isfile(logpath):
            return logpath
        return None
def runTests(self, tests, _stdout, _stderr, workingDir, args = []):
    """Resolve and run the requested *tests* (all suite tests when empty).

    Returns the list of produced log-file paths, relative to the current
    directory. Unresolvable test names are reported to *_stderr* and
    skipped; an unrunnable suite yields an empty list.
    """
    if not self.isRunnable():
        print >> _stderr, "Error:", self.error
    if self.error:
        return []
    if self.adb and self.targetos == "android":
        print "adb command:", " ".join(self.adb)
    if not tests:
        # no explicit selection: run everything the suite discovered
        tests = self.tests
    logs = []
    for test in tests:
        t = self.getTest(test)
        if t:
            logfile = self.runTest(t, workingDir, _stdout, _stderr, args)
            if logfile:
                logs.append(os.path.relpath(logfile, "."))
        else:
            print >> _stderr, "Error: Test \"%s\" is not found in %s" % (test, self.tests_dir)
    return logs
def getRunArgs(args):
    """Map each path in *args* to its enclosing CMake build directory.

    For every argument, walks up the directory tree until a directory
    containing CMakeCache.txt is found; paths with no such ancestor are
    silently dropped.
    """
    build_dirs = []
    for arg in args:
        current = os.path.abspath(arg)
        while True:
            if os.path.isdir(current) and os.path.isfile(os.path.join(current, "CMakeCache.txt")):
                build_dirs.append(current)
                break
            parent = os.path.dirname(current)
            if parent == current:
                # reached the filesystem root without finding a build dir
                break
            current = parent
    return build_dirs
if __name__ == "__main__":
    # gtest/perf flags are passed through to the test binaries untouched;
    # everything else is parsed by this script
    test_args = [a for a in sys.argv if a.startswith("--perf_") or a.startswith("--gtest_")]
    argv = [a for a in sys.argv if not(a.startswith("--perf_") or a.startswith("--gtest_"))]
    parser = OptionParser()
    parser.add_option("-t", "--tests", dest="tests", help="comma-separated list of modules to test", metavar="SUITS", default="")
    parser.add_option("-w", "--cwd", dest="cwd", help="working directory for tests", metavar="PATH", default=".")
    parser.add_option("-a", "--accuracy", dest="accuracy", help="look for accuracy tests instead of performance tests", action="store_true", default=False)
    parser.add_option("-l", "--longname", dest="useLongNames", action="store_true", help="generate log files with long names", default=False)
    parser.add_option("", "--android_test_data_path", dest="test_data_path", help="OPENCV_TEST_DATA_PATH for Android run", metavar="PATH", default="/sdcard/opencv_testdata/")
    parser.add_option("", "--android_env", dest="android_env_array", help="Environment variable for Android run (NAME=VALUE)", action='append')
    parser.add_option("", "--android_propagate_opencv_env", dest="android_propagate_opencv_env", help="Propagate OPENCV* environment variables for Android run", action="store_true", default=False)
    parser.add_option("", "--configuration", dest="configuration", help="force Debug or Release configuration", metavar="CFG", default="")
    parser.add_option("", "--serial", dest="adb_serial", help="Android: directs command to the USB device or emulator with the given serial number", metavar="serial number", default="")
    parser.add_option("", "--package", dest="junit_package", help="Android: run jUnit tests for specified package", metavar="package", default="")
    parser.add_option("", "--help-tests", dest="help", help="Show help for test executable", action="store_true", default=False)
    parser.add_option("", "--check", dest="check", help="Shortcut for '--perf_min_samples=1 --perf_force_samples=1'", action="store_true", default=False)
    parser.add_option("", "--list", dest="list", help="List available tests", action="store_true", default=False)
    (options, args) = parser.parse_args(argv)
    # the mode selects the test binary name prefix (opencv_test_/opencv_perf_)
    if options.accuracy:
        options.mode = "test"
    else:
        options.mode = "perf"
    # positional arguments are resolved to CMake build directories
    run_args = getRunArgs(args[1:] or ['.'])
    if len(run_args) == 0:
        print >> sys.stderr, "Usage:", os.path.basename(sys.argv[0]), "[options] [build_path]"
        exit(1)
    options.android_env = {}
    if options.android_env_array:
        for entry in options.android_env_array:
            k, v = entry.split("=", 1)
            options.android_env[k] = v
    tests = [s.strip() for s in options.tests.split(",") if s]
    if len(tests) != 1 or len(run_args) != 1:
        # remove --gtest_output from params
        test_args = [a for a in test_args if not a.startswith("--gtest_output=")]
    # --check: force single-sample runs unless the user overrode them
    if options.check:
        if not [a for a in test_args if a.startswith("--perf_min_samples=")] :
            test_args.extend(["--perf_min_samples=1"])
        if not [a for a in test_args if a.startswith("--perf_force_samples=")] :
            test_args.extend(["--perf_force_samples=1"])
        if not [a for a in test_args if a.startswith("--perf_verify_sanity")] :
            test_args.extend(["--perf_verify_sanity"])
    logs = []
    test_list = []
    for path in run_args:
        suite = TestSuite(options, path)
        #print vars(suite),"\n"
        if options.list:
            test_list.extend(suite.tests)
        else:
            logs.extend(suite.runTests(tests, sys.stdout, sys.stderr, options.cwd, test_args))
    if options.list:
        print os.linesep.join(test_list) or "No tests found"
    if logs:
        print >> sys.stderr, "Collected: ", " ".join(logs)
    # propagate the last test binary's exit status to the caller
    if errorCode != 0:
        print "Error code: ", errorCode, (" (0x%x)" % (errorCode & 0xffffffff))
        exit(errorCode)
#!/usr/bin/env python
###############################################################################
# Copyright 2018 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
from math import sqrt
class Vector2:
    """A minimal 2D vector with basic arithmetic helpers."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def add(self, v):
        """Return a new vector: the component-wise sum with *v*."""
        return Vector2(v.x + self.x, v.y + self.y)

    def subtract(self, v):
        """Return a new vector: this vector minus *v*, component-wise."""
        return Vector2(self.x - v.x, self.y - v.y)

    def dot(self, v):
        """Return the scalar (dot) product with *v*."""
        return self.x * v.x + self.y * v.y

    def norm(self):
        """Return the Euclidean length of the vector."""
        return sqrt(self.norm_square())

    def norm_square(self):
        """Return the squared Euclidean length (avoids the sqrt)."""
        return self.x ** 2 + self.y ** 2

    def print_point(self):
        """Print the coordinates as "x<TAB>y" followed by a blank line."""
        print(str(self.x) + "\t" + str(self.y) + "\n")
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2004-2012 Pexego Sistemas Informáticos All Rights Reserved
# $Marta Vázquez Rodríguez$ <marta@pexego.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # OpenERP/Odoo module manifest: adds a "format" field to products
    "name" : "Format product",
    "description" : """Add format field to product""",
    "version" : "1.0",
    "author" : "Pexego",
    # modules that must be installed before this one
    "depends" : ["base", "product", "stock"],
    "category" : "Product",
    # data files loaded on install (init) and on every update
    "init_xml" : [],
    "update_xml" : ["product_format_view.xml", "product_view.xml", "security/ir.model.access.csv"],
    'demo_xml': [],
    'installable': True,
    'active': False,
}
# -*- coding: utf-8 -*-
"""
pygments.lexers.algebra
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for computer algebra systems.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer', 'BCLexer']
class GAPLexer(RegexLexer):
    """
    For `GAP <http://www.gap-system.org>`_ source code.

    .. versionadded:: 2.0
    """
    name = 'GAP'
    aliases = ['gap']
    filenames = ['*.g', '*.gd', '*.gi', '*.gap']

    tokens = {
        'root': [
            # single-line comments and double-quoted strings
            (r'#.*$', Comment.Single),
            (r'"(?:[^"\\]|\\.)*"', String),
            (r'\(|\)|\[|\]|\{|\}', Punctuation),
            # control-flow and declaration keywords
            (r'''(?x)\b(?:
                if|then|elif|else|fi|
                for|while|do|od|
                repeat|until|
                break|continue|
                function|local|return|end|
                rec|
                quit|QUIT|
                IsBound|Unbind|
                TryNextMethod|
                Info|Assert
              )\b''', Keyword),
            (r'''(?x)\b(?:
                true|false|fail|infinity
              )\b''',
             Name.Constant),
            # library declaration helpers (DeclareOperation, InstallMethod, ...)
            (r'''(?x)\b(?:
                (Declare|Install)([A-Z][A-Za-z]+)|
                BindGlobal|BIND_GLOBAL
              )\b''',
             Name.Builtin),
            (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
            (r'''(?x)\b(?:
                and|or|not|mod|in
              )\b''',
             Operator.Word),
            # identifiers, possibly backquoted and namespace-qualified
            (r'''(?x)
              (?:\w+|`[^`]*`)
              (?:::\w+|`[^`]*`)*''', Name.Variable),
            (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
            (r'\.[0-9]+(?:e[0-9]+)?', Number),
            # fallback: any other single character
            (r'.', Text)
        ],
    }
class MathematicaLexer(RegexLexer):
    """
    Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.

    .. versionadded:: 2.0
    """
    name = 'Mathematica'
    aliases = ['mathematica', 'mma', 'nb']
    filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
    mimetypes = ['application/mathematica',
                 'application/vnd.wolfram.mathematica',
                 'application/vnd.wolfram.mathematica.package',
                 'application/vnd.wolfram.cdf']

    # http://reference.wolfram.com/mathematica/guide/Syntax.html
    # Bug fix: a missing comma previously fused "!=" and "==" into the
    # single bogus entry "!===" via implicit string concatenation, so
    # "!=" was never recognized as one operator token.
    operators = (
        ";;", "=", "=.", "!=", "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
        "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
        "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
    )

    punctuation = (",", ";", "(", ")", "[", "]", "{", "}")

    def _multi_escape(entries):
        # build a regex alternation of the escaped literal entries
        return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))

    tokens = {
        'root': [
            (r'(?s)\(\*.*?\*\)', Comment),

            # contexts (namespaces) end in a backquote
            (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
            # pattern names (x_, _Integer, ...) and slots (#, #1, ...)
            (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
            (r'#\d*', Name.Variable),
            (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),

            (r'-?\d+\.\d*', Number.Float),
            (r'-?\d*\.\d+', Number.Float),
            (r'-?\d+', Number.Integer),

            (words(operators), Operator),
            (words(punctuation), Punctuation),
            (r'".*?"', String),
            (r'\s+', Text.Whitespace),
        ],
    }
class MuPADLexer(RegexLexer):
    """
    A `MuPAD <http://www.mupad.com>`_ lexer.
    Contributed by Christopher Creutzig <christopher@creutzig.de>.

    .. versionadded:: 0.8
    """
    name = 'MuPAD'
    aliases = ['mupad']
    filenames = ['*.mu']

    tokens = {
        'root': [
            (r'//.*?$', Comment.Single),
            # nested /* ... */ comments are handled by the 'comment' state
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"(?:[^"\\]|\\.)*"', String),
            (r'\(|\)|\[|\]|\{|\}', Punctuation),
            (r'''(?x)\b(?:
                next|break|end|
                axiom|end_axiom|category|end_category|domain|end_domain|inherits|
                if|%if|then|elif|else|end_if|
                case|of|do|otherwise|end_case|
                while|end_while|
                repeat|until|end_repeat|
                for|from|to|downto|step|end_for|
                proc|local|option|save|begin|end_proc|
                delete|frame
              )\b''', Keyword),
            # built-in domain types
            (r'''(?x)\b(?:
                DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
                DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
                DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
                DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
              )\b''', Name.Class),
            (r'''(?x)\b(?:
                PI|EULER|E|CATALAN|
                NIL|FAIL|undefined|infinity|
                TRUE|FALSE|UNKNOWN
              )\b''',
             Name.Constant),
            (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
            (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
            (r'''(?x)\b(?:
                and|or|not|xor|
                assuming|
                div|mod|
                union|minus|intersect|in|subset
              )\b''',
             Operator.Word),
            (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
            # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
            # identifier immediately followed by '(' -> function call
            (r'''(?x)
              ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
              (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
             bygroups(Name.Function, Text, Punctuation)),
            (r'''(?x)
              (?:[a-zA-Z_#][\w#]*|`[^`]*`)
              (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
            (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
            (r'\.[0-9]+(?:e[0-9]+)?', Number),
            (r'.', Text)
        ],
        'comment': [
            (r'[^*/]', Comment.Multiline),
            # '#push'/'#pop' implement nesting of /* ... */ comments
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
    }
class BCLexer(RegexLexer):
    """
    A `BC <https://www.gnu.org/software/bc/>`_ lexer.

    .. versionadded:: 2.1
    """
    name = 'BC'
    aliases = ['bc']
    filenames = ['*.bc']

    tokens = {
        'root': [
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"(?:[^"\\]|\\.)*"', String),
            (r'[{}();,]', Punctuation),
            # statement keywords plus bc's built-in functions/variables
            (words(('if', 'else', 'while', 'for', 'break', 'continue',
                    'halt', 'return', 'define', 'auto', 'print', 'read',
                    'length', 'scale', 'sqrt', 'limits', 'quit',
                    'warranty'), suffix=r'\b'), Keyword),
            # increment/decrement, logical ops, and (compound-)assignment forms
            (r'\+\+|--|\|\||&&|'
             r'([-<>+*%\^/!=])=?', Operator),
            # bc doesn't support exponential
            (r'[0-9]+(\.[0-9]*)?', Number),
            (r'\.[0-9]+', Number),
            (r'.', Text)
        ],
        'comment': [
            (r'[^*/]+', Comment.Multiline),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
    }
<!--Copyright 2025 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
⚠️ Note that this file is in Markdown but contain specific syntax for our doc-builder (similar to MDX) that may not be
rendered properly in your Markdown viewer.
-->
# Serve CLI
The `transformers serve` CLI is a lightweight option for local or self-hosted servers. It avoids the extra runtime and operational overhead of dedicated inference engines like vLLM. Use it for evaluation, experimentation, and moderate-load deployments. Features like [continuous batching](../continuous_batching) increase throughput and lower latency.
> [!TIP]
> For large scale production deployments, use vLLM, SGLang or TGI with a Transformer model as the backend. Learn more in the [Inference backends](../community_integrations/transformers_as_backend) guide.
The `transformers serve` command spawns a local server compatible with the [OpenAI SDK](https://platform.openai.com/docs/overview). The server works with many third-party applications and supports the REST APIs below.
- `/v1/chat/completions` for text and image requests
- `/v1/responses` supports the [Responses API](https://platform.openai.com/docs/api-reference/responses)
- `/v1/audio/transcriptions` for audio transcriptions
- `/v1/models` lists available models for third-party integrations
Install the serving dependencies.
```bash
pip install transformers[serving]
```
Run `transformers serve` to launch a server. The default server address is http://localhost:8000.
```shell
transformers serve
```
## v1/chat/completions
The `v1/chat/completions` API is based on the [Chat Completions API](https://platform.openai.com/docs/api-reference/chat). It supports text and image-based requests for LLMs and VLMs. Use it with `curl`, the [`~huggingface_hub.AsyncInferenceClient`], or the [OpenAI](https://platform.openai.com/docs/quickstart) client.
### Text-based completions
<hfoptions id="chat-completion-http">
<hfoption id="curl">
```shell
curl -X POST http://localhost:8000/v1/chat/completions -H "Content-Type: application/json" -d '{"messages": [{"role": "system", "content": "hello"}], "temperature": 0.9, "max_tokens": 1000, "stream": true, "model": "Qwen/Qwen2.5-0.5B-Instruct"}'
```
The command returns the following response.
```shell
data: {"object": "chat.completion.chunk", "id": "req_0", "created": 1751377863, "model": "Qwen/Qwen2.5-0.5B-Instruct", "system_fingerprint": "", "choices": [{"delta": {"role": "assistant", "content": "", "tool_call_id": null, "tool_calls": null}, "index": 0, "finish_reason": null, "logprobs": null}]}
data: {"object": "chat.completion.chunk", "id": "req_0", "created": 1751377863, "model": "Qwen/Qwen2.5-0.5B-Instruct", "system_fingerprint": "", "choices": [{"delta": {"role": "assistant", "content": "", "tool_call_id": null, "tool_calls": null}, "index": 0, "finish_reason": null, "logprobs": null}]}
(...)
```
</hfoption>
<hfoption id="huggingface_hub">
```python
import asyncio
from huggingface_hub import AsyncInferenceClient
messages = [{"role": "user", "content": "What is the Transformers library known for?"}]
client = AsyncInferenceClient("http://localhost:8000")
async def responses_api_test_async():
async for chunk in (await client.chat_completion(messages, model="Qwen/Qwen2.5-0.5B-Instruct", max_tokens=256, stream=True)):
token = chunk.choices[0].delta.content
if token:
print(token, end='')
asyncio.run(responses_api_test_async())
asyncio.run(client.close())
```
The [`~huggingface_hub.AsyncInferenceClient`] returns a printed string.
```shell
The Transformers library is primarily known for its ability to create and manipulate large-scale language models [...]
```
</hfoption>
<hfoption id="openai">
```python
from openai import OpenAI
client = OpenAI(base_url="http://localhost:8000/v1", api_key="<random_string>")
completion = client.chat.completions.create(
model="Qwen/Qwen2.5-0.5B-Instruct",
messages=[
{
"role": "user",
"content": "What is the Transformers library known for?"
}
],
stream=True
)
for chunk in completion:
token = chunk.choices[0].delta.content
if token:
print(token, end='')
```
The [OpenAI](https://platform.openai.com/docs/quickstart) client returns a printed string.
```shell
The Transformers library is primarily known for its ability to create and manipulate large-scale language models [...]
```
</hfoption>
</hfoptions>
### Text and image-based completions
<hfoptions id="chat-completion-http-images">
<hfoption id="curl">
```shell
curl http://localhost:8000/v1/chat/completions \
-H "Content-Type: application/json" \
-d '{
"model": "Qwen/Qwen2.5-VL-7B-Instruct",
"stream": true,
"messages": [
{
"role": "user",
"content": [
{
"type": "text",
"text": "What is in this image?"
},
{
"type": "image_url",
"image_url": {
"url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"
}
}
]
}
],
"max_tokens": 300
}'
```
The command returns the following response.
```shell
data: {"id":"req_0","choices":[{"delta":{"role":"assistant"},"index":0}],"created":1753366665,"model":"Qwen/Qwen2.5-VL-7B-Instruct@main","object":"chat.completion.chunk","system_fingerprint":""}
data: {"id":"req_0","choices":[{"delta":{"content":"The "},"index":0}],"created":1753366701,"model":"Qwen/Qwen2.5-VL-7B-Instruct@main","object":"chat.completion.chunk","system_fingerprint":""}
data: {"id":"req_0","choices":[{"delta":{"content":"image "},"index":0}],"created":1753366701,"model":"Qwen/Qwen2.5-VL-7B-Instruct@main","object":"chat.completion.chunk","system_fingerprint":""}
```
</hfoption>
<hfoption id="huggingface_hub">
```python
import asyncio
from huggingface_hub import AsyncInferenceClient
messages = [
{
"role": "user",
"content": [
{"type": "text", "text": "What's in this image?"},
{
"type": "image_url",
"image_url": {
"url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/astronaut.jpg",
}
},
],
}
]
client = AsyncInferenceClient("http://localhost:8000")
async def responses_api_test_async():
async for chunk in (await client.chat_completion(messages, model="Qwen/Qwen2.5-VL-7B-Instruct", max_tokens=256, stream=True)):
token = chunk.choices[0].delta.content
if token:
print(token, end='')
asyncio.run(responses_api_test_async())
asyncio.run(client.close())
```
The [`~huggingface_hub.AsyncInferenceClient`] returns a printed string.
```xmp
The image depicts an astronaut in a space suit standing on what appears to be the surface of the moon, given the barren, rocky landscape and the dark sky in the background. The astronaut is holding a large egg that has cracked open, revealing a small creature inside. The scene is imaginative and playful, combining elements of space exploration with a whimsical twist involving the egg and the creature.
```
</hfoption>
<hfoption id="openai">
```python
from openai import OpenAI
client = OpenAI(base_url="http://localhost:8000/v1", api_key="<random_string>")
completion = client.chat.completions.create(
model="Qwen/Qwen2.5-VL-7B-Instruct",
messages=[
{
"role": "user",
"content": [
{"type": "text", "text": "What's in this image?"},
{
"type": "image_url",
"image_url": {
"url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/astronaut.jpg",
}
},
],
}
],
stream=True
)
for chunk in completion:
token = chunk.choices[0].delta.content
if token:
print(token, end='')
```
The [OpenAI](https://platform.openai.com/docs/quickstart) client returns a printed string.
```xmp
The image depicts an astronaut in a space suit standing on what appears to be the surface of the moon, given the barren, rocky landscape and the dark sky in the background. The astronaut is holding a large egg that has cracked open, revealing a small creature inside. The scene is imaginative and playful, combining elements of space exploration with a whimsical twist involving the egg and the creature.
```
</hfoption>
</hfoptions>
## v1/responses
> [!WARNING]
> The `v1/responses` API is still experimental and there may be bugs. Please open an issue if you encounter any errors.
The [Responses API](https://platform.openai.com/docs/api-reference/responses) is OpenAI's latest API endpoint for generation. It supports stateful interactions and integrates built-in tools to extend a model's capabilities. OpenAI [recommends](https://platform.openai.com/docs/guides/migrate-to-responses) using the Responses API over the Chat Completions API for new projects.
The `v1/responses` API supports text-based requests for LLMs through the `curl` command and [OpenAI](https://platform.openai.com/docs/quickstart) client.
<hfoptions id="responses">
<hfoption id="curl">
```shell
curl http://localhost:8000/v1/responses \
-H "Content-Type: application/json" \
-d '{
"model": "Qwen/Qwen2.5-0.5B-Instruct",
"stream": true,
"input": "Tell me a three sentence bedtime story about a unicorn."
}'
```
The command returns the following response.
```shell
data: {"response":{"id":"resp_req_0","created_at":1754059817.783648,"model":"Qwen/Qwen2.5-0.5B-Instruct@main","object":"response","output":[],"parallel_tool_calls":false,"tool_choice":"auto","tools":[],"status":"queued","text":{"format":{"type":"text"}}},"sequence_number":0,"type":"response.created"}
data: {"response":{"id":"resp_req_0","created_at":1754059817.783648,"model":"Qwen/Qwen2.5-0.5B-Instruct@main","object":"response","output":[],"parallel_tool_calls":false,"tool_choice":"auto","tools":[],"status":"in_progress","text":{"format":{"type":"text"}}},"sequence_number":1,"type":"response.in_progress"}
data: {"item":{"id":"msg_req_0","content":[],"role":"assistant","status":"in_progress","type":"message"},"output_index":0,"sequence_number":2,"type":"response.output_item.added"}
data: {"content_index":0,"item_id":"msg_req_0","output_index":0,"part":{"annotations":[],"text":"","type":"output_text"},"sequence_number":3,"type":"response.content_part.added"}
data: {"content_index":0,"delta":"","item_id":"msg_req_0","output_index":0,"sequence_number":4,"type":"response.output_text.delta"}
data: {"content_index":0,"delta":"Once ","item_id":"msg_req_0","output_index":0,"sequence_number":5,"type":"response.output_text.delta"}
data: {"content_index":0,"delta":"upon ","item_id":"msg_req_0","output_index":0,"sequence_number":6,"type":"response.output_text.delta"}
data: {"content_index":0,"delta":"a ","item_id":"msg_req_0","output_index":0,"sequence_number":7,"type":"response.output_text.delta"}
```
</hfoption>
<hfoption id="openai">
```python
from openai import OpenAI
client = OpenAI(base_url="http://localhost:8000/v1", api_key="<KEY>")
response = client.responses.create(
model="Qwen/Qwen2.5-0.5B-Instruct",
instructions="You are a helpful assistant.",
input="Hello!",
stream=True,
metadata={"foo": "bar"},
)
for event in response:
print(event)
```
The [OpenAI](https://platform.openai.com/docs/quickstart) client returns multiple printed strings.
```shell
ResponseCreatedEvent(response=Response(id='resp_req_0', created_at=1754060400.3718212, error=None, incomplete_details=None, instructions='You are a helpful assistant.', metadata={'foo': 'bar'}, model='Qwen/Qwen2.5-0.5B-Instruct@main', object='response', output=[], parallel_tool_calls=False, temperature=None, tool_choice='auto', tools=[], top_p=None, background=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, reasoning=None, service_tier=None, status='queued', text=ResponseTextConfig(format=ResponseFormatText(type='text')), top_logprobs=None, truncation=None, usage=None, user=None), sequence_number=0, type='response.created')
ResponseInProgressEvent(response=Response(id='resp_req_0', created_at=1754060400.3718212, error=None, incomplete_details=None, instructions='You are a helpful assistant.', metadata={'foo': 'bar'}, model='Qwen/Qwen2.5-0.5B-Instruct@main', object='response', output=[], parallel_tool_calls=False, temperature=None, tool_choice='auto', tools=[], top_p=None, background=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, reasoning=None, service_tier=None, status='in_progress', text=ResponseTextConfig(format=ResponseFormatText(type='text')), top_logprobs=None, truncation=None, usage=None, user=None), sequence_number=1, type='response.in_progress')
ResponseOutputItemAddedEvent(item=ResponseOutputMessage(id='msg_req_0', content=[], role='assistant', status='in_progress', type='message'), output_index=0, sequence_number=2, type='response.output_item.added')
ResponseContentPartAddedEvent(content_index=0, item_id='msg_req_0', output_index=0, part=ResponseOutputText(annotations=[], text='', type='output_text', logprobs=None), sequence_number=3, type='response.content_part.added')
ResponseTextDeltaEvent(content_index=0, delta='', item_id='msg_req_0', output_index=0, sequence_number=4, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='', item_id='msg_req_0', output_index=0, sequence_number=5, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='Hello! ', item_id='msg_req_0', output_index=0, sequence_number=6, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='How ', item_id='msg_req_0', output_index=0, sequence_number=7, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='can ', item_id='msg_req_0', output_index=0, sequence_number=8, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='I ', item_id='msg_req_0', output_index=0, sequence_number=9, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='assist ', item_id='msg_req_0', output_index=0, sequence_number=10, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='you ', item_id='msg_req_0', output_index=0, sequence_number=11, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='', item_id='msg_req_0', output_index=0, sequence_number=12, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='', item_id='msg_req_0', output_index=0, sequence_number=13, type='response.output_text.delta')
ResponseTextDeltaEvent(content_index=0, delta='today?', item_id='msg_req_0', output_index=0, sequence_number=14, type='response.output_text.delta')
ResponseTextDoneEvent(content_index=0, item_id='msg_req_0', output_index=0, sequence_number=15, text='Hello! How can I assist you today?', type='response.output_text.done')
ResponseContentPartDoneEvent(content_index=0, item_id='msg_req_0', output_index=0, part=ResponseOutputText(annotations=[], text='Hello! How can I assist you today?', type='output_text', logprobs=None), sequence_number=16, type='response.content_part.done')
ResponseOutputItemDoneEvent(item=ResponseOutputMessage(id='msg_req_0', content=[ResponseOutputText(annotations=[], text='Hello! How can I assist you today?', type='output_text', logprobs=None)], role='assistant', status='completed', type='message', annotations=[]), output_index=0, sequence_number=17, type='response.output_item.done')
ResponseCompletedEvent(response=Response(id='resp_req_0', created_at=1754060400.3718212, error=None, incomplete_details=None, instructions='You are a helpful assistant.', metadata={'foo': 'bar'}, model='Qwen/Qwen2.5-0.5B-Instruct@main', object='response', output=[ResponseOutputMessage(id='msg_req_0', content=[ResponseOutputText(annotations=[], text='Hello! How can I assist you today?', type='output_text', logprobs=None)], role='assistant', status='completed', type='message', annotations=[])], parallel_tool_calls=False, temperature=None, tool_choice='auto', tools=[], top_p=None, background=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, reasoning=None, service_tier=None, status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text')), top_logprobs=None, truncation=None, usage=None, user=None), sequence_number=18, type='response.completed')
```
</hfoption>
</hfoptions>
## v1/audio/transcriptions
The `v1/audio/transcriptions` endpoint transcribes audio using speech-to-text models. It follows the [Audio transcription API](https://platform.openai.com/docs/api-reference/audio/createTranscription) format.
```shell
curl -X POST http://localhost:8000/v1/audio/transcriptions \
-H "Content-Type: multipart/form-data" \
-F "file=@/path/to/audio.wav" \
-F "model=openai/whisper-large-v3"
```
The command returns the following response.
```shell
{
"text": "Transcribed text from the audio file",
}
```
## v1/models
The `v1/models` endpoint scans your local Hugging Face cache and returns a list of downloaded models in the OpenAI-compatible format. Third-party tools use this endpoint to discover available models.
Use the command below to download a model before running `transformers serve`.
```bash
transformers download Qwen/Qwen2.5-0.5B-Instruct
```
The model is now discoverable by the `/v1/models` endpoint.
```shell
curl http://localhost:8000/v1/models
```
This command returns a JSON object containing the list of models.
## Tool calling
The `transformers serve` server supports OpenAI-style function calling. Models trained for tool-use generate structured function calls that your application executes.
> [!NOTE]
> Tool calling is currently limited to the Qwen model family.
Define tools as a list of function specifications following the OpenAI format.
```py
import json
from openai import OpenAI
client = OpenAI(base_url="http://localhost:8000/v1", api_key="<KEY>")
tools = [
{
"type": "function",
"function": {
"name": "get_weather",
"description": "Get the current weather in a location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city name, e.g. San Francisco"
},
"unit": {
"type": "string",
"enum": ["celsius", "fahrenheit"],
"description": "temperature unit"
}
},
"required": ["location"]
}
}
}
]
```
Pass a dictionary of parameters from [`GenerationConfig`] to the `extra_body` argument in [create](https://platform.openai.com/docs/api-reference/responses/create) to customize model generation.
```py
generation_config = {
"max_new_tokens": 512,
"temperature": 0.7,
"top_p": 0.9,
"top_k": 50,
"do_sample": True,
"repetition_penalty": 1.1,
"no_repeat_ngram_size": 3,
}
response = client.responses.create(
model="Qwen/Qwen2.5-7B-Instruct",
instructions="You are a helpful weather assistant. Use the get_weather tool to answer questions.",
input="What's the weather like in San Francisco?",
tools=tools,
stream=True,
extra_body={"generation_config": json.dumps(generation_config)}
)
for event in response:
print(event)
```
## Port forwarding
The `transformers serve` server supports port forwarding. This lets you serve models from a remote server. Make sure you have ssh access from your device to the server. Run the following command on your device to set up port forwarding.
```bash
ssh -N -f -L 8000:localhost:8000 your_server_account@your_server_IP -p port_to_ssh_into_your_server
```
## Reproducibility
Add the `--force-model <repo_id>` argument to avoid per-request model hints. This produces stable, repeatable runs.
```sh
transformers serve \
--force-model Qwen/Qwen2.5-0.5B-Instruct \
--continuous-batching \
--dtype "bfloat16"
``` | unknown | github | https://github.com/huggingface/transformers | docs/source/en/serve-cli/serving.md |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import sqlalchemy as sa
from buildbot.util import sautils
def upgrade(migrate_engine):
    """Replace the ``builders.tags`` text column with normalized tag tables.

    Migration steps:
      1. drop the old ``tags`` column from ``builders``,
      2. create a ``tags`` table (unique tag names, deduplicated via a
         sha1 ``name_hash``),
      3. create a ``builders_tags`` association table linking builders
         to tags, plus the supporting indices.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    # reflect the existing builders table so the old column can be dropped
    builders = sautils.Table('builders', metadata, autoload=True)
    # drop the tags column
    builders.c.tags.drop()
    tags = sautils.Table(
        'tags', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # tag's name
        sa.Column('name', sa.Text, nullable=False),
        # sha1 of name; used for a unique index
        sa.Column('name_hash', sa.String(40), nullable=False),
    )
    # a many-to-many relationship between builders and tags
    builders_tags = sautils.Table(
        'builders_tags', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id'),
                  nullable=False),
        sa.Column('tagid', sa.Integer, sa.ForeignKey('tags.id'),
                  nullable=False),
    )
    # create the new tables
    tags.create()
    builders_tags.create()
    # and the indices
    idx = sa.Index('builders_tags_builderid',
                   builders_tags.c.builderid)
    idx.create()
    # each (builder, tag) pair may appear at most once
    idx = sa.Index('builders_tags_unique',
                   builders_tags.c.builderid,
                   builders_tags.c.tagid,
                   unique=True)
    idx.create()
    # tag names are looked up / deduplicated through the hash column
    idx = sa.Index('tag_name_hash', tags.c.name_hash, unique=True)
    idx.create()
from ..base import *
import six
#todo Double check everything is OK
@Js
def Object():
    """JS ``Object(value)`` invoked as a plain function: box the value,
    or produce a fresh empty object for null/undefined."""
    candidate = arguments.get('0')
    boxable = not (candidate.is_null() or candidate.is_undefined())
    # null/undefined cannot be boxed; hand back a brand-new empty object
    return candidate.to_object() if boxable else PyJsObject(prototype=ObjectPrototype)
@Js
def object_constructor():
    """Constructor behaviour for ``new Object(...)``."""
    if len(arguments):
        candidate = arguments.get('0')
        kind = candidate.TYPE
        if kind == 'Object':
            # implementation dependent; this implementation returns it as-is
            return candidate
        if kind in ('Number', 'String', 'Boolean'):
            return candidate.to_object()
    # no usable argument: build a fresh empty object
    return PyJsObject(prototype=ObjectPrototype)
# NOTE(review): `create` here appears to be the internal [[Construct]] hook
# used for `new Object(...)` (fill_prototype below also installs an ES5
# Object.create from ObjectMethods) -- confirm against the Js function class.
Object.create = object_constructor
# the Object constructor reports length 1
Object.own['length']['value'] = Js(1)
class ObjectMethods:
    """Static methods of the JS ``Object`` constructor.

    These are attached to ``Object`` via ``fill_prototype`` below; inside
    the methods ``arguments`` refers to the JS arguments object supplied
    by the ``@Js`` call machinery.
    """

    def getPrototypeOf(obj):
        """Return the prototype of *obj*, or JS null when it has none."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.getPrototypeOf called on non-object')
        return null if obj.prototype is None else obj.prototype

    def getOwnPropertyDescriptor(obj, prop):
        """Return the own-property descriptor for *prop* (undefined if absent)."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.getOwnPropertyDescriptor called on non-object')
        return obj.own.get(prop.to_string().value)  # will return undefined if we dont have this prop

    def getOwnPropertyNames(obj):
        """Return the names of all own properties of *obj*."""
        if not obj.is_object():
            # fixed: message previously said getOwnPropertyDescriptor (copy-paste error)
            raise MakeError('TypeError', 'Object.getOwnPropertyNames called on non-object')
        return obj.own.keys()

    def create(obj):
        """ES5 Object.create: new object with prototype *obj* (or null);
        optional second argument is a property-descriptor map."""
        if not (obj.is_object() or obj.is_null()):
            raise MakeError('TypeError', 'Object prototype may only be an Object or null')
        temp = PyJsObject(prototype=(None if obj.is_null() else obj))
        if len(arguments) > 1 and not arguments[1].is_undefined():
            # reuse defineProperties; on py2 go through __func__ to avoid
            # the unbound-method binding check
            if six.PY2:
                ObjectMethods.defineProperties.__func__(temp, arguments[1])
            else:
                ObjectMethods.defineProperties(temp, arguments[1])
        return temp

    def defineProperty(obj, prop, attrs):
        """Define a single property on *obj* from descriptor object *attrs*."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.defineProperty called on non-object')
        name = prop.to_string().value
        if not obj.define_own_property(name, ToPropertyDescriptor(attrs)):
            raise MakeError('TypeError', 'Cannot redefine property: %s' % name)
        return obj

    def defineProperties(obj, properties):
        """Define every property listed in the *properties* descriptor map."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.defineProperties called on non-object')
        props = properties.to_object()
        for name in props:
            desc = ToPropertyDescriptor(props.get(name.value))
            if not obj.define_own_property(name.value, desc):
                raise MakeError('TypeError', 'Failed to define own property: %s' % name.value)
        return obj

    def seal(obj):
        """Make every own property non-configurable and forbid extensions."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.seal called on non-object')
        for desc in obj.own.values():
            desc['configurable'] = False
        obj.extensible = False
        return obj

    def freeze(obj):
        """Seal *obj* and additionally make all data properties read-only."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.freeze called on non-object')
        for desc in obj.own.values():
            desc['configurable'] = False
            if is_data_descriptor(desc):
                desc['writable'] = False
        obj.extensible = False
        return obj

    def preventExtensions(obj):
        """Forbid adding new properties to *obj*."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.preventExtensions on non-object')
        obj.extensible = False
        return obj

    def isSealed(obj):
        """True when *obj* is non-extensible and no property is configurable."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.isSealed called on non-object')
        if obj.extensible:
            return False
        for desc in obj.own.values():
            if desc['configurable']:
                return False
        return True

    def isFrozen(obj):
        """True when *obj* is sealed and no data property is writable."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.isFrozen called on non-object')
        if obj.extensible:
            return False
        for desc in obj.own.values():
            if desc['configurable']:
                return False
            if is_data_descriptor(desc) and desc['writable']:
                return False
        return True

    def isExtensible(obj):
        """True when new properties may be added to *obj*."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.isExtensible called on non-object')
        return obj.extensible

    def keys(obj):
        """Return the names of *obj*'s enumerable own properties."""
        if not obj.is_object():
            raise MakeError('TypeError', 'Object.keys called on non-object')
        return [e for e, d in six.iteritems(obj.own) if d.get('enumerable')]
# add methods attached to Object constructor
fill_prototype(Object, ObjectMethods, default_attrs)
# add constructor to prototype
fill_in_props(ObjectPrototype, {'constructor':Object}, default_attrs)
# add prototype property to the constructor
# (non-writable / non-enumerable / non-configurable, per the ES5 spec)
Object.define_own_property('prototype', {'value': ObjectPrototype,
                                         'enumerable': False,
                                         'writable': False,
                                         'configurable': False})
# some utility functions:
def ToPropertyDescriptor(obj):  # page 38 (50 absolute)
    """Translate a JS descriptor object into the internal dict representation.

    Raises a JS TypeError when *obj* is not an object, when a getter or
    setter is neither callable nor undefined, or when accessor attributes
    are mixed with data attributes.
    """
    if obj.TYPE != 'Object':
        raise MakeError('TypeError', 'Can\'t convert non-object to property descriptor')
    desc = {}

    def take_flag(name):
        # boolean attributes are coerced with ToBoolean
        if obj.has_property(name):
            desc[name] = obj.get(name).to_boolean().value

    def take_accessor(name, label):
        if obj.has_property(name):
            cand = obj.get(name)
            if not (cand.is_undefined() or cand.is_callable()):
                raise MakeError('TypeError', 'Invalid %s (it has to be a function or undefined)' % label)
            desc[name] = cand

    # property reads are kept in the original (spec) order in case
    # obj has observable getters
    take_flag('enumerable')
    take_flag('configurable')
    if obj.has_property('value'):
        desc['value'] = obj.get('value')
    take_flag('writable')
    take_accessor('get', 'getter')
    take_accessor('set', 'setter')
    if ('get' in desc or 'set' in desc) and ('value' in desc or 'writable' in desc):
        raise MakeError('TypeError', 'Invalid property. A property cannot both have accessors and be writable or have a value.')
    return desc
#if defined(DETECT_FEATURES) && defined(__INTEL_COMPILER)
/*
* Unlike GCC and CLANG, Intel Compiler exposes all supported intrinsics,
* whether or not the build options for those features are specified.
* Therefore, we must test #definitions of CPU features when option native/host
* is enabled via `--cpu-baseline` or through env var `CFLAGS` otherwise
* the test will be broken and leads to enable all possible features.
*/
#ifndef __SSE4_2__
#error "HOST/ARCH doesn't support SSE42"
#endif
#endif
#include <smmintrin.h>
int main(void)
{
__m128 a = _mm_hadd_ps(_mm_setzero_ps(), _mm_setzero_ps());
return (int)_mm_cvtss_f32(a);
} | c | github | https://github.com/numpy/numpy | numpy/_core/src/_simd/checks/cpu_sse42.c |
function Component(props) {
  const data = useFreeze(); // assume this returns {items: Array<{...}>}

  // In this call `data` and `data.items` have a read effect *and* the lambda itself
  // is readonly (it doesn't capture any mutable references). Further, we can
  // theoretically determine that the lambda doesn't need to be memoized, since
  // data.items is an Array and Array.prototype.map does not capture its input (callback)
  // in the return value.

  // An observation is that even without knowing the exact type of `data`, if we know
  // that it is a plain, readonly javascript object, then we can infer that any `.map()`
  // calls *must* be Array.prototype.map (or else they are a runtime error), since no
  // other builtin has a .map() function.
  const items = data.items.map(item => <Item item={item} />);
  return <div>{items}</div>;
}
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MODERNIZE_RAWSTRINGLITERALCHECK_H
#define LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MODERNIZE_RAWSTRINGLITERALCHECK_H
#include "../ClangTidyCheck.h"
#include <bitset>
namespace clang::tidy::modernize {

// One bit per possible `char` value (0..2^CHAR_BIT - 1); records which
// characters are relevant for raw-string-literal handling.
using CharsBitSet = std::bitset<1 << CHAR_BIT>;

/// This check replaces string literals with escaped characters to
/// raw string literals.
///
/// For the user-facing documentation see:
/// https://clang.llvm.org/extra/clang-tidy/checks/modernize/raw-string-literal.html
class RawStringLiteralCheck : public ClangTidyCheck {
public:
  RawStringLiteralCheck(StringRef Name, ClangTidyContext *Context);
  // Raw string literals are a C++11 feature.
  bool isLanguageVersionSupported(const LangOptions &LangOpts) const override {
    return LangOpts.CPlusPlus11;
  }
  void storeOptions(ClangTidyOptions::OptionMap &Opts) override;
  void registerMatchers(ast_matchers::MatchFinder *Finder) override;
  void check(const ast_matchers::MatchFinder::MatchResult &Result) override;

private:
  // NOTE(review): presumably the stem used to build a raw-string delimiter,
  // e.g. R"stem(...)stem" -- confirm against the check's option docs.
  std::string DelimiterStem;
  // Characters that are not allowed to appear in a converted literal.
  CharsBitSet DisallowedChars;
  // NOTE(review): presumably controls whether literals are replaced even when
  // the raw form is not shorter -- confirm against the check's option docs.
  const bool ReplaceShorterLiterals;
};

} // namespace clang::tidy::modernize
#endif // LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_MODERNIZE_RAWSTRINGLITERALCHECK_H | c | github | https://github.com/llvm/llvm-project | clang-tools-extra/clang-tidy/modernize/RawStringLiteralCheck.h |
### Addon-manager
addon-manager manages two classes of addons with given template files in
`$ADDON_PATH` (default `/etc/kubernetes/addons/`).
- Addons with label `addonmanager.kubernetes.io/mode=Reconcile` will be periodically
reconciled. Direct manipulation to these addons through apiserver is discouraged because
addon-manager will bring them back to the original state. In particular:
- Addon will be re-created if it is deleted.
- Addon will be reconfigured to the state given by the supplied fields in the template
file periodically.
- Addon will be deleted when its manifest file is deleted from the `$ADDON_PATH`.
- Addons with label `addonmanager.kubernetes.io/mode=EnsureExists` will be checked for
existence only. Users can edit these addons as they want. In particular:
- Addon will only be created/re-created with the given template file when there is no
instance of the resource with that name.
- Addon will not be deleted when the manifest file is deleted from the `$ADDON_PATH`.
Notes:
- Label `kubernetes.io/cluster-service=true` is deprecated (only for Addon Manager).
In a future release (after one year), Addon Manager may not respect it anymore. Addons
that have this label but do not have `addonmanager.kubernetes.io/mode=EnsureExists`
will be treated as "reconcile class addons" for now.
- Resources under `$ADDON_PATH` need to have one of these two labels.
Otherwise they will be ignored.
#### Images
addon-manager images are pushed to `registry.k8s.io`. As addon-manager is built for multiple architectures, there is an image per architecture in the format - `registry.k8s.io/addon-manager/kube-addon-manager-$(ARCH):$(VERSION)`.
#### How to release
The `addon-manager` is built for multiple architectures.
1. Change something in the source
2. Bump `VERSION` in the `Makefile`
3. Bump `KUBECTL_VERSION` in the `Makefile` if required
4. Build the `amd64` image and test it on a cluster
5. Push all images
```console
# Build for linux/amd64 (default)
$ make push ARCH=amd64
# ---> staging-k8s.gcr.io/addon-manager/kube-addon-manager-amd64:VERSION
# ---> staging-k8s.gcr.io/addon-manager/kube-addon-manager:VERSION (image with backwards-compatible naming)
$ make push ARCH=arm
# ---> staging-k8s.gcr.io/addon-manager/kube-addon-manager-arm:VERSION
$ make push ARCH=arm64
# ---> staging-k8s.gcr.io/addon-manager/kube-addon-manager-arm64:VERSION
$ make push ARCH=ppc64le
# ---> staging-k8s.gcr.io/addon-manager/kube-addon-manager-ppc64le:VERSION
$ make push ARCH=s390x
# ---> staging-k8s.gcr.io/addon-manager/kube-addon-manager-s390x:VERSION
```
If you don't want to push the images, run `make` or `make build` instead | unknown | github | https://github.com/kubernetes/kubernetes | cluster/addons/addon-manager/README.md |
//===--- Sanitizers.h - Helpers related to sanitizers -----------*- C++ -*-===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
#ifndef SWIFT_BASIC_SANITIZERS_H
#define SWIFT_BASIC_SANITIZERS_H
namespace swift {

// Enabling bitwise masking.
// Each sanitizer gets its own bit so kinds can be OR-ed together into a
// sanitizer set; the enumerators are generated from Sanitizers.def.
enum class SanitizerKind : unsigned {
#define SANITIZER(enum_bit, kind, name, file) kind = (1 << enum_bit),
#include "Sanitizers.def"
};

} // end namespace swift
#endif // SWIFT_BASIC_SANITIZERS_H | c | github | https://github.com/apple/swift | include/swift/Basic/Sanitizers.h |
"use strict";
const path = require("path");
const { describeCases } = require("./TestCases.template");
describe("TestCases", () => {
	// Run the shared test cases with the filesystem ("pack") cache enabled
	// and full optimization, so cached builds get coverage too.
	const options = {
		name: "cache pack",
		cache: {
			type: "filesystem",
			buildDependencies: {
				defaultWebpack: []
			}
		},
		snapshot: {
			managedPaths: [path.resolve(__dirname, "../node_modules")]
		},
		optimization: {
			innerGraph: true,
			usedExports: true,
			concatenateModules: true
		}
	};
	describeCases(options);
});
# Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code for building the input for the prediction model."""
import os
import numpy as np
import tensorflow as tf
from tensorflow.python.platform import flags
from tensorflow.python.platform import gfile
# Directory containing the evaluation split of the ILSVRC2016 TFRecord data.
EVAL_DATA_DIR = '/home/wangyang59/Data/ILSVRC2016_tf_eval'
#DATA_DIR = '/home/wangyang59/Data/ILSVRC2016_tf_stab/train'

FLAGS = flags.FLAGS

# Original image dimensions
ORIGINAL_WIDTH = 256
ORIGINAL_HEIGHT = 256
# number of color channels per image (RGB)
COLOR_CHAN = 3

# Default image dimensions.
IMG_WIDTH = 256
IMG_HEIGHT = 256
def build_tfrecord_input_eval(training=True):
    """Create input tfrecord tensors for evaluation (image + mask pairs).

    Args:
      training: training or validation data.
    Returns:
      A list of two batched tensors [images, masks]: images is
      batch x height x width x channels and masks is
      batch x height x width x 1 (both float32).
    Raises:
      RuntimeError: if no files found.
    """
    filenames = gfile.Glob(os.path.join(EVAL_DATA_DIR, '*'))
    if not filenames:
        raise RuntimeError('No data files found.')
    # first half of the shards go to training, the rest to validation
    index = int(np.floor(0.5 * len(filenames)))
    if training:
        filenames = filenames[:index]
    else:
        filenames = filenames[index:]
    filename_queue = tf.train.string_input_producer(filenames, shuffle=True)
    reader = tf.TFRecordReader()
    _, serialized_example = reader.read(filename_queue)
    # each record stores the raw float32 bytes of one image and its mask
    features = {"image_raw": tf.FixedLenFeature([1], tf.string),
                "mask_raw": tf.FixedLenFeature([1], tf.string)}
    features = tf.parse_single_example(serialized_example, features=features)
    image = tf.decode_raw(features['image_raw'], tf.float32)
    image = tf.reshape(image, [ORIGINAL_HEIGHT, ORIGINAL_WIDTH, COLOR_CHAN])
    mask = tf.decode_raw(features['mask_raw'], tf.float32)
    mask = tf.reshape(mask, [ORIGINAL_HEIGHT, ORIGINAL_WIDTH, 1])
    if IMG_HEIGHT != IMG_WIDTH:
        raise ValueError('Unequal height and width unsupported')
    crop_size = min(ORIGINAL_HEIGHT, ORIGINAL_WIDTH)
    # center-crop/pad both tensors to a square of side crop_size
    image = tf.image.resize_image_with_crop_or_pad(image, crop_size, crop_size)
    image = tf.reshape(image, [crop_size, crop_size, COLOR_CHAN])
    mask = tf.image.resize_image_with_crop_or_pad(mask, crop_size, crop_size)
    mask = tf.reshape(mask, [crop_size, crop_size, 1])
    # image_batch = tf.train.batch(
    #    [image_seq],
    #    FLAGS.batch_size,
    #    num_threads=FLAGS.batch_size,
    #    capacity=100 * FLAGS.batch_size)
    image_batch = tf.train.shuffle_batch(
        [image, mask],
        FLAGS.batch_size,
        num_threads=FLAGS.batch_size,
        capacity=100 * FLAGS.batch_size,
        min_after_dequeue=1600)
    return image_batch
#!/usr/bin/python
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" module for launching cluster job """
import os
import argparse
import socket
import copy
import time
import signal
from fabric.api import run, put, settings, env, prefix
from fabric.tasks import execute
#configuration for cluster
import conf
def refine_unknown_args(cmd_args):
    '''
    Normalize unknown command-line parameters.

    ``--key=value`` becomes the pair ``key value`` (only the first ``=``
    is split, and dashes are stripped), ``--flag`` becomes ``flag``, and
    anything else is passed through unchanged.
    '''
    refined = []
    for raw in cmd_args:
        if raw.startswith("--") and "=" in raw:
            # turn the first '=' into a space, drop leading dashes,
            # then split on spaces exactly like the original shell-style parse
            refined.extend(raw.replace("=", " ", 1).lstrip("-").split(" "))
        elif raw.startswith("--"):
            refined.append(raw.lstrip("-"))
        else:
            refined.append(raw)
    return refined
def kill_process():
    '''
    Kill leftover paddle processes on the remote host.

    Processes are identified by the ``paddle_process_by_paddle`` comment
    marker that this launcher attaches to everything it starts; runs via
    fabric on whichever host context is active.
    '''
    run("ps aux \
        | grep paddle_process_by_paddle \
        | grep -v grep \
        | awk '{print $2}' \
        | xargs kill > /dev/null 2>&1")
def job_prepare(jobdir, data=None):
    '''
    prepare job related workspace data

    Assuming you already installed PaddlePaddle in all nodes which means
    PaddlePaddle related bins and dependencies libraries.
    Assuming the train/test data have already been installed.
    This function just prepare all related model and other resources
    needed at runtime.
    '''

    def job_create_workspace(jobdir, data=None):
        '''
        prepare job workspace, common file, etc.

        When ``data`` is given the remote job directory is recreated
        from scratch and the local package is uploaded into it; the
        log directory is always wiped.
        '''
        log = os.path.join(jobdir, "log")
        if data is not None:
            #create job dir
            run('rm ' + jobdir + ' -fr && ' + 'mkdir -p ' + jobdir)
            #push data and paddle bin
            put(data + "/*", jobdir)
        run("mkdir -p " + log)
        run('rm -fr ' + log + "/*")

    def set_nodefile(nodeid):
        '''
        create nodefile for later usage

        Each host stores its own ordinal index so processes can learn
        their node id at runtime.
        '''
        run('echo ' + str(nodeid) + ' > ' + jobdir + '/nodefile')

    execute(job_create_workspace, jobdir, data, hosts=conf.HOSTS)
    # one execute() per host: every host receives a distinct node id
    for i in xrange(len(conf.HOSTS)):
        execute(set_nodefile, i, hosts=conf.HOSTS[i])
    #clean rubbish caused by exception
    with settings(warn_only=True):
        execute(kill_process, hosts=conf.HOSTS)
def job_pserver(jobdir, pids=None):
    """Launch one parameter server on every host listed in ``conf.HOSTS``.

    The pserver runs detached (nohup) with its output redirected to
    ``./log/server.log`` inside the job workspace.
    """
    options = [
        "--num_gradient_servers=" + str(len(conf.HOSTS)),
        "--nics=" + conf.PADDLE_NIC,
        "--port=" + str(conf.PADDLE_PORT),
        "--ports_num=" + str(conf.PADDLE_PORTS_NUM),
        # sparse-update ports are always opened by default
        "--ports_num_for_sparse=" + str(conf.PADDLE_PORTS_NUM_FOR_SPARSE),
        "--comment=" + "paddle_process_by_paddle",
    ]
    pargs = " " + " ".join(options)

    def start_pserver(jobdir, pargs):
        """Start a single pserver via the fabric executor."""
        with prefix('export LD_LIBRARY_PATH=' +
                    conf.LD_LIBRARY_PATH +
                    ':$LD_LIBRARY_PATH'):
            program = 'paddle pserver'
            run('cd ' + jobdir + '; ' +
                'GLOG_logtostderr=0 GLOG_log_dir="./log" ' +
                'nohup ' +
                program + " " + pargs + ' > ./log/server.log 2>&1 < /dev/null & ',
                pty=False)

    execute(start_pserver, jobdir, pargs, hosts=conf.HOSTS)
def job_trainer(jobdir, train_args_dict, pids=None):
    """Start a paddle trainer on every host in ``conf.HOSTS``.

    Args:
        jobdir: remote job workspace directory.
        train_args_dict: extra ``--key=value`` options forwarded verbatim
            to ``paddle train``.
        pids: unused; kept for interface compatibility with job_pserver.
    """
    args = " --num_gradient_servers=" + str(len(conf.HOSTS))
    args += " --nics=" + conf.PADDLE_NIC
    args += " --port=" + str(conf.PADDLE_PORT)
    args += " --ports_num=" + str(conf.PADDLE_PORTS_NUM)
    args += " --comment=" + "paddle_process_by_paddle"

    # Build the comma-separated pserver endpoint list, resolving each
    # host entry of the form ``[user@]hostname[:port]`` to its IP.
    ip_string = ""
    for i in xrange(len(conf.HOSTS)):
        host = conf.HOSTS[i]
        left = host.find("@")
        right = host.find(':')
        left = 0 if left == -1 else left + 1
        right = len(host) if right == -1 else right
        ip_string += (socket.gethostbyname(host[left:right]) + ",")
    ip_string = ip_string.rstrip(",")
    args += " --pservers=" + ip_string

    args_ext = ""
    for key, value in train_args_dict.items():
        args_ext += (' --' + key + '=' + value)
    args += " " + args_ext

    def start_trainer(jobdir, args):
        """Start a single trainer process via the fabric executor."""
        with prefix('export LD_LIBRARY_PATH=' +
                    conf.LD_LIBRARY_PATH +
                    ':$LD_LIBRARY_PATH'):
            program = 'paddle train'
            run('cd ' + jobdir + '; ' +
                'GLOG_logtostderr=0 '
                'GLOG_log_dir="./log" '
                'nohup ' +
                program + " " + args + " > ./log/train.log 2>&1 < /dev/null & ",
                pty=False)

    for i in xrange(len(conf.HOSTS)):
        # FIX: plain concatenation replaces copy.deepcopy(args) — deepcopy
        # of an immutable str was a no-op and only obscured the intent.
        train_args = args + " --trainer_id=" + str(i)
        execute(start_trainer, jobdir, train_args, hosts=conf.HOSTS[i])
def job_all(job_package, jobdir=None, train_args_dict=None):
    """Full launch sequence: prepare workspace, start pservers, trainers.

    param job_package: local directory dispatched to all hosts (or None)
    param jobdir: remote workspace; a timestamped one is created if None
    param train_args_dict: extra trainer options
    """
    if jobdir is None:
        stamp = time.strftime("%Y%m%d%H%M%S", time.localtime())
        jobdir = conf.ROOT_DIR + "/JOB" + stamp
    job_prepare(jobdir, job_package)
    job_pserver(jobdir)
    # give the pservers a moment to come up before trainers connect
    time.sleep(5)
    job_trainer(jobdir, train_args_dict)
    job_clean()
def job_clean():
    """Install a SIGINT handler that kills leftover paddle daemons.

    If launching fails inside paddle itself, the daemon processes keep
    running on the cluster; pressing Ctrl+C here cleans them up on every
    host before the launcher exits.
    """
    def signal_handler(signum, frame):
        """SIGINT handler: kill paddle processes on all hosts.

        FIX: reuses the module-level kill_process instead of redefining
        an identical copy inline, and the parameter is named ``signum``
        so it no longer shadows the ``signal`` module.
        """
        with settings(warn_only=True):
            execute(kill_process, hosts=conf.HOSTS)

    signal.signal(signal.SIGINT, signal_handler)
    signal.pause()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        prog="paddle.py", description='simple tool for cluster training')
    parser.add_argument(
        '-j',
        '--job_workspace',
        required=False,
        default=None,
        help='job workspace')
    parser.add_argument(
        '-p',
        '--job_dispatch_package',
        required=False,
        default=None,
        help='job package for dispatching to all other nodes')

    args, train_args_list = parser.parse_known_args()
    train_args = refine_unknown_args(train_args_list)
    # pair up the flattened [key, value, key, value, ...] token list
    train_args_dict = dict(zip(train_args[:-1:2], train_args[1::2]))

    if args.job_workspace is not None:
        #if assigned workspace, do not need to dispatch data,
        #so job_local_package should be None
        assert args.job_dispatch_package is None
        job_all(None, args.job_workspace, train_args_dict)
    elif args.job_dispatch_package is not None:
        assert args.job_workspace is None
        assert os.path.isdir(args.job_dispatch_package)
        job_all(args.job_dispatch_package, None, train_args_dict)
    else:
        # FIX: parenthesized call works under both Python 2 and Python 3;
        # the original bare `print` statement was Python 2 only.
        print("--job_workspace or --job_dispatch_package should be set")
This template intentionally left blank | html | github | https://github.com/django/django | tests/templates/views/article_archive_day.html |
/* Styles for the documentation landing page (everything scoped to #index).
   Layout: help/documentation boxes on the right, topic definition lists
   floated on the left, later sections cleared below earlier floats. */
#index p.rubric { font-size:150%; font-weight:normal; margin-bottom:.2em; color:#487858; }
#index div.section dt { font-weight: normal; }
/* right-hand "getting help" box */
#index #s-getting-help { float: right; width: 35em; background: #E1ECE2; padding: 1em; margin: 2em 0 2em 2em; }
#index #s-getting-help h2 { margin: 0; }
/* per-release documentation box, pushed right past the left columns */
#index #s-django-documentation div.section div.section h3 { margin: 0; }
#index #s-django-documentation div.section div.section { background: #E1ECE2; padding: 1em; margin: 2em 0 2em 40.3em; }
#index #s-django-documentation div.section div.section a.reference { white-space: nowrap; }
/* left-hand topic columns */
#index #s-using-django dl,
#index #s-add-on-contrib-applications dl,
#index #s-solving-specific-problems dl,
#index #s-reference dl
{ float: left; width: 41em; }
/* each later group starts below the previous floats */
#index #s-add-on-contrib-applications,
#index #s-solving-specific-problems,
#index #s-reference,
#index #s-and-all-the-rest
{ clear: left; }
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe import _, throw, msgprint
from frappe.utils import cstr, nowdate
from frappe.model.document import Document
class SMSSettings(Document):
    """Singleton DocType holding the SMS gateway configuration.

    All behaviour lives in the module-level helper functions below.
    """
    pass
def validate_receiver_nos(receiver_list):
    """Strip formatting characters from every receiver number.

    Spaces, ``+``, ``-`` and parentheses are removed; a frappe error is
    raised when the resulting list is empty.
    """
    cleaned_numbers = []
    for number in receiver_list:
        # remove invalid characters
        for junk in (' ', '+', '-', '(', ')'):
            number = number.replace(junk, '')
        cleaned_numbers.append(number)

    if not cleaned_numbers:
        throw(_("Please enter valid mobile nos"))

    return cleaned_numbers
def get_sender_name():
    """Return the configured SMS sender name, defaulting to 'ERPNXT'.

    NOTE(review): the guard below only rejects names *longer* than 6
    characters (and only for India), although the error message says the
    name must be exactly 6 — confirm the intended rule before changing.
    """
    sender_name = frappe.db.get_single_value('SMS Settings', 'sms_sender_name') or \
        'ERPNXT'
    if len(sender_name) > 6 and \
            frappe.db.get_default("country") == "India":
        throw("""As per TRAI rule, sender name must be exactly 6 characters.
            Kindly change sender name in Setup --> Global Defaults.
            Note: Hyphen, space, numeric digit, special characters are not allowed.""")
    return sender_name
@frappe.whitelist()
def get_contact_number(contact_name, value, key):
    """Return the mobile number (or phone as fallback) of a Contact.

    NOTE(review): ``key`` is interpolated into the SQL as a column name —
    only the values are parameterized. Since this is a whitelisted
    endpoint, untrusted input in ``key`` would be a SQL-injection vector;
    confirm every call site passes a fixed field name.
    """
    number = frappe.db.sql("""select mobile_no, phone from tabContact where name=%s and %s=%s""" %
        ('%s', key, '%s'), (contact_name, value))
    # first row: prefer mobile_no, fall back to phone, else empty string
    return number and (number[0][0] or number[0][1]) or ''
@frappe.whitelist()
def send_sms(receiver_list, msg, sender_name = ''):
    """Validate the receivers and send ``msg`` through the SMS gateway.

    Args:
        receiver_list: list of mobile numbers, or a JSON-encoded string
            of one (whitelisted endpoints receive it serialized).
        msg: message body to send.
        sender_name: optional sender id; falls back to get_sender_name().
    """
    import json

    # Python 2 only: ``basestring`` covers both str and unicode.
    if isinstance(receiver_list, basestring):
        receiver_list = json.loads(receiver_list)
        if not isinstance(receiver_list, list):
            receiver_list = [receiver_list]

    receiver_list = validate_receiver_nos(receiver_list)

    arg = {
        'receiver_list' : receiver_list,
        'message'       : msg,
        'sender_name'   : sender_name or get_sender_name()
    }

    if frappe.db.get_value('SMS Settings', None, 'sms_gateway_url'):
        ret = send_via_gateway(arg)
        msgprint(ret)
    else:
        msgprint(_("Please Update SMS Settings"))
def send_via_gateway(arg):
    """Send the message to every receiver through the configured gateway.

    Builds the request parameters from the SMS Settings doc (message
    parameter plus static extras), then issues one GET per receiver and
    returns the list of raw responses.
    """
    settings_doc = frappe.get_doc('SMS Settings', 'SMS Settings')
    request_args = {settings_doc.message_parameter: arg.get('message')}
    for extra in settings_doc.get("parameters"):
        request_args[extra.parameter] = extra.value

    responses = []
    for receiver in arg.get('receiver_list'):
        request_args[settings_doc.receiver_parameter] = receiver
        responses.append(send_request(settings_doc.sms_gateway_url, request_args))
    return responses
# Send Request
# =========================================================
def send_request(gateway_url, args):
    """GET the gateway URL with ``args`` url-encoded; return the body.

    Python 2 only (httplib/urllib). The connection is plain HTTP; the
    api_url returned by scrub_gateway_url already ends with '?', so the
    encoded query string can be appended directly.
    """
    import httplib, urllib

    server, api_url = scrub_gateway_url(gateway_url)
    conn = httplib.HTTPConnection(server)  # open connection
    headers = {}
    headers['Accept'] = "text/plain, text/html, */*"
    conn.request('GET', api_url + urllib.urlencode(args), headers = headers)  # send request
    resp = conn.getresponse()  # get response
    resp = resp.read()
    return resp
# Split gateway url to server and api url
# =========================================================
def scrub_gateway_url(url):
    """Split a gateway URL into ``(server, api_url)``.

    The ``http://`` scheme is dropped and the returned api_url always
    ends with ``'?'`` so a query string can be appended directly.
    """
    parts = url.replace('http://', '').strip().split('/')
    server = parts[0]
    api_url = '/' + '/'.join(parts[1:])
    if not api_url.endswith('?'):
        api_url += '?'
    return server, api_url
# Create SMS Log
# =========================================================
def create_sms_log(arg, sent_sms):
    """Persist an SMS Log entry recording what was requested and sent.

    NOTE(review): ``frappe.get_doc('SMS Log')`` normally *fetches* a doc;
    creating a new record is usually ``frappe.new_doc`` — confirm against
    the frappe version this targets.
    """
    sl = frappe.get_doc('SMS Log')
    sl.sender_name = arg['sender_name']
    sl.sent_on = nowdate()
    sl.receiver_list = cstr(arg['receiver_list'])
    sl.message = arg['message']
    sl.no_of_requested_sms = len(arg['receiver_list'])
    sl.no_of_sent_sms = sent_sms
    sl.save()
# Copyright 2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License",
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for choosing which member of a replica set to read from."""
import random
from pymongo.errors import ConfigurationError
class ReadPreference:
    """An enum that defines the read preference modes supported by PyMongo.
    Used in three cases:

    :class:`~pymongo.mongo_client.MongoClient` connected to a single host:

    * `PRIMARY`: Queries are allowed if the host is standalone or the replica
      set primary.
    * All other modes allow queries to standalone servers, to the primary, or
      to secondaries.

    :class:`~pymongo.mongo_client.MongoClient` connected to a mongos, with a
    sharded cluster of replica sets:

    * `PRIMARY`: Queries are sent to the primary of a shard.
    * `PRIMARY_PREFERRED`: Queries are sent to the primary if available,
      otherwise a secondary.
    * `SECONDARY`: Queries are distributed among shard secondaries. An error
      is raised if no secondaries are available.
    * `SECONDARY_PREFERRED`: Queries are distributed among shard secondaries,
      or the primary if no secondary is available.
    * `NEAREST`: Queries are distributed among all members of a shard.

    :class:`~pymongo.mongo_replica_set_client.MongoReplicaSetClient`:

    * `PRIMARY`: Queries are sent to the primary of the replica set.
    * `PRIMARY_PREFERRED`: Queries are sent to the primary if available,
      otherwise a secondary.
    * `SECONDARY`: Queries are distributed among secondaries. An error
      is raised if no secondaries are available.
    * `SECONDARY_PREFERRED`: Queries are distributed among secondaries,
      or the primary if no secondary is available.
    * `NEAREST`: Queries are distributed among all members.
    """
    PRIMARY = 0
    PRIMARY_PREFERRED = 1
    SECONDARY = 2
    # Alias of SECONDARY kept for backward compatibility with older code.
    SECONDARY_ONLY = 2
    SECONDARY_PREFERRED = 3
    NEAREST = 4
# For formatting error messages: maps mode integers back to their names.
# (SECONDARY_ONLY aliases SECONDARY, so it shares that entry.)
modes = {
    ReadPreference.PRIMARY: 'PRIMARY',
    ReadPreference.PRIMARY_PREFERRED: 'PRIMARY_PREFERRED',
    ReadPreference.SECONDARY: 'SECONDARY',
    ReadPreference.SECONDARY_PREFERRED: 'SECONDARY_PREFERRED',
    ReadPreference.NEAREST: 'NEAREST',
}
# Mapping between ReadPreference integers (list index) and the camelCase
# mode names used in mongos read-preference documents.
_mongos_modes = [
    'primary', 'primaryPreferred', 'secondary', 'secondaryPreferred', 'nearest']


def mongos_mode(mode):
    """Return the mongos string name for a ReadPreference integer."""
    return _mongos_modes[mode]


def mongos_enum(enum):
    """Return the ReadPreference integer for a mongos string name."""
    return _mongos_modes.index(enum)
def select_primary(members):
    """Return the primary member if it is currently up, else None."""
    for candidate in members:
        if candidate.is_primary:
            # A replica set has at most one primary; if it is down there
            # is nothing else worth returning.
            return candidate if candidate.up else None
    return None
def select_member_with_tags(members, tags, secondary_only, latency):
    """Return a random up member matching ``tags`` within the latency window.

    Members that are down, in a transitional state (neither primary nor
    secondary), or primaries when ``secondary_only`` is set, are skipped.
    Among the matches, one is chosen at random from those whose average
    ping time is within ``latency`` milliseconds of the fastest.
    """
    candidates = [
        member for member in members
        if member.up
        and not (secondary_only and member.is_primary)
        and (member.is_primary or member.is_secondary)
        and member.matches_tags(tags)
    ]
    if not candidates:
        return None

    # ping_time is in seconds; latency is in milliseconds
    fastest = min(member.get_avg_ping_time() for member in candidates)
    window = latency / 1000.
    near_candidates = [
        member for member in candidates
        if member.get_avg_ping_time() - fastest < window]
    return random.choice(near_candidates)
def select_member(
    members,
    mode=ReadPreference.PRIMARY,
    tag_sets=None,
    latency=15
):
    """Return a Member satisfying ``mode`` and ``tag_sets``, or None.

    Tag sets are tried in order; the first one yielding a match wins.
    The *_PREFERRED modes are implemented by recursing with the strict
    mode first and falling back to the other role.
    """
    if tag_sets is None:
        tag_sets = [{}]

    if mode == ReadPreference.PRIMARY:
        if tag_sets != [{}]:
            raise ConfigurationError("PRIMARY cannot be combined with tags")
        return select_primary(members)

    if mode == ReadPreference.PRIMARY_PREFERRED:
        primary = select_member(members, ReadPreference.PRIMARY, [{}], latency)
        if primary:
            return primary
        return select_member(members, ReadPreference.SECONDARY, tag_sets, latency)

    if mode == ReadPreference.SECONDary if False else ReadPreference.SECONDARY:
        pass
    if mode == ReadPreference.SECONDARY:
        for tags in tag_sets:
            match = select_member_with_tags(members, tags, True, latency)
            if match:
                return match
        return None

    if mode == ReadPreference.SECONDARY_PREFERRED:
        secondary = select_member(members, ReadPreference.SECONDARY, tag_sets, latency)
        if secondary:
            return secondary
        return select_member(members, ReadPreference.PRIMARY, [{}], latency)

    if mode == ReadPreference.NEAREST:
        for tags in tag_sets:
            match = select_member_with_tags(members, tags, False, latency)
            if match:
                return match
        # Ran out of tags.
        return None

    raise ConfigurationError("Invalid mode %s" % repr(mode))
"""Commands that may be sent to replica-set secondaries, depending on
ReadPreference and tags. All other commands are always run on the primary.
"""
secondary_ok_commands = frozenset([
"group", "aggregate", "collstats", "dbstats", "count", "distinct",
"geonear", "geosearch", "geowalk", "mapreduce", "getnonce", "authenticate",
])
class MovingAverage(object):
    """Immutable structure tracking a 5-sample moving average."""

    def __init__(self, samples):
        # Keep only the five most recent samples.
        self.samples = samples[-5:]
        assert self.samples
        self.average = float(sum(self.samples)) / len(self.samples)

    def clone_with(self, sample):
        """Return a copy of this instance plus a new sample."""
        return MovingAverage(self.samples + [sample])

    def get(self):
        """Return the precomputed average."""
        return self.average
from random import random, shuffle
from braser import Braser
# noinspection PyUnresolvedReferences
from circus.desafio_a import main as desafio0
# Sprite-sheet cache keys and their asset URLs.
DETAIL, DETAILURL = "dungeon_detail", "DungeonWall.jpg"
MONSTER, MONSTERURL = "monster", "monstersheets.png?"
DETILE = "dungeon_detile"
FIRE, FIREURL = "fire", "http://s19.postimg.org/z9iojs2c3/magicfire.png"
# Global speed factor.
FSP = 1.5
# Velocity (vx, vy) keyed by facing angle in degrees.
# NOTE(review): MOVES is not referenced in this chunk — confirm it is used.
MOVES = {0: (0, FSP*150), 90: (FSP*-150, 0), 180: (0, FSP*-150), 270: (FSP*150, 0)}
# Eight compass-direction unit offsets, one every 45 degrees.
DIR = [(1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1), (0, -1), (1, -1)]
class Masmorra:
    """Dungeon scene: wires the Phaser game with the hero, monsters and magic.

    Acts as a lazy singleton via :meth:`created`.
    """
    _instance = None

    def __init__(self):
        self.gamer = Braser(800, 600)
        self.gamer.subscribe(self)
        self.game = self.gamer.game
        self.hero = Hero(self)
        self.sprite = Monster(self)
        # Sprite groups, created later in create().
        self.monsters = self.magic = None
        self.monster_list = []

    @classmethod
    def created(cls):
        """Return the singleton instance, building it on first call.

        Rebinding ``cls.created`` to a lambda makes every subsequent
        call return the cached instance (a memoizing trick).
        """
        cls._instance = Masmorra()
        cls.created = lambda *_: Masmorra._instance
        return cls._instance

    def posiciona_monstro(self, m, x, y):
        # Record a monster placement request as a (monster, x, y) tuple.
        self.monster_list.append((m, x, y))

    def preload(self):
        # Load sprite sheets: monsters, wall detail tiles and fire frames.
        self.game.load.spritesheet(MONSTER, MONSTERURL, 64, 63, 16*12)
        self.game.load.spritesheet(DETILE, DETAILURL, 128, 128, 12)
        self.game.load.spritesheet(FIRE, FIREURL, 96, 96, 25)

    def create(self):
        # Tile the 800x600 world with rotated 128px detail sprites, then
        # set up the physics-enabled monster and magic groups.
        self.game.physics.startSystem(self.gamer.PHASER.Physics.ARCADE)
        self.game.add.sprite(0, 0, DETILE)
        rotate = 0
        for i in range(6):
            for j in range(5):
                detail = self.game.add.sprite(64+i * 128, 64+j * 128, DETILE)
                detail.anchor.setTo(0.5, 0.5)
                detail.angle = rotate
                detail.frame = (6*j+i) % 12
                rotate += 90
        self.monsters = self.game.add.group()
        self.magic = self.game.add.group()
        self.monsters.enableBody = True
        self.magic.enableBody = True
        self.magic.checkWorldBounds = True
        self.magic.outOfBoundsKill = True

    def update(self):
        # Overlap callbacks: kill() removes the second sprite only,
        # killall() removes both colliding sprites.
        def kill(_, monster):
            monster.kill()

        def killall(magic, monster):
            magic.kill()
            monster.kill()

        self.game.physics.arcade.overlap(self.hero.sprite, self.sprite.sprite, kill, None, self)
        # self.game.physics.arcade.overlap(self.magic, self.monsters, killall, None, self)
        self.game.physics.arcade.overlap(self.magic, self.hero.sprite, killall, None, self)
class Monster:
    """Wandering monster that randomly changes direction and fires magic."""

    def __init__(self, masmorra):
        self.masmorra = masmorra
        masmorra.gamer.subscribe(self)
        self.game = masmorra.gamer.game
        self.sprite = None
        # Index into DIR (eight compass directions).
        self.direction = 0
        # Forces an initial velocity assignment on the very first update.
        self.first = True

    def create(self):
        # Animated monster sprite with a circular body that bounces off
        # the world bounds.
        sprite = self.game.add.sprite(148, 148, MONSTER)
        sprite.animations.add('mon', [6 * 16 + 0, 6 * 16 + 1, 6 * 16 + 2, 6 * 16 + 3], 4, True)
        sprite.play('mon')
        self.game.physics.arcade.enable(sprite)
        sprite.body.setCircle(28)
        sprite.anchor.setTo(0.5, 0.5)
        sprite.body.collideWorldBounds = True
        sprite.body.bounce.setTo(1, 1)
        self.masmorra.monsters.add(sprite)
        self.sprite = sprite

    def preload(self):
        # Assets are loaded centrally by Masmorra.preload.
        pass

    def update(self):
        player = self.sprite
        # Face the current travel direction (45-degree steps).
        player.angle = (self.direction*45+270) % 360
        # int(random() + 0.02) is 1 with ~2% probability per frame; on
        # those frames (or the first one) pick a new heading.
        if self.sprite.alive and int(random() + 0.02) or self.first:
            player.body.velocity.x, player.body.velocity.y = self.redirect(player, self.direction)
            player.animations.play('mon')

    def redirect(self, play, dd):
        """Pick a new random direction; fire a Magic bolt along the old one."""
        self.first = False
        vx, vy = DIR[dd]
        self.direction = d = int(random() * 8.0)
        x, y = play.body.position.x, play.body.position.y
        if vx or vy:
            Magic(self.masmorra, x + 30, y + 30, vx * 150, vy * 150, (dd * 45 + 180) % 360)
        x, y = DIR[d]
        return x * 150, y * 150
class Magic:
    """A fire bolt flying with fixed velocity until it leaves the world."""

    def __init__(self, masmorra, x, y, vx, vy, d):
        self.masmorra, self.x, self.y, self.d = masmorra, x, y, d
        # 1.5x the speed handed in by the shooting monster.
        self.v = vx * 1.5, vy * 1.5
        masmorra.gamer.subscribe(self)
        self.game = masmorra.gamer.game
        self.sprite = None
        # Tiny state machine: the first update() creates the sprite,
        # later ones only run the liveness check.
        self._create = self.create

    def kill(self):
        # Mark the bolt dead once it has left the world bounds.
        if not self.sprite.inWorld:
            print("kill")
            self.sprite.alive = False

    def create(self):
        sprite = self.game.add.sprite(self.x, self.y, FIRE)
        sprite.animations.add('fire', [10, 11, 12, 13, 14], 16, True)
        sprite.play('fire')
        sprite.scale.setTo(0.5, 0.5)
        self.game.physics.arcade.enable(sprite)
        sprite.body.setCircle(28)
        sprite.anchor.setTo(0.5, 0.5)
        self.masmorra.magic.add(sprite)
        self.sprite = sprite
        player = self.sprite
        player.body.velocity.x, player.body.velocity.y = self.v
        player.angle = self.d
        # Switch the per-frame action from creation to liveness checking.
        self._create = self.kill

    def preload(self):
        # Assets are loaded centrally by Masmorra.preload.
        pass

    def update(self):
        self._create()
class Hero:
    """Arrow-key controlled player character."""

    def __init__(self, gamer):
        self.gamer = gamer.gamer
        self.gamer.subscribe(self)
        self.game = self.gamer.game
        self.sprite = self.cursors = None

    def create(self):
        sprite = self.game.add.sprite(20, 148, MONSTER)
        sprite.animations.add('ani', [0, 1, 2, 3], 16, True)
        sprite.play('ani')
        self.game.physics.arcade.enable(sprite)
        sprite.body.setCircle(28)
        sprite.anchor.setTo(0.5, 0.5)
        sprite.body.collideWorldBounds = True
        self.sprite = sprite
        self.cursors = self.game.input.keyboard.createCursorKeys()

    def preload(self):
        # Assets are loaded centrally by Masmorra.preload.
        pass

    def update(self):
        # Reset velocity every frame, then apply whichever arrow keys are
        # pressed; stop the walk animation when none are.
        crs, player = self.cursors, self.sprite
        player.body.velocity.x, player.body.velocity.y = 0, 0
        player.animations.play('ani')
        # (isDown, facing angle, (vx, vy)) per arrow key.
        moves = [(crs.left.isDown, 90, (-150, 0)), (crs.right.isDown, 270, (150, 0)),
                 (crs.up.isDown, 180, (0, -150)), (crs.down.isDown, 0, (0, 150))]
        stopped = True
        for move in moves:
            if move[0]:
                player.angle = move[1]
                player.body.velocity.x, player.body.velocity.y = move[2]
                stopped = False
        if stopped:
            player.animations.stop()
# Room codes: desafio5/desafio6 below read code[1] as a direction letter
# out of "NLSO"; the meaning of the first letter is not shown in this
# chunk — presumably a chamber id (confirm against the challenge docs).
TOPO_ESQUERDA = "LS"
TOPO_DIREITA = "KO"
TOPO_CENTRO = "JN"
MEIO_ESQUERDA, CENTRO, MEIO_DIREITA = "IO", "FN", "IL"
FUNDO_ESQUERDA, FUNDO_CENTRO, FUNDO_DIREITA = "GS", "JS", "GL"
# 3x3 dungeon map used as the default parameter of circus().
MASMORRA = [[TOPO_ESQUERDA, TOPO_CENTRO, TOPO_DIREITA], [MEIO_ESQUERDA, CENTRO,
            MEIO_DIREITA], [FUNDO_ESQUERDA, FUNDO_CENTRO, FUNDO_DIREITA]]
# Chamber names in fixed 3x3 order, plus a flattened copy.
ORDERED_KEYS = [['Coycol', 'Cauha', 'Tetlah'],
                ['Huatlya', 'Zitllo', 'Micpe'],
                ['Nenea', 'Cahuitz', 'Pallotl']]
PLAIN_KEYS = ['Coycol', 'Cauha', 'Tetlah'] +\
             ['Huatlya', 'Zitllo', 'Micpe'] +\
             ['Nenea', 'Cahuitz', 'Pallotl']
# Shuffled copies consumed (and reshuffled) by desafio4/5/6.
SHUFFLE_KEYS = PLAIN_KEYS[:]
shuffle(SHUFFLE_KEYS)
SHUFFLE_DIRS = list("NLSO")
DIRS = list("NLSO")
shuffle(SHUFFLE_DIRS)
def desafio3(mmap):
    """Render the dungeon using the fixed chamber-name order.

    ``mmap`` maps chamber names to room codes; the 3x3 result is handed
    to desafio0 for display.
    """
    marray = [[mmap[key] for key in line] for line in ORDERED_KEYS]
    desafio0(marray)
def desafio4(mmap):
    """Render the dungeon using the shuffled key order, then reshuffle."""
    marray = [[mmap[key] for key in SHUFFLE_KEYS[row:row + 3]]
              for row in range(0, 9, 3)]
    desafio0(marray)
    shuffle(SHUFFLE_KEYS)
def desafio6(mmap):
    """Render with shuffled keys and position-dependent facing rotation."""
    marray = []
    for row in range(0, 9, 3):
        line = []
        for key in SHUFFLE_KEYS[row:row + 3]:
            code = mmap[key]
            # Rotate the facing by the key's shuffled position (mod 4).
            turn = (SHUFFLE_KEYS.index(key) + SHUFFLE_DIRS.index(code[1])) % 4
            line.append(code[0] + DIRS[turn])
        marray.append(line)
    desafio0(marray)
    shuffle(SHUFFLE_KEYS)
    shuffle(SHUFFLE_DIRS)
def desafio5(mmap):
    """Render with shuffled keys and remapped facings, then reshuffle."""
    keys = [SHUFFLE_KEYS[row:row + 3] for row in range(0, 9, 3)]
    print(keys)
    marray = [[mmap[key][0] + DIRS[SHUFFLE_DIRS.index(mmap[key][1])]
               for key in line]
              for line in keys]
    desafio0(marray)
    shuffle(SHUFFLE_KEYS)
    shuffle(SHUFFLE_DIRS)
def main(_=None):
    """Clear the page container and start a fresh dungeon (Brython only).

    The unused parameter keeps the signature compatible with the other
    DES dispatch entries, which all take one argument.
    """
    from browser import doc
    doc["pydiv"].html = ""
    Masmorra()
# Challenge dispatch table, indexed by challenge number (0 restarts the
# dungeon; 1 and 2 both reuse desafio0).
DES = [main, desafio0, desafio0, desafio3, desafio4, desafio5, desafio6]


def posiciona_monstro(m, x, y):
    """Forward a monster placement request to the singleton Masmorra."""
    masmorra = Masmorra.created()
    masmorra.posiciona_monstro(m, x, y)
def circus(desafio=1, param=MASMORRA):
    """Entry point used by the tutorial pages: run challenge number *desafio*
    with *param* (usually a tile-grid or a name->code mapping)."""
    DES[desafio](param)
# Logs the module name on import (debug aid) and runs the scene builder when
# executed as a script.
print(__name__)
if __name__ == "__main__":
    main()
PAGE0 = '''
<div class="section" id="bem-vindos-ao-circo-voador-da-programacao-python">
<h1>Bem Vindos ao Circo Voador da Programação Python<a class="headerlink"
href="#bem-vindos-ao-circo-voador-da-programacao-python" title="Permalink to this headline">¶</a></h1>
<p>Aqui vamos ter uma introdução rápida de como programar jogos para Web usando Python.
Na verdade vamos usar o Brython que é o Python que funciona dentro de um navegador web como o Firefox.</p>
<img alt="http://s19.postimg.org/ufgi8eztf/PPFC.jpg" src="http://s19.postimg.org/ufgi8eztf/PPFC.jpg" />
</div>
<div class="section" id="sumario">
<h1>Sumário<a class="headerlink" href="#sumario" title="Permalink to this headline">¶</a></h1>
<div class="toctree-wrapper compound">
<ul>
<li class="toctree-l1"><a class="reference internal" href="inicia.html">Primeiro Cenário do Jogo</a></li>
<li class="toctree-l1"><a class="reference internal" href="desafio_a.html">Criando uma Câmara com Constantes</a></li>
<li class="toctree-l1"><a class="reference internal" href="desafio_b.html">Posicionando um Personagem com Inteiros</a>
</li>
</ul>
</div>
</div>
<div class="section" id="indices-e-tabelas">
<h1>Indices e Tabelas<a class="headerlink" href="#indices-e-tabelas" title="Permalink to this headline">¶</a></h1>
<ul class="simple">
<li><a class="reference internal" href="genindex.html"><em>Index</em></a></li>
<li><a class="reference internal" href="py-modindex.html"><em>Module Index</em></a></li>
<li><a class="reference internal" href="search.html"><em>Search Page</em></a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="footer">
© Copyright 2016, Carlo E. T. Oliveira.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.3.
</div>
'''
PAGE = [""]*10
PAGE[0] = '''
<div class="section" id="bem-vindos-ao-circo-voador-da-programacao-python">
<h1>Bem Vindos ao Circo Voador da Programação Python<a class="headerlink" href="#bem-vindos-ao-circo-voador-da-programacao-python" title="Permalink to this headline">¶</a></h1>
<p>Aqui vamos ter uma introdução rápida de como programar jogos para Web usando Python.
Na verdade vamos usar o Brython que é o Python que funciona dentro de um navegador web como o Firefox.</p>
<img alt="http://s19.postimg.org/ufgi8eztf/PPFC.jpg" src="http://s19.postimg.org/ufgi8eztf/PPFC.jpg" />
</div>
<div class="section" id="sumario">
<h1>Sumário<a class="headerlink" href="#sumario" title="Permalink to this headline">¶</a></h1>
<div class="toctree-wrapper compound">
<ul>
<li class="toctree-l1"><a class="reference internal" href="inicia.html">Primeiro Cenário do Jogo</a></li>
<li class="toctree-l1"><a class="reference internal" href="desafio_a.html">Criando uma Câmara com Constantes</a></li>
<li class="toctree-l1"><a class="reference internal" href="desafio_b.html">Posicionando um Personagem com Inteiros</a></li>
</ul>
</div>
</div>
<div class="section" id="indices-e-tabelas">
<h1>Indices e Tabelas<a class="headerlink" href="#indices-e-tabelas" title="Permalink to this headline">¶</a></h1>
<ul class="simple">
<li><a class="reference internal" href="genindex.html"><em>Index</em></a></li>
<li><a class="reference internal" href="py-modindex.html"><em>Module Index</em></a></li>
<li><a class="reference internal" href="search.html"><em>Search Page</em></a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="footer">
© Copyright 2016, Carlo E. T. Oliveira.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.3.
</div>
'''
# Page 1: challenge A — building the camera with constants.
PAGE[1] ="""
<div class="section" id="criando-uma-camara-com-constantes">
<span id="desafio-a"></span><h1>Criando uma Câmara com Constantes<a class="headerlink" href="#criando-uma-camara-com-constantes" title="Permalink to this headline">¶</a></h1>
<p>Uma constante é um valor que não se modifica ao longo de um programa.
Em Python a constante é escrita com todas as letras maiúsculas como no nome TOPO_ESQUERDA abaixo.</p>
<p>Use os ladrilhos nomeados de A a L para montar a câmara mostrada à direita.</p>
<img alt="_images/desafio_a.png" src="http://s19.postimg.org/del9469xv/desafio_a.png" />
<div class="highlight-python"><div class="highlight"><pre><span class="kn">from</span> <span class="nn">circus.circus</span> <span class="kn">import</span> <span class="n">circus</span>
<span class="n">TOPO_ESQUERDA</span> <span class="o">=</span> <span class="s">"AN"</span>
<span class="n">TOPO_DIREITA</span> <span class="o">=</span> <span class="s">"AN"</span>
<span class="n">TOPO_CENTRO</span> <span class="o">=</span> <span class="s">"AN"</span>
<span class="n">MEIO_ESQUERDA</span><span class="p">,</span> <span class="n">CENTRO</span><span class="p">,</span> <span class="n">MEIO_DIREITA</span> <span class="o">=</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span>
<span class="n">FUNDO_ESQUERDA</span><span class="p">,</span> <span class="n">FUNDO_CENTRO</span><span class="p">,</span> <span class="n">FUNDO_DIREITA</span> <span class="o">=</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span>
<span class="c"># O comando abaixo voce vai entender no próximo desafio</span>
<span class="n">circus</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="p">[[</span><span class="n">TOPO_ESQUERDA</span><span class="p">,</span> <span class="n">TOPO_CENTRO</span><span class="p">,</span> <span class="n">TOPO_DIREITA</span><span class="p">],</span> <span class="p">[</span><span class="n">MEIO_ESQUERDA</span><span class="p">,</span> <span class="n">CENTRO</span><span class="p">,</span>
<span class="n">MEIO_DIREITA</span><span class="p">],</span> <span class="p">[</span><span class="n">FUNDO_ESQUERDA</span><span class="p">,</span> <span class="n">FUNDO_CENTRO</span><span class="p">,</span> <span class="n">FUNDO_DIREITA</span><span class="p">]])</span>
</pre></div>
</div>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p class="last">No texto “AN” a primeira letra determina o ladriho e a segunda se está girada para Norte, Leste, Sul ou Oeste.</p>
</div>
</div>
</div>
</div>
</div>
"""
PAGE[2] ="""
<div class="related">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="genindex.html" title="General Index"
accesskey="I">index</a></li>
<li class="right" >
<a href="desafio_c.html" title="Posicionando um Personagem com Inteiros"
accesskey="N">next</a> |</li>
<li class="right" >
<a href="desafio_a.html" title="Criando uma Câmara com Constantes"
accesskey="P">previous</a> |</li>
<li><a href="index.html">Flying Circus 0.1.0 documentation</a> »</li>
</ul>
</div>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body">
<div class="section" id="criando-uma-camara-com-listas">
<span id="desafio-b"></span><h1>Criando uma Câmara com Listas<a class="headerlink" href="#criando-uma-camara-com-listas" title="Permalink to this headline">¶</a></h1>
<p>Uma lista é um conjunto de coisas, pode ser um conjunto de números, letras, palavras ou qualquer outro objeto.
Em Python a lista é escrita assim: <em>[<uma coisa>, <outra coisa>]</em>.</p>
<p>Use os ladrilhos nomeados de A a L para montar a câmara mostrada abaixo, consulte o exercício anterior.</p>
<img alt="_images/masmorra.jpg" src="_images/masmorra.jpg" />
<div class="highlight-python"><div class="highlight"><pre><span class="kn">from</span> <span class="nn">circus.circus</span> <span class="kn">import</span> <span class="n">circus</span>
<span class="n">MASMORRA</span> <span class="o">=</span> <span class="p">[[</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">],</span>
<span class="p">[</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">],</span>
<span class="p">[</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">],</span>
<span class="p">[</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">],</span>
<span class="p">[</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">,</span> <span class="s">"AN"</span><span class="p">]</span>
<span class="p">]</span>
<span class="n">circus</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="n">MASMORRA</span><span class="p">)</span>
</pre></div>
</div>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p class="last">No texto “AN” a primeira letra determina o ladriho e a segunda se está girada para Norte, Leste, Sul ou Oeste.</p>
</div>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar">
<div class="sphinxsidebarwrapper">
<h4>Previous topic</h4>
<p class="topless"><a href="desafio_a.html"
title="previous chapter">Criando uma Câmara com Constantes</a></p>
<h4>Next topic</h4>
<p class="topless"><a href="desafio_c.html"
title="next chapter">Posicionando um Personagem com Inteiros</a></p>
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="_sources/desafio_b.txt"
rel="nofollow">Show Source</a></li>
</ul>
<div id="searchbox" style="display: none">
<h3>Quick search</h3>
<form class="search" action="search.html" method="get">
<input type="text" name="q" />
<input type="submit" value="Go" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
<p class="searchtip" style="font-size: 90%">
Enter search terms or a module, class or function name.
</p>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="related">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="genindex.html" title="General Index"
>index</a></li>
<li class="right" >
<a href="desafio_c.html" title="Posicionando um Personagem com Inteiros"
>next</a> |</li>
<li class="right" >
<a href="desafio_a.html" title="Criando uma Câmara com Constantes"
>previous</a> |</li>
<li><a href="index.html">Flying Circus 0.1.0 documentation</a> »</li>
</ul>
</div>
<div class="footer">
© Copyright 2016, Carlo E. T. Oliveira.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.3.
</div>
"""
PAGE1 ="""
<div class="related">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="genindex.html" title="General Index"
accesskey="I">index</a></li>
<li class="right" >
<a href="desafio_g.html" title="Dar Nomes para os Monstros com string"
accesskey="N">next</a> |</li>
<li class="right" >
<a href="desafio_e.html" title="Matar o monstro com if"
accesskey="P">previous</a> |</li>
<li><a href="index.html">Flying Circus 0.1.0 documentation</a> »</li>
</ul>
</div>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body">
<div class="section" id="criando-varios-monstros">
<span id="desafio-f"></span><h1>Criando Vários Monstros<a class="headerlink" href="#criando-varios-monstros" title="Permalink to this headline">¶</a></h1>
<p>O comando <em>for</em> caminha em uma lista e executa o conjunto de comandos indicado para cada elemento.
Em Python o for é escrito assim: <em>for <elemento> in <lista>:</em>.
Se cada elemento da lista for outra lista, você pode colocar vários elementos separados por vírgualas, veja:</p>
<p><em>for <elemento0>, <elemento1> in <lista com listas>:</em></p>
<dl class="docutils">
<dt>Complete a lista de elementos com coordenadas para diversos monstros</dt>
<dd>e chame a função <em>posiciona_monstro()</em> para cada um deles.</dd>
</dl>
<div class="highlight-python"><div class="highlight"><pre><span class="kn">from</span> <span class="nn">circus.circus</span> <span class="kn">import</span> <span class="n">posiciona_monstro</span>
<span class="c"># lista_de_posições = [(0, 0, 0), (<>), <>]</span>
<span class="c"># for <> :</span>
<span class="c"># <></span>
</pre></div>
</div>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p class="last">Na tripla ordenada (0, 1, 2) o 0 serve para usar a figura de monstro 0, o 1 para colocar o monstro na posição x=1 e o 2 na posição y=2.</p>
</div>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar">
<div class="sphinxsidebarwrapper">
<h4>Previous topic</h4>
<p class="topless"><a href="desafio_e.html"
title="previous chapter">Matar o monstro com if</a></p>
<h4>Next topic</h4>
<p class="topless"><a href="desafio_g.html"
title="next chapter">Dar Nomes para os Monstros com string</a></p>
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="_sources/desafio_f.txt"
rel="nofollow">Show Source</a></li>
</ul>
<div id="searchbox" style="display: none">
<h3>Quick search</h3>
<form class="search" action="search.html" method="get">
<input type="text" name="q" />
<input type="submit" value="Go" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
<p class="searchtip" style="font-size: 90%">
Enter search terms or a module, class or function name.
</p>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="related">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="genindex.html" title="General Index"
>index</a></li>
<li class="right" >
<a href="desafio_g.html" title="Dar Nomes para os Monstros com string"
>next</a> |</li>
<li class="right" >
<a href="desafio_e.html" title="Matar o monstro com if"
>previous</a> |</li>
<li><a href="index.html">Flying Circus 0.1.0 documentation</a> »</li>
</ul>
</div>
<div class="footer">
© Copyright 2016, Carlo E. T. Oliveira.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.3.
</div>
"""
PAGE1 ="""
"""
def desafio(desafio=1):
    """Load the tutorial page for challenge number *desafio* into the page div."""
    # NOTE(review): the parameter shadows the function's own name; renaming it
    # would change the keyword-argument API, so it is left as-is.
    from browser import doc
    doc["pydiv"].html = PAGE[desafio]
import os
import sys
from collections import defaultdict
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any, Optional
import matplotlib.pyplot as plt
from scipy.stats import gmean
import torch
from torch._inductor.runtime.benchmarking import benchmarker
def benchmark_kernel_in_milliseconds(func: Callable, *args, **kwargs) -> float:
    """Warm *func* up, then time a single GPU invocation.

    Returns the measured latency in milliseconds. Timing runs under the
    ``fail_on_recompile`` stance so an unexpected recompile raises instead of
    silently skewing the measurement.
    """
    warmup_iterations = 5
    for _ in range(warmup_iterations):
        func(*args, **kwargs)

    def run_once():
        return func(*args, **kwargs)

    with torch.compiler.set_stance("fail_on_recompile"):
        return benchmarker.benchmark_gpu(run_once)
@dataclass
class Performance:
    """A single measured benchmark data point for one kernel/backend pair."""

    # Benchmark setting, usually the shape of the input tensor.
    setting: str
    # Measured latency in milliseconds.
    latency: float
    # Bytes of memory traffic the kernel performs.
    memory_bytes: float
    # Derived in __post_init__: achieved memory bandwidth in GB/s.
    memory_bandwidth: float = 0.0
    # Compute intensity in FLOPs/byte (not computed here).
    compute_intensity: float = 0.0

    def __post_init__(self):
        # bytes / seconds, scaled to GB/s.
        seconds = self.latency / 1000
        self.memory_bandwidth = self.memory_bytes / seconds / 1e9

    def __str__(self):
        return (
            f"setting: {self.setting}, latency: {self.latency} ms, "
            f"memory bandwidth: {self.memory_bandwidth} GB/s"
        )
class BenchmarkKernel:
    """Base harness for benchmarking one kernel across several backends.

    Subclasses implement the backend methods (``eager``, ``compiled``, ...);
    each backend method takes ``(args, kwargs)`` and returns a zero-argument
    callable that runs the kernel. This class handles timing, accuracy
    checking, result bookkeeping and plotting.
    """

    def __init__(self, script_args):
        self.script_args = script_args
        self.name = self.__class__.__name__
        self.available_backends: list[str] = []
        self.compile_mode: str = script_args.compile_mode
        # mapping from backend to list of performance results
        self.profiling_results: defaultdict[str, list[Performance]] = defaultdict(list)

    def get_memory_bytes(self, args, kwargs) -> int:
        # Get the necessary memory access in bytes for the kernel
        raise NotImplementedError

    def get_shapes(self) -> tuple[tuple[int, ...], ...]:
        # Get a list of input shapes to benchmark the kernel
        raise NotImplementedError

    def eager(self, args, kwargs) -> Any:
        raise NotImplementedError

    def compiled(self, args, kwargs) -> Any:
        raise NotImplementedError

    def helion(self, args, kwargs) -> Any:
        raise NotImplementedError

    def quack(self, args, kwargs) -> Any:
        raise NotImplementedError

    def liger(self, args, kwargs) -> Any:
        raise NotImplementedError

    def triton(self, args, kwargs) -> Any:
        raise NotImplementedError

    def benchmark(self):
        raise NotImplementedError

    def clone_inputs(self, args, kwargs) -> Any:
        """Deep-copy tensor args/kwargs (preserving requires_grad) so each
        backend runs on fresh, independent inputs."""
        args_ref = [
            arg.clone().detach().requires_grad_(arg.requires_grad) for arg in args
        ]
        kwargs_ref = (
            {
                k: (
                    v.clone().detach().requires_grad_(v.requires_grad)
                    if isinstance(v, torch.Tensor)
                    else v
                )
                for k, v in kwargs.items()
            }
            if kwargs
            else kwargs
        )
        return args_ref, kwargs_ref

    def check_accuracy(self, args, kwargs) -> None:
        """Run every available backend on cloned inputs and compare outputs
        (and gradients, where tracked) against the eager reference."""
        res = {}
        for backend in self.available_backends:
            args_ref, kwargs_ref = self.clone_inputs(args, kwargs)
            res[backend] = getattr(self, backend)(args_ref, kwargs_ref)()
        if (
            "compiled" in self.available_backends
            and self.script_args.custom_compile_options
        ):
            torch._dynamo.reset()  # cause recompile
            with torch._inductor.config.patch(self.script_args.custom_compile_options):
                args_ref, kwargs_ref = self.clone_inputs(args, kwargs)
                res[self.script_args.custom_compile_name] = self.compiled(
                    args_ref, kwargs_ref
                )()
        gold = res["eager"]
        tol = {}
        if self.script_args.tolerance:
            tol = {
                "atol": self.script_args.tolerance,
                "rtol": self.script_args.tolerance,
            }
        for backend in res:
            if backend == "eager":
                continue
            try:
                torch.testing.assert_close(res[backend], gold, **tol)
                for t, gold_t in zip(res[backend], gold):
                    if t.requires_grad:
                        torch.testing.assert_close(t.grad, gold_t.grad, **tol)
                print(
                    f"Accuracy check \033[92m✓ succeed\033[0m for {backend} backend on {self.name} kernel"
                )
            except Exception as e:
                print(
                    f"Accuracy check \033[91m✗ failed\033[0m for {backend} backend on {self.name} kernel. Error {e}"
                )
                if self.script_args.exit_on_accuracy_failure:
                    print("Exit right away since --exit-on-accuracy-failure is set")
                    sys.exit(1)

    def benchmark_single_shape_for_backend(
        self, backend, args, kwargs, setting, fn=None
    ) -> bool:
        """Time one backend on one input; on failure, drop the backend from
        the available list and return False."""
        if fn is None:
            fn = getattr(self, backend)
        args_ref, kwargs_ref = self.clone_inputs(args, kwargs)
        try:
            avg_time = benchmark_kernel_in_milliseconds(fn(args_ref, kwargs_ref))
        except Exception as e:
            print(
                f"Failed to run {backend} backend on {self.name} kernel for {setting} due to {e}"
            )
            self.available_backends.remove(backend)  # noqa: B909
            return False
        mem_bytes = self.get_memory_bytes(args_ref, kwargs_ref)
        perf = Performance(setting, avg_time, mem_bytes)
        print(f"{self.name} kernel on {backend} backend. {perf}")
        self.profiling_results[backend].append(perf)
        return True

    def benchmark_single_shape(
        self, args, kwargs=None, should_check_accuracy=True, setting: str = ""
    ):
        """Benchmark one input shape on every available backend (plus the
        optional custom-compile configuration), then optionally verify
        accuracy against eager."""
        for backend in self.available_backends:
            self.benchmark_single_shape_for_backend(backend, args, kwargs, setting)
        if (
            "compiled" in self.available_backends
            and self.script_args.custom_compile_options
        ):
            torch._dynamo.reset()  # cause recompile
            with torch._inductor.config.patch(self.script_args.custom_compile_options):
                status = self.benchmark_single_shape_for_backend(
                    self.script_args.custom_compile_name,
                    args,
                    kwargs,
                    setting,
                    fn=self.compiled,
                )
                if not status:
                    self.script_args.custom_compile_options = (
                        None  # once fail, don't run again
                    )
        if should_check_accuracy:
            self.check_accuracy(args, kwargs)

    def visualize(self) -> None:
        """Save the memory-bandwidth comparison chart for this kernel."""
        device_name = torch.cuda.get_device_name(0)
        visualize_comparison(
            self.profiling_results,
            title=f"{self.name} ({device_name})",
            output_path=f"{self.name}_bench",
        )
        return

    def report_geomean_speedup(self) -> None:
        """Print, per backend, the geometric-mean speedup over eager across
        the settings that backend completed."""
        print(f"Geomean speedup for benchmark {self.name}")
        eager_result = {
            result.setting: result for result in self.profiling_results["eager"]
        }
        print(f"  eager {len(eager_result)} data points")
        for backend, backend_result in self.profiling_results.items():
            if backend == "eager":
                continue
            speeduplist = []
            for result in backend_result:
                eager_latency = eager_result[result.setting].latency
                backend_latency = result.latency
                speeduplist.append(
                    eager_latency / backend_latency if backend_latency != 0 else 0.0
                )
            if len(speeduplist) > 0:
                print(
                    f"  {backend} {len(speeduplist)} data points, {gmean(speeduplist):.2f}x speedup"
                )
def get_backend_colors() -> dict[str, str]:
    """Map each known backend name to its plot color (hex RGB).

    The ``"default"`` entry is the fallback for unknown backends.
    """
    named_colors = [
        ("eager", "#1f77b4"),  # blue
        ("compiled", "#ff7f0e"),  # orange
        ("quack", "#2ca02c"),  # green
        ("liger", "#d62728"),  # red
        ("helion", "#9467bd"),  # purple
        ("triton", "#8c564b"),  # brown
        ("cutlass", "#e377c2"),  # pink
        ("flash_attn", "#7f7f7f"),  # gray
        ("default", "#000000"),  # black
    ]
    return dict(named_colors)
def visualize_comparison(
    profiling_results: dict[str, list[Performance]],
    title: Optional[str] = None,
    output_path: Optional[str] = None,
) -> None:
    """
    Create a single memory_bandwidth comparison plot from profiling results.

    Args:
        profiling_results: Dict mapping backend names to lists of Performance objects
        title: Chart title; defaults to "Memory Bandwidth Comparison"
        output_path: Base file name (without extension) to save the plot under
            ``pics/`` (optional)
    """
    # Get backend colors
    backend_colors = get_backend_colors()

    # Extract settings from eager backend which runs all settings
    all_settings = []
    for perf in profiling_results["eager"]:
        all_settings.append(perf.setting)

    # Create single plot
    fig, ax = plt.subplots(1, 1, figsize=(12, 8))

    for backend in profiling_results:
        backend_perfs = profiling_results[backend]
        perf_dict = {perf.setting: perf for perf in backend_perfs}
        # x positions follow the eager setting order so curves line up even
        # when a backend is missing some settings.
        x_vals = []
        y_vals = []
        for i, setting in enumerate(all_settings):
            if setting in perf_dict:
                x_vals.append(i)
                y_vals.append(perf_dict[setting].memory_bandwidth)
        if x_vals:  # Only plot if we have data
            color = backend_colors.get(backend, backend_colors["default"])
            ax.plot(
                x_vals,
                y_vals,
                "o-",
                label=backend,
                color=color,
                linewidth=2,
                markersize=8,
                alpha=0.8,
            )

    # Configure the plot
    ax.set_title(title or "Memory Bandwidth Comparison", fontsize=16)
    ax.set_xlabel("Shape", fontsize=12)
    ax.set_ylabel("memory bandwidth (GB/s)", fontsize=12)
    ax.set_xticks(range(len(all_settings)))
    ax.set_xticklabels(
        [
            s.replace("shape: ", "").replace("[", "").replace("]", "")
            for s in all_settings
        ],
        rotation=45,
        ha="right",
    )
    ax.legend(fontsize=10)
    ax.grid(True, alpha=0.3)
    plt.tight_layout()

    # Save the plot if output path is provided
    if output_path:
        # Save as PNG
        os.makedirs("pics", exist_ok=True)
        full_path = os.path.join("pics", output_path + ".png")
        plt.savefig(full_path, dpi=300, bbox_inches="tight", facecolor="white")
        print(f"Chart saved to {full_path}")
    plt.close()
# -*- coding: utf-8 -*-
from core import httptools
from core import scrapertools
from platformcode import logger
def test_video_exists(page_url):
    """Check whether the video at *page_url* is still available on the host.

    Side effects: sets the module globals ``server`` (host name) and ``vid``
    (numeric video id) that get_video_url() relies on.

    Returns (True, "") when available, otherwise (False, user message).
    """
    logger.info()
    global server, vid
    # Fix: the original pattern used [A-z], whose ASCII range also matches the
    # punctuation characters [ \ ] ^ _ ` between 'Z' and 'a'; the dots were
    # also unescaped and would match any character.
    server = scrapertools.find_single_match(page_url, r'www\.([A-Za-z0-9-]+)\.com')
    vid = scrapertools.find_single_match(page_url, '(?:embed|video)/([0-9]+)')
    data = httptools.downloadpage(page_url).data
    if "File was deleted" in data or "not Found" in data:
        return False, "[%s] El video ha sido borrado o no existe" % server
    return True, ""
def get_video_url(page_url, video_password):
    """Resolve the playable stream URLs for *page_url*.

    Relies on the module globals ``server`` and ``vid`` previously set by
    test_video_exists(). Returns a list of ``[label, url]`` pairs, one per
    available quality ("l" -> 360p, "h" -> 720p).
    """
    logger.info("(page_url='%s')" % page_url)
    config_url = (
        "https://www.%s.com/player_config_json/"
        "?vid=%s&aid=0&domain_id=0&embed=0&ref=null&check_speed=0" % (server, vid)
    )
    data = httptools.downloadpage(config_url).data
    files_json = scrapertools.find_single_match(data, '"files":(.*?)"quality"')
    quality_names = {"l": "360p", "h": "720p"}
    video_urls = []
    for quality_code, raw_url in scrapertools.find_multiple_matches(
            files_json, '"([lh])q":"([^"]+)"'):
        clean_url = raw_url.replace("\\/", "/")
        label = "[%s] %s" % (server, quality_names[quality_code])
        video_urls.append([label, clean_url])
    return video_urls
import datetime
from django.test import TestCase, override_settings
from django.utils import timezone
from .models import Article, Category, Comment
class DateTimesTests(TestCase):
    """Tests for QuerySet.datetimes(): truncation kinds, related-field
    traversal, ordering, laziness and field-type validation."""

    def test_related_model_traverse(self):
        """datetimes() can follow forward, reverse and m2m relations."""
        a1 = Article.objects.create(
            title="First one",
            pub_date=datetime.datetime(2005, 7, 28, 9, 0, 0),
        )
        a2 = Article.objects.create(
            title="Another one",
            pub_date=datetime.datetime(2010, 7, 28, 10, 0, 0),
        )
        a3 = Article.objects.create(
            title="Third one, in the first day",
            pub_date=datetime.datetime(2005, 7, 28, 17, 0, 0),
        )
        a1.comments.create(
            text="Im the HULK!",
            pub_date=datetime.datetime(2005, 7, 28, 9, 30, 0),
        )
        a1.comments.create(
            text="HULK SMASH!",
            pub_date=datetime.datetime(2005, 7, 29, 1, 30, 0),
        )
        a2.comments.create(
            text="LMAO",
            pub_date=datetime.datetime(2010, 7, 28, 10, 10, 10),
        )
        a3.comments.create(
            text="+1",
            pub_date=datetime.datetime(2005, 8, 29, 10, 10, 10),
        )
        c = Category.objects.create(name="serious-news")
        c.articles.add(a1, a3)
        self.assertSequenceEqual(
            Comment.objects.datetimes("article__pub_date", "year"), [
                datetime.datetime(2005, 1, 1),
                datetime.datetime(2010, 1, 1),
            ],
        )
        self.assertSequenceEqual(
            Comment.objects.datetimes("article__pub_date", "month"), [
                datetime.datetime(2005, 7, 1),
                datetime.datetime(2010, 7, 1),
            ],
        )
        self.assertSequenceEqual(
            Comment.objects.datetimes("article__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2010, 7, 28),
            ],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("comments__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2005, 7, 29),
                datetime.datetime(2005, 8, 29),
                datetime.datetime(2010, 7, 28),
            ],
        )
        # approval_date is NULL everywhere, so no datetimes come back.
        self.assertQuerysetEqual(
            Article.objects.datetimes("comments__approval_date", "day"), []
        )
        self.assertSequenceEqual(
            Category.objects.datetimes("articles__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
            ],
        )

    @override_settings(USE_TZ=True)
    def test_21432(self):
        """Regression for #21432: 'second' truncation must round-trip an
        aware datetime unchanged."""
        now = timezone.localtime(timezone.now().replace(microsecond=0))
        Article.objects.create(title="First one", pub_date=now)
        qs = Article.objects.datetimes('pub_date', 'second')
        self.assertEqual(qs[0], now)

    def test_datetimes_returns_available_dates_for_given_scope_and_given_field(self):
        """Each truncation kind collapses the stored datetimes accordingly;
        ordering is ascending by default and reversible."""
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15)]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title='title #{}'.format(i)).save()
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'year'),
            ["datetime.datetime(2005, 1, 1, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'month'),
            ["datetime.datetime(2005, 7, 1, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day'),
            ["datetime.datetime(2005, 7, 28, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 31, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day', order='ASC'),
            ["datetime.datetime(2005, 7, 28, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 31, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day', order='DESC'),
            ["datetime.datetime(2005, 7, 31, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 28, 0, 0)"])

    def test_datetimes_has_lazy_iterator(self):
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15)]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title='title #{}'.format(i)).save()
        # Use iterator() with datetimes() to return a generator that lazily
        # requests each result one at a time, to save memory.
        dates = []
        with self.assertNumQueries(0):
            article_datetimes_iterator = Article.objects.datetimes('pub_date', 'day', order='DESC').iterator()
        with self.assertNumQueries(1):
            for article in article_datetimes_iterator:
                dates.append(article)
        self.assertEqual(dates, [
            datetime.datetime(2005, 7, 31, 0, 0),
            datetime.datetime(2005, 7, 30, 0, 0),
            datetime.datetime(2005, 7, 29, 0, 0),
            datetime.datetime(2005, 7, 28, 0, 0)])

    def test_datetimes_disallows_date_fields(self):
        """A DateField cannot be truncated with a datetime kind."""
        dt = datetime.datetime(2005, 7, 28, 12, 15)
        Article.objects.create(pub_date=dt, published_on=dt.date(), title="Don't put dates into datetime functions!")
        with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'published_on' to DateTimeField"):
            list(Article.objects.datetimes('published_on', 'second'))
# -*- coding: utf-8 -*-
from openerp import models, fields, api, osv
# We just create a new model
class mother(models.Model):
    """Base model mixing an old-style (osv) column with new-API fields, to
    exercise field inheritance across both styles."""
    _name = 'test.inherit.mother'
    _columns = {
        # check interoperability of field inheritance with old-style fields
        'name': osv.fields.char('Name', required=True),
    }
    surname = fields.Char(compute='_compute_surname')
    state = fields.Selection([('a', 'A'), ('b', 'B')])
    @api.one
    @api.depends('name')
    def _compute_surname(self):
        # Default surname mirrors the name; extended in a later _inherit.
        self.surname = self.name or ''
# We inherit from the parent model (via _inherits delegation) and add some
# fields to the child object.
class daughter(models.Model):
    """Delegation-inherits from mother via _inherits and adds its own field."""
    _name = 'test.inherit.daughter'
    _inherits = {'test.inherit.mother': 'template_id'}
    template_id = fields.Many2one('test.inherit.mother', 'Template',
                                  required=True, ondelete='cascade')
    field_in_daughter = fields.Char('Field1')
# We add a new field in the parent object. Because of a recent refactoring,
# this feature was broken.
# This test and these models try to show the bug and fix it.
class mother(models.Model):
    """Extension of test.inherit.mother: new field, field-option overrides and
    a compute override (regression coverage for adding fields via _inherit)."""
    _inherit = 'test.inherit.mother'
    field_in_mother = fields.Char()
    # extend the name field by adding a default value
    name = fields.Char(default='Unknown')
    # extend the selection of the state field
    state = fields.Selection(selection_add=[('c', 'C')])
    # override the computed field, and extend its dependencies
    @api.one
    @api.depends('field_in_mother')
    def _compute_surname(self):
        if self.field_in_mother:
            self.surname = self.field_in_mother
        else:
            super(mother, self)._compute_surname()
class mother(models.Model):
    """Second extension: selection_add must compose across multiple _inherit layers."""
    _inherit = 'test.inherit.mother'
    # extend again the selection of the state field
    state = fields.Selection(selection_add=[('d', 'D')])
class daughter(models.Model):
    """Redeclaring a field with no options must keep the original definition."""
    _inherit = 'test.inherit.daughter'
    # simply redeclare the field without adding any option
    template_id = fields.Many2one()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | unknown | codeparrot/codeparrot-clean | ||
# coding=utf-8
"""
Shells out to get the exim queue length
#### Dependencies
* /usr/sbin/exim
"""
import diamond.collector
import subprocess
import os
from diamond.collector import str_to_bool
class EximCollector(diamond.collector.Collector):
    """Collects the exim mail-queue length by shelling out to ``exim -bpc``."""

    def get_default_config_help(self):
        """Describe the collector-specific configuration keys."""
        config_help = super(EximCollector, self).get_default_config_help()
        config_help.update({
            'bin': 'The path to the exim binary',
            'use_sudo': 'Use sudo?',
            'sudo_cmd': 'Path to sudo',
            'sudo_user': 'User to sudo as',
        })
        return config_help

    def get_default_config(self):
        """
        Returns the default collector settings
        """
        config = super(EximCollector, self).get_default_config()
        config.update({
            'path': 'exim',
            'bin': '/usr/sbin/exim',
            'use_sudo': False,
            'sudo_cmd': '/usr/bin/sudo',
            'sudo_user': 'root',
        })
        return config

    def collect(self):
        """Run ``exim -bpc`` (optionally via sudo) and publish the queue size."""
        if not os.access(self.config['bin'], os.X_OK):
            return
        command = [self.config['bin'], '-bpc']
        if str_to_bool(self.config['use_sudo']):
            # Bug fix: the previous code did `[...].extend(command)`, which
            # returns None (extend mutates in place), so `command` became None
            # and collect() crashed whenever use_sudo was enabled.
            command = [
                self.config['sudo_cmd'],
                '-u',
                self.config['sudo_user'],
            ] + command
        queuesize = subprocess.Popen(
            command, stdout=subprocess.PIPE).communicate()[0].split()
        if not queuesize:
            return
        # `exim -bpc` prints a single number; take the last token defensively.
        queuesize = queuesize[-1]
        self.publish('queuesize', queuesize)
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Shim to run nacl toolchain download script only if there is a nacl dir."""
import os
import shutil
import sys
def Main(args):
  """Download the NaCl toolchains via native_client's package_version tool.

  Exits/returns early (successfully) when NaCl is disabled via GYP_DEFINES
  or the native_client checkout is absent.  Returns 0 on success.
  """
  # Exit early if disable_nacl=1.
  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
    return 0
  script_dir = os.path.dirname(os.path.abspath(__file__))
  src_dir = os.path.dirname(script_dir)
  nacl_dir = os.path.join(src_dir, 'native_client')
  nacl_build_dir = os.path.join(nacl_dir, 'build')
  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
  package_version = os.path.join(package_version_dir, 'package_version.py')
  if not os.path.exists(package_version):
    print "Can't find '%s'" % package_version
    print 'Presumably you are intentionally building without NativeClient.'
    print 'Skipping NativeClient toolchain download.'
    sys.exit(0)
  sys.path.insert(0, package_version_dir)
  # NOTE: this import rebinds the local name `package_version` from the
  # path string above to the imported module.
  import package_version

  # BUG:
  # We remove this --optional-pnacl argument, and instead replace it with
  # --no-pnacl for most cases. However, if the bot name is an sdk
  # bot then we will go ahead and download it. This prevents increasing the
  # gclient sync time for developers, or standard Chrome bots.
  if '--optional-pnacl' in args:
    args.remove('--optional-pnacl')
    use_pnacl = False
    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
      use_pnacl = True
    if use_pnacl:
      print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
    else:
      args = ['--exclude', 'pnacl_newlib'] + args
  # Only download the ARM gcc toolchain if we are building for ARM
  # TODO(olonho): we need to invent more reliable way to get build
  # configuration info, to know if we're building for ARM.
  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
    args = ['--exclude', 'nacl_arm_newlib'] + args
  package_version.main(args)
  return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:])) | unknown | codeparrot/codeparrot-clean | ||
/**
* \file cipher_invasive.h
*
* \brief Cipher module: interfaces for invasive testing only.
*
* The interfaces in this file are intended for testing purposes only.
* They SHOULD NOT be made available in library integrations except when
* building the library for testing.
*/
/*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
*/
#ifndef MBEDTLS_CIPHER_INVASIVE_H
#define MBEDTLS_CIPHER_INVASIVE_H
#include "common.h"
#if defined(MBEDTLS_TEST_HOOKS) && defined(MBEDTLS_CIPHER_C)

/* NOTE(review): presumably validates the PKCS#7-style padding at the end
 * of \p input (total length \p input_len) and writes the unpadded payload
 * length to \p data_len, returning 0 on valid padding -- confirm against
 * the implementation in cipher.c before relying on this description. */
MBEDTLS_STATIC_TESTABLE int mbedtls_get_pkcs_padding(unsigned char *input,
                                                     size_t input_len,
                                                     size_t *data_len);

#endif
#endif /* MBEDTLS_CIPHER_INVASIVE_H */ | c | github | https://github.com/nodejs/node | deps/LIEF/third-party/mbedtls/library/cipher_invasive.h |
/*
* This file was automatically generated.
* DO NOT MODIFY BY HAND.
* Run `yarn fix:special` to update
*/
export interface ProfilingPluginOptions {
/**
* Path to the output file e.g. `path.resolve(__dirname, 'profiling/events.json')`. Defaults to `events.json`.
*/
outputPath?: string;
} | typescript | github | https://github.com/webpack/webpack | declarations/plugins/debug/ProfilingPlugin.d.ts |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2014 Romain Dorgueil
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import copy
from sqlalchemy import MetaData, Table
from rdc.etl.error import ProhibitedOperationError
from rdc.etl.hash import Hash
from rdc.etl.io import STDIN, INSERT, UPDATE, SELECT, STDERR
from rdc.etl.transform import Transform
from rdc.etl.util import now, cached_property
class DatabaseLoad(Transform):
    """Buffered database loader transform.

    Incoming hashes are buffered and flushed in batches inside a single
    transaction.  For each hash an existing row is looked up via the
    ``discriminant`` columns; depending on ``allowed_operations`` the row
    is then UPDATEd or a new one INSERTed.  Failing rows are re-emitted on
    STDERR together with the offending input and the error.

    TODO doc this !!! test this !!!!
    """

    # SQLAlchemy engine used to open the database connection.
    engine = None
    # Name of the target table.
    table_name = None
    # Mapping of output-hash keys to table columns re-read after the load
    # (e.g. to expose auto-generated primary keys downstream).
    fetch_columns = None
    # Columns written only on INSERT, never modified on UPDATE.
    insert_only_fields = ()
    # Columns used to decide whether a row already exists.
    discriminant = ('id', )
    # Timestamp columns maintained automatically when present on the table.
    created_at_field = 'created_at'
    updated_at_field = 'updated_at'
    # Subset of (INSERT, UPDATE) this transform is allowed to perform.
    allowed_operations = (INSERT, UPDATE, )

    def __init__(self, engine=None, table_name=None, fetch_columns=None, discriminant=None, created_at_field=None,
                 updated_at_field=None, insert_only_fields=None, allowed_operations=None):
        super(DatabaseLoad, self).__init__()

        # Constructor arguments override the class-level defaults.
        self.engine = engine or self.engine
        self.table_name = table_name or self.table_name

        # XXX should take self.fetch_columns into account if provided
        self.fetch_columns = {}
        if isinstance(fetch_columns, (list, tuple, )):
            self.add_fetch_column(*fetch_columns)
        elif isinstance(fetch_columns, dict):
            self.add_fetch_column(**fetch_columns)

        self.discriminant = discriminant or self.discriminant
        self.created_at_field = created_at_field or self.created_at_field
        self.updated_at_field = updated_at_field or self.updated_at_field
        self.insert_only_fields = insert_only_fields or self.insert_only_fields
        self.allowed_operations = allowed_operations or self.allowed_operations

        # Hashes waiting to be written to the database.
        self._buffer = []
        self._connection = None
        # Flush threshold for the buffer (see transform()).
        self._max_buffer_size = 1000
        self._last_duration = None
        self._last_commit_at = None
        self._query_count = 0

    @property
    def connection(self):
        # Lazily open (and cache) the connection on first use.
        if self._connection is None:
            self._connection = self.engine.connect()
        return self._connection

    def commit(self):
        # Flush the whole buffer inside one transaction.  A failing row
        # does not abort the batch: it is yielded on STDERR instead.
        with self.connection.begin():
            while len(self._buffer):
                hash = self._buffer.pop(0)
                try:
                    yield self.do_transform(copy(hash))
                except Exception as e:
                    yield Hash((
                        ('_input', hash, ),
                        ('_transform', self, ),
                        ('_error', e, ),
                    )), STDERR

    def close_connection(self):
        # Close and forget the cached connection.
        self._connection.close()
        self._connection = None

    def get_insert_columns_for(self, hash):
        """List of columns we can use for insert."""
        return self.columns

    def get_update_columns_for(self, hash, row):
        """List of columns we can use for update (insert-only excluded)."""
        return [
            column for column in self.columns
            if not column in self.insert_only_fields
        ]

    def get_columns_for(self, hash, row=None):
        """Retrieve list of table column names for which we have a value in given hash.

        ``row`` selects update semantics; None selects insert semantics.
        """
        if row:
            column_names = self.get_update_columns_for(hash, row)
        else:
            column_names = self.get_insert_columns_for(hash)
        return [key for key in hash if key in column_names]

    def find(self, dataset, connection=None):
        """Find an existing row matching ``dataset`` on the discriminant
        columns; returns the row, or None if absent."""
        query = '''SELECT * FROM {table} WHERE {criteria} LIMIT 1'''.format(
            table=self.table_name,
            criteria=' AND '.join([key_atom + ' = %s' for key_atom in self.discriminant]),
        )
        rp = (connection or self.connection).execute(query, [dataset.get(key_atom) for key_atom in self.discriminant])

        # Increment stats
        self._input._special_stats[SELECT] += 1

        return rp.fetchone()

    def initialize(self):
        super(DatabaseLoad, self).initialize()
        # Per-operation counters displayed alongside regular IO stats.
        self._input._special_stats[SELECT] = 0
        self._output._special_stats[INSERT] = 0
        self._output._special_stats[UPDATE] = 0

    def do_transform(self, hash):
        """Actual database load transformation logic, without the buffering / transaction logic.
        """
        # find line, if it exist
        row = self.find(hash)

        # NOTE: shadows the module-level ``now`` helper with its value for
        # the rest of this method.
        now = self.now
        column_names = self.table.columns.keys()

        # UpdatedAt field configured ? Let's set the value in source hash
        if self.updated_at_field in column_names:
            hash[self.updated_at_field] = now
        # Otherwise, make sure there is no such field
        else:
            if self.updated_at_field in hash:
                del hash[self.updated_at_field]

        # UPDATE
        if row:
            if not UPDATE in self.allowed_operations:
                raise ProhibitedOperationError('UPDATE operations are not allowed by this transformation.')

            _columns = self.get_columns_for(hash, row)

            # Discriminant columns go into WHERE, the rest into SET.
            query = '''UPDATE {table} SET {values} WHERE {criteria}'''.format(
                table=self.table_name,
                values=', '.join((
                    '{column} = %s'.format(column=_column) for _column in _columns
                    if not _column in self.discriminant
                )),
                criteria=' AND '.join((
                    '{key} = %s'.format(key=_key) for _key in self.discriminant
                ))
            )
            values = [hash[_column] for _column in _columns if not _column in self.discriminant] + \
                     [hash[_column] for _column in self.discriminant]
        # INSERT
        else:
            if not INSERT in self.allowed_operations:
                raise ProhibitedOperationError('INSERT operations are not allowed by this transformation.')

            # created_at is only ever written on insert.
            if self.created_at_field in column_names:
                hash[self.created_at_field] = now
            else:
                if self.created_at_field in hash:
                    del hash[self.created_at_field]

            _columns = self.get_columns_for(hash)
            query = '''INSERT INTO {table} ({keys}) VALUES ({values})'''.format(
                table=self.table_name,
                keys=', '.join(_columns),
                values=', '.join(['%s'] * len(_columns))
            )
            values = [hash[key] for key in _columns]

        # Execute
        self.connection.execute(query, values)

        # Increment stats
        if row:
            self._output._special_stats[UPDATE] += 1
        else:
            self._output._special_stats[INSERT] += 1

        # If user required us to fetch some columns, let's query again to get their actual values.
        if self.fetch_columns and len(self.fetch_columns):
            if not row:
                row = self.find(hash)
            if not row:
                raise ValueError('Could not find matching row after load.')

            # NOTE(review): dict.iteritems() is Python 2 only.
            for alias, column in self.fetch_columns.iteritems():
                hash[alias] = row[column]

        return hash

    def transform(self, hash, channel=STDIN):
        """Transform method. Stores the input in a buffer, and only unstack buffer content if some limit has been
        exceeded.

        TODO for now buffer limit is hardcoded as 1000, but we may use a few criterias to add intelligence to this:
        time since last commit, duration of last commit, buffer length ...
        """
        self._buffer.append(hash)
        if len(self._buffer) >= self._max_buffer_size:
            for _out in self.commit():
                yield _out

    def finalize(self):
        """Transform's finalize method.

        Empties the remaining lines in buffer by loading them into database and close database connection.
        """
        super(DatabaseLoad, self).finalize()
        for _out in self.commit():
            yield _out
        self.close_connection()

    def add_fetch_column(self, *columns, **aliased_columns):
        # Register columns to re-read after load; bare names map to
        # themselves, keyword arguments map alias -> column.
        self.fetch_columns.update(aliased_columns)
        for column in columns:
            self.fetch_columns[column] = column

    @cached_property
    def columns(self):
        # Column names of the target table (reflected once, then cached).
        return self.table.columns.keys()

    @cached_property
    def metadata(self):
        """SQLAlchemy metadata."""
        return MetaData()

    @cached_property
    def table(self):
        """SQLAlchemy table object, using metadata autoloading from database to avoid the need of column definitions."""
        return Table(self.table_name, self.metadata, autoload=True, autoload_with=self.engine)

    @property
    def now(self):
        """Current timestamp, used for created/updated at fields."""
        return now()
/*!
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.dev/license
*/
import {h, JSX} from 'preact';
import {InitializerApiFunctionRenderable} from '../entities/renderables.mjs';
import {API_REFERENCE_CONTAINER, REFERENCE_MEMBERS} from '../styling/css-classes.mjs';
import {getFunctionMetadataRenderable} from '../transforms/function-transforms.mjs';
import {signatureCard} from './function-reference';
import {HeaderApi} from './header-api';
import {SectionApi} from './section-api';
import {SectionUsageNotes} from './section-usage-notes';
/** Component to render an initializer API function reference document. */
export function InitializerApiFunction(entry: InitializerApiFunctionRenderable) {
  // Use signatures as header if there are multiple signatures, either on
  // the call function itself or on any sub-function.
  const printSignaturesAsHeader =
    entry.callFunction.signatures.length > 1 ||
    entry.subFunctions.some((sub) => sub.signatures.length > 1);

  // If the initializer API function is just a function, checked by existence of an
  // implementation, and the descriptions of the "API" and the first function match,
  // avoid rendering it another time in the member card.
  if (
    entry.callFunction.signatures.length === 1 &&
    entry.callFunction.implementation !== null &&
    entry.description === entry.callFunction.signatures[0].description
  ) {
    entry.callFunction.signatures[0].description = '';
  }

  return (
    <div className={API_REFERENCE_CONTAINER}>
      <HeaderApi entry={entry} showFullDescription={true} />
      <SectionApi entry={entry} />
      <div class={REFERENCE_MEMBERS}>
        {/* One signature card per overload of the call function. */}
        {entry.callFunction.signatures.map((s, i) =>
          signatureCard(s.name, getFunctionMetadataRenderable(s, entry.moduleName, entry.repo), {
            id: `${s.name}_${i}`,
            printSignaturesAsHeader,
          }),
        )}
        {/* Then cards for each sub-function, namespaced under the entry name. */}
        {entry.subFunctions.reduce(
          (elements, subFunction) => [
            ...elements,
            ...subFunction.signatures.map((s, i) =>
              signatureCard(
                `${entry.name}.${s.name}`,
                getFunctionMetadataRenderable(s, entry.moduleName, entry.repo),
                {
                  id: `${entry.name}_${s.name}_${i}`,
                  printSignaturesAsHeader,
                },
              ),
            ),
          ],
          [] as JSX.Element[],
        )}
      </div>
      <SectionUsageNotes entry={entry} />
    </div>
  );
}
# encoding: utf-8
"""A fancy version of Python's builtin :func:`dir` function.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def get_class_members(cls):
    """Collect dir() entries of *cls* and, recursively, of all its bases.

    Duplicates are not removed here; dir2() deduplicates via a set.
    """
    members = dir(cls)
    if hasattr(cls, '__bases__'):
        try:
            bases = cls.__bases__
        except AttributeError:
            # `cls` lied to hasattr (e.g. Pyro proxies); nothing to add.
            bases = ()
        for base in bases:
            members.extend(get_class_members(base))
    return members
def dir2(obj):
    """dir2(obj) -> list of strings

    Extended version of the Python builtin dir(), which does a few extra
    checks, and supports common objects with unusual internals that confuse
    dir(), such as Traits and PyCrust.

    This version is guaranteed to return only a list of true strings, whereas
    dir() returns anything that objects inject into themselves, even if they
    are later not really valid for attribute access (many extension libraries
    have such bugs).
    """
    # Start building the attribute list via dir(), and then complete it
    # with a few extra special-purpose calls.
    words = set(dir(obj))

    if hasattr(obj, '__class__'):
        #words.add('__class__')
        # Merge in attributes from the class and all its base classes.
        words |= set(get_class_members(obj.__class__))

    # for objects with Enthought's traits, add trait_names() list
    # for PyCrust-style, add _getAttributeNames() magic method list
    for attr in ('trait_names', '_getAttributeNames'):
        if hasattr(obj, attr):
            try:
                func = getattr(obj, attr)
                if callable(func):
                    words |= set(func())
            except:
                # TypeError: obj is class not instance
                pass

    # filter out non-string attributes which may be stuffed by dir() calls
    # and poor coding in third-party modules
    # NOTE(review): `basestring` is Python 2 only; under Python 3 this
    # raises NameError -- this module targets Python 2.
    words = [w for w in words if isinstance(w, basestring)]
    return sorted(words)
# ASN.1 named integers
from pyasn1 import error
__all__ = [ 'NamedValues' ]
class NamedValues:
    """Bidirectional mapping between symbolic names and integer values,
    as used for ASN.1 named numbers.

    Items may be given as ``(name, value)`` tuples or as bare names; bare
    names receive an automatic ordinal starting at 1.  The counter advances
    on every item, including explicit tuples.
    """

    def __init__(self, *namedValues):
        self.nameToValIdx = {}
        self.valToNameIdx = {}
        self.namedValues = ()
        nextAuto = 1
        for item in namedValues:
            if isinstance(item, tuple):
                name, val = item
            else:
                # Bare name: assign the next automatic ordinal.
                name, val = item, nextAuto
            if name in self.nameToValIdx:
                raise error.PyAsn1Error('Duplicate name %s' % (name,))
            self.nameToValIdx[name] = val
            if val in self.valToNameIdx:
                raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val))
            self.valToNameIdx[val] = name
            self.namedValues += ((name, val),)
            nextAuto += 1

    def __str__(self):
        return str(self.namedValues)

    def getName(self, value):
        # Implicitly returns None when the value is unknown.
        if value in self.valToNameIdx:
            return self.valToNameIdx[value]

    def getValue(self, name):
        # Implicitly returns None when the name is unknown.
        if name in self.nameToValIdx:
            return self.nameToValIdx[name]

    def __getitem__(self, i):
        return self.namedValues[i]

    def __len__(self):
        return len(self.namedValues)

    def __add__(self, namedValues):
        return self.__class__(*self.namedValues + namedValues)

    def __radd__(self, namedValues):
        return self.__class__(*namedValues + tuple(self))

    def clone(self, *namedValues):
        return self.__class__(*tuple(self) + namedValues)

    # XXX clone/subtype?
#
# script to read the version information from `../configure`
# relevant lines are starting with:
# `doxygen_version_major`
# `doxygen_version_minor`
# `doxygen_version_revision`
# `doxygen_version_mmn`
# the collected information is written to: `../VERSION` and `../src/version.cpp`
#
import sys
import os
#
# set 'default' values
#
# Default version components, used when `configure` does not define them.
# FIX: these were integers before, which made the .replace() calls below
# crash with AttributeError whenever a component was missing.
major = '0'
minor = '0'
revision = '0'
mnt = 'NO'

configure = '../configure'
if len(sys.argv) > 2:
    configure = sys.argv[2]

#
# open input file
# read file and get relevant information
# close (guaranteed by the `with` block, even on error)
#
with open(configure, 'r') as f:
    for line in f:
        # check if line can match (saves 3 comparisons)
        if line.startswith('doxygen_version'):
            if line.startswith('doxygen_version_major'):
                major = line.replace('doxygen_version_major=', '')
            elif line.startswith('doxygen_version_minor'):
                minor = line.replace('doxygen_version_minor=', '')
            elif line.startswith('doxygen_version_revision'):
                revision = line.replace('doxygen_version_revision=', '')
            elif line.startswith('doxygen_version_mmn'):
                mnt = line.replace('doxygen_version_mmn=', '')

# strip superfluous '\n'
major = major.replace('\n', '')
minor = minor.replace('\n', '')
revision = revision.replace('\n', '')
mnt = mnt.replace('\n', '')

# Build the version string once; a maintenance number (mmn) other than
# 'NO' is appended as a suffix.
version = major + '.' + minor + '.' + revision
if mnt != 'NO':
    version = version + '-' + mnt

#
# write relevant information to ../VERSION and <outdir>/version.cpp
#
with open('../VERSION', 'w') as f1:
    f1.write(version)
with open(os.path.join(sys.argv[1], 'version.cpp'), 'w') as f2:
    f2.write('char versionString[]="' + version + '";')
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
class AccountOperationTemplate(models.Model):
    _inherit = 'account.reconcile.model'

    @api.model
    def product_changed(self, product_id):
        """
        Helper to get the account and analytic account in reconcile view.
        :param product_id: id of the selected product (falsy when cleared)
        :return: dict from the base implementation, extended with
                 ``analytic_id`` when a product is set
        """
        res = super(AccountOperationTemplate, self).product_changed(product_id)
        if product_id:
            # Look up the analytic default configured for this product and
            # expose its analytic account to the reconcile view.
            analytic_id = self.env['account.analytic.default'].account_get(
                product_id).analytic_id.id
            res['analytic_id'] = analytic_id
        return res
{
"name": "@next/swc-linux-arm64-musl",
"version": "0.0.0",
"repository": {
"type": "git",
"url": "https://github.com/vercel/next.js",
"directory": "crates/next-napi-bindings/npm/linux-arm64-musl"
},
"os": [
"linux"
],
"cpu": [
"arm64"
],
"libc": [
"musl"
],
"main": "next-swc.linux-arm64-musl.node",
"files": [
"next-swc.linux-arm64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
} | json | github | https://github.com/vercel/next.js | crates/next-napi-bindings/npm/linux-arm64-musl/package.json |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
========================================================================
Gaussian Processes regression: goodness-of-fit on the 'diabetes' dataset
========================================================================
In this example, we fit a Gaussian Process model onto the diabetes
dataset.
We determine the correlation parameters with maximum likelihood
estimation (MLE). We use an anisotropic squared exponential
correlation model with a constant regression model. We also use a
nugget of 1e-2 to account for the (strong) noise in the targets.
We compute a cross-validation estimate of the coefficient of
determination (R2) without reperforming MLE, using the set of correlation
parameters found on the whole dataset.
"""
print(__doc__)

# Author: Vincent Dubourg <vincent.dubourg@gmail.com>
# Licence: BSD 3 clause

# NOTE(review): GaussianProcess and sklearn.cross_validation are
# long-removed APIs; this example targets an old scikit-learn release.
from sklearn import datasets
from sklearn.gaussian_process import GaussianProcess
from sklearn.cross_validation import cross_val_score, KFold

# Load the dataset from scikit's data sets
diabetes = datasets.load_diabetes()
X, y = diabetes.data, diabetes.target

# Instanciate a GP model
gp = GaussianProcess(regr='constant', corr='absolute_exponential',
                     theta0=[1e-4] * 10, thetaL=[1e-12] * 10,
                     thetaU=[1e-2] * 10, nugget=1e-2, optimizer='Welch')

# Fit the GP model to the data performing maximum likelihood estimation
gp.fit(X, y)

# Deactivate maximum likelihood estimation for the cross-validation loop
gp.theta0 = gp.theta_  # Given correlation parameter = MLE
gp.thetaL, gp.thetaU = None, None  # None bounds deactivate MLE

# Perform a cross-validation estimate of the coefficient of determination
# (n_jobs=1 keeps this single-process despite the original comment's
# claim of using all CPUs)
K = 20  # folds
R2 = cross_val_score(gp, X, y=y, cv=KFold(y.size, K), n_jobs=1).mean()
print("The %d-Folds estimate of the coefficient of determination is R2 = %s"
      % (K, R2))
#!/usr/bin/env python
import sys
from argparse import ArgumentParser
try:
import cProfile as profile
except:
import profile
from . import helpers
from . import TEST_DIR
helpers.setup_path()
import pbxproj
def get_argument_parser():
    """Build the command-line parser for the pbxproj profiling helper."""
    parser = ArgumentParser()
    parser.add_argument(
        "file",
        help="file to be parsed during the profiling")
    parser.add_argument(
        "-p", "--profile",
        default=None,
        help="store profile under this file path")
    parser.add_argument(
        "-r", "--runs",
        type=int,
        default=1,
        help="how often should we merge the file")
    return parser
def main():
    """Parse CLI arguments and profile `pbxproj.read` over the given file."""
    parser = get_argument_parser()
    args = parser.parse_args()
    # Re-parse the same file `runs` times under the profiler; results go
    # to --profile when given, otherwise the report prints to stdout.
    profile.runctx("for i in range(runs): pbxproj.read(file)",
                   globals={},
                   locals=dict(
                       pbxproj=pbxproj, file=args.file, runs=args.runs
                   ),
                   filename=args.profile)
if __name__ == "__main__":
main() | unknown | codeparrot/codeparrot-clean | ||
use crate::spec::{
Abi, Arch, Cc, FloatAbi, LinkerFlavor, Lld, SanitizerSet, Target, TargetMetadata,
TargetOptions, base,
};
// This target if is for the baseline of the Android v7a ABI
// in thumb mode. It's named armv7-* instead of thumbv7-*
// for historical reasons. See the thumbv7neon variant for
// enabling NEON.
// See https://developer.android.com/ndk/guides/abis.html#v7a
// for target ABI requirements.
/// Build the target spec for `armv7-linux-androideabi`: baseline Armv7-A
/// Android (Thumb mode, VFP3-D16, NEON disabled, soft-float calling
/// convention), matching the NDK's armeabi-v7a ABI requirements.
pub(crate) fn target() -> Target {
    let mut base = base::android::opts();
    // Make sure C code linked into this target is also built for Armv7-A.
    base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-march=armv7-a"]);
    Target {
        llvm_target: "armv7-none-linux-android".into(),
        metadata: TargetMetadata {
            description: Some("Armv7-A Android".into()),
            tier: Some(2),
            host_tools: Some(false),
            std: Some(true),
        },
        pointer_width: 32,
        data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(),
        arch: Arch::Arm,
        options: TargetOptions {
            abi: Abi::Eabi,
            llvm_floatabi: Some(FloatAbi::Soft),
            features: "+v7,+thumb-mode,+thumb2,+vfp3d16,-neon".into(),
            supported_sanitizers: SanitizerSet::ADDRESS,
            max_atomic_width: Some(64),
            ..base
        },
    }
}
from .stream import Stream
from .wrappers import StreamIOThreadWrapper
from ..compat import str
from ..exceptions import StreamError
from ..packages import pbs as sh
import os
import time
import tempfile
class StreamProcessIO(StreamIOThreadWrapper):
    """File-like wrapper around a subprocess' stdout that also kills the
    process when the stream is closed."""

    def __init__(self, session, process, **kwargs):
        self.process = process
        StreamIOThreadWrapper.__init__(self, session,
                                       process.stdout,
                                       **kwargs)

    def close(self):
        # Best effort: the process may already have exited.
        try:
            self.process.kill()
        except Exception:
            pass

        StreamIOThreadWrapper.close(self)
class StreamProcess(Stream):
    """Stream whose data is produced by an external command's stdout."""

    def __init__(self, session, params=None, timeout=60.0):
        # params: keyword arguments forwarded to the pbs/sh command.
        # timeout: read timeout handed to the IO wrapper.
        Stream.__init__(self, session)

        if not params:
            params = {}

        self.params = params
        self.errorlog = self.session.options.get("subprocess-errorlog")
        self.timeout = timeout

    def open(self):
        """Spawn the command and return a readable wrapper on its stdout.

        Raises StreamError when the process exits within 0.5s of launch.
        """
        cmd = self._check_cmd()
        params = self.params.copy()
        # Run in the background so stdout can be streamed.
        params["_bg"] = True

        if self.errorlog:
            # Keep stderr in a persistent temp file for later inspection.
            tmpfile = tempfile.NamedTemporaryFile(prefix="livestreamer",
                                                  suffix=".err", delete=False)
            params["_err"] = tmpfile
        else:
            params["_err"] = open(os.devnull, "wb")

        with params["_err"]:
            stream = cmd(**params)

        # Wait 0.5 seconds to see if program exited prematurely
        time.sleep(0.5)

        process_alive = stream.process.returncode is None

        if not process_alive:
            if self.errorlog:
                raise StreamError(("Error while executing subprocess, "
                                   "error output logged to: {0}").format(tmpfile.name))
            else:
                raise StreamError("Error while executing subprocess")

        return StreamProcessIO(self.session, stream.process,
                               timeout=self.timeout)

    def _check_cmd(self):
        # Resolve self.cmd (presumably set by subclasses -- not defined in
        # this class) to an executable command, or raise StreamError.
        try:
            cmd = sh.create_command(self.cmd)
        except sh.CommandNotFound as err:
            raise StreamError("Unable to find {0} command".format(err))

        return cmd

    def cmdline(self):
        # Human-readable command line, for diagnostics.
        cmd = self._check_cmd()

        return str(cmd.bake(**self.params))

    @classmethod
    def is_usable(cls, cmd):
        # True when `cmd` resolves to an available executable.
        try:
            cmd = sh.create_command(cmd)
        except sh.CommandNotFound:
            return False

        return True
__all__ = ["StreamProcess"] | unknown | codeparrot/codeparrot-clean | ||
"""Utility for loading all modules within a package."""
import importlib
import pkgutil
def load_all_modules(name, path):
    """Dynamically load all modules in the 'name' package.

    This function is useful in combination with the registry.py module
    so that any classes declared within the package are automatically
    registered.

    The following is the intended usage within the __init__.py file for
    a package:

        from utils import autoloader as _autoloader
        _autoloader.load_all_modules(name=__name__, path=__path__)
    """
    submodules = (mod_name for _, mod_name, _ in pkgutil.walk_packages(path=path))
    for mod_name in submodules:
        importlib.import_module("." + mod_name, package=name)
import numpy as np
from copy import deepcopy
from sklearn.base import BaseEstimator, TransformerMixin
def epoch_data(data, events, stim_ID, tmin=-.2, tmax=0.399):
    """Cut continuous data into fixed-length epochs around event onsets.

    An onset is a sample where `events` rises from 0 to a positive value.
    Offsets are derived as int(tmin*1000) / int(tmax*1000) samples, i.e.
    the recording is assumed to be sampled at 1 kHz.  Windows truncated at
    the recording edge are left zero-padded at the end.

    Returns (epochs, labels, stim_ids) where labels flag events > 50.
    """
    onsets = np.where(np.diff(np.int32(events > 0)) == 1)[0] + 1
    start_offset = int(tmin * 1000)
    stop_offset = int(tmax * 1000)
    n_samples = stop_offset - start_offset

    epochs = np.zeros((len(onsets), data.shape[0], n_samples))
    labels = np.int32(events[onsets] > 50)
    stim_ids = np.int32(stim_ID[onsets])

    for epoch_ix, onset in enumerate(onsets):
        window = data[:, slice(onset + start_offset, onset + stop_offset)]
        epochs[epoch_ix, :, 0:window.shape[1]] = window
    return epochs, labels, stim_ids
class DownSampler(BaseEstimator, TransformerMixin):
    """Downsample transformer: keep every `factor`-th time sample."""

    def __init__(self, factor=4):
        """Init.

        factor: decimation step applied along the last (time) axis.
        """
        self.factor = factor

    def fit(self, X, y):
        # Stateless: nothing to learn.
        return self

    def transform(self, X):
        # Keep every `factor`-th sample along the last axis.  No
        # anti-aliasing filter is applied (plain slicing only).
        return X[:, :, ::self.factor]
class EpochsVectorizer(BaseEstimator, TransformerMixin):
    """Vectorize epochs: flatten each epoch into a 1-D feature vector."""

    def __init__(self):
        """Init."""

    def fit(self, X, y):
        # Stateless transformer.
        return self

    def transform(self, X):
        # One flattened row per epoch.
        X2 = np.array([x.flatten() for x in X])
        return X2
class CospBoostingClassifier(BaseEstimator, TransformerMixin):
    """Cospectral matrice bagging.

    Fits one deep copy of `baseclf` per trailing-axis slice X[..., i];
    predict_proba averages the per-slice probabilities (bagging), while
    transform concatenates them as features.
    """

    def __init__(self, baseclf):
        """Init.

        baseclf: estimator copied (deepcopy) for each cospectral slice.
        """
        self.baseclf = baseclf

    def fit(self, X, y):
        # One independent classifier per last-axis slice.
        self.clfs_ = []
        for i in range(X.shape[-1]):
            clf = deepcopy(self.baseclf)
            self.clfs_.append(clf.fit(X[:, :, :, i], y))
        return self

    def predict_proba(self, X):
        # Average the per-slice probability estimates.
        proba = []
        for i in range(X.shape[-1]):
            proba.append(self.clfs_[i].predict_proba(X[:, :, :, i]))
        proba = np.mean(proba, axis=0)
        return proba

    def transform(self, X):
        # Concatenate per-slice probabilities column-wise as features.
        proba = []
        for i in range(X.shape[-1]):
            proba.append(self.clfs_[i].predict_proba(X[:, :, :, i]))
        proba = np.concatenate(proba, 1)
        return proba
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from unittest import mock
import pytest
from airflow.models.taskmap import TaskMap, TaskMapVariant
from airflow.providers.standard.operators.empty import EmptyOperator
from tests_common.test_utils.taskinstance import create_task_instance
pytestmark = pytest.mark.db_test
def test_task_map_from_task_instance_xcom():
    """from_task_instance_xcom copies dag/task/run/map-index from the TI
    and records the length and keys of a dict XCom value; a TI without a
    run_id is rejected."""
    task = EmptyOperator(task_id="test_task")
    ti = create_task_instance(task=task, run_id="test_run", map_index=0, dag_version_id=mock.MagicMock())
    ti.dag_id = "test_dag"
    value = {"key1": "value1", "key2": "value2"}

    # Test case where run_id is not None
    task_map = TaskMap.from_task_instance_xcom(ti, value)
    assert task_map.dag_id == ti.dag_id
    assert task_map.task_id == ti.task_id
    assert task_map.run_id == ti.run_id
    assert task_map.map_index == ti.map_index
    assert task_map.length == len(value)
    assert task_map.keys == list(value)

    # Test case where run_id is None
    ti.run_id = None
    with pytest.raises(ValueError, match="cannot record task map for unrun task instance"):
        TaskMap.from_task_instance_xcom(ti, value)
def test_task_map_with_invalid_task_instance():
    """A TI created with run_id=None cannot be recorded in a TaskMap."""
    task = EmptyOperator(task_id="test_task")
    ti = create_task_instance(task=task, run_id=None, map_index=0, dag_version_id=mock.MagicMock())
    ti.dag_id = "test_dag"

    # Define some arbitrary XCom-like value data
    value = {"example_key": "example_value"}

    with pytest.raises(ValueError, match="cannot record task map for unrun task instance"):
        TaskMap.from_task_instance_xcom(ti, value)
def test_task_map_variant():
    """variant is LIST when no keys were recorded, DICT otherwise."""
    # Test case where keys is None
    task_map = TaskMap(
        dag_id="test_dag",
        task_id="test_task",
        run_id="test_run",
        map_index=0,
        length=3,
        keys=None,
    )
    assert task_map.variant == TaskMapVariant.LIST

    # Test case where keys is not None
    task_map.keys = ["key1", "key2"]
    assert task_map.variant == TaskMapVariant.DICT
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build dragonfly || freebsd || linux || netbsd || openbsd || solaris
package runtime
// Pipe creates a pipe through the runtime's pipe2 wrapper (flags=0) and
// returns the read/write file descriptors plus an errno value, for use
// by runtime tests.
func Pipe() (r, w int32, errno int32) {
	return pipe2(0)
}
"""Some generic utilties."""
import keyring
import getpass
import re
UN = "everpy"
def refresh_token():
    """Prompt for a new password, store it in the keyring, and return it.

    Returns:
        str: The token just stored under the ``everpy`` service/user.
    """
    # Fixed typo in the user-facing prompt (was "Set a new a token").
    print("Set a new token")
    keyring.set_password(UN, UN, getpass.getpass("Password: "))
    return keyring.get_password(UN, UN)
def get_token():
    """Return the stored token, prompting the user for one if none exists."""
    # An empty/missing keyring entry is falsy, triggering a refresh.
    return keyring.get_password(UN, UN) or refresh_token()
def get_template_tokens(content):
    """Parse template placeholders of the form ``${id}`` or ``${id:name}``.

    Args:
        content (str): Template text to scan for placeholders.

    Returns:
        dict: Maps each token id (str) to ``{"name": <name or None>, "val": None}``.
    """
    # Bug fix: the passed-in ``content`` was previously discarded and replaced
    # by the contents of Templates/simple_sections.txt (leftover debug code);
    # the unused enumerate() counter is gone as well.
    regex = r"\$\{(\d+.*?)\}"
    tokens = {}
    for match in re.finditer(regex, content):
        tok = match.group(1)
        parts = tok.split(":")
        # Only a single ':' separates id from name; anything else keeps the
        # whole token as the id (matches the original branch logic).
        if len(parts) == 2:
            tok_id, tok_name = parts
        else:
            tok_id, tok_name = tok, None
        tokens[tok_id] = {"name": tok_name, "val": None}
    return tokens
if __name__ == '__main__':
    # Manual smoke test: fetch the stored token and display it.
    print(get_token())
# SPDX-License-Identifier: (GPL-2.0 OR BSD-2-Clause)
%YAML 1.2
---
$id: http://devicetree.org/schemas/display/atmel,lcdc-display.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Microchip's LCDC Display
maintainers:
- Nicolas Ferre <nicolas.ferre@microchip.com>
- Dharma Balasubiramani <dharma.b@microchip.com>
description:
The LCD Controller (LCDC) consists of logic for transferring LCD image data
from an external display buffer to a TFT LCD panel. The LCDC has one display
input buffer per layer that fetches pixels through the single bus host
  interface and a look-up table to allow palettized display configurations. The
LCDC is programmable on a per layer basis, and supports different LCD
resolutions, window sizes, image formats and pixel depths.
# We need a select here since this schema is applicable only for nodes with the
# following properties
select:
anyOf:
- required: [ 'atmel,dmacon' ]
- required: [ 'atmel,lcdcon2' ]
- required: [ 'atmel,guard-time' ]
properties:
atmel,dmacon:
$ref: /schemas/types.yaml#/definitions/uint32
description: dma controller configuration
atmel,lcdcon2:
$ref: /schemas/types.yaml#/definitions/uint32
description: lcd controller configuration
atmel,guard-time:
$ref: /schemas/types.yaml#/definitions/uint32
description: lcd guard time (Delay in frame periods)
maximum: 127
bits-per-pixel:
$ref: /schemas/types.yaml#/definitions/uint32
description: lcd panel bit-depth.
enum: [1, 2, 4, 8, 16, 24, 32]
atmel,lcdcon-backlight:
$ref: /schemas/types.yaml#/definitions/flag
description: enable backlight
atmel,lcdcon-backlight-inverted:
$ref: /schemas/types.yaml#/definitions/flag
description: invert backlight PWM polarity
atmel,lcd-wiring-mode:
$ref: /schemas/types.yaml#/definitions/string
description: lcd wiring mode "RGB" or "BRG"
enum:
- RGB
- BRG
atmel,power-control-gpio:
description: gpio to power on or off the LCD (as many as needed)
maxItems: 1
display-timings:
$ref: panel/display-timings.yaml#
required:
- atmel,dmacon
- atmel,lcdcon2
- atmel,guard-time
- bits-per-pixel
additionalProperties: false
examples:
- |
display: panel {
bits-per-pixel = <32>;
atmel,lcdcon-backlight;
atmel,dmacon = <0x1>;
atmel,lcdcon2 = <0x80008002>;
atmel,guard-time = <9>;
atmel,lcd-wiring-mode = "RGB";
display-timings {
native-mode = <&timing0>;
timing0: timing0 {
clock-frequency = <9000000>;
hactive = <480>;
vactive = <272>;
hback-porch = <1>;
hfront-porch = <1>;
vback-porch = <40>;
vfront-porch = <1>;
hsync-len = <45>;
vsync-len = <1>;
};
};
}; | unknown | github | https://github.com/torvalds/linux | Documentation/devicetree/bindings/display/atmel,lcdc-display.yaml |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: acme_certificate_revoke
author: "Felix Fontein (@felixfontein)"
version_added: "2.7"
short_description: Revoke certificates with the ACME protocol
description:
- "Allows to revoke certificates issued by a CA supporting the
L(ACME protocol,https://tools.ietf.org/html/rfc8555),
such as L(Let's Encrypt,https://letsencrypt.org/)."
notes:
- "Exactly one of C(account_key_src), C(account_key_content),
C(private_key_src) or C(private_key_content) must be specified."
- "Trying to revoke an already revoked certificate
should result in an unchanged status, even if the revocation reason
was different than the one specified here. Also, depending on the
server, it can happen that some other error is returned if the
certificate has already been revoked."
seealso:
- name: The Let's Encrypt documentation
description: Documentation for the Let's Encrypt Certification Authority.
Provides useful information for example on rate limits.
link: https://letsencrypt.org/docs/
- name: Automatic Certificate Management Environment (ACME)
description: The specification of the ACME protocol (RFC 8555).
link: https://tools.ietf.org/html/rfc8555
- module: acme_inspect
description: Allows to debug problems.
extends_documentation_fragment:
- acme
options:
certificate:
description:
- "Path to the certificate to revoke."
type: path
required: yes
account_key_src:
description:
- "Path to a file containing the ACME account RSA or Elliptic Curve
key."
- "RSA keys can be created with C(openssl rsa ...). Elliptic curve keys can
be created with C(openssl ecparam -genkey ...). Any other tool creating
private keys in PEM format can be used as well."
- "Mutually exclusive with C(account_key_content)."
- "Required if C(account_key_content) is not used."
type: path
account_key_content:
description:
- "Content of the ACME account RSA or Elliptic Curve key."
- "Note that exactly one of C(account_key_src), C(account_key_content),
C(private_key_src) or C(private_key_content) must be specified."
- "I(Warning): the content will be written into a temporary file, which will
be deleted by Ansible when the module completes. Since this is an
important private key — it can be used to change the account key,
or to revoke your certificates without knowing their private keys
—, this might not be acceptable."
- "In case C(cryptography) is used, the content is not written into a
temporary file. It can still happen that it is written to disk by
Ansible in the process of moving the module with its argument to
the node where it is executed."
type: str
private_key_src:
description:
- "Path to the certificate's private key."
- "Note that exactly one of C(account_key_src), C(account_key_content),
C(private_key_src) or C(private_key_content) must be specified."
type: path
private_key_content:
description:
- "Content of the certificate's private key."
- "Note that exactly one of C(account_key_src), C(account_key_content),
C(private_key_src) or C(private_key_content) must be specified."
- "I(Warning): the content will be written into a temporary file, which will
be deleted by Ansible when the module completes. Since this is an
important private key — it can be used to change the account key,
or to revoke your certificates without knowing their private keys
—, this might not be acceptable."
- "In case C(cryptography) is used, the content is not written into a
temporary file. It can still happen that it is written to disk by
Ansible in the process of moving the module with its argument to
the node where it is executed."
type: str
revoke_reason:
description:
- "One of the revocation reasonCodes defined in
L(Section 5.3.1 of RFC5280,https://tools.ietf.org/html/rfc5280#section-5.3.1)."
- "Possible values are C(0) (unspecified), C(1) (keyCompromise),
C(2) (cACompromise), C(3) (affiliationChanged), C(4) (superseded),
C(5) (cessationOfOperation), C(6) (certificateHold),
C(8) (removeFromCRL), C(9) (privilegeWithdrawn),
C(10) (aACompromise)"
type: int
'''
EXAMPLES = '''
- name: Revoke certificate with account key
acme_certificate_revoke:
account_key_src: /etc/pki/cert/private/account.key
certificate: /etc/httpd/ssl/sample.com.crt
- name: Revoke certificate with certificate's private key
acme_certificate_revoke:
private_key_src: /etc/httpd/ssl/sample.com.key
certificate: /etc/httpd/ssl/sample.com.crt
'''
RETURN = '''
'''
from ansible.module_utils.acme import (
ModuleFailException,
ACMEAccount,
nopad_b64,
pem_to_der,
handle_standard_module_arguments,
get_default_argspec,
)
from ansible.module_utils.basic import AnsibleModule
def main():
    """Entry point: revoke a certificate via the ACME protocol.

    The revocation request is signed either with the certificate's own
    private key (if one is supplied) or with the ACME account key.
    Exits via module.exit_json / module.fail_json.
    """
    argument_spec = get_default_argspec()
    argument_spec.update(dict(
        private_key_src=dict(type='path'),
        private_key_content=dict(type='str', no_log=True),
        certificate=dict(type='path', required=True),
        revoke_reason=dict(type='int'),
    ))
    # Exactly one key source must be given: account key or certificate key,
    # each as either a path or inline content.
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_one_of=(
            ['account_key_src', 'account_key_content', 'private_key_src', 'private_key_content'],
        ),
        mutually_exclusive=(
            ['account_key_src', 'account_key_content', 'private_key_src', 'private_key_content'],
        ),
        supports_check_mode=False,
    )
    handle_standard_module_arguments(module)
    try:
        account = ACMEAccount(module)
        # Load certificate and encode it as unpadded URL-safe base64 DER,
        # the representation the ACME revocation payload expects.
        certificate = pem_to_der(module.params.get('certificate'))
        certificate = nopad_b64(certificate)
        # Construct payload
        payload = {
            'certificate': certificate
        }
        if module.params.get('revoke_reason') is not None:
            payload['reason'] = module.params.get('revoke_reason')
        # Determine endpoint; the directory key differs between ACME v1
        # ('revoke-cert') and v2 / RFC 8555 ('revokeCert').
        if module.params.get('acme_version') == 1:
            endpoint = account.directory['revoke-cert']
            payload['resource'] = 'revoke-cert'
        else:
            endpoint = account.directory['revokeCert']
        # Get hold of private key (if available) and make sure it comes from disk
        private_key = module.params.get('private_key_src')
        private_key_content = module.params.get('private_key_content')
        # Revoke certificate
        if private_key or private_key_content:
            # Step 1: load and parse private key
            error, private_key_data = account.parse_key(private_key, private_key_content)
            if error:
                raise ModuleFailException("error while parsing private key: %s" % error)
            # Step 2: sign revocation request with the certificate's private key
            jws_header = {
                "alg": private_key_data['alg'],
                "jwk": private_key_data['jwk'],
            }
            result, info = account.send_signed_request(endpoint, payload, key_data=private_key_data, jws_header=jws_header)
        else:
            # Step 1: get hold of account URI (never create a new account here)
            created, account_data = account.setup_account(allow_creation=False)
            if created:
                raise AssertionError('Unwanted account creation')
            if account_data is None:
                raise ModuleFailException(msg='Account does not exist or is deactivated.')
            # Step 2: sign revocation request with account key
            result, info = account.send_signed_request(endpoint, payload)
        if info['status'] != 200:
            # A non-200 status may still mean "already revoked", which this
            # module treats as success with changed=False.
            already_revoked = False
            # Standardized error from draft 14 on (https://tools.ietf.org/html/rfc8555#section-7.6)
            if result.get('type') == 'urn:ietf:params:acme:error:alreadyRevoked':
                already_revoked = True
            else:
                # Hack for Boulder errors
                if module.params.get('acme_version') == 1:
                    error_type = 'urn:acme:error:malformed'
                else:
                    error_type = 'urn:ietf:params:acme:error:malformed'
                if result.get('type') == error_type and result.get('detail') == 'Certificate already revoked':
                    # Fallback: boulder returns this in case the certificate was already revoked.
                    already_revoked = True
            # If we know the certificate was already revoked, we don't fail,
            # but successfully terminate while indicating no change
            if already_revoked:
                module.exit_json(changed=False)
            raise ModuleFailException('Error revoking certificate: {0} {1}'.format(info['status'], result))
        module.exit_json(changed=True)
    except ModuleFailException as e:
        e.do_fail(module)
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015, 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
from __future__ import print_function
import os
import sphinx.environment
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Do not warn on external images.
suppress_warnings = ['image.nonlocal_uri']
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Invenio-Communities'
copyright = u'2015, 2016, CERN'
author = u'CERN'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Get the version string. Cannot be done with import!
# Execute version.py in a scratch namespace so the version string can be
# read without importing the package (which may not be installed when the
# docs are built).
g = {}
with open(os.path.join('..', 'invenio_communities', 'version.py'), 'rt') as fp:
    exec(fp.read(), g)  # defines g['__version__']
version = g['__version__']
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
html_theme = 'alabaster'
html_theme_options = {
'description': 'Invenio module that adds support for communities.',
'github_user': 'inveniosoftware',
'github_repo': 'invenio-communities',
'github_button': False,
'github_banner': True,
'show_powered_by': False,
'extra_nav_links': {
'invenio-communities@GitHub': 'https://github.com/inveniosoftware/invenio-communities',
'invenio-communities@PyPI': 'https://pypi.python.org/pypi/invenio-communities/',
}
}
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'invenio-communities_namedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'invenio-communities.tex', u'invenio-communities Documentation',
u'CERN', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'invenio-communities', u'invenio-communities Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'invenio-communities', u'Invenio-Communities Documentation',
author, 'invenio-communities', 'Invenio module that adds support for communities.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# Autodoc configuraton.
autoclass_content = 'both' | unknown | codeparrot/codeparrot-clean | ||
"""Review Board e-mail module."""
from __future__ import absolute_import, unicode_literals
from django.db.models.signals import post_delete
from djblets.auth.signals import user_registered
from reviewboard.notifications.email.signal_handlers import (
send_reply_published_mail,
send_review_published_mail,
send_review_request_closed_mail,
send_review_request_published_mail,
send_user_registered_mail,
send_webapi_token_created_mail,
send_webapi_token_deleted_mail,
send_webapi_token_updated_mail)
from reviewboard.notifications.email.hooks import (register_email_hook,
unregister_email_hook)
from reviewboard.reviews.models import ReviewRequest, Review
from reviewboard.reviews.signals import (review_request_published,
review_published, reply_published,
review_request_closed)
from reviewboard.webapi.models import WebAPIToken
from djblets.webapi.signals import webapi_token_created, webapi_token_updated
def connect_signals():
    """Connect e-mail callbacks to signals."""
    # Review lifecycle notifications.
    reply_published.connect(send_reply_published_mail, sender=Review)
    review_published.connect(send_review_published_mail, sender=Review)
    review_request_closed.connect(send_review_request_closed_mail,
                                  sender=ReviewRequest)
    review_request_published.connect(send_review_request_published_mail,
                                     sender=ReviewRequest)
    # Account notifications (no sender restriction for registration).
    user_registered.connect(send_user_registered_mail, sender=None)
    # API-token lifecycle notifications.
    webapi_token_created.connect(send_webapi_token_created_mail,
                                 sender=WebAPIToken)
    webapi_token_updated.connect(send_webapi_token_updated_mail,
                                 sender=WebAPIToken)
    post_delete.connect(send_webapi_token_deleted_mail, sender=WebAPIToken)
__all__ = [
'register_email_hook',
'unregister_email_hook',
] | unknown | codeparrot/codeparrot-clean | ||
import Avatar from "./avatar";
import Date from "./date";
import CoverImage from "./cover-image";
import Link from "next/link";
/**
 * Hero section for a featured post: a full-width cover image followed by a
 * two-column area with the linked title and date on the left and the excerpt
 * plus author avatar on the right.
 *
 * @param {string} title      Post title; also passed to CoverImage.
 * @param {string} coverImage URL of the cover image.
 * @param {string} date       Date string rendered via the Date component.
 * @param {string} excerpt    Short teaser paragraph.
 * @param {object} author     Author record; `name` and `profile_image` are read.
 * @param {string} slug       Post slug used to build the /posts/<slug> link.
 */
export default function HeroPost({
  title,
  coverImage,
  date,
  excerpt,
  author,
  slug,
}) {
  return (
    <section>
      <div className="mb-8 md:mb-16">
        <CoverImage
          title={title}
          url={coverImage}
          slug={slug}
          width={2000}
          height={1216}
        />
      </div>
      <div className="md:grid md:grid-cols-2 md:gap-x-16 lg:gap-x-8 mb-20 md:mb-28">
        <div>
          <h3 className="mb-4 text-4xl lg:text-6xl leading-tight">
            <Link href={`/posts/${slug}`} className="hover:underline">
              {title}
            </Link>
          </h3>
          <div className="mb-4 md:mb-0 text-lg">
            <Date dateString={date} />
          </div>
        </div>
        <div>
          <p className="text-lg leading-relaxed mb-4">{excerpt}</p>
          <Avatar name={author.name} picture={author.profile_image} />
        </div>
      </div>
    </section>
  );
}
#!/usr/bin/python
#
# Ansible module to manage IPv4 policy objects in fortigate devices
# (c) 2017, Benjamin Jolivot <bjolivot@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: fortios_ipv4_policy
version_added: "2.3"
author: "Benjamin Jolivot (@bjolivot)"
short_description: Manage IPv4 policy objects on Fortinet FortiOS firewall devices
description:
- This module provides management of firewall IPv4 policies on FortiOS devices.
extends_documentation_fragment: fortios
options:
id:
description:
- "Policy ID.
Warning: policy ID number is different than Policy sequence number.
The policy ID is the number assigned at policy creation.
The sequence number represents the order in which the Fortigate will evaluate the rule for policy enforcement,
and also the order in which rules are listed in the GUI and CLI.
These two numbers do not necessarily correlate: this module is based off policy ID.
TIP: policy ID can be viewed in the GUI by adding 'ID' to the display columns"
required: true
state:
description:
- Specifies if policy I(id) need to be added or deleted.
choices: ['present', 'absent']
default: present
src_intf:
description:
- Specifies source interface name.
default: any
dst_intf:
description:
- Specifies destination interface name.
default: any
src_addr:
description:
- Specifies source address (or group) object name(s). Required when I(state=present).
src_addr_negate:
description:
- Negate source address param.
default: false
choices: ["true", "false"]
dst_addr:
description:
- Specifies destination address (or group) object name(s). Required when I(state=present).
dst_addr_negate:
description:
- Negate destination address param.
default: false
choices: ["true", "false"]
policy_action:
description:
- Specifies accept or deny action policy. Required when I(state=present).
choices: ['accept', 'deny']
aliases: ['action']
service:
description:
- "Specifies policy service(s), could be a list (ex: ['MAIL','DNS']). Required when I(state=present)."
aliases:
- services
service_negate:
description:
- Negate policy service(s) defined in service value.
default: false
choices: ["true", "false"]
schedule:
description:
- defines policy schedule.
default: 'always'
nat:
description:
- Enable or disable Nat.
default: false
choices: ["true", "false"]
fixedport:
description:
- Use fixed port for nat.
default: false
choices: ["true", "false"]
poolname:
description:
- Specifies NAT pool name.
av_profile:
description:
- Specifies Antivirus profile name.
webfilter_profile:
description:
- Specifies Webfilter profile name.
ips_sensor:
description:
- Specifies IPS Sensor profile name.
application_list:
description:
- Specifies Application Control name.
logtraffic:
version_added: "2.4"
description:
- Logs sessions that matched policy.
default: utm
choices: ['disable', 'utm', 'all']
logtraffic_start:
version_added: "2.4"
description:
- Logs beginning of session as well.
default: false
choices: ["true", "false"]
comment:
description:
- free text to describe policy.
notes:
- This module requires pyFG library.
"""
EXAMPLES = """
- name: Allow external DNS call
fortios_ipv4_policy:
host: 192.168.0.254
username: admin
password: password
id: 42
src_addr: internal_network
dst_addr: all
service: dns
nat: True
state: present
policy_action: accept
logtraffic: disable
- name: Public Web
fortios_ipv4_policy:
host: 192.168.0.254
username: admin
password: password
id: 42
src_addr: all
dst_addr: webservers
services:
- http
- https
state: present
policy_action: accept
"""
RETURN = """
firewall_address_config:
description: full firewall addresses config string
returned: always
type: string
change_string:
description: The commands executed by the module
returned: only if config changed
type: string
msg_error_list:
description: "List of errors returned by CLI (use -vvv for better readability)."
returned: only when error
type: string
"""
from ansible.module_utils.fortios import fortios_argument_spec, fortios_required_if
from ansible.module_utils.fortios import backup, AnsibleFortios
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
def main():
argument_spec = dict(
comment = dict(type='str'),
id = dict(type='int', required=True),
src_intf = dict(default='any'),
dst_intf = dict(default='any'),
state = dict(choices=['present', 'absent'], default='present'),
src_addr = dict(type='list'),
dst_addr = dict(type='list'),
src_addr_negate = dict(type='bool', default=False),
dst_addr_negate = dict(type='bool', default=False),
policy_action = dict(choices=['accept','deny'], aliases=['action']),
service = dict(aliases=['services'], type='list'),
service_negate = dict(type='bool', default=False),
schedule = dict(type='str', default='always'),
nat = dict(type='bool', default=False),
fixedport = dict(type='bool', default=False),
poolname = dict(type='str'),
av_profile = dict(type='str'),
webfilter_profile = dict(type='str'),
ips_sensor = dict(type='str'),
application_list = dict(type='str'),
logtraffic = dict(choices=['disable','all','utm'], default='utm'),
logtraffic_start = dict(type='bool', default=False),
)
#merge global required_if & argument_spec from module_utils/fortios.py
argument_spec.update(fortios_argument_spec)
ipv4_policy_required_if = [
['state', 'present', ['src_addr', 'dst_addr', 'policy_action', 'service']],
]
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=fortios_required_if + ipv4_policy_required_if ,
)
#init forti object
fortigate = AnsibleFortios(module)
#Security policies root path
config_path = 'firewall policy'
#test params
#NAT related
if not module.params['nat']:
if module.params['poolname']:
module.fail_json(msg='Poolname param requires NAT to be true.')
if module.params['fixedport']:
module.fail_json(msg='Fixedport param requires NAT to be true.')
#log options
if module.params['logtraffic_start']:
if not module.params['logtraffic'] == 'all':
module.fail_json(msg='Logtraffic_start param requires logtraffic to be set to "all".')
#id must be str(int) for pyFG to work
policy_id = str(module.params['id'])
#load config
fortigate.load_config(config_path)
#Absent State
if module.params['state'] == 'absent':
fortigate.candidate_config[config_path].del_block(policy_id)
#Present state
elif module.params['state'] == 'present':
new_policy = fortigate.get_empty_configuration_block(policy_id, 'edit')
#src / dest / service / interfaces
new_policy.set_param('srcintf', '"%s"' % (module.params['src_intf']))
new_policy.set_param('dstintf', '"%s"' % (module.params['dst_intf']))
new_policy.set_param('srcaddr', " ".join('"' + item + '"' for item in module.params['src_addr']))
new_policy.set_param('dstaddr', " ".join('"' + item + '"' for item in module.params['dst_addr']))
new_policy.set_param('service', " ".join('"' + item + '"' for item in module.params['service']))
# negate src / dest / service
if module.params['src_addr_negate']:
new_policy.set_param('srcaddr-negate', 'enable')
if module.params['dst_addr_negate']:
new_policy.set_param('dstaddr-negate', 'enable')
if module.params['service_negate']:
new_policy.set_param('service-negate', 'enable')
# action
new_policy.set_param('action', '%s' % (module.params['policy_action']))
#logging
new_policy.set_param('logtraffic', '%s' % (module.params['logtraffic']))
if module.params['logtraffic'] == 'all':
if module.params['logtraffic_start']:
new_policy.set_param('logtraffic-start', 'enable')
else:
new_policy.set_param('logtraffic-start', 'disable')
# Schedule
new_policy.set_param('schedule', '%s' % (module.params['schedule']))
#NAT
if module.params['nat']:
new_policy.set_param('nat', 'enable')
if module.params['fixedport']:
new_policy.set_param('fixedport', 'enable')
if module.params['poolname'] is not None:
new_policy.set_param('ippool', 'enable')
new_policy.set_param('poolname', '"%s"' % (module.params['poolname']))
#security profiles:
if module.params['av_profile'] is not None:
new_policy.set_param('av-profile', '"%s"' % (module.params['av_profile']))
if module.params['webfilter_profile'] is not None:
new_policy.set_param('webfilter-profile', '"%s"' % (module.params['webfilter_profile']))
if module.params['ips_sensor'] is not None:
new_policy.set_param('ips-sensor', '"%s"' % (module.params['ips_sensor']))
if module.params['application_list'] is not None:
new_policy.set_param('application-list', '"%s"' % (module.params['application_list']))
# comment
if module.params['comment'] is not None:
new_policy.set_param('comment', '"%s"' % (module.params['comment']))
#add the new policy to the device
fortigate.add_block(policy_id, new_policy)
#Apply changes
fortigate.apply_changes()
# Standard Ansible module entry point: execute main() only when the module
# is run directly (Ansible invokes the file as a script).
if __name__ == '__main__':
    main()
#!/bin/python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error
class perl_XML_RegExp(test.test):
    """
    Autotest module for testing basic functionality of perl_XML_RegExp.

    Runs the packaged ``perl-XML-RegExp.sh`` test script and reports
    failure through autotest's error machinery.

    @author Ramya BS <ramyabs1@in.ibm.com>
    """
    version = 1
    # Overall failure counter; non-zero after run_once() means the test failed.
    nfail = 0
    path = ''

    def initialize(self):
        """
        Sets the overall failure counter for the test.
        """
        self.nfail = 0
        logging.info('\n Test initialize successfully')

    def run_once(self, test_path=''):
        """
        Trigger test run.

        @param test_path: directory containing the ``shared`` helpers and the
                          ``perl_XML_RegExp`` test folder.
        """
        try:
            os.environ["LTPBIN"] = "%s/shared" % (test_path)
            ret_val = subprocess.Popen(['./perl-XML-RegExp.sh'],
                                       cwd="%s/perl_XML_RegExp" % (test_path))
            ret_val.communicate()
            if ret_val.returncode != 0:
                self.nfail += 1
        # 'except E as e' replaces the Python-2-only 'except E, e' comma form;
        # the 'as' syntax is valid from Python 2.6 onward and required by Python 3.
        except error.CmdError as e:
            self.nfail += 1
            logging.error("Test Failed: %s", e)

    def postprocess(self):
        """Raise a TestError if any failure was recorded during the run."""
        if self.nfail != 0:
            logging.info('\n nfails is non-zero')
            raise error.TestError('\nTest failed')
        else:
            logging.info('\n Test completed successfully ')
//
// Code generated by grafana-app-sdk. DO NOT EDIT.
//
package v0alpha1
import (
"encoding/json"
"io"
"github.com/grafana/grafana-app-sdk/resource"
)
// ExternalGroupMappingJSONCodec is an implementation of resource.Codec for kubernetes JSON encoding
type ExternalGroupMappingJSONCodec struct{}
// Read reads JSON-encoded bytes from `reader` and unmarshals them into `into`
func (*ExternalGroupMappingJSONCodec) Read(reader io.Reader, into resource.Object) error {
return json.NewDecoder(reader).Decode(into)
}
// Write writes JSON-encoded bytes into `writer` marshaled from `from`
func (*ExternalGroupMappingJSONCodec) Write(writer io.Writer, from resource.Object) error {
return json.NewEncoder(writer).Encode(from)
}
// Interface compliance checks
var _ resource.Codec = &ExternalGroupMappingJSONCodec{} | go | github | https://github.com/grafana/grafana | apps/iam/pkg/apis/iam/v0alpha1/externalgroupmapping_codec_gen.go |
import datetime
import errno
import os
import tempfile
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError, VALID_KEY_CHARS
from django.core.exceptions import SuspiciousOperation, ImproperlyConfigured
from django.utils import timezone
class SessionStore(SessionBase):
    """
    Implements a file based session store.

    Each session lives in one file under the storage path; the file name is
    the SESSION_COOKIE_NAME prefix followed by the session key, and the file
    body is the encoded session dict.
    """
    def __init__(self, session_key=None):
        # Resolve the (class-cached) storage directory and file-name prefix
        # before SessionBase.__init__, which may trigger loads/saves.
        self.storage_path = type(self)._get_storage_path()
        self.file_prefix = settings.SESSION_COOKIE_NAME
        super(SessionStore, self).__init__(session_key)
    @classmethod
    def _get_storage_path(cls):
        """
        Return the directory sessions are stored in, caching it on the class.

        Uses SESSION_FILE_PATH if set, otherwise the system temp directory;
        raises ImproperlyConfigured if the directory does not exist.
        """
        try:
            return cls._storage_path
        except AttributeError:
            storage_path = getattr(settings, "SESSION_FILE_PATH", None)
            if not storage_path:
                storage_path = tempfile.gettempdir()
            # Make sure the storage path is valid.
            if not os.path.isdir(storage_path):
                raise ImproperlyConfigured(
                    "The session storage path %r doesn't exist. Please set your"
                    " SESSION_FILE_PATH setting to an existing directory in which"
                    " Django can store session data." % storage_path)
            cls._storage_path = storage_path
            return storage_path
    def _key_to_file(self, session_key=None):
        """
        Get the file associated with this session key.
        """
        if session_key is None:
            session_key = self._get_or_create_session_key()
        # Make sure we're not vulnerable to directory traversal. Session keys
        # should always be md5s, so they should never contain directory
        # components.
        if not set(session_key).issubset(set(VALID_KEY_CHARS)):
            raise SuspiciousOperation(
                "Invalid characters in session key")
        return os.path.join(self.storage_path, self.file_prefix + session_key)
    def _last_modification(self):
        """
        Return the modification time of the file storing the session's content.
        """
        modification = os.stat(self._key_to_file()).st_mtime
        if settings.USE_TZ:
            modification = datetime.datetime.utcfromtimestamp(modification)
            modification = modification.replace(tzinfo=timezone.utc)
        else:
            modification = datetime.datetime.fromtimestamp(modification)
        return modification
    def load(self):
        """
        Read and decode this session's file; a missing, corrupt, or expired
        file results in a fresh empty session (via create()).
        """
        session_data = {}
        try:
            with open(self._key_to_file(), "rb") as session_file:
                file_data = session_file.read()
            # Don't fail if there is no data in the session file.
            # We may have opened the empty placeholder file.
            if file_data:
                try:
                    session_data = self.decode(file_data)
                except (EOFError, SuspiciousOperation):
                    self.create()
                # Remove expired sessions.
                expiry_age = self.get_expiry_age(
                    modification=self._last_modification(),
                    expiry=session_data.get('_session_expiry'))
                if expiry_age < 0:
                    session_data = {}
                    self.delete()
                    self.create()
        except IOError:
            self.create()
        return session_data
    def create(self):
        """
        Create a new empty session, retrying with a fresh key until one that
        does not collide with an existing session file is found.
        """
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key collision with another session file; try a new key.
                continue
            self.modified = True
            self._session_cache = {}
            return
    def save(self, must_create=False):
        """
        Persist the session to its file.

        With must_create=True, raise CreateError if a file for this key
        already exists (used by create() to guarantee key uniqueness).
        """
        # Get the session data now, before we start messing
        # with the file it is stored within.
        session_data = self._get_session(no_load=must_create)
        session_file_name = self._key_to_file()
        try:
            # Make sure the file exists.  If it does not already exist, an
            # empty placeholder file is created.
            flags = os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0)
            if must_create:
                flags |= os.O_EXCL
            fd = os.open(session_file_name, flags)
            os.close(fd)
        except OSError as e:
            if must_create and e.errno == errno.EEXIST:
                raise CreateError
            raise
        # Write the session file without interfering with other threads
        # or processes.  By writing to an atomically generated temporary
        # file and then using the atomic os.rename() to make the complete
        # file visible, we avoid having to lock the session file, while
        # still maintaining its integrity.
        #
        # Note: Locking the session file was explored, but rejected in part
        # because in order to be atomic and cross-platform, it required a
        # long-lived lock file for each session, doubling the number of
        # files in the session storage directory at any given time.  This
        # rename solution is cleaner and avoids any additional overhead
        # when reading the session data, which is the more common case
        # unless SESSION_SAVE_EVERY_REQUEST = True.
        #
        # See ticket #8616.
        dir, prefix = os.path.split(session_file_name)
        try:
            output_file_fd, output_file_name = tempfile.mkstemp(dir=dir,
                prefix=prefix + '_out_')
            renamed = False
            try:
                try:
                    os.write(output_file_fd, self.encode(session_data).encode())
                finally:
                    os.close(output_file_fd)
                os.rename(output_file_name, session_file_name)
                renamed = True
            finally:
                if not renamed:
                    # Rename failed: don't leave the temp file behind.
                    os.unlink(output_file_name)
        except (OSError, IOError, EOFError):
            pass
    def exists(self, session_key):
        """Return True if a session file exists for the given key."""
        return os.path.exists(self._key_to_file(session_key))
    def delete(self, session_key=None):
        """Remove the session file for the given (or current) key, if any."""
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        try:
            os.unlink(self._key_to_file(session_key))
        except OSError:
            pass
    def clean(self):
        # No periodic cleanup is needed beyond clear_expired().
        pass
    @classmethod
    def clear_expired(cls):
        """
        Delete every expired session file in the storage directory by loading
        each one (load() removes expired sessions as a side effect).
        """
        storage_path = cls._get_storage_path()
        file_prefix = settings.SESSION_COOKIE_NAME
        for session_file in os.listdir(storage_path):
            if not session_file.startswith(file_prefix):
                continue
            session_key = session_file[len(file_prefix):]
            session = cls(session_key)
            # When an expired session is loaded, its file is removed, and a
            # new file is immediately created. Prevent this by disabling
            # the create() method.
            session.create = lambda: None
            session.load()
#!/usr/bin/env python
## \file updateHistoryMap.py
# \brief Python script for updating the historyMap.py file.
# \author T. Albring
# \version 7.0.0 "Falcon"
#
# SU2 Project Website: https://su2code.github.io
#
# The SU2 Project is maintained by the SU2 Foundation
# (http://su2foundation.org)
#
# Copyright 2012-2020, SU2 Contributors (cf. AUTHORS.md)
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
import os, pprint
# Root of the SU2 source tree; the script reads the C++ output sources from
# here and rewrites SU2_PY/SU2/io/historyMap.py relative to it, so SU2_HOME
# must be set in the environment (KeyError otherwise).
su2_home = os.environ['SU2_HOME']
# Output classes that declare history fields via AddHistoryOutput(...).
fileList = ['CFlowOutput.cpp',
            'CFlowIncOutput.cpp',
            'CFlowCompOutput.cpp',
            'CHeatOutput.cpp',
            'CFlowCompFEMOutput.cpp',
            'CElasticityOutput.cpp',
            'CAdjHeatOutput.cpp',
            'CAdjFlowIncOutput.cpp',
            'CAdjFlowCompOutput.cpp',
            'CAdjElasticityOutput.cpp']
fileList = [os.path.join(su2_home, 'SU2_CFD/src/output/' + i) for i in fileList]
def parse_output(files):
    """Scan SU2 output sources for AddHistoryOutput(...) declarations and
    regenerate SU2_PY/SU2/io/historyMap.py with one entry per history field.

    For every field of type COEFFICIENT, derived fields (D_*, TAVG_* and
    TAVG_D_*) are added as well, mirroring what the C++ output classes
    create at runtime.

    files -- iterable of absolute paths to the .cpp files to parse.
    """
    outputFields = dict()
    for file in files:
        print('Parsing ' + file)
        # 'with' guarantees the handle is closed even if parsing raises;
        # the previous manual f.close() was skipped on any exception.
        with open(file, 'r') as f:
            while 1:
                s = f.readline().strip(' ')
                if not s:
                    # readline() returns '' only at end of file.
                    break
                if s.startswith('AddHistoryOutput('):
                    # Naive comma split of the argument list; assumes no
                    # commas inside the quoted strings.
                    s = s.replace('AddHistoryOutput', '').strip('()').split(',')
                    curOutputField = dict()
                    name = s[0].strip(' ()"\n;')
                    curOutputField['HEADER'] = s[1].strip(' ()"\n;')
                    curOutputField['GROUP'] = s[3].strip(' ()"\n;')
                    curOutputField['DESCRIPTION'] = s[4].strip(' ()"\n;')
                    if len(s) == 6:
                        # Sixth argument is the HistoryFieldType enum value.
                        curOutputField['TYPE'] = s[5].strip(' ()"\n;').split('::')[1]
                    else:
                        curOutputField['TYPE'] = 'DEFAULT'
                    outputFields[name] = curOutputField
    # Derive D_/TAVG_/TAVG_D_ companions for every coefficient field.
    addedOutputFields = dict()
    for field in outputFields:
        if outputFields[field]['TYPE'] == 'COEFFICIENT':
            curOutputField = dict()
            name = 'D_' + field
            curOutputField['HEADER'] = 'd[' + outputFields[field]['HEADER'] + ']'
            curOutputField['GROUP'] = 'D_' + outputFields[field]['GROUP']
            curOutputField['TYPE'] = 'D_COEFFICIENT'
            curOutputField['DESCRIPTION'] = 'Derivative value'
            addedOutputFields[name] = curOutputField
            name = 'TAVG_' + field
            curOutputField = dict()
            curOutputField['HEADER'] = 'tavg[' + outputFields[field]['HEADER'] + ']'
            curOutputField['GROUP'] = 'TAVG_' + outputFields[field]['GROUP']
            curOutputField['TYPE'] = 'TAVG_COEFFICIENT'
            curOutputField['DESCRIPTION'] = 'weighted time average value'
            addedOutputFields[name] = curOutputField
            name = 'TAVG_D_' + field
            curOutputField = dict()
            curOutputField['HEADER'] = 'dtavg[' + outputFields[field]['HEADER'] + ']'
            curOutputField['GROUP'] = 'TAVG_D_' + outputFields[field]['GROUP']
            curOutputField['TYPE'] = 'TAVG_D_COEFFICIENT'
            curOutputField['DESCRIPTION'] = 'weighted time average derivative value'
            addedOutputFields[name] = curOutputField
    outputFields.update(addedOutputFields)
    # Emit the generated map as a Python literal assignment.
    with open(os.path.join(su2_home, 'SU2_PY/SU2/io/historyMap.py'), 'w') as f:
        f.write('history_header_map = ')
        pprint.pprint(outputFields, f)
parse_output(fileList) | unknown | codeparrot/codeparrot-clean | ||
<?php
// Test fixture for the SecurityBundle DI extension: extends container1.php
// with a sodium password hasher whose time/memory cost parameters are set
// explicitly (128 MiB memory cost, 8 iterations).
$loader->load('container1.php');
$container->loadFromExtension('security', [
    'password_hashers' => [
        'JMS\FooBundle\Entity\User7' => [
            'algorithm' => 'sodium',
            'time_cost' => 8,
            'memory_cost' => 128 * 1024,
        ],
    ],
]);
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Resource management library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
def open_file(path):
  """Opens the file at given path, where path is relative to tensorflowjs/.

  Args:
    path: a string resource path relative to tensorflowjs/.

  Returns:
    An open file of that resource.

  Raises:
    IOError: If the path is not found, or the resource can't be opened.
  """
  # Resolve relative to the directory containing this module.
  package_dir = os.path.dirname(os.path.abspath(__file__))
  return open(os.path.join(package_dir, path))
def list_dir(path):
  """List the files inside a dir where path is relative to tensorflowjs/.

  Args:
    path: a string path to a resource directory relative to tensorflowjs/.

  Returns:
    A list of files inside that directory.

  Raises:
    IOError: If the path is not found, or the resource can't be read.
  """
  # Resolve relative to the directory containing this module.
  package_dir = os.path.dirname(os.path.abspath(__file__))
  return os.listdir(os.path.join(package_dir, path))
import AxiosError from '../../../lib/core/AxiosError';
describe('core::AxiosError', function() {
  // Constructor should populate every documented field and mark the error.
  it('should create an Error with message, config, code, request, response, stack and isAxiosError', function() {
    const request = { path: '/foo' };
    const response = { status: 200, data: { foo: 'bar' } };
    const error = new AxiosError('Boom!', 'ESOMETHING', { foo: 'bar' }, request, response);
    expect(error instanceof Error).toBe(true);
    expect(error.message).toBe('Boom!');
    expect(error.config).toEqual({ foo: 'bar' });
    expect(error.code).toBe('ESOMETHING');
    expect(error.request).toBe(request);
    expect(error.response).toBe(response);
    expect(error.isAxiosError).toBe(true);
    expect(error.stack).toBeDefined();
  });
  // toJSON() must drop request/response so the result is serializable.
  it('should create an Error that can be serialized to JSON', function() {
    // Attempting to serialize request and response results in
    // TypeError: Converting circular structure to JSON
    const request = { path: '/foo' };
    const response = { status: 200, data: { foo: 'bar' } };
    const error = new AxiosError('Boom!', 'ESOMETHING', { foo: 'bar' }, request, response);
    const json = error.toJSON();
    expect(json.message).toBe('Boom!');
    expect(json.config).toEqual({ foo: 'bar' });
    expect(json.code).toBe('ESOMETHING');
    expect(json.status).toBe(200);
    expect(json.request).toBe(undefined);
    expect(json.response).toBe(undefined);
  });
  describe('core::createError.from', function() {
    // AxiosError.from wraps an existing Error with axios metadata.
    it('should add config, config, request and response to error', function() {
      const error = new Error('Boom!');
      const request = { path: '/foo' };
      const response = { status: 200, data: { foo: 'bar' } };
      const axiosError = AxiosError.from(error, 'ESOMETHING', { foo: 'bar' }, request, response);
      expect(axiosError.config).toEqual({ foo: 'bar' });
      expect(axiosError.code).toBe('ESOMETHING');
      expect(axiosError.request).toBe(request);
      expect(axiosError.response).toBe(response);
      expect(axiosError.isAxiosError).toBe(true);
    });
    it('should return error', function() {
      const error = new Error('Boom!');
      expect(AxiosError.from(error, 'ESOMETHING', { foo: 'bar' }) instanceof AxiosError).toBeTruthy();
    });
  });
  // Guarded to Node: util.types.isNativeError is not available in browsers.
  it('should be a native error as checked by the NodeJS `isNativeError` function', function (){
    if((typeof process !== 'undefined') && (process.release.name === 'node')){
      let {isNativeError} = require('node:util/types');
      expect(isNativeError(new AxiosError("My Axios Error"))).toBeTruthy();
    }
  });
  it('should create an error using one of the static class properties as an error code', function (){
    const myError = new AxiosError("My Axios Error", AxiosError.ECONNABORTED);
    expect(myError.code).toEqual(AxiosError.ECONNABORTED);
  });
  it('should have status property when response was passed to the constructor', () => {
    const err = new AxiosError('test', 'foo', {}, {}, {status: 400});
    expect(err.status).toBe(400);
  });
});
import logging
import re
from collections import defaultdict
from itertools import permutations
from copy import deepcopy as copy
from fst import FSA, FST
from matcher import KRPosMatcher
from pymachine.machine import Machine
from pymachine.control import KRPosControl
from constants import deep_cases, deep_case_to_grammatical_case
from avm import AVM
from operators import ExpandOperator, FillArgumentOperator
from np_parser import parse_rule
class Construction(object):
    """Base class for constructions: a named FSA control plus an action.

    check() dry-runs the control over a machine sequence; act() re-runs it
    for real (applying transition side effects).
    """
    SEMANTIC, CHUNK, AVM = xrange(3) # types
    def __init__(self, name, control, type_=SEMANTIC):
        """
        @param name human-readable identifier of the construction.
        @param control an FSA driving acceptance (TypeError otherwise).
        @param type_ the type of the construction -- SEMANTIC, CHUNK or AVM.
        """
        self.name = name
        if not isinstance(control, FSA):
            raise TypeError("control has to be an FSA instance")
        self.control = control
        self.type_ = type_
    def check(self, seq):
        """Dry-run the control over seq; return True iff it accepts."""
        #don't create debug messages unless necessary, will slow down SA
        #logging.debug((u"Checking {0} construction for matching with " +
        #              u"{1} machines").format(self.name,
        #              u" ".join(unicode(m) for m in seq)).encode("utf-8"))
        self.control.reset()
        for machine in seq:
            self.control.read(machine, dry_run=True)
        return self.control.in_final()
    def run(self, seq):
        """Shorthand for if check: act."""
        # read the sequence first, and give it to the control
        self.check(seq)
        # if control got into acceptor state, do something
        if self.control.in_final():
            return self.act(seq)
        else:
            return None
    def last_check(self, seq):
        """last_check() is called after construction is activated by the
        spreading activation. Can be used for order checking for example"""
        return True
    def act(self, seq):
        """@return a sequence of machines, or @c None, if last_check() failed.
        """
        logging.debug("Construction matched, running action")
        self.control.reset()
        for machine in seq:
            self.control.read(machine)
        # arbitrary python code, now every construction will have it
        # hardcoded into the code, later it will be done by Machine objects
class NPConstruction(Construction):
    """Noun-phrase chunking construction.

    Built from a pattern rule (compiled to matchers by parse_rule); its
    control is a linear FSA accepting exactly the matcher sequence, and
    act() pipes the matched machines through the configured operators.
    """
    def __init__(self, name, rule, operators):
        self.rule = rule  # TODO: create control
        self.matchers = parse_rule(self.rule)
        self.operators = operators
        Construction.__init__(self, name, self._create_control(),
                              Construction.CHUNK)
    def _create_control(self):
        """Build a linear FSA: state i-1 --matchers[i-1]--> state i,
        with the last state final."""
        control = FSA()
        control.add_state("0", is_init=True, is_final=False)
        for state in xrange(1, len(self.matchers) + 1):
            control.add_state(
                str(state), is_init=False,
                is_final=(state == len(self.matchers)))
            control.add_transition(self.matchers[state - 1],
                                   str(state - 1), str(state))
        return control
    def last_check(self, seq):
        """
        Checks if the greek letters (e.g. bound variables) are consistent:
        every '@' variable must be bound to a single value across seq.
        @todo Implement it similarly VerbConstruction, e.g. as an argument
        filling problem.
        """
        patterns = [m.pattern for m in self.matchers]
        assert len(seq) == len(patterns)
        greeks = defaultdict(set)
        for i in xrange(len(seq)):
            if not self._collect_variable_values(
                    patterns[i], seq[i].control.kr, greeks):
                return False
        for v in greeks.values():
            if len(v) > 1:
                return False
        return True
    def _collect_variable_values(self, tmpl, data, greeks):
        """
        Collects the values of the variables in @p tmpl from @p data and adds
        them to the multimap @p greeks. Returns False if a template key is
        missing from the data.
        """
        for key in tmpl:
            if key not in data:
                # Should be; it's already checked in check()
                return False
            if isinstance(tmpl[key], dict):
                # Recurse into the nested template, but keep iterating over
                # the remaining keys. The previous implementation returned
                # the recursion result directly here, which silently skipped
                # every key of tmpl after the first dict-valued one, so
                # variables in sibling keys were never collected or checked.
                if not self._collect_variable_values(
                        tmpl[key], data[key], greeks):
                    return False
            elif tmpl[key][0] == '@':
                greeks[tmpl[key]].add(data[key])
        return True
    def act(self, seq):
        """Apply each operator in turn to the matched machine sequence."""
        logging.info('acting, operators: {}'.format(self.operators))
        for operator in self.operators:
            seq = operator.act(seq)
        return seq
class VerbConstruction(Construction):
    """A default construction for verbs. It reads definitions, discovers
    cases, and builds a control from it. After that, the act() will do the
    linking process, eg. link the verb with other words, object, subject, etc.
    Defines a single Machine as the "working area": the element in X that we
    follow. An operator represents a relation in phi; however, typically we
    only care about one element among the potentially infinite number of x's.
    Hence, it is enough to maintain a single Machine as a placeholder for
    this element.
    """
    def __init__(self, name, lexicon, supp_dict, max_depth=3):
        self.name = name
        self.lexicon = lexicon
        self.supp_dict = supp_dict
        self.max_depth = max_depth
        self.matchers = {}
        self.working_area = [Machine(None, KRPosControl('stem/VERB'))]
        # indexing 0th element in static because that is the canonical machine
        self.discover_arguments(lexicon.static[name][0])
        control = self.generate_control()
        self.case_pattern = re.compile("N(OUN|P)[^C]*CAS<([^>]*)>")
        Construction.__init__(self, name, control)
        self.activated = False
        logging.info('VerbConstruction {0} created. Matchers: {1}'.format(
            self.name, self.matchers))
        logging.info('Control: {0}'.format(self.control))
        # Debug aid: dump the control as graphviz. The previous code opened
        # the file and never closed it (leaked handle, possibly unflushed
        # buffer); 'with' closes and flushes it deterministically.
        with open('control.dot', 'w') as f:
            f.write(self.control.to_dot())
    def generate_control(self):
        """Build a hypercube-shaped FST: state 0 reads the verb, then every
        permutation of the discovered arguments walks cube edges to the
        single final state 2^len(arguments)."""
        arguments = self.matchers.keys()
        # this will be a hypercube
        control = FST()
        # zero state is for verb
        control.add_state("0", is_init=True, is_final=False)
        # inside states for the cube, except the last, accepting state
        for i in xrange(1, pow(2, len(arguments))):
            control.add_state(str(i), is_init=False, is_final=False)
        # last node of the hypercube
        control.add_state(
            str(int(pow(2, len(arguments)))),
            is_init=False, is_final=True)
        # first transition
        control.add_transition(KRPosMatcher("VERB"), [ExpandOperator(
            self.lexicon, self.working_area)], "0", "1")
        # count every transition as an increase in number of state
        for path in permutations(arguments):
            actual_state = 1
            for arg in path:
                # Each argument owns one bit of the state number.
                increase = pow(2, arguments.index(arg))
                new_state = actual_state + increase
                control.add_transition(
                    self.matchers[arg],
                    [FillArgumentOperator(arg, self.working_area)],
                    str(actual_state), str(new_state))
                actual_state = new_state
        return control
    def discover_arguments(self, machine, depth=0):
        """Walk the definition graph (up to max_depth) collecting deep cases
        and '$'-prefixed supplementary patterns into self.matchers."""
        if depth > self.max_depth:
            return
        if depth == 0:
            # Fresh visited-set per top-level call; guards against cycles.
            self.traversed = set()
        logging.info('\t\t'*depth + 'discovering arguments of {0}...'.format(
            machine))
        for pi, p in enumerate(machine.partitions):
            logging.info('\t\t'*depth + 'partition #{0}: {1}'.format(pi, p))
            for mi, part_machine in enumerate(p):
                if part_machine in self.traversed:
                    continue
                self.traversed.add(part_machine)
                logging.info('\t\t'*depth + '\tmachine #{0}: {1}'.format(
                    mi, part_machine))
                pn = part_machine.printname()
                # we are interested in deep cases and
                # supplementary regexps
                if pn in deep_cases or pn.startswith("$"):
                    if pn.startswith("$"):
                        self.matchers[pn] = self.supp_dict[pn]
                    else:
                        #TODO get grammatical case from deep case!
                        #This is a temporary hack
                        gr_case = deep_case_to_grammatical_case.get(pn, 'NOM')
                        self.matchers[pn] = KRPosMatcher(
                            "NOUN<CAS<{0}>>".format(gr_case))
                # recursive call
                self.discover_arguments(part_machine, depth=depth+1)
    def check(self, seq):
        """Like Construction.check, but a construction fires at most once:
        returns False after activation."""
        if self.activated:
            return False
        else:
            res = Construction.check(self, seq)
            if res:
                logging.debug('check is True!')
            #logging.debug("Result of check is {0}".format(res) +
            #              " and working area is:\n{0}".format(
            #                  Machine.to_debug_str(self.working_area[0])))
            return res
class AVMConstruction(Construction):
    """this class will fill the slots in the AVM"""
    def __init__(self, avm):
        self.avm = avm
        # phi maps each slot's TYPE matcher back to the slot key.
        self.phi = self.generate_phi()
        control = self.generate_control()
        Construction.__init__(
            self, avm.name + 'Construction', control, type_=Construction.AVM)
    def generate_phi(self):
        """Return {matcher: slot_key} for every slot of the AVM."""
        phi = {}
        for key in self.avm:
            matcher = self.avm.get_field(key, AVM.TYPE)
            phi[matcher] = key
        return phi
    def generate_control(self):
        """Build a star-shaped FSA: one final state per AVM slot, reachable
        from the init state via that slot's matcher."""
        control = FSA()
        control.add_state("0", is_init=True, is_final=False)
        state_num = 1
        for key in self.avm:
            state_name = str(state_num)
            matcher = self.avm.get_field(key, AVM.TYPE)
            control.add_state(state_name, is_init=False, is_final=True)
            control.add_transition(matcher, "0", state_name)
            state_num += 1
        return control
    def check(self, seq):
        # AVM constructions accept any sequence; filtering happens in act().
        return True
    def act(self, seq):
        """Fill matching slots with the matching machines; return [avm].

        NOTE(review): the else-branch appears to reset a slot to its DEFAULT
        when the machine currently stored there no longer matches the slot's
        matcher — confirm against AVM semantics.
        """
        for machine in seq:
            for matcher in self.phi:
                if matcher.match(machine):
                    self.avm[self.phi[matcher]] = machine
                else:
                    if self.avm[self.phi[matcher]] == machine:
                        dv = self.avm.get_field(self.phi[matcher], AVM.DEFAULT)
                        self.avm[self.phi[matcher]] = dv
        return [self.avm]
def test():
    """Ad-hoc smoke test: build two machines referencing each other (a
    cycle) and check that deepcopy can still copy the structure."""
    #a = Machine("the", PosControl("DET"))
    #kek = Machine("kek", PosControl("ADJ"))
    #kockat = Machine("kockat", PosControl("NOUN<CAS<ACC>>"))
    m = Machine("vonat")
    m2 = Machine("tb")
    m.append(m2)
    m2.append(m)
    m3 = copy(m)
    assert m3
# Run the smoke test when the module is executed directly.
if __name__ == "__main__":
    test()
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Approximate kernel mapper for RBF kernel based on Random Fourier Features."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.contrib.kernel_methods.python.mappers import dense_kernel_mapper as dkm
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import math_ops
# TODO(sibyl-vie3Poto,felixyu): add an option to control whether the parameters in the
# kernel map are trainable.
class RandomFourierFeatureMapper(dkm.DenseKernelMapper):
  r"""Class that implements Random Fourier Feature Mapping (RFFM) in TensorFlow.

  The RFFM mapping is used to approximate the Gaussian (RBF) kernel:
  ```
  exp(-||x-y||_2^2 / (2 * sigma^2))
  ```

  The implementation of RFFM is based on the following paper:
  "Random Features for Large-Scale Kernel Machines" by Ali Rahimi and Ben Recht.
  (link: https://people.eecs.berkeley.edu/~brecht/papers/07.rah.rec.nips.pdf)

  The mapping uses a matrix `Omega \in R^{d x D}` and a bias vector `b \in R^D`
  where `d` is the input dimension (number of dense input features) and `D` is
  the output dimension (i.e., dimension of the feature space the input is mapped
  to). Each entry of `Omega` is sampled i.i.d. from a (scaled) Gaussian
  distribution and each entry of `b` is sampled independently and uniformly from
  [0, 2 * pi].

  For a single input feature vector x in R^d, its RFFM is defined as:
  ```
  sqrt(2/D) * cos(x * Omega + b)
  ```
  where `cos` is the element-wise cosine function and `x, b` are represented as
  row vectors. The aforementioned paper shows that the linear kernel of
  RFFM-mapped vectors approximates the Gaussian kernel of the initial vectors.
  """

  def __init__(self, input_dim, output_dim, stddev=1.0, seed=1, name=None):
    """Constructs a RandomFourierFeatureMapper instance.

    Args:
      input_dim: The dimension (number of features) of the tensors to be mapped.
      output_dim: The output dimension of the mapping.
      stddev: The standard deviation of the Gaussian kernel to be approximated.
        The error of the classifier trained using this approximation is very
        sensitive to this parameter.
      seed: An integer used to initialize the parameters (`Omega` and `b`) of
        the mapper. For repeatable sequences across different invocations of the
        mapper object (for instance, to ensure consistent mapping both at
        training and eval/inference if these happen in different invocations),
        set this to the same integer.
      name: name for the mapper object.
    """
    # TODO(sibyl-vie3Poto): Maybe infer input_dim and/or output_dim (if not explicitly
    # provided). input_dim can be inferred lazily, the first time map is called.
    # output_dim can be inferred from input_dim using heuristics on the error of
    # the approximation (and, by extension, the error of the classification
    # based on the approximation).
    self._input_dim = input_dim
    self._output_dim = output_dim
    self._stddev = stddev
    self._seed = seed
    self._name = name

  @property
  def name(self):
    """Returns a name for the `RandomFourierFeatureMapper` instance.

    If the name provided in the constructor is `None`, then the object's unique
    id is returned.

    Returns:
      A name for the `RandomFourierFeatureMapper` instance.
    """
    return self._name or str(id(self))

  @property
  def input_dim(self):
    return self._input_dim

  @property
  def output_dim(self):
    return self._output_dim

  def map(self, input_tensor):
    """Maps each row of input_tensor using random Fourier features.

    Args:
      input_tensor: a `Tensor` containing input features. It's shape is
      [batch_size, self._input_dim].

    Returns:
      A `Tensor` of shape [batch_size, self._output_dim] containing RFFM-mapped
      features.

    Raises:
      InvalidShapeError: if the shape of the `input_tensor` is inconsistent with
      expected input dimension.
    """
    input_tensor_shape = input_tensor.get_shape()
    if len(input_tensor_shape) != 2:
      raise dkm.InvalidShapeError(
          'The shape of the tensor should be 2. Got %d instead.' %
          len(input_tensor_shape))

    features_dim = input_tensor_shape[1]
    if features_dim != self._input_dim:
      raise dkm.InvalidShapeError(
          'Invalid dimension: expected %d input features, got %d instead.' %
          (self._input_dim, features_dim))

    # Add ops that compute (deterministically) omega_matrix and bias based on
    # the provided seed.
    # TODO(sibyl-vie3Poto): Storing the mapper's parameters (omega_matrix and bias) as
    # constants incurs no RPC calls to the parameter server during distributed
    # training. However, if the parameters grow too large (for instance if they
    # don't fit into memory or if they blow up the size of the GraphDef proto),
    # stroring them as constants is no longer an option. In this case, we should
    # have a heuristic to choose out of one of the following alternatives:
    # a) store them as variables (in the parameter server)
    # b) store them as worker local variables
    # c) generating on the fly the omega matrix at each step
    # Use a local RandomState so that building the map does not reseed
    # NumPy's *global* RNG as a side effect (np.random.seed perturbed any
    # unrelated user code sharing the process). RandomState(seed) yields
    # the identical MT19937 stream, so the generated parameters are
    # byte-for-byte unchanged.
    rng = np.random.RandomState(self._seed)

    omega_matrix_shape = [self._input_dim, self._output_dim]
    bias_shape = [self._output_dim]

    omega_matrix = constant_op.constant(
        rng.normal(
            scale=1.0 / self._stddev, size=omega_matrix_shape),
        dtype=dtypes.float32)
    bias = constant_op.constant(
        rng.uniform(
            low=0.0, high=2 * np.pi, size=bias_shape),
        dtype=dtypes.float32)

    x_omega_plus_bias = math_ops.add(
        math_ops.matmul(input_tensor, omega_matrix), bias)
    return math.sqrt(2.0 / self._output_dim) * math_ops.cos(x_omega_plus_bias)
import { test } from '../../test';

// Each <span> mirrors one entry of `a`; the test swaps the bound array
// and asserts the rendered DOM follows.
const make = (ids) => ids.map((id) => ({ id }));

export default test({
	// getter so every access hands out a fresh props object
	get props() {
		return { a: make(['foo', 'bar', 'baz']) };
	},

	html: `
		<span>foo</span><span>bar</span><span>baz</span>
	`,

	test({ assert, component, target }) {
		component.a = make(['yep', 'nope']);
		assert.htmlEqual(
			target.innerHTML,
			`
			<span>yep</span><span>nope</span>
		`
		);
	}
});
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import logging
from openerp import netsvc
from openerp.osv import osv, fields
from openerp.osv import fields
from openerp import tools
from openerp.tools.translate import _
from urllib import urlencode, quote as quote
_logger = logging.getLogger(__name__)
try:
    # We use a jinja2 sandboxed environment to render mako templates.
    # Note that the rendering does not cover all the mako syntax, in particular
    # arbitrary Python statements are not accepted, and not all expressions are
    # allowed: only "public" attributes (not starting with '_') of objects may
    # be accessed.
    # This is done on purpose: it prevents incidental or malicious execution of
    # Python code that may break the security of the server.
    from jinja2.sandbox import SandboxedEnvironment
    # Delimiters below mimic mako syntax (<% %>, ${ }) so existing
    # mako-style templates keep working under jinja2.
    mako_template_env = SandboxedEnvironment(
        block_start_string="<%",
        block_end_string="%>",
        variable_start_string="${",
        variable_end_string="}",
        comment_start_string="<%doc>",
        comment_end_string="</%doc>",
        line_statement_prefix="%",
        line_comment_prefix="##",
        trim_blocks=True,               # do not output newline after blocks
        autoescape=True,                # XML/HTML automatic escaping
    )
    # Helpers made available inside template expressions.
    mako_template_env.globals.update({
        'str': str,
        'quote': quote,
        'urlencode': urlencode,
    })
except ImportError:
    # jinja2 is optional: without it templates simply do not render.
    _logger.warning("jinja2 not available, templating features will not work!")
class email_template(osv.osv):
    "Templates for sending email"
    _name = "email.template"
    _description = 'Email Templates'
    _order = 'name'

    def render_template(self, cr, uid, template, model, res_id, context=None):
        """Render the given template text, replace mako expressions ``${expr}``
        with the result of evaluating these expressions with
        an evaluation context containing:

            * ``user``: browse_record of the current user
            * ``object``: browse_record of the document record this mail is
              related to
            * ``context``: the context passed to the mail composition wizard

        :param str template: the template text to render
        :param str model: model name of the document record this mail is related to.
        :param int res_id: id of the document record this mail is related to.
        """
        if not template:
            return u""
        if context is None:
            context = {}
        try:
            template = tools.ustr(template)
            record = None
            if res_id:
                record = self.pool.get(model).browse(cr, uid, res_id, context=context)
            user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
            variables = {
                'object': record,
                'user': user,
                'ctx': context,  # context kw would clash with mako internals
            }
            result = mako_template_env.from_string(template).render(variables)
            # A template that evaluates to the string "False" is treated
            # as empty (typical for unset fields rendered directly).
            if result == u"False":
                result = u""
            return result
        except Exception:
            # Rendering is best-effort: a broken template must not abort
            # the calling flow, so log and fall back to an empty string.
            _logger.exception("failed to render mako template value %r", template)
            return u""

    def get_email_template(self, cr, uid, template_id=False, record_id=None, context=None):
        """Return the template browse_record, re-read in the language
        obtained by rendering the template's ``lang`` expression against
        ``record_id`` (so translated subject/body fields are used).
        Returns False when no template_id is given.
        """
        if context is None:
            context = {}
        if not template_id:
            return False
        template = self.browse(cr, uid, template_id, context)
        lang = self.render_template(cr, uid, template.lang, template.model, record_id, context)
        if lang:
            # Use translated template if necessary
            ctx = context.copy()
            ctx['lang'] = lang
            template = self.browse(cr, uid, template.id, ctx)
        else:
            template = self.browse(cr, uid, int(template_id), context)
        return template

    def onchange_model_id(self, cr, uid, ids, model_id, context=None):
        """Keep the related char field ``model`` in sync with ``model_id``."""
        mod_name = False
        if model_id:
            mod_name = self.pool.get('ir.model').browse(cr, uid, model_id, context).model
        return {'value': {'model': mod_name}}

    _columns = {
        'name': fields.char('Name'),
        # NOTE(review): the help text below contains a typo ("with with");
        # left untouched here because help strings are user-visible data.
        'model_id': fields.many2one('ir.model', 'Applies to', help="The kind of document with with this template can be used"),
        'model': fields.related('model_id', 'model', type='char', string='Related Document Model',
                                size=128, select=True, store=True, readonly=True),
        'lang': fields.char('Language',
                            help="Optional translation language (ISO code) to select when sending out an email. "
                                 "If not set, the english version will be used. "
                                 "This should usually be a placeholder expression "
                                 "that provides the appropriate language code, e.g. "
                                 "${object.partner_id.lang.code}.",
                            placeholder="${object.partner_id.lang.code}"),
        'user_signature': fields.boolean('Add Signature',
                                         help="If checked, the user's signature will be appended to the text version "
                                              "of the message"),
        'subject': fields.char('Subject', translate=True, help="Subject (placeholders may be used here)",),
        'email_from': fields.char('From',
                                  help="Sender address (placeholders may be used here). If not set, the default "
                                       "value will be the author's email alias if configured, or email address."),
        'email_to': fields.char('To (Emails)', help="Comma-separated recipient addresses (placeholders may be used here)"),
        'email_recipients': fields.char('To (Partners)', help="Comma-separated ids of recipient partners (placeholders may be used here)"),
        'email_cc': fields.char('Cc', help="Carbon copy recipients (placeholders may be used here)"),
        'reply_to': fields.char('Reply-To', help="Preferred response address (placeholders may be used here)"),
        'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing Mail Server', readonly=False,
                                          help="Optional preferred server for outgoing mails. If not set, the highest "
                                               "priority one will be used."),
        'body_html': fields.text('Body', translate=True, help="Rich-text/HTML version of the message (placeholders may be used here)"),
        'report_name': fields.char('Report Filename', translate=True,
                                   help="Name to use for the generated report file (may contain placeholders)\n"
                                        "The extension can be omitted and will then come from the report type."),
        'report_template': fields.many2one('ir.actions.report.xml', 'Optional report to print and attach'),
        'ref_ir_act_window': fields.many2one('ir.actions.act_window', 'Sidebar action', readonly=True,
                                             help="Sidebar action to make this template available on records "
                                                  "of the related document model"),
        'ref_ir_value': fields.many2one('ir.values', 'Sidebar Button', readonly=True,
                                        help="Sidebar button to open the sidebar action"),
        'attachment_ids': fields.many2many('ir.attachment', 'email_template_attachment_rel', 'email_template_id',
                                           'attachment_id', 'Attachments',
                                           help="You may attach files to this template, to be added to all "
                                                "emails created from this template"),
        'auto_delete': fields.boolean('Auto Delete', help="Permanently delete this email after sending it, to save space"),
        # Fake fields used to implement the placeholder assistant
        'model_object_field': fields.many2one('ir.model.fields', string="Field",
                                              help="Select target field from the related document model.\n"
                                                   "If it is a relationship field you will be able to select "
                                                   "a target field at the destination of the relationship."),
        'sub_object': fields.many2one('ir.model', 'Sub-model', readonly=True,
                                      help="When a relationship field is selected as first field, "
                                           "this field shows the document model the relationship goes to."),
        'sub_model_object_field': fields.many2one('ir.model.fields', 'Sub-field',
                                                  help="When a relationship field is selected as first field, "
                                                       "this field lets you select the target field within the "
                                                       "destination document model (sub-model)."),
        'null_value': fields.char('Default Value', help="Optional value to use if the target field is empty"),
        'copyvalue': fields.char('Placeholder Expression', help="Final placeholder expression, to be copy-pasted in the desired template field."),
    }

    _defaults = {
        'auto_delete': True,
    }

    def create_action(self, cr, uid, ids, context=None):
        """Create the sidebar action + ir.values button that expose this
        template on records of its target model, and store their ids on
        the template (``ref_ir_act_window`` / ``ref_ir_value``).
        """
        vals = {}
        action_obj = self.pool.get('ir.actions.act_window')
        data_obj = self.pool.get('ir.model.data')
        for template in self.browse(cr, uid, ids, context=context):
            src_obj = template.model_id.model
            model_data_id = data_obj._get_id(cr, uid, 'mail', 'email_compose_message_wizard_form')
            res_id = data_obj.browse(cr, uid, model_data_id, context=context).res_id
            button_name = _('Send Mail (%s)') % template.name
            vals['ref_ir_act_window'] = action_obj.create(cr, uid, {
                'name': button_name,
                'type': 'ir.actions.act_window',
                'res_model': 'mail.compose.message',
                'src_model': src_obj,
                'view_type': 'form',
                'context': "{'default_composition_mode': 'mass_mail', 'default_template_id' : %d, 'default_use_template': True}" % (template.id),
                'view_mode':'form,tree',
                'view_id': res_id,
                'target': 'new',
                'auto_refresh':1
            }, context)
            vals['ref_ir_value'] = self.pool.get('ir.values').create(cr, uid, {
                'name': button_name,
                'model': src_obj,
                'key2': 'client_action_multi',
                'value': "ir.actions.act_window," + str(vals['ref_ir_act_window']),
                'object': True,
            }, context)
        self.write(cr, uid, ids, {
            'ref_ir_act_window': vals.get('ref_ir_act_window',False),
            'ref_ir_value': vals.get('ref_ir_value',False),
        }, context)
        return True

    def unlink_action(self, cr, uid, ids, context=None):
        """Delete the sidebar action/button previously created by
        :meth:`create_action`, wrapping failures in a user-facing error.
        """
        for template in self.browse(cr, uid, ids, context=context):
            try:
                if template.ref_ir_act_window:
                    self.pool.get('ir.actions.act_window').unlink(cr, uid, template.ref_ir_act_window.id, context)
                if template.ref_ir_value:
                    ir_values_obj = self.pool.get('ir.values')
                    ir_values_obj.unlink(cr, uid, template.ref_ir_value.id, context)
            except Exception:
                raise osv.except_osv(_("Warning"), _("Deletion of the action record failed."))
        return True

    def unlink(self, cr, uid, ids, context=None):
        """Remove the linked sidebar actions before deleting the templates."""
        self.unlink_action(cr, uid, ids, context=context)
        return super(email_template, self).unlink(cr, uid, ids, context=context)

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a template, resetting the sidebar-action links (they
        belong to the original) and suffixing the name with "(copy)".
        """
        template = self.browse(cr, uid, id, context=context)
        if default is None:
            default = {}
        default = default.copy()
        default.update(
            name=_("%s (copy)") % (template.name),
            ref_ir_act_window=False,
            ref_ir_value=False)
        return super(email_template, self).copy(cr, uid, id, default, context)

    def build_expression(self, field_name, sub_field_name, null_value):
        """Returns a placeholder expression for use in a template field,
        based on the values provided in the placeholder assistant.

        :param field_name: main field name
        :param sub_field_name: sub field name (M2O)
        :param null_value: default value if the target value is empty
        :return: final placeholder expression
        """
        expression = ''
        if field_name:
            expression = "${object." + field_name
            if sub_field_name:
                expression += "." + sub_field_name
            if null_value:
                expression += " or '''%s'''" % null_value
            expression += "}"
        return expression

    def onchange_sub_model_object_value_field(self, cr, uid, ids, model_object_field, sub_model_object_field=False, null_value=None, context=None):
        """Placeholder-assistant onchange: recompute the sub-model and the
        final ``copyvalue`` expression whenever the selected field changes.
        """
        result = {
            'sub_object': False,
            'copyvalue': False,
            'sub_model_object_field': False,
            'null_value': False
            }
        if model_object_field:
            fields_obj = self.pool.get('ir.model.fields')
            field_value = fields_obj.browse(cr, uid, model_object_field, context)
            if field_value.ttype in ['many2one', 'one2many', 'many2many']:
                # Relational field: resolve the target model so the user
                # can pick a sub-field on it.
                res_ids = self.pool.get('ir.model').search(cr, uid, [('model', '=', field_value.relation)], context=context)
                sub_field_value = False
                if sub_model_object_field:
                    sub_field_value = fields_obj.browse(cr, uid, sub_model_object_field, context)
                if res_ids:
                    result.update({
                        'sub_object': res_ids[0],
                        'copyvalue': self.build_expression(field_value.name, sub_field_value and sub_field_value.name or False, null_value or False),
                        'sub_model_object_field': sub_model_object_field or False,
                        'null_value': null_value or False
                        })
            else:
                result.update({
                        'copyvalue': self.build_expression(field_value.name, False, null_value or False),
                        'null_value': null_value or False
                    })
        return {'value': result}

    def generate_email(self, cr, uid, template_id, res_id, context=None):
        """Generates an email from the template for given (model, res_id) pair.

        :param template_id: id of the template to render.
        :param res_id: id of the record to use for rendering the template (model
            is taken from template definition)
        :returns: a dict containing all relevant fields for creating a new
            mail.mail entry, with one extra key ``attachments``, in the
            format expected by :py:meth:`mail_thread.message_post`.
        """
        if context is None:
            context = {}
        report_xml_pool = self.pool.get('ir.actions.report.xml')
        template = self.get_email_template(cr, uid, template_id, res_id, context)
        values = {}
        # Render every addressing/content field against the target record.
        for field in ['subject', 'body_html', 'email_from',
                      'email_to', 'email_recipients', 'email_cc', 'reply_to']:
            values[field] = self.render_template(cr, uid, getattr(template, field),
                                                 template.model, res_id, context=context) \
                                                 or False
        if template.user_signature:
            signature = self.pool.get('res.users').browse(cr, uid, uid, context).signature
            values['body_html'] = tools.append_content_to_html(values['body_html'], signature)
        if values['body_html']:
            values['body'] = tools.html_sanitize(values['body_html'])
        values.update(mail_server_id=template.mail_server_id.id or False,
                      auto_delete=template.auto_delete,
                      model=template.model,
                      res_id=res_id or False)

        attachments = []
        # Add report in attachments
        if template.report_template:
            report_name = self.render_template(cr, uid, template.report_name, template.model, res_id, context=context)
            report_service = 'report.' + report_xml_pool.browse(cr, uid, template.report_template.id, context).report_name
            # Ensure report is rendered using template's language
            ctx = context.copy()
            if template.lang:
                ctx['lang'] = self.render_template(cr, uid, template.lang, template.model, res_id, context)
            service = netsvc.LocalService(report_service)
            (result, format) = service.create(cr, uid, [res_id], {'model': template.model}, ctx)
            result = base64.b64encode(result)
            if not report_name:
                report_name = report_service
            ext = "." + format
            if not report_name.endswith(ext):
                report_name += ext
            attachments.append((report_name, result))

        attachment_ids = []
        # Add template attachments
        for attach in template.attachment_ids:
            attachment_ids.append(attach.id)

        values['attachments'] = attachments
        values['attachment_ids'] = attachment_ids
        return values

    def send_mail(self, cr, uid, template_id, res_id, force_send=False, context=None):
        """Generates a new mail message for the given template and record,
        and schedules it for delivery through the ``mail`` module's scheduler.

        :param int template_id: id of the template to render
        :param int res_id: id of the record to render the template with
            (model is taken from the template)
        :param bool force_send: if True, the generated mail.message is
            immediately sent after being created, as if the scheduler
            was executed for this message only.
        :returns: id of the mail.message that was created
        """
        if context is None:
            context = {}
        mail_mail = self.pool.get('mail.mail')
        ir_attachment = self.pool.get('ir.attachment')

        # create a mail_mail based on values, without attachments
        values = self.generate_email(cr, uid, template_id, res_id, context=context)
        assert values.get('email_from'), 'email_from is missing or empty after template rendering, send_mail() cannot proceed'
        del values['email_recipients']  # TODO Properly use them.
        attachment_ids = values.pop('attachment_ids', [])
        attachments = values.pop('attachments', [])
        msg_id = mail_mail.create(cr, uid, values, context=context)
        mail = mail_mail.browse(cr, uid, msg_id, context=context)

        # manage attachments: generated reports are stored as ir.attachment
        # linked to the underlying mail.message.
        for attachment in attachments:
            attachment_data = {
                'name': attachment[0],
                'datas_fname': attachment[0],
                'datas': attachment[1],
                'res_model': 'mail.message',
                'res_id': mail.mail_message_id.id,
            }
            context.pop('default_type', None)
            attachment_ids.append(ir_attachment.create(cr, uid, attachment_data, context=context))
        if attachment_ids:
            values['attachment_ids'] = [(6, 0, attachment_ids)]
            mail_mail.write(cr, uid, msg_id, {'attachment_ids': [(6, 0, attachment_ids)]}, context=context)

        if force_send:
            mail_mail.send(cr, uid, [msg_id], context=context)
        return msg_id
/*
* 'OpenSSL for Ruby' project
* Copyright (C) 2001-2002 Michal Rokos <m.rokos@sh.cvut.cz>
* All rights reserved.
*/
/*
* This program is licensed under the same licence as Ruby.
* (See the file 'COPYING'.)
*/
#if !defined(_OSSL_SSL_H_)
#define _OSSL_SSL_H_

/*
 * Extract the wrapped SSL* from a Ruby object, raising a RuntimeError
 * when the handle has not been set up yet.
 */
#define GetSSL(obj, ssl) do { \
	TypedData_Get_Struct((obj), SSL, &ossl_ssl_type, (ssl)); \
	if (!(ssl)) { \
		ossl_raise(rb_eRuntimeError, "SSL is not initialized"); \
	} \
} while (0)

/*
 * Same as GetSSL, but for the wrapped SSL_SESSION* of a session object.
 */
#define GetSSLSession(obj, sess) do { \
	TypedData_Get_Struct((obj), SSL_SESSION, &ossl_ssl_session_type, (sess)); \
	if (!(sess)) { \
		ossl_raise(rb_eRuntimeError, "SSL Session wasn't initialized."); \
	} \
} while (0)

/* TypedData descriptors for the wrapped OpenSSL structs. */
extern const rb_data_type_t ossl_ssl_type;
extern const rb_data_type_t ossl_ssl_session_type;
/* Ruby module/class handles defined in ossl_ssl.c. */
extern VALUE mSSL;
extern VALUE cSSLSocket;
extern VALUE cSSLSession;

/* Initializers called from Init_openssl(). */
void Init_ossl_ssl(void);
void Init_ossl_ssl_session(void);

#endif /* _OSSL_SSL_H_ */
# test_getopt.py
# David Goodger <dgoodger@bigfoot.com> 2000-08-19
from test.test_support import verbose, run_doctest, run_unittest, EnvironmentVarGuard
import unittest
import getopt
sentinel = object()
class GetoptTests(unittest.TestCase):
    """Unit tests for the ``getopt`` module (both internals and API)."""

    def setUp(self):
        # POSIXLY_CORRECT switches gnu_getopt() into POSIX scanning mode;
        # remove it so every test starts from a known environment.
        self.env = EnvironmentVarGuard()
        if "POSIXLY_CORRECT" in self.env:
            del self.env["POSIXLY_CORRECT"]

    def tearDown(self):
        # Restore any environment variables changed during the test.
        self.env.__exit__()
        del self.env

    def assertError(self, *args, **kwargs):
        """Shorthand: assert that the given call raises GetoptError."""
        self.assertRaises(getopt.GetoptError, *args, **kwargs)

    def test_short_has_arg(self):
        self.assertTrue(getopt.short_has_arg('a', 'a:'))
        self.assertFalse(getopt.short_has_arg('a', 'a'))
        self.assertError(getopt.short_has_arg, 'a', 'b')

    def test_long_has_args(self):
        has_arg, option = getopt.long_has_args('abc', ['abc='])
        self.assertTrue(has_arg)
        self.assertEqual(option, 'abc')

        has_arg, option = getopt.long_has_args('abc', ['abc'])
        self.assertFalse(has_arg)
        self.assertEqual(option, 'abc')

        # Unambiguous prefix of a single long option is accepted.
        has_arg, option = getopt.long_has_args('abc', ['abcd'])
        self.assertFalse(has_arg)
        self.assertEqual(option, 'abcd')

        self.assertError(getopt.long_has_args, 'abc', ['def'])
        self.assertError(getopt.long_has_args, 'abc', [])
        # Ambiguous prefix must raise.
        self.assertError(getopt.long_has_args, 'abc', ['abcd','abcde'])

    def test_do_shorts(self):
        opts, args = getopt.do_shorts([], 'a', 'a', [])
        self.assertEqual(opts, [('-a', '')])
        self.assertEqual(args, [])

        opts, args = getopt.do_shorts([], 'a1', 'a:', [])
        self.assertEqual(opts, [('-a', '1')])
        self.assertEqual(args, [])

        #opts, args = getopt.do_shorts([], 'a=1', 'a:', [])
        #self.assertEqual(opts, [('-a', '1')])
        #self.assertEqual(args, [])

        # Option argument may be taken from the following word.
        opts, args = getopt.do_shorts([], 'a', 'a:', ['1'])
        self.assertEqual(opts, [('-a', '1')])
        self.assertEqual(args, [])

        opts, args = getopt.do_shorts([], 'a', 'a:', ['1', '2'])
        self.assertEqual(opts, [('-a', '1')])
        self.assertEqual(args, ['2'])

        self.assertError(getopt.do_shorts, [], 'a1', 'a', [])
        self.assertError(getopt.do_shorts, [], 'a', 'a:', [])

    def test_do_longs(self):
        opts, args = getopt.do_longs([], 'abc', ['abc'], [])
        self.assertEqual(opts, [('--abc', '')])
        self.assertEqual(args, [])

        opts, args = getopt.do_longs([], 'abc=1', ['abc='], [])
        self.assertEqual(opts, [('--abc', '1')])
        self.assertEqual(args, [])

        opts, args = getopt.do_longs([], 'abc=1', ['abcd='], [])
        self.assertEqual(opts, [('--abcd', '1')])
        self.assertEqual(args, [])

        opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], [])
        self.assertEqual(opts, [('--abc', '')])
        self.assertEqual(args, [])

        # Much like the preceding, except with a non-alpha character ("-") in
        # option name that precedes "="; failed in
        # http://python.org/sf/126863
        opts, args = getopt.do_longs([], 'foo=42', ['foo-bar', 'foo=',], [])
        self.assertEqual(opts, [('--foo', '42')])
        self.assertEqual(args, [])

        self.assertError(getopt.do_longs, [], 'abc=1', ['abc'], [])
        self.assertError(getopt.do_longs, [], 'abc', ['abc='], [])

    def test_getopt(self):
        # note: the empty string between '-a' and '--beta' is significant:
        # it simulates an empty string option argument ('-a ""') on the
        # command line.
        cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a',
                   '', '--beta', 'arg1', 'arg2']

        opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta'])
        self.assertEqual(opts, [('-a', '1'), ('-b', ''),
                                ('--alpha', '2'), ('--beta', ''),
                                ('-a', '3'), ('-a', ''), ('--beta', '')])
        # Note ambiguity of ('-b', '') and ('-a', '') above. This must be
        # accounted for in the code that calls getopt().
        self.assertEqual(args, ['arg1', 'arg2'])

        self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta'])

    def test_gnu_getopt(self):
        # Test handling of GNU style scanning mode.
        cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2']

        # GNU style
        opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
        self.assertEqual(args, ['arg1'])
        self.assertEqual(opts, [('-a', ''), ('-b', '1'),
                                ('--alpha', ''), ('--beta', '2')])

        # recognize "-" as an argument
        opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', [])
        self.assertEqual(args, ['-'])
        self.assertEqual(opts, [('-a', ''), ('-b', '-')])

        # Posix style via +
        opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta='])
        self.assertEqual(opts, [('-a', '')])
        self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])

        # Posix style via POSIXLY_CORRECT
        self.env["POSIXLY_CORRECT"] = "1"
        opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
        self.assertEqual(opts, [('-a', '')])
        self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])

    def test_libref_examples(self):
        # The string below is executed as doctests by run_doctest; blank
        # lines separating narrative from examples are significant.
        s = """
        Examples from the Library Reference: Doc/lib/libgetopt.tex

        An example using only Unix style options:

        >>> import getopt
        >>> args = '-a -b -cfoo -d bar a1 a2'.split()
        >>> args
        ['-a', '-b', '-cfoo', '-d', 'bar', 'a1', 'a2']
        >>> optlist, args = getopt.getopt(args, 'abc:d:')
        >>> optlist
        [('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')]
        >>> args
        ['a1', 'a2']

        Using long option names is equally easy:

        >>> s = '--condition=foo --testing --output-file abc.def -x a1 a2'
        >>> args = s.split()
        >>> args
        ['--condition=foo', '--testing', '--output-file', 'abc.def', '-x', 'a1', 'a2']
        >>> optlist, args = getopt.getopt(args, 'x', [
        ...     'condition=', 'output-file=', 'testing'])
        >>> optlist
        [('--condition', 'foo'), ('--testing', ''), ('--output-file', 'abc.def'), ('-x', '')]
        >>> args
        ['a1', 'a2']
        """
        import types
        m = types.ModuleType("libreftest", s)
        run_doctest(m, verbose)

    def test_issue4629(self):
        # '--help=' with an empty value must parse, and must still fail
        # when the long option takes no argument.
        longopts, shortopts = getopt.getopt(['--help='], '', ['help='])
        self.assertEqual(longopts, [('--help', '')])
        longopts, shortopts = getopt.getopt(['--help=x'], '', ['help='])
        self.assertEqual(longopts, [('--help', 'x')])
        self.assertRaises(getopt.GetoptError, getopt.getopt, ['--help='], '', ['help'])
def test_main():
    """Entry point used by the regression-test framework."""
    run_unittest(GetoptTests)

if __name__ == "__main__":
    test_main()
"""
Support for statistics for sensor values.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.statistics/
"""
import logging
import statistics
from collections import deque
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME, CONF_ENTITY_ID, EVENT_HOMEASSISTANT_START, STATE_UNKNOWN,
ATTR_UNIT_OF_MEASUREMENT)
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_state_change
from homeassistant.util import dt as dt_util
from homeassistant.components.recorder.util import session_scope, execute
_LOGGER = logging.getLogger(__name__)

# Names of the state attributes under which the computed statistics are
# published by StatisticsSensor.
ATTR_AVERAGE_CHANGE = 'average_change'
ATTR_CHANGE = 'change'
ATTR_CHANGE_RATE = 'change_rate'
ATTR_COUNT = 'count'
ATTR_MAX_AGE = 'max_age'
ATTR_MAX_VALUE = 'max_value'
ATTR_MEAN = 'mean'
ATTR_MEDIAN = 'median'
ATTR_MIN_AGE = 'min_age'
ATTR_MIN_VALUE = 'min_value'
ATTR_SAMPLING_SIZE = 'sampling_size'
ATTR_STANDARD_DEVIATION = 'standard_deviation'
ATTR_TOTAL = 'total'
ATTR_VARIANCE = 'variance'

# Configuration keys accepted by this platform.
CONF_SAMPLING_SIZE = 'sampling_size'
CONF_MAX_AGE = 'max_age'
CONF_PRECISION = 'precision'

DEFAULT_NAME = 'Stats'
DEFAULT_SIZE = 20
DEFAULT_PRECISION = 2

ICON = 'mdi:calculator'

# Extend the base sensor schema with this platform's options.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_ENTITY_ID): cv.entity_id,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_SAMPLING_SIZE, default=DEFAULT_SIZE):
        vol.All(vol.Coerce(int), vol.Range(min=1)),
    vol.Optional(CONF_MAX_AGE): cv.time_period,
    vol.Optional(CONF_PRECISION, default=DEFAULT_PRECISION):
        vol.Coerce(int)
})
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up the Statistics sensor."""
    sensor = StatisticsSensor(
        config.get(CONF_ENTITY_ID),
        config.get(CONF_NAME),
        config.get(CONF_SAMPLING_SIZE),
        config.get(CONF_MAX_AGE),  # optional; None when not configured
        config.get(CONF_PRECISION),
    )
    # Request an immediate first update after the entity is added.
    async_add_entities([sensor], True)
    return True
class StatisticsSensor(Entity):
    """Representation of a Statistics sensor."""

    def __init__(self, entity_id, name, sampling_size, max_age,
                 precision):
        """Initialize the Statistics sensor.

        :param entity_id: source entity whose state changes are sampled
        :param name: base friendly name (suffixed with 'mean' or 'count')
        :param sampling_size: maximum number of retained samples
        :param max_age: optional timedelta after which samples are purged
        :param precision: number of decimals for rounded statistics
        """
        self._entity_id = entity_id
        # Binary sensor states cannot be averaged; for those only the
        # sample count is exposed as the state.
        self.is_binary = self._entity_id.split('.')[0] == 'binary_sensor'
        if not self.is_binary:
            self._name = '{} {}'.format(name, ATTR_MEAN)
        else:
            self._name = '{} {}'.format(name, ATTR_COUNT)
        self._sampling_size = sampling_size
        self._max_age = max_age
        self._precision = precision
        self._unit_of_measurement = None
        # Bounded FIFOs of sampled values and their update timestamps;
        # the oldest entries fall off automatically at maxlen.
        self.states = deque(maxlen=self._sampling_size)
        self.ages = deque(maxlen=self._sampling_size)

        # Computed statistics; filled in by async_update().
        self.count = 0
        self.mean = self.median = self.stdev = self.variance = None
        self.total = self.min = self.max = None
        self.min_age = self.max_age = None
        self.change = self.average_change = self.change_rate = None

    async def async_added_to_hass(self):
        """Register callbacks."""
        @callback
        def async_stats_sensor_state_listener(entity, old_state, new_state):
            """Handle the sensor state changes."""
            # Mirror the source entity's unit so the statistics read in
            # the same unit as the underlying sensor.
            self._unit_of_measurement = new_state.attributes.get(
                ATTR_UNIT_OF_MEASUREMENT)

            self._add_state_to_queue(new_state)

            self.async_schedule_update_ha_state(True)

        @callback
        def async_stats_sensor_startup(event):
            """Add listener and get recorded state."""
            _LOGGER.debug("Startup for %s", self.entity_id)

            async_track_state_change(
                self.hass, self._entity_id, async_stats_sensor_state_listener)

            if 'recorder' in self.hass.config.components:
                # Only use the database if it's configured
                self.hass.async_create_task(
                    self._async_initialize_from_database()
                )

        # Defer listener registration until HA is fully started.
        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_START, async_stats_sensor_startup)

    def _add_state_to_queue(self, new_state):
        """Add the state to the queue."""
        if new_state.state == STATE_UNKNOWN:
            return

        try:
            if self.is_binary:
                # Binary states ('on'/'off') are kept as raw strings.
                self.states.append(new_state.state)
            else:
                self.states.append(float(new_state.state))
            self.ages.append(new_state.last_updated)
        except ValueError:
            _LOGGER.error("%s: parsing error, expected number and received %s",
                          self.entity_id, new_state.state)

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        # Numeric source: mean of the samples; binary source: count.
        return self.mean if not self.is_binary else self.count

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement if not self.is_binary else None

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def device_state_attributes(self):
        """Return the state attributes of the sensor."""
        # Binary source exposes no numeric attributes (implicit None).
        if not self.is_binary:
            return {
                ATTR_SAMPLING_SIZE: self._sampling_size,
                ATTR_COUNT: self.count,
                ATTR_MEAN: self.mean,
                ATTR_MEDIAN: self.median,
                ATTR_STANDARD_DEVIATION: self.stdev,
                ATTR_VARIANCE: self.variance,
                ATTR_TOTAL: self.total,
                ATTR_MIN_VALUE: self.min,
                ATTR_MAX_VALUE: self.max,
                ATTR_MIN_AGE: self.min_age,
                ATTR_MAX_AGE: self.max_age,
                ATTR_CHANGE: self.change,
                ATTR_AVERAGE_CHANGE: self.average_change,
                ATTR_CHANGE_RATE: self.change_rate,
            }

    @property
    def icon(self):
        """Return the icon to use in the frontend, if any."""
        return ICON

    def _purge_old(self):
        """Remove states which are older than self._max_age."""
        now = dt_util.utcnow()

        _LOGGER.debug("%s: purging records older then %s(%s)",
                      self.entity_id, dt_util.as_local(now - self._max_age),
                      self._max_age)

        # ages/states are appended in lockstep, so popping both heads
        # keeps them aligned.
        while self.ages and (now - self.ages[0]) > self._max_age:
            _LOGGER.debug("%s: purging record with datetime %s(%s)",
                          self.entity_id, dt_util.as_local(self.ages[0]),
                          (now - self.ages[0]))
            self.ages.popleft()
            self.states.popleft()

    async def async_update(self):
        """Get the latest data and updates the states."""
        _LOGGER.debug("%s: updating statistics.", self.entity_id)
        if self._max_age is not None:
            self._purge_old()

        self.count = len(self.states)

        if not self.is_binary:
            try:  # require only one data point
                self.mean = round(statistics.mean(self.states),
                                  self._precision)
                self.median = round(statistics.median(self.states),
                                    self._precision)
            except statistics.StatisticsError as err:
                _LOGGER.debug("%s: %s", self.entity_id, err)
                self.mean = self.median = STATE_UNKNOWN

            try:  # require at least two data points
                self.stdev = round(statistics.stdev(self.states),
                                   self._precision)
                self.variance = round(statistics.variance(self.states),
                                      self._precision)
            except statistics.StatisticsError as err:
                _LOGGER.debug("%s: %s", self.entity_id, err)
                self.stdev = self.variance = STATE_UNKNOWN

            if self.states:
                self.total = round(sum(self.states), self._precision)
                self.min = round(min(self.states), self._precision)
                self.max = round(max(self.states), self._precision)

                self.min_age = self.ages[0]
                self.max_age = self.ages[-1]

                # Change over the whole window, its per-sample average,
                # and its rate per second across the window's time span.
                self.change = self.states[-1] - self.states[0]
                self.average_change = self.change
                self.change_rate = 0

                if len(self.states) > 1:
                    self.average_change /= len(self.states) - 1

                    time_diff = (self.max_age - self.min_age).total_seconds()
                    if time_diff > 0:
                        self.change_rate = self.average_change / time_diff

                self.change = round(self.change, self._precision)
                self.average_change = round(self.average_change,
                                            self._precision)
                self.change_rate = round(self.change_rate, self._precision)
            else:
                # No samples: reset everything to unknown/now.
                self.total = self.min = self.max = STATE_UNKNOWN
                self.min_age = self.max_age = dt_util.utcnow()
                self.change = self.average_change = STATE_UNKNOWN
                self.change_rate = STATE_UNKNOWN

    async def _async_initialize_from_database(self):
        """Initialize the list of states from the database.

        The query will get the list of states in DESCENDING order so that we
        can limit the result to self._sample_size. Afterwards reverse the
        list so that we get it in the right order again.

        If MaxAge is provided then query will restrict to entries younger then
        current datetime - MaxAge.
        """
        from homeassistant.components.recorder.models import States
        _LOGGER.debug("%s: initializing values from the database",
                      self.entity_id)

        with session_scope(hass=self.hass) as session:
            query = session.query(States)\
                .filter(States.entity_id == self._entity_id.lower())

            if self._max_age is not None:
                records_older_then = dt_util.utcnow() - self._max_age
                _LOGGER.debug("%s: retrieve records not older then %s",
                              self.entity_id, records_older_then)
                query = query.filter(States.last_updated >= records_older_then)
            else:
                _LOGGER.debug("%s: retrieving all records.", self.entity_id)

            query = query\
                .order_by(States.last_updated.desc())\
                .limit(self._sampling_size)
            states = execute(query)

        # Replay oldest-first so deque ordering matches live updates.
        for state in reversed(states):
            self._add_state_to_queue(state)

        self.async_schedule_update_ha_state(True)

        _LOGGER.debug("%s: initializing from database completed",
                      self.entity_id)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.