code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
<?php
namespace SMW;
use SMWQueryResult;
use Title;
/**
* Printer for embedded data.
*
* Embeds in the page output the contents of the pages in the query result set.
* Printouts are ignored: it only matters which pages were returned by the query.
* The optional "titlestyle" formatting parameter can be used to apply a format to
* the headings for the page titles. If "titlestyle" is not specified, a <h1> tag is
* used.
*
* @license GNU GPL v2+
* @since 1.7
*
* @author Fernando Correia
* @author Markus Krötzsch
*/
class EmbeddedResultPrinter extends ResultPrinter {

	/** @var bool Whether a heading is printed above each embedded page (inverse of the "embedonly" parameter) */
	protected $m_showhead;
	/** @var string Heading/list style requested via the "embedformat" parameter (h1..h6, ol or ul) */
	protected $m_embedformat;

	/**
	 * Reads the format-specific parameters into member variables.
	 *
	 * @see SMWResultPrinter::handleParameters
	 *
	 * @since 1.7
	 *
	 * @param array $params
	 * @param $outputmode
	 */
	protected function handleParameters( array $params, $outputmode ) {
		parent::handleParameters( $params, $outputmode );

		// "embedonly" suppresses the per-page heading
		$this->m_showhead = !$params['embedonly'];
		$this->m_embedformat = $params['embedformat'];
	}

	/**
	 * Human-readable name of this result format, from the i18n message.
	 */
	public function getName() {
		return wfMessage( 'smw_printername_embedded' )->text();
	}

	/**
	 * Builds the wikitext that transcludes every page in the result set.
	 *
	 * Each result page is emitted as a `{{...}}` transclusion, optionally
	 * preceded by a heading. The current page itself is never transcluded
	 * (its title is printed in bold instead) to block infinite recursion.
	 */
	protected function getResultText( SMWQueryResult $res, $outputMode ) {
		global $wgParser;
		// No page should embed itself, find out who we are:
		if ( $wgParser->getTitle() instanceof Title ) {
			$title = $wgParser->getTitle()->getPrefixedText();
		} else { // this is likely to be in vain -- this case is typical if we run on special pages
			global $wgTitle;
			$title = $wgTitle->getPrefixedText();
		}

		// print header
		$result = '';
		$footer = '';
		$embstart = '';
		$embend = '';
		$headstart = '';
		$headend = '';
		// The produced wikitext contains transclusions that the parser must expand
		$this->hasTemplates = true;

		// Choose the wrappers for headings/list items depending on "embedformat".
		// Any other value leaves all wrappers empty (plain concatenation).
		switch ( $this->m_embedformat ) {
			case 'h1': case 'h2': case 'h3': case 'h4': case 'h5': case 'h6':
				$headstart = '<' . $this->m_embedformat . '>';
				$headend = '</' . $this->m_embedformat . ">\n";
				break;
			case 'ul': case 'ol':
				$result .= '<' . $this->m_embedformat . '>';
				$footer = '</' . $this->m_embedformat . '>';
				$embstart = '<li>';
				$headend = "<br />\n";
				$embend = "</li>\n";
				break;
		}

		// Print all result rows:
		foreach ( $res->getResults() as $diWikiPage ) {
			if ( $diWikiPage instanceof DIWikiPage ) { // ensure that we deal with title-likes
				$dvWikiPage = DataValueFactory::getInstance()->newDataItemValue( $diWikiPage, null );
				$result .= $embstart;

				if ( $this->m_showhead ) {
					$result .= $headstart . $dvWikiPage->getLongWikiText( $this->mLinker ) . $headend;
				}

				if ( $dvWikiPage->getLongWikiText() != $title ) {
					if ( $diWikiPage->getNamespace() == NS_MAIN ) {
						// Main-namespace pages need a leading colon to be transcluded as pages
						$result .= '{{:' . $diWikiPage->getDBkey() . '}}';
					} else {
						$result .= '{{' . $dvWikiPage->getLongWikiText() . '}}';
					}
				} else { // block recursion
					$result .= '<b>' . $dvWikiPage->getLongWikiText() . '</b>';
				}

				$result .= $embend;
			}
		}

		// show link to more results
		if ( $this->linkFurtherResults( $res ) ) {
			$result .= $embstart
				. $this->getFurtherResultsLink( $res, $outputMode )->getText( SMW_OUTPUT_WIKI, $this->mLinker )
				. $embend;
		}
		$result .= $footer;

		return $result;
	}

	/**
	 * Declares the parameters accepted by this format in addition to the
	 * common result-printer parameters.
	 */
	public function getParameters() {
		$params = parent::getParameters();

		$params[] = array(
			'name' => 'embedformat',
			'message' => 'smw-paramdesc-embedformat',
			'default' => 'h1',
			'values' => array( 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'ol', 'ul' ),
		);

		$params[] = array(
			'name' => 'embedonly',
			'type' => 'boolean',
			'message' => 'smw-paramdesc-embedonly',
			'default' => false,
		);

		return $params;
	}
}
| owen-kellie-smith/mediawiki | wiki/extensions/SemanticMediaWiki/includes/queryprinters/EmbeddedResultPrinter.php | PHP | mit | 3,564 |
require File.dirname(__FILE__) + '/../spec_helper'
require 'mspec/guards/conflict'
# Behaviour of the #conflicts_with guard: the guarded block runs only when
# none of the named constants already exist on Object. Object.constants is
# stubbed because its real contents vary between implementations.
describe Object, "#conflicts_with" do
  before :each do
    ScratchPad.clear
  end

  it "does not yield if Object.constants includes any of the arguments" do
    Object.stub!(:constants).and_return(["SomeClass", "OtherClass"])
    conflicts_with(:SomeClass, :AClass, :BClass) { ScratchPad.record :yield }
    ScratchPad.recorded.should_not == :yield
  end

  # Some implementations return constants as Symbols rather than Strings
  it "does not yield if Object.constants (as Symbols) includes any of the arguments" do
    Object.stub!(:constants).and_return([:SomeClass, :OtherClass])
    conflicts_with(:SomeClass, :AClass, :BClass) { ScratchPad.record :yield }
    ScratchPad.recorded.should_not == :yield
  end

  it "yields if Object.constants does not include any of the arguments" do
    Object.stub!(:constants).and_return(["SomeClass", "OtherClass"])
    conflicts_with(:AClass, :BClass) { ScratchPad.record :yield }
    ScratchPad.recorded.should == :yield
  end

  it "yields if Object.constants (as Symbols) does not include any of the arguments" do
    Object.stub!(:constants).and_return([:SomeClass, :OtherClass])
    conflicts_with(:AClass, :BClass) { ScratchPad.record :yield }
    ScratchPad.recorded.should == :yield
  end
end
# Guard bookkeeping: the ConflictsGuard instance gets the right name and is
# always unregistered, even when the guarded block raises.
describe Object, "#conflicts_with" do
  before :each do
    @guard = ConflictsGuard.new
    ConflictsGuard.stub!(:new).and_return(@guard)
  end

  it "sets the name of the guard to :conflicts_with" do
    conflicts_with(:AClass, :BClass) { }
    @guard.name.should == :conflicts_with
  end

  it "calls #unregister even when an exception is raised in the guard block" do
    @guard.should_receive(:unregister)
    lambda do
      conflicts_with(:AClass, :BClass) { raise Exception }
    end.should raise_error(Exception)
  end
end
| timfel/mspec | spec/guards/conflict_spec.rb | Ruby | mit | 1,789 |
# -*- encoding: ascii-8bit -*-
require File.expand_path('../../../spec_helper', __FILE__)
# Guard: these examples run only on implementations that support the
# Encoding feature.
with_feature :encoding do
  # TODO: add IO
  describe "Encoding.compatible? String, String" do
    describe "when the first's Encoding is valid US-ASCII" do
      before :each do
        @str = "abc".force_encoding Encoding::US_ASCII
      end

      it "returns US-ASCII when the second's is US-ASCII" do
        Encoding.compatible?(@str, "def".encode("us-ascii")).should == Encoding::US_ASCII
      end

      it "returns US-ASCII if the second String is ASCII-8BIT and ASCII only" do
        Encoding.compatible?(@str, "\x7f").should == Encoding::US_ASCII
      end

      it "returns ASCII-8BIT if the second String is ASCII-8BIT but not ASCII only" do
        Encoding.compatible?(@str, "\xff").should == Encoding::ASCII_8BIT
      end

      it "returns US-ASCII if the second String is UTF-8 and ASCII only" do
        Encoding.compatible?(@str, "\x7f".encode("utf-8")).should == Encoding::US_ASCII
      end

      it "returns UTF-8 if the second String is UTF-8 but not ASCII only" do
        Encoding.compatible?(@str, "\u3042".encode("utf-8")).should == Encoding::UTF_8
      end
    end

    describe "when the first's Encoding is ASCII compatible and ASCII only" do
      it "returns the first's Encoding if the second is ASCII compatible and ASCII only" do
        [ [Encoding, "abc".force_encoding("UTF-8"), "123".force_encoding("Shift_JIS"), Encoding::UTF_8],
          [Encoding, "123".force_encoding("Shift_JIS"), "abc".force_encoding("UTF-8"), Encoding::Shift_JIS]
        ].should be_computed_by(:compatible?)
      end

      it "returns the first's Encoding if the second is ASCII compatible and ASCII only" do
        [ [Encoding, "abc".force_encoding("ASCII-8BIT"), "123".force_encoding("US-ASCII"), Encoding::ASCII_8BIT],
          [Encoding, "123".force_encoding("US-ASCII"), "abc".force_encoding("ASCII-8BIT"), Encoding::US_ASCII]
        ].should be_computed_by(:compatible?)
      end

      it "returns the second's Encoding if the second is ASCII compatible but not ASCII only" do
        [ [Encoding, "abc".force_encoding("UTF-8"), "\xff".force_encoding("Shift_JIS"), Encoding::Shift_JIS],
          [Encoding, "123".force_encoding("Shift_JIS"), "\xff".force_encoding("UTF-8"), Encoding::UTF_8],
          [Encoding, "abc".force_encoding("ASCII-8BIT"), "\xff".force_encoding("US-ASCII"), Encoding::US_ASCII],
          [Encoding, "123".force_encoding("US-ASCII"), "\xff".force_encoding("ASCII-8BIT"), Encoding::ASCII_8BIT],
        ].should be_computed_by(:compatible?)
      end

      it "returns nil if the second's Encoding is not ASCII compatible" do
        a = "abc".force_encoding("UTF-8")
        b = "123".force_encoding("UTF-16LE")
        Encoding.compatible?(a, b).should be_nil
      end
    end

    describe "when the first's Encoding is ASCII compatible but not ASCII only" do
      it "returns the first's Encoding if the second's is valid US-ASCII" do
        Encoding.compatible?("\xff", "def".encode("us-ascii")).should == Encoding::ASCII_8BIT
      end

      it "returns the first's Encoding if the second's is UTF-8 and ASCII only" do
        Encoding.compatible?("\xff", "\u{7f}".encode("utf-8")).should == Encoding::ASCII_8BIT
      end

      it "returns nil if the second encoding is ASCII compatible but neither String's encoding is ASCII only" do
        Encoding.compatible?("\xff", "\u3042".encode("utf-8")).should be_nil
      end
    end

    # UTF-7 serves as the canonical non-ASCII-compatible (dummy) encoding
    describe "when the first's Encoding is not ASCII compatible" do
      before :each do
        @str = "abc".force_encoding Encoding::UTF_7
      end

      it "returns nil when the second String is US-ASCII" do
        Encoding.compatible?(@str, "def".encode("us-ascii")).should be_nil
      end

      it "returns nil when the second String is ASCII-8BIT and ASCII only" do
        Encoding.compatible?(@str, "\x7f").should be_nil
      end

      it "returns nil when the second String is ASCII-8BIT but not ASCII only" do
        Encoding.compatible?(@str, "\xff").should be_nil
      end

      it "returns the Encoding when the second's Encoding is not ASCII compatible but the same as the first's Encoding" do
        encoding = Encoding.compatible?(@str, "def".force_encoding("utf-7"))
        encoding.should == Encoding::UTF_7
      end
    end

    # "\xff" is not a valid UTF-8 byte sequence
    describe "when the first's Encoding is invalid" do
      before :each do
        @str = "\xff".force_encoding Encoding::UTF_8
      end

      it "returns the first's Encoding when the second's Encoding is US-ASCII" do
        Encoding.compatible?(@str, "def".encode("us-ascii")).should == Encoding::UTF_8
      end

      it "returns the first's Encoding when the second String is ASCII only" do
        Encoding.compatible?(@str, "\x7f").should == Encoding::UTF_8
      end

      it "returns nil when the second's Encoding is ASCII-8BIT but not ASCII only" do
        Encoding.compatible?(@str, "\xff").should be_nil
      end

      it "returns nil when the second's Encoding is invalid and ASCII only" do
        Encoding.compatible?(@str, "\x7f".force_encoding("utf-16be")).should be_nil
      end

      it "returns nil when the second's Encoding is invalid and not ASCII only" do
        Encoding.compatible?(@str, "\xff".force_encoding("utf-16be")).should be_nil
      end

      it "returns the Encoding when the second's Encoding is invalid but the same as the first" do
        Encoding.compatible?(@str, @str).should == Encoding::UTF_8
      end
    end
  end
describe "Encoding.compatible? String, Regexp" do
it "returns US-ASCII if both are US-ASCII" do
str = "abc".force_encoding("us-ascii")
Encoding.compatible?(str, /abc/).should == Encoding::US_ASCII
end
it "returns the String's Encoding if it is not US-ASCII but both are ASCII only" do
[ [Encoding, "abc", Encoding::ASCII_8BIT],
[Encoding, "abc".encode("utf-8"), Encoding::UTF_8],
[Encoding, "abc".encode("euc-jp"), Encoding::EUC_JP],
[Encoding, "abc".encode("shift_jis"), Encoding::Shift_JIS],
].should be_computed_by(:compatible?, /abc/)
end
it "returns the String's Encoding if the String is not ASCII only" do
[ [Encoding, "\xff", Encoding::ASCII_8BIT],
[Encoding, "\u3042".encode("utf-8"), Encoding::UTF_8],
[Encoding, "\xa4\xa2".force_encoding("euc-jp"), Encoding::EUC_JP],
[Encoding, "\x82\xa0".force_encoding("shift_jis"), Encoding::Shift_JIS],
].should be_computed_by(:compatible?, /abc/)
end
end
describe "Encoding.compatible? String, Symbol" do
it "returns US-ASCII if both are ASCII only" do
str = "abc".force_encoding("us-ascii")
Encoding.compatible?(str, :abc).should == Encoding::US_ASCII
end
it "returns the String's Encoding if it is not US-ASCII but both are ASCII only" do
[ [Encoding, "abc", Encoding::ASCII_8BIT],
[Encoding, "abc".encode("utf-8"), Encoding::UTF_8],
[Encoding, "abc".encode("euc-jp"), Encoding::EUC_JP],
[Encoding, "abc".encode("shift_jis"), Encoding::Shift_JIS],
].should be_computed_by(:compatible?, :abc)
end
it "returns the String's Encoding if the String is not ASCII only" do
[ [Encoding, "\xff", Encoding::ASCII_8BIT],
[Encoding, "\u3042".encode("utf-8"), Encoding::UTF_8],
[Encoding, "\xa4\xa2".force_encoding("euc-jp"), Encoding::EUC_JP],
[Encoding, "\x82\xa0".force_encoding("shift_jis"), Encoding::Shift_JIS],
].should be_computed_by(:compatible?, :abc)
end
end
describe "Encoding.compatible? Regexp, String" do
it "returns US-ASCII if both are US-ASCII" do
str = "abc".force_encoding("us-ascii")
Encoding.compatible?(/abc/, str).should == Encoding::US_ASCII
end
end
describe "Encoding.compatible? Regexp, Regexp" do
it "returns US-ASCII if both are US-ASCII" do
Encoding.compatible?(/abc/, /def/).should == Encoding::US_ASCII
end
it "returns the first's Encoding if it is not US-ASCII and not ASCII only" do
[ [Encoding, Regexp.new("\xff"), Encoding::ASCII_8BIT],
[Encoding, Regexp.new("\u3042".encode("utf-8")), Encoding::UTF_8],
[Encoding, Regexp.new("\xa4\xa2".force_encoding("euc-jp")), Encoding::EUC_JP],
[Encoding, Regexp.new("\x82\xa0".force_encoding("shift_jis")), Encoding::Shift_JIS],
].should be_computed_by(:compatible?, /abc/)
end
end
describe "Encoding.compatible? Regexp, Symbol" do
it "returns US-ASCII if both are US-ASCII" do
Encoding.compatible?(/abc/, :def).should == Encoding::US_ASCII
end
it "returns the first's Encoding if it is not US-ASCII and not ASCII only" do
[ [Encoding, Regexp.new("\xff"), Encoding::ASCII_8BIT],
[Encoding, Regexp.new("\u3042".encode("utf-8")), Encoding::UTF_8],
[Encoding, Regexp.new("\xa4\xa2".force_encoding("euc-jp")), Encoding::EUC_JP],
[Encoding, Regexp.new("\x82\xa0".force_encoding("shift_jis")), Encoding::Shift_JIS],
].should be_computed_by(:compatible?, /abc/)
end
end
describe "Encoding.compatible? Symbol, String" do
it "returns US-ASCII if both are ASCII only" do
str = "abc".force_encoding("us-ascii")
Encoding.compatible?(str, :abc).should == Encoding::US_ASCII
end
end
  describe "Encoding.compatible? Symbol, Regexp" do
    it "returns US-ASCII if both are US-ASCII" do
      Encoding.compatible?(:abc, /def/).should == Encoding::US_ASCII
    end

    it "returns the Regexp's Encoding if it is not US-ASCII and not ASCII only" do
      a = Regexp.new("\xff")
      b = Regexp.new("\u3042".encode("utf-8"))
      c = Regexp.new("\xa4\xa2".force_encoding("euc-jp"))
      d = Regexp.new("\x82\xa0".force_encoding("shift_jis"))

      [ [Encoding, :abc, a, Encoding::ASCII_8BIT],
        [Encoding, :abc, b, Encoding::UTF_8],
        [Encoding, :abc, c, Encoding::EUC_JP],
        [Encoding, :abc, d, Encoding::Shift_JIS],
      ].should be_computed_by(:compatible?)
    end
  end

  describe "Encoding.compatible? Symbol, Symbol" do
    it "returns US-ASCII if both are US-ASCII" do
      Encoding.compatible?(:abc, :def).should == Encoding::US_ASCII
    end

    it "returns the first's Encoding if it is not ASCII only" do
      [ [Encoding, "\xff".to_sym, Encoding::ASCII_8BIT],
        [Encoding, "\u3042".encode("utf-8").to_sym, Encoding::UTF_8],
        [Encoding, "\xa4\xa2".force_encoding("euc-jp").to_sym, Encoding::EUC_JP],
        [Encoding, "\x82\xa0".force_encoding("shift_jis").to_sym, Encoding::Shift_JIS],
      ].should be_computed_by(:compatible?, :abc)
    end
  end

  # Passing Encoding objects directly (UTF-7 acts as the dummy encoding)
  describe "Encoding.compatible? Encoding, Encoding" do
    it "returns nil if one of the encodings is a dummy encoding" do
      [ [Encoding, Encoding::UTF_7, Encoding::US_ASCII, nil],
        [Encoding, Encoding::US_ASCII, Encoding::UTF_7, nil],
        [Encoding, Encoding::EUC_JP, Encoding::UTF_7, nil],
        [Encoding, Encoding::UTF_7, Encoding::EUC_JP, nil],
        [Encoding, Encoding::UTF_7, Encoding::ASCII_8BIT, nil],
        [Encoding, Encoding::ASCII_8BIT, Encoding::UTF_7, nil],
      ].should be_computed_by(:compatible?)
    end

    it "returns nil if one of the encodings is not US-ASCII" do
      [ [Encoding, Encoding::UTF_8, Encoding::ASCII_8BIT, nil],
        [Encoding, Encoding::ASCII_8BIT, Encoding::UTF_8, nil],
        [Encoding, Encoding::ASCII_8BIT, Encoding::EUC_JP, nil],
        [Encoding, Encoding::Shift_JIS, Encoding::EUC_JP, nil],
      ].should be_computed_by(:compatible?)
    end

    it "returns the first if the second is US-ASCII" do
      [ [Encoding, Encoding::UTF_8, Encoding::US_ASCII, Encoding::UTF_8],
        [Encoding, Encoding::EUC_JP, Encoding::US_ASCII, Encoding::EUC_JP],
        [Encoding, Encoding::Shift_JIS, Encoding::US_ASCII, Encoding::Shift_JIS],
        [Encoding, Encoding::ASCII_8BIT, Encoding::US_ASCII, Encoding::ASCII_8BIT],
      ].should be_computed_by(:compatible?)
    end

    it "returns the Encoding if both are the same" do
      [ [Encoding, Encoding::UTF_8, Encoding::UTF_8, Encoding::UTF_8],
        [Encoding, Encoding::US_ASCII, Encoding::US_ASCII, Encoding::US_ASCII],
        [Encoding, Encoding::ASCII_8BIT, Encoding::ASCII_8BIT, Encoding::ASCII_8BIT],
        [Encoding, Encoding::UTF_7, Encoding::UTF_7, Encoding::UTF_7],
      ].should be_computed_by(:compatible?)
    end
  end

  # Operands that carry no encoding information are never compatible
  describe "Encoding.compatible? Object, Object" do
    it "returns nil for Object, String" do
      Encoding.compatible?(Object.new, "abc").should be_nil
    end

    it "returns nil for Object, Regexp" do
      Encoding.compatible?(Object.new, /./).should be_nil
    end

    it "returns nil for Object, Symbol" do
      Encoding.compatible?(Object.new, :sym).should be_nil
    end

    it "returns nil for String, Object" do
      Encoding.compatible?("abc", Object.new).should be_nil
    end

    it "returns nil for Regexp, Object" do
      Encoding.compatible?(/./, Object.new).should be_nil
    end

    it "returns nil for Symbol, Object" do
      Encoding.compatible?(:sym, Object.new).should be_nil
    end
  end
end
| askl56/rubyspec | core/encoding/compatible_spec.rb | Ruby | mit | 13,521 |
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Mailer\Bridge\Amazon\Tests\Transport;
use PHPUnit\Framework\TestCase;
use Symfony\Component\HttpClient\MockHttpClient;
use Symfony\Component\HttpClient\Response\MockResponse;
use Symfony\Component\Mailer\Bridge\Amazon\Transport\SesApiTransport;
use Symfony\Component\Mailer\Exception\HttpTransportException;
use Symfony\Component\Mime\Address;
use Symfony\Component\Mime\Email;
use Symfony\Contracts\HttpClient\ResponseInterface;
class SesApiTransportTest extends TestCase
{
    /**
     * @dataProvider getTransportData
     */
    public function testToString(SesApiTransport $transport, string $expected)
    {
        $this->assertSame($expected, (string) $transport);
    }

    // DSN-style string representations for default region, explicit region,
    // custom host, and custom host+port configurations.
    public function getTransportData()
    {
        return [
            [
                new SesApiTransport('ACCESS_KEY', 'SECRET_KEY'),
                'ses+api://ACCESS_KEY@email.eu-west-1.amazonaws.com',
            ],
            [
                new SesApiTransport('ACCESS_KEY', 'SECRET_KEY', 'us-east-1'),
                'ses+api://ACCESS_KEY@email.us-east-1.amazonaws.com',
            ],
            [
                (new SesApiTransport('ACCESS_KEY', 'SECRET_KEY'))->setHost('example.com'),
                'ses+api://ACCESS_KEY@example.com',
            ],
            [
                (new SesApiTransport('ACCESS_KEY', 'SECRET_KEY'))->setHost('example.com')->setPort(99),
                'ses+api://ACCESS_KEY@example.com:99',
            ],
        ];
    }

    public function testSend()
    {
        // The mock client asserts the exact request the transport builds
        // (method, endpoint URL, AWS signature header, form-encoded body),
        // then replies with a canned SES success response.
        $client = new MockHttpClient(function (string $method, string $url, array $options): ResponseInterface {
            $this->assertSame('POST', $method);
            $this->assertSame('https://email.eu-west-1.amazonaws.com:8984/', $url);
            $this->assertStringContainsStringIgnoringCase('X-Amzn-Authorization: AWS3-HTTPS AWSAccessKeyId=ACCESS_KEY,Algorithm=HmacSHA256,Signature=', $options['headers'][0] ?? $options['request_headers'][0]);

            parse_str($options['body'], $content);

            $this->assertSame('Hello!', $content['Message_Subject_Data']);
            $this->assertSame('Saif Eddin <saif.gmati@symfony.com>', $content['Destination_ToAddresses_member'][0]);
            $this->assertSame('Fabien <fabpot@symfony.com>', $content['Source']);
            $this->assertSame('Hello There!', $content['Message_Body_Text_Data']);

            $xml = '<SendEmailResponse xmlns="https://email.amazonaws.com/doc/2010-03-31/">
 <SendEmailResult>
 <MessageId>foobar</MessageId>
 </SendEmailResult>
</SendEmailResponse>';

            return new MockResponse($xml, [
                'http_code' => 200,
            ]);
        });

        $transport = new SesApiTransport('ACCESS_KEY', 'SECRET_KEY', null, $client);
        $transport->setPort(8984);

        $mail = new Email();
        $mail->subject('Hello!')
            ->to(new Address('saif.gmati@symfony.com', 'Saif Eddin'))
            ->from(new Address('fabpot@symfony.com', 'Fabien'))
            ->text('Hello There!');

        $message = $transport->send($mail);

        // The message id must be extracted from the <MessageId> element
        $this->assertSame('foobar', $message->getMessageId());
    }

    public function testSendThrowsForErrorResponse()
    {
        // An SES error response must surface as an HttpTransportException
        // carrying the message and code from the <Error> element.
        $client = new MockHttpClient(function (string $method, string $url, array $options): ResponseInterface {
            $xml = "<SendEmailResponse xmlns=\"https://email.amazonaws.com/doc/2010-03-31/\">
 <Error>
 <Message>i'm a teapot</Message>
 <Code>418</Code>
 </Error>
</SendEmailResponse>";

            return new MockResponse($xml, [
                'http_code' => 418,
            ]);
        });

        $transport = new SesApiTransport('ACCESS_KEY', 'SECRET_KEY', null, $client);
        $transport->setPort(8984);

        $mail = new Email();
        $mail->subject('Hello!')
            ->to(new Address('saif.gmati@symfony.com', 'Saif Eddin'))
            ->from(new Address('fabpot@symfony.com', 'Fabien'))
            ->text('Hello There!');

        $this->expectException(HttpTransportException::class);
        $this->expectExceptionMessage('Unable to send an email: i\'m a teapot (code 418).');
        $transport->send($mail);
    }
}
| localheinz/symfony | src/Symfony/Component/Mailer/Bridge/Amazon/Tests/Transport/SesApiTransportTest.php | PHP | mit | 4,500 |
// Builds the quicktest fixture: two independent series of ten random points.
function makeData() {
  "use strict";
  var series = [];
  for (var i = 0; i < 2; i++) {
    series.push(makeRandomData(10));
  }
  return series;
}
// Quicktest driver: renders a line plot whose x values span powers of ten,
// plus a grid of clickable labels that swap in each Plottable formatter.
function run(svg, data, Plottable) {
  "use strict";

  // Per-datum mutator: replaces x with 10^i so the axes get large values
  var largeX = function(d, i){
    d.x = Math.pow(10, i);
  };

  // Work on a deep copy so the shared quicktest data is not mutated
  var bigNumbers = [];
  deepCopy(data[0], bigNumbers);
  bigNumbers.forEach(largeX);

  var dataseries1 = new Plottable.Dataset(bigNumbers);

  //Axis
  var xScale = new Plottable.Scales.Linear();
  var yScale = new Plottable.Scales.Linear();
  var xAxis = new Plottable.Axes.Numeric(xScale, "bottom");
  var yAxis = new Plottable.Axes.Numeric(yScale, "left");

  // One clickable label per formatter under test
  var IdTitle = new Plottable.Components.Label("Identity");
  var GenTitle = new Plottable.Components.Label("General");
  var FixTitle = new Plottable.Components.Label("Fixed");
  var CurrTitle = new Plottable.Components.Label("Currency");
  var PerTitle = new Plottable.Components.Label("Percentage");
  var SITitle = new Plottable.Components.Label("SI");
  var SSTitle = new Plottable.Components.Label("Short Scale");

  var plot = new Plottable.Plots.Line().addDataset(dataseries1);
  plot.x(function(d) { return d.x; }, xScale).y(function(d) { return d.y; }, yScale);

  // Layout: plot with its axes on top, a 3x3 grid of formatter labels below
  var basicTable = new Plottable.Components.Table([[yAxis, plot], [null, xAxis]]);
  var formatChoices = new Plottable.Components.Table([[IdTitle, GenTitle, FixTitle], [CurrTitle, null, PerTitle], [SITitle, null, SSTitle]]);
  var bigTable = new Plottable.Components.Table([[basicTable], [formatChoices]]);
  formatChoices.xAlignment("center");
  bigTable.renderTo(svg);

  // The handlers below swap both axis formatters. The odd-looking numeric
  // arguments (2.1, 12.3 - 11.3, 2.5 + 1.5, ...) deliberately exercise
  // non-integer precision inputs to each formatter factory.
  function useIdentityFormatter() {
    xAxis.formatter(Plottable.Formatters.identity(2.1));
    yAxis.formatter(Plottable.Formatters.identity());
  }

  function useGeneralFormatter() {
    xAxis.formatter(Plottable.Formatters.general(7));
    yAxis.formatter(Plottable.Formatters.general(3));
  }

  function useFixedFormatter() {
    xAxis.formatter(Plottable.Formatters.fixed(2.00));
    yAxis.formatter(Plottable.Formatters.fixed(7.00));
  }

  function useCurrencyFormatter() {
    xAxis.formatter(Plottable.Formatters.currency(3, "$", true));
    yAxis.formatter(Plottable.Formatters.currency(3, "$", true));
  }

  function usePercentageFormatter() {
    xAxis.formatter(Plottable.Formatters.percentage(12.3 - 11.3));
    yAxis.formatter(Plottable.Formatters.percentage(2.5 + 1.5));
  }

  function useSIFormatter() {
    xAxis.formatter(Plottable.Formatters.siSuffix(7));
    yAxis.formatter(Plottable.Formatters.siSuffix(14));
  }

  function useSSFormatter() {
    xAxis.formatter(Plottable.Formatters.shortScale(0));
    yAxis.formatter(Plottable.Formatters.shortScale(0));
  }

  // Wire each label to its formatter swap
  new Plottable.Interactions.Click().onClick(useIdentityFormatter).attachTo(IdTitle);
  new Plottable.Interactions.Click().onClick(useGeneralFormatter).attachTo(GenTitle);
  new Plottable.Interactions.Click().onClick(useFixedFormatter).attachTo(FixTitle);
  new Plottable.Interactions.Click().onClick(useCurrencyFormatter).attachTo(CurrTitle);
  new Plottable.Interactions.Click().onClick(usePercentageFormatter).attachTo(PerTitle);
  new Plottable.Interactions.Click().onClick(useSIFormatter).attachTo(SITitle);
  new Plottable.Interactions.Click().onClick(useSSFormatter).attachTo(SSTitle);
}
| iobeam/plottable | quicktests/overlaying/tests/functional/formatter.js | JavaScript | mit | 3,212 |
// Stripe webhook endpoint: Stripe POSTs event payloads to the path
// configured in Meteor.settings; the payload is handed to the Buy service
// and the request is acknowledged with 200 so Stripe does not retry.
// NOTE(review): no signature verification is visible here — presumably
// handled inside Buy.handleEvent; confirm.
JsonRoutes.add('post', '/' + Meteor.settings.private.stripe.webhookEndpoint, function (req, res) {
  Letterpress.Services.Buy.handleEvent(req.body);
  JsonRoutes.sendResult(res, 200);
});
'use strict';
var spawn = require('child_process').spawn;
var os = require('os');
var pathlib = require('path');
var fs = require('fs');
var net = require('net');
var crypto = require('crypto');
var which = require('which');
var pathOpenSSL;
var tempDir = process.env.PEMJS_TMPDIR || (os.tmpdir || os.tmpDir) && (os.tmpdir || os.tmpDir)() || '/tmp';
module.exports.createPrivateKey = createPrivateKey;
module.exports.createDhparam = createDhparam;
module.exports.createCSR = createCSR;
module.exports.createCertificate = createCertificate;
module.exports.readCertificateInfo = readCertificateInfo;
module.exports.getPublicKey = getPublicKey;
module.exports.getFingerprint = getFingerprint;
module.exports.getModulus = getModulus;
module.exports.config = config;
// PUBLIC API
/**
 * Creates a private key
 *
 * @param {Number} [keyBitsize=2048] Size of the key, defaults to 2048bit
 * @param {Object} [options] object of cipher and password {cipher:'aes128',password:'xxx'}, defaults empty object
 * @param {Function} callback Callback function with an error object and {key}
 */
function createPrivateKey(keyBitsize, options, callback) {
    var clientKeyPassword;

    // Normalize optional arguments: (cb), (bits, cb) or (bits, opts, cb)
    if (!callback && !options && typeof keyBitsize === 'function') {
        callback = keyBitsize;
        keyBitsize = undefined;
        options = {};
    } else if (!callback && keyBitsize && typeof options === 'function') {
        callback = options;
        options = {};
    }

    keyBitsize = Number(keyBitsize) || 2048;

    var params = ['genrsa',
        '-rand',
        '/var/log/mail:/var/log/messages'
    ];

    // Ciphers accepted by `openssl genrsa` for encrypting the generated key
    var cipher = ["aes128", "aes192", "aes256", "camellia128", "camellia192", "camellia256", "des", "des3", "idea"];

    if (options && options.cipher && cipher.indexOf(options.cipher) !== -1 && options.password) {
        // Write the passphrase to a temp file so it never appears on the
        // openssl command line (visible via `ps` otherwise)
        clientKeyPassword = pathlib.join(tempDir, crypto.randomBytes(20).toString('hex'));
        fs.writeFileSync(clientKeyPassword, options.password);
        params.push('-' + options.cipher);
        params.push('-passout');
        params.push('file:' + clientKeyPassword);
    }

    params.push(keyBitsize);

    execOpenSSL(params, 'RSA PRIVATE KEY', function (error, key) {
        if (clientKeyPassword) {
            // fs.unlink requires a callback in modern Node.js (a bare call
            // throws TypeError); cleanup errors are intentionally ignored
            fs.unlink(clientKeyPassword, function () {});
        }
        if (error) {
            return callback(error);
        }
        return callback(null, {
            key: key
        });
    });
}
/**
 * Creates a dhparam key
 *
 * @param {Number} [keyBitsize=512] Size of the key, defaults to 512bit
 * @param {Function} callback Callback function with an error object and {dhparam}
 */
function createDhparam(keyBitsize, callback) {
    // Allow the bit size to be omitted: createDhparam(cb)
    if (!callback && typeof keyBitsize === 'function') {
        callback = keyBitsize;
        keyBitsize = undefined;
    }

    var bits = Number(keyBitsize) || 512;

    execOpenSSL(['dhparam', '-outform', 'PEM', bits], 'DH PARAMETERS', function (err, output) {
        if (err) {
            return callback(err);
        }
        return callback(null, {
            dhparam: output
        });
    });
}
/**
 * Creates a Certificate Signing Request
 *
 * If client key is undefined, a new key is created automatically. The used key is included
 * in the callback return as clientKey
 *
 * @param {Object} [options] Optional options object
 * @param {String} [options.clientKey] Optional client key to use
 * @param {Number} [options.keyBitsize] If clientKey is undefined, bit size to use for generating a new key (defaults to 2048)
 * @param {String} [options.hash] Hash function to use (either md5 sha1 or sha256, defaults to sha256)
 * @param {String} [options.country] CSR country field
 * @param {String} [options.state] CSR state field
 * @param {String} [options.locality] CSR locality field
 * @param {String} [options.organization] CSR organization field
 * @param {String} [options.organizationUnit] CSR organizational unit field
 * @param {String} [options.commonName='localhost'] CSR common name field
 * @param {String} [options.emailAddress] CSR email address field
 * @param {Array} [options.altNames] is a list of subjectAltNames in the subjectAltName field
 * @param {Function} callback Callback function with an error object and {csr, clientKey}
 */
function createCSR(options, callback) {
    if (!callback && typeof options === 'function') {
        callback = options;
        options = undefined;
    }
    options = options || {};

    // An IP address is not a valid CN, so mirror it into subjectAltName.
    // http://stackoverflow.com/questions/14089872/why-does-node-js-accept-ip-addresses-in-certificates-only-for-san-not-for-cn
    if (options.commonName && (net.isIPv4(options.commonName) || net.isIPv6(options.commonName))) {
        if (!options.altNames) {
            options.altNames = [options.commonName];
        } else if (options.altNames.indexOf(options.commonName) === -1) {
            options.altNames = options.altNames.concat([options.commonName]);
        }
    }

    // No client key supplied: generate one first, then re-enter
    if (!options.clientKey) {
        createPrivateKey(options.keyBitsize || 2048, function (error, keyData) {
            if (error) {
                return callback(error);
            }
            options.clientKey = keyData.key;
            createCSR(options, callback);
        });
        return;
    }

    var params = ['req',
        '-new',
        '-' + (options.hash || 'sha256'),
        '-subj',
        generateCSRSubject(options),
        '-key',
        '--TMPFILE--'
    ];
    var tmpfiles = [options.clientKey];
    var config = null;

    if (options.altNames) {
        // Build a minimal openssl config declaring the v3_req extension with
        // one DNS.n / IP.n entry per subjectAltName
        params.push('-extensions');
        params.push('v3_req');
        params.push('-config');
        params.push('--TMPFILE--');
        var altNamesRep = [];
        for (var i = 0; i < options.altNames.length; i++) {
            altNamesRep.push((net.isIP(options.altNames[i]) ? 'IP' : 'DNS') + '.' + (i + 1) + ' = ' + options.altNames[i]);
        }
        tmpfiles.push(config = [
            '[req]',
            'req_extensions = v3_req',
            'distinguished_name = req_distinguished_name',
            '[v3_req]',
            'subjectAltName = @alt_names',
            '[alt_names]',
            altNamesRep.join('\n'),
            '[req_distinguished_name]',
            'commonName = Common Name',
            'commonName_max = 64',
        ].join('\n'));
    }

    var passwordFilePath = null;
    if (options.clientKeyPassword) {
        // openssl reads the key passphrase from a temp file so it stays
        // off the command line
        passwordFilePath = pathlib.join(tempDir, crypto.randomBytes(20).toString('hex'));
        fs.writeFileSync(passwordFilePath, options.clientKeyPassword);
        params.push('-passin');
        params.push('file:' + passwordFilePath);
    }

    execOpenSSL(params, 'CERTIFICATE REQUEST', tmpfiles, function (error, data) {
        if (passwordFilePath) {
            // fs.unlink requires a callback in modern Node.js (a bare call
            // throws TypeError); cleanup errors are intentionally ignored
            fs.unlink(passwordFilePath, function () {});
        }
        if (error) {
            return callback(error);
        }
        var response = {
            csr: data,
            config: config,
            clientKey: options.clientKey
        };
        return callback(null, response);
    });
}
/**
 * Creates a certificate based on a CSR. If CSR is not defined, a new one
 * will be generated automatically. For CSR generation all the options values
 * can be used as with createCSR.
 *
 * @param {Object} [options] Optional options object
 * @param {String} [options.serviceKey] Private key for signing the certificate, if not defined a new one is generated
 * @param {Boolean} [options.selfSigned] If set to true and serviceKey is not defined, use clientKey for signing
 * @param {String} [options.hash] Hash function to use (either md5 sha1 or sha256, defaults to sha256)
 * @param {String} [options.csr] CSR for the certificate, if not defined a new one is generated
 * @param {Number} [options.days] Certificate expire time in days
 * @param {Function} callback Callback function with an error object and {certificate, csr, clientKey, serviceKey}
 */
function createCertificate(options, callback) {
    if (!callback && typeof options === 'function') {
        callback = options;
        options = undefined;
    }
    options = options || {};

    // No CSR yet: generate one (and possibly a client key), then re-enter
    if (!options.csr) {
        createCSR(options, function(error, keyData) {
            if (error) {
                return callback(error);
            }
            options.csr = keyData.csr;
            options.config = keyData.config;
            options.clientKey = keyData.clientKey;
            createCertificate(options, callback);
        });
        return;
    }

    // No signing key yet: either self-sign with the client key, or
    // generate a fresh service key and re-enter
    if (!options.serviceKey) {
        if (options.selfSigned) {
            options.serviceKey = options.clientKey;
        } else {
            createPrivateKey(options.keyBitsize || 2048, function(error, keyData) {
                if (error) {
                    return callback(error);
                }
                options.serviceKey = keyData.key;
                createCertificate(options, callback);
            });
            return;
        }
    }

    var params = ['x509',
        '-req',
        '-' + (options.hash || 'sha256'),
        '-days',
        Number(options.days) || '365',
        '-in',
        '--TMPFILE--'
    ];
    var tmpfiles = [options.csr];

    if (options.serviceCertificate) {
        // CA signing mode: needs the CA certificate, its key and a serial
        if (!options.serial) {
            return callback(new Error('serial option required for CA signing'));
        }
        params.push('-CA');
        params.push('--TMPFILE--');
        params.push('-CAkey');
        params.push('--TMPFILE--');
        params.push('-set_serial');
        // serial is rendered as a zero-padded 8-digit hex value
        params.push('0x' + ('00000000' + options.serial.toString(16)).slice(-8));
        tmpfiles.push(options.serviceCertificate);
        tmpfiles.push(options.serviceKey);
    } else {
        // Plain signing with the service key
        params.push('-signkey');
        params.push('--TMPFILE--');
        tmpfiles.push(options.serviceKey);
    }

    if (options.config) {
        // Re-apply the v3_req extensions (e.g. subjectAltName) produced
        // alongside the CSR
        params.push('-extensions');
        params.push('v3_req');
        params.push('-extfile');
        params.push('--TMPFILE--');
        tmpfiles.push(options.config);
    }

    execOpenSSL(params, 'CERTIFICATE', tmpfiles, function(error, data) {
        if (error) {
            return callback(error);
        }
        var response = {
            csr: options.csr,
            clientKey: options.clientKey,
            certificate: data,
            serviceKey: options.serviceKey
        };
        return callback(null, response);
    });
}
/**
* Exports a public key from a private key, CSR or certificate
*
* @param {String} certificate PEM encoded private key, CSR or certificate
* @param {Function} callback Callback function with an error object and {publicKey}
*/
function getPublicKey(certificate, callback) {
    if (!callback && typeof certificate === 'function') {
        callback = certificate;
        certificate = undefined;
    }
    certificate = (certificate || '').toString();

    // Pick the openssl subcommand matching the PEM header we were given:
    // CSR -> req, RSA private key -> rsa, anything else -> x509.
    var sslParams;
    if (certificate.match(/BEGIN(\sNEW)? CERTIFICATE REQUEST/)) {
        sslParams = ['req', '-in', '--TMPFILE--', '-pubkey', '-noout'];
    } else if (certificate.match(/BEGIN RSA PRIVATE KEY/)) {
        sslParams = ['rsa', '-in', '--TMPFILE--', '-pubout'];
    } else {
        sslParams = ['x509', '-in', '--TMPFILE--', '-pubkey', '-noout'];
    }

    execOpenSSL(sslParams, 'PUBLIC KEY', certificate, function(error, key) {
        if (error) {
            return callback(error);
        }
        return callback(null, {
            publicKey: key
        });
    });
}
/**
* Reads subject data from a certificate or a CSR
*
* @param {String} certificate PEM encoded CSR or certificate
* @param {Function} callback Callback function with an error object and {country, state, locality, organization, organizationUnit, commonName, emailAddress}
*/
function readCertificateInfo(certificate, callback) {
    if (!callback && typeof certificate === 'function') {
        callback = certificate;
        certificate = undefined;
    }
    certificate = (certificate || '').toString();

    // CSRs are decoded with `openssl req`, everything else with `openssl x509`.
    var type = certificate.match(/BEGIN(\sNEW)? CERTIFICATE REQUEST/) ? 'req' : 'x509';
    var sslParams = [type, '-noout', '-text', '-in', '--TMPFILE--'];

    spawnWrapper(sslParams, certificate, function(err, code, stdout) {
        if (err) {
            return callback(err);
        }
        // Parse the human-readable `-text` dump into a plain object.
        return fetchCertificateData(stdout, callback);
    });
}
/**
* get the modulus from a certificate, a CSR or a private key
*
* @param {String} certificate PEM encoded, CSR PEM encoded, or private key
* @param {Function} callback Callback function with an error object and {modulus}
*/
function getModulus(certificate, callback) {
    certificate = Buffer.isBuffer(certificate) && certificate.toString() || certificate;

    // Choose the subcommand from the PEM header: CSR, RSA key, or certificate.
    var type;
    if (certificate.match(/BEGIN(\sNEW)? CERTIFICATE REQUEST/)) {
        type = 'req';
    } else if (certificate.match(/BEGIN RSA PRIVATE KEY/)) {
        type = 'rsa';
    } else {
        type = 'x509';
    }

    spawnWrapper([type, '-noout', '-modulus', '-in', '--TMPFILE--'], certificate, function(err, code, stdout) {
        if (err) {
            return callback(err);
        }
        var match = stdout.match(/Modulus=([0-9a-fA-F]+)$/m);
        if (!match) {
            return callback(new Error('No modulus'));
        }
        return callback(null, {
            modulus: match[1]
        });
    });
}
/**
* config the pem module
* @param {Object} options
*/
function config(options) {
    // Only the openssl binary path is configurable; it overrides the
    // module-level pathOpenSSL used when spawning openssl.
    if (options.pathOpenSSL) {
        pathOpenSSL = options.pathOpenSSL;
    }
}
/**
 * Gets the fingerprint for a certificate
 *
 * @param {String} certificate PEM encoded certificate
 * @param {String} [hash] Hash function to use (md5, sha1 or sha256, defaults to sha1)
 * @param {Function} callback Callback function with an error object and {fingerprint}
 */
function getFingerprint(certificate, hash, callback) {
    // `hash` is optional: shift arguments when only (certificate, callback) given.
    if (!callback && typeof hash === 'function') {
        callback = hash;
        hash = undefined;
    }
    hash = hash || 'sha1';

    var sslParams = [
        'x509',
        '-in',
        '--TMPFILE--',
        '-fingerprint',
        '-noout',
        '-' + hash
    ];

    spawnWrapper(sslParams, certificate, function(err, code, stdout) {
        if (err) {
            return callback(err);
        }
        var match = stdout.match(/Fingerprint=([0-9a-fA-F:]+)$/m);
        if (!match) {
            return callback(new Error('No fingerprint'));
        }
        return callback(null, {
            fingerprint: match[1]
        });
    });
}
// HELPER FUNCTIONS
function fetchCertificateData(certData, callback) {
    // Parses `openssl ... -text` output into {country, state, locality,
    // organization, organizationUnit, commonName, emailAddress, san, validity}.
    certData = (certData || '').toString();
    var subject, subject2, extra, tmp, certValues = {};
    var validity = {};
    var san;
    if ((subject = certData.match(/Subject:([^\n]*)\n/)) && subject.length > 1) {
        // Normalize the one-line subject to one field per line so the
        // field regexes below can anchor on newline boundaries.
        subject2 = linebrakes(subject[1] + '\n');
        subject = subject[1];
        // Slash-separated trailing components (e.g. emailAddress) go to `extra`.
        extra = subject.split('/');
        subject = extra.shift() + '\n';
        extra = extra.join('/') + '\n';
        // country
        tmp = subject2.match(/\sC=([^\n].*?)[\n]/);
        certValues.country = tmp && tmp[1] || '';
        // state
        tmp = subject2.match(/\sST=([^\n].*?)[\n]/);
        certValues.state = tmp && tmp[1] || '';
        // locality
        tmp = subject2.match(/\sL=([^\n].*?)[\n]/);
        certValues.locality = tmp && tmp[1] || '';
        // organization
        tmp = subject2.match(/\sO=([^\n].*?)[\n]/);
        certValues.organization = tmp && tmp[1] || '';
        // unit
        tmp = subject2.match(/\sOU=([^\n].*?)[\n]/);
        certValues.organizationUnit = tmp && tmp[1] || '';
        // common name
        tmp = subject2.match(/\sCN=([^\n].*?)[\n]/);
        certValues.commonName = tmp && tmp[1] || '';
        // email (lives in the slash-separated tail, not the main subject)
        tmp = extra.match(/emailAddress=([^\n\/].*?)[\n\/]/);
        certValues.emailAddress = tmp && tmp[1] || '';
    }
    if ((san = certData.match(/X509v3 Subject Alternative Name: \n([^\n]*)\n/)) && san.length > 1) {
        san = san[1].trim() + '\n';
        certValues.san = {};
        // DNS names listed in the SAN extension
        tmp = preg_match_all('DNS:([^,\\n].*?)[,\\n]', san);
        certValues.san.dns = tmp || '';
        // IP addresses listed in the SAN extension
        tmp = preg_match_all('IP Address:([^,\\n].*?)[,\\n\\s]', san);
        certValues.san.ip = tmp || '';
    }
    // Validity window: both bounds as millisecond timestamps (Date.parse).
    if ((tmp = certData.match(/Not Before\s?:\s?([^\n]*)\n/)) && tmp.length > 1) {
        validity.start = Date.parse(tmp && tmp[1] || '');
    }
    if ((tmp = certData.match(/Not After\s?:\s?([^\n]*)\n/)) && tmp.length > 1) {
        validity.end = Date.parse(tmp && tmp[1] || '');
    }
    if (validity.start && validity.end) {
        certValues.validity = validity;
    }
    callback(null, certValues);
}
// NOTE(review): name looks like a typo for "linebreaks"; left unchanged
// because fetchCertificateData() calls it by this name.
function linebrakes(content) {
    // Rewrites a one-line subject string so each C=/L=/O=/OU=/ST=/CN= field
    // starts on its own line, for the per-field regexes in fetchCertificateData().
    var helper_x, p, subject;
    helper_x = content.replace(/(C|L|O|OU|ST|CN)=/g, '\n$1=');
    helper_x = preg_match_all('((C|L|O|OU|ST|CN)=[^\n].*)', helper_x);
    for (p in helper_x) {
        subject = helper_x[p].trim();
        // Keep only the part before the first '/' (drops trailing components).
        content = subject.split('/');
        subject = content.shift();
        // Strip a trailing comma left over from the original comma-separated form.
        helper_x[p] = rtrim(subject, ',');
    }
    return ' ' + helper_x.join('\n') + '\n';
}
/**
 * Trims the given characters (default: whitespace incl. NBSP) from the
 * end of a string, PHP rtrim() style.
 */
function rtrim(str, charlist) {
    var escaped;
    if (charlist) {
        // Escape regex metacharacters so the list is safe inside [...].
        escaped = (charlist + '').replace(/([\[\]\(\)\.\?\/\*\{\}\+\$\^\:])/g, '\\$1');
    } else {
        escaped = ' \\s\u00A0';
    }
    var trailing = new RegExp('[' + escaped + ']+$', 'g');
    return (str + '').replace(trailing, '');
}
/**
 * PHP-style preg_match_all: returns the first capture group of every
 * match of `regex` in `haystack` (empty array when nothing matches).
 *
 * @param {String} regex Regular expression source (must contain a capture group)
 * @param {String} haystack Text to scan
 * @return {Array} Captured group values, in match order
 */
function preg_match_all(regex, haystack) {
    var globalRegex = new RegExp(regex, 'g');
    // String#match with /g returns null when there is no match at all.
    var globalMatch = haystack.match(globalRegex) || [];
    // FIX: compile the non-global regex once instead of per iteration, and
    // iterate by index — the original `for..in` over an array would also
    // visit any inherited/added enumerable properties.
    var nonGlobalRegex = new RegExp(regex);
    var matchArray = [];
    for (var i = 0; i < globalMatch.length; i++) {
        matchArray.push(globalMatch[i].match(nonGlobalRegex)[1]);
    }
    return matchArray;
}
/**
 * Builds an openssl "-subj" style subject string ("/C=../CN=..") from the
 * option names used by createCSR. CN falls back to "localhost"; empty
 * fields are omitted, and field values are sanitized to a safe charset.
 */
function generateCSRSubject(options) {
    options = options || {};
    var subjectFields = {
        C: options.country || options.C || '',
        ST: options.state || options.ST || '',
        L: options.locality || options.L || '',
        O: options.organization || options.O || '',
        OU: options.organizationUnit || options.OU || '',
        CN: options.commonName || options.CN || 'localhost',
        emailAddress: options.emailAddress || ''
    };
    return Object.keys(subjectFields).reduce(function(subject, field) {
        var value = subjectFields[field];
        if (!value) {
            return subject;
        }
        return subject + '/' + field + '=' + value.replace(/[^\w \.\*\-@]+/g, ' ').trim();
    }, '');
}
/**
 * Generically spawn openSSL, without processing the result
 *
 * @param {Array} params The parameters to pass to openssl
 * @param {Function} callback Called with (error, exitCode, stdout, stderr)
 */
function spawnOpenSSL(params, callback) {
    // Resolve the binary: configured path, then $OPENSSL_BIN, then $PATH lookup.
    var pathBin = pathOpenSSL || process.env.OPENSSL_BIN || 'openssl';
    testOpenSSLPath(pathBin, function(err) {
        if (err) {
            return callback(err);
        }
        var openssl = spawn(pathBin, params),
            stdout = '',
            stderr = '';
        // Accumulate output as binary so multi-byte sequences split across
        // chunks are not corrupted; decoded to UTF-8 once the stream closes.
        openssl.stdout.on('data', function(data) {
            stdout += (data || '').toString('binary');
        });
        openssl.stderr.on('data', function(data) {
            stderr += (data || '').toString('binary');
        });
        // We need both the return code and access to all of stdout. Stdout isn't
        // *really* available until the close event fires; the timing nuance was
        // making this fail periodically.
        var needed = 2; // wait for both exit and close.
        var code = -1;
        var finished = false;
        var done = function(err) {
            if (finished) {
                return;
            }
            if (err) {
                finished = true;
                return callback(err);
            }
            if (--needed < 1) {
                finished = true;
                if (code) {
                    callback(new Error('Invalid openssl exit code: ' + code + '\n% openssl ' + params.join(' ') + '\n' + stderr), code);
                } else {
                    callback(null, code, stdout, stderr);
                }
            }
        };
        openssl.on('error', done);
        openssl.on('exit', function(ret) {
            code = ret;
            done();
        });
        openssl.on('close', function() {
            // FIX: `new Buffer(...)` is deprecated (throws under newer Node
            // security settings); Buffer.from() is the supported replacement.
            stdout = Buffer.from(stdout, 'binary').toString('utf-8');
            stderr = Buffer.from(stderr, 'binary').toString('utf-8');
            done();
        });
    });
}
/**
 * Writes each tmpfile payload to a random temp path, substitutes the paths
 * for the --TMPFILE-- placeholders in `params`, runs openssl, then removes
 * the temp files (best-effort).
 */
function spawnWrapper(params, tmpfiles, callback) {
    var files = [];
    var toUnlink = [];

    // Pair every --TMPFILE-- placeholder with the next tmpfile payload and
    // rewrite the placeholder to a random path under tempDir.
    if (tmpfiles) {
        tmpfiles = [].concat(tmpfiles || []);
        params.forEach(function(value, i) {
            var fpath;
            if (value === '--TMPFILE--') {
                fpath = pathlib.join(tempDir, crypto.randomBytes(20).toString('hex'));
                files.push({
                    path: fpath,
                    contents: tmpfiles.shift()
                });
                params[i] = fpath;
            }
        });
    }

    // Write the queued files one at a time, then spawn openssl.
    var processFiles = function() {
        var file = files.shift();
        if (!file) {
            return spawnSSL();
        }
        fs.writeFile(file.path, file.contents, function() {
            toUnlink.push(file.path);
            processFiles();
        });
    };

    var spawnSSL = function() {
        spawnOpenSSL(params, function(err, code, stdout, stderr) {
            toUnlink.forEach(function(filePath) {
                // FIX: fs.unlink() without a callback throws a TypeError on
                // Node >= 10. Cleanup is best-effort, so errors are ignored.
                fs.unlink(filePath, function() {});
            });
            callback(err, code, stdout, stderr);
        });
    };

    processFiles();
}
/**
* Spawn an openssl command
*/
function execOpenSSL(params, searchStr, tmpfiles, callback) {
    // `tmpfiles` is optional: shift arguments when called as (params, searchStr, cb).
    if (!callback && typeof tmpfiles === 'function') {
        callback = tmpfiles;
        tmpfiles = false;
    }

    spawnWrapper(params, tmpfiles, function(err, code, stdout, stderr) {
        if (err) {
            return callback(err);
        }

        // Extract the PEM block delimited by the BEGIN/END <searchStr> markers.
        var start = -1;
        var end = -1;
        var beginMatch = stdout.match(new RegExp('\\-+BEGIN ' + searchStr + '\\-+$', 'm'));
        if (beginMatch) {
            start = beginMatch.index;
        }
        var endMatch = stdout.match(new RegExp('^\\-+END ' + searchStr + '\\-+', 'm'));
        if (endMatch) {
            end = endMatch.index + (endMatch[0] || '').length;
        }

        if (start >= 0 && end >= 0) {
            return callback(null, stdout.substring(start, end));
        }
        return callback(new Error(searchStr + ' not found from openssl output:\n---stdout---\n' + stdout + '\n---stderr---\n' + stderr + '\ncode: ' + code));
    });
}
/**
* Validates the pathBin for the openssl command.
*
* @param {String} pathBin The path to OpenSSL Bin
* @param {Function} callback Callback function with an error object
*/
function testOpenSSLPath(pathBin, callback) {
    // Resolve the binary via which(); report a descriptive error when missing.
    which(pathBin, function(error) {
        if (!error) {
            return callback();
        }
        callback(new Error('Could not find openssl on your system on this path: ' + pathBin));
    });
}
<?php
namespace Neos\Eel\Tests\Unit;
/*
* This file is part of the Neos.Eel package.
*
* (c) Contributors of the Neos Project - www.neos.io
*
* This package is Open Source Software. For the full copyright and license
* information, please view the LICENSE file which was distributed with this
* source code.
*/
use Neos\Eel\Helper\DateHelper;
use Neos\Flow\I18n\Locale;
/**
* Tests for DateHelper
*/
class DateHelperTest extends \Neos\Flow\Tests\UnitTestCase
{
    /**
     * Data sets for parseWorks(): [date string, format, expected DateTime].
     *
     * @return array
     */
    public function parseExamples()
    {
        $date = \DateTime::createFromFormat('Y-m-d', '2013-07-03');
        $dateTime = \DateTime::createFromFormat('Y-m-d H:i:s', '2013-07-03 12:34:56');
        return [
            'basic date' => ['2013-07-03', 'Y-m-d', $date],
            'date with time' => ['2013-07-03 12:34:56', 'Y-m-d H:i:s', $dateTime]
        ];
    }

    /**
     * @test
     * @dataProvider parseExamples
     */
    public function parseWorks($string, $format, $expected)
    {
        $helper = new DateHelper();
        $result = $helper->parse($string, $format);
        self::assertInstanceOf(\DateTime::class, $result);
        // One minute of tolerance — presumably because unspecified time parts
        // are filled from "now" on both sides at slightly different moments.
        self::assertEqualsWithDelta((float)$expected->format('U'), (float)$result->format('U'), 60, 'Timestamps should match');
    }

    /**
     * Data sets for formatWorks(): [input, format, expected string].
     *
     * @return array
     */
    public function formatExamples()
    {
        $dateTime = \DateTime::createFromFormat('Y-m-d H:i:s', '2013-07-03 12:34:56');
        return [
            'DateTime object' => [$dateTime, 'Y-m-d H:i:s', '2013-07-03 12:34:56'],
            'timestamp as integer' => [1372856513, 'Y-m-d', '2013-07-03'],
            'now' => ['now', 'Y-m-d', date('Y-m-d')],
            'interval' => [new \DateInterval('P1D'), '%d days', '1 days']
        ];
    }

    /**
     * @test
     * @dataProvider formatExamples
     */
    public function formatWorks($dateOrString, $format, $expected)
    {
        $helper = new DateHelper();
        $result = $helper->format($dateOrString, $format);
        self::assertSame($expected, $result);
    }

    /**
     * @test
     */
    public function formatCldrThrowsOnEmptyArguments()
    {
        $this->expectException(\InvalidArgumentException::class);
        $helper = new DateHelper();
        $helper->formatCldr(null, null);
    }

    /**
     * Without an explicit locale, formatCldr() must fetch the current locale
     * from the injected localization service.
     *
     * @test
     */
    public function formatCldrWorksWithEmptyLocale()
    {
        $locale = new Locale('en');
        $expected = 'whatever-value';
        $configurationMock = $this->createMock(\Neos\Flow\I18n\Configuration::class);
        $configurationMock->expects(self::atLeastOnce())->method('getCurrentLocale')->willReturn($locale);
        $localizationServiceMock = $this->createMock(\Neos\Flow\I18n\Service::class);
        $localizationServiceMock->expects(self::atLeastOnce())->method('getConfiguration')->willReturn($configurationMock);
        $formatMock = $this->createMock(\Neos\Flow\I18n\Formatter\DatetimeFormatter::class);
        $formatMock->expects(self::atLeastOnce())->method('formatDateTimeWithCustomPattern')->willReturn($expected);
        $helper = new DateHelper();
        $this->inject($helper, 'datetimeFormatter', $formatMock);
        $this->inject($helper, 'localizationService', $localizationServiceMock);
        $date = \DateTime::createFromFormat('Y-m-d H:i:s', '2013-07-03 12:34:56');
        $format = 'whatever-format';
        $helper->formatCldr($date, $format);
    }

    /**
     * @test
     */
    public function formatCldrCallsFormatService()
    {
        $date = \DateTime::createFromFormat('Y-m-d H:i:s', '2013-07-03 12:34:56');
        $format = 'whatever-format';
        $locale = 'en';
        $expected = '2013-07-03 12:34:56';
        $formatMock = $this->createMock(\Neos\Flow\I18n\Formatter\DatetimeFormatter::class);
        $formatMock->expects(self::atLeastOnce())->method('formatDateTimeWithCustomPattern');
        $helper = new DateHelper();
        $this->inject($helper, 'datetimeFormatter', $formatMock);
        $helper->formatCldr($date, $format, $locale);
    }

    /**
     * @test
     */
    public function nowWorks()
    {
        $helper = new DateHelper();
        $result = $helper->now();
        self::assertInstanceOf(\DateTime::class, $result);
        self::assertEqualsWithDelta(time(), (integer)$result->format('U'), 1, 'Now should be now');
    }

    /**
     * @test
     */
    public function createWorks()
    {
        $helper = new DateHelper();
        $result = $helper->create('yesterday noon');
        $expected = new \DateTime('yesterday noon');
        self::assertInstanceOf(\DateTime::class, $result);
        self::assertEqualsWithDelta($expected->getTimestamp(), $result->getTimestamp(), 1, 'Created DateTime object should match expected');
    }

    /**
     * @test
     */
    public function todayWorks()
    {
        $helper = new DateHelper();
        $result = $helper->today();
        self::assertInstanceOf(\DateTime::class, $result);
        $today = new \DateTime('today');
        self::assertEqualsWithDelta($today->getTimestamp(), $result->getTimestamp(), 1, 'Today should be today');
    }

    /**
     * Data sets for calculationWorks(): [method, base DateTime, interval, expected].
     *
     * @return array
     */
    public function calculationExamples()
    {
        $dateTime = \DateTime::createFromFormat('Y-m-d H:i:s', '2013-07-03 12:34:56');
        return [
            'add DateTime with DateInterval' => ['add', $dateTime, new \DateInterval('P1D'), '2013-07-04 12:34:56'],
            'add DateTime with string' => ['add', $dateTime, 'P1D', '2013-07-04 12:34:56'],
            'subtract DateTime with DateInterval' => ['subtract', $dateTime, new \DateInterval('P1D'), '2013-07-02 12:34:56'],
            'subtract DateTime with string' => ['subtract', $dateTime, 'P1D', '2013-07-02 12:34:56'],
        ];
    }

    /**
     * @test
     * @dataProvider calculationExamples
     */
    public function calculationWorks($method, $dateTime, $interval, $expected)
    {
        $timestamp = $dateTime->getTimestamp();
        $helper = new DateHelper();
        $result = $helper->$method($dateTime, $interval);
        // add()/subtract() must not mutate the DateTime that was passed in.
        self::assertEquals($timestamp, $dateTime->getTimeStamp(), 'DateTime should not be modified');
        self::assertEquals($expected, $result->format('Y-m-d H:i:s'));
    }

    /**
     * @test
     */
    public function diffWorks()
    {
        $earlierTime = \DateTime::createFromFormat('Y-m-d H:i:s', '2013-07-03 12:34:56');
        $futureTime = \DateTime::createFromFormat('Y-m-d H:i:s', '2013-07-10 12:33:56');
        $helper = new DateHelper();
        $result = $helper->diff($earlierTime, $futureTime);
        // 6 days, 23 hours, 59 minutes between the two fixtures above.
        self::assertEquals(6, $result->d);
        self::assertEquals(23, $result->h);
        self::assertEquals(59, $result->i);
    }

    /**
     * @test
     */
    public function dateAccessorsWork()
    {
        $helper = new DateHelper();
        $date = new \DateTime('2013-10-16 14:59:27');
        self::assertSame(2013, $helper->year($date));
        self::assertSame(10, $helper->month($date));
        self::assertSame(16, $helper->dayOfMonth($date));
        self::assertSame(14, $helper->hour($date));
        self::assertSame(59, $helper->minute($date));
        self::assertSame(27, $helper->second($date));
    }
}
| neos/eel | Tests/Unit/Helper/DateHelperTest.php | PHP | mit | 7,259 |
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import argparse
import zipfile
from os import getcwd, listdir, makedirs, mkdir, rename
from os.path import isdir, isfile, join
from shutil import move, rmtree
from sys import exit as sys_exit
from sys import path
path.append("..")
from platformio.util import exec_command, get_home_dir
def _unzip_generated_file(mbed_dir, output_dir, mcu):
    # Extracts the exported EmBlocks project archive for `mcu` into
    # <output_dir>/variant/<mcu>, keeping only the mbed* files from the
    # "MBED_A1" subfolder and renaming the project file to <mcu>.eix.
    filename = join(
        mbed_dir, "build", "export", "MBED_A1_emblocks_%s.zip" % mcu)
    variant_dir = join(output_dir, "variant", mcu)
    if isfile(filename):
        with zipfile.ZipFile(filename) as zfile:
            mkdir(variant_dir)
            zfile.extractall(variant_dir)
            for f in listdir(join(variant_dir, "MBED_A1")):
                # only the mbed* payload is kept; other exported files are dropped
                if not f.lower().startswith("mbed"):
                    continue
                move(join(variant_dir, "MBED_A1", f), variant_dir)
            rename(join(variant_dir, "MBED_A1.eix"),
                   join(variant_dir, "%s.eix" % mcu))
            rmtree(join(variant_dir, "MBED_A1"))
    else:
        print "Warning! Skipped board: %s" % mcu
def buildlib(mbed_dir, mcu, lib="mbed"):
    """Build the mbed core library (or one add-on library) for a single MCU.

    :param mbed_dir: path to the mbed framework checkout
    :param mcu: target MCU name understood by workspace_tools/build.py
    :param lib: "mbed" for the core library, otherwise an extra-library
        flag (e.g. "--rtos") that is appended to the build command
    """
    build_command = [
        "python",
        join(mbed_dir, "workspace_tools", "build.py"),
        "--mcu", mcu,
        "-t", "GCC_ARM"
    ]
    # BUG FIX: the original used `lib is not "mbed"`, which compares object
    # *identity* and only worked by accident of CPython string interning.
    if lib != "mbed":
        build_command.append(lib)
    build_result = exec_command(build_command, cwd=getcwd())
    if build_result['returncode'] != 0:
        # a failed build just means this MCU doesn't support the library
        print("* %s doesn't support %s library!" % (mcu, lib))
def copylibs(mbed_dir, output_dir):
    # Moves the compiled add-on libraries out of mbed's build tree into
    # <output_dir>/libs; the network library only contributes its "eth" part.
    libs = ["dsp", "fat", "net", "rtos", "usb", "usb_host"]
    libs_dir = join(output_dir, "libs")
    makedirs(libs_dir)
    print "Moving generated libraries to framework dir..."
    for lib in libs:
        if lib == "net":
            move(join(mbed_dir, "build", lib, "eth"), libs_dir)
            continue
        move(join(mbed_dir, "build", lib), libs_dir)
def main(mbed_dir, output_dir):
    # Builds every GCC_ARM-exportable mbed target and assembles a framework
    # directory: variant/<mcu>/ project files, libs/, and a boards.txt index.
    print "Starting..."
    path.append(mbed_dir)
    from workspace_tools.export import gccarm
    if isdir(output_dir):
        print "Deleting previous framework dir..."
        rmtree(output_dir)
    settings_file = join(mbed_dir, "workspace_tools", "private_settings.py")
    if not isfile(settings_file):
        # Point mbed's build system at the bundled GCC ARM toolchain.
        with open(settings_file, "w") as f:
            f.write("GCC_ARM_PATH = '%s'" %
                    join(get_home_dir(), "packages", "toolchain-gccarmnoneeabi",
                         "bin"))
    makedirs(join(output_dir, "variant"))
    mbed_libs = ["--rtos", "--dsp", "--fat", "--eth", "--usb", "--usb_host"]
    for mcu in set(gccarm.GccArm.TARGETS):
        print "Processing board: %s" % mcu
        # Core library first, then each optional add-on library.
        buildlib(mbed_dir, mcu)
        for lib in mbed_libs:
            buildlib(mbed_dir, mcu, lib)
        # Export an EmBlocks project ("-i emblocks") for this MCU.
        result = exec_command(
            ["python", join(mbed_dir, "workspace_tools", "project.py"),
             "--mcu", mcu, "-i", "emblocks", "-p", "0", "-b"], cwd=getcwd()
        )
        if result['returncode'] != 0:
            print "Unable to build the project for %s" % mcu
            continue
        _unzip_generated_file(mbed_dir, output_dir, mcu)
    copylibs(mbed_dir, output_dir)
    # boards.txt lists one variant directory name per line, sorted.
    with open(join(output_dir, "boards.txt"), "w") as fp:
        fp.write("\n".join(sorted(listdir(join(output_dir, "variant")))))
    print "Complete!"
if __name__ == "__main__":
    # CLI entry point: --mbed <framework path> --output <destination dir>
    parser = argparse.ArgumentParser()
    parser.add_argument('--mbed', help="The path to mbed framework")
    parser.add_argument('--output', help="The path to output directory")
    args = vars(parser.parse_args())
    sys_exit(main(args["mbed"], args["output"]))
| mseroczynski/platformio | scripts/mbed_to_package.py | Python | mit | 3,667 |
<?php
namespace Concrete\Core\Attribute;
use Concrete\Core\Attribute\Key\SearchIndexer\SearchIndexerInterface;
interface AttributeKeyInterface
{
    /**
     * Returns the numeric ID of the attribute key.
     *
     * @return int
     */
    public function getAttributeKeyID();

    /**
     * Returns the string handle of the attribute key.
     *
     * @return string
     */
    public function getAttributeKeyHandle();

    /**
     * Returns the attribute type entity of this key.
     *
     * @return \Concrete\Core\Entity\Attribute\Type
     */
    public function getAttributeType();

    /**
     * Whether values of this key are included in the search index.
     *
     * @return bool
     */
    public function isAttributeKeySearchable();

    /**
     * Returns the indexer responsible for this key's search index.
     *
     * @return SearchIndexerInterface
     */
    public function getSearchIndexer();

    /**
     * Returns the attribute controller for this key.
     *
     * NOTE(review): `Controller` is referenced without an import or full
     * namespace — presumably \Concrete\Core\Attribute\Controller; confirm.
     *
     * @return Controller
     */
    public function getController();
}
| jaromirdalecky/concrete5 | concrete/src/Attribute/AttributeKeyInterface.php | PHP | mit | 678 |
'''tzinfo timezone information for GMT_minus_0.'''
from pytz.tzinfo import StaticTzInfo
from pytz.tzinfo import memorized_timedelta as timedelta
class GMT_minus_0(StaticTzInfo):
    '''GMT_minus_0 timezone definition. See datetime.tzinfo for details'''
    zone = 'GMT_minus_0'
    _utcoffset = timedelta(seconds=0)  # fixed offset of zero from UTC
    _tzname = 'GMT'

# Replace the class with a singleton instance, matching pytz's convention
# for static (non-DST) zones.
GMT_minus_0 = GMT_minus_0()
| newvem/pytz | pytz/zoneinfo/GMT_minus_0.py | Python | mit | 367 |
<?php
// Register the test namespace on top of Composer's autoloader.
$loader = require __DIR__.'/../vendor/autoload.php';
$loader->add('Namshi\\JOSE\\Test', __DIR__);

// Absolute path to this test directory, used by tests to locate fixtures.
define('TEST_DIR', __DIR__);
// file:// URL prefix for key fixtures shipped alongside the tests.
define('SSL_KEYS_PATH', 'file://'.TEST_DIR.DIRECTORY_SEPARATOR);
| DevKhater/symfony2-testing | vendor/namshi/jose/tests/bootstrap.php | PHP | mit | 200 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.testsdk import ScenarioTest
class CdnEdgeNodecenarioTest(ScenarioTest):
    # NOTE(review): class name looks like a typo for "CdnEdgeNodeScenarioTest";
    # left unchanged because test discovery/recordings may reference it.
    def test_edge_node_crud(self):
        # Read-only scenario: assumes `cdn edge-node list` returns exactly
        # three entries — TODO confirm against the recorded service response.
        self.cmd('cdn edge-node list', checks=self.check('length(@)', 3))
| yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/cdn/tests/latest/test_nodes_scenarios.py | Python | mit | 543 |
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Lock\Tests\Store;
use Symfony\Component\Cache\Traits\RedisClusterProxy;
use Symfony\Component\Cache\Traits\RedisProxy;
use Symfony\Component\Lock\Exception\InvalidArgumentException;
use Symfony\Component\Lock\Exception\LockConflictedException;
use Symfony\Component\Lock\Key;
use Symfony\Component\Lock\PersistingStoreInterface;
use Symfony\Component\Lock\Store\RedisStore;
/**
* @author Jérémy Derussé <jeremy@derusse.com>
*/
abstract class AbstractRedisStoreTest extends AbstractStoreTest
{
    use ExpiringStoreTestTrait;

    /**
     * {@inheritdoc}
     *
     * Clock delay used by the expiring-store assertions
     * (presumably microseconds — see ExpiringStoreTestTrait).
     */
    protected function getClockDelay()
    {
        return 250000;
    }

    /**
     * Returns the concrete Redis client the store under test should use.
     */
    abstract protected function getRedisConnection(): \Redis|\RedisArray|\RedisCluster|\Predis\ClientInterface;

    /**
     * {@inheritdoc}
     */
    public function getStore(): PersistingStoreInterface
    {
        return new RedisStore($this->getRedisConnection());
    }

    /**
     * A lock written with the Symfony 5.1 store layout must still be seen by
     * the current RedisStore: acquiring the same key must conflict.
     */
    public function testBackwardCompatibility()
    {
        $resource = uniqid(__METHOD__, true);
        $key1 = new Key($resource);
        $key2 = new Key($resource);
        $oldStore = new Symfony51Store($this->getRedisConnection());
        $newStore = $this->getStore();
        $oldStore->save($key1);
        $this->assertTrue($oldStore->exists($key1));
        $this->expectException(LockConflictedException::class);
        $newStore->save($key2);
    }
}
/**
 * Minimal re-implementation of the Redis store layout as shipped in
 * Symfony 5.1, used only by the backward-compatibility test above.
 */
class Symfony51Store
{
    private $redis;

    public function __construct($redis)
    {
        $this->redis = $redis;
    }

    public function save(Key $key)
    {
        // Lua script: refresh the TTL if this token already owns the key,
        // otherwise try to acquire it via SET NX PX; yields 0 on conflict.
        $script = '
            if redis.call("GET", KEYS[1]) == ARGV[1] then
                return redis.call("PEXPIRE", KEYS[1], ARGV[2])
            elseif redis.call("SET", KEYS[1], ARGV[1], "NX", "PX", ARGV[2]) then
                return 1
            else
                return 0
            end
        ';
        if (!$this->evaluate($script, (string) $key, [$this->getUniqueToken($key), (int) ceil(5 * 1000)])) {
            throw new LockConflictedException();
        }
    }

    public function exists(Key $key)
    {
        // The key exists (for us) only if it holds *our* token.
        return $this->redis->get((string) $key) === $this->getUniqueToken($key);
    }

    /**
     * Runs the Lua script against whichever Redis client flavour was injected.
     */
    private function evaluate(string $script, string $resource, array $args)
    {
        if (
            $this->redis instanceof \Redis ||
            $this->redis instanceof \RedisCluster ||
            $this->redis instanceof RedisProxy ||
            $this->redis instanceof RedisClusterProxy
        ) {
            return $this->redis->eval($script, array_merge([$resource], $args), 1);
        }
        if ($this->redis instanceof \RedisArray) {
            // Route to the shard responsible for $resource.
            return $this->redis->_instance($this->redis->_target($resource))->eval($script, array_merge([$resource], $args), 1);
        }
        if ($this->redis instanceof \Predis\ClientInterface) {
            return $this->redis->eval(...array_merge([$script, 1, $resource], $args));
        }
        throw new InvalidArgumentException(sprintf('"%s()" expects being initialized with a Redis, RedisArray, RedisCluster or Predis\ClientInterface, "%s" given.', __METHOD__, get_debug_type($this->redis)));
    }

    /**
     * Lazily creates (and caches on the Key) the random ownership token.
     */
    private function getUniqueToken(Key $key): string
    {
        if (!$key->hasState(__CLASS__)) {
            $token = base64_encode(random_bytes(32));
            $key->setState(__CLASS__, $token);
        }
        return $key->getState(__CLASS__);
    }
}
| mpdude/symfony | src/Symfony/Component/Lock/Tests/Store/AbstractRedisStoreTest.php | PHP | mit | 3,693 |
<?php
class PSU_Student_Finaid_Application_Factory {
    /**
     * Loads a single financial aid application record for a person.
     *
     * @param mixed $pidm person identifier (rcrapp1_pidm)
     * @param mixed $aidy aid year code (rcrapp1_aidy_code)
     * @param mixed $seqno application sequence number (rcrapp1_seq_no)
     * @return PSU_Student_Finaid_Application
     */
    public function fetch_by_pidm_aidy_seqno( $pidm, $aidy, $seqno ) {
        $args = array(
            'pidm' => $pidm,
            'aidy' => $aidy,
            'seqno' => $seqno,
        );
        $where = array(
            'rcrapp1_pidm = :pidm',
            'rcrapp1_aidy_code = :aidy',
            'rcrapp1_seq_no = :seqno',
        );
        $rset = $this->query( $args, $where );
        return new PSU_Student_Finaid_Application( $rset );
    }

    /**
     * Runs the parent-data query with optional extra WHERE conditions.
     *
     * @param array $args named bind parameters for the clauses in $where
     * @param array $where additional SQL conditions, ANDed together
     * @return mixed single row of parent SSN/name/birth-date columns
     */
    public function query( $args, $where = array() ) {
        // '1=1' guarantees implode() always yields a valid AND-clause,
        // even when no extra conditions were supplied.
        $where[] = '1=1';
        $where_sql = ' AND ' . implode( ' AND ', $where );
        $sql = "
            SELECT
                rcrapp4_fath_ssn, rcrapp4_fath_last_name, rcrapp4_fath_first_name_ini, rcrapp4_fath_birth_date,
                rcrapp4_moth_ssn, rcrapp4_moth_last_name, rcrapp4_moth_first_name_ini, rcrapp4_moth_birth_date
            FROM
                rcrapp1 LEFT JOIN rcrapp4 ON
                    rcrapp1_aidy_code = rcrapp4_aidy_code AND
                    rcrapp1_pidm = rcrapp4_pidm AND
                    rcrapp1_infc_code = rcrapp4_infc_code AND
                    rcrapp1_seq_no = rcrapp4_seq_no
            WHERE
                rcrapp1_infc_code = 'EDE'
                $where_sql
        ";
        $rset = PSU::db('banner')->GetRow( $sql, $args );
        return $rset;
    }
}
| jbthibeault/plymouth-webapp | lib/PSU/Student/Finaid/Application/Factory.php | PHP | mit | 1,109 |
<head>
<meta charset=utf-8>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Recharge</title>
<!-- Load Roboto font -->
<!-- NOTE(review): hard-coded http:// will be blocked as mixed content if this
     page is ever served over https; consider https:// — TODO confirm hosting -->
<link href='http://fonts.googleapis.com/css?family=Roboto:400,300,700&subset=latin,latin-ext' rel='stylesheet' type='text/css'>
<!-- Load css styles -->
<link rel="stylesheet" type="text/css" href="css/bootstrap.css" />
<link rel="stylesheet" type="text/css" href="css/bootstrap-responsive.css" />
<link rel="stylesheet" type="text/css" href="css/stylemain_tb.css" />
<link rel="stylesheet" type="text/css" href="css/pluton.css" />
<link rel="stylesheet" type="text/css" href="css/style1_payment_portal.css" />
<!--[if IE 7]>
<link rel="stylesheet" type="text/css" href="css/pluton-ie7.css" />
<![endif]-->
<link rel="stylesheet" type="text/css" href="css/jquery.cslider_tb.css" />
<link rel="stylesheet" type="text/css" href="css/jquery.bxslider.css" />
<link rel="stylesheet" type="text/css" href="css/animate.css" />
<!-- Fav and touch icons -->
<link rel="apple-touch-icon-precomposed" sizes="144x144" href="images/ico/apple-touch-icon-144.png">
<link rel="apple-touch-icon-precomposed" sizes="114x114" href="images/ico/apple-touch-icon-114.png">
<!-- NOTE(review): the 72x72 icon path skips the images/ico/ folder that every
     other touch icon uses — verify the file actually lives at images/ -->
<link rel="apple-touch-icon-precomposed" sizes="72x72" href="images/apple-touch-icon-72.png">
<link rel="apple-touch-icon-precomposed" href="images/ico/apple-touch-icon-57.png">
<link rel="shortcut icon" href="images/ico/favicon.ico">
</head>
<?php
return [
'Names' => [
'af' => 'Tiếng Afrikaans',
'af_NA' => 'Tiếng Afrikaans (Namibia)',
'af_ZA' => 'Tiếng Afrikaans (Nam Phi)',
'ak' => 'Tiếng Akan',
'ak_GH' => 'Tiếng Akan (Ghana)',
'am' => 'Tiếng Amharic',
'am_ET' => 'Tiếng Amharic (Ethiopia)',
'ar' => 'Tiếng Ả Rập',
'ar_001' => 'Tiếng Ả Rập (Thế giới)',
'ar_AE' => 'Tiếng Ả Rập (Các Tiểu Vương quốc Ả Rập Thống nhất)',
'ar_BH' => 'Tiếng Ả Rập (Bahrain)',
'ar_DJ' => 'Tiếng Ả Rập (Djibouti)',
'ar_DZ' => 'Tiếng Ả Rập (Algeria)',
'ar_EG' => 'Tiếng Ả Rập (Ai Cập)',
'ar_EH' => 'Tiếng Ả Rập (Tây Sahara)',
'ar_ER' => 'Tiếng Ả Rập (Eritrea)',
'ar_IL' => 'Tiếng Ả Rập (Israel)',
'ar_IQ' => 'Tiếng Ả Rập (Iraq)',
'ar_JO' => 'Tiếng Ả Rập (Jordan)',
'ar_KM' => 'Tiếng Ả Rập (Comoros)',
'ar_KW' => 'Tiếng Ả Rập (Kuwait)',
'ar_LB' => 'Tiếng Ả Rập (Li-băng)',
'ar_LY' => 'Tiếng Ả Rập (Libya)',
'ar_MA' => 'Tiếng Ả Rập (Ma-rốc)',
'ar_MR' => 'Tiếng Ả Rập (Mauritania)',
'ar_OM' => 'Tiếng Ả Rập (Oman)',
'ar_PS' => 'Tiếng Ả Rập (Lãnh thổ Palestine)',
'ar_QA' => 'Tiếng Ả Rập (Qatar)',
'ar_SA' => 'Tiếng Ả Rập (Ả Rập Xê-út)',
'ar_SD' => 'Tiếng Ả Rập (Sudan)',
'ar_SO' => 'Tiếng Ả Rập (Somalia)',
'ar_SS' => 'Tiếng Ả Rập (Nam Sudan)',
'ar_SY' => 'Tiếng Ả Rập (Syria)',
'ar_TD' => 'Tiếng Ả Rập (Chad)',
'ar_TN' => 'Tiếng Ả Rập (Tunisia)',
'ar_YE' => 'Tiếng Ả Rập (Yemen)',
'as' => 'Tiếng Assam',
'as_IN' => 'Tiếng Assam (Ấn Độ)',
'az' => 'Tiếng Azerbaijan',
'az_AZ' => 'Tiếng Azerbaijan (Azerbaijan)',
'az_Cyrl' => 'Tiếng Azerbaijan (Chữ Kirin)',
'az_Cyrl_AZ' => 'Tiếng Azerbaijan (Chữ Kirin, Azerbaijan)',
'az_Latn' => 'Tiếng Azerbaijan (Chữ La tinh)',
'az_Latn_AZ' => 'Tiếng Azerbaijan (Chữ La tinh, Azerbaijan)',
'be' => 'Tiếng Belarus',
'be_BY' => 'Tiếng Belarus (Belarus)',
'bg' => 'Tiếng Bulgaria',
'bg_BG' => 'Tiếng Bulgaria (Bulgaria)',
'bm' => 'Tiếng Bambara',
'bm_ML' => 'Tiếng Bambara (Mali)',
'bn' => 'Tiếng Bangla',
'bn_BD' => 'Tiếng Bangla (Bangladesh)',
'bn_IN' => 'Tiếng Bangla (Ấn Độ)',
'bo' => 'Tiếng Tây Tạng',
'bo_CN' => 'Tiếng Tây Tạng (Trung Quốc)',
'bo_IN' => 'Tiếng Tây Tạng (Ấn Độ)',
'br' => 'Tiếng Breton',
'br_FR' => 'Tiếng Breton (Pháp)',
'bs' => 'Tiếng Bosnia',
'bs_BA' => 'Tiếng Bosnia (Bosnia và Herzegovina)',
'bs_Cyrl' => 'Tiếng Bosnia (Chữ Kirin)',
'bs_Cyrl_BA' => 'Tiếng Bosnia (Chữ Kirin, Bosnia và Herzegovina)',
'bs_Latn' => 'Tiếng Bosnia (Chữ La tinh)',
'bs_Latn_BA' => 'Tiếng Bosnia (Chữ La tinh, Bosnia và Herzegovina)',
'ca' => 'Tiếng Catalan',
'ca_AD' => 'Tiếng Catalan (Andorra)',
'ca_ES' => 'Tiếng Catalan (Tây Ban Nha)',
'ca_FR' => 'Tiếng Catalan (Pháp)',
'ca_IT' => 'Tiếng Catalan (Italy)',
'ce' => 'Tiếng Chechen',
'ce_RU' => 'Tiếng Chechen (Nga)',
'cs' => 'Tiếng Séc',
'cs_CZ' => 'Tiếng Séc (Séc)',
'cy' => 'Tiếng Wales',
'cy_GB' => 'Tiếng Wales (Vương quốc Anh)',
'da' => 'Tiếng Đan Mạch',
'da_DK' => 'Tiếng Đan Mạch (Đan Mạch)',
'da_GL' => 'Tiếng Đan Mạch (Greenland)',
'de' => 'Tiếng Đức',
'de_AT' => 'Tiếng Đức (Áo)',
'de_BE' => 'Tiếng Đức (Bỉ)',
'de_CH' => 'Tiếng Đức (Thụy Sĩ)',
'de_DE' => 'Tiếng Đức (Đức)',
'de_IT' => 'Tiếng Đức (Italy)',
'de_LI' => 'Tiếng Đức (Liechtenstein)',
'de_LU' => 'Tiếng Đức (Luxembourg)',
'dz' => 'Tiếng Dzongkha',
'dz_BT' => 'Tiếng Dzongkha (Bhutan)',
'ee' => 'Tiếng Ewe',
'ee_GH' => 'Tiếng Ewe (Ghana)',
'ee_TG' => 'Tiếng Ewe (Togo)',
'el' => 'Tiếng Hy Lạp',
'el_CY' => 'Tiếng Hy Lạp (Síp)',
'el_GR' => 'Tiếng Hy Lạp (Hy Lạp)',
'en' => 'Tiếng Anh',
'en_001' => 'Tiếng Anh (Thế giới)',
'en_150' => 'Tiếng Anh (Châu Âu)',
'en_AE' => 'Tiếng Anh (Các Tiểu Vương quốc Ả Rập Thống nhất)',
'en_AG' => 'Tiếng Anh (Antigua và Barbuda)',
'en_AI' => 'Tiếng Anh (Anguilla)',
'en_AS' => 'Tiếng Anh (Samoa thuộc Mỹ)',
'en_AT' => 'Tiếng Anh (Áo)',
'en_AU' => 'Tiếng Anh (Australia)',
'en_BB' => 'Tiếng Anh (Barbados)',
'en_BE' => 'Tiếng Anh (Bỉ)',
'en_BI' => 'Tiếng Anh (Burundi)',
'en_BM' => 'Tiếng Anh (Bermuda)',
'en_BS' => 'Tiếng Anh (Bahamas)',
'en_BW' => 'Tiếng Anh (Botswana)',
'en_BZ' => 'Tiếng Anh (Belize)',
'en_CA' => 'Tiếng Anh (Canada)',
'en_CC' => 'Tiếng Anh (Quần đảo Cocos [Keeling])',
'en_CH' => 'Tiếng Anh (Thụy Sĩ)',
'en_CK' => 'Tiếng Anh (Quần đảo Cook)',
'en_CM' => 'Tiếng Anh (Cameroon)',
'en_CX' => 'Tiếng Anh (Đảo Giáng Sinh)',
'en_CY' => 'Tiếng Anh (Síp)',
'en_DE' => 'Tiếng Anh (Đức)',
'en_DK' => 'Tiếng Anh (Đan Mạch)',
'en_DM' => 'Tiếng Anh (Dominica)',
'en_ER' => 'Tiếng Anh (Eritrea)',
'en_FI' => 'Tiếng Anh (Phần Lan)',
'en_FJ' => 'Tiếng Anh (Fiji)',
'en_FK' => 'Tiếng Anh (Quần đảo Falkland)',
'en_FM' => 'Tiếng Anh (Micronesia)',
'en_GB' => 'Tiếng Anh (Vương quốc Anh)',
'en_GD' => 'Tiếng Anh (Grenada)',
'en_GG' => 'Tiếng Anh (Guernsey)',
'en_GH' => 'Tiếng Anh (Ghana)',
'en_GI' => 'Tiếng Anh (Gibraltar)',
'en_GM' => 'Tiếng Anh (Gambia)',
'en_GU' => 'Tiếng Anh (Guam)',
'en_GY' => 'Tiếng Anh (Guyana)',
'en_HK' => 'Tiếng Anh (Đặc khu Hành chính Hồng Kông, Trung Quốc)',
'en_IE' => 'Tiếng Anh (Ireland)',
'en_IL' => 'Tiếng Anh (Israel)',
'en_IM' => 'Tiếng Anh (Đảo Man)',
'en_IN' => 'Tiếng Anh (Ấn Độ)',
'en_IO' => 'Tiếng Anh (Lãnh thổ Ấn Độ Dương thuộc Anh)',
'en_JE' => 'Tiếng Anh (Jersey)',
'en_JM' => 'Tiếng Anh (Jamaica)',
'en_KE' => 'Tiếng Anh (Kenya)',
'en_KI' => 'Tiếng Anh (Kiribati)',
'en_KN' => 'Tiếng Anh (St. Kitts và Nevis)',
'en_KY' => 'Tiếng Anh (Quần đảo Cayman)',
'en_LC' => 'Tiếng Anh (St. Lucia)',
'en_LR' => 'Tiếng Anh (Liberia)',
'en_LS' => 'Tiếng Anh (Lesotho)',
'en_MG' => 'Tiếng Anh (Madagascar)',
'en_MH' => 'Tiếng Anh (Quần đảo Marshall)',
'en_MO' => 'Tiếng Anh (Đặc khu Hành chính Macao, Trung Quốc)',
'en_MP' => 'Tiếng Anh (Quần đảo Bắc Mariana)',
'en_MS' => 'Tiếng Anh (Montserrat)',
'en_MT' => 'Tiếng Anh (Malta)',
'en_MU' => 'Tiếng Anh (Mauritius)',
'en_MW' => 'Tiếng Anh (Malawi)',
'en_MY' => 'Tiếng Anh (Malaysia)',
'en_NA' => 'Tiếng Anh (Namibia)',
'en_NF' => 'Tiếng Anh (Đảo Norfolk)',
'en_NG' => 'Tiếng Anh (Nigeria)',
'en_NL' => 'Tiếng Anh (Hà Lan)',
'en_NR' => 'Tiếng Anh (Nauru)',
'en_NU' => 'Tiếng Anh (Niue)',
'en_NZ' => 'Tiếng Anh (New Zealand)',
'en_PG' => 'Tiếng Anh (Papua New Guinea)',
'en_PH' => 'Tiếng Anh (Philippines)',
'en_PK' => 'Tiếng Anh (Pakistan)',
'en_PN' => 'Tiếng Anh (Quần đảo Pitcairn)',
'en_PR' => 'Tiếng Anh (Puerto Rico)',
'en_PW' => 'Tiếng Anh (Palau)',
'en_RW' => 'Tiếng Anh (Rwanda)',
'en_SB' => 'Tiếng Anh (Quần đảo Solomon)',
'en_SC' => 'Tiếng Anh (Seychelles)',
'en_SD' => 'Tiếng Anh (Sudan)',
'en_SE' => 'Tiếng Anh (Thụy Điển)',
'en_SG' => 'Tiếng Anh (Singapore)',
'en_SH' => 'Tiếng Anh (St. Helena)',
'en_SI' => 'Tiếng Anh (Slovenia)',
'en_SL' => 'Tiếng Anh (Sierra Leone)',
'en_SS' => 'Tiếng Anh (Nam Sudan)',
'en_SX' => 'Tiếng Anh (Sint Maarten)',
'en_SZ' => 'Tiếng Anh (Eswatini)',
'en_TC' => 'Tiếng Anh (Quần đảo Turks và Caicos)',
'en_TK' => 'Tiếng Anh (Tokelau)',
'en_TO' => 'Tiếng Anh (Tonga)',
'en_TT' => 'Tiếng Anh (Trinidad và Tobago)',
'en_TV' => 'Tiếng Anh (Tuvalu)',
'en_TZ' => 'Tiếng Anh (Tanzania)',
'en_UG' => 'Tiếng Anh (Uganda)',
'en_UM' => 'Tiếng Anh (Các tiểu đảo xa của Hoa Kỳ)',
'en_US' => 'Tiếng Anh (Hoa Kỳ)',
'en_VC' => 'Tiếng Anh (St. Vincent và Grenadines)',
'en_VG' => 'Tiếng Anh (Quần đảo Virgin thuộc Anh)',
'en_VI' => 'Tiếng Anh (Quần đảo Virgin thuộc Hoa Kỳ)',
'en_VU' => 'Tiếng Anh (Vanuatu)',
'en_WS' => 'Tiếng Anh (Samoa)',
'en_ZA' => 'Tiếng Anh (Nam Phi)',
'en_ZM' => 'Tiếng Anh (Zambia)',
'en_ZW' => 'Tiếng Anh (Zimbabwe)',
'eo' => 'Tiếng Quốc Tế Ngữ',
'eo_001' => 'Tiếng Quốc Tế Ngữ (Thế giới)',
'es' => 'Tiếng Tây Ban Nha',
'es_419' => 'Tiếng Tây Ban Nha (Châu Mỹ La-tinh)',
'es_AR' => 'Tiếng Tây Ban Nha (Argentina)',
'es_BO' => 'Tiếng Tây Ban Nha (Bolivia)',
'es_BR' => 'Tiếng Tây Ban Nha (Brazil)',
'es_BZ' => 'Tiếng Tây Ban Nha (Belize)',
'es_CL' => 'Tiếng Tây Ban Nha (Chile)',
'es_CO' => 'Tiếng Tây Ban Nha (Colombia)',
'es_CR' => 'Tiếng Tây Ban Nha (Costa Rica)',
'es_CU' => 'Tiếng Tây Ban Nha (Cuba)',
'es_DO' => 'Tiếng Tây Ban Nha (Cộng hòa Dominica)',
'es_EC' => 'Tiếng Tây Ban Nha (Ecuador)',
'es_ES' => 'Tiếng Tây Ban Nha (Tây Ban Nha)',
'es_GQ' => 'Tiếng Tây Ban Nha (Guinea Xích Đạo)',
'es_GT' => 'Tiếng Tây Ban Nha (Guatemala)',
'es_HN' => 'Tiếng Tây Ban Nha (Honduras)',
'es_MX' => 'Tiếng Tây Ban Nha (Mexico)',
'es_NI' => 'Tiếng Tây Ban Nha (Nicaragua)',
'es_PA' => 'Tiếng Tây Ban Nha (Panama)',
'es_PE' => 'Tiếng Tây Ban Nha (Peru)',
'es_PH' => 'Tiếng Tây Ban Nha (Philippines)',
'es_PR' => 'Tiếng Tây Ban Nha (Puerto Rico)',
'es_PY' => 'Tiếng Tây Ban Nha (Paraguay)',
'es_SV' => 'Tiếng Tây Ban Nha (El Salvador)',
'es_US' => 'Tiếng Tây Ban Nha (Hoa Kỳ)',
'es_UY' => 'Tiếng Tây Ban Nha (Uruguay)',
'es_VE' => 'Tiếng Tây Ban Nha (Venezuela)',
'et' => 'Tiếng Estonia',
'et_EE' => 'Tiếng Estonia (Estonia)',
'eu' => 'Tiếng Basque',
'eu_ES' => 'Tiếng Basque (Tây Ban Nha)',
'fa' => 'Tiếng Ba Tư',
'fa_AF' => 'Tiếng Ba Tư (Afghanistan)',
'fa_IR' => 'Tiếng Ba Tư (Iran)',
'ff' => 'Tiếng Fulah',
'ff_CM' => 'Tiếng Fulah (Cameroon)',
'ff_GN' => 'Tiếng Fulah (Guinea)',
'ff_Latn' => 'Tiếng Fulah (Chữ La tinh)',
'ff_Latn_BF' => 'Tiếng Fulah (Chữ La tinh, Burkina Faso)',
'ff_Latn_CM' => 'Tiếng Fulah (Chữ La tinh, Cameroon)',
'ff_Latn_GH' => 'Tiếng Fulah (Chữ La tinh, Ghana)',
'ff_Latn_GM' => 'Tiếng Fulah (Chữ La tinh, Gambia)',
'ff_Latn_GN' => 'Tiếng Fulah (Chữ La tinh, Guinea)',
'ff_Latn_GW' => 'Tiếng Fulah (Chữ La tinh, Guinea-Bissau)',
'ff_Latn_LR' => 'Tiếng Fulah (Chữ La tinh, Liberia)',
'ff_Latn_MR' => 'Tiếng Fulah (Chữ La tinh, Mauritania)',
'ff_Latn_NE' => 'Tiếng Fulah (Chữ La tinh, Niger)',
'ff_Latn_NG' => 'Tiếng Fulah (Chữ La tinh, Nigeria)',
'ff_Latn_SL' => 'Tiếng Fulah (Chữ La tinh, Sierra Leone)',
'ff_Latn_SN' => 'Tiếng Fulah (Chữ La tinh, Senegal)',
'ff_MR' => 'Tiếng Fulah (Mauritania)',
'ff_SN' => 'Tiếng Fulah (Senegal)',
'fi' => 'Tiếng Phần Lan',
'fi_FI' => 'Tiếng Phần Lan (Phần Lan)',
'fo' => 'Tiếng Faroe',
'fo_DK' => 'Tiếng Faroe (Đan Mạch)',
'fo_FO' => 'Tiếng Faroe (Quần đảo Faroe)',
'fr' => 'Tiếng Pháp',
'fr_BE' => 'Tiếng Pháp (Bỉ)',
'fr_BF' => 'Tiếng Pháp (Burkina Faso)',
'fr_BI' => 'Tiếng Pháp (Burundi)',
'fr_BJ' => 'Tiếng Pháp (Benin)',
'fr_BL' => 'Tiếng Pháp (St. Barthélemy)',
'fr_CA' => 'Tiếng Pháp (Canada)',
'fr_CD' => 'Tiếng Pháp (Congo - Kinshasa)',
'fr_CF' => 'Tiếng Pháp (Cộng hòa Trung Phi)',
'fr_CG' => 'Tiếng Pháp (Congo - Brazzaville)',
'fr_CH' => 'Tiếng Pháp (Thụy Sĩ)',
'fr_CI' => 'Tiếng Pháp (Côte d’Ivoire)',
'fr_CM' => 'Tiếng Pháp (Cameroon)',
'fr_DJ' => 'Tiếng Pháp (Djibouti)',
'fr_DZ' => 'Tiếng Pháp (Algeria)',
'fr_FR' => 'Tiếng Pháp (Pháp)',
'fr_GA' => 'Tiếng Pháp (Gabon)',
'fr_GF' => 'Tiếng Pháp (Guiana thuộc Pháp)',
'fr_GN' => 'Tiếng Pháp (Guinea)',
'fr_GP' => 'Tiếng Pháp (Guadeloupe)',
'fr_GQ' => 'Tiếng Pháp (Guinea Xích Đạo)',
'fr_HT' => 'Tiếng Pháp (Haiti)',
'fr_KM' => 'Tiếng Pháp (Comoros)',
'fr_LU' => 'Tiếng Pháp (Luxembourg)',
'fr_MA' => 'Tiếng Pháp (Ma-rốc)',
'fr_MC' => 'Tiếng Pháp (Monaco)',
'fr_MF' => 'Tiếng Pháp (St. Martin)',
'fr_MG' => 'Tiếng Pháp (Madagascar)',
'fr_ML' => 'Tiếng Pháp (Mali)',
'fr_MQ' => 'Tiếng Pháp (Martinique)',
'fr_MR' => 'Tiếng Pháp (Mauritania)',
'fr_MU' => 'Tiếng Pháp (Mauritius)',
'fr_NC' => 'Tiếng Pháp (New Caledonia)',
'fr_NE' => 'Tiếng Pháp (Niger)',
'fr_PF' => 'Tiếng Pháp (Polynesia thuộc Pháp)',
'fr_PM' => 'Tiếng Pháp (Saint Pierre và Miquelon)',
'fr_RE' => 'Tiếng Pháp (Réunion)',
'fr_RW' => 'Tiếng Pháp (Rwanda)',
'fr_SC' => 'Tiếng Pháp (Seychelles)',
'fr_SN' => 'Tiếng Pháp (Senegal)',
'fr_SY' => 'Tiếng Pháp (Syria)',
'fr_TD' => 'Tiếng Pháp (Chad)',
'fr_TG' => 'Tiếng Pháp (Togo)',
'fr_TN' => 'Tiếng Pháp (Tunisia)',
'fr_VU' => 'Tiếng Pháp (Vanuatu)',
'fr_WF' => 'Tiếng Pháp (Wallis và Futuna)',
'fr_YT' => 'Tiếng Pháp (Mayotte)',
'fy' => 'Tiếng Frisia',
'fy_NL' => 'Tiếng Frisia (Hà Lan)',
'ga' => 'Tiếng Ireland',
'ga_GB' => 'Tiếng Ireland (Vương quốc Anh)',
'ga_IE' => 'Tiếng Ireland (Ireland)',
'gd' => 'Tiếng Gael Scotland',
'gd_GB' => 'Tiếng Gael Scotland (Vương quốc Anh)',
'gl' => 'Tiếng Galician',
'gl_ES' => 'Tiếng Galician (Tây Ban Nha)',
'gu' => 'Tiếng Gujarati',
'gu_IN' => 'Tiếng Gujarati (Ấn Độ)',
'gv' => 'Tiếng Manx',
'gv_IM' => 'Tiếng Manx (Đảo Man)',
'ha' => 'Tiếng Hausa',
'ha_GH' => 'Tiếng Hausa (Ghana)',
'ha_NE' => 'Tiếng Hausa (Niger)',
'ha_NG' => 'Tiếng Hausa (Nigeria)',
'he' => 'Tiếng Do Thái',
'he_IL' => 'Tiếng Do Thái (Israel)',
'hi' => 'Tiếng Hindi',
'hi_IN' => 'Tiếng Hindi (Ấn Độ)',
'hr' => 'Tiếng Croatia',
'hr_BA' => 'Tiếng Croatia (Bosnia và Herzegovina)',
'hr_HR' => 'Tiếng Croatia (Croatia)',
'hu' => 'Tiếng Hungary',
'hu_HU' => 'Tiếng Hungary (Hungary)',
'hy' => 'Tiếng Armenia',
'hy_AM' => 'Tiếng Armenia (Armenia)',
'ia' => 'Tiếng Khoa Học Quốc Tế',
'ia_001' => 'Tiếng Khoa Học Quốc Tế (Thế giới)',
'id' => 'Tiếng Indonesia',
'id_ID' => 'Tiếng Indonesia (Indonesia)',
'ig' => 'Tiếng Igbo',
'ig_NG' => 'Tiếng Igbo (Nigeria)',
'ii' => 'Tiếng Di Tứ Xuyên',
'ii_CN' => 'Tiếng Di Tứ Xuyên (Trung Quốc)',
'is' => 'Tiếng Iceland',
'is_IS' => 'Tiếng Iceland (Iceland)',
'it' => 'Tiếng Italy',
'it_CH' => 'Tiếng Italy (Thụy Sĩ)',
'it_IT' => 'Tiếng Italy (Italy)',
'it_SM' => 'Tiếng Italy (San Marino)',
'it_VA' => 'Tiếng Italy (Thành Vatican)',
'ja' => 'Tiếng Nhật',
'ja_JP' => 'Tiếng Nhật (Nhật Bản)',
'jv' => 'Tiếng Java',
'jv_ID' => 'Tiếng Java (Indonesia)',
'ka' => 'Tiếng Georgia',
'ka_GE' => 'Tiếng Georgia (Georgia)',
'ki' => 'Tiếng Kikuyu',
'ki_KE' => 'Tiếng Kikuyu (Kenya)',
'kk' => 'Tiếng Kazakh',
'kk_KZ' => 'Tiếng Kazakh (Kazakhstan)',
'kl' => 'Tiếng Kalaallisut',
'kl_GL' => 'Tiếng Kalaallisut (Greenland)',
'km' => 'Tiếng Khmer',
'km_KH' => 'Tiếng Khmer (Campuchia)',
'kn' => 'Tiếng Kannada',
'kn_IN' => 'Tiếng Kannada (Ấn Độ)',
'ko' => 'Tiếng Hàn',
'ko_KP' => 'Tiếng Hàn (Triều Tiên)',
'ko_KR' => 'Tiếng Hàn (Hàn Quốc)',
'ks' => 'Tiếng Kashmir',
'ks_Arab' => 'Tiếng Kashmir (Chữ Ả Rập)',
'ks_Arab_IN' => 'Tiếng Kashmir (Chữ Ả Rập, Ấn Độ)',
'ks_IN' => 'Tiếng Kashmir (Ấn Độ)',
'ku' => 'Tiếng Kurd',
'ku_TR' => 'Tiếng Kurd (Thổ Nhĩ Kỳ)',
'kw' => 'Tiếng Cornwall',
'kw_GB' => 'Tiếng Cornwall (Vương quốc Anh)',
'ky' => 'Tiếng Kyrgyz',
'ky_KG' => 'Tiếng Kyrgyz (Kyrgyzstan)',
'lb' => 'Tiếng Luxembourg',
'lb_LU' => 'Tiếng Luxembourg (Luxembourg)',
'lg' => 'Tiếng Ganda',
'lg_UG' => 'Tiếng Ganda (Uganda)',
'ln' => 'Tiếng Lingala',
'ln_AO' => 'Tiếng Lingala (Angola)',
'ln_CD' => 'Tiếng Lingala (Congo - Kinshasa)',
'ln_CF' => 'Tiếng Lingala (Cộng hòa Trung Phi)',
'ln_CG' => 'Tiếng Lingala (Congo - Brazzaville)',
'lo' => 'Tiếng Lào',
'lo_LA' => 'Tiếng Lào (Lào)',
'lt' => 'Tiếng Litva',
'lt_LT' => 'Tiếng Litva (Litva)',
'lu' => 'Tiếng Luba-Katanga',
'lu_CD' => 'Tiếng Luba-Katanga (Congo - Kinshasa)',
'lv' => 'Tiếng Latvia',
'lv_LV' => 'Tiếng Latvia (Latvia)',
'mg' => 'Tiếng Malagasy',
'mg_MG' => 'Tiếng Malagasy (Madagascar)',
'mi' => 'Tiếng Māori',
'mi_NZ' => 'Tiếng Māori (New Zealand)',
'mk' => 'Tiếng Macedonia',
'mk_MK' => 'Tiếng Macedonia (Bắc Macedonia)',
'ml' => 'Tiếng Malayalam',
'ml_IN' => 'Tiếng Malayalam (Ấn Độ)',
'mn' => 'Tiếng Mông Cổ',
'mn_MN' => 'Tiếng Mông Cổ (Mông Cổ)',
'mr' => 'Tiếng Marathi',
'mr_IN' => 'Tiếng Marathi (Ấn Độ)',
'ms' => 'Tiếng Mã Lai',
'ms_BN' => 'Tiếng Mã Lai (Brunei)',
'ms_ID' => 'Tiếng Mã Lai (Indonesia)',
'ms_MY' => 'Tiếng Mã Lai (Malaysia)',
'ms_SG' => 'Tiếng Mã Lai (Singapore)',
'mt' => 'Tiếng Malta',
'mt_MT' => 'Tiếng Malta (Malta)',
'my' => 'Tiếng Miến Điện',
'my_MM' => 'Tiếng Miến Điện (Myanmar [Miến Điện])',
'nb' => 'Tiếng Na Uy [Bokmål]',
'nb_NO' => 'Tiếng Na Uy [Bokmål] (Na Uy)',
'nb_SJ' => 'Tiếng Na Uy [Bokmål] (Svalbard và Jan Mayen)',
'nd' => 'Tiếng Ndebele Miền Bắc',
'nd_ZW' => 'Tiếng Ndebele Miền Bắc (Zimbabwe)',
'ne' => 'Tiếng Nepal',
'ne_IN' => 'Tiếng Nepal (Ấn Độ)',
'ne_NP' => 'Tiếng Nepal (Nepal)',
'nl' => 'Tiếng Hà Lan',
'nl_AW' => 'Tiếng Hà Lan (Aruba)',
'nl_BE' => 'Tiếng Hà Lan (Bỉ)',
'nl_BQ' => 'Tiếng Hà Lan (Ca-ri-bê Hà Lan)',
'nl_CW' => 'Tiếng Hà Lan (Curaçao)',
'nl_NL' => 'Tiếng Hà Lan (Hà Lan)',
'nl_SR' => 'Tiếng Hà Lan (Suriname)',
'nl_SX' => 'Tiếng Hà Lan (Sint Maarten)',
'nn' => 'Tiếng Na Uy [Nynorsk]',
'nn_NO' => 'Tiếng Na Uy [Nynorsk] (Na Uy)',
'no' => 'Tiếng Na Uy',
'no_NO' => 'Tiếng Na Uy (Na Uy)',
'om' => 'Tiếng Oromo',
'om_ET' => 'Tiếng Oromo (Ethiopia)',
'om_KE' => 'Tiếng Oromo (Kenya)',
'or' => 'Tiếng Odia',
'or_IN' => 'Tiếng Odia (Ấn Độ)',
'os' => 'Tiếng Ossetic',
'os_GE' => 'Tiếng Ossetic (Georgia)',
'os_RU' => 'Tiếng Ossetic (Nga)',
'pa' => 'Tiếng Punjab',
'pa_Arab' => 'Tiếng Punjab (Chữ Ả Rập)',
'pa_Arab_PK' => 'Tiếng Punjab (Chữ Ả Rập, Pakistan)',
'pa_Guru' => 'Tiếng Punjab (Chữ Gurmukhi)',
'pa_Guru_IN' => 'Tiếng Punjab (Chữ Gurmukhi, Ấn Độ)',
'pa_IN' => 'Tiếng Punjab (Ấn Độ)',
'pa_PK' => 'Tiếng Punjab (Pakistan)',
'pl' => 'Tiếng Ba Lan',
'pl_PL' => 'Tiếng Ba Lan (Ba Lan)',
'ps' => 'Tiếng Pashto',
'ps_AF' => 'Tiếng Pashto (Afghanistan)',
'ps_PK' => 'Tiếng Pashto (Pakistan)',
'pt' => 'Tiếng Bồ Đào Nha',
'pt_AO' => 'Tiếng Bồ Đào Nha (Angola)',
'pt_BR' => 'Tiếng Bồ Đào Nha (Brazil)',
'pt_CH' => 'Tiếng Bồ Đào Nha (Thụy Sĩ)',
'pt_CV' => 'Tiếng Bồ Đào Nha (Cape Verde)',
'pt_GQ' => 'Tiếng Bồ Đào Nha (Guinea Xích Đạo)',
'pt_GW' => 'Tiếng Bồ Đào Nha (Guinea-Bissau)',
'pt_LU' => 'Tiếng Bồ Đào Nha (Luxembourg)',
'pt_MO' => 'Tiếng Bồ Đào Nha (Đặc khu Hành chính Macao, Trung Quốc)',
'pt_MZ' => 'Tiếng Bồ Đào Nha (Mozambique)',
'pt_PT' => 'Tiếng Bồ Đào Nha (Bồ Đào Nha)',
'pt_ST' => 'Tiếng Bồ Đào Nha (São Tomé và Príncipe)',
'pt_TL' => 'Tiếng Bồ Đào Nha (Timor-Leste)',
'qu' => 'Tiếng Quechua',
'qu_BO' => 'Tiếng Quechua (Bolivia)',
'qu_EC' => 'Tiếng Quechua (Ecuador)',
'qu_PE' => 'Tiếng Quechua (Peru)',
'rm' => 'Tiếng Romansh',
'rm_CH' => 'Tiếng Romansh (Thụy Sĩ)',
'rn' => 'Tiếng Rundi',
'rn_BI' => 'Tiếng Rundi (Burundi)',
'ro' => 'Tiếng Romania',
'ro_MD' => 'Tiếng Romania (Moldova)',
'ro_RO' => 'Tiếng Romania (Romania)',
'ru' => 'Tiếng Nga',
'ru_BY' => 'Tiếng Nga (Belarus)',
'ru_KG' => 'Tiếng Nga (Kyrgyzstan)',
'ru_KZ' => 'Tiếng Nga (Kazakhstan)',
'ru_MD' => 'Tiếng Nga (Moldova)',
'ru_RU' => 'Tiếng Nga (Nga)',
'ru_UA' => 'Tiếng Nga (Ukraina)',
'rw' => 'Tiếng Kinyarwanda',
'rw_RW' => 'Tiếng Kinyarwanda (Rwanda)',
'sa' => 'Tiếng Phạn',
'sa_IN' => 'Tiếng Phạn (Ấn Độ)',
'sc' => 'Tiếng Sardinia',
'sc_IT' => 'Tiếng Sardinia (Italy)',
'sd' => 'Tiếng Sindhi',
'sd_Arab' => 'Tiếng Sindhi (Chữ Ả Rập)',
'sd_Arab_PK' => 'Tiếng Sindhi (Chữ Ả Rập, Pakistan)',
'sd_Deva' => 'Tiếng Sindhi (Chữ Devanagari)',
'sd_Deva_IN' => 'Tiếng Sindhi (Chữ Devanagari, Ấn Độ)',
'sd_PK' => 'Tiếng Sindhi (Pakistan)',
'se' => 'Tiếng Sami Miền Bắc',
'se_FI' => 'Tiếng Sami Miền Bắc (Phần Lan)',
'se_NO' => 'Tiếng Sami Miền Bắc (Na Uy)',
'se_SE' => 'Tiếng Sami Miền Bắc (Thụy Điển)',
'sg' => 'Tiếng Sango',
'sg_CF' => 'Tiếng Sango (Cộng hòa Trung Phi)',
'sh' => 'Tiếng Serbo-Croatia',
'sh_BA' => 'Tiếng Serbo-Croatia (Bosnia và Herzegovina)',
'si' => 'Tiếng Sinhala',
'si_LK' => 'Tiếng Sinhala (Sri Lanka)',
'sk' => 'Tiếng Slovak',
'sk_SK' => 'Tiếng Slovak (Slovakia)',
'sl' => 'Tiếng Slovenia',
'sl_SI' => 'Tiếng Slovenia (Slovenia)',
'sn' => 'Tiếng Shona',
'sn_ZW' => 'Tiếng Shona (Zimbabwe)',
'so' => 'Tiếng Somali',
'so_DJ' => 'Tiếng Somali (Djibouti)',
'so_ET' => 'Tiếng Somali (Ethiopia)',
'so_KE' => 'Tiếng Somali (Kenya)',
'so_SO' => 'Tiếng Somali (Somalia)',
'sq' => 'Tiếng Albania',
'sq_AL' => 'Tiếng Albania (Albania)',
'sq_MK' => 'Tiếng Albania (Bắc Macedonia)',
'sr' => 'Tiếng Serbia',
'sr_BA' => 'Tiếng Serbia (Bosnia và Herzegovina)',
'sr_Cyrl' => 'Tiếng Serbia (Chữ Kirin)',
'sr_Cyrl_BA' => 'Tiếng Serbia (Chữ Kirin, Bosnia và Herzegovina)',
'sr_Cyrl_ME' => 'Tiếng Serbia (Chữ Kirin, Montenegro)',
'sr_Cyrl_RS' => 'Tiếng Serbia (Chữ Kirin, Serbia)',
'sr_Latn' => 'Tiếng Serbia (Chữ La tinh)',
'sr_Latn_BA' => 'Tiếng Serbia (Chữ La tinh, Bosnia và Herzegovina)',
'sr_Latn_ME' => 'Tiếng Serbia (Chữ La tinh, Montenegro)',
'sr_Latn_RS' => 'Tiếng Serbia (Chữ La tinh, Serbia)',
'sr_ME' => 'Tiếng Serbia (Montenegro)',
'sr_RS' => 'Tiếng Serbia (Serbia)',
'su' => 'Tiếng Sunda',
'su_ID' => 'Tiếng Sunda (Indonesia)',
'su_Latn' => 'Tiếng Sunda (Chữ La tinh)',
'su_Latn_ID' => 'Tiếng Sunda (Chữ La tinh, Indonesia)',
'sv' => 'Tiếng Thụy Điển',
'sv_AX' => 'Tiếng Thụy Điển (Quần đảo Åland)',
'sv_FI' => 'Tiếng Thụy Điển (Phần Lan)',
'sv_SE' => 'Tiếng Thụy Điển (Thụy Điển)',
'sw' => 'Tiếng Swahili',
'sw_CD' => 'Tiếng Swahili (Congo - Kinshasa)',
'sw_KE' => 'Tiếng Swahili (Kenya)',
'sw_TZ' => 'Tiếng Swahili (Tanzania)',
'sw_UG' => 'Tiếng Swahili (Uganda)',
'ta' => 'Tiếng Tamil',
'ta_IN' => 'Tiếng Tamil (Ấn Độ)',
'ta_LK' => 'Tiếng Tamil (Sri Lanka)',
'ta_MY' => 'Tiếng Tamil (Malaysia)',
'ta_SG' => 'Tiếng Tamil (Singapore)',
'te' => 'Tiếng Telugu',
'te_IN' => 'Tiếng Telugu (Ấn Độ)',
'tg' => 'Tiếng Tajik',
'tg_TJ' => 'Tiếng Tajik (Tajikistan)',
'th' => 'Tiếng Thái',
'th_TH' => 'Tiếng Thái (Thái Lan)',
'ti' => 'Tiếng Tigrinya',
'ti_ER' => 'Tiếng Tigrinya (Eritrea)',
'ti_ET' => 'Tiếng Tigrinya (Ethiopia)',
'tk' => 'Tiếng Turkmen',
'tk_TM' => 'Tiếng Turkmen (Turkmenistan)',
'tl' => 'Tiếng Tagalog',
'tl_PH' => 'Tiếng Tagalog (Philippines)',
'to' => 'Tiếng Tonga',
'to_TO' => 'Tiếng Tonga (Tonga)',
'tr' => 'Tiếng Thổ Nhĩ Kỳ',
'tr_CY' => 'Tiếng Thổ Nhĩ Kỳ (Síp)',
'tr_TR' => 'Tiếng Thổ Nhĩ Kỳ (Thổ Nhĩ Kỳ)',
'tt' => 'Tiếng Tatar',
'tt_RU' => 'Tiếng Tatar (Nga)',
'ug' => 'Tiếng Uyghur',
'ug_CN' => 'Tiếng Uyghur (Trung Quốc)',
'uk' => 'Tiếng Ukraina',
'uk_UA' => 'Tiếng Ukraina (Ukraina)',
'ur' => 'Tiếng Urdu',
'ur_IN' => 'Tiếng Urdu (Ấn Độ)',
'ur_PK' => 'Tiếng Urdu (Pakistan)',
'uz' => 'Tiếng Uzbek',
'uz_AF' => 'Tiếng Uzbek (Afghanistan)',
'uz_Arab' => 'Tiếng Uzbek (Chữ Ả Rập)',
'uz_Arab_AF' => 'Tiếng Uzbek (Chữ Ả Rập, Afghanistan)',
'uz_Cyrl' => 'Tiếng Uzbek (Chữ Kirin)',
'uz_Cyrl_UZ' => 'Tiếng Uzbek (Chữ Kirin, Uzbekistan)',
'uz_Latn' => 'Tiếng Uzbek (Chữ La tinh)',
'uz_Latn_UZ' => 'Tiếng Uzbek (Chữ La tinh, Uzbekistan)',
'uz_UZ' => 'Tiếng Uzbek (Uzbekistan)',
'vi' => 'Tiếng Việt',
'vi_VN' => 'Tiếng Việt (Việt Nam)',
'wo' => 'Tiếng Wolof',
'wo_SN' => 'Tiếng Wolof (Senegal)',
'xh' => 'Tiếng Xhosa',
'xh_ZA' => 'Tiếng Xhosa (Nam Phi)',
'yi' => 'Tiếng Yiddish',
'yi_001' => 'Tiếng Yiddish (Thế giới)',
'yo' => 'Tiếng Yoruba',
'yo_BJ' => 'Tiếng Yoruba (Benin)',
'yo_NG' => 'Tiếng Yoruba (Nigeria)',
'zh' => 'Tiếng Trung',
'zh_CN' => 'Tiếng Trung (Trung Quốc)',
'zh_HK' => 'Tiếng Trung (Đặc khu Hành chính Hồng Kông, Trung Quốc)',
'zh_Hans' => 'Tiếng Trung (Giản thể)',
'zh_Hans_CN' => 'Tiếng Trung (Giản thể, Trung Quốc)',
'zh_Hans_HK' => 'Tiếng Trung (Giản thể, Đặc khu Hành chính Hồng Kông, Trung Quốc)',
'zh_Hans_MO' => 'Tiếng Trung (Giản thể, Đặc khu Hành chính Macao, Trung Quốc)',
'zh_Hans_SG' => 'Tiếng Trung (Giản thể, Singapore)',
'zh_Hant' => 'Tiếng Trung (Phồn thể)',
'zh_Hant_HK' => 'Tiếng Trung (Phồn thể, Đặc khu Hành chính Hồng Kông, Trung Quốc)',
'zh_Hant_MO' => 'Tiếng Trung (Phồn thể, Đặc khu Hành chính Macao, Trung Quốc)',
'zh_Hant_TW' => 'Tiếng Trung (Phồn thể, Đài Loan)',
'zh_MO' => 'Tiếng Trung (Đặc khu Hành chính Macao, Trung Quốc)',
'zh_SG' => 'Tiếng Trung (Singapore)',
'zh_TW' => 'Tiếng Trung (Đài Loan)',
'zu' => 'Tiếng Zulu',
'zu_ZA' => 'Tiếng Zulu (Nam Phi)',
],
];
| derrabus/symfony | src/Symfony/Component/Intl/Resources/data/locales/vi.php | PHP | mit | 30,213 |
/** @license React v16.3.0
* react-dom-test-utils.development.js
*
* Copyright (c) 2013-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
'use strict';
if (process.env.NODE_ENV !== "production") {
(function() {
'use strict';
var _assign = require('object-assign');
var React = require('react');
var ReactDOM = require('react-dom');
var invariant = require('fbjs/lib/invariant');
var warning = require('fbjs/lib/warning');
var emptyFunction = require('fbjs/lib/emptyFunction');
var ExecutionEnvironment = require('fbjs/lib/ExecutionEnvironment');
/**
* WARNING: DO NOT manually require this module.
* This is a replacement for `invariant(...)` used by the error code system
* and will _only_ be required by the corresponding babel pass.
* It always throws.
*/
/**
* `ReactInstanceMap` maintains a mapping from a public facing stateful
* instance (key) and the internal representation (value). This allows public
* methods to accept the user facing instance as an argument and map them back
* to internal methods.
*
* Note that this module is currently shared and assumed to be stateless.
* If this becomes an actual Map, that will break.
*/
/**
* This API should be called `delete` but we'd have to make sure to always
* transform these to strings for IE support. When this transform is fully
* supported we can rename it.
*/
// Look up the internal Fiber node stored on a public component instance.
function get(key) {
  var internalFiber = key._reactInternalFiber;
  return internalFiber;
}
// React's shared internals object (current owner / debug frame), exposed by
// the react package for renderers only.
var ReactInternals = React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;
var ReactCurrentOwner = ReactInternals.ReactCurrentOwner;
var ReactDebugCurrentFrame = ReactInternals.ReactDebugCurrentFrame;
// The Symbol used to tag the ReactElement-like types. If there is no native Symbol
// nor polyfill, then a plain number is used for performance.
// Fiber tag values. NOTE(review): these numeric values must match the WorkTag
// constants of the bundled react-dom build — confirm when upgrading React.
// Before we know whether it is functional or class
var FunctionalComponent = 1;
var ClassComponent = 2;
var HostRoot = 3; // Root of a host tree. Could be nested inside another node.
// A subtree. Could be an entry point to a different renderer.
var HostComponent = 5;
var HostText = 6;
// Effect-tag bitmask values (tested with & below).
// Don't change these two values. They're used by React Dev Tools.
var NoEffect = /* */0;
// You can change the rest (and add more).
var Placement = /* */2;
// Union of all host effects
// Tri-state mount status returned by isFiberMountedImpl.
var MOUNTING = 1;
var MOUNTED = 2;
var UNMOUNTED = 3;
// Classify a fiber as MOUNTING, MOUNTED or UNMOUNTED by walking its return
// pointers up to the root. When the fiber has no alternate it may be part of
// a brand-new tree that is still being inserted, so a pending Placement
// effect anywhere on the path means "mounting".
function isFiberMountedImpl(fiber) {
  var node = fiber;
  var checkForPlacement = !fiber.alternate;

  if (checkForPlacement && (node.effectTag & Placement) !== NoEffect) {
    return MOUNTING;
  }
  while (node['return']) {
    node = node['return'];
    if (checkForPlacement && (node.effectTag & Placement) !== NoEffect) {
      return MOUNTING;
    }
  }

  // Landing on a HostRoot fiber means the tree is attached to a container;
  // otherwise we walked into a detached (unmounted) tree.
  // TODO: Check if this was a nested HostRoot when used with
  // renderContainerIntoSubtree.
  return node.tag === HostRoot ? MOUNTED : UNMOUNTED;
}
// Throw (via invariant) unless the fiber is fully mounted.
function assertIsMounted(fiber) {
  if (isFiberMountedImpl(fiber) !== MOUNTED) {
    invariant(false, 'Unable to find node on an unmounted component.');
  }
}
// Given a fiber that may belong to either the current tree or the
// work-in-progress tree, return the version that is actually mounted
// ("current"), or null while it is still mounting. Throws via invariant when
// the component is unmounted. The walk is order-sensitive: a/b and their
// parents are advanced in lock-step so that a always tracks fiber's branch
// and b tracks alternate's branch.
function findCurrentFiberUsingSlowPath(fiber) {
  var alternate = fiber.alternate;
  if (!alternate) {
    // If there is no alternate, then we only need to check if it is mounted.
    var state = isFiberMountedImpl(fiber);
    !(state !== UNMOUNTED) ? invariant(false, 'Unable to find node on an unmounted component.') : void 0;
    if (state === MOUNTING) {
      return null;
    }
    return fiber;
  }
  // If we have two possible branches, we'll walk backwards up to the root
  // to see what path the root points to. On the way we may hit one of the
  // special cases and we'll deal with them.
  var a = fiber;
  var b = alternate;
  while (true) {
    var parentA = a['return'];
    var parentB = parentA ? parentA.alternate : null;
    if (!parentA || !parentB) {
      // We're at the root.
      break;
    }
    // If both copies of the parent fiber point to the same child, we can
    // assume that the child is current. This happens when we bailout on low
    // priority: the bailed out fiber's child reuses the current child.
    if (parentA.child === parentB.child) {
      var child = parentA.child;
      while (child) {
        if (child === a) {
          // We've determined that A is the current branch.
          assertIsMounted(parentA);
          return fiber;
        }
        if (child === b) {
          // We've determined that B is the current branch.
          assertIsMounted(parentA);
          return alternate;
        }
        child = child.sibling;
      }
      // We should never have an alternate for any mounting node. So the only
      // way this could possibly happen is if this was unmounted, if at all.
      invariant(false, 'Unable to find node on an unmounted component.');
    }
    if (a['return'] !== b['return']) {
      // The return pointer of A and the return pointer of B point to different
      // fibers. We assume that return pointers never criss-cross, so A must
      // belong to the child set of A.return, and B must belong to the child
      // set of B.return.
      a = parentA;
      b = parentB;
    } else {
      // The return pointers point to the same fiber. We'll have to use the
      // default, slow path: scan the child sets of each parent alternate to see
      // which child belongs to which set.
      //
      // Search parent A's child set
      var didFindChild = false;
      var _child = parentA.child;
      while (_child) {
        if (_child === a) {
          didFindChild = true;
          a = parentA;
          b = parentB;
          break;
        }
        if (_child === b) {
          didFindChild = true;
          b = parentA;
          a = parentB;
          break;
        }
        _child = _child.sibling;
      }
      if (!didFindChild) {
        // Search parent B's child set
        _child = parentB.child;
        while (_child) {
          if (_child === a) {
            didFindChild = true;
            a = parentB;
            b = parentA;
            break;
          }
          if (_child === b) {
            didFindChild = true;
            b = parentB;
            a = parentA;
            break;
          }
          _child = _child.sibling;
        }
        !didFindChild ? invariant(false, 'Child was not found in either parent set. This indicates a bug in React related to the return pointer. Please file an issue.') : void 0;
      }
    }
    // After each step, a and b must still be alternates of each other.
    !(a.alternate === b) ? invariant(false, 'Return fibers should always be each others\' alternates. This error is likely caused by a bug in React. Please file an issue.') : void 0;
  }
  // If the root is not a host container, we're in a disconnected tree. I.e.
  // unmounted.
  !(a.tag === HostRoot) ? invariant(false, 'Unable to find node on an unmounted component.') : void 0;
  if (a.stateNode.current === a) {
    // We've determined that A is the current branch.
    return fiber;
  }
  // Otherwise B has to be current branch.
  return alternate;
}
/* eslint valid-typeof: 0 */
// Warn-once latch used when a test adds a new property to a pooled event.
var didWarnForAddedNewProperty = false;
// Maximum number of released SyntheticEvent instances kept for reuse.
var EVENT_POOL_SIZE = 10;
// Instance fields cleared when an event is returned to the pool.
var shouldBeReleasedProperties = ['dispatchConfig', '_targetInst', 'nativeEvent', 'isDefaultPrevented', 'isPropagationStopped', '_dispatchListeners', '_dispatchInstances'];
/**
 * @interface Event
 * @see http://www.w3.org/TR/DOM-Level-3-Events/
 */
// Normalized event fields. NOTE(review): presumably null means "copy the
// value from the native event" and a function computes it — the copying loop
// lives in the SyntheticEvent constructor; confirm there.
var EventInterface = {
  type: null,
  target: null,
  // currentTarget is set when dispatching; no use in copying it here
  currentTarget: emptyFunction.thatReturnsNull,
  eventPhase: null,
  bubbles: null,
  cancelable: null,
  // Fall back to dispatch time when the browser omits a timestamp.
  timeStamp: function (event) {
    return event.timeStamp || Date.now();
  },
  defaultPrevented: null,
  isTrusted: null
};
/**
 * Synthetic events are dispatched by event plugins, typically in response to a
 * top-level event delegation handler.
 *
 * These systems should generally use pooling to reduce the frequency of garbage
 * collection. The system should check `isPersistent` to determine whether the
 * event should be released into the pool after being dispatched. Users that
 * need a persisted event should invoke `persist`.
 *
 * Synthetic events (and subclasses) implement the DOM Level 3 Events API by
 * normalizing browser quirks. Subclasses do not necessarily have to implement a
 * DOM interface; custom application-specific events can also subclass this.
 *
 * @param {object} dispatchConfig Configuration used to dispatch this event.
 * @param {*} targetInst Marker identifying the event target.
 * @param {object} nativeEvent Native browser event.
 * @param {DOMEventTarget} nativeEventTarget Target node.
 */
function SyntheticEvent(dispatchConfig, targetInst, nativeEvent, nativeEventTarget) {
  {
    // these have a getter/setter for warnings
    delete this.nativeEvent;
    delete this.preventDefault;
    delete this.stopPropagation;
  }
  this.dispatchConfig = dispatchConfig;
  this._targetInst = targetInst;
  this.nativeEvent = nativeEvent;
  // Copy every Interface-declared property from the native event, running the
  // normalizer function when the Interface provides one (non-null value).
  var Interface = this.constructor.Interface;
  for (var propName in Interface) {
    if (!Interface.hasOwnProperty(propName)) {
      continue;
    }
    {
      delete this[propName]; // this has a getter/setter for warnings
    }
    var normalize = Interface[propName];
    if (normalize) {
      this[propName] = normalize(nativeEvent);
    } else {
      if (propName === 'target') {
        // `target` is supplied by the caller rather than read off nativeEvent.
        this.target = nativeEventTarget;
      } else {
        this[propName] = nativeEvent[propName];
      }
    }
  }
  // Legacy IE reports prevention via returnValue === false instead of
  // defaultPrevented, so fall back to that when defaultPrevented is absent.
  var defaultPrevented = nativeEvent.defaultPrevented != null ? nativeEvent.defaultPrevented : nativeEvent.returnValue === false;
  if (defaultPrevented) {
    this.isDefaultPrevented = emptyFunction.thatReturnsTrue;
  } else {
    this.isDefaultPrevented = emptyFunction.thatReturnsFalse;
  }
  this.isPropagationStopped = emptyFunction.thatReturnsFalse;
  return this;
}
_assign(SyntheticEvent.prototype, {
  /**
   * Prevents the default browser action for the wrapped native event and
   * flips isDefaultPrevented() to return true from now on.
   */
  preventDefault: function () {
    this.defaultPrevented = true;
    var event = this.nativeEvent;
    if (!event) {
      return;
    }
    if (event.preventDefault) {
      event.preventDefault();
    } else if (typeof event.returnValue !== 'unknown') {
      // Legacy IE fallback. The comparison against the 'unknown' typeof
      // string is IE-specific; see the cancelBubble note in stopPropagation.
      event.returnValue = false;
    }
    this.isDefaultPrevented = emptyFunction.thatReturnsTrue;
  },
  /**
   * Stops further propagation of the wrapped native event and flips
   * isPropagationStopped() to return true from now on.
   */
  stopPropagation: function () {
    var event = this.nativeEvent;
    if (!event) {
      return;
    }
    if (event.stopPropagation) {
      event.stopPropagation();
    } else if (typeof event.cancelBubble !== 'unknown') {
      // The ChangeEventPlugin registers a "propertychange" event for
      // IE. This event does not support bubbling or cancelling, and
      // any references to cancelBubble throw "Member not found". A
      // typeof check of "unknown" circumvents this issue (and is also
      // IE specific).
      event.cancelBubble = true;
    }
    this.isPropagationStopped = emptyFunction.thatReturnsTrue;
  },
  /**
   * We release all dispatched `SyntheticEvent`s after each event loop, adding
   * them back into the pool. This allows a way to hold onto a reference that
   * won't be added back into the pool.
   */
  persist: function () {
    this.isPersistent = emptyFunction.thatReturnsTrue;
  },
  /**
   * Checks if this event should be released back into the pool.
   *
   * @return {boolean} True if this should not be released, false otherwise.
   */
  isPersistent: emptyFunction.thatReturnsFalse,
  /**
   * `PooledClass` looks for `destructor` on each instance it releases.
   */
  destructor: function () {
    var Interface = this.constructor.Interface;
    // Replace every Interface property with a warning getter/setter so that
    // late access to a released event is surfaced to the developer.
    for (var propName in Interface) {
      {
        Object.defineProperty(this, propName, getPooledWarningPropertyDefinition(propName, Interface[propName]));
      }
    }
    // Drop references so pooled instances don't pin large object graphs.
    for (var i = 0; i < shouldBeReleasedProperties.length; i++) {
      this[shouldBeReleasedProperties[i]] = null;
    }
    {
      Object.defineProperty(this, 'nativeEvent', getPooledWarningPropertyDefinition('nativeEvent', null));
      Object.defineProperty(this, 'preventDefault', getPooledWarningPropertyDefinition('preventDefault', emptyFunction));
      Object.defineProperty(this, 'stopPropagation', getPooledWarningPropertyDefinition('stopPropagation', emptyFunction));
    }
  }
});
// Base interface; subclasses merge their own additions onto this via extend().
SyntheticEvent.Interface = EventInterface;
/**
 * Helper to reduce boilerplate when creating subclasses.
 *
 * Produces a constructor whose prototype chains to this one's, whose
 * Interface merges the parent's with `Interface`, and which has its own
 * event pool.
 */
SyntheticEvent.extend = function (Interface) {
  var Super = this;

  // Chain a fresh prototype to Super's without invoking Super itself.
  var Surrogate = function () {};
  Surrogate.prototype = Super.prototype;
  var prototype = new Surrogate();

  function Class() {
    return Super.apply(this, arguments);
  }

  _assign(prototype, Class.prototype);
  Class.prototype = prototype;
  Class.prototype.constructor = Class;

  // Subclass interfaces extend (and may override) the parent's entries.
  Class.Interface = _assign({}, Super.Interface, Interface);
  Class.extend = Super.extend;
  addEventPoolingTo(Class);

  return Class;
};
/** Proxying after everything set on SyntheticEvent
 * to resolve Proxy issue on some WebKit browsers
 * in which some Event properties are set to undefined (GH#10010)
 */
{
  var isProxySupported = typeof Proxy === 'function' &&
  // https://github.com/facebook/react/issues/12011
  !Object.isSealed(new Proxy({}, {}));
  if (isProxySupported) {
    /*eslint-disable no-func-assign */
    // DEV-only: wrap construction so that assigning a brand-new property
    // (not on the Interface, not a pooled property, not isPersistent) to a
    // non-persisted event triggers a one-time warning.
    SyntheticEvent = new Proxy(SyntheticEvent, {
      construct: function (target, args) {
        return this.apply(target, Object.create(target.prototype), args);
      },
      apply: function (constructor, that, args) {
        return new Proxy(constructor.apply(that, args), {
          set: function (target, prop, value) {
            if (prop !== 'isPersistent' && !target.constructor.Interface.hasOwnProperty(prop) && shouldBeReleasedProperties.indexOf(prop) === -1) {
              warning(didWarnForAddedNewProperty || target.isPersistent(), "This synthetic event is reused for performance reasons. If you're " + "seeing this, you're adding a new property in the synthetic event object. " + 'The property is never released. See ' + 'https://fb.me/react-event-pooling for more information.');
              didWarnForAddedNewProperty = true;
            }
            target[prop] = value;
            return true;
          }
        });
      }
    });
    /*eslint-enable no-func-assign */
  }
}
// Give the (possibly Proxy-wrapped) base constructor its event pool.
addEventPoolingTo(SyntheticEvent);
/**
 * Helper to nullify syntheticEvent instance properties when destructing
 *
 * Builds a property descriptor whose getter/setter emit a "released event"
 * warning; the getter still yields `getVal` so reads stay non-fatal.
 *
 * @param {String} propName
 * @param {?object} getVal
 * @return {object} defineProperty object
 */
function getPooledWarningPropertyDefinition(propName, getVal) {
  var isFunction = typeof getVal === 'function';

  function warn(action, result) {
    var warningCondition = false;
    warning(warningCondition, "This synthetic event is reused for performance reasons. If you're seeing this, " + "you're %s `%s` on a released/nullified synthetic event. %s. " + 'If you must keep the original synthetic event around, use event.persist(). ' + 'See https://fb.me/react-event-pooling for more information.', action, propName, result);
  }

  return {
    configurable: true,
    set: function (val) {
      var action = isFunction ? 'setting the method' : 'setting the property';
      warn(action, 'This is effectively a no-op');
      return val;
    },
    get: function () {
      var action = isFunction ? 'accessing the method' : 'accessing the property';
      var result = isFunction ? 'This is a no-op function' : 'This is set to null';
      warn(action, result);
      return getVal;
    }
  };
}
// Pool-aware factory installed as `EventConstructor.getPooled`. Reuses a
// released instance when one is available (re-running the constructor on
// it), otherwise allocates a fresh one. `this` is the event constructor.
function getPooledEvent(dispatchConfig, targetInst, nativeEvent, nativeInst) {
  var EventConstructor = this;
  var pool = EventConstructor.eventPool;
  if (pool.length === 0) {
    return new EventConstructor(dispatchConfig, targetInst, nativeEvent, nativeInst);
  }
  var recycled = pool.pop();
  EventConstructor.call(recycled, dispatchConfig, targetInst, nativeEvent, nativeInst);
  return recycled;
}
// Installed as `EventConstructor.release`. Destructs the event and returns
// it to the constructor's pool (unless the pool is already full). Throws via
// invariant when handed an event of a different constructor.
function releasePooledEvent(event) {
  var EventConstructor = this;
  if (!(event instanceof EventConstructor)) {
    invariant(false, 'Trying to release an event instance into a pool of a different type.');
  }
  event.destructor();
  var pool = EventConstructor.eventPool;
  if (pool.length < EVENT_POOL_SIZE) {
    pool.push(event);
  }
}
// Installs the pooling API (a fresh instance pool plus the getPooled/release
// helpers above) onto an event constructor. Applied to SyntheticEvent itself
// and to every subclass produced by SyntheticEvent.extend.
function addEventPoolingTo(EventConstructor) {
  EventConstructor.eventPool = [];
  EventConstructor.getPooled = getPooledEvent;
  EventConstructor.release = releasePooledEvent;
}
// Module-scope alias used by the simulator code further down in this file.
var SyntheticEvent$1 = SyntheticEvent;
/**
 * Generate a mapping of standard vendor prefixes using the defined style property and event name.
 *
 * Keys are CSS style-property spellings; values are the matching DOM event
 * names (e.g. 'WebkitAnimation' -> 'webkitAnimationEnd').
 *
 * @param {string} styleProp
 * @param {string} eventName
 * @returns {object}
 */
function makePrefixMap(styleProp, eventName) {
  var lowerEvent = eventName.toLowerCase();
  var prefixes = {};

  prefixes[styleProp.toLowerCase()] = lowerEvent;
  prefixes['Webkit' + styleProp] = 'webkit' + eventName;
  prefixes['Moz' + styleProp] = 'moz' + eventName;
  prefixes['ms' + styleProp] = 'MS' + eventName;
  prefixes['O' + styleProp] = 'o' + lowerEvent;

  return prefixes;
}
/**
 * A list of event names to a configurable list of vendor prefixes.
 */
var vendorPrefixes = {
  animationend: makePrefixMap('Animation', 'AnimationEnd'),
  animationiteration: makePrefixMap('Animation', 'AnimationIteration'),
  animationstart: makePrefixMap('Animation', 'AnimationStart'),
  transitionend: makePrefixMap('Transition', 'TransitionEnd')
};
/**
 * Event names that have already been detected and prefixed (if applicable).
 * Acts as a memo cache for getVendorPrefixedEventName below.
 */
var prefixedEventNames = {};
/**
 * Element to check for prefixes on.
 * Swapped for a real <div>'s style object in the DOM bootstrap block below.
 */
var style = {};
/**
 * Bootstrap if a DOM exists.
 * Grabs a real style object for feature detection and prunes un-prefixed
 * entries that the current browser would never fire.
 */
if (ExecutionEnvironment.canUseDOM) {
  style = document.createElement('div').style;
  // On some platforms, in particular some releases of Android 4.x,
  // the un-prefixed "animation" and "transition" properties are defined on the
  // style object but the events that fire will still be prefixed, so we need
  // to check if the un-prefixed events are usable, and if not remove them from the map.
  if (!('AnimationEvent' in window)) {
    delete vendorPrefixes.animationend.animation;
    delete vendorPrefixes.animationiteration.animation;
    delete vendorPrefixes.animationstart.animation;
  }
  // Same as above
  if (!('TransitionEvent' in window)) {
    delete vendorPrefixes.transitionend.transition;
  }
}
/**
 * Attempts to determine the correct vendor prefixed event name.
 *
 * Consults the prefixedEventNames cache first; otherwise probes the detected
 * style object for each candidate style property and caches the first hit.
 * Falls back to the unmodified name when nothing matches.
 *
 * @param {string} eventName
 * @returns {string}
 */
function getVendorPrefixedEventName(eventName) {
  var cached = prefixedEventNames[eventName];
  if (cached) {
    return cached;
  }

  var prefixMap = vendorPrefixes[eventName];
  if (!prefixMap) {
    // Not a prefixable event; use as-is.
    return eventName;
  }

  for (var styleProp in prefixMap) {
    if (!prefixMap.hasOwnProperty(styleProp) || !(styleProp in style)) {
      continue;
    }
    prefixedEventNames[eventName] = prefixMap[styleProp];
    return prefixedEventNames[eventName];
  }

  return eventName;
}
/**
 * Types of raw signals from the browser caught at the top level.
 *
 * For events like 'submit' or audio/video events which don't consistently
 * bubble (which we trap at a lower node than `document`), binding
 * at `document` would cause duplicate events so we don't include them here.
 *
 * Animation/transition entries are resolved through
 * getVendorPrefixedEventName so they match the running browser.
 */
var topLevelTypes = {
  topAnimationEnd: getVendorPrefixedEventName('animationend'),
  topAnimationIteration: getVendorPrefixedEventName('animationiteration'),
  topAnimationStart: getVendorPrefixedEventName('animationstart'),
  topBlur: 'blur',
  topCancel: 'cancel',
  topChange: 'change',
  topClick: 'click',
  topClose: 'close',
  topCompositionEnd: 'compositionend',
  topCompositionStart: 'compositionstart',
  topCompositionUpdate: 'compositionupdate',
  topContextMenu: 'contextmenu',
  topCopy: 'copy',
  topCut: 'cut',
  topDoubleClick: 'dblclick',
  topDrag: 'drag',
  topDragEnd: 'dragend',
  topDragEnter: 'dragenter',
  topDragExit: 'dragexit',
  topDragLeave: 'dragleave',
  topDragOver: 'dragover',
  topDragStart: 'dragstart',
  topDrop: 'drop',
  topFocus: 'focus',
  topInput: 'input',
  topKeyDown: 'keydown',
  topKeyPress: 'keypress',
  topKeyUp: 'keyup',
  topLoad: 'load',
  topLoadStart: 'loadstart',
  topMouseDown: 'mousedown',
  topMouseMove: 'mousemove',
  topMouseOut: 'mouseout',
  topMouseOver: 'mouseover',
  topMouseUp: 'mouseup',
  topPaste: 'paste',
  topScroll: 'scroll',
  topSelectionChange: 'selectionchange',
  topTextInput: 'textInput',
  topToggle: 'toggle',
  topTouchCancel: 'touchcancel',
  topTouchEnd: 'touchend',
  topTouchMove: 'touchmove',
  topTouchStart: 'touchstart',
  topTransitionEnd: getVendorPrefixedEventName('transitionend'),
  topWheel: 'wheel'
};
// There are so many media events, it makes sense to just
// maintain a list of them. Note these aren't technically
// "top-level" since they don't bubble. We should come up
// with a better naming convention if we come to refactoring
// the event system.
// Used (together with topLevelTypes) to build the SimulateNative helpers.
var mediaEventTypes = {
  topAbort: 'abort',
  topCanPlay: 'canplay',
  topCanPlayThrough: 'canplaythrough',
  topDurationChange: 'durationchange',
  topEmptied: 'emptied',
  topEncrypted: 'encrypted',
  topEnded: 'ended',
  topError: 'error',
  topLoadedData: 'loadeddata',
  topLoadedMetadata: 'loadedmetadata',
  topLoadStart: 'loadstart',
  topPause: 'pause',
  topPlay: 'play',
  topPlaying: 'playing',
  topProgress: 'progress',
  topRateChange: 'ratechange',
  topSeeked: 'seeked',
  topSeeking: 'seeking',
  topStalled: 'stalled',
  topSuspend: 'suspend',
  topTimeUpdate: 'timeupdate',
  topVolumeChange: 'volumechange',
  topWaiting: 'waiting'
};
// Public ReactDOM helper: resolves a component instance to its DOM node.
var findDOMNode = ReactDOM.findDOMNode;
// ReactDOM's private internals, exposed for first-party tooling such as
// these test utilities (the long name is a deliberate deterrent).
var _ReactDOM$__SECRET_IN = ReactDOM.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;
var EventPluginHub = _ReactDOM$__SECRET_IN.EventPluginHub;
var EventPluginRegistry = _ReactDOM$__SECRET_IN.EventPluginRegistry;
var EventPropagators = _ReactDOM$__SECRET_IN.EventPropagators;
var ReactControlledComponent = _ReactDOM$__SECRET_IN.ReactControlledComponent;
var ReactDOMComponentTree = _ReactDOM$__SECRET_IN.ReactDOMComponentTree;
var ReactDOMEventListener = _ReactDOM$__SECRET_IN.ReactDOMEventListener;
// Bare-bones fake "native" event; simulators assign `target`, `type` and any
// caller-supplied data onto instances after construction.
function Event(suffix) {}
/**
* @class ReactTestUtils
*/
// Depth-first walk of the *current* (committed) fiber tree rooted at `fiber`,
// collecting the public instances (host nodes / component instances) for
// which the `test` predicate returns true.
function findAllInRenderedFiberTreeInternal(fiber, test) {
  if (!fiber) {
    return [];
  }
  // Resolve to the fiber in the currently-committed tree; a null result means
  // the tree is disconnected/unmounted and there is nothing to search.
  var currentParent = findCurrentFiberUsingSlowPath(fiber);
  if (!currentParent) {
    return [];
  }
  var node = currentParent;
  var ret = [];
  while (true) {
    // Only host and user-component fibers carry public instances worth testing.
    if (node.tag === HostComponent || node.tag === HostText || node.tag === ClassComponent || node.tag === FunctionalComponent) {
      var publicInst = node.stateNode;
      if (test(publicInst)) {
        ret.push(publicInst);
      }
    }
    // Descend into children first (repairing return pointers as we go)...
    if (node.child) {
      node.child['return'] = node;
      node = node.child;
      continue;
    }
    if (node === currentParent) {
      return ret;
    }
    // ...then climb back up until a sibling subtree remains to visit.
    while (!node.sibling) {
      if (!node['return'] || node['return'] === currentParent) {
        return ret;
      }
      node = node['return'];
    }
    node.sibling['return'] = node['return'];
    node = node.sibling;
  }
}
/**
 * Utilities for making it easy to test React components.
 *
 * See https://reactjs.org/docs/test-utils.html
 *
 * TODO: Support the entire DOM.scry query syntax. For now, these simple
 * utilities will suffice for testing purposes.
 * @lends ReactTestUtils
 */
var ReactTestUtils = {
  // Renders `element` into a detached <div> and returns the rendered instance.
  renderIntoDocument: function (element) {
    var div = document.createElement('div');
    // None of our tests actually require attaching the container to the
    // DOM, and doing so creates a mess that we rely on test isolation to
    // clean up, so we're going to stop honoring the name of this method
    // (and probably rename it eventually) if no problems arise.
    // document.documentElement.appendChild(div);
    return ReactDOM.render(element, div);
  },
  // True if `element` is a valid React element.
  isElement: function (element) {
    return React.isValidElement(element);
  },
  // True if `inst` is a React element whose type is `convenienceConstructor`.
  isElementOfType: function (inst, convenienceConstructor) {
    return React.isValidElement(inst) && inst.type === convenienceConstructor;
  },
  // True if `inst` is a rendered host (DOM) component: an element node with a tag.
  isDOMComponent: function (inst) {
    return !!(inst && inst.nodeType === 1 && inst.tagName);
  },
  // True if `inst` is a React element that looks like it wraps a DOM node.
  isDOMComponentElement: function (inst) {
    return !!(inst && React.isValidElement(inst) && !!inst.tagName);
  },
  // True if `inst` is a composite (user-defined) component instance, i.e. it
  // exposes render() and setState().
  isCompositeComponent: function (inst) {
    if (ReactTestUtils.isDOMComponent(inst)) {
      // Accessing inst.setState warns; just return false as that'll be what
      // this returns when we have DOM nodes as refs directly
      return false;
    }
    return inst != null && typeof inst.render === 'function' && typeof inst.setState === 'function';
  },
  // True if `inst` is a composite component constructed from `type`.
  isCompositeComponentWithType: function (inst, type) {
    if (!ReactTestUtils.isCompositeComponent(inst)) {
      return false;
    }
    // NOTE(review): `get` is defined outside this chunk; presumably it maps a
    // public instance to its internal fiber -- confirm against the full file.
    var internalInstance = get(inst);
    var constructor = internalInstance.type;
    return constructor === type;
  },
  // Walks the rendered (committed) tree under `inst` and returns every public
  // instance for which `test` returns true. `inst` must be composite.
  findAllInRenderedTree: function (inst, test) {
    if (!inst) {
      return [];
    }
    !ReactTestUtils.isCompositeComponent(inst) ? invariant(false, 'findAllInRenderedTree(...): instance must be a composite component') : void 0;
    var internalInstance = get(inst);
    return findAllInRenderedFiberTreeInternal(internalInstance, test);
  },
  /**
   * Finds all instance of components in the rendered tree that are DOM
   * components with the class name matching `className`.
   * `classNames` may be a space-separated string or an array; all listed
   * classes must be present on the node for it to match.
   * @return {array} an array of all the matches.
   */
  scryRenderedDOMComponentsWithClass: function (root, classNames) {
    return ReactTestUtils.findAllInRenderedTree(root, function (inst) {
      if (ReactTestUtils.isDOMComponent(inst)) {
        var className = inst.className;
        if (typeof className !== 'string') {
          // SVG, probably.
          className = inst.getAttribute('class') || '';
        }
        var classList = className.split(/\s+/);
        if (!Array.isArray(classNames)) {
          !(classNames !== undefined) ? invariant(false, 'TestUtils.scryRenderedDOMComponentsWithClass expects a className as a second argument.') : void 0;
          classNames = classNames.split(/\s+/);
        }
        return classNames.every(function (name) {
          return classList.indexOf(name) !== -1;
        });
      }
      return false;
    });
  },
  /**
   * Like scryRenderedDOMComponentsWithClass but expects there to be one result,
   * and returns that one result, or throws exception if there is any other
   * number of matches besides one.
   * @return {!ReactDOMComponent} The one match.
   */
  findRenderedDOMComponentWithClass: function (root, className) {
    var all = ReactTestUtils.scryRenderedDOMComponentsWithClass(root, className);
    if (all.length !== 1) {
      throw new Error('Did not find exactly one match (found: ' + all.length + ') ' + 'for class:' + className);
    }
    return all[0];
  },
  /**
   * Finds all instance of components in the rendered tree that are DOM
   * components with the tag name matching `tagName` (case-insensitive).
   * @return {array} an array of all the matches.
   */
  scryRenderedDOMComponentsWithTag: function (root, tagName) {
    return ReactTestUtils.findAllInRenderedTree(root, function (inst) {
      return ReactTestUtils.isDOMComponent(inst) && inst.tagName.toUpperCase() === tagName.toUpperCase();
    });
  },
  /**
   * Like scryRenderedDOMComponentsWithTag but expects there to be one result,
   * and returns that one result, or throws exception if there is any other
   * number of matches besides one.
   * @return {!ReactDOMComponent} The one match.
   */
  findRenderedDOMComponentWithTag: function (root, tagName) {
    var all = ReactTestUtils.scryRenderedDOMComponentsWithTag(root, tagName);
    if (all.length !== 1) {
      throw new Error('Did not find exactly one match (found: ' + all.length + ') ' + 'for tag:' + tagName);
    }
    return all[0];
  },
  /**
   * Finds all instances of components with type equal to `componentType`.
   * @return {array} an array of all the matches.
   */
  scryRenderedComponentsWithType: function (root, componentType) {
    return ReactTestUtils.findAllInRenderedTree(root, function (inst) {
      return ReactTestUtils.isCompositeComponentWithType(inst, componentType);
    });
  },
  /**
   * Same as `scryRenderedComponentsWithType` but expects there to be one result
   * and returns that one result, or throws exception if there is any other
   * number of matches besides one.
   * @return {!ReactComponent} The one match.
   */
  findRenderedComponentWithType: function (root, componentType) {
    var all = ReactTestUtils.scryRenderedComponentsWithType(root, componentType);
    if (all.length !== 1) {
      throw new Error('Did not find exactly one match (found: ' + all.length + ') ' + 'for componentType:' + componentType);
    }
    return all[0];
  },
  /**
   * Pass a mocked component module to this method to augment it with
   * useful methods that allow it to be used as a dummy React component.
   * Instead of rendering as usual, the component will become a simple
   * <div> containing any provided children.
   *
   * @param {object} module the mock function object exported from a
   *                        module that defines the component to be mocked
   * @param {?string} mockTagName optional dummy root tag name to return
   *                              from render method (overrides
   *                              module.mockTagName if provided)
   * @return {object} the ReactTestUtils object (for chaining)
   */
  mockComponent: function (module, mockTagName) {
    mockTagName = mockTagName || module.mockTagName || 'div';
    module.prototype.render.mockImplementation(function () {
      return React.createElement(mockTagName, null, this.props.children);
    });
    return this;
  },
  /**
   * Simulates a top level event being dispatched from a raw event that occurred
   * on an `Element` node.
   * @param {Object} topLevelType A type from `BrowserEventConstants.topLevelTypes`
   * @param {!Element} node The dom to simulate an event occurring on.
   * @param {?Event} fakeNativeEvent Fake native event to use in SyntheticEvent.
   */
  simulateNativeEventOnNode: function (topLevelType, node, fakeNativeEvent) {
    fakeNativeEvent.target = node;
    ReactDOMEventListener.dispatchEvent(topLevelType, fakeNativeEvent);
  },
  /**
   * Simulates a top level event being dispatched from a raw event that occurred
   * on the `ReactDOMComponent` `comp`.
   * @param {Object} topLevelType A type from `BrowserEventConstants.topLevelTypes`.
   * @param {!ReactDOMComponent} comp
   * @param {?Event} fakeNativeEvent Fake native event to use in SyntheticEvent.
   */
  simulateNativeEventOnDOMComponent: function (topLevelType, comp, fakeNativeEvent) {
    ReactTestUtils.simulateNativeEventOnNode(topLevelType, findDOMNode(comp), fakeNativeEvent);
  },
  // Minimal touch-event payload for the given coordinates.
  nativeTouchData: function (x, y) {
    return {
      touches: [{ pageX: x, pageY: y }]
    };
  },
  // Populated by buildSimulators() once the event plugins are known.
  Simulate: null,
  // Populated below from topLevelTypes/mediaEventTypes.
  SimulateNative: {}
};
/**
 * Exports:
 *
 * - `ReactTestUtils.Simulate.click(Element)`
 * - `ReactTestUtils.Simulate.mouseMove(Element)`
 * - `ReactTestUtils.Simulate.change(Element)`
 * - ... (All keys from event plugin `eventTypes` objects)
 */
// Builds the Simulate.<eventType> handler: constructs a persisted synthetic
// event for `eventType` and runs the full dispatch pipeline on a DOM node.
function makeSimulator(eventType) {
  return function (domNode, eventData) {
    // Guard against the two most common misuses: passing a React element or
    // a component instance where an actual DOM node is required.
    !!React.isValidElement(domNode) ? invariant(false, 'TestUtils.Simulate expected a DOM node as the first argument but received a React element. Pass the DOM node you wish to simulate the event on instead. Note that TestUtils.Simulate will not work if you are using shallow rendering.') : void 0;
    !!ReactTestUtils.isCompositeComponent(domNode) ? invariant(false, 'TestUtils.Simulate expected a DOM node as the first argument but received a component instance. Pass the DOM node you wish to simulate the event on instead.') : void 0;
    var dispatchConfig = EventPluginRegistry.eventNameDispatchConfigs[eventType];
    var fakeNativeEvent = new Event();
    fakeNativeEvent.target = domNode;
    fakeNativeEvent.type = eventType.toLowerCase();
    // We don't use SyntheticEvent.getPooled in order to not have to worry about
    // properly destroying any properties assigned from `eventData` upon release
    var targetInst = ReactDOMComponentTree.getInstanceFromNode(domNode);
    var event = new SyntheticEvent$1(dispatchConfig, targetInst, fakeNativeEvent, domNode);
    // Since we aren't using pooling, always persist the event. This will make
    // sure it's marked and won't warn when setting additional properties.
    event.persist();
    _assign(event, eventData);
    // Two-phase (capture/bubble) propagation for regular events; direct
    // dispatch for non-phased ones.
    if (dispatchConfig.phasedRegistrationNames) {
      EventPropagators.accumulateTwoPhaseDispatches(event);
    } else {
      EventPropagators.accumulateDirectDispatches(event);
    }
    ReactDOM.unstable_batchedUpdates(function () {
      // Normally extractEvent enqueues a state restore, but we'll just always
      // do that since we we're by-passing it here.
      ReactControlledComponent.enqueueStateRestore(domNode);
      EventPluginHub.runEventsInBatch(event, true);
    });
    ReactControlledComponent.restoreStateIfNeeded();
  };
}
// (Re)builds ReactTestUtils.Simulate from the event types currently known to
// the plugin registry: one simulator per registered dispatch config.
function buildSimulators() {
  ReactTestUtils.Simulate = {};
  /**
   * Each generated simulator takes:
   * @param {!Element|ReactDOMComponent} domComponentOrNode
   * @param {?object} eventData Fake event data to use in SyntheticEvent.
   */
  for (var eventType in EventPluginRegistry.eventNameDispatchConfigs) {
    ReactTestUtils.Simulate[eventType] = makeSimulator(eventType);
  }
}
// Rebuild ReactTestUtils.Simulate whenever event plugins are injected
// (plugin injection can register new event types after this module loads).
var oldInjectEventPluginOrder = EventPluginHub.injection.injectEventPluginOrder;
EventPluginHub.injection.injectEventPluginOrder = function () {
  oldInjectEventPluginOrder.apply(this, arguments);
  buildSimulators();
};
var oldInjectEventPlugins = EventPluginHub.injection.injectEventPluginsByName;
EventPluginHub.injection.injectEventPluginsByName = function () {
  oldInjectEventPlugins.apply(this, arguments);
  buildSimulators();
};
// Populate Simulate with the plugins that are registered right now.
buildSimulators();
/**
* Exports:
*
* - `ReactTestUtils.SimulateNative.click(Element/ReactDOMComponent)`
* - `ReactTestUtils.SimulateNative.mouseMove(Element/ReactDOMComponent)`
 * - `ReactTestUtils.SimulateNative.mouseIn(Element/ReactDOMComponent)`
* - `ReactTestUtils.SimulateNative.mouseOut(Element/ReactDOMComponent)`
* - ... (All keys from `BrowserEventConstants.topLevelTypes`)
*
* Note: Top level event types are a subset of the entire set of handler types
* (which include a broader set of "synthetic" events). For example, onDragDone
* is a synthetic event. Except when testing an event plugin or React's event
* handling code specifically, you probably want to use ReactTestUtils.Simulate
* to dispatch synthetic events.
*/
// Builds the SimulateNative.<name> handler: fabricates a fake native event of
// `eventType` and dispatches it through the top-level event listener, either
// via a ReactDOMComponent or directly on a raw DOM node.
function makeNativeSimulator(eventType) {
  return function (domComponentOrNode, nativeEventData) {
    var fakeEvent = new Event(eventType);
    _assign(fakeEvent, nativeEventData);

    if (ReactTestUtils.isDOMComponent(domComponentOrNode)) {
      ReactTestUtils.simulateNativeEventOnDOMComponent(eventType, domComponentOrNode, fakeEvent);
      return;
    }
    if (domComponentOrNode.tagName) {
      // Will allow on actual dom nodes.
      ReactTestUtils.simulateNativeEventOnNode(eventType, domComponentOrNode, fakeEvent);
    }
  };
}
// Register a SimulateNative helper for every known top-level and media event.
var eventKeys = [].concat(Object.keys(topLevelTypes), Object.keys(mediaEventTypes));
eventKeys.forEach(function (eventType) {
  // Event type is stored as 'topClick' - we transform that to 'click'
  var convenienceName = eventType.indexOf('top') === 0 ? eventType.charAt(3).toLowerCase() + eventType.substr(4) : eventType;
  /**
   * @param {!Element|ReactDOMComponent} domComponentOrNode
   * @param {?Event} nativeEventData Fake native event to use in SyntheticEvent.
   */
  ReactTestUtils.SimulateNative[convenienceName] = makeNativeSimulator(eventType);
});
var ReactTestUtils$2 = Object.freeze({
  default: ReactTestUtils
});
// Interop shim: prefer the raw object when it exists, else the frozen wrapper.
var ReactTestUtils$3 = ( ReactTestUtils$2 && ReactTestUtils ) || ReactTestUtils$2;
// TODO: decide on the top-level export form.
// This is hacky but makes it work with both Rollup and Jest.
var testUtils = ReactTestUtils$3['default'] ? ReactTestUtils$3['default'] : ReactTestUtils$3;
module.exports = testUtils;
})();
}
| ahocevar/cdnjs | ajax/libs/react-dom/16.3.0/cjs/react-dom-test-utils.development.js | JavaScript | mit | 36,533 |
using Microsoft.CodeAnalysis.CodeRefactorings;
using Microsoft.CodeAnalysis;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.Formatting;
namespace RefactoringEssentials.CSharp.CodeRefactorings
{
[ExportCodeRefactoringProvider(LanguageNames.CSharp, Name = "Convert '??' to '?:'")]
public class ConvertCoalescingToConditionalExpressionCodeRefactoringProvider : CodeRefactoringProvider
{
public override async Task ComputeRefactoringsAsync(CodeRefactoringContext context)
{
var document = context.Document;
if (document.Project.Solution.Workspace.Kind == WorkspaceKind.MiscellaneousFiles)
return;
var span = context.Span;
if (!span.IsEmpty)
return;
var cancellationToken = context.CancellationToken;
if (cancellationToken.IsCancellationRequested)
return;
var root = await document.GetSyntaxRootAsync(cancellationToken);
var model = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
if (model.IsFromGeneratedCode(cancellationToken))
return;
var node = root.FindNode(span) as BinaryExpressionSyntax;
if (node == null || !node.OperatorToken.IsKind(SyntaxKind.QuestionQuestionToken))
return;
context.RegisterRefactoring(
CodeActionFactory.Create(
span,
DiagnosticSeverity.Info,
GettextCatalog.GetString("Replace '??' operator with '?:' expression"), t2 =>
{
var left = node.Left;
var info = model.GetTypeInfo(left, t2);
if (info.ConvertedType.IsNullableType())
left = SyntaxFactory.MemberAccessExpression(SyntaxKind.SimpleMemberAccessExpression, FlipEqualsTargetAndArgumentCodeRefactoringProvider.AddParensIfRequired(left), SyntaxFactory.IdentifierName("Value"));
var ternary = SyntaxFactory.ConditionalExpression(
SyntaxFactory.BinaryExpression(
SyntaxKind.NotEqualsExpression,
node.Left,
SyntaxFactory.LiteralExpression(SyntaxKind.NullLiteralExpression)
),
left,
node.Right
).WithAdditionalAnnotations(Formatter.Annotation);
return Task.FromResult(document.WithSyntaxRoot(root.ReplaceNode((SyntaxNode)node, (ExpressionSyntax)ternary)));
}
)
);
}
}
} | mrward/RefactoringEssentials | RefactoringEssentials/CSharp/CodeRefactorings/Synced/ConvertCoalescingToConditionalExpressionCodeRefactoringProvider.cs | C# | mit | 2,856 |
// DOM elements
var $source;
var $photographer;
var $save;
var $textColor;
var $logo;
var $crop;
var $logoColor;
var $imageLoader;
var $imageLink;
var $imageLinkButton;
var $canvas;
var canvas;
var $qualityQuestions;
var $copyrightHolder;
var $dragHelp;
var $filename;
var $fileinput;
var $customFilename;
// BUG FIX: previously assigned in onDocumentLoad without a declaration,
// creating an implicit global; it is also read by buildForm, so it must be
// declared at module scope (not inside onDocumentLoad).
var $logosWrapper;
// Constants
// BUG FIX: the media-query feature test must be parenthesized --
// 'max-width(700px)' is invalid syntax, so Modernizr.mq() always reported
// false and mobile devices were never detected.
var IS_MOBILE = Modernizr.touch && Modernizr.mq('screen and (max-width: 700px)');
var MIME_TYPES = ['image/jpeg', 'image/png', 'image/gif'];
// state
var scaledImageHeight;
var scaledImageWidth;
// Preview canvas is rendered smaller on mobile.
var previewScale = IS_MOBILE ? 0.32 : 0.64;
var dy = 0;   // vertical crop offset (set while dragging)
var dx = 0;   // horizontal crop offset (set while dragging)
var image;
var imageFilename = 'image';
var currentCopyright;
var credit = 'Belal Khan/Flickr';
var shallowImage = false; // true when the image is wider than the 16:9 crop
// JS objects
var ctx;
var img = new Image();
var logo = new Image();
/*
 * Page bootstrap: caches DOM lookups, loads the default image and logo,
 * binds all UI event handlers and builds the form controls.
 */
var onDocumentLoad = function(e) {
    $source = $('#source');
    $photographer = $('#photographer');
    $canvas = $('#imageCanvas');
    canvas = $canvas[0];
    $imageLoader = $('#imageLoader');
    $imageLink = $('#imageLink');
    $imageLinkButton = $('#imageLinkButton');
    ctx = canvas.getContext('2d');
    $save = $('.save-btn');
    $textColor = $('input[name="textColor"]');
    $crop = $('input[name="crop"]');
    $logoColor = $('input[name="logoColor"]');
    $qualityQuestions = $('.quality-question');
    $copyrightHolder = $('.copyright-holder');
    $dragHelp = $('.drag-help');
    $filename = $('.fileinput-filename');
    $fileinput = $('.fileinput');
    $customFilename = $('.custom-filename');
    // NOTE(review): assigned without `var` -- ensure $logosWrapper is declared
    // with the other module-level variables (buildForm reads it too).
    $logosWrapper = $('.logos-wrapper');
    // Kick off the default image/logo loads; both repaint when ready.
    img.src = defaultImage;
    img.onload = onImageLoad;
    logo.src = defaultLogo;
    logo.onload = renderCanvas;
    // Live re-render as credit fields are typed into.
    $photographer.on('keyup', renderCanvas);
    $source.on('keyup', renderCanvas);
    $imageLoader.on('change', handleImage);
    $imageLinkButton.on('click', handleImageLink);
    $save.on('click', onSaveClick);
    $textColor.on('change', onTextColorChange);
    $logoColor.on('change', onLogoColorChange);
    $crop.on('change', onCropChange);
    $canvas.on('mousedown touchstart', onDrag);
    $copyrightHolder.on('change', onCopyrightChange);
    // Keep clicks inside the custom-filename field from toggling the fileinput.
    $customFilename.on('click', function(e) {
        e.stopPropagation();
    })
    // Disable right-click on the canvas (prevents "Save image as..." of the preview).
    $("body").on("contextmenu", "canvas", function(e) {
        return false;
    });
    // Enter in the URL field behaves like clicking the load button.
    $imageLink.keypress(function(e) {
        if (e.keyCode == 13) {
            handleImageLink();
        }
    });
    // $imageLink.on('paste', handleImageLink);
    $(window).on('resize', resizeCanvas);
    resizeCanvas();
    buildForm();
}
/*
 * Scale the preview canvas (via CSS transforms) so it fits the width of its
 * table cell, then repaint.
 */
var resizeCanvas = function() {
    var scale = $('.canvas-cell').width() / canvasWidth;
    var scaleRule = 'scale(' + scale + ')';

    $canvas.css({
        'webkitTransform': scaleRule,
        'MozTransform': scaleRule,
        'msTransform': scaleRule,
        'OTransform': scaleRule,
        'transform': scaleRule
    });

    renderCanvas();
}
/*
 * Populates the copyright <select> from copyrightOptions and, when more than
 * one logo is configured, builds a logo radio-button group; with a single
 * logo the whole logo section is hidden.
 */
var buildForm = function() {
    var copyrightKeys = Object.keys(copyrightOptions);
    var logoKeys = Object.keys(logos);
    // One <option> per configured copyright holder.
    for (var i = 0; i < copyrightKeys.length; i++) {
        var key = copyrightKeys[i];
        var display = copyrightOptions[key]['display'];
        $copyrightHolder.append('<option value="' + key + '">' + display + '</option>');
    }
    if (logoKeys.length > 1) {
        $logosWrapper.append('<div class="btn-group btn-group-justified btn-group-sm logos" data-toggle="buttons"></div>');
        var $logos = $('.logos');
        for (var j = 0; j < logoKeys.length; j++) {
            var key = logoKeys[j];
            var display = logos[key]['display']
            $logos.append('<label class="btn btn-primary"><input type="radio" name="logo" id="' + key + '" value="' + key + '">' + display + '</label>');
            // NOTE(review): disableLogo() runs once per logo option here --
            // confirm it is intended inside the loop rather than once after it.
            disableLogo();
            // Pre-select the currently active logo.
            if (key === currentLogo) {
                $('#' + key).attr('checked', true);
                $('#' + key).parent('.btn').addClass('active');
            }
        }
        $logo = $('input[name="logo"]');
        $logo.on('change', onLogoChange);
    } else {
        // Only one logo configured -- nothing to choose, hide the section.
        $logosWrapper.hide();
    }
}
/*
 * Draw the image, then the logo, then the credit text.
 *
 * Reads module-level state: img/logo (Image objects), currentCrop,
 * currentLogo, currentLogoColor, currentTextColor, currentCopyright,
 * dx/dy (pan offsets set by onDrag), and the font/padding constants.
 * Side effects: resizes the canvas, sets scaledImageHeight/Width and
 * shallowImage, and finishes by calling validateForm().
 */
var renderCanvas = function() {
    // canvas is always the same width
    canvas.width = canvasWidth;
    // if we're cropping, use the 16:9 aspect ratio for the height
    if (currentCrop !== 'original') {
        canvas.height = canvasWidth / (16/9);
    }
    // clear the canvas
    ctx.clearRect(0,0,canvas.width,canvas.height);
    // determine height of canvas and scaled image, then draw the image
    var imageAspect = img.width / img.height;
    if (currentCrop === 'original') {
        // No crop: canvas takes the image's own aspect ratio, no panning.
        canvas.height = canvasWidth / imageAspect;
        scaledImageHeight = canvas.height;
        ctx.drawImage(
            img,
            0,
            0,
            canvasWidth,
            scaledImageHeight
        );
    } else {
        if (img.width / img.height > canvas.width / canvas.height) {
            // Image is wider than the crop frame: it overflows horizontally,
            // so the user drags along x (dx). "shallow" = shorter than frame.
            shallowImage = true;
            scaledImageHeight = canvasWidth / imageAspect;
            scaledImageWidth = canvas.height * (img.width / img.height)
            ctx.drawImage(
                img,
                0,
                0,
                img.width,
                img.height,
                dx,
                dy,
                scaledImageWidth,
                canvas.height
            );
        } else {
            // Image is taller than the crop frame: overflows vertically,
            // user drags along y (dy).
            shallowImage = false;
            scaledImageHeight = canvasWidth / imageAspect;
            ctx.drawImage(
                img,
                0,
                0,
                img.width,
                img.height,
                dx,
                dy,
                canvasWidth,
                scaledImageHeight
            );
        }
    }
    // set alpha channel, draw the logo
    if (currentLogoColor === 'white') {
        ctx.globalAlpha = whiteLogoAlpha;
    } else {
        ctx.globalAlpha = blackLogoAlpha;
    }
    ctx.drawImage(
        logo,
        elementPadding,
        // NOTE(review): the -14px nudge applies to every non-NPR logo —
        // presumably compensating for logo artwork padding; confirm per logo.
        currentLogo === 'npr'? elementPadding : elementPadding - 14,
        logos[currentLogo]['w'],
        logos[currentLogo]['h']
    );
    // reset alpha channel so text is not translucent
    ctx.globalAlpha = "1";
    // draw the credit text, bottom-right, inset by elementPadding
    ctx.textBaseline = 'bottom';
    ctx.textAlign = 'left';
    ctx.fillStyle = currentTextColor;
    ctx.font = fontWeight + ' ' + fontSize + ' ' + fontFace;
    // White text gets a drop shadow for legibility on light images.
    if (currentTextColor === 'white') {
        ctx.shadowColor = fontShadow;
        ctx.shadowOffsetX = fontShadowOffsetX;
        ctx.shadowOffsetY = fontShadowOffsetY;
        ctx.shadowBlur = fontShadowBlur;
    }
    if (currentCopyright) {
        credit = buildCreditString();
    }
    var creditWidth = ctx.measureText(credit);
    ctx.fillText(
        credit,
        canvas.width - (creditWidth.width + elementPadding),
        canvas.height - elementPadding
    );
    validateForm();
}
/*
 * Build the proper format for the credit based on the current copyright
 * holder: "<photographer>/<source>" when a photographer is given, otherwise
 * just the source. A copyright option may pin a fixed source string.
 * Also toggles the has-warning state on required-but-empty fields.
 */
var buildCreditString = function() {
    var val = $copyrightHolder.val();
    var option = copyrightOptions[val];
    var photographer = $photographer.val();
    var creditString;
    if (photographer !== '') {
        creditString = option['source']
            ? photographer + '/' + option['source']
            : photographer + '/' + $source.val();
    } else {
        creditString = option['source'] ? option['source'] : $source.val();
    }
    if (option['photographerRequired']) {
        var $photographerGroup = $photographer.parents('.form-group');
        if (photographer !== '') {
            $photographerGroup.removeClass('has-warning');
        } else {
            $photographerGroup.addClass('has-warning');
        }
    }
    if (option['sourceRequired']) {
        var $sourceGroup = $source.parents('.form-group');
        if ($source.val() !== '') {
            $sourceGroup.removeClass('has-warning');
        } else {
            $sourceGroup.addClass('has-warning');
        }
    }
    return creditString;
};
/*
 * Check to see if any required fields have not been filled out before
 * enabling saving; while invalid, also suppress the canvas context menu
 * so the image cannot be saved via right-click.
 */
var validateForm = function() {
    var isValid = $('.has-warning').length === 0 && currentCopyright;
    if (isValid) {
        $save.removeAttr('disabled');
        $("body").off("contextmenu", "canvas");
    } else {
        $save.attr('disabled', '');
        $("body").on("contextmenu", "canvas", function(e) {
            return false;
        });
    }
};
/*
 * Handle dragging the image for crops when applicable.
 *
 * Records the drag origin (mouse or first touch) in canvas coordinates,
 * then clamps dx/dy so the image can never be dragged past its edges.
 * Shallow images pan horizontally (dx); tall images pan vertically (dy).
 */
var onDrag = function(e) {
    e.preventDefault();
    var originY = e.clientY||e.originalEvent.targetTouches[0].clientY;
    originY = originY/previewScale;
    var originX = e.clientX||e.originalEvent.targetTouches[0].clientX;
    originX = originX/previewScale;
    var startY = dy;
    var startX = dx;
    // Nothing to drag when the image is shown uncropped.
    if (currentCrop === 'original') {
        return;
    }
    function update(e) {
        var dragY = e.clientY||e.originalEvent.targetTouches[0].clientY;
        dragY = dragY/previewScale;
        var dragX = e.clientX||e.originalEvent.targetTouches[0].clientX;
        dragX = dragX/previewScale;
        if (shallowImage === false) {
            // Tall image: vertical pan, ignore sub-pixel jitter.
            if (Math.abs(dragY - originY) > 1) {
                dy = startY - (originY - dragY);
                // Prevent dragging image below upper bound
                if (dy > 0) {
                    dy = 0;
                    return;
                }
                // Prevent dragging image above lower bound
                if (dy < canvas.height - scaledImageHeight) {
                    dy = canvas.height - scaledImageHeight;
                    return;
                }
                renderCanvas();
            }
        } else {
            // Shallow image: horizontal pan.
            if (Math.abs(dragX - originX) > 1) {
                dx = startX - (originX - dragX);
                // Prevent dragging image below left bound
                if (dx > 0) {
                    dx = 0;
                    return;
                }
                // Prevent dragging image above right bound
                if (dx < canvas.width - scaledImageWidth) {
                    dx = canvas.width - scaledImageWidth;
                    return;
                }
                renderCanvas();
            }
        }
    }
    // Perform drag sequence:
    $(document).on('mousemove.drag touchmove', _.debounce(update, 5, true))
        .on('mouseup.drag touchend', function(e) {
            // Fixed: 'touchend' was missing from the off() list, so a new
            // touchend handler leaked on every touch drag.
            $(document).off('mouseup.drag touchend touchmove mousemove.drag');
            update(e);
        });
};
/*
 * Take an image from the file input, load it into the canvas image, and
 * sync the filename UI (clearing any previously entered image URL).
 */
var handleImage = function(e) {
    var reader = new FileReader();
    reader.onload = function(loadEvent) {
        // A freshly loaded image starts un-panned.
        dy = 0;
        dx = 0;
        image = loadEvent.target.result;
        imageFilename = $('.fileinput-filename').text().split('.')[0];
        img.src = image;
        $customFilename.text(imageFilename);
        $customFilename.parents('.form-group').addClass('has-file');
        $imageLink.val('');
        $imageLink.parents('.form-group').removeClass('has-file');
    };
    reader.readAsDataURL(e.target.files[0]);
};
/*
 * Load a remote image from the URL field, after verifying via an AJAX
 * probe that the URL responds and serves a supported image MIME type.
 */
var handleImageLink = function(e) {
    var requestStatus =
        // Test if image URL returns a 200
        $.ajax({
            url: $imageLink.val(),
            success: function(data, status, xhr) {
                var responseType = xhr.getResponseHeader('content-type').toLowerCase();
                // if content type is jpeg, gif or png, load the image into the canvas
                if (MIME_TYPES.indexOf(responseType) >= 0) {
                    // reset pan offsets for the new image
                    dy = 0;
                    dx = 0;
                    $fileinput.fileinput('clear');
                    $imageLink.parents('.form-group').addClass('has-file').removeClass('has-error');
                    $imageLink.parents('.input-group').next().text('Click to edit name');
                    // Fixed: crossOrigin must be assigned *before* src —
                    // setting it after the load starts means the request is
                    // made without CORS, tainting the canvas and breaking
                    // toDataURL() on save.
                    img.crossOrigin = "anonymous";
                    img.src = $imageLink.val();
                    var filename = $imageLink.val().split('/');
                    imageFilename = filename[filename.length - 1].split('.')[0];
                    $imageLink.val(imageFilename);
                // otherwise, display an error
                } else {
                    $imageLink.parents('.form-group').addClass('has-error');
                    $imageLink.parents('.input-group').next().text('Not a valid image URL');
                    return;
                }
            },
            error: function(data) {
                $imageLink.parents('.form-group').addClass('has-error');
                $imageLink.parents('.input-group').next().text('Not a valid image URL');
            }
        });
};
/*
 * Once the image has loaded, repaint and re-evaluate the crop state
 * (onCropChange sets the draggable status from the new aspect ratio).
 */
var onImageLoad = function(e) {
    renderCanvas();
    onCropChange();
};
/*
 * Load the white or black variant of the current logo, then grey out
 * any color option the logo does not provide.
 */
var loadLogo = function() {
    var pathKey = currentLogoColor === 'white' ? 'whitePath' : 'blackPath';
    logo.src = logos[currentLogo][pathKey];
    disableLogo();
};
/*
 * If an image path is not defined for a color variant of the current
 * logo, grey out that variant's radio button.
 */
var disableLogo = function() {
    var variants = {
        whiteLogo: logos[currentLogo]['whitePath'],
        blackLogo: logos[currentLogo]['blackPath']
    };
    for (var id in variants) {
        var $button = $('#' + id).parent();
        if (typeof variants[id] === "undefined") {
            $button.addClass("disabled");
        } else {
            $button.removeClass("disabled");
        }
    }
};
/*
 * Download the composited canvas as a PNG when save is clicked.
 * Filename precedence: custom filename, then the last path segment of
 * the image URL, then the uploaded file's name.
 */
var onSaveClick = function(e) {
    e.preventDefault();
    /// create an "off-screen" anchor tag
    // Fixed: the original `var link = ..., e;` redeclared and shadowed the
    // handler's event parameter; the synthetic event gets its own name now.
    var link = document.createElement('a');
    /// the key here is to set the download attribute of the a tag
    if ($customFilename.text()) {
        imageFilename = $customFilename.text();
    }
    if ($imageLink.val() !== "") {
        var filename = $imageLink.val().split('/');
        imageFilename = filename[filename.length - 1].split('.')[0];
    }
    link.download = 'waterbug-' + imageFilename + '.png';
    /// convert canvas content to data-uri for link. When download
    /// attribute is set the content pointed to by link will be
    /// pushed as "download" in HTML5 capable browsers
    link.href = canvas.toDataURL();
    link.target = "_blank";
    /// create a "fake" click-event to trigger the download
    if (document.createEvent) {
        var clickEvent = document.createEvent("MouseEvents");
        clickEvent.initMouseEvent("click", true, true, window,
            0, 0, 0, 0, 0, false, false, false,
            false, 0, null);
        link.dispatchEvent(clickEvent);
    } else if (link.fireEvent) {
        link.fireEvent("onclick");
    }
};
/*
 * Handle logo color radio clicks: record the choice ('white'/'black'),
 * swap in the matching logo asset, and repaint.
 */
var onLogoColorChange = function(e) {
    currentLogoColor = $(this).val();
    loadLogo();
    renderCanvas();
};
/*
 * Handle credit-text color radio clicks and repaint.
 */
var onTextColorChange = function(e) {
    currentTextColor = $(this).val();
    renderCanvas();
};
/*
 * Handle logo radio clicks: record the chosen logo key, reload the
 * asset, and repaint.
 */
var onLogoChange = function(e) {
    currentLogo = $(this).val();
    loadLogo();
    renderCanvas();
};
/*
 * Handle crop radio clicks: uncropped images are never draggable;
 * cropped ones become draggable (with a 'shallow' hint for wide images)
 * and show the drag help text.
 */
var onCropChange = function() {
    currentCrop = $crop.filter(':checked').val();
    if (currentCrop === 'original') {
        $canvas.removeClass('is-draggable shallow');
        $dragHelp.hide();
    } else {
        $canvas.addClass(shallowImage ? 'is-draggable shallow' : 'is-draggable');
        $dragHelp.show();
    }
    renderCanvas();
};
/*
 * Show/hide the photographer and source fields for the chosen copyright
 * holder, flag required ones with has-warning/required, then repaint.
 * An unrecognized/empty choice hides both fields and clears the credit.
 */
var onCopyrightChange = function() {
    currentCopyright = $copyrightHolder.val();
    // Cache the field wrappers instead of re-querying on every branch.
    var $photographerGroup = $photographer.parents('.form-group');
    var $sourceGroup = $source.parents('.form-group');
    $photographerGroup.removeClass('has-warning');
    $sourceGroup.removeClass('has-warning');
    var option = copyrightOptions[currentCopyright];
    if (option) {
        if (option['showPhotographer']) {
            $photographerGroup.slideDown();
            if (option['photographerRequired']) {
                $photographerGroup.addClass('has-warning required');
            } else {
                $photographerGroup.removeClass('required');
            }
        } else {
            $photographerGroup.slideUp();
        }
        if (option['showSource']) {
            $sourceGroup.slideDown();
            if (option['sourceRequired']) {
                $sourceGroup.addClass('has-warning required');
            } else {
                $sourceGroup.removeClass('required');
            }
        } else {
            $sourceGroup.slideUp();
        }
    } else {
        $photographerGroup.slideUp();
        $sourceGroup.slideUp();
        credit = '';
    }
    // (Removed a large block of dead, commented-out per-holder logic that
    // predated the data-driven copyrightOptions table.)
    renderCanvas();
};
$(onDocumentLoad);
| mcclatchy/lunchbox | www/js/waterbug.js | JavaScript | mit | 18,520 |
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package time_test
import (
"testing"
"time"
)
// TestVersion3 verifies that a location from the embedded zoneinfo zip
// (Asia/Jerusalem, stored in version-3 format) loads without error.
// Skipped under gccgo, which does not ship the zip file.
func TestVersion3(t *testing.T) {
	t.Skip("gccgo does not use the zip file")
	time.ForceZipFileForTesting(true)
	defer time.ForceZipFileForTesting(false)
	if _, err := time.LoadLocation("Asia/Jerusalem"); err != nil {
		t.Fatal(err)
	}
}
// Test that we get the correct results for times before the first
// transition time. To do this we explicitly check early dates in a
// couple of specific timezones.
func TestFirstZone(t *testing.T) {
	t.Skip("gccgo does not use the zip file")
	time.ForceZipFileForTesting(true)
	defer time.ForceZipFileForTesting(false)

	const format = "Mon, 02 Jan 2006 15:04:05 -0700 (MST)"
	// Each case pins the formatted instant one second before (want1) and
	// at (want2) a known zone transition, identified by its Unix time.
	var tests = []struct {
		zone  string
		unix  int64
		want1 string
		want2 string
	}{
		{
			"PST8PDT",
			-1633269601,
			"Sun, 31 Mar 1918 01:59:59 -0800 (PST)",
			"Sun, 31 Mar 1918 03:00:00 -0700 (PDT)",
		},
		{
			// Fakaofo skipped 30 Dec 2011 entirely when crossing the
			// International Date Line, hence the Thu -> Sat jump.
			"Pacific/Fakaofo",
			1325242799,
			"Thu, 29 Dec 2011 23:59:59 -1100 (TKT)",
			"Sat, 31 Dec 2011 00:00:00 +1300 (TKT)",
		},
	}

	for _, test := range tests {
		z, err := time.LoadLocation(test.zone)
		if err != nil {
			t.Fatal(err)
		}
		s := time.Unix(test.unix, 0).In(z).Format(format)
		if s != test.want1 {
			t.Errorf("for %s %d got %q want %q", test.zone, test.unix, s, test.want1)
		}
		s = time.Unix(test.unix+1, 0).In(z).Format(format)
		if s != test.want2 {
			t.Errorf("for %s %d got %q want %q", test.zone, test.unix, s, test.want2)
		}
	}
}
// TestLocationNames checks the String form of the two fixed locations,
// time.Local and time.UTC.
func TestLocationNames(t *testing.T) {
	if got := time.Local.String(); got != "Local" {
		t.Errorf(`invalid Local location name: got %q want "Local"`, time.Local)
	}
	if got := time.UTC.String(); got != "UTC" {
		t.Errorf(`invalid UTC location name: got %q want "UTC"`, time.UTC)
	}
}
| paranoiacblack/gcc | libgo/go/time/zoneinfo_test.go | GO | gpl-2.0 | 1,871 |
/*
* Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @bug 8033277
* @summary Confirm that scaling of printout is correct. Manual comparison with printout using a supported resolution is needed.
* @run main/manual TestUnsupportedResolution
*/
import java.awt.Graphics;
import java.awt.print.PageFormat;
import java.awt.print.Printable;
import java.awt.print.PrinterException;
import java.awt.print.PrinterJob;
import javax.print.*;
import javax.print.attribute.HashPrintRequestAttributeSet;
import javax.print.attribute.PrintRequestAttributeSet;
import javax.print.attribute.standard.*;
import javax.print.attribute.ResolutionSyntax;
/**
 * Manual test for JDK-8033277: prints a line of text so the tester can
 * compare scaling between a supported and an unsupported printer resolution.
 */
public class TestUnsupportedResolution implements Printable
{
    public static void main(String[] args)
    {
        System.out.println("USAGE: default or no args: it will test 300 dpi\n args is \"600\" : it will test 600 dpi\n------------------------------------------------------\n");
        TestUnsupportedResolution pt = new TestUnsupportedResolution();
        pt.printWorks(args);
    }

    /**
     * Builds the print request: a 300dpi (or, with "600", 600dpi)
     * PrinterResolution attribute, optional FIDELITY_TRUE, then shows the
     * print dialog and prints.
     */
    public void printWorks(String[] args)
    {
        PrinterJob job = PrinterJob.getPrinterJob();
        job.setPrintable(this);
        PrintRequestAttributeSet settings = new HashPrintRequestAttributeSet();
        PrinterResolution pr = new PrinterResolution(300, 300, ResolutionSyntax.DPI);
        if (args.length > 0 && (args[0].compareTo("600") == 0)) {
            pr = new PrinterResolution(600, 600, ResolutionSyntax.DPI);
            System.out.println("Adding 600 Dpi attribute");
        } else {
            System.out.println("Adding 300 Dpi attribute");
        }
        PrintService ps = job.getPrintService();
        boolean resolutionSupported = ps.isAttributeValueSupported(pr, null, null);
        System.out.println("Is "+pr+" supported by "+ps+"? "+resolutionSupported);
        if (resolutionSupported) {
            System.out.println("Resolution is supported.\nTest is not applicable, PASSED");
            // Fixed: the test previously fell through and still showed the
            // print dialog even though it had just declared itself
            // not applicable.
            return;
        }
        settings.add(pr);
        if (args.length > 0 && (args[0].equalsIgnoreCase("fidelity"))) {
            settings.add(Fidelity.FIDELITY_TRUE);
            System.out.println("Adding Fidelity.FIDELITY_TRUE attribute");
        }
        if (job.printDialog(settings))
        {
            try {
                job.print(settings);
            } catch (PrinterException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Printable callback: draws one 200-character line 50pt below the top
     * of the imageable area on page 0 only.
     */
    public int print(Graphics graphics, PageFormat pageFormat, int pageIndex) throws PrinterException
    {
        if (pageIndex > 0)
        {
            return NO_SUCH_PAGE;
        }
        // StringBuilder: no synchronization needed for this local buffer.
        StringBuilder s = new StringBuilder();
        for (int i = 0; i < 10; i++)
        {
            s.append("1234567890ABCDEFGHIJ");
        }
        int x = (int) pageFormat.getImageableX();
        int y = (int) (pageFormat.getImageableY() + 50);
        graphics.drawString(s.toString(), x, y);
        return PAGE_EXISTS;
    }
}
| isaacl/openjdk-jdk | test/javax/print/attribute/TestUnsupportedResolution.java | Java | gpl-2.0 | 3,739 |
/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#include "native_thread.cpp"
#include "nsk_tools.cpp"
#include "jni_tools.cpp"
#include "jvmti_tools.cpp"
#include "agent_tools.cpp"
#include "jvmti_FollowRefObjects.cpp"
#include "Injector.cpp"
#include "JVMTITools.cpp"
#include "agent_common.cpp"
#include "thrstat003.cpp"
| md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jvmti/GetThreadState/thrstat003/libthrstat003.cpp | C++ | gpl-2.0 | 1,327 |
<?php
namespace Drupal\node\Plugin\views\row;
use Drupal\Core\Entity\EntityDisplayRepositoryInterface;
use Drupal\Core\Entity\EntityTypeManagerInterface;
use Drupal\views\Plugin\views\row\RssPluginBase;
/**
* Plugin which performs a node_view on the resulting object
* and formats it as an RSS item.
*
* @ViewsRow(
* id = "node_rss",
* title = @Translation("Content"),
* help = @Translation("Display the content with standard node view."),
* theme = "views_view_row_rss",
* register_theme = FALSE,
* base = {"node_field_data"},
* display_types = {"feed"}
* )
*/
class Rss extends RssPluginBase {

  // Basic properties that let the row style follow relationships.
  public $base_table = 'node_field_data';

  public $base_field = 'nid';

  // Stores the nodes loaded with preRender, keyed by nid.
  public $nodes = [];

  /**
   * {@inheritdoc}
   */
  protected $entityTypeId = 'node';

  /**
   * The node storage
   *
   * @var \Drupal\node\NodeStorageInterface
   */
  protected $nodeStorage;

  /**
   * Constructs the Rss object.
   *
   * @param array $configuration
   *   A configuration array containing information about the plugin instance.
   * @param string $plugin_id
   *   The plugin_id for the plugin instance.
   * @param mixed $plugin_definition
   *   The plugin implementation definition.
   * @param \Drupal\Core\Entity\EntityTypeManagerInterface $entity_type_manager
   *   The entity type manager.
   * @param \Drupal\Core\Entity\EntityDisplayRepositoryInterface $entity_display_repository
   *   The entity display repository.
   */
  public function __construct(array $configuration, $plugin_id, $plugin_definition, EntityTypeManagerInterface $entity_type_manager, EntityDisplayRepositoryInterface $entity_display_repository = NULL) {
    parent::__construct($configuration, $plugin_id, $plugin_definition, $entity_type_manager, $entity_display_repository);
    $this->nodeStorage = $entity_type_manager->getStorage('node');
  }

  /**
   * {@inheritdoc}
   *
   * Adds the node-specific "title only" and "site default" choices to the
   * inherited view-mode options.
   */
  public function buildOptionsForm_summary_options() {
    $options = parent::buildOptionsForm_summary_options();
    $options['title'] = $this->t('Title only');
    $options['default'] = $this->t('Use site default RSS settings');
    return $options;
  }

  /**
   * Returns the display label of the configured view mode for the summary.
   */
  public function summaryTitle() {
    $options = $this->buildOptionsForm_summary_options();
    return $options[$this->options['view_mode']];
  }

  /**
   * Eagerly loads all result nodes in one multiple-load before rendering.
   */
  public function preRender($values) {
    $nids = [];
    foreach ($values as $row) {
      $nids[] = $row->{$this->field_alias};
    }
    if (!empty($nids)) {
      $this->nodes = $this->nodeStorage->loadMultiple($nids);
    }
  }

  /**
   * Renders a single result row as an RSS item render array.
   *
   * Returns nothing (item skipped) when the nid is non-numeric or the node
   * was not loaded by preRender().
   */
  public function render($row) {
    global $base_url;

    $nid = $row->{$this->field_alias};
    if (!is_numeric($nid)) {
      return;
    }

    $display_mode = $this->options['view_mode'];
    if ($display_mode == 'default') {
      // Fall back to the site-wide RSS view mode setting.
      $display_mode = \Drupal::config('system.rss')->get('items.view_mode');
    }

    // Load the specified node:
    /** @var \Drupal\node\NodeInterface $node */
    $node = $this->nodes[$nid];
    if (empty($node)) {
      return;
    }

    $node->link = $node->toUrl('canonical', ['absolute' => TRUE])->toString();
    $node->rss_namespaces = [];
    $node->rss_elements = [
      [
        'key' => 'pubDate',
        'value' => gmdate('r', $node->getCreatedTime()),
      ],
      [
        'key' => 'dc:creator',
        'value' => $node->getOwner()->getDisplayName(),
      ],
      [
        'key' => 'guid',
        'value' => $node->id() . ' at ' . $base_url,
        'attributes' => ['isPermaLink' => 'false'],
      ],
    ];

    // The node gets built and modules add to or modify $node->rss_elements
    // and $node->rss_namespaces.
    $build_mode = $display_mode;

    $build = node_view($node, $build_mode);
    unset($build['#theme']);

    if (!empty($node->rss_namespaces)) {
      $this->view->style_plugin->namespaces = array_merge($this->view->style_plugin->namespaces, $node->rss_namespaces);
    }
    elseif (function_exists('rdf_get_namespaces')) {
      // Merge RDF namespaces in the XML namespaces in case they are used
      // further in the RSS content.
      $xml_rdf_namespaces = [];
      foreach (rdf_get_namespaces() as $prefix => $uri) {
        $xml_rdf_namespaces['xmlns:' . $prefix] = $uri;
      }
      $this->view->style_plugin->namespaces += $xml_rdf_namespaces;
    }

    $item = new \stdClass();
    if ($display_mode != 'title') {
      // We render node contents.
      $item->description = $build;
    }
    $item->title = $node->label();
    $item->link = $node->link;
    // Provide a reference so that the render call in
    // template_preprocess_views_view_row_rss() can still access it.
    $item->elements = &$node->rss_elements;
    $item->nid = $node->id();
    $build = [
      '#theme' => $this->themeFunctions(),
      '#view' => $this->view,
      '#options' => $this->options,
      '#row' => $item,
    ];

    return $build;
  }

}
| enslyon/ensl | core/modules/node/src/Plugin/views/row/Rss.php | PHP | gpl-2.0 | 4,982 |
/* This file is part of the KDE project
Copyright (C) 2006-2007 Alfredo Beaumont Sainz <alfredo.beaumont@gmail.com>
2009 Jeremias Epperlein <jeeree@web.de>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this library; see the file COPYING.LIB. If not, write to
the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
#include "TokenElement.h"
#include "AttributeManager.h"
#include "FormulaCursor.h"
#include "Dictionary.h"
#include "GlyphElement.h"
#include <KoXmlWriter.h>
#include <KoXmlReader.h>
#include <QPainter>
#include <kdebug.h>
// Construct a token with no stretching in either direction.
TokenElement::TokenElement( BasicElement* parent )
    : BasicElement( parent )
{
    m_stretchHorizontally = false;
    m_stretchVertically = false;
}
// Only the embedded mglyph elements count as children of a token.
const QList<BasicElement*> TokenElement::childElements() const
{
    QList<BasicElement*> children;
    for ( int i = 0; i < m_glyphs.count(); ++i ) {
        children << m_glyphs.at( i );
    }
    return children;
}
// Paint the token: a background rectangle, then the pre-built content
// path (filled during layout()) in the foreground color, optionally
// scaled when the token is stretched.
void TokenElement::paint( QPainter& painter, AttributeManager* am )
{
    // set the painter to background color and paint it
    painter.setPen( am->colorOf( "mathbackground", this ) );
    painter.setBrush( QBrush( painter.pen().color() ) );
    painter.drawRect( QRectF( 0.0, 0.0, width(), height() ) );

    // set the painter to foreground color and paint the text in the content path;
    // "mathcolor" wins over the generic "color" attribute when valid.
    QColor color = am->colorOf( "mathcolor", this );
    if (!color.isValid())
        color = am->colorOf( "color", this );
    // Shift by the italic-correction offset and down to the baseline, since
    // the content path was built with its origin on the baseline.
    painter.translate( m_xoffset, baseLine() );
    if(m_stretchHorizontally || m_stretchVertically)
        painter.scale(width() / m_originalSize.width(), height() / m_originalSize.height());
    painter.setPen( color );
    painter.setBrush( QBrush( color ) );
    painter.drawPath( m_contentPath );
}
// The last valid cursor position: one slot per character in the raw string.
int TokenElement::endPosition() const
{
    return m_rawString.length();
}
// Lay out the token: render the raw text (and any embedded glyphs) into
// m_contentPath, record per-character cursor x offsets in m_offsets, and
// set width/height/baseline from the resulting bounding box.
void TokenElement::layout( const AttributeManager* am )
{
    // m_offsets[i] is the x offset before character i; starts with the
    // leading 0.0 boundary.
    m_offsets.erase(m_offsets.begin(),m_offsets.end());
    m_offsets << 0.0;
    // Query the font to use
    m_font = am->font( this );
    QFontMetricsF fm(m_font);
    // save the token in an empty path
    m_contentPath = QPainterPath();
    /* Current bounding box. Note that the left can be negative, for italics etc */
    QRectF boundingrect;
    if(m_glyphs.isEmpty()) {//optimize for the common case: text only, no glyphs
        boundingrect = renderToPath(m_rawString, m_contentPath);
        // Cumulative widths of the prefixes give the cursor offsets.
        for (int j = 0; j < m_rawString.length(); ++j) {
            m_offsets.append(fm.width(m_rawString.left(j+1)));
        }
    } else {
        // replace all the object replacement characters with glyphs
        // We have to keep track of the bounding box at all times
        QString chunk;
        int counter = 0;   // index of the next glyph in m_glyphs
        for( int i = 0; i < m_rawString.length(); i++ ) {
            if( m_rawString[ i ] != QChar::ObjectReplacementCharacter )
                chunk.append( m_rawString[ i ] );
            else {
                // Flush the accumulated text chunk, growing the bounding
                // box to the right and vertically as needed.
                m_contentPath.moveTo(boundingrect.right(), 0);
                QRectF newbox = renderToPath( chunk, m_contentPath );
                boundingrect.setRight( boundingrect.right() + newbox.right());
                boundingrect.setTop( qMax(boundingrect.top(), newbox.top()));
                boundingrect.setBottom( qMax(boundingrect.bottom(), newbox.bottom()));
                qreal glyphoffset = m_offsets.last();
                for (int j = 0; j < chunk.length(); ++j) {
                    m_offsets << fm.width(chunk.left(j+1)) + glyphoffset;
                }
                // Then render the glyph that replaced this character.
                m_contentPath.moveTo(boundingrect.right(), 0);
                newbox = m_glyphs[ counter ]->renderToPath( QString(), m_contentPath );
                boundingrect.setRight( boundingrect.right() + newbox.right());
                boundingrect.setTop( qMax(boundingrect.top(), newbox.top()));
                boundingrect.setBottom( qMax(boundingrect.bottom(), newbox.bottom()));
                m_offsets.append(newbox.width() + m_offsets.last());
                counter++;
                chunk.clear();
            }
        }
        // Trailing text after the last glyph, if any.
        if( !chunk.isEmpty() ) {
            m_contentPath.moveTo(boundingrect.right(), 0);
            QRectF newbox = renderToPath( chunk, m_contentPath );
            boundingrect.setRight( boundingrect.right() + newbox.right());
            boundingrect.setTop( qMax(boundingrect.top(), newbox.top()));
            boundingrect.setBottom( qMax(boundingrect.bottom(), newbox.bottom()));
            // qreal glyphoffset = m_offsets.last();
            for (int j = 0; j < chunk.length(); ++j) {
                m_offsets << fm.width(chunk.left(j+1)) + m_offsets.last();
            }
        }
    }
    //FIXME: This is only a temporary solution
    boundingrect=m_contentPath.boundingRect();
    // Replace the last boundary with the path's true right edge.
    m_offsets.removeLast();
    m_offsets.append(m_contentPath.boundingRect().right());
    //The left side may be negative, because of italised letters etc. we need to adjust for this when painting
    //The qMax is just incase. The bounding box left should never be >0
    m_xoffset = qMax(-boundingrect.left(), (qreal)0.0);
    // As the text is added to (0,0) the baseline equals the top edge of the
    // elements bounding rect, while translating it down the text's baseline moves too
    setBaseLine( -boundingrect.y() ); // set baseline accordingly
    setWidth( boundingrect.right() + m_xoffset );
    setHeight( boundingrect.height() );
    m_originalSize = QSizeF(width(), height());
}
// Generic child insertion is not supported on tokens; glyphs are managed
// through insertGlyphs() instead.
bool TokenElement::insertChild( int position, BasicElement* child )
{
    Q_UNUSED( position )
    Q_UNUSED( child )
    return false;
}
// Splice the given glyphs into m_glyphs at the given index, keeping order.
void TokenElement::insertGlyphs ( int position, QList< GlyphElement* > glyphs )
{
    for ( int i = 0; i < glyphs.length(); ++i ) {
        m_glyphs.insert( position + i, glyphs.at( i ) );
    }
}
// Insert raw text at the given cursor position; always succeeds.
bool TokenElement::insertText ( int position, const QString& text )
{
    m_rawString.insert( position, text );
    return true;
}
// Return the glyph elements whose placeholder characters fall inside
// [position, position+length) of the raw string.
QList< GlyphElement* > TokenElement::glyphList ( int position, int length )
{
    QList<GlyphElement*> tmp;
    // Count the glyph placeholders inside the range.
    // Fixed: both loops previously tested m_rawString[ position ] on every
    // iteration instead of m_rawString[ i ], so the result depended only on
    // the first character.
    int counter = 0;
    for (int i = position; i < position + length; ++i) {
        if (m_rawString[ i ] == QChar::ObjectReplacementCharacter) {
            counter++;
        }
    }
    // Count the placeholders before 'position' to find where the range's
    // glyphs start inside m_glyphs.
    int start = 0;
    if (counter > 0) {
        for (int i = 0; i < position; ++i) {
            if (m_rawString[ i ] == QChar::ObjectReplacementCharacter) {
                start++;
            }
        }
    }
    for (int i = start; i < start + counter; ++i) {
        tmp.append(m_glyphs.at(i));
    }
    return tmp;
}
// Remove 'length' characters starting at 'position', dropping any glyph
// elements whose placeholders fall inside that range. Returns the index
// into m_glyphs of the first removed glyph.
int TokenElement::removeText ( int position, int length )
{
    // Count the glyph placeholders inside the range.
    // Fixed: both loops previously tested m_rawString[ position ] instead
    // of m_rawString[ i ].
    int counter = 0;
    for (int i = position; i < position + length; ++i) {
        if (m_rawString[ i ] == QChar::ObjectReplacementCharacter) {
            counter++;
        }
    }
    // Count the placeholders before 'position' to locate the first glyph
    // to remove.
    int start = 0;
    if (counter > 0) {
        for (int i = 0; i < position; ++i) {
            if (m_rawString[ i ] == QChar::ObjectReplacementCharacter) {
                start++;
            }
        }
    }
    // Fixed: remove at 'start' repeatedly — the list shrinks after each
    // removeAt(), so the old removeAt(i) with an advancing index skipped
    // every other glyph.
    for (int i = 0; i < counter; ++i) {
        m_glyphs.removeAt(start);
    }
    m_rawString.remove(position, length);
    return start;
}
// Place the cursor at the character boundary closest to the clicked point.
bool TokenElement::setCursorTo(FormulaCursor& cursor, QPointF point)
{
    cursor.setCurrentElement(this);
    // Click past the last character: snap to the end.
    if (cursorOffset(endPosition()) < point.x()) {
        cursor.setPosition(endPosition());
        return true;
    }
    // Find the first boundary strictly to the right of the click.
    int pos = 1;
    while (pos < endPosition() && point.x() >= cursorOffset(pos)) {
        ++pos;
    }
    // Snap to whichever side of that character is closer.
    if (point.x() - cursorOffset(pos - 1) < cursorOffset(pos) - point.x()) {
        --pos;
    }
    cursor.setPosition(pos);
    return true;
}
// The vertical caret line for a cursor position; the token itself knows
// the x offset of each character boundary.
QLineF TokenElement::cursorLine(int position) const
{
    const qreal xOffset = cursorOffset( position );
    const QPointF top = absoluteBoundingRect().topLeft() + QPointF( xOffset, 0 );
    const QPointF bottom = top + QPointF( 0.0, height() );
    return QLineF( top, bottom );
}
// Tokens always accept the cursor.
bool TokenElement::acceptCursor( const FormulaCursor& cursor )
{
    Q_UNUSED( cursor )
    return true;
}
// Move the cursor one character left or right inside the token. Returns
// false when the movement should leave the token instead (vertical moves,
// or horizontal moves past either end).
bool TokenElement::moveCursor(FormulaCursor& newcursor, FormulaCursor& oldcursor)
{
    Q_UNUSED( oldcursor )
    const bool leavesToken =
        newcursor.direction() == MoveUp ||
        newcursor.direction() == MoveDown ||
        (newcursor.isHome() && newcursor.direction() == MoveLeft) ||
        (newcursor.isEnd() && newcursor.direction() == MoveRight);
    if (leavesToken) {
        return false;
    }
    if (newcursor.direction() == MoveLeft) {
        newcursor += -1;
    } else if (newcursor.direction() == MoveRight) {
        newcursor += 1;
    }
    return true;
}
// The x offset of a cursor position, from the offsets cached by layout()
// shifted by the italic-correction offset.
qreal TokenElement::cursorOffset( const int position) const
{
    return m_offsets.at( position ) + m_xoffset;
}
// The font resolved during the last layout() pass.
QFont TokenElement::font() const
{
    return m_font;
}
// Replace the token's entire content with the given text.
// Implemented as remove-all followed by insert so the glyph bookkeeping done
// by removeText()/insertText() stays consistent.
void TokenElement::setText ( const QString& text )
{
    removeText(0,m_rawString.length());
    insertText(0,text);
}
// Raw text of the token; embedded glyphs appear as
// QChar::ObjectReplacementCharacter placeholders.
const QString& TokenElement::text()
{
    return m_rawString;
}
// Parse the MathML children of a token element into m_rawString/m_glyphs.
// Returns false when a child element other than <mglyph> is encountered.
bool TokenElement::readMathMLContent( const KoXmlElement& element )
{
    // iterate over all child elements ( possible embedded glyphs ) and put the text
    // content in the m_rawString and mark glyph positions with
    // QChar::ObjectReplacementCharacter
    GlyphElement* tmpGlyph;
    KoXmlNode node = element.firstChild();
    while( !node.isNull() ) {
        if( node.isElement() && node.toElement().tagName() == "mglyph" ) {
            // Embedded glyph: keep a placeholder character in the raw string
            // and store the parsed glyph; the i-th placeholder corresponds to
            // the i-th entry of m_glyphs.
            tmpGlyph = new GlyphElement( this );
            m_rawString.append( QChar( QChar::ObjectReplacementCharacter ) );
            tmpGlyph->readMathML( node.toElement() );
            m_glyphs.append(tmpGlyph);
        }
        else if( node.isElement() )
            // Any other child element is invalid inside a token element.
            return false;
        /*
        else if (node.isEntityReference()) {
            Dictionary dict;
            m_rawString.append( dict.mapEntity( node.nodeName() ) );
        }
        */
        else {
            // Plain character data is appended verbatim.
            m_rawString.append( node.toText().data() );
        }
        node = node.nextSibling();
    }
    // Collapse runs of whitespace and trim the ends (QString::simplified()).
    m_rawString = m_rawString.simplified();
    return true;
}
// Serialize the token's content back to MathML, interleaving text chunks
// with the stored <mglyph> elements.
void TokenElement::writeMathMLContent( KoXmlWriter* writer, const QString& ns ) const
{
    // split the m_rawString into text content chunks that are divided by glyphs
    // which are represented as ObjectReplacementCharacter and write each chunk
    QStringList tmp = m_rawString.split( QChar( QChar::ObjectReplacementCharacter ) );
    for ( int i = 0; i < tmp.count(); i++ ) {
        // NOTE(review): the condition below is loop-invariant — it only checks
        // whether the whole string begins with a glyph placeholder, and thereby
        // selects glyph-before-text vs. text-before-glyph ordering for every
        // chunk. Assumes m_glyphs has enough entries for each index used here
        // — TODO confirm against how m_rawString/m_glyphs are populated.
        if( m_rawString.startsWith( QChar( QChar::ObjectReplacementCharacter ) ) ) {
            m_glyphs[ i ]->writeMathML( writer, ns );
            if (i + 1 < tmp.count()) {
                writer->addTextNode( tmp[ i ] );
            }
        }
        else {
            writer->addTextNode( tmp[ i ] );
            if (i + 1 < tmp.count()) {
                m_glyphs[ i ]->writeMathML( writer, ns );
            }
        }
    }
}
// Plain-text serialization of the token's content (glyph placeholders included).
const QString TokenElement::writeElementContent() const
{
    return m_rawString;
}
| yxl/emscripten-calligra-mobile | plugins/formulashape/elements/TokenElement.cpp | C++ | gpl-2.0 | 12,060 |
<?php
/**
 * Customer booking notification
 *
 * Email template: renders the notification message followed by a booking
 * summary table (product, booking ID, optional resource, start/end dates
 * and non-zero person counts) between the standard WooCommerce email
 * header and footer actions.
 *
 * Variables expected in scope when this template is included:
 * - $email_heading        string heading passed to woocommerce_email_header
 * - $notification_message string free-form message shown above the table
 * - $booking              object booking being described; this template calls
 *                         get_product(), get_id(), has_resources(),
 *                         get_resource(), get_start_date(), get_end_date(),
 *                         has_persons() and get_persons() on it — presumably
 *                         a WC_Booking; confirm against the calling mailer.
 *
 * NOTE(review): values are echoed without esc_html(); acceptable only while
 * product/resource titles and dates are trusted — verify before reuse.
 */
if ( ! defined( 'ABSPATH' ) ) exit; // Exit if accessed directly ?>

<?php do_action( 'woocommerce_email_header', $email_heading ); ?>

<?php echo wpautop( wptexturize( $notification_message ) ); ?>

<table cellspacing="0" cellpadding="6" style="width: 100%; border: 1px solid #eee;" border="1" bordercolor="#eee">
	<tbody>
		<tr>
			<th scope="row" style="text-align:left; border: 1px solid #eee;"><?php _e( 'Booked Product', 'woocommerce-bookings' ); ?></th>
			<td style="text-align:left; border: 1px solid #eee;"><?php echo $booking->get_product()->get_title(); ?></td>
		</tr>
		<tr>
			<th style="text-align:left; border: 1px solid #eee;" scope="row"><?php _e( 'Booking ID', 'woocommerce-bookings' ); ?></th>
			<td style="text-align:left; border: 1px solid #eee;"><?php echo $booking->get_id(); ?></td>
		</tr>
		<?php if ( $booking->has_resources() && ( $resource = $booking->get_resource() ) ) : ?>
			<tr>
				<th style="text-align:left; border: 1px solid #eee;" scope="row"><?php _e( 'Booking Type', 'woocommerce-bookings' ); ?></th>
				<td style="text-align:left; border: 1px solid #eee;"><?php echo $resource->post_title; ?></td>
			</tr>
		<?php endif; ?>
		<tr>
			<th style="text-align:left; border: 1px solid #eee;" scope="row"><?php _e( 'Booking Start Date', 'woocommerce-bookings' ); ?></th>
			<td style="text-align:left; border: 1px solid #eee;"><?php echo $booking->get_start_date(); ?></td>
		</tr>
		<tr>
			<th style="text-align:left; border: 1px solid #eee;" scope="row"><?php _e( 'Booking End Date', 'woocommerce-bookings' ); ?></th>
			<td style="text-align:left; border: 1px solid #eee;"><?php echo $booking->get_end_date(); ?></td>
		</tr>
		<?php if ( $booking->has_persons() ) : ?>
			<?php
				foreach ( $booking->get_persons() as $id => $qty ) :
					if ( 0 === $qty ) {
						continue;
					}

					$person_type = ( 0 < $id ) ? get_the_title( $id ) : __( 'Person(s)', 'woocommerce-bookings' );
				?>
				<tr>
					<th style="text-align:left; border: 1px solid #eee;" scope="row"><?php echo $person_type; ?></th>
					<td style="text-align:left; border: 1px solid #eee;"><?php echo $qty; ?></td>
				</tr>
			<?php endforeach; ?>
		<?php endif; ?>
	</tbody>
</table>

<?php do_action( 'woocommerce_email_footer' ); ?>
<?php
/**
* @file
* Contains \Drupal\Console\Command\Generate\RouteSubscriber.
*/
namespace Drupal\Console\Command\Generate;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
use Drupal\Console\Command\Shared\ModuleTrait;
use Drupal\Console\Generator\RouteSubscriberGenerator;
use Drupal\Console\Command\Shared\ConfirmationTrait;
use Symfony\Component\Console\Command\Command;
use Drupal\Console\Core\Style\DrupalStyle;
use Drupal\Console\Extension\Manager;
use Drupal\Console\Core\Utils\ChainQueue;
use Drupal\Console\Core\Command\Shared\CommandTrait;
/**
* Class RouteSubscriberCommand
*
* @package Drupal\Console\Command\Generate
*/
/**
 * Class RouteSubscriberCommand
 *
 * Console command that generates a RouteSubscriber service inside a module
 * and rebuilds all caches so the new service definition is picked up.
 *
 * @package Drupal\Console\Command\Generate
 */
class RouteSubscriberCommand extends Command
{
    use ModuleTrait;
    use ConfirmationTrait;
    use CommandTrait;

    /**
     * @var Manager
     */
    protected $extensionManager;

    /**
     * @var RouteSubscriberGenerator
     */
    protected $generator;

    /**
     * @var ChainQueue
     */
    protected $chainQueue;

    /**
     * RouteSubscriberCommand constructor.
     *
     * @param Manager                  $extensionManager
     * @param RouteSubscriberGenerator $generator
     * @param ChainQueue               $chainQueue
     */
    public function __construct(
        Manager $extensionManager,
        RouteSubscriberGenerator $generator,
        ChainQueue $chainQueue
    ) {
        $this->extensionManager = $extensionManager;
        $this->generator = $generator;
        $this->chainQueue = $chainQueue;
        parent::__construct();
    }

    /**
     * {@inheritdoc}
     */
    protected function configure()
    {
        $this
            ->setName('generate:routesubscriber')
            ->setDescription($this->trans('commands.generate.routesubscriber.description'))
            ->setHelp($this->trans('commands.generate.routesubscriber.description'))
            ->addOption(
                'module',
                null,
                InputOption::VALUE_REQUIRED,
                $this->trans('commands.common.options.module')
            )
            ->addOption(
                'name',
                null,
                InputOption::VALUE_REQUIRED,
                $this->trans('commands.generate.routesubscriber.options.name')
            )
            ->addOption(
                'class',
                null,
                InputOption::VALUE_REQUIRED,
                $this->trans('commands.generate.routesubscriber.options.class')
            );
    }

    /**
     * {@inheritdoc}
     */
    protected function execute(InputInterface $input, OutputInterface $output)
    {
        // Use the same "$io" naming as interact() instead of shadowing the
        // OutputInterface parameter with the DrupalStyle wrapper.
        $io = new DrupalStyle($input, $output);

        // @see use Drupal\Console\Command\Shared\ConfirmationTrait::confirmGeneration
        if (!$this->confirmGeneration($io)) {
            return 1;
        }

        $module = $input->getOption('module');
        $name = $input->getOption('name');
        $class = $input->getOption('class');

        $this->generator->generate($module, $name, $class);

        // A new service definition requires a full cache rebuild to register.
        $this->chainQueue->addCommand('cache:rebuild', ['cache' => 'all']);

        return 0;
    }

    /**
     * {@inheritdoc}
     */
    protected function interact(InputInterface $input, OutputInterface $output)
    {
        $io = new DrupalStyle($input, $output);

        // --module option
        $module = $input->getOption('module');
        if (!$module) {
            // @see Drupal\Console\Command\Shared\ModuleTrait::moduleQuestion
            $module = $this->moduleQuestion($io);
            $input->setOption('module', $module);
        }

        // --name option
        $name = $input->getOption('name');
        if (!$name) {
            $name = $io->ask(
                $this->trans('commands.generate.routesubscriber.questions.name'),
                $module.'.route_subscriber'
            );
            $input->setOption('name', $name);
        }

        // --class option
        $class = $input->getOption('class');
        if (!$class) {
            $class = $io->ask(
                $this->trans('commands.generate.routesubscriber.questions.class'),
                'RouteSubscriber'
            );
            $input->setOption('class', $class);
        }
    }

    /**
     * Legacy factory kept for backward compatibility; the generator is
     * normally injected through the constructor.
     *
     * @return RouteSubscriberGenerator
     */
    protected function createGenerator()
    {
        return new RouteSubscriberGenerator();
    }
}
| lian-rr/Ecommerse-Drupal | vendor/drupal/console/src/Command/Generate/RouteSubscriberCommand.php | PHP | gpl-2.0 | 4,403 |
#!/usr/bin/python
#
# Copyright 2010, 2011 wkhtmltopdf authors
#
# This file is part of wkhtmltopdf.
#
# wkhtmltopdf is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wkhtmltopdf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with wkhtmltopdf. If not, see <http:#www.gnu.org/licenses/>.
from sys import argv, exit
import re
from datetime import date
import os
import difflib
# "Copyright <years> wkhtmltopdf authors" line; group 1 captures the year list.
cdate = re.compile(r"Copyright ([0-9 ,]*) wkhtmltopdf authors")
# Include-guard opener previously generated by this script
# (#ifndef __X__ immediately followed by #define __X__).
ifdef = re.compile(r"^[\n\r \t]*#ifndef __(.*)__[\t ]*\n#define __(\1)__[\t ]*\n")
# Closing #endif (plus trailing whitespace) of the include guard.
endif = re.compile(r"#endif.*[\r\n \t]*$")
# Trailing spaces/tabs before a line break.
ws = re.compile(r"[ \t]*[\r\n]")
# Branch keyword and its '(' — normalized to exactly one space between them.
branchspace = re.compile(r"([ \t\r\n])(for|if|while|switch|foreach)[\t \r\n]*\(")
# '}' with 'else' on a later line — folded into "} else".
hangelse = re.compile(r"}[\r\n\t ]*(else)")
# ')' or 'else' followed by '{' on a later line — brace pulled up.
braceup = re.compile(r"(\)|else)[\r\n\t ]*{")
# A run of consecutive #include lines (quoted or angle-bracketed).
include = re.compile(r"(#include (\"[^\"]*\"|<[^>]*>)\n)+")
def includesort(x):
	# Sort a matched run of #include lines alphabetically.  The match ends
	# with a newline; drop it before splitting, then re-append one by joining
	# with an empty trailing element.
	lines = x.group(0)[:-1].split("\n")
	return "\n".join(sorted(lines) + [""])
changes=False
progname="wkhtmltopdf"
# Rewrite each file given on the command line: refresh the license header,
# (re)generate include guards for headers, and normalize brace/branch style.
# Exits with status 1 if any file was modified.  (Python 2 script: uses
# `file()` and the `print` statement.)
for path in argv[1:]:
	# Skip bundled third-party headers under include/.
	if path.split("/")[0] == "include": continue
	try:
		data = file(path).read()
	except:
		continue
	# Collect the years already present in the copyright line and add the
	# current year.
	mo = cdate.search(data)
	years = set(mo.group(1).split(", ")) if mo else set()
	years.add(str(date.today().year))
	ext = path.rsplit(".",2)[-1]
	header = ""
	cc = "//"
	# Pick the editor mode-line / shebang and the comment leader by extension;
	# unknown extensions are skipped entirely.
	if ext in ["hh","h","c","cc","cpp","inl", "inc"]:
		header += """// -*- mode: c++; tab-width: 4; indent-tabs-mode: t; eval: (progn (c-set-style "stroustrup") (c-set-offset 'innamespace 0)); -*-
// vi:set ts=4 sts=4 sw=4 noet :
//
"""
	elif ext in ["sh"]:
		header += "#!/bin/bash\n#\n"
		cc = "#"
	elif ext in ["py"]:
		header += "#!/usr/bin/python\n#\n"
		cc = "#"
	elif ext in ["pro","pri"]:
		cc = "#"
	else:
		continue
	# License boilerplate, written with "//" and later re-commented via cc.
	header += """// Copyright %(years)s %(name)s authors
//
// This file is part of %(name)s.
//
// %(name)s is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// %(name)s is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with %(name)s.  If not, see <http://www.gnu.org/licenses/>.
"""%{"years": (", ".join(sorted(list(years)))),"name":progname}
	if ext in ["c", "h", "inc"]:
		# Plain C files get a /* ... */ block comment instead of // lines.
		header = "/*" + header[2:-1] + " */\n\n"
		cc = " *"
		hexp = re.compile(r"^/\*([^*]*(\*[^/]))*[^*]*\*/[ \t\n]*");
	else:
		#Strip away generated header
		hexp = re.compile("^(%s[^\\n]*\\n)*"%(cc))
	ndata = hexp.sub("", data,1)
	ndata = ws.sub("\n", ndata)+"\n"
	if ext in ["hh","h","inl"]:
		# Trim leading/trailing blank space, dropping any previously
		# generated include guard first.
		s=0
		e=-1
		while ndata[s] in ['\r','\n',' ','\t']: s+=1
		while ndata[e] in ['\r','\n',' ','\t']: e-=1
		#Strip away generated ifdef
		if ifdef.search(ndata):
			ndata = endif.sub("",ifdef.sub("",ndata,1),1)
			s=0
			e=-1
			while ndata[s] in ['\r','\n',' ','\t']: s+=1
			while ndata[e] in ['\r','\n',' ','\t']: e-=1
		# Convert indentation from 4 spaces to tabs.
		ndata=ndata[s:e+1].replace("    ",'\t')
	if ext in ["hh","h","c","cc","cpp","inl"]:
		# Style normalization: branch spacing, cuddled else, brace placement,
		# and alphabetized #include runs.
		ndata = branchspace.sub(r"\1\2 (",ndata)
		ndata = hangelse.sub("} else",ndata)
		ndata = braceup.sub(r"\1 {",ndata)
		ndata = include.sub(includesort, ndata)
	if ext in ["hh","h","inl"]:
		# Regenerate the include guard from the file name; the trailing
		# comment on #endif uses /* */ for plain C headers.
		n = os.path.split(path)[-1].replace(".","_").replace(" ","_").upper()
		ndata = """#ifndef __%s__
#define __%s__
%s
#endif %s__%s__%s"""%(n,n,ndata, "//" if ext != "h" else "/*", n, "" if ext != "h" else "*/")
	ndata = header.replace("//",cc)+ndata+"\n"
	if ndata != data:
		# Show a unified diff, remember that something changed, and rewrite.
		for x in difflib.unified_diff(data.split("\n"),ndata.split("\n"), "a/"+path, "b/"+path):
			print x
		changes=True
		file(path, "w").write(ndata)
if changes: exit(1)
| anouschka42/starktheatreprod | sites/all/libraries/wkhtmltopdf-0.12.0/scripts/sourcefix.py | Python | gpl-2.0 | 4,292 |
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\HttpKernel\DependencyInjection;
use Symfony\Component\DependencyInjection\Compiler\CompilerPassInterface;
use Symfony\Component\DependencyInjection\Compiler\ServiceLocatorTagPass;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\Exception\InvalidArgumentException;
use Symfony\Component\DependencyInjection\Reference;
use Symfony\Component\HttpKernel\Fragment\FragmentRendererInterface;
/**
* Adds services tagged kernel.fragment_renderer as HTTP content rendering strategies.
*
* @author Fabien Potencier <fabien@symfony.com>
*/
/**
 * Adds services tagged kernel.fragment_renderer as HTTP content rendering strategies.
 *
 * @author Fabien Potencier <fabien@symfony.com>
 */
class FragmentRendererPass implements CompilerPassInterface
{
    private $handlerService;
    private $rendererTag;

    /**
     * @param string $handlerService Service name of the fragment handler in the container
     * @param string $rendererTag    Tag name used for fragments
     */
    public function __construct($handlerService = 'fragment.handler', $rendererTag = 'kernel.fragment_renderer')
    {
        $this->handlerService = $handlerService;
        $this->rendererTag = $rendererTag;
    }

    /**
     * Collects every tagged renderer into a service locator and injects it
     * as the first argument of the fragment handler definition.
     */
    public function process(ContainerBuilder $container)
    {
        if (!$container->hasDefinition($this->handlerService)) {
            return;
        }

        $renderers = array();
        foreach ($container->findTaggedServiceIds($this->rendererTag, true) as $serviceId => $tags) {
            $rendererDefinition = $container->getDefinition($serviceId);
            $rendererClass = $container->getParameterBag()->resolveValue($rendererDefinition->getClass());

            $reflection = $container->getReflectionClass($rendererClass);
            if (!$reflection) {
                throw new InvalidArgumentException(sprintf('Class "%s" used for service "%s" cannot be found.', $rendererClass, $serviceId));
            }
            if (!$reflection->isSubclassOf(FragmentRendererInterface::class)) {
                throw new InvalidArgumentException(sprintf('Service "%s" must implement interface "%s".', $serviceId, FragmentRendererInterface::class));
            }

            foreach ($tags as $tag) {
                $renderers[$tag['alias']] = new Reference($serviceId);
            }
        }

        $container->getDefinition($this->handlerService)
            ->replaceArgument(0, ServiceLocatorTagPass::register($container, $renderers));
    }
}
| nmacd85/drupal-nicoledawn | vendor/symfony/http-kernel/DependencyInjection/FragmentRendererPass.php | PHP | gpl-2.0 | 2,526 |
// license:BSD-3-Clause
// copyright-holders:Ville Linde
// TMS320C82 Master Processor core execution
#include "emu.h"
#include "tms32082.h"
// Instruction-field extraction helpers (fields of m_ir, the current opcode).
#define OP_LINK() ((m_ir >> 27) & 0x1f)
#define OP_RD() ((m_ir >> 27) & 0x1f)
#define OP_RS() ((m_ir >> 22) & 0x1f)
#define OP_BASE() ((m_ir >> 22) & 0x1f)
#define OP_SIMM15() ((m_ir & 0x4000) ? (0xffffe000 | (m_ir & 0x7fff)) : (m_ir & 0x7fff))
#define OP_UIMM15() (m_ir & 0x7fff)
#define OP_BITNUM() ((m_ir >> 27) & 0x1f)
#define OP_ROTATE() (m_ir & 0x1f)
#define OP_ENDMASK() ((m_ir >> 5) & 0x1f)
#define OP_SRC1() (m_ir & 0x1f)
#define OP_PD() ((m_ir >> 9) & 0x3)
#define OP_P1() ((m_ir >> 5) & 0x3)
#define OP_P2() ((m_ir >> 7) & 0x3)
#define OP_ACC() ((m_ir >> 15) & 0x2) | ((m_ir >> 11) & 1)

// 32-bit rotate left/right by r bits.
#define ROTATE_L(x, r) ((x << r) | (x >> (32-r)))
#define ROTATE_R(x, r) ((x >> r) | (x << (32-r)))

// Flag helpers for the subtraction r = d - s at each operand width:
// signed overflow, carry-out, and sign bit.
#define CMP_OVERFLOW32(r, s, d) ((((d) ^ (s)) & ((d) ^ (r)) & 0x80000000) ? 1 : 0)
#define CMP_OVERFLOW16(r, s, d) ((((d) ^ (s)) & ((d) ^ (r)) & 0x8000) ? 1 : 0)
#define CMP_OVERFLOW8(r, s, d) ((((d) ^ (s)) & ((d) ^ (r)) & 0x80) ? 1 : 0)
#define CARRY32(x) (((x) & (((uint64_t)1) << 32)) ? 1 : 0)
#define CARRY16(x) (((x) & 0x10000) ? 1 : 0)
#define CARRY8(x) (((x) & 0x100) ? 1 : 0)
#define SIGN32(x) (((x) & 0x80000000) ? 1 : 0)
#define SIGN16(x) (((x) & 0x8000) ? 1 : 0)
#define SIGN8(x) (((x) & 0x80) ? 1 : 0)

// Sign-extend x after a right rotate/shift of r bits.
#define SIGN_EXTEND(x, r) ((x) | (((x) & (0x80000000 >> r)) ? ((int32_t)(0x80000000) >> r) : 0))
// Evaluate a bcnd condition code against a register value.
// Bits [4:3] of the condition select the operand width (0 = byte, 1 = word,
// 2 = dword); bits [2:0] select the comparison against zero.  Encodings
// 0x18-0x1f are reserved and always evaluate false.
bool tms32082_mp_device::test_condition(int condition, uint32_t value)
{
	if (condition < 0 || condition > 0x17)
		return false;                       // reserved encodings

	// Sign-extend the operand to 32 bits according to the selected width.
	int32_t v;
	switch (condition >> 3)
	{
		case 0:  v = (int8_t)value;  break; // byte
		case 1:  v = (int16_t)value; break; // word
		default: v = (int32_t)value; break; // dword
	}

	switch (condition & 7)
	{
		case 1:  return v > 0;              // greater than zero
		case 2:  return v == 0;             // equals zero
		case 3:  return v >= 0;             // greater than or equal to zero
		case 4:  return v < 0;              // less than zero
		case 5:  return v != 0;             // not equal to zero
		case 6:  return v <= 0;             // less than or equal to zero
		case 7:  return true;               // always
		default: return false;              // never
	}
}
// Compute the CMP instruction result: evaluate src1 - src2 simultaneously at
// 32-, 16- and 8-bit widths and pack one condition bit per predicate into the
// returned word (bits 20-29: dword, bits 10-19: word, bits 0-9: byte).
uint32_t tms32082_mp_device::calculate_cmp(uint32_t src1, uint32_t src2)
{
    uint16_t src1_16 = (uint16_t)(src1);
    uint8_t src1_8 = (uint8_t)(src1);
    uint16_t src2_16 = (uint16_t)(src2);
    uint8_t src2_8 = (uint8_t)(src2);

    // Widened subtractions so the carry-out bit is observable.
    uint64_t res32 = (uint64_t)src1 - (uint64_t)src2;
    int z32 = (res32 == 0) ? 1 : 0;
    int n32 = SIGN32(res32);
    int v32 = CMP_OVERFLOW32(res32, src2, src1);
    int c32 = CARRY32(res32);

    uint32_t res16 = (uint32_t)src1_16 - (uint32_t)src2_16;
    int z16 = (res16 == 0) ? 1 : 0;
    int n16 = SIGN16(res16);
    int v16 = CMP_OVERFLOW16(res16, src2_16, src1_16);
    int c16 = CARRY16(res16);

    uint16_t res8 = (uint16_t)src1_8 - (uint16_t)src2_8;
    int z8 = (res8 == 0) ? 1 : 0;
    int n8 = SIGN8(res8);
    int v8 = CMP_OVERFLOW8(res8, src2_8, src1_8);
    int c8 = CARRY8(res8);

    uint32_t flags = 0;
    // 32-bits (bits 20-29)
    flags |= ((~c32) & 1) << 29;                                    // higher than or same (C)
    flags |= ((c32) & 1) << 28;                                     // lower than (~C)
    flags |= ((c32|z32) & 1) << 27;                                 // lower than or same (~C|Z)
    flags |= ((~c32&~z32) & 1) << 26;                               // higher than (C&~Z)
    flags |= (((n32&v32)|(~n32&~v32)) & 1) << 25;                   // greater than or equal (N&V)|(~N&~V)
    flags |= (((n32&~v32)|(~n32&v32)) & 1) << 24;                   // less than (N&~V)|(~N&V)
    flags |= (((n32&~v32)|(~n32&v32)|(z32)) & 1) << 23;             // less than or equal (N&~V)|(~N&V)|Z
    flags |= (((n32&v32&~z32)|(~n32&~v32&~z32)) & 1) << 22;         // greater than (N&V&~Z)|(~N&~V&~Z)
    flags |= ((~z32) & 1) << 21;                                    // not equal (~Z)
    flags |= ((z32) & 1) << 20;                                     // equal (Z)
    // 16-bits (bits 10-19)
    flags |= ((~c16) & 1) << 19;                                    // higher than or same (C)
    flags |= ((c16) & 1) << 18;                                     // lower than (~C)
    flags |= ((c16|z16) & 1) << 17;                                 // lower than or same (~C|Z)
    flags |= ((~c16&~z16) & 1) << 16;                               // higher than (C&~Z)
    flags |= (((n16&v16)|(~n16&~v16)) & 1) << 15;                   // greater than or equal (N&V)|(~N&~V)
    flags |= (((n16&~v16)|(~n16&v16)) & 1) << 14;                   // less than (N&~V)|(~N&V)
    flags |= (((n16&~v16)|(~n16&v16)|(z16)) & 1) << 13;             // less than or equal (N&~V)|(~N&V)|Z
    flags |= (((n16&v16&~z16)|(~n16&~v16&~z16)) & 1) << 12;         // greater than (N&V&~Z)|(~N&~V&~Z)
    flags |= ((~z16) & 1) << 11;                                    // not equal (~Z)
    flags |= ((z16) & 1) << 10;                                     // equal (Z)
    // 8-bits (bits 0-9)
    flags |= ((~c8) & 1) << 9;                                      // higher than or same (C)
    flags |= ((c8) & 1) << 8;                                       // lower than (~C)
    flags |= ((c8|z8) & 1) << 7;                                    // lower than or same (~C|Z)
    flags |= ((~c8&~z8) & 1) << 6;                                  // higher than (C&~Z)
    flags |= (((n8&v8)|(~n8&~v8)) & 1) << 5;                        // greater than or equal (N&V)|(~N&~V)
    flags |= (((n8&~v8)|(~n8&v8)) & 1) << 4;                        // less than (N&~V)|(~N&V)
    flags |= (((n8&~v8)|(~n8&v8)|(z8)) & 1) << 3;                   // less than or equal (N&~V)|(~N&V)|Z
    flags |= (((n8&v8&~z8)|(~n8&~v8&~z8)) & 1) << 2;                // greater than (N&V&~Z)|(~N&~V&~Z)
    flags |= ((~z8) & 1) << 1;                                      // not equal (~Z)
    flags |= ((z8) & 1) << 0;                                       // equal (Z)

    return flags;
}
// Execute the load/store part of a vector instruction.  The operation is
// selected by three bits of m_ir; the pointer members (m_outp for stores,
// m_in0p/m_in1p for the two load streams) auto-increment by the access size.
// 64-bit (.d) transfers use the register-pair member m_fpair indexed by rd/2.
void tms32082_mp_device::vector_loadstore()
{
    int rd = OP_RD();
    int vector_ls_bits = (((m_ir >> 9) & 0x3) << 1) | ((m_ir >> 6) & 1);

    switch (vector_ls_bits)
    {
        case 0x01:          // vst.s: store single from rd, advance output pointer
        {
            m_program.write_dword(m_outp, m_reg[rd]);
            m_outp += 4;
            break;
        }
        case 0x03:          // vst.d: store double from register pair
        {
            uint64_t data = m_fpair[rd >> 1];
            m_program.write_qword(m_outp, data);
            m_outp += 8;
            break;
        }
        case 0x04:          // vld0.s: load single from stream 0
        {
            m_reg[rd] = m_program.read_dword(m_in0p);
            m_in0p += 4;
            break;
        }
        case 0x05:          // vld1.s: load single from stream 1
        {
            m_reg[rd] = m_program.read_dword(m_in1p);
            m_in1p += 4;
            break;
        }
        case 0x06:          // vld0.d: load double from stream 0
        {
            m_fpair[rd >> 1] = m_program.read_qword(m_in0p);
            m_in0p += 8;
            break;
        }
        case 0x07:          // vld1.d: load double from stream 1
        {
            m_fpair[rd >> 1] = m_program.read_qword(m_in1p);
            m_in1p += 8;
            break;
        }

        default:
            // Unimplemented/unknown combination — stop hard so it gets noticed.
            fatalerror("vector_loadstore(): ls bits = %02X\n", vector_ls_bits);
    }
}
// Execute one instruction of the short-immediate format class.
// The 7-bit opcode lives in IR bits 15-21; operands come from the OP_*
// field-extraction macros.  Register 0 is the hardwired zero register, so
// every write is guarded by "if (rd)".  Unhandled opcodes are fatal.
void tms32082_mp_device::execute_short_imm()
{
    switch ((m_ir >> 15) & 0x7f)
    {
        case 0x02:          // cmnd — issue a processor command
        {
            uint32_t data = OP_UIMM15();

            processor_command(data);
            break;
        }

        case 0x04:          // rdcr — read control register
        {
            int rd = OP_RD();
            uint32_t imm = OP_UIMM15();

            uint32_t r = read_creg(imm);

            if (rd)
                m_reg[rd] = r;
            break;
        }

        case 0x05:          // swcr — swap control register with rs
        {
            int rd = OP_RD();
            int rs = OP_RS();
            uint32_t imm = OP_UIMM15();

            uint32_t r = read_creg(imm);
            if (rd)
                m_reg[rd] = r;

            write_creg(imm, m_reg[rs]);
            break;
        }

        case 0x06:          // brcr — branch to address held in a control register
        {
            int cr = OP_UIMM15();

            if (cr == 0x0001)
            {
                // ignore jump to EIP because of how we emulate the pipeline
            }
            else
            {
                uint32_t data = read_creg(cr);

                m_fetchpc = data & ~3;
                m_ie = (m_ie & ~1) | (data & 1);        // global interrupt mask from creg
                // TODO: user/supervisor latch from creg
            }
            break;
        }

        case 0x08:          // shift.dz — shift, zero fill (endmask only)
        {
            int r = (m_ir & (1 << 10));
            int inv = (m_ir & (1 << 11));
            int rot = OP_ROTATE();
            int end = OP_ENDMASK();
            uint32_t source = m_reg[OP_RS()];
            int rd = OP_RD();

            uint32_t endmask = SHIFT_MASK[end ? end : 32];
            if (inv) endmask = ~endmask;

            uint32_t compmask = endmask;            // shiftmask == 0xffffffff

            uint32_t res;
            if (r)      // right
            {
                res = ROTATE_R(source, rot) & compmask;
            }
            else        // left
            {
                res = ROTATE_L(source, rot) & compmask;
            }

            if (rd)
                m_reg[rd] = res;
            break;
        }

        case 0x0a:          // shift.ds — shift, sign extend
        {
            int r = (m_ir & (1 << 10));
            int inv = (m_ir & (1 << 11));
            int rot = OP_ROTATE();
            int end = OP_ENDMASK();
            uint32_t source = m_reg[OP_RS()];
            int rd = OP_RD();

            uint32_t endmask = SHIFT_MASK[end ? end : 32];
            if (inv) endmask = ~endmask;

            uint32_t compmask = endmask;            // shiftmask == 0xffffffff

            uint32_t res;
            if (r)      // right
            {
                res = ROTATE_R(source, rot) & compmask;
                res = SIGN_EXTEND(res, rot);
            }
            else        // left
            {
                res = ROTATE_L(source, rot) & compmask;
                // sign extend makes no sense to left..
            }

            if (rd)
                m_reg[rd] = res;
            break;
        }

        case 0x0b:          // shift.ez — shift with end mask, zero fill
        {
            int r = (m_ir & (1 << 10));
            int inv = (m_ir & (1 << 11));
            int rot = OP_ROTATE();
            int end = OP_ENDMASK();
            uint32_t source = m_reg[OP_RS()];
            int rd = OP_RD();

            uint32_t endmask = SHIFT_MASK[end ? end : 32];
            if (inv) endmask = ~endmask;

            int shift = r ? 32-rot : rot;
            uint32_t shiftmask = SHIFT_MASK[shift ? shift : 32];
            uint32_t compmask = endmask & shiftmask;

            uint32_t res;
            if (r)      // right
            {
                res = ROTATE_R(source, rot) & compmask;
            }
            else        // left
            {
                res = ROTATE_L(source, rot) & compmask;
            }

            if (rd)
                m_reg[rd] = res;
            break;
        }

        case 0x0c:          // shift.em — shift with end mask, merge with rd
        {
            int r = (m_ir & (1 << 10));
            int inv = (m_ir & (1 << 11));
            int rot = OP_ROTATE();
            int end = OP_ENDMASK();
            uint32_t source = m_reg[OP_RS()];
            int rd = OP_RD();

            uint32_t endmask = SHIFT_MASK[end ? end : 32];
            if (inv) endmask = ~endmask;

            uint32_t shiftmask = SHIFT_MASK[r ? 32-rot : rot];
            uint32_t compmask = endmask & shiftmask;

            uint32_t res;
            if (r)      // right
            {
                res = (ROTATE_R(source, rot) & compmask) | (m_reg[rd] & ~compmask);
            }
            else        // left
            {
                res = (ROTATE_L(source, rot) & compmask) | (m_reg[rd] & ~compmask);
            }

            if (rd)
                m_reg[rd] = res;
            break;
        }

        case 0x0d:          // shift.es — shift with end mask, sign extend
        {
            int r = (m_ir & (1 << 10));
            int inv = (m_ir & (1 << 11));
            int rot = OP_ROTATE();
            int end = OP_ENDMASK();
            uint32_t source = m_reg[OP_RS()];
            int rd = OP_RD();

            uint32_t endmask = SHIFT_MASK[end ? end : 32];
            if (inv) endmask = ~endmask;

            int shift = r ? 32-rot : rot;
            uint32_t shiftmask = SHIFT_MASK[shift ? shift : 32];
            uint32_t compmask = endmask & shiftmask;

            uint32_t res;
            if (r)      // right
            {
                res = ROTATE_R(source, rot) & compmask;
                res = SIGN_EXTEND(res, rot);
            }
            else        // left
            {
                res = ROTATE_L(source, rot) & compmask;
                // sign extend makes no sense to left..
            }

            if (rd)
                m_reg[rd] = res;
            break;
        }

        case 0x0e:          // shift.iz — shift with inverted shift mask, zero fill
        {
            int r = (m_ir & (1 << 10));
            int inv = (m_ir & (1 << 11));
            int rot = OP_ROTATE();
            int end = OP_ENDMASK();
            uint32_t source = m_reg[OP_RS()];
            int rd = OP_RD();

            uint32_t endmask = SHIFT_MASK[end ? end : 32];
            if (inv) endmask = ~endmask;

            uint32_t shiftmask = SHIFT_MASK[r ? 32-rot : rot];
            uint32_t compmask = endmask & ~shiftmask;

            uint32_t res;
            if (r)      // right
            {
                res = ROTATE_R(source, rot) & compmask;
            }
            else        // left
            {
                res = ROTATE_L(source, rot) & compmask;
            }

            if (rd)
                m_reg[rd] = res;
            break;
        }

        case 0x0f:          // shift.im — shift with inverted shift mask, merge with rd
        {
            int r = (m_ir & (1 << 10));
            int inv = (m_ir & (1 << 11));
            int rot = OP_ROTATE();
            int end = OP_ENDMASK();
            uint32_t source = m_reg[OP_RS()];
            int rd = OP_RD();

            uint32_t endmask = SHIFT_MASK[end ? end : 32];
            if (inv) endmask = ~endmask;

            uint32_t shiftmask = SHIFT_MASK[r ? 32-rot : rot];
            uint32_t compmask = endmask & ~shiftmask;

            uint32_t res;
            if (r)      // right
            {
                res = (ROTATE_R(source, rot) & compmask) | (m_reg[rd] & ~compmask);
            }
            else        // left
            {
                res = (ROTATE_L(source, rot) & compmask) | (m_reg[rd] & ~compmask);
            }

            if (rd)
                m_reg[rd] = res;
            break;
        }

        case 0x11:          // and
        {
            int rd = OP_RD();
            int rs = OP_RS();
            uint32_t imm = OP_UIMM15();

            if (rd)
                m_reg[rd] = m_reg[rs] & imm;
            break;
        }

        case 0x12:          // and.tf — complement rs before the and
        {
            int rd = OP_RD();
            int rs = OP_RS();
            uint32_t imm = OP_UIMM15();

            if (rd)
                m_reg[rd] = ~m_reg[rs] & imm;
            break;
        }

        case 0x14:          // and.ft — complement the immediate before the and
        {
            int rd = OP_RD();
            int rs = OP_RS();
            uint32_t imm = OP_UIMM15();

            if (rd)
                m_reg[rd] = m_reg[rs] & ~imm;
            break;
        }

        case 0x17:          // or
        {
            int rd = OP_RD();
            int rs = OP_RS();
            uint32_t imm = OP_UIMM15();

            if (rd)
                m_reg[rd] = m_reg[rs] | imm;
            break;
        }

        case 0x1d:          // or.ft — complement the immediate before the or
        {
            int rd = OP_RD();
            int rs = OP_RS();
            uint32_t imm = OP_UIMM15();

            if (rd)
                m_reg[rd] = m_reg[rs] | ~imm;
            break;
        }

        // Loads: bit 17 (duplicated opcode bit) selects base-register
        // modify mode — when set, the effective address is written back.
        case 0x24:
        case 0x20:          // ld.b — load signed byte
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;
            uint32_t data = (uint8_t)m_program.read_byte(address);
            if (data & 0x80) data |= 0xffffff00;

            if (rd)
                m_reg[rd] = data;

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x25:
        case 0x21:          // ld.h — load signed halfword
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;
            uint32_t data = (uint16_t)m_program.read_word(address);
            if (data & 0x8000) data |= 0xffff0000;

            if (rd)
                m_reg[rd] = data;

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x26:
        case 0x22:          // ld — load word
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;
            uint32_t data = m_program.read_dword(address);

            if (rd)
                m_reg[rd] = data;

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x27:
        case 0x23:          // ld.d — load doubleword into an even/odd pair
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;
            uint32_t data1 = m_program.read_dword(address);
            uint32_t data2 = m_program.read_dword(address+4);

            if (rd)
            {
                // Word at the lower address goes to the odd register of the pair.
                m_reg[(rd & ~1)+1] = data1;
                m_reg[(rd & ~1)] = data2;
            }

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x28:
        case 0x2c:          // ld.ub — load unsigned byte
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;
            uint32_t data = (uint8_t)(m_program.read_byte(address));

            if (rd)
                m_reg[rd] = data;

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x2d:
        case 0x29:          // ld.uh — load unsigned halfword
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;
            uint32_t data = (uint16_t)(m_program.read_word(address));

            if (rd)
                m_reg[rd] = data;

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x34:
        case 0x30:          // st.b — store byte
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;

            m_program.write_byte(address, (uint8_t)(m_reg[rd]));

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x35:
        case 0x31:          // st.h — store halfword
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;

            m_program.write_word(address, (uint16_t)(m_reg[rd]));

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x36:
        case 0x32:          // st — store word
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;

            m_program.write_dword(address, m_reg[rd]);

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x37:
        case 0x33:          // st.d — store doubleword from an even/odd pair
        {
            int rd = OP_RD();
            int base = OP_BASE();
            int m = m_ir & (1 << 17);
            int32_t offset = OP_SIMM15();

            uint32_t address = m_reg[base] + offset;

            // Odd register of the pair supplies the word at the lower address.
            m_program.write_dword(address+0, m_reg[(rd & ~1) + 1]);
            m_program.write_dword(address+4, m_reg[rd & ~1]);

            if (m && base)
                m_reg[base] = address;
            break;
        }

        case 0x45:          // jsr.a — jump to subroutine, absolute (no delay slot)
        {
            int link = OP_LINK();
            int base = OP_BASE();
            int32_t offset = OP_SIMM15();

            if (link)
                m_reg[link] = m_fetchpc;

            m_fetchpc = m_reg[base] + offset;
            break;
        }

        case 0x48:          // bbz — branch on bit zero, delayed
        {
            // Bit-number fields are encoded inverted (XOR 0x1f).
            int bitnum = OP_BITNUM() ^ 0x1f;
            int32_t offset = OP_SIMM15();
            int rs = OP_RS();

            if ((m_reg[rs] & (1 << bitnum)) == 0)
            {
                // Delayed branch: run the delay-slot instruction first.
                uint32_t address = m_pc + (offset * 4);

                m_pc = m_fetchpc;
                delay_slot();

                m_fetchpc = address;
            }
            break;
        }

        case 0x49:          // bbz.a — branch on bit zero, annulled (no delay slot)
        {
            int bitnum = OP_BITNUM() ^ 0x1f;
            int32_t offset = OP_SIMM15();
            int rs = OP_RS();

            if ((m_reg[rs] & (1 << bitnum)) == 0)
            {
                m_fetchpc = m_pc + (offset * 4);
            }
            break;
        }

        case 0x4a:          // bbo — branch on bit one, delayed
        {
            int bitnum = OP_BITNUM() ^ 0x1f;
            int32_t offset = OP_SIMM15();
            int rs = OP_RS();

            if ((m_reg[rs] & (1 << bitnum)) != 0)
            {
                // Delayed branch: run the delay-slot instruction first.
                uint32_t address = m_pc + (offset * 4);

                m_pc = m_fetchpc;
                delay_slot();

                m_fetchpc = address;
            }
            break;
        }

        case 0x4b:          // bbo.a — branch on bit one, annulled (no delay slot)
        {
            int bitnum = OP_BITNUM() ^ 0x1f;
            int32_t offset = OP_SIMM15();
            int rs = OP_RS();

            if ((m_reg[rs] & (1 << bitnum)) != 0)
            {
                m_fetchpc = m_pc + (offset * 4);
            }
            break;
        }

        case 0x4c:          // bcnd — conditional branch, delayed
        {
            int32_t offset = OP_SIMM15();
            int code = OP_RD();
            int rs = OP_RS();

            if (test_condition(code, m_reg[rs]))
            {
                // Delayed branch: run the delay-slot instruction first.
                uint32_t address = m_pc + (offset * 4);

                m_pc = m_fetchpc;
                delay_slot();

                m_fetchpc = address;
            }
            break;
        }

        case 0x4d:          // bcnd.a — conditional branch, annulled (no delay slot)
        {
            int32_t offset = OP_SIMM15();
            int code = OP_RD();
            int rs = OP_RS();

            if (test_condition(code, m_reg[rs]))
            {
                m_fetchpc = m_pc + (offset * 4);
            }
            break;
        }

        case 0x50:          // cmp — pack all compare predicates into rd
        {
            uint32_t src1 = OP_SIMM15();
            uint32_t src2 = m_reg[OP_RS()];
            int rd = OP_RD();

            if (rd)
                m_reg[rd] = calculate_cmp(src1, src2);
            break;
        }

        case 0x58:          // add
        {
            int32_t imm = OP_SIMM15();
            int rd = OP_RD();
            int rs = OP_RS();

            if (rd)
                m_reg[rd] = m_reg[rs] + imm;

            // TODO: integer overflow exception
            break;
        }

        case 0x59:          // addu
        {
            int32_t imm = OP_SIMM15();
            int rd = OP_RD();
            int rs = OP_RS();

            if (rd)
                m_reg[rd] = m_reg[rs] + imm;
            break;
        }

        case 0x5a:          // sub — note: computes imm - rs (immediate is the minuend)
        {
            int32_t imm = OP_SIMM15();
            int rd = OP_RD();
            int rs = OP_RS();

            if (rd)
                m_reg[rd] = imm - m_reg[rs];

            // TODO: integer overflow exception
            break;
        }

        case 0x5b:          // subu — unsigned variant of the above
        {
            int32_t imm = OP_SIMM15();
            int rd = OP_RD();
            int rs = OP_RS();

            if (rd)
                m_reg[rd] = imm - m_reg[rs];
            break;
        }

        default:
            fatalerror("execute_short_imm(): %08X: opcode %08X (%02X)", m_pc, m_ir, (m_ir >> 15) & 0x7f);
    }
}
void tms32082_mp_device::execute_reg_long_imm()
{
uint32_t imm32 = 0;
int has_imm = (m_ir & (1 << 12));
if (has_imm)
imm32 = fetch();
switch ((m_ir >> 12) & 0xff)
{
case 0x04: // cmnd
{
uint32_t data = has_imm ? imm32 : m_reg[OP_SRC1()];
processor_command(data);
break;
}
case 0x16: // shift.ez
{
int r = (m_ir & (1 << 10));
int inv = (m_ir & (1 << 11));
int rot = m_reg[OP_ROTATE()];
int end = OP_ENDMASK();
uint32_t source = m_reg[OP_RS()];
int rd = OP_RD();
uint32_t endmask = end ? SHIFT_MASK[end ? end : 32] : m_reg[OP_ROTATE()+1];
if (inv) endmask = ~endmask;
int shift = r ? 32-rot : rot;
uint32_t shiftmask = SHIFT_MASK[shift ? shift : 32];
uint32_t compmask = endmask & shiftmask;
uint32_t res;
if (r) // right
{
res = ROTATE_R(source, rot) & compmask;
}
else // left
{
res = ROTATE_L(source, rot) & compmask;
}
if (rd)
m_reg[rd] = res;
break;
}
case 0x1a: // shift.es
{
int r = (m_ir & (1 << 10));
int inv = (m_ir & (1 << 11));
int rot = m_reg[OP_ROTATE()];
int end = OP_ENDMASK();
uint32_t source = m_reg[OP_RS()];
int rd = OP_RD();
uint32_t endmask = end ? SHIFT_MASK[end ? end : 32] : m_reg[OP_ROTATE()+1];
if (inv) endmask = ~endmask;
int shift = r ? 32-rot : rot;
uint32_t shiftmask = SHIFT_MASK[shift ? shift : 32];
uint32_t compmask = endmask & shiftmask;
uint32_t res;
if (r) // right
{
res = ROTATE_R(source, rot) & compmask;
res = SIGN_EXTEND(res, rot);
}
else // left
{
res = ROTATE_L(source, rot) & compmask;
}
if (rd)
m_reg[rd] = res;
break;
}
case 0x1c: // shift.iz
{
int r = (m_ir & (1 << 10));
int inv = (m_ir & (1 << 11));
int rot = m_reg[OP_ROTATE()];
int end = OP_ENDMASK();
uint32_t source = m_reg[OP_RS()];
int rd = OP_RD();
uint32_t endmask = end ? SHIFT_MASK[end ? end : 32] : m_reg[OP_ROTATE()+1];
if (inv) endmask = ~endmask;
int shift = r ? 32-rot : rot;
uint32_t shiftmask = SHIFT_MASK[shift ? shift : 32];
uint32_t compmask = endmask & ~shiftmask;
uint32_t res;
if (r) // right
{
res = ROTATE_R(source, rot) & compmask;
}
else // left
{
res = ROTATE_L(source, rot) & compmask;
}
if (rd)
m_reg[rd] = res;
break;
}
case 0x22:
case 0x23: // and
{
int rd = OP_RD();
int rs = OP_RS();
uint32_t src1 = has_imm ? imm32 : m_reg[OP_SRC1()];
if (rd)
m_reg[rd] = src1 & m_reg[rs];
break;
}
case 0x24:
case 0x25: // and.tf
{
int rd = OP_RD();
int rs = OP_RS();
uint32_t src1 = has_imm ? imm32 : m_reg[OP_SRC1()];
if (rd)
m_reg[rd] = src1 & ~(m_reg[rs]);
break;
}
case 0x2c:
case 0x2d: // xor
{
int rd = OP_RD();
int rs = OP_RS();
if (rd)
m_reg[rd] = m_reg[rs] ^ (has_imm ? imm32 : m_reg[OP_SRC1()]);
break;
}
case 0x2e:
case 0x2f: // or
{
int rd = OP_RD();
int rs = OP_RS();
if (rd)
m_reg[rd] = m_reg[rs] | (has_imm ? imm32 : m_reg[OP_SRC1()]);
break;
}
case 0x3a:
case 0x3b: // or.ft
{
int rd = OP_RD();
int rs = OP_RS();
if (rd)
m_reg[rd] = m_reg[rs] | ~(has_imm ? imm32 : m_reg[OP_SRC1()]);
break;
}
case 0x40:
case 0x41:
case 0x48:
case 0x49: // ld.b
{
int m = m_ir & (1 << 15);
int base = OP_BASE();
int rd = OP_RD();
uint32_t address = m_reg[base] + (has_imm ? imm32 : m_reg[OP_SRC1()]);
uint32_t r = m_program.read_byte(address);
if (r & 0x80) r |= 0xffffff00;
if (rd)
m_reg[rd] = r;
if (m && base)
m_reg[base] = address;
break;
}
case 0x42:
case 0x4a:
case 0x43:
case 0x4b: // ld.h
{
int shift = (m_ir & (1 << 11)) ? 1 : 0;
int m = m_ir & (1 << 15);
int base = OP_BASE();
int rd = OP_RD();
uint32_t address = m_reg[base] + ((has_imm ? imm32 : m_reg[OP_SRC1()]) << shift);
uint32_t r = m_program.read_word(address);
if (r & 0x8000) r |= 0xffff0000;
if (rd)
m_reg[rd] = r;
if (m && base)
m_reg[base] = address;
break;
}
case 0x4c:
case 0x44:
case 0x4d:
case 0x45: // ld
{
int shift = (m_ir & (1 << 11)) ? 2 : 0;
int m = m_ir & (1 << 15);
int base = OP_BASE();
int rd = OP_RD();
uint32_t address = m_reg[base] + ((has_imm ? imm32 : m_reg[OP_SRC1()]) << shift);
uint32_t r = m_program.read_dword(address);
if (rd)
m_reg[rd] = r;
if (m && base)
m_reg[base] = address;
break;
}
case 0x4e:
case 0x4f:
case 0x46:
case 0x47: // ld.d
{
int shift = (m_ir & (1 << 11)) ? 3 : 0;
int m = m_ir & (1 << 15);
int base = OP_BASE();
int rd = OP_RD();
uint32_t address = m_reg[base] + ((has_imm ? imm32 : m_reg[OP_SRC1()]) << shift);
uint64_t r = m_program.read_qword(address);
if (rd)
m_fpair[rd >> 1] = r;
if (m && base)
m_reg[base] = address;
break;
}
case 0x58:
case 0x59:
case 0x50:
case 0x51: // ld.ub
{
int m = m_ir & (1 << 15);
int base = OP_BASE();
int rd = OP_RD();
uint32_t address = m_reg[base] + (has_imm ? imm32 : m_reg[OP_SRC1()]);
uint32_t r = (uint8_t)(m_program.read_byte(address));
if (rd)
m_reg[rd] = r;
if (m && base)
m_reg[base] = address;
break;
}
case 0x5a:
case 0x5b:
case 0x52:
case 0x53: // ld.uh
{
int shift = (m_ir & (1 << 11)) ? 1 : 0;
int m = m_ir & (1 << 15);
int base = OP_BASE();
int rd = OP_RD();
uint32_t address = m_reg[base] + ((has_imm ? imm32 : m_reg[OP_SRC1()]) << shift);
uint32_t r = (uint16_t)(m_program.read_word(address));
if (rd)
m_reg[rd] = r;
if (m && base)
m_reg[base] = address;
break;
}
case 0x60:
case 0x61:
case 0x68:
case 0x69: // st.b
{
int m = m_ir & (1 << 15);
int base = OP_BASE();
uint32_t address = m_reg[base] + (has_imm ? imm32 : m_reg[OP_SRC1()]);
m_program.write_byte(address, (uint8_t)(m_reg[OP_RD()]));
if (m && base)
m_reg[base] = address;
break;
}
case 0x62:
case 0x63:
case 0x6a:
case 0x6b: // st.h
{
int shift = (m_ir & (1 << 11)) ? 1 : 0;
int m = m_ir & (1 << 15);
int base = OP_BASE();
uint32_t address = m_reg[base] + ((has_imm ? imm32 : m_reg[OP_SRC1()]) << shift);
m_program.write_word(address, (uint16_t)(m_reg[OP_RD()]));
if (m && base)
m_reg[base] = address;
break;
}
case 0x6c:
case 0x6d:
case 0x64:
case 0x65: // st
{
int shift = (m_ir & (1 << 11)) ? 2 : 0;
int m = m_ir & (1 << 15);
int base = OP_BASE();
uint32_t address = m_reg[base] + ((has_imm ? imm32 : m_reg[OP_SRC1()]) << shift);
m_program.write_dword(address, m_reg[OP_RD()]);
if (m && base)
m_reg[base] = address;
break;
}
case 0x88:
case 0x89: // jsr
{
int link = OP_LINK();
int base = OP_BASE();
if (link)
m_reg[link] = m_fetchpc + 4;
uint32_t address = m_reg[base] + (has_imm ? imm32 : m_reg[OP_SRC1()]);
m_pc = m_fetchpc;
delay_slot();
m_fetchpc = address;
break;
}
case 0x8a:
case 0x8b: // jsr.a
{
int link = OP_LINK();
int base = OP_BASE();
if (link)
m_reg[link] = m_fetchpc;
m_fetchpc = m_reg[base] + (has_imm ? imm32 : m_reg[OP_SRC1()]);
break;
}
case 0xa0:
case 0xa1: // cmp
{
int rd = OP_RD();
uint32_t src1 = has_imm ? imm32 : m_reg[OP_SRC1()];
uint32_t src2 = m_reg[OP_RS()];
if (rd)
m_reg[rd] = calculate_cmp(src1, src2);
break;
}
case 0xb2:
case 0xb3: // addu
{
int rd = OP_RD();
int rs = OP_RS();
if (rd)
m_reg[rd] = m_reg[rs] + (has_imm ? imm32 : m_reg[OP_SRC1()]);
break;
}
case 0xb4:
case 0xb5: // sub
{
int rd = OP_RD();
int rs = OP_RS();
if (rd)
m_reg[rd] = (has_imm ? imm32 : m_reg[OP_SRC1()]) - m_reg[rs];
// TODO: overflow interrupt
break;
}
case 0xb6:
case 0xb7: // subu
{
int rd = OP_RD();
int rs = OP_RS();
if (rd)
m_reg[rd] = (has_imm ? imm32 : m_reg[OP_SRC1()]) - m_reg[rs];
break;
}
case 0xc4:
case 0xd4:
case 0xc5:
case 0xd5: // vmpy
{
int p1 = m_ir & (1 << 5);
int pd = m_ir & (1 << 7);
int ls_bit1 = m_ir & (1 << 10);
int ls_bit2 = m_ir & (1 << 6);
int rd = OP_RS();
int src1 OP_SRC1();
double source = has_imm ? (double)u2f(imm32) : (p1 ? u2d(m_fpair[src1 >> 1]) : (double)u2f(m_reg[src1]));
if (rd)
{
if (pd)
{
double res = source * u2d(m_fpair[rd >> 1]);
m_fpair[rd >> 1] = d2u(res);
}
else
{
float res = (float)(source) * u2f(m_reg[rd]);
m_reg[rd] = f2u(res);
}
}
// parallel load/store op
if (!(ls_bit1 == 0 && ls_bit2 == 0))
{
vector_loadstore();
}
break;
}
case 0xc8:
case 0xd8:
case 0xc9:
case 0xd9: // vrnd
{
int acc = OP_ACC();
int p1 = m_ir & (1 << 5);
int pd = (m_ir >> 7) & 3;
int ls_bit1 = m_ir & (1 << 10);
int ls_bit2 = m_ir & (1 << 6);
int rd = OP_RS();
int rs1 = OP_SRC1();
double source = has_imm ? (double)u2f(imm32) : (p1 ? u2d(m_fpair[rs1 >> 1]) : (double)u2f(m_reg[rs1]));
if (rd)
{
// destination register
switch (pd)
{
case 0:
m_reg[rd] = f2u((float)source);
break;
case 1:
m_fpair[rd >> 1] = d2u(source);
break;
case 2:
m_reg[rd] = (int32_t)(source);
break;
case 3:
m_reg[rd] = (uint32_t)(source);
break;
}
}
else
{
// destination accumulator
if (pd != 1)
fatalerror("vrnd pd = %d at %08X\n", pd, m_pc);
m_facc[acc] = source;
}
// parallel load/store op
if (!(ls_bit1 == 0 && ls_bit2 == 0))
{
vector_loadstore();
}
break;
}
case 0xcc:
case 0xdc:
case 0xcd:
case 0xdd: // vmac
{
int acc = OP_ACC();
int z = m_ir & (1 << 8);
int pd = m_ir & (1 << 9);
int ls_bit1 = m_ir & (1 << 10);
int ls_bit2 = m_ir & (1 << 6);
int rd = OP_RD();
float src1 = u2f(m_reg[OP_SRC1()]);
float src2 = u2f(m_reg[OP_RS()]);
float res = (src1 * src2) + (z ? 0.0f : m_facc[acc]);
// parallel load/store op
if (!(ls_bit1 == 0 && ls_bit2 == 0))
{
vector_loadstore();
// if the opcode has load/store, dest is always accumulator
m_facc[acc] = (double)res;
}
else
{
if (rd)
{
if (pd)
m_fpair[rd >> 1] = d2u(res);
else
m_reg[rd] = f2u((float)res);
}
else
{
// write to accumulator
m_facc[acc] = (double)res;
}
}
break;
}
case 0xce:
case 0xde:
case 0xcf:
case 0xdf: // vmsc
{
int acc = OP_ACC();
int z = m_ir & (1 << 8);
int pd = m_ir & (1 << 9);
int ls_bit1 = m_ir & (1 << 10);
int ls_bit2 = m_ir & (1 << 6);
int rd = OP_RD();
float src1 = u2f(m_reg[OP_SRC1()]);
float src2 = u2f(m_reg[OP_RS()]);
float res = (z ? 0.0f : m_facc[acc]) - (src1 * src2);
// parallel load/store op
if (!(ls_bit1 == 0 && ls_bit2 == 0))
{
vector_loadstore();
// if the opcode has load/store, dest is always accumulator
m_facc[acc] = (double)res;
}
else
{
if (rd)
{
if (pd)
m_fpair[rd >> 1] = d2u(res);
else
m_reg[rd] = f2u((float)res);
}
else
{
// write to accumulator
m_facc[acc] = (double)res;
}
}
break;
}
case 0xe0:
case 0xe1: // fadd
{
int rd = OP_RD();
int rs = OP_RS();
int src1 = OP_SRC1();
int precision = (m_ir >> 5) & 0x3f;
if (rd) // only calculate if destination register is valid
{
switch (precision)
{
case 0x00: // SP - SP -> SP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
float s2 = u2f(m_reg[rs]);
m_reg[rd] = f2u(s1 + s2);
break;
}
case 0x10: // SP - SP -> DP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
float s2 = u2f(m_reg[rs]);
uint64_t res = d2u((double)(s1 + s2));
m_fpair[rd >> 1] = res;
break;
}
case 0x14: // SP - DP -> DP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
double s2 = u2d(m_fpair[rs >> 1]);
uint64_t res = d2u((double) s1 + s2);
m_fpair[rd >> 1] = res;
break;
}
case 0x11: // DP - SP -> DP
{
double s1 = u2d(m_fpair[src1 >> 1]);
float s2 = u2f(m_reg[rs]);
uint64_t res = d2u(s1 + (double) s2);
m_fpair[rd >> 1] = res;
break;
}
case 0x15: // DP - DP -> DP
{
double s1 = u2d(m_fpair[src1 >> 1]);
double s2 = u2d(m_fpair[rs >> 1]);
uint64_t res = d2u((double)(s1 + s2));
m_fpair[rd >> 1] = res;
break;
}
default:
fatalerror("fadd: invalid precision combination %02X\n", precision);
}
}
break;
}
case 0xe2:
case 0xe3: // fsub
{
int rd = OP_RD();
int rs = OP_RS();
int src1 = OP_SRC1();
int precision = (m_ir >> 5) & 0x3f;
if (rd) // only calculate if destination register is valid
{
switch (precision)
{
case 0x00: // SP - SP -> SP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
float s2 = u2f(m_reg[rs]);
m_reg[rd] = f2u(s1 - s2);
break;
}
case 0x10: // SP - SP -> DP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
float s2 = u2f(m_reg[rs]);
uint64_t res = d2u((double)(s1 - s2));
m_fpair[rd >> 1] = res;
break;
}
case 0x14: // SP - DP -> DP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
double s2 = u2d(m_fpair[rs >> 1]);
uint64_t res = d2u((double) s1 - s2);
m_fpair[rd >> 1] = res;
break;
}
case 0x11: // DP - SP -> DP
{
double s1 = u2d(m_fpair[src1 >> 1]);
float s2 = u2f(m_reg[rs]);
uint64_t res = d2u(s1 - (double) s2);
m_fpair[rd >> 1] = res;
break;
}
case 0x15: // DP - DP -> DP
{
double s1 = u2d(m_fpair[src1 >> 1]);
double s2 = u2d(m_fpair[rs >> 1]);
uint64_t res = d2u((double)(s1 - s2));
m_fpair[rd >> 1] = res;
break;
}
default:
fatalerror("fsub: invalid precision combination %02X\n", precision);
}
}
break;
}
case 0xe4:
case 0xe5: // fmpy
{
int rd = OP_RD();
int rs = OP_RS();
int src1 = OP_SRC1();
int precision = (m_ir >> 5) & 0x3f;
if (rd) // only calculate if destination register is valid
{
switch (precision)
{
case 0x00: // SP x SP -> SP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
float s2 = u2f(m_reg[rs]);
m_reg[rd] = f2u(s1 * s2);
break;
}
case 0x10: // SP x SP -> DP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
float s2 = u2f(m_reg[rs]);
uint64_t res = d2u((double)(s1 * s2));
m_fpair[rd >> 1] = res;
break;
}
case 0x14: // SP x DP -> DP
{
float s1 = u2f(has_imm ? imm32 : m_reg[src1]);
double s2 = u2d(m_fpair[rs >> 1]);
uint64_t res = d2u((double)s1 * s2);
m_fpair[rd >> 1] = res;
break;
}
case 0x11: // DP x SP -> DP
{
double s1 = u2d(m_fpair[src1 >> 1]);
float s2 = u2f(m_reg[rs]);
uint64_t res = d2u(s1 * (double) s2);
m_fpair[rd >> 1] = res;
break;
}
case 0x15: // DP x DP -> DP
{
double s1 = u2d(m_fpair[src1 >> 1]);
double s2 = u2d(m_fpair[rs >> 1]);
uint64_t res = d2u(s1 * s2);
m_fpair[rd >> 1] = res;
break;
}
case 0x2a: // I x I -> I
{
m_reg[rd] = (int32_t)(m_reg[rs]) * (int32_t)(has_imm ? imm32 : m_reg[OP_SRC1()]);
break;
}
case 0x3f: // U x U -> U
{
m_reg[rd] = (uint32_t)(m_reg[rs]) * (uint32_t)(has_imm ? imm32 : m_reg[OP_SRC1()]);
break;
}
default:
fatalerror("fmpy: invalid precision combination %02X\n", precision);
}
}
break;
}
case 0xe6:
case 0xe7: // fdiv
{
int rd = OP_RD();
int p1 = m_ir & (1 << 5);
int p2 = m_ir & (1 << 7);
int pd = m_ir & (1 << 9);
int rs1 = OP_SRC1();
int rs2 = OP_RS();
if (rd)
{
double src1 = has_imm ? (double)u2f(imm32) : (p1 ? u2d(m_fpair[rs1 >> 1]) : (double)u2f(m_reg[rs1]));
double src2 = p2 ? u2d(m_fpair[rs2 >> 1]) : (double)u2f(m_reg[rs2]);
double res;
if (src2 != 0.0)
res = src1 / src2;
else
res = 0.0f;
if (pd)
m_fpair[rd >> 1] = d2u(res);
else
m_reg[rd] = f2u((float)res);
}
break;
}
case 0xe8:
case 0xe9: // frnd
{
//int mode = (m_ir >> 7) & 3;
int p1 = (m_ir >> 5) & 3;
int pd = (m_ir >> 9) & 3;
int src1 = OP_SRC1();
int rd = OP_RD();
double s = 0.0;
switch (p1)
{
case 0:
s = has_imm ? (double)(u2f(imm32)) : (double)u2f(m_reg[src1]);
break;
case 1:
s = u2d(m_fpair[src1 >> 1]);
break;
case 2:
s = has_imm ? (double)((int32_t)(imm32)) : (double)(int32_t)(m_reg[src1]);
break;
case 3:
s = has_imm ? (double)((uint32_t)(imm32)) : (double)(uint32_t)(m_reg[src1]);
break;
}
// TODO: round
if (rd)
{
switch (pd)
{
case 0:
m_reg[rd] = f2u((float)(s));
break;
case 1:
m_fpair[rd >> 1] = d2u(s);
break;
case 2:
m_reg[rd] = (int32_t)(s);
break;
case 3:
m_reg[rd] = (uint32_t)(s);
break;
}
}
break;
}
case 0xea:
case 0xeb: // fcmp
{
int rd = OP_RD();
int p1 = m_ir & (1 << 5);
int p2 = m_ir & (1 << 7);
int rs1 = OP_SRC1();
int rs2 = OP_RS();
double src1 = has_imm ? (double)(u2f(imm32)) : (p1 ? u2d(m_fpair[rs1 >> 1]) : (double)u2f(m_reg[rs1]));
double src2 = p2 ? u2d(m_fpair[rs2 >> 1]) : (double)u2f(m_reg[rs2]);
if (rd)
{
uint32_t flags = 0;
flags |= (src1 == src2) ? (1 << 20) : 0;
flags |= (src1 != src2) ? (1 << 21) : 0;
flags |= (src1 > src2) ? (1 << 22) : 0;
flags |= (src1 <= src2) ? (1 << 23) : 0;
flags |= (src1 < src2) ? (1 << 24) : 0;
flags |= (src1 >= src2) ? (1 << 25) : 0;
flags |= (src1 < 0 || src1 > src2) ? (1 << 26) : 0;
flags |= (src1 > 0 && src1 < src2) ? (1 << 27) : 0;
flags |= (src1 >= 0 && src1 <= src2) ? (1 << 28) : 0;
flags |= (src1 <= 0 || src1 >= src2) ? (1 << 29) : 0;
// TODO: src1 or src2 unordered
// TODO: src1 and src2 ordered
m_reg[rd] = flags;
}
break;
}
case 0xee:
case 0xef: // fsqrt
{
int rd = OP_RD();
int src1 = OP_SRC1();
int p1 = m_ir & (1 << 5);
int pd = m_ir & (1 << 9);
double source = has_imm ? (double)u2f(imm32) : (p1 ? u2d(m_fpair[src1 >> 1]) : (double)u2f(m_reg[src1]));
if (rd)
{
double res;
if (source >= 0.0f)
res = sqrt(source);
else
res = 0.0;
if (pd)
m_fpair[rd >> 1] = d2u(res);
else
m_reg[rd] = f2u((float)res);
}
break;
}
case 0xf2: // rmo
{
uint32_t source = m_reg[OP_RS()];
int rd = OP_RD();
int bit = 32;
for (int i=0; i < 32; i++)
{
if (source & (1 << (31-i)))
{
bit = i;
break;
}
}
if (rd)
m_reg[rd] = bit;
break;
}
default:
fatalerror("execute_reg_long_imm(): %08X: opcode %08X (%02X)", m_pc, m_ir, (m_ir >> 12) & 0xff);
}
}
// Dispatch the instruction in m_ir to the decoder for its format group.
// Bits 21:20 select the group: values 0-2 are short-immediate formats,
// value 3 is the register / long-immediate format.
void tms32082_mp_device::execute()
{
    const uint32_t format_group = (m_ir >> 20) & 3;
    if (format_group == 3)
        execute_reg_long_imm();
    else
        execute_short_imm();
}
| johnparker007/mame | src/devices/cpu/tms32082/mp_ops.cpp | C++ | gpl-2.0 | 40,621 |
<?php
/* Copyright 2014 Sutherland Boswell (email : sutherland.boswell@gmail.com)
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2, as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
// Require thumbnail provider class
require_once( VIDEO_THUMBNAILS_PATH . '/php/providers/class-video-thumbnails-provider.php' );
class Youku_Thumbnails extends Video_Thumbnails_Provider {

	// Human-readable name of the video provider
	public $service_name = 'Youku';
	const service_name = 'Youku';
	// Slug for the video provider
	public $service_slug = 'youku';
	const service_slug = 'youku';

	// Add this provider to the plugin's provider registry, keyed by slug.
	public static function register_provider( $providers ) {
		$providers[self::service_slug] = new self;
		return $providers;
	}

	// Regex strings for the supported Youku markup variants
	public $regexes = array(
		'#http://player\.youku\.com/embed/([A-Za-z0-9]+)#', // iFrame
		'#http://player\.youku\.com/player\.php/sid/([A-Za-z0-9]+)/v\.swf#', // Flash
		'#http://v\.youku\.com/v_show/id_([A-Za-z0-9]+)\.html#' // Link
	);

	// Thumbnail URL: query Youku's playlist API for the video's logo image.
	public function get_thumbnail_url( $id ) {
		$api_url = "http://v.youku.com/player/getPlayList/VideoIDS/$id/";
		$http_response = wp_remote_get( $api_url, array( 'sslverify' => false ) );
		if ( is_wp_error( $http_response ) ) {
			// Bubble up a provider-formatted retrieval error.
			return $this->construct_info_retrieval_error( $api_url, $http_response );
		}
		$payload = json_decode( $http_response['body'] );
		return $payload->data[0]->logo;
	}

	// Test cases consumed by the plugin's provider self-test page.
	public static function get_test_cases() {
		return array(
			array(
				'markup' => '<iframe height=498 width=510 src="http://player.youku.com/embed/XMzQyMzk5MzQ4" frameborder=0 allowfullscreen></iframe>',
				'expected' => 'http://g1.ykimg.com/1100641F464F0FB57407E2053DFCBC802FBBC4-E4C5-7A58-0394-26C366F10493',
				'expected_hash' => 'deac7bb89058a8c46ae2350da9d33ba8',
				'name' => __( 'iFrame Embed', 'video-thumbnails' )
			),
			array(
				'markup' => '<embed src="http://player.youku.com/player.php/sid/XMzQyMzk5MzQ4/v.swf" quality="high" width="480" height="400" align="middle" allowScriptAccess="sameDomain" allowFullscreen="true" type="application/x-shockwave-flash"></embed>',
				'expected' => 'http://g1.ykimg.com/1100641F464F0FB57407E2053DFCBC802FBBC4-E4C5-7A58-0394-26C366F10493',
				'expected_hash' => 'deac7bb89058a8c46ae2350da9d33ba8',
				'name' => __( 'Flash Embed', 'video-thumbnails' )
			),
			array(
				'markup' => 'http://v.youku.com/v_show/id_XMzQyMzk5MzQ4.html',
				'expected' => 'http://g1.ykimg.com/1100641F464F0FB57407E2053DFCBC802FBBC4-E4C5-7A58-0394-26C366F10493',
				'expected_hash' => 'deac7bb89058a8c46ae2350da9d33ba8',
				'name' => __( 'Video URL', 'video-thumbnails' )
			),
		);
	}

}
?> | trocvuong/izzfeed_us | wp-content/plugins/video-thumbnails/php/providers/class-youku-thumbnails.php | PHP | gpl-2.0 | 3,248 |
/*
* Copyright (C) 2005-2013 Team XBMC
* http://xbmc.org
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBMC; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#include "EmuFileWrapper.h"
#include "filesystem/File.h"
#include "threads/SingleLock.h"
CEmuFileWrapper g_emuFileWrapper;
namespace
{
// True when the FILE pointer refers to an actual stream object (non-null).
constexpr bool isValidFilePtr(FILE* f)
{
  return nullptr != f;
}
}
CEmuFileWrapper::CEmuFileWrapper()
{
  // since we always use dlls we might just initialize it directly:
  // zero every slot in the emulated-file table and mark it free.
  for (EmuFileObject& slot : m_files)
  {
    memset(&slot, 0, sizeof(slot));
    slot.used = false;
    slot.fd = -1;
  }
}
CEmuFileWrapper::~CEmuFileWrapper()
{
  // Release any emulated file objects still registered at shutdown.
  CleanUp();
}
void CEmuFileWrapper::CleanUp()
{
CSingleLock lock(m_criticalSection);
for (int i = 0; i < MAX_EMULATED_FILES; i++)
{
if (m_files[i].used)
{
m_files[i].file_xbmc->Close();
delete m_files[i].file_xbmc;
if (m_files[i].file_lock)
{
delete m_files[i].file_lock;
m_files[i].file_lock = nullptr;
}
m_files[i].used = false;
m_files[i].fd = -1;
}
}
}
// Claim a free slot in the emulated-file table for pFile and hand back the
// populated slot, or nullptr when the table is full.
EmuFileObject* CEmuFileWrapper::RegisterFileObject(XFILE::CFile* pFile)
{
  CSingleLock lock(m_criticalSection);

  for (int slot = 0; slot < MAX_EMULATED_FILES; ++slot)
  {
    EmuFileObject& candidate = m_files[slot];
    if (candidate.used)
      continue;

    // found a free location
    candidate.used = true;
    candidate.file_xbmc = pFile;
    candidate.fd = slot + FILE_WRAPPER_OFFSET;
    candidate.file_lock = new CCriticalSection();
    return &candidate;
  }

  return nullptr;
}
// Free the table slot belonging to descriptor fd.  Out-of-range or
// already-free descriptors are ignored.
void CEmuFileWrapper::UnRegisterFileObjectByDescriptor(int fd)
{
  int i = fd - FILE_WRAPPER_OFFSET;
  if (! (i >= 0 && i < MAX_EMULATED_FILES))
    return;

  // Acquire the table lock *before* inspecting the slot: 'used' can be
  // flipped concurrently by RegisterFileObject()/CleanUp().  (Previously
  // the flag was read unlocked, which raced with registration.)
  CSingleLock lock(m_criticalSection);

  if (!m_files[i].used)
    return;

  // we assume the emulated function already deleted the CFile object
  if (m_files[i].file_lock)
  {
    delete m_files[i].file_lock;
    m_files[i].file_lock = nullptr;
  }

  m_files[i].used = false;
  m_files[i].fd = -1;
}
void CEmuFileWrapper::UnRegisterFileObjectByStream(FILE* stream)
{
if (isValidFilePtr(stream))
{
EmuFileObject* o = reinterpret_cast<EmuFileObject*>(stream);
return UnRegisterFileObjectByDescriptor(o->fd);
}
}
void CEmuFileWrapper::LockFileObjectByDescriptor(int fd)
{
int i = fd - FILE_WRAPPER_OFFSET;
if (i >= 0 && i < MAX_EMULATED_FILES)
{
if (m_files[i].used)
{
m_files[i].file_lock->lock();
}
}
}
bool CEmuFileWrapper::TryLockFileObjectByDescriptor(int fd)
{
int i = fd - FILE_WRAPPER_OFFSET;
if (i >= 0 && i < MAX_EMULATED_FILES)
{
if (m_files[i].used)
{
return m_files[i].file_lock->try_lock();
}
}
return false;
}
void CEmuFileWrapper::UnlockFileObjectByDescriptor(int fd)
{
int i = fd - FILE_WRAPPER_OFFSET;
if (i >= 0 && i < MAX_EMULATED_FILES)
{
if (m_files[i].used)
{
m_files[i].file_lock->unlock();
}
}
}
// Map descriptor fd to its table slot.  Returns nullptr for descriptors
// outside the wrapper range or slots that are not currently in use.
EmuFileObject* CEmuFileWrapper::GetFileObjectByDescriptor(int fd)
{
  const int index = fd - FILE_WRAPPER_OFFSET;
  if (index < 0 || index >= MAX_EMULATED_FILES)
    return nullptr;

  return m_files[index].used ? &m_files[index] : nullptr;
}
// Map a FILE* (which overlays an EmuFileObject) back to its validated
// table slot, or nullptr when the stream is null or unregistered.
EmuFileObject* CEmuFileWrapper::GetFileObjectByStream(FILE* stream)
{
  if (!isValidFilePtr(stream))
    return nullptr;

  auto* object = reinterpret_cast<EmuFileObject*>(stream);
  return GetFileObjectByDescriptor(object->fd);
}
// Fetch the underlying XBMC CFile for descriptor fd, or nullptr when the
// descriptor does not map to an in-use slot.
XFILE::CFile* CEmuFileWrapper::GetFileXbmcByDescriptor(int fd)
{
  EmuFileObject* object = GetFileObjectByDescriptor(fd);
  if (object == nullptr || !object->used)
    return nullptr;

  return object->file_xbmc;
}
// Fetch the underlying XBMC CFile for an emulated stream, or nullptr when
// the stream is null or not registered.
XFILE::CFile* CEmuFileWrapper::GetFileXbmcByStream(FILE* stream)
{
  // Route through GetFileObjectByStream() so this path performs the same
  // descriptor-range validation as the fd-based lookup; the old code only
  // reinterpreted the pointer and tested 'used' on unvalidated memory.
  EmuFileObject* object = GetFileObjectByStream(stream);
  if (object != nullptr && object->used)
    return object->file_xbmc;

  return nullptr;
}
int CEmuFileWrapper::GetDescriptorByStream(FILE* stream)
{
if (isValidFilePtr(stream))
{
EmuFileObject* obj = reinterpret_cast<EmuFileObject*>(stream);
int i = obj->fd - FILE_WRAPPER_OFFSET;
if (i >= 0 && i < MAX_EMULATED_FILES)
{
return i + FILE_WRAPPER_OFFSET;
}
}
return -1;
}
FILE* CEmuFileWrapper::GetStreamByDescriptor(int fd)
{
auto object = GetFileObjectByDescriptor(fd);
if (object != nullptr && object->used)
{
return reinterpret_cast<FILE*>(object);
}
return nullptr;
}
bool CEmuFileWrapper::StreamIsEmulatedFile(FILE* stream)
{
if (isValidFilePtr(stream))
{
EmuFileObject* obj = reinterpret_cast<EmuFileObject*>(stream);
return DescriptorIsEmulatedFile(obj->fd);
}
return false;
}
| notspiff/xbmc | xbmc/cores/DllLoader/exports/util/EmuFileWrapper.cpp | C++ | gpl-2.0 | 5,290 |
from __future__ import print_function, unicode_literals
from praw.internal import _to_reddit_list
from .helper import PRAWTest, betamax
class InternalTest(PRAWTest):
    """Tests for praw.internal._to_reddit_list.

    _to_reddit_list flattens a string, a RedditContentObject, or a list of
    either into a single comma-separated string.
    """

    def test__to_reddit_list(self):
        self.assertEqual('hello', _to_reddit_list('hello'))

    def test__to_reddit_list_with_list(self):
        self.assertEqual('hello', _to_reddit_list(['hello']))

    def test__to_reddit_list_with_empty_list(self):
        self.assertEqual('', _to_reddit_list([]))

    def test__to_reddit_list_with_big_list(self):
        self.assertEqual('hello,world', _to_reddit_list(['hello', 'world']))

    @betamax()
    def test__to_reddit_list_with_object(self):
        # Subreddit objects stringify to their display name.
        self.assertEqual(self.sr, _to_reddit_list(self.r.get_subreddit(self.sr)))

    def test__to_reddit_list_with_object_in_list(self):
        subreddit = self.r.get_subreddit(self.sr)
        self.assertEqual(self.sr, _to_reddit_list([subreddit]))

    def test__to_reddit_list_with_mix(self):
        subreddit = self.r.get_subreddit(self.sr)
        self.assertEqual('{0},{1}'.format(self.sr, 'hello'),
                         _to_reddit_list([subreddit, 'hello']))
| dmarx/praw | tests/test_internal.py | Python | gpl-3.0 | 1,261 |
/**
* @license @product.name@ JS v@product.version@ (@product.date@)
*
* Money Flow Index indicator for Highstock
*
* (c) 2010-2019 Grzegorz Blachliński
*
* License: www.highcharts.com/license
*/
'use strict';
import '../../indicators/mfi.src.js';
| blue-eyed-devil/testCMS | externals/highcharts/es-modules/masters/indicators/mfi.src.js | JavaScript | gpl-3.0 | 258 |
// { dg-options "-std=gnu++14" }
// { dg-do compile }
// Copyright (C) 2015-2016 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
#include <experimental/memory>
using std::experimental::observer_ptr;
// Minimal fixture types for exercising observer_ptr's construction and
// assignment requirements: a non-trivially-constructible type, an
// unrelated type, and a base/derived pair for convertibility checks.
struct nontrivial {nontrivial() {}};
struct other {};
struct base {};
struct derived : base {};

// Triviality: observer_ptr's default constructor is non-trivial, but the
// type is trivially copyable and trivially destructible regardless of the
// pointee's triviality.
static_assert(!std::is_trivially_constructible<
              observer_ptr<nontrivial>>::value, "");
static_assert(std::is_trivially_copyable<
              observer_ptr<nontrivial>>::value, "");
static_assert(std::is_trivially_destructible<
              observer_ptr<nontrivial>>::value, "");

// Construction from raw pointers: allowed from T* and pointers implicitly
// convertible to T* (derived-to-base), rejected for unrelated types.
static_assert(std::is_constructible<
              observer_ptr<nontrivial>, nontrivial*>::value,
              "");
static_assert(std::is_constructible<observer_ptr<base>, base*>::value, "");
static_assert(std::is_constructible<observer_ptr<base>, derived*>::value, "");
static_assert(!std::is_constructible<observer_ptr<base>, other*>::value, "");

// Construction from other observer_ptr specializations follows the same
// convertibility rules as the underlying pointer types.
static_assert(std::is_constructible<
              observer_ptr<base>, observer_ptr<base>>::value, "");
static_assert(std::is_constructible<
              observer_ptr<base>, observer_ptr<derived>>::value, "");
static_assert(!std::is_constructible<
              observer_ptr<base>, observer_ptr<other>>::value, "");

// Assignment: no implicit assignment from a raw pointer, only from another
// observer_ptr with a compatible pointee type.
static_assert(!std::is_assignable<
              observer_ptr<nontrivial>, nontrivial*>::value,
              "");
static_assert(std::is_assignable<
              observer_ptr<nontrivial>, observer_ptr<nontrivial>>::value,
              "");
static_assert(std::is_assignable<observer_ptr<base>,
              observer_ptr<base>>::value, "");
static_assert(std::is_assignable<observer_ptr<base>,
              observer_ptr<derived>>::value, "");
static_assert(!std::is_assignable<
              observer_ptr<base>, observer_ptr<other>>::value, "");

// const-correctness: adding const to the pointee is fine, removing it is not.
static_assert(std::is_assignable<observer_ptr<const int>,
              observer_ptr<int>>::value, "");
static_assert(!std::is_assignable<observer_ptr<int>,
              observer_ptr<const int>>::value, "");
| selmentdev/selment-toolchain | source/gcc-latest/libstdc++-v3/testsuite/experimental/memory/observer_ptr/requirements.cc | C++ | gpl-3.0 | 2,725 |
package nomad
import (
"fmt"
"time"
"github.com/armon/go-metrics"
"github.com/hashicorp/nomad/nomad/structs"
)
// Periodic endpoint is used for periodic job interactions
type Periodic struct {
srv *Server
}
// Force is used to force a new instance of a periodic job
func (p *Periodic) Force(args *structs.PeriodicForceRequest, reply *structs.PeriodicForceResponse) error {
if done, err := p.srv.forward("Periodic.Force", args, args, reply); done {
return err
}
defer metrics.MeasureSince([]string{"nomad", "periodic", "force"}, time.Now())
// Validate the arguments
if args.JobID == "" {
return fmt.Errorf("missing job ID for evaluation")
}
// Lookup the job
snap, err := p.srv.fsm.State().Snapshot()
if err != nil {
return err
}
job, err := snap.JobByID(args.JobID)
if err != nil {
return err
}
if job == nil {
return fmt.Errorf("job not found")
}
if !job.IsPeriodic() {
return fmt.Errorf("can't force launch non-periodic job")
}
// Force run the job.
eval, err := p.srv.periodicDispatcher.ForceRun(job.ID)
if err != nil {
return fmt.Errorf("force launch for job %q failed: %v", job.ID, err)
}
reply.EvalID = eval.ID
reply.EvalCreateIndex = eval.CreateIndex
reply.Index = eval.CreateIndex
return nil
}
| mkuzmin/terraform | vendor/github.com/hashicorp/nomad/nomad/periodic_endpoint.go | GO | mpl-2.0 | 1,256 |
<?php
/**
*@package plugins.inletArmada
*/
class InletArmadaPlugin extends KalturaPlugin implements IKalturaObjectLoader, IKalturaEnumerator
{
	const PLUGIN_NAME = 'inletArmada';

	// Unique plugin identifier used to namespace the dynamic enum values.
	public static function getPluginName()
	{
		return self::PLUGIN_NAME;
	}

	/**
	 * Instantiate the engine / operator object this plugin contributes.
	 *
	 * NOTE(review): the enum comparison here is asymmetric with
	 * getObjectClass() below — KOperationEngine is matched against the raw
	 * KalturaConversionEngineType constant here but against
	 * self::getApiValue(...) there, and KDLOperatorBase uses getApiValue()
	 * here but getConversionEngineCoreValue() there.  One of the two is
	 * presumably wrong; verify against the Kaltura plugin-manager dispatch
	 * before changing either.
	 *
	 * @param string $baseClass
	 * @param string $enumValue
	 * @param array $constructorArgs
	 * @return object
	 */
	public static function loadObject($baseClass, $enumValue, array $constructorArgs = null)
	{
		if($baseClass == 'KOperationEngine' && $enumValue == KalturaConversionEngineType::INLET_ARMADA)
		{
			// 'params' is required to be present but only outFilePath is
			// actually forwarded to the engine constructor.
			if(!isset($constructorArgs['params']) || !isset($constructorArgs['outFilePath']))
				return null;

			return new KOperationEngineInletArmada("", $constructorArgs['outFilePath']);
		}

		if($baseClass == 'KDLOperatorBase' && $enumValue == self::getApiValue(InletArmadaConversionEngineType::INLET_ARMADA))
		{
			return new KDLOperatorInletArmada($enumValue);
		}

		return null;
	}

	/**
	 * Resolve the class name this plugin would instantiate for the given
	 * base class / enum value (see the NOTE(review) on loadObject()).
	 *
	 * @param string $baseClass
	 * @param string $enumValue
	 * @return string
	 */
	public static function getObjectClass($baseClass, $enumValue)
	{
		if($baseClass == 'KOperationEngine' && $enumValue == self::getApiValue(InletArmadaConversionEngineType::INLET_ARMADA))
			return 'KOperationEngineInletArmada';

		if($baseClass == 'KDLOperatorBase' && $enumValue == self::getConversionEngineCoreValue(InletArmadaConversionEngineType::INLET_ARMADA))
			return 'KDLOperatorInletArmada';

		return null;
	}

	/**
	 * @return array<string> list of enum classes names that extend the base enum name
	 */
	public static function getEnums($baseEnumName = null)
	{
		if(is_null($baseEnumName))
			return array('InletArmadaConversionEngineType');

		if($baseEnumName == 'conversionEngineType')
			return array('InletArmadaConversionEngineType');

		return array();
	}

	/**
	 * Translate this plugin's enum value name to its core (DB) id.
	 *
	 * @return int id of dynamic enum in the DB.
	 */
	public static function getConversionEngineCoreValue($valueName)
	{
		$value = self::getPluginName() . IKalturaEnumerator::PLUGIN_VALUE_DELIMITER . $valueName;
		return kPluginableEnumsManager::apiToCore('conversionEngineType', $value);
	}

	/**
	 * Build the plugin-namespaced external API value for an enum name.
	 *
	 * @return string external API value of dynamic enum.
	 */
	public static function getApiValue($valueName)
	{
		return self::getPluginName() . IKalturaEnumerator::PLUGIN_VALUE_DELIMITER . $valueName;
	}
}
| ratliff/server | plugins/transcoding/inlet_armada/InletArmadaPlugin.php | PHP | agpl-3.0 | 2,348 |
require 'spec_helper'
# Specs for the admin overview (dashboard) landing page. The controller is
# expected to route users to a welcome page, the enterprises index, or one of
# two dashboards depending on how many enterprises they own, each enterprise's
# `sells` setting, and whether they arrived from an admin page.
describe Spree::Admin::OverviewController do
  include AuthenticationWorkflow

  context "loading overview" do
    let(:user) { create_enterprise_user(enterprise_limit: 2) }

    before do
      controller.stub spree_current_user: user
    end

    context "when user owns only one enterprise" do
      let!(:enterprise) { create(:distributor_enterprise, owner: user) }

      context "when the referer is not an admin page" do
        before { @request.env['HTTP_REFERER'] = 'http://test.com/some_other_path' }

        context "and the enterprise has sells='unspecified'" do
          before do
            enterprise.update_attribute(:sells, "unspecified")
          end

          it "redirects to the welcome page for the enterprise" do
            spree_get :index
            response.should redirect_to welcome_admin_enterprise_path(enterprise)
          end
        end

        context "and the enterprise does not have sells='unspecified'" do
          it "renders the single enterprise dashboard" do
            spree_get :index
            response.should render_template "single_enterprise_dashboard"
          end
        end
      end

      context "when the refer is an admin page" do
        before { @request.env['HTTP_REFERER'] = 'http://test.com/admin' }

        it "renders the single enterprise dashboard" do
          spree_get :index
          response.should render_template "single_enterprise_dashboard"
        end
      end
    end

    context "when user owns multiple enterprises" do
      let!(:enterprise1) { create(:distributor_enterprise, owner: user) }
      let!(:enterprise2) { create(:distributor_enterprise, owner: user) }

      context "when the referer is not an admin page" do
        before { @request.env['HTTP_REFERER'] = 'http://test.com/some_other_path' }

        context "and at least one owned enterprise has sells='unspecified'" do
          before do
            enterprise1.update_attribute(:sells, "unspecified")
          end

          it "redirects to the enterprises index" do
            spree_get :index
            response.should redirect_to admin_enterprises_path
          end
        end

        context "and no owned enterprises have sells='unspecified'" do
          it "renders the multiple enterprise dashboard" do
            spree_get :index
            response.should render_template "multi_enterprise_dashboard"
          end
        end
      end

      context "when the refer is an admin page" do
        before { @request.env['HTTP_REFERER'] = 'http://test.com/admin' }

        it "renders the multiple enterprise dashboard" do
          spree_get :index
          response.should render_template "multi_enterprise_dashboard"
        end
      end
    end
  end
end
| levent/openfoodnetwork | spec/controllers/spree/admin/overview_controller_spec.rb | Ruby | agpl-3.0 | 2,772 |
<?php
return array(
    /*
    |--------------------------------------------------------------------------
    | Pagination Language Lines
    |--------------------------------------------------------------------------
    |
    | The following language lines are used by the paginator library to build
    | the simple pagination links. You are free to change them to anything
    | you want to customize your views to better match your application.
    |
    */

    // Label for the "previous page" link (Filipino).
    'previous' => '« Nakaraan',

    // Label for the "next page" link (Filipino).
    'next' => 'Susunod »',
);
| ixmid/snipe-it | resources/lang/fil/pagination.php | PHP | agpl-3.0 | 547 |
////////////////////////////////////////////////////////////////////////////////
// Test case file for checkstyle.
// Created: 2001
////////////////////////////////////////////////////////////////////////////////
package com . puppycrawl
.tools.
checkstyle.checks.whitespace.nowhitespacebefore;
/**
* Class for testing whitespace issues.
* error missing author tag
**/
// NOTE(review): this is a checkstyle TEST FIXTURE for the NoWhitespaceBefore
// check -- the irregular whitespace below is deliberate test data and must not
// be reformatted; the inline "Ignore"/"ok"/"error" comments encode expectations.
class InputNoWhitespaceBeforeDefault
{
    /** ignore assignment **/
    private int mVar1=1;
    /** ignore assignment **/
    private int mVar2 =1;
    /** Should be ok **/
    private int mVar3 = 1;

    /** method **/
    void method1()
    {
        final int a = 1;
        int b= 1; // Ignore 1
        b=1; // Ignore 1
        b+=1; // Ignore 1
        b -=- 1 + (+ b); // Ignore 2
        b = b ++ + b --; // Ignore 1
        b = ++ b - -- b; // Ignore 1
    }

    /** method **/
    void method2()
    {
        synchronized(this) {
        }
        try{
        }
        catch(RuntimeException e){
        }
    }

    /**
       skip blank lines between comment and code,
       should be ok
    **/


    private int mVar4 = 1;

    /** test WS after void return */
    private void fastExit()
    {
        boolean complicatedStuffNeeded = true;
        if( !complicatedStuffNeeded )
        {
            return; // should not complain about missing WS after return
        }
        else
        {
            // do complicated stuff
        }
    }

    /** test WS after non void return
     @return 2
    */
    private int nonVoid()
    {
        if ( true )
        {
            return(2); // should complain about missing WS after return
        }
        else
        {
            return 2; // this is ok
        }
    }

    /** test casts **/
    private void testCasts()
    {
        Object o = (Object) new Object(); // ok
        o = (Object)o; // error
        o = ( Object ) o; // ok
        o = (Object)
            o; // ok
    }

    /** test questions **/
    private void testQuestions()
    {
        boolean b = (1 == 2)?true:false;
        b = (1==2) ? false : true;
    }

    /** star test **/
    private void starTest()
    {
        int x = 2 *3* 4;
    }

    /** boolean test **/
    private void boolTest()
    {
        boolean a = true;
        boolean x = ! a;
        int z = ~1 + ~ 2;
    }

    /** division test **/
    private void divTest()
    {
        int a = 4 % 2;
        int b = 4% 2;
        int c = 4 %2;
        int d = 4%2;
        int e = 4 / 2;
        int f = 4/ 2;
        int g = 4 /2;
        int h = 4/2;
    }

    /** @return dot test **/
    private java .lang. String dotTest()
    {
        Object o = new java.lang.Object();
        o.
            toString();
        o
            .toString();
        o . toString();
        return o.toString();
    }

    /** assert statement test */
    public void assertTest()
    {
        // OK
        assert true;

        // OK
        assert true : "Whups";

        // evil colons, should be OK
        assert "OK".equals(null) ? false : true : "Whups";

        // missing WS around assert
        assert(true);

        // missing WS around colon
        assert true:"Whups";
    }

    /** another check */
    void donBradman(Runnable aRun)
    {
        donBradman(new Runnable() {
            public void run() {
            }
        });

        final Runnable r = new Runnable() {
            public void run() {
            }
        };
    }

    /** rfe 521323, detect whitespace before ';' */
    void rfe521323()
    {
        doStuff() ;
        //       ^ whitespace
        for (int i = 0 ; i < 5; i++) {
            //        ^ whitespace
        }
    }


    /** bug  806243 (NoWhitespaceBeforeCheck error for anonymous inner class) */
    private int i ;
    //           ^ whitespace
    private int i1, i2, i3 ;
    //                    ^ whitespace
    private int i4, i5, i6;

    /** bug  806243 (NoWhitespaceBeforeCheck error for anonymous inner class) */
    void bug806243()
    {
        Object o = new InputNoWhitespaceBeforeDefault() {
            private int j ;
            //           ^ whitespace
        };
    }

    void doStuff() {
    }
}
/**
* Bug 806242 (NoWhitespaceBeforeCheck error with an interface).
* @author o_sukhodolsky
* @version 1.0
*/
// NOTE(review): checkstyle test fixture -- the space before ';' is intentional.
interface IFoo_NoWhitespaceBeforeDefault
{
    void foo() ;
    //        ^ whitespace
}
/**
* Avoid Whitespace errors in for loop.
* @author lkuehne
* @version 1.0
*/
// NOTE(review): checkstyle test fixture -- the whitespace quirks below are
// intentional test data for the NoWhitespaceBefore check; do not reformat.
class SpecialCasesInForLoop_NoWhitespaceBeforeDefault
{
    void forIterator()
    {
        // avoid conflict between WhiteSpaceAfter ';' and ParenPad(nospace)
        for (int i = 0; i++ < 5;) {
        //                      ^ no whitespace
        }

        // bug 895072
        // avoid confilct between ParenPad(space) and NoWhiteSpace before ';'
        int i = 0;
        for ( ; i < 5; i++ ) {
        //   ^ whitespace
        }

        for (int anInt : getSomeInts()) {
            //Should be ignored
        }
    }

    int[] getSomeInts() {
        int i = (int) ( 2 / 3 );
        return null;
    }

    public void myMethod() {
        new Thread() {
            public void run() {
            }
        }.start();
    }

    public void foo(java.util.List<? extends String[]> bar, Comparable<? super Object[]> baz) { }

    public void mySuperMethod() {
        Runnable[] runs = new Runnable[] {new Runnable() {
            public void run() {
            }
        },
        new Runnable() {
            public void run() {
            }
        }};
        runs[0]
            .
            run()
            ;
    }

    public void testNullSemi() {
        return ;
    }

    public void register(Object obj) { }

    public void doSomething(String args[]) {
        register(boolean[].class);
        register( args );
    }

    public void parentheses() {
        testNullSemi
            (
            )
            ;
    }

    public static void testNoWhitespaceBeforeEllipses(String ... args) {
    }
}
| AkshitaKukreja30/checkstyle | src/test/resources/com/puppycrawl/tools/checkstyle/checks/whitespace/nowhitespacebefore/InputNoWhitespaceBeforeDefault.java | Java | lgpl-2.1 | 5,995 |
/*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.metric.ws;
import org.sonar.api.server.ws.WebService;
/**
 * Declares the {@code api/metrics} web service and wires every registered
 * {@link MetricsWsAction} into its controller.
 */
public class MetricsWs implements WebService {

  public static final String ENDPOINT = "api/metrics";

  private final MetricsWsAction[] actions;

  public MetricsWs(MetricsWsAction... actions) {
    this.actions = actions;
  }

  @Override
  public void define(Context context) {
    // Create the controller, describe it, let each action register itself,
    // then seal the definition.
    NewController controller = context.createController(ENDPOINT);
    controller.setDescription("Metrics management");
    controller.setSince("2.6");
    for (int i = 0; i < actions.length; i++) {
      actions[i].define(controller);
    }
    controller.done();
  }
}
| abbeyj/sonarqube | server/sonar-server/src/main/java/org/sonar/server/metric/ws/MetricsWs.java | Java | lgpl-3.0 | 1,515 |
require_dependency 'user_query'
# Handles user profiles: listing/follow queries, follow/unfollow, avatar and
# profile updates, profile comments, and short-URL redirects. Responds with
# HTML (Ember app) and JSON.
class UsersController < ApplicationController
  # GET /users
  # Three modes, selected by query params:
  #   followed_by / followers_of -- paginated follow lists (JSON with cursor)
  #   to_follow                  -- curated users flagged to_follow
  #   otherwise                  -- legacy recommendations-status endpoint
  def index
    if params[:followed_by] || params[:followers_of]
      if params[:followed_by]
        users = User.find(params[:followed_by]).following
      elsif params[:followers_of]
        users = User.find(params[:followers_of]).followers
      end
      users = users.page(params[:page]).per(20)
      # Preload "is followed by current_user" flags in one query.
      UserQuery.load_is_followed(users, current_user)
      render json: users, meta: { cursor: 1 + (params[:page] || 1).to_i }
    elsif params[:to_follow]
      render json: User.where(to_follow: true), each_serializer: UserSerializer
    else
      ### OLD CODE PATH BELOW. Used only by the recommendations page.
      authenticate_user!
      status = {
        recommendations_up_to_date: current_user.recommendations_up_to_date
      }
      respond_to do |format|
        format.html { redirect_to '/' }
        format.json { render json: status }
      end
    end
  end

  # GET /users/:id -- shows a profile; normalizes the URL and clears the
  # viewer's own profile-comment notifications.
  def show
    user = User.find(params[:id])

    # Redirect to canonical path
    if request.path != user_path(user)
      return redirect_to user_path(user), status: :moved_permanently
    end

    if user_signed_in? && current_user == user
      # Clear notifications if the current user is viewing his/her feed.
      # TODO: This needs to be moved elsewhere.
      Notification.where(user: user, notification_type: 'profile_comment',
                         seen: false).update_all seen: true
    end

    respond_with_ember user
  end

  ember_action(:ember) { User.find(params[:user_id]) }

  # POST /users/:user_id/follow -- toggles following; capped at 10,000 follows.
  # Emits a Substory for the follow/unfollow action when one occurred.
  def follow
    authenticate_user!
    user = User.find(params[:user_id])
    if user != current_user
      if user.followers.include? current_user
        user.followers.destroy current_user
        action_type = 'unfollowed'
      else
        if current_user.following_count < 10_000
          user.followers.push current_user
          action_type = 'followed'
        else
          flash[:message] = "Wow! You're following 10,000 people?! You should \
                            unfollow a few people that no longer interest you \
                            before following any others."
          action_type = nil
        end
      end
      if action_type
        Substory.from_action(
          user_id: current_user.id,
          action_type: action_type,
          followed_id: user.id
        )
      end
    end
    respond_to do |format|
      format.html { redirect_to :back }
      format.json { render json: true }
    end
  end

  # POST /users/:user_id/update_avatar -- only the owner may change the avatar.
  def update_avatar
    authenticate_user!
    user = User.find(params[:user_id])
    if user == current_user
      user.avatar = params[:avatar] || params[:user][:avatar]
      user.save!
      respond_to do |format|
        format.html { redirect_to :back }
        format.json { render json: user, serializer: CurrentUserSerializer }
      end
    else
      error! 403
    end
  end

  # Unlinks the Facebook account from the current user.
  def disconnect_facebook
    authenticate_user!
    current_user.update_attributes(facebook_id: nil)
    redirect_to :back
  end

  # GET /:username -- vanity URL; 404s when the name is unknown.
  def redirect_short_url
    @user = User.find_by_name params[:username]
    fail ActionController::RoutingError, 'Not Found' if @user.nil?
    redirect_to @user
  end

  # POST /users/:user_id/comment -- posts a profile comment as a Story action.
  def comment
    authenticate_user!
    # Create the story.
    @user = User.find(params[:user_id])
    Action.broadcast(
      action_type: 'created_profile_comment',
      user: @user,
      poster: current_user,
      comment: params[:comment]
    )
    respond_to do |format|
      format.html { redirect_to :back }
      format.json { render json: true }
    end
  end

  # PUT/PATCH /users/:id -- owner-only profile update. Accepts a few aliased
  # fields (new_password, new_username, rating_type, cover_image_url) and maps
  # them onto real columns before whitelisting.
  def update
    authenticate_user!
    user = User.find(params[:id])
    changes = params[:current_user] || params[:user]

    # NOTE(review): argument order here (code first) differs from the
    # error!(user.errors, 400) call below -- confirm the helper's signature.
    return error!(401, 'Wrong user') unless current_user == user

    # Finagling things into place
    changes[:cover_image] =
      changes[:cover_image_url] if changes[:cover_image_url] =~ /^data:/
    changes[:password] =
      changes[:new_password] if changes[:new_password].present?
    changes[:name] = changes[:new_username] if changes[:new_username].present?
    changes[:star_rating] = (changes[:rating_type] == 'advanced')
    # Drop the alias keys now that their values have been copied over.
    %i(new_password new_username rating_type cover_image_url).each do |key|
      changes.delete(key)
    end
    changes = changes.permit(:about, :location, :website, :name, :waifu_char_id,
                             :sfw_filter, :waifu, :bio, :email, :cover_image,
                             :waifu_or_husbando, :title_language_preference,
                             :password, :star_rating)

    # Convert to hash so that we ignore disallowed attributes
    user.assign_attributes(changes.to_h)

    if user.save
      render json: user
    else
      return error!(user.errors, 400)
    end
  end

  # GET /users/to_follow -- fixed list of recommended accounts.
  def to_follow
    fixed_user_list = %w(
      Gigguk Holden JeanP
      Arkada HappiLeeErin DoctorDazza
      Yokurama dexbonus DEMOLITION_D
    )
    @users = User.where(name: fixed_user_list)
    render json: @users, each_serializer: UserSerializer
  end
end
| jcoady9/hummingbird | app/controllers/users_controller.rb | Ruby | apache-2.0 | 5,020 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.seda;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.junit.jupiter.api.Test;
/**
 * Verifies that a SEDA endpoint configured with a bounded queue
 * ({@code size=5}) still delivers messages end to end.
 */
public class SedaSizeTest extends ContextTestSupport {

    @Test
    public void testSeda() throws Exception {
        // Expect exactly one message to flow through the bounded queue.
        getMockEndpoint("mock:bar").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Consumer side: drain the size-bounded seda queue into the mock.
                from("seda:bar?size=5").to("mock:bar");

                // Producer side: feed the queue from the direct endpoint.
                from("direct:start").to("seda:bar");
            }
        };
    }
}
| nikhilvibhav/camel | core/camel-core/src/test/java/org/apache/camel/component/seda/SedaSizeTest.java | Java | apache-2.0 | 1,588 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using Microsoft.Azure.Insights.Models;
namespace Microsoft.Azure.Commands.Insights.OutputClasses
{
/// <summary>
/// Wraps around a list of Dimension objects to display them with indentation
/// </summary>
/// <summary>
/// Display wrapper around a <see cref="LocalizableString" /> that renders its
/// fields with indentation.
/// </summary>
public class PSLocalizableString : LocalizableString
{
    /// <summary>
    /// Initializes a new instance of the PSLocalizableString class, copying
    /// the value and localized value from the source object when present.
    /// </summary>
    /// <param name="localizableString">The input LocalizableString object</param>
    public PSLocalizableString(LocalizableString localizableString)
    {
        // A null source leaves both fields at their defaults.
        if (localizableString == null)
        {
            return;
        }

        this.LocalizedValue = localizableString.LocalizedValue;
        this.Value = localizableString.Value;
    }

    /// <summary>
    /// A string representation of the LocalizableString object including indentation
    /// </summary>
    /// <returns>A string representation of the LocalizableString object including indentation</returns>
    public override string ToString()
    {
        return this.ToString(indentationTabs: 1);
    }
}
| zhencui/azure-powershell | src/ResourceManager/Insights/Commands.Insights/OutputClasses/PSLocalizableString.cs | C# | apache-2.0 | 1,909 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apimachinery
import (
"fmt"
"reflect"
"strings"
"time"
"k8s.io/api/admissionregistration/v1beta1"
"k8s.io/api/core/v1"
extensions "k8s.io/api/extensions/v1beta1"
rbacv1beta1 "k8s.io/api/rbac/v1beta1"
apiextensionsv1beta1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1beta1"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/intstr"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/dynamic"
clientset "k8s.io/client-go/kubernetes"
utilversion "k8s.io/kubernetes/pkg/util/version"
"k8s.io/kubernetes/test/e2e/framework"
imageutils "k8s.io/kubernetes/test/utils/image"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
_ "github.com/stretchr/testify/assert"
)
// Names and labels shared by all admission-webhook e2e specs below.
const (
	secretName      = "sample-webhook-secret"
	deploymentName  = "sample-webhook-deployment"
	serviceName     = "e2e-test-webhook"
	roleBindingName = "webhook-auth-reader"
	// The webhook configuration names should not be reused between test instances.
	crdWebhookConfigName         = "e2e-test-webhook-config-crd"
	webhookConfigName            = "e2e-test-webhook-config"
	mutatingWebhookConfigName    = "e2e-test-mutating-webhook-config"
	podMutatingWebhookConfigName = "e2e-test-mutating-webhook-pod"
	crdMutatingWebhookConfigName = "e2e-test-mutating-webhook-config-crd"
	webhookFailClosedConfigName  = "e2e-test-webhook-fail-closed"
	webhookForWebhooksConfigName = "e2e-test-webhook-for-webhooks-config"
	removableValidatingHookName  = "e2e-test-should-be-removable-validating-webhook-config"
	removableMutatingHookName    = "e2e-test-should-be-removable-mutating-webhook-config"
	skipNamespaceLabelKey        = "skip-webhook-admission"
	skipNamespaceLabelValue      = "yes"
	skippedNamespaceName         = "exempted-namesapce"
	disallowedPodName            = "disallowed-pod"
	hangingPodName               = "hanging-pod"
	disallowedConfigMapName      = "disallowed-configmap"
	allowedConfigMapName         = "allowed-configmap"
	failNamespaceLabelKey        = "fail-closed-webhook"
	failNamespaceLabelValue      = "yes"
	failNamespaceName            = "fail-closed-namesapce"
)

// Minimum server version that supports admission webhooks; specs skip below it.
var serverWebhookVersion = utilversion.MustParseSemantic("v1.8.0")
// Registers the AdmissionWebhook e2e suite. Each spec deploys the sample
// webhook server plus its service, registers one webhook configuration, and
// exercises a single admission-control scenario.
var _ = SIGDescribe("AdmissionWebhook", func() {
	var context *certContext
	f := framework.NewDefaultFramework("webhook")
	var client clientset.Interface
	var namespaceName string

	BeforeEach(func() {
		client = f.ClientSet
		namespaceName = f.Namespace.Name

		// Make sure the relevant provider supports admission webhook
		framework.SkipUnlessServerVersionGTE(serverWebhookVersion, f.ClientSet.Discovery())
		framework.SkipUnlessProviderIs("gce", "gke", "local")

		// Skip when the admissionregistration group itself is disabled.
		_, err := f.ClientSet.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().List(metav1.ListOptions{})
		if errors.IsNotFound(err) {
			framework.Skipf("dynamic configuration of webhooks requires the admissionregistration.k8s.io group to be enabled")
		}

		By("Setting up server cert")
		context = setupServerCert(namespaceName, serviceName)
		createAuthReaderRoleBinding(f, namespaceName)

		// Note that in 1.9 we will have backwards incompatible change to
		// admission webhooks, so the image will be updated to 1.9 sometime in
		// the development 1.9 cycle.
		deployWebhookAndService(f, imageutils.GetE2EImage(imageutils.AdmissionWebhook), context)
	})

	AfterEach(func() {
		cleanWebhookTest(client, namespaceName)
	})

	It("Should be able to deny pod and configmap creation", func() {
		webhookCleanup := registerWebhook(f, context)
		defer webhookCleanup()
		testWebhook(f)
	})

	It("Should be able to deny custom resource creation", func() {
		testcrd, err := framework.CreateTestCRD(f)
		if err != nil {
			return
		}
		defer testcrd.CleanUp()
		webhookCleanup := registerWebhookForCRD(f, context, testcrd)
		defer webhookCleanup()
		testCRDWebhook(f, testcrd.Crd, testcrd.DynamicClient)
	})

	It("Should unconditionally reject operations on fail closed webhook", func() {
		webhookCleanup := registerFailClosedWebhook(f, context)
		defer webhookCleanup()
		testFailClosedWebhook(f)
	})

	It("Should mutate configmap", func() {
		webhookCleanup := registerMutatingWebhookForConfigMap(f, context)
		defer webhookCleanup()
		testMutatingConfigMapWebhook(f)
	})

	It("Should mutate pod and apply defaults after mutation", func() {
		webhookCleanup := registerMutatingWebhookForPod(f, context)
		defer webhookCleanup()
		testMutatingPodWebhook(f)
	})

	It("Should not be able to prevent deleting validating-webhook-configurations or mutating-webhook-configurations", func() {
		webhookCleanup := registerWebhookForWebhookConfigurations(f, context)
		defer webhookCleanup()
		testWebhookForWebhookConfigurations(f)
	})

	It("Should mutate crd", func() {
		testcrd, err := framework.CreateTestCRD(f)
		if err != nil {
			return
		}
		defer testcrd.CleanUp()
		webhookCleanup := registerMutatingWebhookForCRD(f, context, testcrd)
		defer webhookCleanup()
		testMutatingCRDWebhook(f, testcrd.Crd, testcrd.DynamicClient)
	})

	// TODO: add more e2e tests for mutating webhooks
	// 1. mutating webhook that mutates pod
	// 2. mutating webhook that sends empty patch
	//   2.1 and sets status.allowed=true
	//   2.2 and sets status.allowed=false
	// 3. mutating webhook that sends patch, but also sets status.allowed=false
	// 4. mtuating webhook that fail-open v.s. fail-closed
})
// createAuthReaderRoleBinding grants the webhook's default service account
// read access to the extension-apiserver-authentication configmap in
// kube-system. An AlreadyExists error is tolerated so reruns are safe.
func createAuthReaderRoleBinding(f *framework.Framework, namespace string) {
	By("Create role binding to let webhook read extension-apiserver-authentication")
	client := f.ClientSet
	// Create the role binding to allow the webhook read the extension-apiserver-authentication configmap
	_, err := client.RbacV1beta1().RoleBindings("kube-system").Create(&rbacv1beta1.RoleBinding{
		ObjectMeta: metav1.ObjectMeta{
			Name: roleBindingName,
			Annotations: map[string]string{
				rbacv1beta1.AutoUpdateAnnotationKey: "true",
			},
		},
		RoleRef: rbacv1beta1.RoleRef{
			APIGroup: "",
			Kind:     "Role",
			Name:     "extension-apiserver-authentication-reader",
		},
		// Webhook uses the default service account.
		Subjects: []rbacv1beta1.Subject{
			{
				Kind:      "ServiceAccount",
				Name:      "default",
				Namespace: namespace,
			},
		},
	})
	if err != nil && errors.IsAlreadyExists(err) {
		framework.Logf("role binding %s already exists", roleBindingName)
	} else {
		framework.ExpectNoError(err, "creating role binding %s:webhook to access configMap", namespace)
	}
}
// deployWebhookAndService creates the TLS secret, the webhook server
// deployment running the given image, and the service fronting it, then
// blocks until the deployment is complete and the service has an endpoint.
func deployWebhookAndService(f *framework.Framework, image string, context *certContext) {
	By("Deploying the webhook pod")
	client := f.ClientSet

	// Creating the secret that contains the webhook's cert.
	secret := &v1.Secret{
		ObjectMeta: metav1.ObjectMeta{
			Name: secretName,
		},
		Type: v1.SecretTypeOpaque,
		Data: map[string][]byte{
			"tls.crt": context.cert,
			"tls.key": context.key,
		},
	}
	namespace := f.Namespace.Name
	_, err := client.CoreV1().Secrets(namespace).Create(secret)
	framework.ExpectNoError(err, "creating secret %q in namespace %q", secretName, namespace)

	// Create the deployment of the webhook
	podLabels := map[string]string{"app": "sample-webhook", "webhook": "true"}
	replicas := int32(1)
	zero := int64(0)
	mounts := []v1.VolumeMount{
		{
			Name:      "webhook-certs",
			ReadOnly:  true,
			MountPath: "/webhook.local.config/certificates",
		},
	}
	volumes := []v1.Volume{
		{
			Name: "webhook-certs",
			VolumeSource: v1.VolumeSource{
				Secret: &v1.SecretVolumeSource{SecretName: secretName},
			},
		},
	}
	containers := []v1.Container{
		{
			Name:         "sample-webhook",
			VolumeMounts: mounts,
			Args: []string{
				"--tls-cert-file=/webhook.local.config/certificates/tls.crt",
				"--tls-private-key-file=/webhook.local.config/certificates/tls.key",
				"--alsologtostderr",
				"-v=4",
				"2>&1",
			},
			Image: image,
		},
	}
	d := &extensions.Deployment{
		ObjectMeta: metav1.ObjectMeta{
			Name: deploymentName,
		},
		Spec: extensions.DeploymentSpec{
			Replicas: &replicas,
			Strategy: extensions.DeploymentStrategy{
				Type: extensions.RollingUpdateDeploymentStrategyType,
			},
			Template: v1.PodTemplateSpec{
				ObjectMeta: metav1.ObjectMeta{
					Labels: podLabels,
				},
				Spec: v1.PodSpec{
					TerminationGracePeriodSeconds: &zero,
					Containers:                    containers,
					Volumes:                       volumes,
				},
			},
		},
	}
	deployment, err := client.ExtensionsV1beta1().Deployments(namespace).Create(d)
	framework.ExpectNoError(err, "creating deployment %s in namespace %s", deploymentName, namespace)

	By("Wait for the deployment to be ready")
	err = framework.WaitForDeploymentRevisionAndImage(client, namespace, deploymentName, "1", image)
	framework.ExpectNoError(err, "waiting for the deployment of image %s in %s in %s to complete", image, deploymentName, namespace)
	err = framework.WaitForDeploymentComplete(client, deployment)
	// NOTE(review): the message below has no format verbs, so the trailing
	// args are unused -- confirm the intended error message.
	framework.ExpectNoError(err, "waiting for the deployment status valid", image, deploymentName, namespace)

	By("Deploying the webhook service")
	serviceLabels := map[string]string{"webhook": "true"}
	service := &v1.Service{
		ObjectMeta: metav1.ObjectMeta{
			Namespace: namespace,
			Name:      serviceName,
			Labels:    map[string]string{"test": "webhook"},
		},
		Spec: v1.ServiceSpec{
			Selector: serviceLabels,
			Ports: []v1.ServicePort{
				{
					Protocol:   "TCP",
					Port:       443,
					TargetPort: intstr.FromInt(443),
				},
			},
		},
	}
	_, err = client.CoreV1().Services(namespace).Create(service)
	framework.ExpectNoError(err, "creating service %s in namespace %s", serviceName, namespace)

	By("Verifying the service has paired with the endpoint")
	err = framework.WaitForServiceEndpointsNum(client, namespace, serviceName, 1, 1*time.Second, 30*time.Second)
	framework.ExpectNoError(err, "waiting for service %s/%s have %d endpoint", namespace, serviceName, 1)
}
func strPtr(s string) *string { return &s }
// registerWebhook installs the validating webhook configuration used by the
// deny-pod/configmap spec: one hook for pods, one for configmaps (with a
// namespace-label escape hatch), and one unreachable fail-open hook. It
// returns a cleanup func that deletes the configuration.
func registerWebhook(f *framework.Framework, context *certContext) func() {
	client := f.ClientSet
	By("Registering the webhook via the AdmissionRegistration API")

	namespace := f.Namespace.Name
	configName := webhookConfigName
	// A webhook that cannot talk to server, with fail-open policy
	failOpenHook := failingWebhook(namespace, "fail-open.k8s.io")
	policyIgnore := v1beta1.Ignore
	failOpenHook.FailurePolicy = &policyIgnore

	_, err := client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Create(&v1beta1.ValidatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: configName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "deny-unwanted-pod-container-name-and-label.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{""},
						APIVersions: []string{"v1"},
						Resources:   []string{"pods"},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/pods"),
					},
					CABundle: context.signingCert,
				},
			},
			{
				Name: "deny-unwanted-configmap-data.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create, v1beta1.Update},
					Rule: v1beta1.Rule{
						APIGroups:   []string{""},
						APIVersions: []string{"v1"},
						Resources:   []string{"configmaps"},
					},
				}},
				// The webhook skips the namespace that has label "skip-webhook-admission":"yes"
				NamespaceSelector: &metav1.LabelSelector{
					MatchExpressions: []metav1.LabelSelectorRequirement{
						{
							Key:      skipNamespaceLabelKey,
							Operator: metav1.LabelSelectorOpNotIn,
							Values:   []string{skipNamespaceLabelValue},
						},
					},
				},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/configmaps"),
					},
					CABundle: context.signingCert,
				},
			},
			// Server cannot talk to this webhook, so it always fails.
			// Because this webhook is configured fail-open, request should be admitted after the call fails.
			failOpenHook,
		},
	})
	framework.ExpectNoError(err, "registering webhook config %s with namespace %s", configName, namespace)

	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)

	return func() {
		client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Delete(configName, nil)
	}
}
// registerMutatingWebhookForConfigMap installs a two-stage mutating webhook
// configuration for configmap creation (both stages hit the same service
// path) and returns a cleanup func that deletes it.
func registerMutatingWebhookForConfigMap(f *framework.Framework, context *certContext) func() {
	client := f.ClientSet
	By("Registering the mutating configmap webhook via the AdmissionRegistration API")

	namespace := f.Namespace.Name
	configName := mutatingWebhookConfigName
	_, err := client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Create(&v1beta1.MutatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: configName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "adding-configmap-data-stage-1.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{""},
						APIVersions: []string{"v1"},
						Resources:   []string{"configmaps"},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/mutating-configmaps"),
					},
					CABundle: context.signingCert,
				},
			},
			{
				Name: "adding-configmap-data-stage-2.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{""},
						APIVersions: []string{"v1"},
						Resources:   []string{"configmaps"},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/mutating-configmaps"),
					},
					CABundle: context.signingCert,
				},
			},
		},
	})
	framework.ExpectNoError(err, "registering mutating webhook config %s with namespace %s", configName, namespace)

	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)

	return func() { client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Delete(configName, nil) }
}
// testMutatingConfigMapWebhook creates a configmap and verifies that both
// mutating webhook stages patched its data.
func testMutatingConfigMapWebhook(f *framework.Framework) {
	By("create a configmap that should be updated by the webhook")
	client := f.ClientSet

	cm := toBeMutatedConfigMap(f)
	mutated, err := client.CoreV1().ConfigMaps(f.Namespace.Name).Create(cm)
	Expect(err).To(BeNil())

	// Each stage is expected to add one marker key next to the original one.
	want := map[string]string{
		"mutation-start":   "yes",
		"mutation-stage-1": "yes",
		"mutation-stage-2": "yes",
	}
	if !reflect.DeepEqual(want, mutated.Data) {
		framework.Failf("\nexpected %#v\n, got %#v\n", want, mutated.Data)
	}
}
// registerMutatingWebhookForPod installs a mutating webhook that injects an
// init container into created pods, returning a cleanup func that deletes
// the configuration.
func registerMutatingWebhookForPod(f *framework.Framework, context *certContext) func() {
	client := f.ClientSet
	By("Registering the mutating pod webhook via the AdmissionRegistration API")

	namespace := f.Namespace.Name
	configName := podMutatingWebhookConfigName
	_, err := client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Create(&v1beta1.MutatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: configName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "adding-init-container.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{""},
						APIVersions: []string{"v1"},
						Resources:   []string{"pods"},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/mutating-pods"),
					},
					CABundle: context.signingCert,
				},
			},
		},
	})
	framework.ExpectNoError(err, "registering mutating webhook config %s with namespace %s", configName, namespace)

	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)

	return func() { client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Delete(configName, nil) }
}
// testMutatingPodWebhook creates a pod and verifies the mutating webhook
// injected exactly one init container with the expected name, and that the
// injected container's terminationMessagePolicy was defaulted by the API.
func testMutatingPodWebhook(f *framework.Framework) {
	By("create a pod that should be updated by the webhook")
	client := f.ClientSet
	// The object created here is a pod; the previous local name
	// ("configMap") was a copy-paste leftover and misleading.
	pod := toBeMutatedPod(f)
	mutatedPod, err := client.CoreV1().Pods(f.Namespace.Name).Create(pod)
	Expect(err).To(BeNil())
	if len(mutatedPod.Spec.InitContainers) != 1 {
		framework.Failf("expect pod to have 1 init container, got %#v", mutatedPod.Spec.InitContainers)
	}
	if got, expected := mutatedPod.Spec.InitContainers[0].Name, "webhook-added-init-container"; got != expected {
		framework.Failf("expect the init container name to be %q, got %q", expected, got)
	}
	if got, expected := mutatedPod.Spec.InitContainers[0].TerminationMessagePolicy, v1.TerminationMessageReadFile; got != expected {
		framework.Failf("expect the init terminationMessagePolicy to be default to %q, got %q", expected, got)
	}
}
// toBeMutatedPod builds the pod fixture the mutating pod webhook operates
// on: a single pause container and no init containers.
func toBeMutatedPod(f *framework.Framework) *v1.Pod {
	container := v1.Container{
		Name:  "example",
		Image: framework.GetPauseImageName(f.ClientSet),
	}
	pod := &v1.Pod{}
	pod.ObjectMeta = metav1.ObjectMeta{Name: "webhook-to-be-mutated"}
	pod.Spec = v1.PodSpec{Containers: []v1.Container{container}}
	return pod
}
// testWebhook exercises the validating webhook end to end: non-compliant
// pods and configmaps are rejected on CREATE, PUT and PATCH; a hanging
// webhook surfaces as a timeout error; a compliant configmap is admitted;
// and a namespace carrying the skip label bypasses the webhook entirely.
func testWebhook(f *framework.Framework) {
	By("create a pod that should be denied by the webhook")
	client := f.ClientSet
	// Creating the pod, the request should be rejected
	pod := nonCompliantPod(f)
	_, err := client.CoreV1().Pods(f.Namespace.Name).Create(pod)
	Expect(err).NotTo(BeNil())
	// The pod violates two rules (container name and label); both messages
	// must appear in the single returned error.
	expectedErrMsg1 := "the pod contains unwanted container name"
	if !strings.Contains(err.Error(), expectedErrMsg1) {
		framework.Failf("expect error contains %q, got %q", expectedErrMsg1, err.Error())
	}
	expectedErrMsg2 := "the pod contains unwanted label"
	if !strings.Contains(err.Error(), expectedErrMsg2) {
		framework.Failf("expect error contains %q, got %q", expectedErrMsg2, err.Error())
	}
	By("create a pod that causes the webhook to hang")
	client = f.ClientSet
	// Creating the pod, the request should be rejected
	pod = hangingPod(f)
	_, err = client.CoreV1().Pods(f.Namespace.Name).Create(pod)
	Expect(err).NotTo(BeNil())
	// The request must fail with a timeout rather than block forever.
	expectedTimeoutErr := "request did not complete within allowed duration"
	if !strings.Contains(err.Error(), expectedTimeoutErr) {
		framework.Failf("expect timeout error %q, got %q", expectedTimeoutErr, err.Error())
	}
	By("create a configmap that should be denied by the webhook")
	// Creating the configmap, the request should be rejected
	configmap := nonCompliantConfigMap(f)
	_, err = client.CoreV1().ConfigMaps(f.Namespace.Name).Create(configmap)
	Expect(err).NotTo(BeNil())
	expectedErrMsg := "the configmap contains unwanted key and value"
	if !strings.Contains(err.Error(), expectedErrMsg) {
		framework.Failf("expect error contains %q, got %q", expectedErrMsg, err.Error())
	}
	By("create a configmap that should be admitted by the webhook")
	// Creating the configmap, the request should be admitted
	configmap = &v1.ConfigMap{
		ObjectMeta: metav1.ObjectMeta{
			Name: allowedConfigMapName,
		},
		Data: map[string]string{
			"admit": "this",
		},
	}
	_, err = client.CoreV1().ConfigMaps(f.Namespace.Name).Create(configmap)
	Expect(err).NotTo(HaveOccurred())
	By("update (PUT) the admitted configmap to a non-compliant one should be rejected by the webhook")
	// Mutator that turns the admitted configmap into a non-compliant one.
	toNonCompliantFn := func(cm *v1.ConfigMap) {
		if cm.Data == nil {
			cm.Data = map[string]string{}
		}
		cm.Data["webhook-e2e-test"] = "webhook-disallow"
	}
	_, err = updateConfigMap(client, f.Namespace.Name, allowedConfigMapName, toNonCompliantFn)
	Expect(err).NotTo(BeNil())
	if !strings.Contains(err.Error(), expectedErrMsg) {
		framework.Failf("expect error contains %q, got %q", expectedErrMsg, err.Error())
	}
	By("update (PATCH) the admitted configmap to a non-compliant one should be rejected by the webhook")
	patch := nonCompliantConfigMapPatch()
	_, err = client.CoreV1().ConfigMaps(f.Namespace.Name).Patch(allowedConfigMapName, types.StrategicMergePatchType, []byte(patch))
	Expect(err).NotTo(BeNil())
	if !strings.Contains(err.Error(), expectedErrMsg) {
		framework.Failf("expect error contains %q, got %q", expectedErrMsg, err.Error())
	}
	By("create a namespace that bypass the webhook")
	// The namespace carries the skip label, which the webhook's namespace
	// selector presumably excludes (see the webhook registration).
	err = createNamespace(f, &v1.Namespace{ObjectMeta: metav1.ObjectMeta{
		Name: skippedNamespaceName,
		Labels: map[string]string{
			skipNamespaceLabelKey: skipNamespaceLabelValue,
		},
	}})
	framework.ExpectNoError(err, "creating namespace %q", skippedNamespaceName)
	// clean up the namespace
	defer client.CoreV1().Namespaces().Delete(skippedNamespaceName, nil)
	By("create a configmap that violates the webhook policy but is in a whitelisted namespace")
	configmap = nonCompliantConfigMap(f)
	_, err = client.CoreV1().ConfigMaps(skippedNamespaceName).Create(configmap)
	Expect(err).To(BeNil())
}
// failingWebhook returns a webhook with rule of create configmaps,
// but with an invalid client config so that server cannot communicate with it
func failingWebhook(namespace, name string) v1beta1.Webhook {
	rule := v1beta1.RuleWithOperations{
		Operations: []v1beta1.OperationType{v1beta1.Create},
		Rule: v1beta1.Rule{
			APIGroups:   []string{""},
			APIVersions: []string{"v1"},
			Resources:   []string{"configmaps"},
		},
	}
	clientConfig := v1beta1.WebhookClientConfig{
		Service: &v1beta1.ServiceReference{
			Namespace: namespace,
			Name:      serviceName,
			Path:      strPtr("/configmaps"),
		},
		// Without CA bundle, the call to webhook always fails
		CABundle: nil,
	}
	return v1beta1.Webhook{
		Name:         name,
		Rules:        []v1beta1.RuleWithOperations{rule},
		ClientConfig: clientConfig,
	}
}
// registerFailClosedWebhook registers a validating webhook the apiserver
// cannot reach (no CA bundle, see failingWebhook) with failurePolicy=Fail,
// scoped by a namespace selector to namespaces labeled with
// failNamespaceLabelKey. Returns a cleanup function that deletes the
// configuration.
func registerFailClosedWebhook(f *framework.Framework, context *certContext) func() {
	client := f.ClientSet
	By("Registering a webhook that server cannot talk to, with fail closed policy, via the AdmissionRegistration API")
	namespace := f.Namespace.Name
	configName := webhookFailClosedConfigName
	// A webhook that cannot talk to server, with fail-closed policy
	policyFail := v1beta1.Fail
	hook := failingWebhook(namespace, "fail-closed.k8s.io")
	hook.FailurePolicy = &policyFail
	// Restrict the always-failing webhook to specially labeled namespaces
	// so it cannot interfere with the rest of the suite.
	hook.NamespaceSelector = &metav1.LabelSelector{
		MatchExpressions: []metav1.LabelSelectorRequirement{
			{
				Key:      failNamespaceLabelKey,
				Operator: metav1.LabelSelectorOpIn,
				Values:   []string{failNamespaceLabelValue},
			},
		},
	}
	_, err := client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Create(&v1beta1.ValidatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: configName,
		},
		Webhooks: []v1beta1.Webhook{
			// Server cannot talk to this webhook, so it always fails.
			// Because this webhook is configured fail-closed, request should be rejected after the call fails.
			hook,
		},
	})
	framework.ExpectNoError(err, "registering webhook config %s with namespace %s", configName, namespace)
	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)
	return func() {
		f.ClientSet.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Delete(configName, nil)
	}
}
// testFailClosedWebhook verifies fail-closed behavior: in a namespace whose
// label matches the unreachable webhook's selector, even an otherwise valid
// configmap create is rejected, and the rejection is an internal error
// (webhook call failure), not a policy denial.
func testFailClosedWebhook(f *framework.Framework) {
	client := f.ClientSet
	By("create a namespace for the webhook")
	err := createNamespace(f, &v1.Namespace{ObjectMeta: metav1.ObjectMeta{
		Name: failNamespaceName,
		Labels: map[string]string{
			failNamespaceLabelKey: failNamespaceLabelValue,
		},
	}})
	framework.ExpectNoError(err, "creating namespace %q", failNamespaceName)
	defer client.CoreV1().Namespaces().Delete(failNamespaceName, nil)
	By("create a configmap should be unconditionally rejected by the webhook")
	configmap := &v1.ConfigMap{
		ObjectMeta: metav1.ObjectMeta{
			Name: "foo",
		},
	}
	_, err = client.CoreV1().ConfigMaps(failNamespaceName).Create(configmap)
	Expect(err).To(HaveOccurred())
	if !errors.IsInternalError(err) {
		framework.Failf("expect an internal error, got %#v", err)
	}
}
// registerWebhookForWebhookConfigurations registers a fail-closed validating
// webhook that denies all DELETE requests for validating and mutating
// webhook configuration objects. It returns a cleanup function that deletes
// the configuration and asserts the deletion succeeded.
func registerWebhookForWebhookConfigurations(f *framework.Framework, context *certContext) func() {
	client := f.ClientSet
	By("Registering a webhook on ValidatingWebhookConfiguration and MutatingWebhookConfiguration objects, via the AdmissionRegistration API")
	namespace := f.Namespace.Name
	configName := webhookForWebhooksConfigName
	failurePolicy := v1beta1.Fail
	// This webhook will deny all requests to Delete admissionregistration objects.
	// (The former `var err error` declaration was folded into `:=` here —
	// err was not used before this call.)
	_, err := client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Create(&v1beta1.ValidatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: configName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "deny-webhook-configuration-deletions.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Delete},
					Rule: v1beta1.Rule{
						APIGroups:   []string{"admissionregistration.k8s.io"},
						APIVersions: []string{"*"},
						Resources: []string{
							"validatingwebhookconfigurations",
							"mutatingwebhookconfigurations",
						},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/always-deny"),
					},
					CABundle: context.signingCert,
				},
				FailurePolicy: &failurePolicy,
			},
		},
	})
	framework.ExpectNoError(err, "registering webhook config %s with namespace %s", configName, namespace)
	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)
	return func() {
		err := client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Delete(configName, nil)
		framework.ExpectNoError(err, "deleting webhook config %s with namespace %s", configName, namespace)
	}
}
// This test assumes that the deletion-rejecting webhook defined in
// registerWebhookForWebhookConfigurations is in place.
//
// It creates one validating and one mutating webhook configuration — each
// pointing at an unrecognized path with failurePolicy=Ignore, so they never
// affect admission — and verifies both can still be deleted while the
// deny-deletions webhook is active.
func testWebhookForWebhookConfigurations(f *framework.Framework) {
	var err error
	client := f.ClientSet
	By("Creating a validating-webhook-configuration object")
	namespace := f.Namespace.Name
	failurePolicy := v1beta1.Ignore
	_, err = client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Create(&v1beta1.ValidatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: removableValidatingHookName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "should-be-removable-validating-webhook.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{"*"},
						APIVersions: []string{"*"},
						Resources:   []string{"*"},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						// This path not recognized by the webhook service,
						// so the call to this webhook will always fail,
						// but because the failure policy is ignore, it will
						// have no effect on admission requests.
						Path: strPtr(""),
					},
					CABundle: nil,
				},
				FailurePolicy: &failurePolicy,
			},
		},
	})
	framework.ExpectNoError(err, "registering webhook config %s with namespace %s", removableValidatingHookName, namespace)
	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)
	By("Deleting the validating-webhook-configuration, which should be possible to remove")
	err = client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Delete(removableValidatingHookName, nil)
	framework.ExpectNoError(err, "deleting webhook config %s with namespace %s", removableValidatingHookName, namespace)
	By("Creating a mutating-webhook-configuration object")
	_, err = client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Create(&v1beta1.MutatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: removableMutatingHookName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "should-be-removable-mutating-webhook.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{"*"},
						APIVersions: []string{"*"},
						Resources:   []string{"*"},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						// This path not recognized by the webhook service,
						// so the call to this webhook will always fail,
						// but because the failure policy is ignore, it will
						// have no effect on admission requests.
						Path: strPtr(""),
					},
					CABundle: nil,
				},
				FailurePolicy: &failurePolicy,
			},
		},
	})
	framework.ExpectNoError(err, "registering webhook config %s with namespace %s", removableMutatingHookName, namespace)
	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)
	By("Deleting the mutating-webhook-configuration, which should be possible to remove")
	err = client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Delete(removableMutatingHookName, nil)
	framework.ExpectNoError(err, "deleting webhook config %s with namespace %s", removableMutatingHookName, namespace)
}
// createNamespace creates ns, retrying for up to 30s while a namespace of
// the same name is still being deleted.
func createNamespace(f *framework.Framework, ns *v1.Namespace) error {
	attempt := func() (bool, error) {
		if _, err := f.ClientSet.CoreV1().Namespaces().Create(ns); err != nil {
			if strings.HasPrefix(err.Error(), "object is being deleted:") {
				// Previous namespace still terminating; poll again.
				return false, nil
			}
			return false, err
		}
		return true, nil
	}
	return wait.PollImmediate(100*time.Millisecond, 30*time.Second, attempt)
}
// nonCompliantPod builds a pod that violates the validating webhook policy
// via both its container name and its "webhook-e2e-test" label.
func nonCompliantPod(f *framework.Framework) *v1.Pod {
	meta := metav1.ObjectMeta{
		Name:   disallowedPodName,
		Labels: map[string]string{"webhook-e2e-test": "webhook-disallow"},
	}
	spec := v1.PodSpec{
		Containers: []v1.Container{{
			Name:  "webhook-disallow",
			Image: framework.GetPauseImageName(f.ClientSet),
		}},
	}
	return &v1.Pod{ObjectMeta: meta, Spec: spec}
}
// hangingPod builds a pod whose "wait-forever" label marks it for the
// webhook endpoint that never responds, used to exercise timeout handling.
func hangingPod(f *framework.Framework) *v1.Pod {
	meta := metav1.ObjectMeta{
		Name:   hangingPodName,
		Labels: map[string]string{"webhook-e2e-test": "wait-forever"},
	}
	spec := v1.PodSpec{
		Containers: []v1.Container{{
			Name:  "wait-forever",
			Image: framework.GetPauseImageName(f.ClientSet),
		}},
	}
	return &v1.Pod{ObjectMeta: meta, Spec: spec}
}
// nonCompliantConfigMap builds a configmap carrying the data key/value pair
// the validating webhook rejects.
func nonCompliantConfigMap(f *framework.Framework) *v1.ConfigMap {
	cm := &v1.ConfigMap{}
	cm.Name = disallowedConfigMapName
	cm.Data = map[string]string{"webhook-e2e-test": "webhook-disallow"}
	return cm
}
// toBeMutatedConfigMap builds the configmap fixture the mutating webhooks
// operate on; it starts with only the "mutation-start" key.
func toBeMutatedConfigMap(f *framework.Framework) *v1.ConfigMap {
	cm := &v1.ConfigMap{}
	cm.Name = "to-be-mutated"
	cm.Data = map[string]string{"mutation-start": "yes"}
	return cm
}
// nonCompliantConfigMapPatch returns a strategic-merge patch that sets the
// data key/value pair the validating configmap webhook rejects.
func nonCompliantConfigMapPatch() string {
	// fmt.Sprint on a single string constant was a no-op; return the
	// literal directly.
	return `{"data":{"webhook-e2e-test":"webhook-disallow"}}`
}
// updateConfigMapFn mutates a configmap in place before it is written back.
type updateConfigMapFn func(cm *v1.ConfigMap)

// updateConfigMap applies `update` to the named configmap with a
// read-modify-write loop for up to one minute, retrying only on version
// conflicts; any other error (including a webhook denial) stops the poll
// and is returned to the caller.
func updateConfigMap(c clientset.Interface, ns, name string, update updateConfigMapFn) (*v1.ConfigMap, error) {
	var cm *v1.ConfigMap
	pollErr := wait.PollImmediate(2*time.Second, 1*time.Minute, func() (bool, error) {
		var err error
		if cm, err = c.CoreV1().ConfigMaps(ns).Get(name, metav1.GetOptions{}); err != nil {
			return false, err
		}
		update(cm)
		if cm, err = c.CoreV1().ConfigMaps(ns).Update(cm); err == nil {
			return true, nil
		}
		// Only retry update on conflict
		if !errors.IsConflict(err) {
			return false, err
		}
		return false, nil
	})
	return cm, pollErr
}
// cleanWebhookTest removes the fixtures shared by the webhook tests: the
// webhook service, its backing deployment, the serving-cert secret, and the
// role binding in kube-system. Deletion errors are deliberately discarded
// so cleanup is best-effort.
func cleanWebhookTest(client clientset.Interface, namespaceName string) {
	_ = client.CoreV1().Services(namespaceName).Delete(serviceName, nil)
	_ = client.ExtensionsV1beta1().Deployments(namespaceName).Delete(deploymentName, nil)
	_ = client.CoreV1().Secrets(namespaceName).Delete(secretName, nil)
	_ = client.RbacV1beta1().RoleBindings("kube-system").Delete(roleBindingName, nil)
}
// registerWebhookForCRD registers a validating webhook on CREATE of the
// given test CRD's resources, routed to the webhook service at /crd.
// Returns a cleanup function that deletes the configuration.
func registerWebhookForCRD(f *framework.Framework, context *certContext, testcrd *framework.TestCrd) func() {
	client := f.ClientSet
	By("Registering the crd webhook via the AdmissionRegistration API")
	namespace := f.Namespace.Name
	configName := crdWebhookConfigName
	_, err := client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Create(&v1beta1.ValidatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: configName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "deny-unwanted-crd-data.k8s.io",
				// Match only CREATEs of this CRD's group/version/plural.
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{testcrd.ApiGroup},
						APIVersions: []string{testcrd.ApiVersion},
						Resources:   []string{testcrd.GetPluralName()},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/crd"),
					},
					CABundle: context.signingCert,
				},
			},
		},
	})
	framework.ExpectNoError(err, "registering crd webhook config %s with namespace %s", configName, namespace)
	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)
	return func() {
		client.AdmissionregistrationV1beta1().ValidatingWebhookConfigurations().Delete(configName, nil)
	}
}
// registerMutatingWebhookForCRD registers two mutating webhooks (stage 1
// and stage 2, both served at /mutating-crd) on CREATE of the given test
// CRD's resources, so tests can verify that multiple mutating webhooks are
// applied. Returns a cleanup function that deletes the configuration.
func registerMutatingWebhookForCRD(f *framework.Framework, context *certContext, testcrd *framework.TestCrd) func() {
	client := f.ClientSet
	By("Registering the mutating webhook for crd via the AdmissionRegistration API")
	namespace := f.Namespace.Name
	configName := crdMutatingWebhookConfigName
	_, err := client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Create(&v1beta1.MutatingWebhookConfiguration{
		ObjectMeta: metav1.ObjectMeta{
			Name: configName,
		},
		Webhooks: []v1beta1.Webhook{
			{
				Name: "mutate-crd-data-stage-1.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{testcrd.ApiGroup},
						APIVersions: []string{testcrd.ApiVersion},
						Resources:   []string{testcrd.GetPluralName()},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/mutating-crd"),
					},
					CABundle: context.signingCert,
				},
			},
			// Second stage: same rule and endpoint, registered as a
			// separate webhook entry.
			{
				Name: "mutate-crd-data-stage-2.k8s.io",
				Rules: []v1beta1.RuleWithOperations{{
					Operations: []v1beta1.OperationType{v1beta1.Create},
					Rule: v1beta1.Rule{
						APIGroups:   []string{testcrd.ApiGroup},
						APIVersions: []string{testcrd.ApiVersion},
						Resources:   []string{testcrd.GetPluralName()},
					},
				}},
				ClientConfig: v1beta1.WebhookClientConfig{
					Service: &v1beta1.ServiceReference{
						Namespace: namespace,
						Name:      serviceName,
						Path:      strPtr("/mutating-crd"),
					},
					CABundle: context.signingCert,
				},
			},
		},
	})
	framework.ExpectNoError(err, "registering crd webhook config %s with namespace %s", configName, namespace)
	// The webhook configuration is honored in 10s.
	time.Sleep(10 * time.Second)
	return func() { client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Delete(configName, nil) }
}
// testCRDWebhook creates a custom resource carrying the disallowed
// "webhook-e2e-test" data and expects the validating CRD webhook to reject
// the create with its known error message.
func testCRDWebhook(f *framework.Framework, crd *apiextensionsv1beta1.CustomResourceDefinition, crdClient dynamic.ResourceInterface) {
	By("Creating a custom resource that should be denied by the webhook")
	crInstance := &unstructured.Unstructured{
		Object: map[string]interface{}{
			"kind":       crd.Spec.Names.Kind,
			"apiVersion": crd.Spec.Group + "/" + crd.Spec.Version,
			"metadata": map[string]interface{}{
				"name":      "cr-instance-1",
				"namespace": f.Namespace.Name,
			},
			"data": map[string]interface{}{
				"webhook-e2e-test": "webhook-disallow",
			},
		},
	}
	_, err := crdClient.Create(crInstance)
	Expect(err).NotTo(BeNil())
	expectedErrMsg := "the custom resource contains unwanted data"
	if !strings.Contains(err.Error(), expectedErrMsg) {
		framework.Failf("expect error contains %q, got %q", expectedErrMsg, err.Error())
	}
}
// testMutatingCRDWebhook creates a custom resource and verifies that both
// mutating CRD webhook stages ran: the stored object's data must contain
// the "mutation-stage-1" and "mutation-stage-2" keys in addition to the
// "mutation-start" key it was created with.
func testMutatingCRDWebhook(f *framework.Framework, crd *apiextensionsv1beta1.CustomResourceDefinition, crdClient dynamic.ResourceInterface) {
	By("Creating a custom resource that should be mutated by the webhook")
	cr := &unstructured.Unstructured{
		Object: map[string]interface{}{
			"kind":       crd.Spec.Names.Kind,
			"apiVersion": crd.Spec.Group + "/" + crd.Spec.Version,
			"metadata": map[string]interface{}{
				"name":      "cr-instance-1",
				"namespace": f.Namespace.Name,
			},
			"data": map[string]interface{}{
				"mutation-start": "yes",
			},
		},
	}
	mutatedCR, err := crdClient.Create(cr)
	Expect(err).To(BeNil())
	// Exact-match comparison against the full data map.
	expectedCRData := map[string]interface{}{
		"mutation-start":   "yes",
		"mutation-stage-1": "yes",
		"mutation-stage-2": "yes",
	}
	if !reflect.DeepEqual(expectedCRData, mutatedCR.Object["data"]) {
		framework.Failf("\nexpected %#v\n, got %#v\n", expectedCRData, mutatedCR.Object["data"])
	}
}
| maxamillion/origin | vendor/k8s.io/kubernetes/test/e2e/apimachinery/webhook.go | GO | apache-2.0 | 38,059 |
//>>built
define("dgrid/extensions/nls/zh-cn/columnHider",{popupLabel:"\u663e\u793a\u6216\u9690\u85cf\u5217"}); | aconyteds/Esri-Ozone-Map-Widget | vendor/js/esri/arcgis_js_api/library/3.12/3.12compact/dgrid/extensions/nls/zh-cn/columnHider.js | JavaScript | apache-2.0 | 111 |
"""
Telstra API platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.telstra/
"""
import logging
import requests
import voluptuous as vol
from homeassistant.components.notify import (
BaseNotificationService, ATTR_TITLE, PLATFORM_SCHEMA)
from homeassistant.const import CONTENT_TYPE_JSON, HTTP_HEADER_CONTENT_TYPE
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_CONSUMER_KEY = 'consumer_key'
CONF_CONSUMER_SECRET = 'consumer_secret'
CONF_PHONE_NUMBER = 'phone_number'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_CONSUMER_KEY): cv.string,
vol.Required(CONF_CONSUMER_SECRET): cv.string,
vol.Required(CONF_PHONE_NUMBER): cv.string,
})
def get_service(hass, config, discovery_info=None):
    """Get the Telstra SMS API notification service.

    Validates the configured credentials by requesting a token up front,
    so a misconfiguration surfaces at setup time; returns None on failure.
    """
    consumer_key = config.get(CONF_CONSUMER_KEY)
    consumer_secret = config.get(CONF_CONSUMER_SECRET)
    phone_number = config.get(CONF_PHONE_NUMBER)

    if _authenticate(consumer_key, consumer_secret) is False:
        # Not inside an except block, so _LOGGER.exception would log a
        # bogus "NoneType: None" traceback; use error instead.
        _LOGGER.error("Error obtaining authorization from Telstra API")
        return None

    return TelstraNotificationService(
        consumer_key, consumer_secret, phone_number)
class TelstraNotificationService(BaseNotificationService):
    """Implementation of a notification service for the Telstra SMS API."""

    def __init__(self, consumer_key, consumer_secret, phone_number):
        """Initialize the service."""
        self._consumer_key = consumer_key
        self._consumer_secret = consumer_secret
        self._phone_number = phone_number

    def send_message(self, message="", **kwargs):
        """Send a message to a user.

        A fresh OAuth token is requested for every message; tokens are not
        cached between calls.
        """
        title = kwargs.get(ATTR_TITLE)

        # Retrieve authorization first
        token_response = _authenticate(
            self._consumer_key, self._consumer_secret)
        if token_response is False:
            # Not inside an except block, so _LOGGER.exception would log a
            # bogus traceback here; use error instead.
            _LOGGER.error("Error obtaining authorization from Telstra API")
            return

        # Prefix the optional title to the message body.
        if title:
            text = '{} {}'.format(title, message)
        else:
            text = message

        message_data = {
            'to': self._phone_number,
            'body': text,
        }
        message_resource = 'https://api.telstra.com/v1/sms/messages'
        message_headers = {
            HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_JSON,
            'Authorization': 'Bearer ' + token_response['access_token'],
        }
        message_response = requests.post(
            message_resource, headers=message_headers, json=message_data,
            timeout=10)

        # The API signals an accepted message with HTTP 202; anything else
        # is logged as a failure (again, no active exception to attach).
        if message_response.status_code != 202:
            _LOGGER.error("Failed to send SMS. Status code: %d",
                          message_response.status_code)
def _authenticate(consumer_key, consumer_secret):
    """Authenticate with the Telstra API.

    Returns the decoded token response (a dict containing at least
    'access_token' on success) or False if the response carries an
    'error' key.
    """
    token_data = {
        'client_id': consumer_key,
        'client_secret': consumer_secret,
        'grant_type': 'client_credentials',
        'scope': 'SMS'
    }
    token_resource = 'https://api.telstra.com/v1/oauth/token'
    # NOTE(review): credentials are sent as query parameters of a GET
    # request; OAuth token endpoints conventionally expect a POST with a
    # form body -- confirm this matches the Telstra v1 API contract.
    token_response = requests.get(
        token_resource, params=token_data, timeout=10).json()
    if 'error' in token_response:
        return False
    return token_response
| MungoRae/home-assistant | homeassistant/components/notify/telstra.py | Python | apache-2.0 | 3,404 |
package git4idea.repo;
import com.intellij.openapi.application.PluginPathManager;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsTestUtil;
import com.intellij.util.Function;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.ZipUtil;
import com.intellij.vcs.log.Hash;
import com.intellij.vcs.log.impl.HashImpl;
import git4idea.GitBranch;
import git4idea.GitLocalBranch;
import git4idea.test.GitPlatformTest;
import junit.framework.TestCase;
import org.jetbrains.annotations.NotNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
@RunWith(Parameterized.class)
public class GitRepositoryReaderTest extends GitPlatformTest {

  /** Directory holding this test case's input data and expected results. */
  @NotNull private final File myTestCaseDir;
  private File myTempDir;                      // per-test working copy of the test case
  private GitRepositoryReader myRepositoryReader; // the reader under test
  private File myGitDir;                       // the ".git" directory inside myTempDir

  // One parameterized run per subdirectory of git4idea/testData/repo; the
  // directory name is used as the test's display name.
  @Parameterized.Parameters(name = "{0}")
  public static Collection<Object[]> data() {
    File pluginRoot = new File(PluginPathManager.getPluginHomePath("git4idea"));
    File dataDir = new File(new File(pluginRoot, "testData"), "repo");
    File[] testCases = dataDir.listFiles(FileUtilRt.ALL_DIRECTORIES);
    return ContainerUtil.map(testCases, new Function<File, Object[]>() {
      @Override
      public Object[] fun(File file) {
        // {display name, test case directory}
        return new Object[] { file.getName(), file };
      }
    });
  }

  @SuppressWarnings({"UnusedParameters", "JUnitTestCaseWithNonTrivialConstructors"})
  public GitRepositoryReaderTest(@NotNull String name, @NotNull File testDir) {
    myTestCaseDir = testDir;
  }

  @Override
  @Before
  public void setUp() throws Exception {
    // The platform's setUp must run on the EDT.
    edt(new ThrowableRunnable() {
      @Override
      public void run() throws Exception {
        GitRepositoryReaderTest.super.setUp();
      }
    });
    myTempDir = new File(myProjectRoot.getPath(), "test");
    prepareTest(myTestCaseDir);
  }

  @After
  @Override
  public void tearDown() throws Exception {
    try {
      if (myTempDir != null) {
        FileUtil.delete(myTempDir);
      }
    }
    finally {
      // The platform's tearDown must run on the EDT even if temp-dir
      // cleanup above failed.
      edt(new ThrowableRunnable() {
        @Override
        public void run() throws Throwable {
          GitRepositoryReaderTest.super.tearDown();
        }
      });
    }
  }

  /**
   * Copies the test case into the temp dir and turns its "dot_git"
   * directory (unzipping "dot_git.zip" first if only the archive exists)
   * into a real ".git", then creates the reader under test over it.
   */
  private void prepareTest(File testDir) throws IOException {
    assertTrue("Temp directory was not created", myTempDir.mkdir());
    FileUtil.copyDir(testDir, myTempDir);
    myGitDir = new File(myTempDir, ".git");
    File dotGit = new File(myTempDir, "dot_git");
    if (!dotGit.exists()) {
      File dotGitZip = new File(myTempDir, "dot_git.zip");
      assertTrue("Neither dot_git nor dot_git.zip were found", dotGitZip.exists());
      ZipUtil.extract(dotGitZip, myTempDir, null);
    }
    FileUtil.rename(dotGit, myGitDir);
    TestCase.assertTrue(myGitDir.exists());
    myRepositoryReader = new GitRepositoryReader(myGitDir);
  }

  /** Reads the expected HEAD revision from head.txt. */
  @NotNull
  private static String readHead(@NotNull File dir) throws IOException {
    return FileUtil.loadFile(new File(dir, "head.txt")).trim();
  }

  /** Reads the expected current branch ("&lt;hash&gt; &lt;name&gt;") from current-branch.txt. */
  @NotNull
  private static Branch readCurrentBranch(@NotNull File resultDir) throws IOException {
    String branch = FileUtil.loadFile(new File(resultDir, "current-branch.txt")).trim();
    return readBranchFromLine(branch);
  }

  /** Parses a "&lt;hash&gt; &lt;name&gt;" line into a Branch holder. */
  @NotNull
  private static Branch readBranchFromLine(@NotNull String branch) {
    List<String> branchAndHash = StringUtil.split(branch, " ");
    return new Branch(branchAndHash.get(1), HashImpl.build(branchAndHash.get(0)));
  }

  /**
   * The single test: reads the repository state and compares HEAD, the
   * current branch, and the local/remote branch lists against the expected
   * result files of the test case.
   */
  @Test
  public void testBranches() throws Exception {
    Collection<GitRemote> remotes = GitConfig.read(myPlatformFacade, new File(myGitDir, "config")).parseRemotes();
    GitBranchState state = myRepositoryReader.readState(remotes);
    assertEquals("HEAD revision is incorrect", readHead(myTempDir), state.getCurrentRevision());
    assertEqualBranches(readCurrentBranch(myTempDir), state.getCurrentBranch());
    assertBranches(state.getLocalBranches(), readBranches(myTempDir, true));
    assertBranches(state.getRemoteBranches(), readBranches(myTempDir, false));
  }

  // Compares a parsed expected branch with an actual local branch by short
  // name and hash.
  private static void assertEqualBranches(@NotNull Branch expected, @NotNull GitLocalBranch actual) {
    assertEquals(expected.name, actual.getName());
    assertEquals("Incorrect hash of branch " + actual.getName(), expected.hash, actual.getHash());
  }

  private static void assertBranches(Collection<? extends GitBranch> actualBranches, Collection<Branch> expectedBranches) {
    VcsTestUtil.assertEqualCollections(actualBranches, expectedBranches, new VcsTestUtil.EqualityChecker<GitBranch, Branch>() {
      @Override
      public boolean areEqual(GitBranch actual, Branch expected) {
        return branchesAreEqual(actual, expected);
      }
    });
  }

  /** Loads the expected branch list from local-branches.txt or remote-branches.txt. */
  @NotNull
  private static Collection<Branch> readBranches(@NotNull File resultDir, boolean local) throws IOException {
    String content = FileUtil.loadFile(new File(resultDir, local ? "local-branches.txt" : "remote-branches.txt"));
    Collection<Branch> branches = ContainerUtil.newArrayList();
    for (String line : StringUtil.splitByLines(content)) {
      branches.add(readBranchFromLine(line));
    }
    return branches;
  }

  // Full-name + hash comparison used for the collection assertions.
  private static boolean branchesAreEqual(GitBranch actual, Branch expected) {
    return actual.getFullName().equals(expected.name) && actual.getHash().equals(expected.hash);
  }

  /** Simple expected-value holder: branch name plus commit hash. */
  private static class Branch {
    final String name;
    final Hash hash;

    private Branch(String name, Hash hash) {
      this.name = name;
      this.hash = hash;
    }

    @Override
    public String toString() {
      return name;
    }
  }
}
| ivan-fedorov/intellij-community | plugins/git4idea/tests/git4idea/repo/GitRepositoryReaderTest.java | Java | apache-2.0 | 5,931 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.optim.nonlinear.scalar;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.optim.OptimizationData;
/**
 * Gradient of the scalar function to be optimized.
 *
 * @version $Id$
 * @since 3.1
 */
public class ObjectiveFunctionGradient implements OptimizationData {
    /** Function to be optimized. */
    private final MultivariateVectorFunction gradient;

    /**
     * Creates the optimization-data wrapper around the given gradient.
     *
     * @param g Gradient of the function to be optimized.
     */
    public ObjectiveFunctionGradient(final MultivariateVectorFunction g) {
        this.gradient = g;
    }

    /**
     * Gets the gradient of the function to be optimized.
     *
     * @return the objective function gradient.
     */
    public MultivariateVectorFunction getObjectiveFunctionGradient() {
        return this.gradient;
    }
}
| charles-cooper/idylfin | src/org/apache/commons/math3/optim/nonlinear/scalar/ObjectiveFunctionGradient.java | Java | apache-2.0 | 1,648 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.codeStyle.arrangement;
import com.intellij.lang.Language;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.FoldingModel;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.arrangement.engine.ArrangementEngine;
import com.intellij.psi.codeStyle.arrangement.group.ArrangementGroupingRule;
import com.intellij.psi.codeStyle.arrangement.match.ArrangementSectionRule;
import com.intellij.psi.codeStyle.arrangement.match.StdArrangementEntryMatcher;
import com.intellij.psi.codeStyle.arrangement.match.StdArrangementMatchRule;
import com.intellij.psi.codeStyle.arrangement.model.ArrangementAtomMatchCondition;
import com.intellij.psi.codeStyle.arrangement.model.ArrangementMatchCondition;
import com.intellij.psi.codeStyle.arrangement.std.*;
import com.intellij.testFramework.fixtures.LightPlatformCodeInsightFixtureTestCase;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.intellij.psi.codeStyle.arrangement.std.StdArrangementTokens.Order.KEEP;
/**
* @author Denis Zhdanov
* @since 20.07.2012
*/
public abstract class AbstractRearrangerTest extends LightPlatformCodeInsightFixtureTestCase {
  // Handlers that recognize rich-text markup tags embedded in test data
  // (range markers and folding markers).
  private static final RichTextHandler[] RICH_TEXT_HANDLERS = {new RangeHandler(), new FoldingHandler()};
  // Matches "key=value" attribute pairs inside a rich-text opening tag.
  private static final Pattern ATTRIBUTE_PATTERN = Pattern.compile("([^\\s]+)=([^\\s]+)");

  // File type of the fixture text; set by concrete subclasses.
  protected FileType fileType;
  // Language whose code style settings drive the arrangement.
  protected Language language;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Install temporary settings so the test cannot leak code style changes.
    CodeStyleSettingsManager.getInstance(myFixture.getProject()).setTemporarySettings(new CodeStyleSettings());
  }

  @Override
  protected void tearDown() throws Exception {
    // Drop the temporary settings installed in setUp() before fixture teardown.
    CodeStyleSettingsManager.getInstance(myFixture.getProject()).dropTemporarySettings();
    super.tearDown();
  }

  /** Returns the common code style settings for {@link #language}. */
  @NotNull
  protected CommonCodeStyleSettings getCommonSettings() {
    return CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings().getCommonSettings(language);
  }

  /** Creates a section rule without start/end comments. */
  protected static ArrangementSectionRule section(@NotNull StdArrangementMatchRule... rules) {
    return section(null, null, rules);
  }

  /** Creates a section rule delimited by optional start/end comment text. */
  protected static ArrangementSectionRule section(@Nullable String start, @Nullable String end, @NotNull StdArrangementMatchRule... rules) {
    return ArrangementSectionRule.create(start, end, rules);
  }

  /** Creates a rule-alias token with the given id backed by the given rules. */
  protected static StdArrangementRuleAliasToken alias(@NotNull String id, @NotNull StdArrangementMatchRule... rules) {
    return new StdArrangementRuleAliasToken(id, id, ContainerUtil.newArrayList(rules));
  }

  /** Creates a grouping rule with the default KEEP order. */
  @NotNull
  protected static ArrangementGroupingRule group(@NotNull ArrangementSettingsToken type) {
    return group(type, KEEP);
  }

  /** Creates a grouping rule with an explicit order token. */
  @NotNull
  protected static ArrangementGroupingRule group(@NotNull ArrangementSettingsToken type, @NotNull ArrangementSettingsToken order) {
    return new ArrangementGroupingRule(type, order);
  }

  /** Creates a match rule from a single settings token. */
  @NotNull
  protected static StdArrangementMatchRule rule(@NotNull ArrangementSettingsToken token) {
    return new StdArrangementMatchRule(new StdArrangementEntryMatcher(atom(token)));
  }

  /**
   * Creates a match rule from a name regexp plus optional extra tokens; the
   * conditions are combined into a single composite condition when tokens are
   * present.
   */
  @NotNull
  protected static StdArrangementMatchRule nameRule(@NotNull String nameFilter, @NotNull ArrangementSettingsToken... tokens) {
    if (tokens.length == 0) {
      return new StdArrangementMatchRule(new StdArrangementEntryMatcher(atom(nameFilter)));
    }
    else {
      ArrangementAtomMatchCondition[] conditions = new ArrangementAtomMatchCondition[tokens.length + 1];
      conditions[0] = atom(nameFilter);
      for (int i = 0; i < tokens.length; i++) conditions[i + 1] = atom(tokens[i]);
      ArrangementMatchCondition compositeCondition = ArrangementUtil.combine(conditions);
      return new StdArrangementMatchRule(new StdArrangementEntryMatcher(compositeCondition));
    }
  }

  /** Creates a match rule from several settings tokens. */
  @NotNull
  protected static StdArrangementMatchRule rule(@NotNull ArrangementSettingsToken... conditions) {
    return rule(ContainerUtil.map(conditions, it -> atom(it)));
  }

  /** Creates a match rule from a list of atom conditions. */
  @NotNull
  protected static StdArrangementMatchRule rule(@NotNull List<ArrangementAtomMatchCondition> conditions) {
    return rule(conditions.toArray(new ArrangementAtomMatchCondition[conditions.size()]));
  }

  /** Creates a match rule by combining atom conditions into one composite. */
  @NotNull
  protected static StdArrangementMatchRule rule(@NotNull ArrangementAtomMatchCondition... conditions) {
    ArrangementMatchCondition compositeCondition = ArrangementUtil.combine(conditions);
    return new StdArrangementMatchRule(new StdArrangementEntryMatcher(compositeCondition));
  }

  /** Re-wraps a rule with an explicit sort-order token. */
  @NotNull
  protected static StdArrangementMatchRule ruleWithOrder(@NotNull ArrangementSettingsToken orderType, @NotNull StdArrangementMatchRule rule) {
    return new StdArrangementMatchRule(rule.getMatcher(), orderType);
  }

  /** Creates an atom condition for a settings token. */
  @NotNull
  protected static ArrangementAtomMatchCondition atom(@NotNull ArrangementSettingsToken token) {
    return new ArrangementAtomMatchCondition(token);
  }

  /** Creates an atom condition with an explicit included/excluded flag. */
  protected static ArrangementAtomMatchCondition atom(@NotNull ArrangementSettingsToken token, boolean included) {
    return new ArrangementAtomMatchCondition(token, included);
  }

  /** Creates a name-regexp atom condition. */
  @NotNull
  protected static ArrangementAtomMatchCondition atom(@NotNull String nameFilter) {
    return new ArrangementAtomMatchCondition(StdArrangementTokens.Regexp.NAME, nameFilter);
  }

  /**
   * Drives a whole arrangement scenario described by a map of named args:
   * "initial"/"expected" text (possibly with rich-text markup), optional
   * explicit "ranges", grouping "groups", match "rules" and rule "aliases".
   * Configures a fixture, applies the ArrangementEngine, then compares the
   * resulting text and collapsed fold regions with the expectation.
   */
  protected void doTest(@NotNull Map<String, ?> args) {
    String text = (String)args.get("initial");
    String expected = (String)args.get("expected");
    @SuppressWarnings("unchecked") List<TextRange> ranges = (List<TextRange>)args.get("ranges");
    Info info = parse(text);
    // Ranges may come either from the args map or from markup in the text,
    // but not both.
    if (!isEmpty(ranges) && !isEmpty(info.ranges)) {
      fail("Duplicate ranges set: explicit: " + ranges + ", " + "derived: " + info.ranges + ", text:\n" + text);
    }
    if (isEmpty(info.ranges)) {
      // Default to arranging the whole document.
      info.ranges = !isEmpty(ranges) ? ranges : Arrays.asList(TextRange.from(0, text.length()));
    }
    myFixture.configureByText(fileType, info.text);
    // Materialize the fold regions described by <fold> markup, collapsed.
    final FoldingModel foldingModel = myFixture.getEditor().getFoldingModel();
    for (final FoldingInfo foldingInfo : info.foldings) {
      foldingModel.runBatchFoldingOperation(() -> {
        FoldRegion region = foldingModel.addFoldRegion(foldingInfo.start, foldingInfo.end, foldingInfo.placeholder);
        if (region != null) region.setExpanded(false);
      });
    }
    @SuppressWarnings("unchecked") List<ArrangementGroupingRule> groupingRules = (List<ArrangementGroupingRule>)args.get("groups");
    if (groupingRules == null) groupingRules = Collections.emptyList();
    List<?> rules = (List<?>)args.get("rules");
    List<ArrangementSectionRule> sectionRules = getSectionRules(rules);
    @SuppressWarnings("unchecked")
    List<StdArrangementRuleAliasToken> aliases = (List<StdArrangementRuleAliasToken>)args.get("aliases");
    CommonCodeStyleSettings settings = CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings().getCommonSettings(language);
    final StdArrangementSettings arrangementSettings =
      aliases == null ?
      new StdArrangementSettings(groupingRules, sectionRules) :
      new StdArrangementExtendableSettings(groupingRules, sectionRules, aliases);
    settings.setArrangementSettings(arrangementSettings);
    // Run the arrangement inside a command, as it modifies the document.
    ArrangementEngine engine = ServiceManager.getService(myFixture.getProject(), ArrangementEngine.class);
    CommandProcessor.getInstance().executeCommand(getProject(), ()-> engine.arrange(myFixture.getEditor(), myFixture.getFile(), info.ranges), null, null);
    // Check expectation.
    Info after = parse(expected);
    assertEquals(after.text, myFixture.getEditor().getDocument().getText());
    for (FoldingInfo it : after.foldings) {
      FoldRegion foldRegion = foldingModel.getCollapsedRegionAtOffset(it.start);
      assertNotNull("Expected to find fold region at offset " + it.start, foldRegion);
      assertEquals(it.end, foldRegion.getEndOffset());
    }
  }

  /**
   * Normalizes the "rules" arg: plain match rules are wrapped into
   * single-rule sections; existing section rules pass through unchanged.
   */
  protected List<ArrangementSectionRule> getSectionRules(List<?> rules) {
    List<ArrangementSectionRule> sectionRules = Collections.emptyList();
    if (rules != null) sectionRules = ContainerUtil.map(rules, (Function<Object, ArrangementSectionRule>)o -> o instanceof ArrangementSectionRule ? (ArrangementSectionRule)o : ArrangementSectionRule.create((StdArrangementMatchRule)o));
    return sectionRules;
  }

  /** Null-safe emptiness check. */
  private static boolean isEmpty(Collection<?> collection) {
    return collection == null || collection.isEmpty();
  }

  /**
   * Strips rich-text markup from test data. Repeatedly finds the earliest
   * opening tag of any registered handler, parses its attributes, lets the
   * handler record the information (ranges/foldings) against the offsets the
   * text will have after the tags are removed, then deletes both tags from
   * the buffer. Returns the cleaned text plus the collected metadata.
   */
  @NotNull
  private static Info parse(@NotNull String text) {
    Info result = new Info();
    StringBuilder buffer = new StringBuilder(text);
    int offset = 0;
    while (offset < buffer.length()) {
      // Pick the handler whose opening tag occurs first from `offset`.
      RichTextHandler handler = null;
      int richTextMarkStart = -1;
      for (RichTextHandler h : RICH_TEXT_HANDLERS) {
        int i = buffer.indexOf("<" + h.getMarker(), offset);
        if (i >= 0 && (handler == null || i < richTextMarkStart)) {
          richTextMarkStart = i;
          handler = h;
        }
      }
      if (handler == null) break;

      String marker = handler.getMarker();
      int attrStart = richTextMarkStart + marker.length() + 1;
      int openingTagEnd = buffer.indexOf(">", richTextMarkStart);
      int openTagLength = openingTagEnd - richTextMarkStart + 1;
      Map<String, String> attributes = parseAttributes(buffer.substring(attrStart, openingTagEnd));
      String closingTag = "</" + marker + ">";
      int closingTagStart = buffer.indexOf(closingTag);
      assert closingTagStart > 0;
      // End offset is adjusted for the opening tag that is about to be removed.
      handler.handle(result, attributes, richTextMarkStart, closingTagStart - openTagLength);
      // Delete closing tag first so the opening-tag offsets stay valid.
      buffer.delete(closingTagStart, closingTagStart + closingTag.length());
      buffer.delete(richTextMarkStart, openingTagEnd + 1);
      offset = closingTagStart - openTagLength;
    }
    result.text = buffer.toString();
    return result;
  }

  /** Parses "key=value" pairs, preserving their order of appearance. */
  @NotNull
  private static Map<String, String> parseAttributes(@NotNull String text) {
    if (text.isEmpty()) return Collections.emptyMap();
    Matcher matcher = ATTRIBUTE_PATTERN.matcher(text);
    Map<String, String> result = ContainerUtil.newLinkedHashMap();
    while (matcher.find()) result.put(matcher.group(1), matcher.group(2));
    return result;
  }
}
| ThiagoGarciaAlves/intellij-community | platform/testFramework/src/com/intellij/psi/codeStyle/arrangement/AbstractRearrangerTest.java | Java | apache-2.0 | 11,297 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.classloading.jar;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.TypeSerializerSingleton;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import org.apache.flink.test.util.SuccessException;
import org.apache.flink.util.Collector;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
/**
* Test class used by the {@link org.apache.flink.test.classloading.ClassLoaderITCase}.
*/
public class CheckpointingCustomKvStateProgram {

  /**
   * Entry point. args[0] is the checkpoint directory URI, args[1] the output
   * path. Builds a checkpointed streaming job whose keyed state uses a custom
   * serializer, forces one failure/restore cycle, and terminates by throwing
   * SuccessException once the restore has been observed.
   */
  public static void main(String[] args) throws Exception {
    final String checkpointPath = args[0];
    final String outputPath = args[1];
    final int parallelism = 1;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);
    env.getConfig().disableSysoutLogging();
    // Checkpoint frequently so a completed checkpoint exists before the
    // intended failure is triggered.
    env.enableCheckpointing(100);
    // Exactly one restart attempt: the job must recover from the deliberate
    // failure and then finish via SuccessException.
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1, 1000));
    env.setStateBackend(new FsStateBackend(checkpointPath));

    DataStream<Integer> source = env.addSource(new InfiniteIntegerSource());
    source
        .map(new MapFunction<Integer, Tuple2<Integer, Integer>>() {
          private static final long serialVersionUID = 1L;

          @Override
          public Tuple2<Integer, Integer> map(Integer value) throws Exception {
            // Assign a random key in [0, parallelism) to each element.
            return new Tuple2<>(ThreadLocalRandom.current().nextInt(parallelism), value);
          }
        })
        .keyBy(new KeySelector<Tuple2<Integer, Integer>, Integer>() {
          private static final long serialVersionUID = 1L;

          @Override
          public Integer getKey(Tuple2<Integer, Integer> value) throws Exception {
            return value.f0;
          }
        }).flatMap(new ReducingStateFlatMap()).writeAsText(outputPath, FileSystem.WriteMode.OVERWRITE);

    env.execute();
  }

  /**
   * Source that emits an endless sequence of increasing integers, emitting
   * under the checkpoint lock so emission and checkpoints do not interleave.
   */
  private static class InfiniteIntegerSource implements ParallelSourceFunction<Integer>, ListCheckpointed<Integer> {
    private static final long serialVersionUID = -7517574288730066280L;

    private volatile boolean running = true;

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
      int counter = 0;
      while (running) {
        synchronized (ctx.getCheckpointLock()) {
          ctx.collect(counter++);
        }
      }
    }

    @Override
    public void cancel() {
      running = false;
    }

    @Override
    public List<Integer> snapshotState(long checkpointId, long timestamp) throws Exception {
      // The emitted counter value is not restored; a constant placeholder
      // is snapshotted instead.
      return Collections.singletonList(0);
    }

    @Override
    public void restoreState(List<Integer> state) throws Exception {
    }
  }

  /**
   * Stateful function that sums inputs into a ReducingState backed by the
   * custom serializer. After the first completed checkpoint it throws a
   * RuntimeException to force a restore; after the restore it throws
   * SuccessException to signal test success.
   */
  private static class ReducingStateFlatMap extends RichFlatMapFunction<Tuple2<Integer, Integer>, Integer>
      implements ListCheckpointed<ReducingStateFlatMap>, CheckpointListener {

    private static final long serialVersionUID = -5939722892793950253L;
    private transient ReducingState<Integer> kvState;

    // Set once notifyCheckpointComplete has fired (or after restore).
    private boolean atLeastOneSnapshotComplete = false;
    // Set only by restoreState, i.e. only after a recovery.
    private boolean restored = false;

    @Override
    public void open(Configuration parameters) throws Exception {
      ReducingStateDescriptor<Integer> stateDescriptor =
          new ReducingStateDescriptor<>(
              "reducing-state",
              new ReduceSum(),
              CustomIntSerializer.INSTANCE);

      this.kvState = getRuntimeContext().getReducingState(stateDescriptor);
    }

    @Override
    public void flatMap(Tuple2<Integer, Integer> value, Collector<Integer> out) throws Exception {
      kvState.add(value.f1);

      if (atLeastOneSnapshotComplete) {
        if (restored) {
          throw new SuccessException();
        } else {
          throw new RuntimeException("Intended failure, to trigger restore");
        }
      }
    }

    @Override
    public List<ReducingStateFlatMap> snapshotState(long checkpointId, long timestamp) throws Exception {
      return Collections.singletonList(this);
    }

    @Override
    public void restoreState(List<ReducingStateFlatMap> state) throws Exception {
      restored = true;
      atLeastOneSnapshotComplete = true;
    }

    @Override
    public void notifyCheckpointComplete(long checkpointId) throws Exception {
      atLeastOneSnapshotComplete = true;
    }

    /** Integer addition used as the reduce function for the state. */
    private static class ReduceSum implements ReduceFunction<Integer> {
      private static final long serialVersionUID = 1L;

      @Override
      public Integer reduce(Integer value1, Integer value2) throws Exception {
        return value1 + value2;
      }
    }
  }

  /**
   * Custom fixed-length (4 byte) serializer for Integer. Exists so the job
   * exercises user-code classloading of a custom TypeSerializer in state.
   */
  private static final class CustomIntSerializer extends TypeSerializerSingleton<Integer> {

    private static final long serialVersionUID = 4572452915892737448L;

    public static final TypeSerializer<Integer> INSTANCE = new CustomIntSerializer();

    @Override
    public boolean isImmutableType() {
      return true;
    }

    @Override
    public Integer createInstance() {
      return 0;
    }

    @Override
    public Integer copy(Integer from) {
      return from;
    }

    @Override
    public Integer copy(Integer from, Integer reuse) {
      return from;
    }

    @Override
    public int getLength() {
      return 4;
    }

    @Override
    public void serialize(Integer record, DataOutputView target) throws IOException {
      target.writeInt(record.intValue());
    }

    @Override
    public Integer deserialize(DataInputView source) throws IOException {
      return Integer.valueOf(source.readInt());
    }

    @Override
    public Integer deserialize(Integer reuse, DataInputView source) throws IOException {
      return Integer.valueOf(source.readInt());
    }

    @Override
    public void copy(DataInputView source, DataOutputView target) throws IOException {
      target.writeInt(source.readInt());
    }

    @Override
    public boolean canEqual(Object obj) {
      return obj instanceof CustomIntSerializer;
    }
  }
}
| zimmermatt/flink | flink-tests/src/test/java/org/apache/flink/test/classloading/jar/CheckpointingCustomKvStateProgram.java | Java | apache-2.0 | 7,468 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.common.operators;
//CHECKSTYLE.OFF: AvoidStarImport - Needed for TupleGenerator
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.LocalCollectionOutputFormat;
import org.apache.flink.api.java.operators.DeltaIteration;
import org.apache.flink.api.java.operators.IterativeDataSet;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.junit.Test;
@SuppressWarnings("serial")
@SuppressWarnings("serial")
public class CollectionExecutionIterationTest implements java.io.Serializable {

  /**
   * Bulk iteration on the collection-based execution backend: starting from 1,
   * each of 10 supersteps adds the superstep number, so the result is
   * 1 + (1 + 2 + ... + 10) = 56.
   */
  @Test
  public void testBulkIteration() {
    try {
      ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

      IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(10);

      DataSet<Integer> result = iteration.closeWith(iteration.map(new AddSuperstepNumberMapper()));

      List<Integer> collected = new ArrayList<Integer>();
      result.output(new LocalCollectionOutputFormat<Integer>(collected));

      env.execute();

      assertEquals(1, collected.size());
      assertEquals(56, collected.get(0).intValue());
    }
    catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

  /**
   * Bulk iteration terminated by a criterion instead of the max iteration
   * count: the loop stops once no element is below 50, which again yields 56
   * (the first partial sum 1 + 1 + ... + k to exceed the threshold).
   */
  @Test
  public void testBulkIterationWithTerminationCriterion() {
    try {
      ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

      IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);

      DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

      DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
        public boolean filter(Integer value) {
          return value < 50;
        }
      });

      List<Integer> collected = new ArrayList<Integer>();

      iteration.closeWith(iterationResult, terminationCriterion)
          .output(new LocalCollectionOutputFormat<Integer>(collected));

      env.execute();

      assertEquals(1, collected.size());
      assertEquals(56, collected.get(0).intValue());
    }
    catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

  /**
   * Delta iteration: increments the second tuple field of each solution-set
   * entry once per superstep while the entry's key remains in the workset,
   * until both fields are equal.
   */
  @Test
  public void testDeltaIteration() {
    try {
      ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();

      @SuppressWarnings("unchecked")
      DataSet<Tuple2<Integer, Integer>> solInput = env.fromElements(
          new Tuple2<Integer, Integer>(1, 0),
          new Tuple2<Integer, Integer>(2, 0),
          new Tuple2<Integer, Integer>(3, 0),
          new Tuple2<Integer, Integer>(4, 0));

      @SuppressWarnings("unchecked")
      DataSet<Tuple1<Integer>> workInput = env.fromElements(
          new Tuple1<Integer>(1),
          new Tuple1<Integer>(2),
          new Tuple1<Integer>(3),
          new Tuple1<Integer>(4));

      // Perform a delta iteration where we add those values to the workset where
      // the second tuple field is smaller than the first tuple field.
      // At the end both tuple fields must be the same.

      DeltaIteration<Tuple2<Integer, Integer>, Tuple1<Integer>> iteration =
          solInput.iterateDelta(workInput, 10, 0);

      DataSet<Tuple2<Integer, Integer>> solDelta = iteration.getSolutionSet().join(
          iteration.getWorkset()).where(0).equalTo(0).with(
          new JoinFunction<Tuple2<Integer, Integer>, Tuple1<Integer>, Tuple2<Integer, Integer>>() {
            @Override
            public Tuple2<Integer, Integer> join(Tuple2<Integer, Integer> first,
                Tuple1<Integer> second) throws Exception {
              return new Tuple2<Integer, Integer>(first.f0, first.f1 + 1);
            }
          });

      DataSet<Tuple1<Integer>> nextWorkset = solDelta.flatMap(
          new FlatMapFunction<Tuple2<Integer, Integer>, Tuple1<Integer>>() {
            @Override
            public void flatMap(Tuple2<Integer, Integer> in, Collector<Tuple1<Integer>>
                out) throws Exception {
              // Keep iterating on keys whose counter has not caught up yet.
              if (in.f1 < in.f0) {
                out.collect(new Tuple1<Integer>(in.f0));
              }
            }
          });

      List<Tuple2<Integer, Integer>> collected = new ArrayList<Tuple2<Integer, Integer>>();

      iteration.closeWith(solDelta, nextWorkset)
          .output(new LocalCollectionOutputFormat<Tuple2<Integer, Integer>>(collected));

      env.execute();

      // verify that both tuple fields are now the same
      for (Tuple2<Integer, Integer> t: collected) {
        assertEquals(t.f0, t.f1);
      }
    }
    catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

  /** Adds the current superstep number to each value. */
  public static class AddSuperstepNumberMapper extends RichMapFunction<Integer, Integer> {

    @Override
    public Integer map(Integer value) {
      int superstep = getIterationRuntimeContext().getSuperstepNumber();
      return value + superstep;
    }
  }
}
| WangTaoTheTonic/flink | flink-java/src/test/java/org/apache/flink/api/common/operators/CollectionExecutionIterationTest.java | Java | apache-2.0 | 5,782 |
<?php
final class PhabricatorPeopleProfileEditController
  extends PhabricatorPeopleProfileController {

  public function handleRequest(AphrontRequest $request) {
    $viewer = $this->getViewer();
    $user_id = $request->getURIData('id');

    // Load the target user, requiring both view and edit capability so that
    // only users who may actually modify this profile get any further.
    $target = id(new PhabricatorPeopleQuery())
      ->setViewer($viewer)
      ->withIDs(array($user_id))
      ->needProfileImage(true)
      ->requireCapabilities(
        array(
          PhabricatorPolicyCapability::CAN_VIEW,
          PhabricatorPolicyCapability::CAN_EDIT,
        ))
      ->executeOne();
    if (!$target) {
      return new Aphront404Response();
    }

    $this->setUser($target);

    // Where cancel buttons and a successful save both lead.
    $cancel_uri = $this->getApplicationURI("manage/{$user_id}/");

    // Collect the editable custom profile fields and hydrate them.
    $fields = PhabricatorCustomField::getObjectFields(
      $target,
      PhabricatorCustomField::ROLE_EDIT);
    $fields
      ->setViewer($viewer)
      ->readFieldsFromStorage($target);

    $validation_exception = null;
    if ($request->isFormPost()) {
      $xactions = $fields->buildFieldTransactionsFromRequest(
        new PhabricatorUserTransaction(),
        $request);

      $editor = id(new PhabricatorUserTransactionEditor())
        ->setActor($viewer)
        ->setContentSourceFromRequest($request)
        ->setContinueOnNoEffect(true);

      try {
        $editor->applyTransactions($target, $xactions);

        return id(new AphrontRedirectResponse())->setURI($cancel_uri);
      } catch (PhabricatorApplicationTransactionValidationException $ex) {
        // Fall through and redisplay the form with validation errors.
        $validation_exception = $ex;
      }
    }

    $title = pht('Edit Profile');

    $profile_form = id(new AphrontFormView())
      ->setUser($viewer);
    $fields->appendFieldsToForm($profile_form);
    $profile_form
      ->appendChild(
        id(new AphrontFormSubmitControl())
          ->addCancelButton($cancel_uri)
          ->setValue(pht('Save Profile')));

    // On public installs, warn the editor that profile data is visible to
    // logged-out users.
    $public_note = null;
    if (PhabricatorEnv::getEnvConfig('policy.allow-public')) {
      $public_note = id(new PHUIInfoView())
        ->setSeverity(PHUIInfoView::SEVERITY_WARNING)
        ->appendChild(pht(
          'Information on user profiles on this install is publicly '.
          'visible.'));
    }

    $form_box = id(new PHUIObjectBoxView())
      ->setHeaderText(pht('Profile'))
      ->setValidationException($validation_exception)
      ->setBackground(PHUIObjectBoxView::BLUE_PROPERTY)
      ->setForm($profile_form);

    $crumbs = $this->buildApplicationCrumbs();
    $crumbs->addTextCrumb(pht('Edit Profile'));
    $crumbs->setBorder(true);

    $nav = $this->newNavigation(
      $target,
      PhabricatorPeopleProfileMenuEngine::ITEM_MANAGE);

    $header = id(new PHUIHeaderView())
      ->setHeader(pht('Edit Profile: %s', $target->getFullName()))
      ->setHeaderIcon('fa-pencil');

    $view = id(new PHUITwoColumnView())
      ->setHeader($header)
      ->setFooter(array(
        $public_note,
        $form_box,
      ));

    return $this->newPage()
      ->setTitle($title)
      ->setCrumbs($crumbs)
      ->setNavigation($nav)
      ->appendChild($view);
  }

}
| phacility/phabricator | src/applications/people/controller/PhabricatorPeopleProfileEditController.php | PHP | apache-2.0 | 3,029 |
// Copyright 2015 RedHat, Inc.
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sdjournal
import (
"os"
"testing"
"time"
"github.com/coreos/go-systemd/journal"
)
// TestJournalFollow writes test entries to the journal from a background
// goroutine while synchronously following the reader, and expects Follow to
// return ErrExpired when the timeout elapses.
func TestJournalFollow(t *testing.T) {
	r, err := NewJournalReader(JournalReaderConfig{
		Since: time.Duration(-15) * time.Second,
		Matches: []Match{
			{
				Field: SD_JOURNAL_FIELD_SYSTEMD_UNIT,
				Value: "NetworkManager.service",
			},
		},
	})

	if err != nil {
		t.Fatalf("Error opening journal: %s", err)
	}

	if r == nil {
		t.Fatal("Got a nil reader")
	}

	defer r.Close()

	// start writing some test entries
	done := make(chan struct{}, 1)
	defer close(done)
	go func() {
		for {
			select {
			case <-done:
				return
			default:
				// Use a goroutine-local error and t.Errorf here: the original
				// code called t.Fatalf from this goroutine, but Fatalf invokes
				// FailNow, which must only run on the goroutine executing the
				// test function. It also assigned to the outer `err`, racing
				// with the r.Follow assignment below.
				if perr := journal.Print(journal.PriInfo, "test message %s", time.Now()); perr != nil {
					t.Errorf("Error writing to journal: %s", perr)
					return
				}

				time.Sleep(time.Second)
			}
		}
	}()

	// and follow the reader synchronously
	timeout := time.Duration(5) * time.Second
	if err = r.Follow(time.After(timeout), os.Stdout); err != ErrExpired {
		t.Fatalf("Error during follow: %s", err)
	}
}
// TestJournalGetUsage opens a journal handle and verifies that querying the
// journal's disk usage succeeds; no particular size value is asserted.
func TestJournalGetUsage(t *testing.T) {
	j, err := NewJournal()
	if err != nil {
		t.Fatalf("Error opening journal: %s", err)
	}
	if j == nil {
		t.Fatal("Got a nil journal")
	}
	defer j.Close()

	if _, err = j.GetUsage(); err != nil {
		t.Fatalf("Error getting journal size: %s", err)
	}
}
| jgsqware/clairctl | vendor/github.com/coreos/go-systemd/sdjournal/journal_test.go | GO | apache-2.0 | 1,951 |
// Code generated by "go generate gonum.org/v1/gonum/unit"; DO NOT EDIT.
// Copyright ©2019 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package unit
import (
"fmt"
"testing"
)
// TestCapacitanceFormat checks fmt verb handling for the Capacitance type:
// default and precision/width variants of %v, the %#v Go-syntax form, and
// the error text produced for the unsupported %s verb.
// NOTE: this file is generated; changes should go through the generator.
func TestCapacitanceFormat(t *testing.T) {
	for _, test := range []struct {
		value  Capacitance
		format string
		want   string
	}{
		{1.23456789, "%v", "1.23456789 F"},
		{1.23456789, "%.1v", "1 F"},
		{1.23456789, "%20.1v", "                 1 F"},
		{1.23456789, "%20v", "        1.23456789 F"},
		{1.23456789, "%1v", "1.23456789 F"},
		{1.23456789, "%#v", "unit.Capacitance(1.23456789)"},
		{1.23456789, "%s", "%!s(unit.Capacitance=1.23456789 F)"},
	} {
		got := fmt.Sprintf(test.format, test.value)
		if got != test.want {
			t.Errorf("Format %q %v: got: %q want: %q", test.format, float64(test.value), got, test.want)
		}
	}
}
| pweil-/origin | vendor/gonum.org/v1/gonum/unit/capacitance_test.go | GO | apache-2.0 | 916 |
/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// DO NOT EDIT. THIS FILE IS AUTO-GENERATED BY $KUBEROOT/hack/update-generated-deep-copies.sh.
package extensions
import (
time "time"
api "k8s.io/kubernetes/pkg/api"
resource "k8s.io/kubernetes/pkg/api/resource"
unversioned "k8s.io/kubernetes/pkg/api/unversioned"
conversion "k8s.io/kubernetes/pkg/conversion"
util "k8s.io/kubernetes/pkg/util"
inf "speter.net/go/exp/math/dec/inf"
)
// deepCopy_api_AWSElasticBlockStoreVolumeSource copies all (scalar) fields of
// in into out. Part of an auto-generated deep-copy file; do not edit by hand.
func deepCopy_api_AWSElasticBlockStoreVolumeSource(in api.AWSElasticBlockStoreVolumeSource, out *api.AWSElasticBlockStoreVolumeSource, c *conversion.Cloner) error {
	out.VolumeID = in.VolumeID
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}
// deepCopy_api_Capabilities deep-copies in into out; the Add and Drop slices
// are reallocated so out shares no backing arrays with in. Auto-generated.
func deepCopy_api_Capabilities(in api.Capabilities, out *api.Capabilities, c *conversion.Cloner) error {
	if in.Add != nil {
		out.Add = make([]api.Capability, len(in.Add))
		for i := range in.Add {
			out.Add[i] = in.Add[i]
		}
	} else {
		out.Add = nil
	}
	if in.Drop != nil {
		out.Drop = make([]api.Capability, len(in.Drop))
		for i := range in.Drop {
			out.Drop[i] = in.Drop[i]
		}
	} else {
		out.Drop = nil
	}
	return nil
}
// deepCopy_api_CephFSVolumeSource deep-copies in into out: the Monitors slice
// is reallocated and the optional SecretRef pointer gets a freshly allocated
// target. Auto-generated.
func deepCopy_api_CephFSVolumeSource(in api.CephFSVolumeSource, out *api.CephFSVolumeSource, c *conversion.Cloner) error {
	if in.Monitors != nil {
		out.Monitors = make([]string, len(in.Monitors))
		for i := range in.Monitors {
			out.Monitors[i] = in.Monitors[i]
		}
	} else {
		out.Monitors = nil
	}
	out.User = in.User
	out.SecretFile = in.SecretFile
	if in.SecretRef != nil {
		out.SecretRef = new(api.LocalObjectReference)
		if err := deepCopy_api_LocalObjectReference(*in.SecretRef, out.SecretRef, c); err != nil {
			return err
		}
	} else {
		out.SecretRef = nil
	}
	out.ReadOnly = in.ReadOnly
	return nil
}
// deepCopy_api_CinderVolumeSource copies all (scalar) fields of in into out.
// Auto-generated.
func deepCopy_api_CinderVolumeSource(in api.CinderVolumeSource, out *api.CinderVolumeSource, c *conversion.Cloner) error {
	out.VolumeID = in.VolumeID
	out.FSType = in.FSType
	out.ReadOnly = in.ReadOnly
	return nil
}
// deepCopy_api_Container deep-copies in into out. Slices (Command, Args,
// Ports, Env, VolumeMounts) are reallocated, nested structs are copied via
// their own deepCopy_* helpers, and optional pointer fields (probes,
// Lifecycle, SecurityContext) get freshly allocated targets. Auto-generated.
func deepCopy_api_Container(in api.Container, out *api.Container, c *conversion.Cloner) error {
	out.Name = in.Name
	out.Image = in.Image
	if in.Command != nil {
		out.Command = make([]string, len(in.Command))
		for i := range in.Command {
			out.Command[i] = in.Command[i]
		}
	} else {
		out.Command = nil
	}
	if in.Args != nil {
		out.Args = make([]string, len(in.Args))
		for i := range in.Args {
			out.Args[i] = in.Args[i]
		}
	} else {
		out.Args = nil
	}
	out.WorkingDir = in.WorkingDir
	if in.Ports != nil {
		out.Ports = make([]api.ContainerPort, len(in.Ports))
		for i := range in.Ports {
			if err := deepCopy_api_ContainerPort(in.Ports[i], &out.Ports[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Ports = nil
	}
	if in.Env != nil {
		out.Env = make([]api.EnvVar, len(in.Env))
		for i := range in.Env {
			if err := deepCopy_api_EnvVar(in.Env[i], &out.Env[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Env = nil
	}
	if err := deepCopy_api_ResourceRequirements(in.Resources, &out.Resources, c); err != nil {
		return err
	}
	if in.VolumeMounts != nil {
		out.VolumeMounts = make([]api.VolumeMount, len(in.VolumeMounts))
		for i := range in.VolumeMounts {
			if err := deepCopy_api_VolumeMount(in.VolumeMounts[i], &out.VolumeMounts[i], c); err != nil {
				return err
			}
		}
	} else {
		out.VolumeMounts = nil
	}
	if in.LivenessProbe != nil {
		out.LivenessProbe = new(api.Probe)
		if err := deepCopy_api_Probe(*in.LivenessProbe, out.LivenessProbe, c); err != nil {
			return err
		}
	} else {
		out.LivenessProbe = nil
	}
	if in.ReadinessProbe != nil {
		out.ReadinessProbe = new(api.Probe)
		if err := deepCopy_api_Probe(*in.ReadinessProbe, out.ReadinessProbe, c); err != nil {
			return err
		}
	} else {
		out.ReadinessProbe = nil
	}
	if in.Lifecycle != nil {
		out.Lifecycle = new(api.Lifecycle)
		if err := deepCopy_api_Lifecycle(*in.Lifecycle, out.Lifecycle, c); err != nil {
			return err
		}
	} else {
		out.Lifecycle = nil
	}
	out.TerminationMessagePath = in.TerminationMessagePath
	out.ImagePullPolicy = in.ImagePullPolicy
	if in.SecurityContext != nil {
		out.SecurityContext = new(api.SecurityContext)
		if err := deepCopy_api_SecurityContext(*in.SecurityContext, out.SecurityContext, c); err != nil {
			return err
		}
	} else {
		out.SecurityContext = nil
	}
	out.Stdin = in.Stdin
	out.StdinOnce = in.StdinOnce
	out.TTY = in.TTY
	return nil
}
// deepCopy_api_ContainerPort copies a ContainerPort; every field is a scalar,
// so plain assignment is a deep copy.
func deepCopy_api_ContainerPort(in api.ContainerPort, out *api.ContainerPort, c *conversion.Cloner) error {
	out.Name = in.Name
	out.HostPort = in.HostPort
	out.ContainerPort = in.ContainerPort
	out.Protocol = in.Protocol
	out.HostIP = in.HostIP
	return nil
}

// deepCopy_api_DownwardAPIVolumeFile copies a DownwardAPIVolumeFile,
// deep-copying the embedded FieldRef selector.
func deepCopy_api_DownwardAPIVolumeFile(in api.DownwardAPIVolumeFile, out *api.DownwardAPIVolumeFile, c *conversion.Cloner) error {
	out.Path = in.Path
	if err := deepCopy_api_ObjectFieldSelector(in.FieldRef, &out.FieldRef, c); err != nil {
		return err
	}
	return nil
}
// deepCopy_api_DownwardAPIVolumeSource copies a DownwardAPIVolumeSource,
// deep-copying each entry of the Items slice. A nil source slice stays nil.
func deepCopy_api_DownwardAPIVolumeSource(in api.DownwardAPIVolumeSource, out *api.DownwardAPIVolumeSource, c *conversion.Cloner) error {
	if in.Items == nil {
		out.Items = nil
		return nil
	}
	out.Items = make([]api.DownwardAPIVolumeFile, len(in.Items))
	for idx := range in.Items {
		if err := deepCopy_api_DownwardAPIVolumeFile(in.Items[idx], &out.Items[idx], c); err != nil {
			return err
		}
	}
	return nil
}
// deepCopy_api_EmptyDirVolumeSource copies an EmptyDirVolumeSource; Medium is
// the only field.
func deepCopy_api_EmptyDirVolumeSource(in api.EmptyDirVolumeSource, out *api.EmptyDirVolumeSource, c *conversion.Cloner) error {
	out.Medium = in.Medium
	return nil
}

// deepCopy_api_EnvVar copies an EnvVar, allocating a fresh EnvVarSource when
// ValueFrom is set; a nil pointer stays nil.
func deepCopy_api_EnvVar(in api.EnvVar, out *api.EnvVar, c *conversion.Cloner) error {
	out.Name = in.Name
	out.Value = in.Value
	if in.ValueFrom != nil {
		out.ValueFrom = new(api.EnvVarSource)
		if err := deepCopy_api_EnvVarSource(*in.ValueFrom, out.ValueFrom, c); err != nil {
			return err
		}
	} else {
		out.ValueFrom = nil
	}
	return nil
}

// deepCopy_api_EnvVarSource copies an EnvVarSource, cloning the optional
// FieldRef pointer.
func deepCopy_api_EnvVarSource(in api.EnvVarSource, out *api.EnvVarSource, c *conversion.Cloner) error {
	if in.FieldRef != nil {
		out.FieldRef = new(api.ObjectFieldSelector)
		if err := deepCopy_api_ObjectFieldSelector(*in.FieldRef, out.FieldRef, c); err != nil {
			return err
		}
	} else {
		out.FieldRef = nil
	}
	return nil
}
// deepCopy_api_ExecAction copies an ExecAction; the Command slice is cloned
// into a newly allocated backing array, and a nil source stays nil.
func deepCopy_api_ExecAction(in api.ExecAction, out *api.ExecAction, c *conversion.Cloner) error {
	if in.Command == nil {
		out.Command = nil
	} else {
		out.Command = make([]string, len(in.Command))
		copy(out.Command, in.Command)
	}
	return nil
}
// deepCopy_api_FCVolumeSource copies an FCVolumeSource, cloning the
// TargetWWNs slice and the optional Lun pointer; nil inputs stay nil.
func deepCopy_api_FCVolumeSource(in api.FCVolumeSource, out *api.FCVolumeSource, c *conversion.Cloner) error {
	if in.TargetWWNs == nil {
		out.TargetWWNs = nil
	} else {
		out.TargetWWNs = make([]string, len(in.TargetWWNs))
		copy(out.TargetWWNs, in.TargetWWNs)
	}
	if in.Lun == nil {
		out.Lun = nil
	} else {
		lun := *in.Lun
		out.Lun = &lun
	}
	out.FSType = in.FSType
	out.ReadOnly = in.ReadOnly
	return nil
}
// deepCopy_api_FlockerVolumeSource copies a FlockerVolumeSource (single
// scalar field).
func deepCopy_api_FlockerVolumeSource(in api.FlockerVolumeSource, out *api.FlockerVolumeSource, c *conversion.Cloner) error {
	out.DatasetName = in.DatasetName
	return nil
}

// deepCopy_api_GCEPersistentDiskVolumeSource copies a GCE PD source; all
// fields are scalars.
func deepCopy_api_GCEPersistentDiskVolumeSource(in api.GCEPersistentDiskVolumeSource, out *api.GCEPersistentDiskVolumeSource, c *conversion.Cloner) error {
	out.PDName = in.PDName
	out.FSType = in.FSType
	out.Partition = in.Partition
	out.ReadOnly = in.ReadOnly
	return nil
}

// deepCopy_api_GitRepoVolumeSource copies a GitRepoVolumeSource (scalar
// fields only).
func deepCopy_api_GitRepoVolumeSource(in api.GitRepoVolumeSource, out *api.GitRepoVolumeSource, c *conversion.Cloner) error {
	out.Repository = in.Repository
	out.Revision = in.Revision
	return nil
}

// deepCopy_api_GlusterfsVolumeSource copies a GlusterfsVolumeSource (scalar
// fields only).
func deepCopy_api_GlusterfsVolumeSource(in api.GlusterfsVolumeSource, out *api.GlusterfsVolumeSource, c *conversion.Cloner) error {
	out.EndpointsName = in.EndpointsName
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}

// deepCopy_api_HTTPGetAction copies an HTTPGetAction, deep-copying the
// IntOrString port value.
func deepCopy_api_HTTPGetAction(in api.HTTPGetAction, out *api.HTTPGetAction, c *conversion.Cloner) error {
	out.Path = in.Path
	if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
		return err
	}
	out.Host = in.Host
	out.Scheme = in.Scheme
	return nil
}

// deepCopy_api_Handler copies a Handler, allocating and deep-copying each of
// the optional Exec, HTTPGet and TCPSocket actions; nil pointers stay nil.
func deepCopy_api_Handler(in api.Handler, out *api.Handler, c *conversion.Cloner) error {
	if in.Exec != nil {
		out.Exec = new(api.ExecAction)
		if err := deepCopy_api_ExecAction(*in.Exec, out.Exec, c); err != nil {
			return err
		}
	} else {
		out.Exec = nil
	}
	if in.HTTPGet != nil {
		out.HTTPGet = new(api.HTTPGetAction)
		if err := deepCopy_api_HTTPGetAction(*in.HTTPGet, out.HTTPGet, c); err != nil {
			return err
		}
	} else {
		out.HTTPGet = nil
	}
	if in.TCPSocket != nil {
		out.TCPSocket = new(api.TCPSocketAction)
		if err := deepCopy_api_TCPSocketAction(*in.TCPSocket, out.TCPSocket, c); err != nil {
			return err
		}
	} else {
		out.TCPSocket = nil
	}
	return nil
}

// deepCopy_api_HostPathVolumeSource copies a HostPathVolumeSource (single
// scalar field).
func deepCopy_api_HostPathVolumeSource(in api.HostPathVolumeSource, out *api.HostPathVolumeSource, c *conversion.Cloner) error {
	out.Path = in.Path
	return nil
}

// deepCopy_api_ISCSIVolumeSource copies an ISCSIVolumeSource (scalar fields
// only).
func deepCopy_api_ISCSIVolumeSource(in api.ISCSIVolumeSource, out *api.ISCSIVolumeSource, c *conversion.Cloner) error {
	out.TargetPortal = in.TargetPortal
	out.IQN = in.IQN
	out.Lun = in.Lun
	out.FSType = in.FSType
	out.ReadOnly = in.ReadOnly
	return nil
}
// deepCopy_api_Lifecycle copies a Lifecycle, cloning the optional PostStart
// and PreStop handlers; nil pointers stay nil.
func deepCopy_api_Lifecycle(in api.Lifecycle, out *api.Lifecycle, c *conversion.Cloner) error {
	if in.PostStart != nil {
		out.PostStart = new(api.Handler)
		if err := deepCopy_api_Handler(*in.PostStart, out.PostStart, c); err != nil {
			return err
		}
	} else {
		out.PostStart = nil
	}
	if in.PreStop != nil {
		out.PreStop = new(api.Handler)
		if err := deepCopy_api_Handler(*in.PreStop, out.PreStop, c); err != nil {
			return err
		}
	} else {
		out.PreStop = nil
	}
	return nil
}

// deepCopy_api_LoadBalancerIngress copies a LoadBalancerIngress (scalar
// fields only).
func deepCopy_api_LoadBalancerIngress(in api.LoadBalancerIngress, out *api.LoadBalancerIngress, c *conversion.Cloner) error {
	out.IP = in.IP
	out.Hostname = in.Hostname
	return nil
}
// deepCopy_api_LoadBalancerStatus copies a LoadBalancerStatus, deep-copying
// each ingress point; a nil Ingress slice stays nil.
func deepCopy_api_LoadBalancerStatus(in api.LoadBalancerStatus, out *api.LoadBalancerStatus, c *conversion.Cloner) error {
	if in.Ingress == nil {
		out.Ingress = nil
		return nil
	}
	out.Ingress = make([]api.LoadBalancerIngress, len(in.Ingress))
	for idx := range in.Ingress {
		if err := deepCopy_api_LoadBalancerIngress(in.Ingress[idx], &out.Ingress[idx], c); err != nil {
			return err
		}
	}
	return nil
}
// deepCopy_api_LocalObjectReference copies a LocalObjectReference (single
// scalar field).
func deepCopy_api_LocalObjectReference(in api.LocalObjectReference, out *api.LocalObjectReference, c *conversion.Cloner) error {
	out.Name = in.Name
	return nil
}

// deepCopy_api_NFSVolumeSource copies an NFSVolumeSource (scalar fields only).
func deepCopy_api_NFSVolumeSource(in api.NFSVolumeSource, out *api.NFSVolumeSource, c *conversion.Cloner) error {
	out.Server = in.Server
	out.Path = in.Path
	out.ReadOnly = in.ReadOnly
	return nil
}

// deepCopy_api_ObjectFieldSelector copies an ObjectFieldSelector (scalar
// fields only).
func deepCopy_api_ObjectFieldSelector(in api.ObjectFieldSelector, out *api.ObjectFieldSelector, c *conversion.Cloner) error {
	out.APIVersion = in.APIVersion
	out.FieldPath = in.FieldPath
	return nil
}

// deepCopy_api_ObjectMeta copies an ObjectMeta: scalar identity fields by
// assignment, timestamps via deepCopy_unversioned_Time, optional pointers
// (DeletionTimestamp, DeletionGracePeriodSeconds) via fresh allocations, and
// the Labels/Annotations maps entry by entry. Nil pointers and maps stay nil.
func deepCopy_api_ObjectMeta(in api.ObjectMeta, out *api.ObjectMeta, c *conversion.Cloner) error {
	out.Name = in.Name
	out.GenerateName = in.GenerateName
	out.Namespace = in.Namespace
	out.SelfLink = in.SelfLink
	out.UID = in.UID
	out.ResourceVersion = in.ResourceVersion
	out.Generation = in.Generation
	if err := deepCopy_unversioned_Time(in.CreationTimestamp, &out.CreationTimestamp, c); err != nil {
		return err
	}
	if in.DeletionTimestamp != nil {
		out.DeletionTimestamp = new(unversioned.Time)
		if err := deepCopy_unversioned_Time(*in.DeletionTimestamp, out.DeletionTimestamp, c); err != nil {
			return err
		}
	} else {
		out.DeletionTimestamp = nil
	}
	if in.DeletionGracePeriodSeconds != nil {
		out.DeletionGracePeriodSeconds = new(int64)
		*out.DeletionGracePeriodSeconds = *in.DeletionGracePeriodSeconds
	} else {
		out.DeletionGracePeriodSeconds = nil
	}
	if in.Labels != nil {
		out.Labels = make(map[string]string)
		for key, val := range in.Labels {
			out.Labels[key] = val
		}
	} else {
		out.Labels = nil
	}
	if in.Annotations != nil {
		out.Annotations = make(map[string]string)
		for key, val := range in.Annotations {
			out.Annotations[key] = val
		}
	} else {
		out.Annotations = nil
	}
	return nil
}

// deepCopy_api_PersistentVolumeClaimVolumeSource copies a PVC volume source
// (scalar fields only).
func deepCopy_api_PersistentVolumeClaimVolumeSource(in api.PersistentVolumeClaimVolumeSource, out *api.PersistentVolumeClaimVolumeSource, c *conversion.Cloner) error {
	out.ClaimName = in.ClaimName
	out.ReadOnly = in.ReadOnly
	return nil
}
// deepCopy_api_PodSecurityContext copies a PodSecurityContext: host-namespace
// booleans by assignment, the optional SELinuxOptions via a recursive deep
// copy, the primitive pointers (RunAsUser, RunAsNonRoot, FSGroup) via fresh
// allocations, and the SupplementalGroups slice element by element. Nil
// pointers and slices stay nil.
func deepCopy_api_PodSecurityContext(in api.PodSecurityContext, out *api.PodSecurityContext, c *conversion.Cloner) error {
	out.HostNetwork = in.HostNetwork
	out.HostPID = in.HostPID
	out.HostIPC = in.HostIPC
	if in.SELinuxOptions != nil {
		out.SELinuxOptions = new(api.SELinuxOptions)
		if err := deepCopy_api_SELinuxOptions(*in.SELinuxOptions, out.SELinuxOptions, c); err != nil {
			return err
		}
	} else {
		out.SELinuxOptions = nil
	}
	if in.RunAsUser != nil {
		out.RunAsUser = new(int64)
		*out.RunAsUser = *in.RunAsUser
	} else {
		out.RunAsUser = nil
	}
	if in.RunAsNonRoot != nil {
		out.RunAsNonRoot = new(bool)
		*out.RunAsNonRoot = *in.RunAsNonRoot
	} else {
		out.RunAsNonRoot = nil
	}
	if in.SupplementalGroups != nil {
		out.SupplementalGroups = make([]int64, len(in.SupplementalGroups))
		for i := range in.SupplementalGroups {
			out.SupplementalGroups[i] = in.SupplementalGroups[i]
		}
	} else {
		out.SupplementalGroups = nil
	}
	if in.FSGroup != nil {
		out.FSGroup = new(int64)
		*out.FSGroup = *in.FSGroup
	} else {
		out.FSGroup = nil
	}
	return nil
}

// deepCopy_api_PodSpec copies a PodSpec: the Volumes, Containers and
// ImagePullSecrets slices element by element through their respective deep
// copiers, the primitive pointers (TerminationGracePeriodSeconds,
// ActiveDeadlineSeconds) via fresh allocations, the NodeSelector map entry by
// entry, and the optional SecurityContext recursively. Nil inputs stay nil.
func deepCopy_api_PodSpec(in api.PodSpec, out *api.PodSpec, c *conversion.Cloner) error {
	if in.Volumes != nil {
		out.Volumes = make([]api.Volume, len(in.Volumes))
		for i := range in.Volumes {
			if err := deepCopy_api_Volume(in.Volumes[i], &out.Volumes[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Volumes = nil
	}
	if in.Containers != nil {
		out.Containers = make([]api.Container, len(in.Containers))
		for i := range in.Containers {
			if err := deepCopy_api_Container(in.Containers[i], &out.Containers[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Containers = nil
	}
	out.RestartPolicy = in.RestartPolicy
	if in.TerminationGracePeriodSeconds != nil {
		out.TerminationGracePeriodSeconds = new(int64)
		*out.TerminationGracePeriodSeconds = *in.TerminationGracePeriodSeconds
	} else {
		out.TerminationGracePeriodSeconds = nil
	}
	if in.ActiveDeadlineSeconds != nil {
		out.ActiveDeadlineSeconds = new(int64)
		*out.ActiveDeadlineSeconds = *in.ActiveDeadlineSeconds
	} else {
		out.ActiveDeadlineSeconds = nil
	}
	out.DNSPolicy = in.DNSPolicy
	if in.NodeSelector != nil {
		out.NodeSelector = make(map[string]string)
		for key, val := range in.NodeSelector {
			out.NodeSelector[key] = val
		}
	} else {
		out.NodeSelector = nil
	}
	out.ServiceAccountName = in.ServiceAccountName
	out.NodeName = in.NodeName
	if in.SecurityContext != nil {
		out.SecurityContext = new(api.PodSecurityContext)
		if err := deepCopy_api_PodSecurityContext(*in.SecurityContext, out.SecurityContext, c); err != nil {
			return err
		}
	} else {
		out.SecurityContext = nil
	}
	if in.ImagePullSecrets != nil {
		out.ImagePullSecrets = make([]api.LocalObjectReference, len(in.ImagePullSecrets))
		for i := range in.ImagePullSecrets {
			if err := deepCopy_api_LocalObjectReference(in.ImagePullSecrets[i], &out.ImagePullSecrets[i], c); err != nil {
				return err
			}
		}
	} else {
		out.ImagePullSecrets = nil
	}
	return nil
}

// deepCopy_api_PodTemplateSpec copies a PodTemplateSpec by deep-copying its
// ObjectMeta and Spec.
func deepCopy_api_PodTemplateSpec(in api.PodTemplateSpec, out *api.PodTemplateSpec, c *conversion.Cloner) error {
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_PodSpec(in.Spec, &out.Spec, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_api_Probe copies a Probe: the embedded Handler recursively, the
// timing/threshold fields by assignment.
func deepCopy_api_Probe(in api.Probe, out *api.Probe, c *conversion.Cloner) error {
	if err := deepCopy_api_Handler(in.Handler, &out.Handler, c); err != nil {
		return err
	}
	out.InitialDelaySeconds = in.InitialDelaySeconds
	out.TimeoutSeconds = in.TimeoutSeconds
	out.PeriodSeconds = in.PeriodSeconds
	out.SuccessThreshold = in.SuccessThreshold
	out.FailureThreshold = in.FailureThreshold
	return nil
}

// deepCopy_api_RBDVolumeSource copies an RBDVolumeSource: the CephMonitors
// slice element by element, scalars by assignment, and the optional SecretRef
// via a fresh allocation. Nil inputs stay nil.
func deepCopy_api_RBDVolumeSource(in api.RBDVolumeSource, out *api.RBDVolumeSource, c *conversion.Cloner) error {
	if in.CephMonitors != nil {
		out.CephMonitors = make([]string, len(in.CephMonitors))
		for i := range in.CephMonitors {
			out.CephMonitors[i] = in.CephMonitors[i]
		}
	} else {
		out.CephMonitors = nil
	}
	out.RBDImage = in.RBDImage
	out.FSType = in.FSType
	out.RBDPool = in.RBDPool
	out.RadosUser = in.RadosUser
	out.Keyring = in.Keyring
	if in.SecretRef != nil {
		out.SecretRef = new(api.LocalObjectReference)
		if err := deepCopy_api_LocalObjectReference(*in.SecretRef, out.SecretRef, c); err != nil {
			return err
		}
	} else {
		out.SecretRef = nil
	}
	out.ReadOnly = in.ReadOnly
	return nil
}
// deepCopy_api_ResourceRequirements copies ResourceRequirements, deep-copying
// every Quantity held in the Limits and Requests maps; nil maps stay nil.
func deepCopy_api_ResourceRequirements(in api.ResourceRequirements, out *api.ResourceRequirements, c *conversion.Cloner) error {
	if in.Limits == nil {
		out.Limits = nil
	} else {
		out.Limits = make(api.ResourceList)
		for name, qty := range in.Limits {
			var cloned resource.Quantity
			if err := deepCopy_resource_Quantity(qty, &cloned, c); err != nil {
				return err
			}
			out.Limits[name] = cloned
		}
	}
	if in.Requests == nil {
		out.Requests = nil
	} else {
		out.Requests = make(api.ResourceList)
		for name, qty := range in.Requests {
			var cloned resource.Quantity
			if err := deepCopy_resource_Quantity(qty, &cloned, c); err != nil {
				return err
			}
			out.Requests[name] = cloned
		}
	}
	return nil
}
// deepCopy_api_SELinuxOptions copies SELinuxOptions (scalar fields only).
func deepCopy_api_SELinuxOptions(in api.SELinuxOptions, out *api.SELinuxOptions, c *conversion.Cloner) error {
	out.User = in.User
	out.Role = in.Role
	out.Type = in.Type
	out.Level = in.Level
	return nil
}

// deepCopy_api_SecretVolumeSource copies a SecretVolumeSource (single scalar
// field).
func deepCopy_api_SecretVolumeSource(in api.SecretVolumeSource, out *api.SecretVolumeSource, c *conversion.Cloner) error {
	out.SecretName = in.SecretName
	return nil
}

// deepCopy_api_SecurityContext copies a SecurityContext: the optional
// Capabilities and SELinuxOptions recursively, and the primitive pointers
// (Privileged, RunAsUser, RunAsNonRoot) via fresh allocations. Nil pointers
// stay nil.
func deepCopy_api_SecurityContext(in api.SecurityContext, out *api.SecurityContext, c *conversion.Cloner) error {
	if in.Capabilities != nil {
		out.Capabilities = new(api.Capabilities)
		if err := deepCopy_api_Capabilities(*in.Capabilities, out.Capabilities, c); err != nil {
			return err
		}
	} else {
		out.Capabilities = nil
	}
	if in.Privileged != nil {
		out.Privileged = new(bool)
		*out.Privileged = *in.Privileged
	} else {
		out.Privileged = nil
	}
	if in.SELinuxOptions != nil {
		out.SELinuxOptions = new(api.SELinuxOptions)
		if err := deepCopy_api_SELinuxOptions(*in.SELinuxOptions, out.SELinuxOptions, c); err != nil {
			return err
		}
	} else {
		out.SELinuxOptions = nil
	}
	if in.RunAsUser != nil {
		out.RunAsUser = new(int64)
		*out.RunAsUser = *in.RunAsUser
	} else {
		out.RunAsUser = nil
	}
	if in.RunAsNonRoot != nil {
		out.RunAsNonRoot = new(bool)
		*out.RunAsNonRoot = *in.RunAsNonRoot
	} else {
		out.RunAsNonRoot = nil
	}
	return nil
}

// deepCopy_api_TCPSocketAction copies a TCPSocketAction, deep-copying the
// IntOrString port value.
func deepCopy_api_TCPSocketAction(in api.TCPSocketAction, out *api.TCPSocketAction, c *conversion.Cloner) error {
	if err := deepCopy_util_IntOrString(in.Port, &out.Port, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_api_Volume copies a Volume: the name by assignment, the embedded
// VolumeSource recursively.
func deepCopy_api_Volume(in api.Volume, out *api.Volume, c *conversion.Cloner) error {
	out.Name = in.Name
	if err := deepCopy_api_VolumeSource(in.VolumeSource, &out.VolumeSource, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_api_VolumeMount copies a VolumeMount (scalar fields only).
func deepCopy_api_VolumeMount(in api.VolumeMount, out *api.VolumeMount, c *conversion.Cloner) error {
	out.Name = in.Name
	out.ReadOnly = in.ReadOnly
	out.MountPath = in.MountPath
	return nil
}
// deepCopy_api_VolumeSource copies a VolumeSource. Exactly the same pattern
// is applied to each of the sixteen optional backend pointers: when the
// source pointer is non-nil, a fresh target struct is allocated and filled by
// the backend's own deep-copy function; when it is nil, the destination
// pointer is explicitly reset to nil so stale values in *out never survive.
func deepCopy_api_VolumeSource(in api.VolumeSource, out *api.VolumeSource, c *conversion.Cloner) error {
	if in.HostPath != nil {
		out.HostPath = new(api.HostPathVolumeSource)
		if err := deepCopy_api_HostPathVolumeSource(*in.HostPath, out.HostPath, c); err != nil {
			return err
		}
	} else {
		out.HostPath = nil
	}
	if in.EmptyDir != nil {
		out.EmptyDir = new(api.EmptyDirVolumeSource)
		if err := deepCopy_api_EmptyDirVolumeSource(*in.EmptyDir, out.EmptyDir, c); err != nil {
			return err
		}
	} else {
		out.EmptyDir = nil
	}
	if in.GCEPersistentDisk != nil {
		out.GCEPersistentDisk = new(api.GCEPersistentDiskVolumeSource)
		if err := deepCopy_api_GCEPersistentDiskVolumeSource(*in.GCEPersistentDisk, out.GCEPersistentDisk, c); err != nil {
			return err
		}
	} else {
		out.GCEPersistentDisk = nil
	}
	if in.AWSElasticBlockStore != nil {
		out.AWSElasticBlockStore = new(api.AWSElasticBlockStoreVolumeSource)
		if err := deepCopy_api_AWSElasticBlockStoreVolumeSource(*in.AWSElasticBlockStore, out.AWSElasticBlockStore, c); err != nil {
			return err
		}
	} else {
		out.AWSElasticBlockStore = nil
	}
	if in.GitRepo != nil {
		out.GitRepo = new(api.GitRepoVolumeSource)
		if err := deepCopy_api_GitRepoVolumeSource(*in.GitRepo, out.GitRepo, c); err != nil {
			return err
		}
	} else {
		out.GitRepo = nil
	}
	if in.Secret != nil {
		out.Secret = new(api.SecretVolumeSource)
		if err := deepCopy_api_SecretVolumeSource(*in.Secret, out.Secret, c); err != nil {
			return err
		}
	} else {
		out.Secret = nil
	}
	if in.NFS != nil {
		out.NFS = new(api.NFSVolumeSource)
		if err := deepCopy_api_NFSVolumeSource(*in.NFS, out.NFS, c); err != nil {
			return err
		}
	} else {
		out.NFS = nil
	}
	if in.ISCSI != nil {
		out.ISCSI = new(api.ISCSIVolumeSource)
		if err := deepCopy_api_ISCSIVolumeSource(*in.ISCSI, out.ISCSI, c); err != nil {
			return err
		}
	} else {
		out.ISCSI = nil
	}
	if in.Glusterfs != nil {
		out.Glusterfs = new(api.GlusterfsVolumeSource)
		if err := deepCopy_api_GlusterfsVolumeSource(*in.Glusterfs, out.Glusterfs, c); err != nil {
			return err
		}
	} else {
		out.Glusterfs = nil
	}
	if in.PersistentVolumeClaim != nil {
		out.PersistentVolumeClaim = new(api.PersistentVolumeClaimVolumeSource)
		if err := deepCopy_api_PersistentVolumeClaimVolumeSource(*in.PersistentVolumeClaim, out.PersistentVolumeClaim, c); err != nil {
			return err
		}
	} else {
		out.PersistentVolumeClaim = nil
	}
	if in.RBD != nil {
		out.RBD = new(api.RBDVolumeSource)
		if err := deepCopy_api_RBDVolumeSource(*in.RBD, out.RBD, c); err != nil {
			return err
		}
	} else {
		out.RBD = nil
	}
	if in.Cinder != nil {
		out.Cinder = new(api.CinderVolumeSource)
		if err := deepCopy_api_CinderVolumeSource(*in.Cinder, out.Cinder, c); err != nil {
			return err
		}
	} else {
		out.Cinder = nil
	}
	if in.CephFS != nil {
		out.CephFS = new(api.CephFSVolumeSource)
		if err := deepCopy_api_CephFSVolumeSource(*in.CephFS, out.CephFS, c); err != nil {
			return err
		}
	} else {
		out.CephFS = nil
	}
	if in.Flocker != nil {
		out.Flocker = new(api.FlockerVolumeSource)
		if err := deepCopy_api_FlockerVolumeSource(*in.Flocker, out.Flocker, c); err != nil {
			return err
		}
	} else {
		out.Flocker = nil
	}
	if in.DownwardAPI != nil {
		out.DownwardAPI = new(api.DownwardAPIVolumeSource)
		if err := deepCopy_api_DownwardAPIVolumeSource(*in.DownwardAPI, out.DownwardAPI, c); err != nil {
			return err
		}
	} else {
		out.DownwardAPI = nil
	}
	if in.FC != nil {
		out.FC = new(api.FCVolumeSource)
		if err := deepCopy_api_FCVolumeSource(*in.FC, out.FC, c); err != nil {
			return err
		}
	} else {
		out.FC = nil
	}
	return nil
}
// deepCopy_resource_Quantity copies a Quantity, using the generic Cloner to
// duplicate the *inf.Dec amount; a nil Amount stays nil.
func deepCopy_resource_Quantity(in resource.Quantity, out *resource.Quantity, c *conversion.Cloner) error {
	if in.Amount != nil {
		if newVal, err := c.DeepCopy(in.Amount); err != nil {
			return err
		} else {
			out.Amount = newVal.(*inf.Dec)
		}
	} else {
		out.Amount = nil
	}
	out.Format = in.Format
	return nil
}

// deepCopy_unversioned_ListMeta copies a ListMeta (scalar fields only).
func deepCopy_unversioned_ListMeta(in unversioned.ListMeta, out *unversioned.ListMeta, c *conversion.Cloner) error {
	out.SelfLink = in.SelfLink
	out.ResourceVersion = in.ResourceVersion
	return nil
}

// deepCopy_unversioned_Time copies a Time wrapper, cloning the embedded
// time.Time through the generic Cloner.
func deepCopy_unversioned_Time(in unversioned.Time, out *unversioned.Time, c *conversion.Cloner) error {
	if newVal, err := c.DeepCopy(in.Time); err != nil {
		return err
	} else {
		out.Time = newVal.(time.Time)
	}
	return nil
}

// deepCopy_unversioned_TypeMeta copies a TypeMeta (scalar fields only).
func deepCopy_unversioned_TypeMeta(in unversioned.TypeMeta, out *unversioned.TypeMeta, c *conversion.Cloner) error {
	out.Kind = in.Kind
	out.APIVersion = in.APIVersion
	return nil
}
// deepCopy_extensions_APIVersion copies an APIVersion (scalar fields only).
func deepCopy_extensions_APIVersion(in APIVersion, out *APIVersion, c *conversion.Cloner) error {
	out.Name = in.Name
	out.APIGroup = in.APIGroup
	return nil
}

// deepCopy_extensions_CPUTargetUtilization copies a CPUTargetUtilization
// (single scalar field).
func deepCopy_extensions_CPUTargetUtilization(in CPUTargetUtilization, out *CPUTargetUtilization, c *conversion.Cloner) error {
	out.TargetPercentage = in.TargetPercentage
	return nil
}

// deepCopy_extensions_ClusterAutoscaler copies a ClusterAutoscaler by
// deep-copying its TypeMeta, ObjectMeta and Spec. Note this object carries no
// Status field to copy.
func deepCopy_extensions_ClusterAutoscaler(in ClusterAutoscaler, out *ClusterAutoscaler, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_ClusterAutoscalerSpec(in.Spec, &out.Spec, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_ClusterAutoscalerList copies a list: TypeMeta and
// ListMeta first, then each item recursively; a nil Items slice stays nil.
func deepCopy_extensions_ClusterAutoscalerList(in ClusterAutoscalerList, out *ClusterAutoscalerList, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ClusterAutoscaler, len(in.Items))
		for i := range in.Items {
			if err := deepCopy_extensions_ClusterAutoscaler(in.Items[i], &out.Items[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// deepCopy_extensions_ClusterAutoscalerSpec copies the spec: node bounds by
// assignment, the TargetUtilization slice element by element; nil stays nil.
func deepCopy_extensions_ClusterAutoscalerSpec(in ClusterAutoscalerSpec, out *ClusterAutoscalerSpec, c *conversion.Cloner) error {
	out.MinNodes = in.MinNodes
	out.MaxNodes = in.MaxNodes
	if in.TargetUtilization != nil {
		out.TargetUtilization = make([]NodeUtilization, len(in.TargetUtilization))
		for i := range in.TargetUtilization {
			if err := deepCopy_extensions_NodeUtilization(in.TargetUtilization[i], &out.TargetUtilization[i], c); err != nil {
				return err
			}
		}
	} else {
		out.TargetUtilization = nil
	}
	return nil
}
// deepCopy_extensions_DaemonSet copies a DaemonSet by deep-copying its
// TypeMeta, ObjectMeta, Spec and Status in order.
func deepCopy_extensions_DaemonSet(in DaemonSet, out *DaemonSet, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_DaemonSetSpec(in.Spec, &out.Spec, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_DaemonSetStatus(in.Status, &out.Status, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_DaemonSetList copies a DaemonSetList: list metadata
// first, then each item recursively; a nil Items slice stays nil.
func deepCopy_extensions_DaemonSetList(in DaemonSetList, out *DaemonSetList, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]DaemonSet, len(in.Items))
		for i := range in.Items {
			if err := deepCopy_extensions_DaemonSet(in.Items[i], &out.Items[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// deepCopy_extensions_DaemonSetSpec copies a DaemonSetSpec: the Selector map
// entry by entry and the optional pod Template recursively; nil stays nil.
func deepCopy_extensions_DaemonSetSpec(in DaemonSetSpec, out *DaemonSetSpec, c *conversion.Cloner) error {
	if in.Selector != nil {
		out.Selector = make(map[string]string)
		for key, val := range in.Selector {
			out.Selector[key] = val
		}
	} else {
		out.Selector = nil
	}
	if in.Template != nil {
		out.Template = new(api.PodTemplateSpec)
		if err := deepCopy_api_PodTemplateSpec(*in.Template, out.Template, c); err != nil {
			return err
		}
	} else {
		out.Template = nil
	}
	return nil
}

// deepCopy_extensions_DaemonSetStatus copies a DaemonSetStatus (scalar
// counters only).
func deepCopy_extensions_DaemonSetStatus(in DaemonSetStatus, out *DaemonSetStatus, c *conversion.Cloner) error {
	out.CurrentNumberScheduled = in.CurrentNumberScheduled
	out.NumberMisscheduled = in.NumberMisscheduled
	out.DesiredNumberScheduled = in.DesiredNumberScheduled
	return nil
}
// deepCopy_extensions_Deployment copies a Deployment by deep-copying its
// TypeMeta, ObjectMeta, Spec and Status in order.
func deepCopy_extensions_Deployment(in Deployment, out *Deployment, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_DeploymentSpec(in.Spec, &out.Spec, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_DeploymentStatus(in.Status, &out.Status, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_DeploymentList copies a DeploymentList: list metadata
// first, then each item recursively; a nil Items slice stays nil.
func deepCopy_extensions_DeploymentList(in DeploymentList, out *DeploymentList, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Deployment, len(in.Items))
		for i := range in.Items {
			if err := deepCopy_extensions_Deployment(in.Items[i], &out.Items[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// deepCopy_extensions_DeploymentSpec copies a DeploymentSpec: Replicas by
// assignment, the Selector map entry by entry, and the Template and Strategy
// recursively.
func deepCopy_extensions_DeploymentSpec(in DeploymentSpec, out *DeploymentSpec, c *conversion.Cloner) error {
	out.Replicas = in.Replicas
	if in.Selector != nil {
		out.Selector = make(map[string]string)
		for key, val := range in.Selector {
			out.Selector[key] = val
		}
	} else {
		out.Selector = nil
	}
	if err := deepCopy_api_PodTemplateSpec(in.Template, &out.Template, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_DeploymentStrategy(in.Strategy, &out.Strategy, c); err != nil {
		return err
	}
	out.UniqueLabelKey = in.UniqueLabelKey
	return nil
}

// deepCopy_extensions_DeploymentStatus copies a DeploymentStatus (scalar
// counters only).
func deepCopy_extensions_DeploymentStatus(in DeploymentStatus, out *DeploymentStatus, c *conversion.Cloner) error {
	out.Replicas = in.Replicas
	out.UpdatedReplicas = in.UpdatedReplicas
	return nil
}

// deepCopy_extensions_DeploymentStrategy copies a DeploymentStrategy, cloning
// the optional RollingUpdate parameters; a nil pointer stays nil.
func deepCopy_extensions_DeploymentStrategy(in DeploymentStrategy, out *DeploymentStrategy, c *conversion.Cloner) error {
	out.Type = in.Type
	if in.RollingUpdate != nil {
		out.RollingUpdate = new(RollingUpdateDeployment)
		if err := deepCopy_extensions_RollingUpdateDeployment(*in.RollingUpdate, out.RollingUpdate, c); err != nil {
			return err
		}
	} else {
		out.RollingUpdate = nil
	}
	return nil
}
// deepCopy_extensions_HTTPIngressPath copies an HTTPIngressPath,
// deep-copying its Backend.
func deepCopy_extensions_HTTPIngressPath(in HTTPIngressPath, out *HTTPIngressPath, c *conversion.Cloner) error {
	out.Path = in.Path
	if err := deepCopy_extensions_IngressBackend(in.Backend, &out.Backend, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_HTTPIngressRuleValue copies an HTTPIngressRuleValue,
// deep-copying each path; a nil Paths slice stays nil.
func deepCopy_extensions_HTTPIngressRuleValue(in HTTPIngressRuleValue, out *HTTPIngressRuleValue, c *conversion.Cloner) error {
	if in.Paths != nil {
		out.Paths = make([]HTTPIngressPath, len(in.Paths))
		for i := range in.Paths {
			if err := deepCopy_extensions_HTTPIngressPath(in.Paths[i], &out.Paths[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Paths = nil
	}
	return nil
}

// deepCopy_extensions_HorizontalPodAutoscaler copies an HPA by deep-copying
// its TypeMeta, ObjectMeta, Spec and Status in order.
func deepCopy_extensions_HorizontalPodAutoscaler(in HorizontalPodAutoscaler, out *HorizontalPodAutoscaler, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_HorizontalPodAutoscalerSpec(in.Spec, &out.Spec, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_HorizontalPodAutoscalerStatus(in.Status, &out.Status, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_HorizontalPodAutoscalerList copies an HPA list: list
// metadata first, then each item recursively; a nil Items slice stays nil.
func deepCopy_extensions_HorizontalPodAutoscalerList(in HorizontalPodAutoscalerList, out *HorizontalPodAutoscalerList, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]HorizontalPodAutoscaler, len(in.Items))
		for i := range in.Items {
			if err := deepCopy_extensions_HorizontalPodAutoscaler(in.Items[i], &out.Items[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// deepCopy_extensions_HorizontalPodAutoscalerSpec copies the HPA spec: the
// ScaleRef recursively, the optional MinReplicas via a fresh int, and the
// optional CPUUtilization recursively; nil pointers stay nil.
func deepCopy_extensions_HorizontalPodAutoscalerSpec(in HorizontalPodAutoscalerSpec, out *HorizontalPodAutoscalerSpec, c *conversion.Cloner) error {
	if err := deepCopy_extensions_SubresourceReference(in.ScaleRef, &out.ScaleRef, c); err != nil {
		return err
	}
	if in.MinReplicas != nil {
		out.MinReplicas = new(int)
		*out.MinReplicas = *in.MinReplicas
	} else {
		out.MinReplicas = nil
	}
	out.MaxReplicas = in.MaxReplicas
	if in.CPUUtilization != nil {
		out.CPUUtilization = new(CPUTargetUtilization)
		if err := deepCopy_extensions_CPUTargetUtilization(*in.CPUUtilization, out.CPUUtilization, c); err != nil {
			return err
		}
	} else {
		out.CPUUtilization = nil
	}
	return nil
}

// deepCopy_extensions_HorizontalPodAutoscalerStatus copies the HPA status:
// optional ObservedGeneration, LastScaleTime and
// CurrentCPUUtilizationPercentage pointers via fresh allocations, replica
// counters by assignment; nil pointers stay nil.
func deepCopy_extensions_HorizontalPodAutoscalerStatus(in HorizontalPodAutoscalerStatus, out *HorizontalPodAutoscalerStatus, c *conversion.Cloner) error {
	if in.ObservedGeneration != nil {
		out.ObservedGeneration = new(int64)
		*out.ObservedGeneration = *in.ObservedGeneration
	} else {
		out.ObservedGeneration = nil
	}
	if in.LastScaleTime != nil {
		out.LastScaleTime = new(unversioned.Time)
		if err := deepCopy_unversioned_Time(*in.LastScaleTime, out.LastScaleTime, c); err != nil {
			return err
		}
	} else {
		out.LastScaleTime = nil
	}
	out.CurrentReplicas = in.CurrentReplicas
	out.DesiredReplicas = in.DesiredReplicas
	if in.CurrentCPUUtilizationPercentage != nil {
		out.CurrentCPUUtilizationPercentage = new(int)
		*out.CurrentCPUUtilizationPercentage = *in.CurrentCPUUtilizationPercentage
	} else {
		out.CurrentCPUUtilizationPercentage = nil
	}
	return nil
}
// deepCopy_extensions_Ingress copies an Ingress by deep-copying its TypeMeta,
// ObjectMeta, Spec and Status in order.
func deepCopy_extensions_Ingress(in Ingress, out *Ingress, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_IngressSpec(in.Spec, &out.Spec, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_IngressStatus(in.Status, &out.Status, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_IngressBackend copies an IngressBackend, deep-copying
// the IntOrString service port.
func deepCopy_extensions_IngressBackend(in IngressBackend, out *IngressBackend, c *conversion.Cloner) error {
	out.ServiceName = in.ServiceName
	if err := deepCopy_util_IntOrString(in.ServicePort, &out.ServicePort, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_IngressList copies an IngressList: list metadata first,
// then each item recursively; a nil Items slice stays nil.
func deepCopy_extensions_IngressList(in IngressList, out *IngressList, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]Ingress, len(in.Items))
		for i := range in.Items {
			if err := deepCopy_extensions_Ingress(in.Items[i], &out.Items[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}

// deepCopy_extensions_IngressRule copies an IngressRule, deep-copying the
// embedded IngressRuleValue.
func deepCopy_extensions_IngressRule(in IngressRule, out *IngressRule, c *conversion.Cloner) error {
	out.Host = in.Host
	if err := deepCopy_extensions_IngressRuleValue(in.IngressRuleValue, &out.IngressRuleValue, c); err != nil {
		return err
	}
	return nil
}

// deepCopy_extensions_IngressRuleValue copies an IngressRuleValue, cloning
// the optional HTTP rule; a nil pointer stays nil.
func deepCopy_extensions_IngressRuleValue(in IngressRuleValue, out *IngressRuleValue, c *conversion.Cloner) error {
	if in.HTTP != nil {
		out.HTTP = new(HTTPIngressRuleValue)
		if err := deepCopy_extensions_HTTPIngressRuleValue(*in.HTTP, out.HTTP, c); err != nil {
			return err
		}
	} else {
		out.HTTP = nil
	}
	return nil
}

// deepCopy_extensions_IngressSpec copies an IngressSpec: the optional default
// Backend via a fresh allocation and the Rules slice element by element; nil
// inputs stay nil.
func deepCopy_extensions_IngressSpec(in IngressSpec, out *IngressSpec, c *conversion.Cloner) error {
	if in.Backend != nil {
		out.Backend = new(IngressBackend)
		if err := deepCopy_extensions_IngressBackend(*in.Backend, out.Backend, c); err != nil {
			return err
		}
	} else {
		out.Backend = nil
	}
	if in.Rules != nil {
		out.Rules = make([]IngressRule, len(in.Rules))
		for i := range in.Rules {
			if err := deepCopy_extensions_IngressRule(in.Rules[i], &out.Rules[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Rules = nil
	}
	return nil
}

// deepCopy_extensions_IngressStatus copies an IngressStatus, deep-copying the
// load-balancer status.
func deepCopy_extensions_IngressStatus(in IngressStatus, out *IngressStatus, c *conversion.Cloner) error {
	if err := deepCopy_api_LoadBalancerStatus(in.LoadBalancer, &out.LoadBalancer, c); err != nil {
		return err
	}
	return nil
}
func deepCopy_extensions_Job(in Job, out *Job, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
return err
}
if err := deepCopy_extensions_JobSpec(in.Spec, &out.Spec, c); err != nil {
return err
}
if err := deepCopy_extensions_JobStatus(in.Status, &out.Status, c); err != nil {
return err
}
return nil
}
func deepCopy_extensions_JobCondition(in JobCondition, out *JobCondition, c *conversion.Cloner) error {
out.Type = in.Type
out.Status = in.Status
if err := deepCopy_unversioned_Time(in.LastProbeTime, &out.LastProbeTime, c); err != nil {
return err
}
if err := deepCopy_unversioned_Time(in.LastTransitionTime, &out.LastTransitionTime, c); err != nil {
return err
}
out.Reason = in.Reason
out.Message = in.Message
return nil
}
func deepCopy_extensions_JobList(in JobList, out *JobList, c *conversion.Cloner) error {
if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
return err
}
if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
return err
}
if in.Items != nil {
out.Items = make([]Job, len(in.Items))
for i := range in.Items {
if err := deepCopy_extensions_Job(in.Items[i], &out.Items[i], c); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
// deepCopy_extensions_JobSpec deep-copies a JobSpec. Optional scalar fields
// (Parallelism, Completions) are cloned by allocating a fresh pointee so the
// copy never aliases the source; a nil pointer remains nil. The optional
// Selector and the mandatory Template are cloned via generated helpers.
func deepCopy_extensions_JobSpec(in JobSpec, out *JobSpec, c *conversion.Cloner) error {
	out.Parallelism = nil
	if in.Parallelism != nil {
		parallelism := *in.Parallelism
		out.Parallelism = &parallelism
	}
	out.Completions = nil
	if in.Completions != nil {
		completions := *in.Completions
		out.Completions = &completions
	}
	out.Selector = nil
	if in.Selector != nil {
		out.Selector = new(PodSelector)
		if err := deepCopy_extensions_PodSelector(*in.Selector, out.Selector, c); err != nil {
			return err
		}
	}
	// Template is a value field; the helper's error (or nil) is our result.
	return deepCopy_api_PodTemplateSpec(in.Template, &out.Template, c)
}
// deepCopy_extensions_JobStatus deep-copies a JobStatus. The Conditions
// slice is cloned per element, the optional StartTime/CompletionTime
// pointers get freshly allocated unversioned.Time values, and the three
// counters are copied by assignment. Nil slice/pointer inputs stay nil.
func deepCopy_extensions_JobStatus(in JobStatus, out *JobStatus, c *conversion.Cloner) error {
	out.Conditions = nil
	if in.Conditions != nil {
		out.Conditions = make([]JobCondition, len(in.Conditions))
		for i, condition := range in.Conditions {
			if err := deepCopy_extensions_JobCondition(condition, &out.Conditions[i], c); err != nil {
				return err
			}
		}
	}
	out.StartTime = nil
	if in.StartTime != nil {
		out.StartTime = new(unversioned.Time)
		if err := deepCopy_unversioned_Time(*in.StartTime, out.StartTime, c); err != nil {
			return err
		}
	}
	out.CompletionTime = nil
	if in.CompletionTime != nil {
		out.CompletionTime = new(unversioned.Time)
		if err := deepCopy_unversioned_Time(*in.CompletionTime, out.CompletionTime, c); err != nil {
			return err
		}
	}
	out.Active = in.Active
	out.Succeeded = in.Succeeded
	out.Failed = in.Failed
	return nil
}
// deepCopy_extensions_NodeUtilization copies a NodeUtilization from in to
// out. Both fields are value types, so plain assignment is a deep copy;
// this function can never fail and always returns nil.
func deepCopy_extensions_NodeUtilization(in NodeUtilization, out *NodeUtilization, c *conversion.Cloner) error {
	out.Resource = in.Resource
	out.Value = in.Value
	return nil
}
// deepCopy_extensions_PodSelector deep-copies a PodSelector: the
// MatchLabels map is rebuilt key by key and every MatchExpressions entry is
// cloned with the requirement helper. Nil map/slice inputs remain nil in
// the copy so round-tripping preserves the nil-vs-empty distinction.
func deepCopy_extensions_PodSelector(in PodSelector, out *PodSelector, c *conversion.Cloner) error {
	out.MatchLabels = nil
	if in.MatchLabels != nil {
		out.MatchLabels = make(map[string]string, len(in.MatchLabels))
		for k, v := range in.MatchLabels {
			out.MatchLabels[k] = v
		}
	}
	out.MatchExpressions = nil
	if in.MatchExpressions != nil {
		out.MatchExpressions = make([]PodSelectorRequirement, len(in.MatchExpressions))
		for i, requirement := range in.MatchExpressions {
			if err := deepCopy_extensions_PodSelectorRequirement(requirement, &out.MatchExpressions[i], c); err != nil {
				return err
			}
		}
	}
	return nil
}
// deepCopy_extensions_PodSelectorRequirement deep-copies a
// PodSelectorRequirement. Key and Operator are value types; the Values
// string slice is duplicated with the builtin copy so the result shares no
// backing array with the source. A nil Values stays nil.
func deepCopy_extensions_PodSelectorRequirement(in PodSelectorRequirement, out *PodSelectorRequirement, c *conversion.Cloner) error {
	out.Key = in.Key
	out.Operator = in.Operator
	out.Values = nil
	if in.Values != nil {
		out.Values = make([]string, len(in.Values))
		copy(out.Values, in.Values)
	}
	return nil
}
// deepCopy_extensions_ReplicationControllerDummy deep-copies a
// ReplicationControllerDummy, whose only content is the embedded TypeMeta;
// the TypeMeta helper's error, if any, is propagated.
func deepCopy_extensions_ReplicationControllerDummy(in ReplicationControllerDummy, out *ReplicationControllerDummy, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	return nil
}
// deepCopy_extensions_RollingUpdateDeployment deep-copies a
// RollingUpdateDeployment: the two IntOrString fields go through the util
// helper and MinReadySeconds is copied by assignment.
func deepCopy_extensions_RollingUpdateDeployment(in RollingUpdateDeployment, out *RollingUpdateDeployment, c *conversion.Cloner) error {
	if err := deepCopy_util_IntOrString(in.MaxUnavailable, &out.MaxUnavailable, c); err != nil {
		return err
	}
	if err := deepCopy_util_IntOrString(in.MaxSurge, &out.MaxSurge, c); err != nil {
		return err
	}
	out.MinReadySeconds = in.MinReadySeconds
	return nil
}
// deepCopy_extensions_Scale deep-copies a Scale object section by section
// (TypeMeta, ObjectMeta, Spec, Status) using the generated helpers; the
// first helper error aborts the copy and is returned.
func deepCopy_extensions_Scale(in Scale, out *Scale, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_ScaleSpec(in.Spec, &out.Spec, c); err != nil {
		return err
	}
	if err := deepCopy_extensions_ScaleStatus(in.Status, &out.Status, c); err != nil {
		return err
	}
	return nil
}
// deepCopy_extensions_ScaleSpec copies a ScaleSpec; the single Replicas
// field is a value type, so assignment suffices and no error is possible.
func deepCopy_extensions_ScaleSpec(in ScaleSpec, out *ScaleSpec, c *conversion.Cloner) error {
	out.Replicas = in.Replicas
	return nil
}
// deepCopy_extensions_ScaleStatus deep-copies a ScaleStatus: Replicas is
// assigned directly and the Selector label map is rebuilt entry by entry.
// A nil Selector stays nil so the nil-vs-empty distinction survives.
func deepCopy_extensions_ScaleStatus(in ScaleStatus, out *ScaleStatus, c *conversion.Cloner) error {
	out.Replicas = in.Replicas
	out.Selector = nil
	if in.Selector != nil {
		out.Selector = make(map[string]string, len(in.Selector))
		for k, v := range in.Selector {
			out.Selector[k] = v
		}
	}
	return nil
}
// deepCopy_extensions_SubresourceReference copies a SubresourceReference.
// All four fields are strings, so plain assignment is a deep copy and the
// function always returns nil.
func deepCopy_extensions_SubresourceReference(in SubresourceReference, out *SubresourceReference, c *conversion.Cloner) error {
	out.Kind = in.Kind
	out.Name = in.Name
	out.APIVersion = in.APIVersion
	out.Subresource = in.Subresource
	return nil
}
// deepCopy_extensions_ThirdPartyResource deep-copies a ThirdPartyResource:
// embedded metadata goes through the generated helpers, Description is
// assigned, and each APIVersion in Versions is cloned individually. A nil
// Versions slice remains nil in the copy.
func deepCopy_extensions_ThirdPartyResource(in ThirdPartyResource, out *ThirdPartyResource, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	out.Description = in.Description
	out.Versions = nil
	if in.Versions != nil {
		out.Versions = make([]APIVersion, len(in.Versions))
		for i, version := range in.Versions {
			if err := deepCopy_extensions_APIVersion(version, &out.Versions[i], c); err != nil {
				return err
			}
		}
	}
	return nil
}
// deepCopy_extensions_ThirdPartyResourceData deep-copies a
// ThirdPartyResourceData: metadata is cloned via the generated helpers and
// the raw Data bytes are duplicated with the builtin copy so the clone owns
// its own backing array. A nil Data stays nil.
func deepCopy_extensions_ThirdPartyResourceData(in ThirdPartyResourceData, out *ThirdPartyResourceData, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_api_ObjectMeta(in.ObjectMeta, &out.ObjectMeta, c); err != nil {
		return err
	}
	out.Data = nil
	if in.Data != nil {
		out.Data = make([]uint8, len(in.Data))
		copy(out.Data, in.Data)
	}
	return nil
}
// deepCopy_extensions_ThirdPartyResourceDataList deep-copies a
// ThirdPartyResourceDataList: type and list metadata via the generated
// helpers, then each Items entry via the per-item helper. A nil Items
// slice is preserved as nil.
func deepCopy_extensions_ThirdPartyResourceDataList(in ThirdPartyResourceDataList, out *ThirdPartyResourceDataList, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ThirdPartyResourceData, len(in.Items))
		for i := range in.Items {
			if err := deepCopy_extensions_ThirdPartyResourceData(in.Items[i], &out.Items[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// deepCopy_extensions_ThirdPartyResourceList deep-copies a
// ThirdPartyResourceList: type and list metadata via the generated helpers,
// then each Items entry via deepCopy_extensions_ThirdPartyResource. A nil
// Items slice is preserved as nil.
func deepCopy_extensions_ThirdPartyResourceList(in ThirdPartyResourceList, out *ThirdPartyResourceList, c *conversion.Cloner) error {
	if err := deepCopy_unversioned_TypeMeta(in.TypeMeta, &out.TypeMeta, c); err != nil {
		return err
	}
	if err := deepCopy_unversioned_ListMeta(in.ListMeta, &out.ListMeta, c); err != nil {
		return err
	}
	if in.Items != nil {
		out.Items = make([]ThirdPartyResource, len(in.Items))
		for i := range in.Items {
			if err := deepCopy_extensions_ThirdPartyResource(in.Items[i], &out.Items[i], c); err != nil {
				return err
			}
		}
	} else {
		out.Items = nil
	}
	return nil
}
// deepCopy_util_IntOrString copies a util.IntOrString. All three fields
// (the discriminator Kind plus both value slots) are value types, so plain
// assignment is a deep copy; no error is possible.
func deepCopy_util_IntOrString(in util.IntOrString, out *util.IntOrString, c *conversion.Cloner) error {
	out.Kind = in.Kind
	out.IntVal = in.IntVal
	out.StrVal = in.StrVal
	return nil
}
// init registers every generated deep-copy function in this file (plus the
// api/resource/unversioned/util helpers it depends on) with the global API
// scheme at package-load time. Registration failure indicates a malformed
// generated function and is unrecoverable, so it panics immediately.
func init() {
	err := api.Scheme.AddGeneratedDeepCopyFuncs(
		deepCopy_api_AWSElasticBlockStoreVolumeSource,
		deepCopy_api_Capabilities,
		deepCopy_api_CephFSVolumeSource,
		deepCopy_api_CinderVolumeSource,
		deepCopy_api_Container,
		deepCopy_api_ContainerPort,
		deepCopy_api_DownwardAPIVolumeFile,
		deepCopy_api_DownwardAPIVolumeSource,
		deepCopy_api_EmptyDirVolumeSource,
		deepCopy_api_EnvVar,
		deepCopy_api_EnvVarSource,
		deepCopy_api_ExecAction,
		deepCopy_api_FCVolumeSource,
		deepCopy_api_FlockerVolumeSource,
		deepCopy_api_GCEPersistentDiskVolumeSource,
		deepCopy_api_GitRepoVolumeSource,
		deepCopy_api_GlusterfsVolumeSource,
		deepCopy_api_HTTPGetAction,
		deepCopy_api_Handler,
		deepCopy_api_HostPathVolumeSource,
		deepCopy_api_ISCSIVolumeSource,
		deepCopy_api_Lifecycle,
		deepCopy_api_LoadBalancerIngress,
		deepCopy_api_LoadBalancerStatus,
		deepCopy_api_LocalObjectReference,
		deepCopy_api_NFSVolumeSource,
		deepCopy_api_ObjectFieldSelector,
		deepCopy_api_ObjectMeta,
		deepCopy_api_PersistentVolumeClaimVolumeSource,
		deepCopy_api_PodSecurityContext,
		deepCopy_api_PodSpec,
		deepCopy_api_PodTemplateSpec,
		deepCopy_api_Probe,
		deepCopy_api_RBDVolumeSource,
		deepCopy_api_ResourceRequirements,
		deepCopy_api_SELinuxOptions,
		deepCopy_api_SecretVolumeSource,
		deepCopy_api_SecurityContext,
		deepCopy_api_TCPSocketAction,
		deepCopy_api_Volume,
		deepCopy_api_VolumeMount,
		deepCopy_api_VolumeSource,
		deepCopy_resource_Quantity,
		deepCopy_unversioned_ListMeta,
		deepCopy_unversioned_Time,
		deepCopy_unversioned_TypeMeta,
		deepCopy_extensions_APIVersion,
		deepCopy_extensions_CPUTargetUtilization,
		deepCopy_extensions_ClusterAutoscaler,
		deepCopy_extensions_ClusterAutoscalerList,
		deepCopy_extensions_ClusterAutoscalerSpec,
		deepCopy_extensions_DaemonSet,
		deepCopy_extensions_DaemonSetList,
		deepCopy_extensions_DaemonSetSpec,
		deepCopy_extensions_DaemonSetStatus,
		deepCopy_extensions_Deployment,
		deepCopy_extensions_DeploymentList,
		deepCopy_extensions_DeploymentSpec,
		deepCopy_extensions_DeploymentStatus,
		deepCopy_extensions_DeploymentStrategy,
		deepCopy_extensions_HTTPIngressPath,
		deepCopy_extensions_HTTPIngressRuleValue,
		deepCopy_extensions_HorizontalPodAutoscaler,
		deepCopy_extensions_HorizontalPodAutoscalerList,
		deepCopy_extensions_HorizontalPodAutoscalerSpec,
		deepCopy_extensions_HorizontalPodAutoscalerStatus,
		deepCopy_extensions_Ingress,
		deepCopy_extensions_IngressBackend,
		deepCopy_extensions_IngressList,
		deepCopy_extensions_IngressRule,
		deepCopy_extensions_IngressRuleValue,
		deepCopy_extensions_IngressSpec,
		deepCopy_extensions_IngressStatus,
		deepCopy_extensions_Job,
		deepCopy_extensions_JobCondition,
		deepCopy_extensions_JobList,
		deepCopy_extensions_JobSpec,
		deepCopy_extensions_JobStatus,
		deepCopy_extensions_NodeUtilization,
		deepCopy_extensions_PodSelector,
		deepCopy_extensions_PodSelectorRequirement,
		deepCopy_extensions_ReplicationControllerDummy,
		deepCopy_extensions_RollingUpdateDeployment,
		deepCopy_extensions_Scale,
		deepCopy_extensions_ScaleSpec,
		deepCopy_extensions_ScaleStatus,
		deepCopy_extensions_SubresourceReference,
		deepCopy_extensions_ThirdPartyResource,
		deepCopy_extensions_ThirdPartyResourceData,
		deepCopy_extensions_ThirdPartyResourceDataList,
		deepCopy_extensions_ThirdPartyResourceList,
		deepCopy_util_IntOrString,
	)
	if err != nil {
		// if one of the deep copy functions is malformed, detect it immediately.
		panic(err)
	}
}
| klucar/pachyderm | vendor/k8s.io/kubernetes/pkg/apis/extensions/deep_copy_generated.go | GO | apache-2.0 | 48,982 |
require 'rubygems'
require 'minitest/autorun'
require 'rdoc/ri'
require 'rdoc/markup'
require 'tmpdir'
require 'fileutils'
##
# Unit tests for RDoc::RI::Store, the on-disk store of parsed ri data.
# Every test runs against a throwaway store rooted in a per-process temp
# directory (created in +setup+, removed in +teardown+).

class TestRDocRIStore < MiniTest::Unit::TestCase
  # Builds the shared fixture: a store in a temp dir plus a small code
  # model — class Object with a class method, two instance methods
  # (one with a '!' suffix to exercise filename escaping) and an
  # attribute, and a nested Object::SubClass with a method and an include.
  def setup
    RDoc::TopLevel.reset
    @tmpdir = File.join Dir.tmpdir, "test_rdoc_ri_store_#{$$}"
    @s = RDoc::RI::Store.new @tmpdir
    @top_level = RDoc::TopLevel.new 'file.rb'
    @klass = @top_level.add_class RDoc::NormalClass, 'Object'
    @klass.comment = 'original'
    @cmeth = RDoc::AnyMethod.new nil, 'cmethod'
    @cmeth.singleton = true
    @meth = RDoc::AnyMethod.new nil, 'method'
    @meth_bang = RDoc::AnyMethod.new nil, 'method!'
    @attr = RDoc::Attr.new nil, 'attr', 'RW', ''
    @klass.add_method @cmeth
    @klass.add_method @meth
    @klass.add_method @meth_bang
    @klass.add_attribute @attr
    @nest_klass = @klass.add_class RDoc::NormalClass, 'SubClass'
    @nest_meth = RDoc::AnyMethod.new nil, 'method'
    @nest_incl = RDoc::Include.new 'Incl', ''
    @nest_klass.add_method @nest_meth
    @nest_klass.add_include @nest_incl
    # Shorthand for building expected markup trees in assertions.
    @RM = RDoc::Markup
  end
  # Removes the temp store so tests leave no files behind.
  def teardown
    FileUtils.rm_rf @tmpdir
  end
  # Asserts the store's in-memory cache matches the given method,
  # attribute, module and ancestor tables.
  def assert_cache imethods, cmethods, attrs, modules, ancestors = {}
    expected = {
      :class_methods => cmethods,
      :instance_methods => imethods,
      :attributes => attrs,
      :modules => modules,
      :ancestors => ancestors
    }
    assert_equal expected, @s.cache
  end
  # Asserts +path+ exists and is a directory.
  def assert_directory path
    assert File.directory?(path), "#{path} is not a directory"
  end
  # Asserts +path+ exists and is a regular file.
  def assert_file path
    assert File.file?(path), "#{path} is not a file"
  end
  def test_attributes
    @s.cache[:attributes]['Object'] = %w[attr]
    expected = { 'Object' => %w[attr] }
    assert_equal expected, @s.attributes
  end
  # Class description files live at <class dir>/cdesc-<Name>.ri, with
  # nested classes under nested directories.
  def test_class_file
    assert_equal File.join(@tmpdir, 'Object', 'cdesc-Object.ri'),
                 @s.class_file('Object')
    assert_equal File.join(@tmpdir, 'Object', 'SubClass', 'cdesc-SubClass.ri'),
                 @s.class_file('Object::SubClass')
  end
  def test_class_methods
    @s.cache[:class_methods]['Object'] = %w[method]
    expected = { 'Object' => %w[method] }
    assert_equal expected, @s.class_methods
  end
  def test_class_path
    assert_equal File.join(@tmpdir, 'Object'), @s.class_path('Object')
    assert_equal File.join(@tmpdir, 'Object', 'SubClass'),
                 @s.class_path('Object::SubClass')
  end
  # friendly_path renders a human-readable label for each store type;
  # unknown/extra types fall back to the raw path.
  def test_friendly_path
    @s.path = @tmpdir
    @s.type = nil
    assert_equal @s.path, @s.friendly_path
    @s.type = :extra
    assert_equal @s.path, @s.friendly_path
    @s.type = :system
    assert_equal "ruby core", @s.friendly_path
    @s.type = :site
    assert_equal "ruby site", @s.friendly_path
    @s.type = :home
    assert_equal "~/.ri", @s.friendly_path
    @s.type = :gem
    @s.path = "#{@tmpdir}/gem_repository/doc/gem_name-1.0/ri"
    assert_equal "gem gem_name-1.0", @s.friendly_path
  end
  def test_instance_methods
    @s.cache[:instance_methods]['Object'] = %w[method]
    expected = { 'Object' => %w[method] }
    assert_equal expected, @s.instance_methods
  end
  # A cache.ri written by an older store is loaded verbatim — note the
  # keys here deliberately differ from the default cache shape.
  def test_load_cache
    cache = {
      :methods => %w[Object#method],
      :modules => %w[Object],
    }
    Dir.mkdir @tmpdir
    open File.join(@tmpdir, 'cache.ri'), 'wb' do |io|
      Marshal.dump cache, io
    end
    @s.load_cache
    assert_equal cache, @s.cache
  end
  # With no cache.ri on disk, load_cache yields the empty default tables.
  def test_load_cache_no_cache
    cache = {
      :ancestors => {},
      :attributes => {},
      :class_methods => {},
      :instance_methods => {},
      :modules => [],
    }
    @s.load_cache
    assert_equal cache, @s.cache
  end
  def test_load_class
    @s.save_class @klass
    assert_equal @klass, @s.load_class('Object')
  end
  # Round-trips a method whose name needs percent-escaping on disk.
  def test_load_method_bang
    @s.save_method @klass, @meth_bang
    meth = @s.load_method('Object', '#method!')
    assert_equal @meth_bang, meth
  end
  # Method files are <name>-i.ri / <name>-c.ri for instance/class methods,
  # with non-filename-safe characters percent-escaped (e.g. '!' -> %21).
  def test_method_file
    assert_equal File.join(@tmpdir, 'Object', 'method-i.ri'),
                 @s.method_file('Object', 'Object#method')
    assert_equal File.join(@tmpdir, 'Object', 'method%21-i.ri'),
                 @s.method_file('Object', 'Object#method!')
    assert_equal File.join(@tmpdir, 'Object', 'SubClass', 'method%21-i.ri'),
                 @s.method_file('Object::SubClass', 'Object::SubClass#method!')
    assert_equal File.join(@tmpdir, 'Object', 'method-c.ri'),
                 @s.method_file('Object', 'Object::method')
  end
  # save_cache serializes the full cache tables to cache.ri via Marshal.
  def test_save_cache
    @s.save_class @klass
    @s.save_method @klass, @meth
    @s.save_method @klass, @cmeth
    @s.save_class @nest_klass
    @s.save_cache
    assert_file File.join(@tmpdir, 'cache.ri')
    expected = {
      :attributes => { 'Object' => ['attr_accessor attr'] },
      :class_methods => { 'Object' => %w[cmethod] },
      :instance_methods => { 'Object' => %w[method] },
      :modules => %w[Object Object::SubClass],
      :ancestors => {
        'Object' => %w[Object],
        'Object::SubClass' => %w[Incl Object],
      },
    }
    open File.join(@tmpdir, 'cache.ri'), 'rb' do |io|
      cache = Marshal.load io.read
      assert_equal expected, cache
    end
  end
  # Saving the same method twice must not duplicate its cache entry.
  def test_save_cache_duplicate_methods
    @s.save_method @klass, @meth
    @s.save_method @klass, @meth
    @s.save_cache
    assert_cache({ 'Object' => %w[method] }, {}, {}, [])
  end
  def test_save_class
    @s.save_class @klass
    assert_directory File.join(@tmpdir, 'Object')
    assert_file File.join(@tmpdir, 'Object', 'cdesc-Object.ri')
    assert_cache({}, {}, { 'Object' => ['attr_accessor attr'] }, %w[Object],
                 'Object' => %w[Object])
    assert_equal @klass, @s.load_class('Object')
  end
  # A class with no superclass (BasicObject-like) records no ancestors.
  def test_save_class_basic_object
    @klass.instance_variable_set :@superclass, nil
    @s.save_class @klass
    assert_directory File.join(@tmpdir, 'Object')
    assert_file File.join(@tmpdir, 'Object', 'cdesc-Object.ri')
    assert_cache({}, {}, { 'Object' => ['attr_accessor attr'] }, %w[Object],
                 'Object' => %w[])
    assert_equal @klass, @s.load_class('Object')
  end
  # Saving a class over an existing one merges the comments — the loaded
  # markup document contains both the old and the new paragraph.
  def test_save_class_merge
    @s.save_class @klass
    klass = RDoc::NormalClass.new 'Object'
    klass.comment = 'new class'
    s = RDoc::RI::Store.new @tmpdir
    s.save_class klass
    s = RDoc::RI::Store.new @tmpdir
    document = @RM::Document.new(
      @RM::Paragraph.new('original'),
      @RM::Paragraph.new('new class'))
    assert_equal document, s.load_class('Object').comment
  end
  def test_save_class_methods
    @s.save_class @klass
    assert_directory File.join(@tmpdir, 'Object')
    assert_file File.join(@tmpdir, 'Object', 'cdesc-Object.ri')
    assert_cache({}, {}, { 'Object' => ['attr_accessor attr'] }, %w[Object],
                 'Object' => %w[Object])
    assert_equal @klass, @s.load_class('Object')
  end
  # Nested classes get nested directories and record include ancestors.
  def test_save_class_nested
    @s.save_class @nest_klass
    assert_directory File.join(@tmpdir, 'Object', 'SubClass')
    assert_file File.join(@tmpdir, 'Object', 'SubClass', 'cdesc-SubClass.ri')
    assert_cache({}, {}, {}, %w[Object::SubClass],
                 'Object::SubClass' => %w[Incl Object])
  end
  def test_save_method
    @s.save_method @klass, @meth
    assert_directory File.join(@tmpdir, 'Object')
    assert_file File.join(@tmpdir, 'Object', 'method-i.ri')
    assert_cache({ 'Object' => %w[method] }, {}, {}, [])
    assert_equal @meth, @s.load_method('Object', '#method')
  end
  def test_save_method_nested
    @s.save_method @nest_klass, @nest_meth
    assert_directory File.join(@tmpdir, 'Object', 'SubClass')
    assert_file File.join(@tmpdir, 'Object', 'SubClass', 'method-i.ri')
    assert_cache({ 'Object::SubClass' => %w[method] }, {}, {}, [])
  end
end
| racker/omnibus | source/ruby-1.9.2-p180/test/rdoc/test_rdoc_ri_store.rb | Ruby | apache-2.0 | 7,781 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.trogdor.rest;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.kafka.trogdor.task.TaskSpec;
/**
 * The state for a task which is being stopped on the coordinator.
 *
 * <p>Instances are immutable and (de)serialized by Jackson via the
 * annotated constructor and accessors.
 */
public class TaskStopping extends TaskState {
    /**
     * The time on the agent when the task was received.
     * NOTE(review): wording kept from the original; for a stopping-state
     * object this presumably is the startedMs carried over from the running
     * state — confirm against the coordinator's state transitions.
     */
    private final long startedMs;

    /**
     * Jackson creator.
     *
     * @param spec      the task specification (passed to {@link TaskState})
     * @param startedMs millisecond timestamp recorded when the task started
     * @param status    opaque task status JSON (passed to {@link TaskState})
     */
    @JsonCreator
    public TaskStopping(@JsonProperty("spec") TaskSpec spec,
            @JsonProperty("startedMs") long startedMs,
            @JsonProperty("status") JsonNode status) {
        super(spec, status);
        this.startedMs = startedMs;
    }

    /** Returns the millisecond timestamp stored at construction. */
    @JsonProperty
    public long startedMs() {
        return startedMs;
    }

    /** This state always reports {@link TaskStateType#STOPPING}. */
    @Override
    public TaskStateType stateType() {
        return TaskStateType.STOPPING;
    }
}
| TiVo/kafka | trogdor/src/main/java/org/apache/kafka/trogdor/rest/TaskStopping.java | Java | apache-2.0 | 1,713 |
// +build go1.9
// Copyright 2019 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This code was auto-generated by:
// github.com/Azure/azure-sdk-for-go/tools/profileBuilder
package servicefabric
import original "github.com/Azure/azure-sdk-for-go/services/servicefabric/6.5/servicefabric"
// The declarations below re-export the servicefabric 6.5 API surface under
// this profile package: each `type X = original.X` is a Go 1.9 type alias
// and each const block mirrors the corresponding enum values, so code built
// against the profile resolves to the versioned package's identifiers.
// This file is auto-generated by profileBuilder; do not edit by hand.
const (
	DefaultBaseURI = original.DefaultBaseURI
)
type ApplicationDefinitionKind = original.ApplicationDefinitionKind
const (
	Compose                             ApplicationDefinitionKind = original.Compose
	Invalid                             ApplicationDefinitionKind = original.Invalid
	ServiceFabricApplicationDescription ApplicationDefinitionKind = original.ServiceFabricApplicationDescription
)
type ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicy
const (
	ApplicationPackageCleanupPolicyAutomatic ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyAutomatic
	ApplicationPackageCleanupPolicyDefault   ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyDefault
	ApplicationPackageCleanupPolicyInvalid   ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyInvalid
	ApplicationPackageCleanupPolicyManual    ApplicationPackageCleanupPolicy = original.ApplicationPackageCleanupPolicyManual
)
type ApplicationScopedVolumeKind = original.ApplicationScopedVolumeKind
const (
	ServiceFabricVolumeDisk ApplicationScopedVolumeKind = original.ServiceFabricVolumeDisk
)
type ApplicationStatus = original.ApplicationStatus
const (
	ApplicationStatusCreating  ApplicationStatus = original.ApplicationStatusCreating
	ApplicationStatusDeleting  ApplicationStatus = original.ApplicationStatusDeleting
	ApplicationStatusFailed    ApplicationStatus = original.ApplicationStatusFailed
	ApplicationStatusInvalid   ApplicationStatus = original.ApplicationStatusInvalid
	ApplicationStatusReady     ApplicationStatus = original.ApplicationStatusReady
	ApplicationStatusUpgrading ApplicationStatus = original.ApplicationStatusUpgrading
)
type ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKind
const (
	ApplicationTypeDefinitionKindCompose                         ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKindCompose
	ApplicationTypeDefinitionKindInvalid                         ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKindInvalid
	ApplicationTypeDefinitionKindServiceFabricApplicationPackage ApplicationTypeDefinitionKind = original.ApplicationTypeDefinitionKindServiceFabricApplicationPackage
)
type ApplicationTypeStatus = original.ApplicationTypeStatus
const (
	ApplicationTypeStatusAvailable      ApplicationTypeStatus = original.ApplicationTypeStatusAvailable
	ApplicationTypeStatusFailed         ApplicationTypeStatus = original.ApplicationTypeStatusFailed
	ApplicationTypeStatusInvalid        ApplicationTypeStatus = original.ApplicationTypeStatusInvalid
	ApplicationTypeStatusProvisioning   ApplicationTypeStatus = original.ApplicationTypeStatusProvisioning
	ApplicationTypeStatusUnprovisioning ApplicationTypeStatus = original.ApplicationTypeStatusUnprovisioning
)
type AutoScalingMechanismKind = original.AutoScalingMechanismKind
const (
	AddRemoveReplica AutoScalingMechanismKind = original.AddRemoveReplica
)
type AutoScalingMetricKind = original.AutoScalingMetricKind
const (
	Resource AutoScalingMetricKind = original.Resource
)
type AutoScalingResourceMetricName = original.AutoScalingResourceMetricName
const (
	CPU        AutoScalingResourceMetricName = original.CPU
	MemoryInGB AutoScalingResourceMetricName = original.MemoryInGB
)
type AutoScalingTriggerKind = original.AutoScalingTriggerKind
const (
	AverageLoad AutoScalingTriggerKind = original.AverageLoad
)
type BackupEntityKind = original.BackupEntityKind
const (
	BackupEntityKindApplication BackupEntityKind = original.BackupEntityKindApplication
	BackupEntityKindInvalid     BackupEntityKind = original.BackupEntityKindInvalid
	BackupEntityKindPartition   BackupEntityKind = original.BackupEntityKindPartition
	BackupEntityKindService     BackupEntityKind = original.BackupEntityKindService
)
type BackupPolicyScope = original.BackupPolicyScope
const (
	BackupPolicyScopeApplication BackupPolicyScope = original.BackupPolicyScopeApplication
	BackupPolicyScopeInvalid     BackupPolicyScope = original.BackupPolicyScopeInvalid
	BackupPolicyScopePartition   BackupPolicyScope = original.BackupPolicyScopePartition
	BackupPolicyScopeService     BackupPolicyScope = original.BackupPolicyScopeService
)
type BackupScheduleFrequencyType = original.BackupScheduleFrequencyType
const (
	BackupScheduleFrequencyTypeDaily   BackupScheduleFrequencyType = original.BackupScheduleFrequencyTypeDaily
	BackupScheduleFrequencyTypeInvalid BackupScheduleFrequencyType = original.BackupScheduleFrequencyTypeInvalid
	BackupScheduleFrequencyTypeWeekly  BackupScheduleFrequencyType = original.BackupScheduleFrequencyTypeWeekly
)
type BackupScheduleKind = original.BackupScheduleKind
const (
	BackupScheduleKindFrequencyBased BackupScheduleKind = original.BackupScheduleKindFrequencyBased
	BackupScheduleKindInvalid        BackupScheduleKind = original.BackupScheduleKindInvalid
	BackupScheduleKindTimeBased      BackupScheduleKind = original.BackupScheduleKindTimeBased
)
type BackupState = original.BackupState
const (
	BackupStateAccepted         BackupState = original.BackupStateAccepted
	BackupStateBackupInProgress BackupState = original.BackupStateBackupInProgress
	BackupStateFailure          BackupState = original.BackupStateFailure
	BackupStateInvalid          BackupState = original.BackupStateInvalid
	BackupStateSuccess          BackupState = original.BackupStateSuccess
	BackupStateTimeout          BackupState = original.BackupStateTimeout
)
type BackupStorageKind = original.BackupStorageKind
const (
	BackupStorageKindAzureBlobStore BackupStorageKind = original.BackupStorageKindAzureBlobStore
	BackupStorageKindFileShare      BackupStorageKind = original.BackupStorageKindFileShare
	BackupStorageKindInvalid        BackupStorageKind = original.BackupStorageKindInvalid
)
type BackupSuspensionScope = original.BackupSuspensionScope
const (
	BackupSuspensionScopeApplication BackupSuspensionScope = original.BackupSuspensionScopeApplication
	BackupSuspensionScopeInvalid     BackupSuspensionScope = original.BackupSuspensionScopeInvalid
	BackupSuspensionScopePartition   BackupSuspensionScope = original.BackupSuspensionScopePartition
	BackupSuspensionScopeService     BackupSuspensionScope = original.BackupSuspensionScopeService
)
type BackupType = original.BackupType
const (
	BackupTypeFull        BackupType = original.BackupTypeFull
	BackupTypeIncremental BackupType = original.BackupTypeIncremental
	BackupTypeInvalid     BackupType = original.BackupTypeInvalid
)
type ChaosEventKind = original.ChaosEventKind
const (
	ChaosEventKindExecutingFaults  ChaosEventKind = original.ChaosEventKindExecutingFaults
	ChaosEventKindInvalid          ChaosEventKind = original.ChaosEventKindInvalid
	ChaosEventKindStarted          ChaosEventKind = original.ChaosEventKindStarted
	ChaosEventKindStopped          ChaosEventKind = original.ChaosEventKindStopped
	ChaosEventKindTestError        ChaosEventKind = original.ChaosEventKindTestError
	ChaosEventKindValidationFailed ChaosEventKind = original.ChaosEventKindValidationFailed
	ChaosEventKindWaiting          ChaosEventKind = original.ChaosEventKindWaiting
)
type ChaosScheduleStatus = original.ChaosScheduleStatus
const (
	ChaosScheduleStatusActive  ChaosScheduleStatus = original.ChaosScheduleStatusActive
	ChaosScheduleStatusExpired ChaosScheduleStatus = original.ChaosScheduleStatusExpired
	ChaosScheduleStatusInvalid ChaosScheduleStatus = original.ChaosScheduleStatusInvalid
	ChaosScheduleStatusPending ChaosScheduleStatus = original.ChaosScheduleStatusPending
	ChaosScheduleStatusStopped ChaosScheduleStatus = original.ChaosScheduleStatusStopped
)
type ChaosStatus = original.ChaosStatus
const (
	ChaosStatusInvalid ChaosStatus = original.ChaosStatusInvalid
	ChaosStatusRunning ChaosStatus = original.ChaosStatusRunning
	ChaosStatusStopped ChaosStatus = original.ChaosStatusStopped
)
type ComposeDeploymentStatus = original.ComposeDeploymentStatus
const (
	ComposeDeploymentStatusCreating       ComposeDeploymentStatus = original.ComposeDeploymentStatusCreating
	ComposeDeploymentStatusDeleting       ComposeDeploymentStatus = original.ComposeDeploymentStatusDeleting
	ComposeDeploymentStatusFailed         ComposeDeploymentStatus = original.ComposeDeploymentStatusFailed
	ComposeDeploymentStatusInvalid        ComposeDeploymentStatus = original.ComposeDeploymentStatusInvalid
	ComposeDeploymentStatusProvisioning   ComposeDeploymentStatus = original.ComposeDeploymentStatusProvisioning
	ComposeDeploymentStatusReady          ComposeDeploymentStatus = original.ComposeDeploymentStatusReady
	ComposeDeploymentStatusUnprovisioning ComposeDeploymentStatus = original.ComposeDeploymentStatusUnprovisioning
	ComposeDeploymentStatusUpgrading      ComposeDeploymentStatus = original.ComposeDeploymentStatusUpgrading
)
type ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeState
const (
	ComposeDeploymentUpgradeStateFailed                    ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateFailed
	ComposeDeploymentUpgradeStateInvalid                   ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateInvalid
	ComposeDeploymentUpgradeStateProvisioningTarget        ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateProvisioningTarget
	ComposeDeploymentUpgradeStateRollingBackCompleted      ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingBackCompleted
	ComposeDeploymentUpgradeStateRollingBackInProgress     ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingBackInProgress
	ComposeDeploymentUpgradeStateRollingForwardCompleted   ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingForwardCompleted
	ComposeDeploymentUpgradeStateRollingForwardInProgress  ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingForwardInProgress
	ComposeDeploymentUpgradeStateRollingForwardPending     ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateRollingForwardPending
	ComposeDeploymentUpgradeStateUnprovisioningCurrent     ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateUnprovisioningCurrent
	ComposeDeploymentUpgradeStateUnprovisioningTarget      ComposeDeploymentUpgradeState = original.ComposeDeploymentUpgradeStateUnprovisioningTarget
)
type CreateFabricDump = original.CreateFabricDump
const (
	False CreateFabricDump = original.False
	True  CreateFabricDump = original.True
)
type DataLossMode = original.DataLossMode
const (
	DataLossModeFullDataLoss    DataLossMode = original.DataLossModeFullDataLoss
	DataLossModeInvalid         DataLossMode = original.DataLossModeInvalid
	DataLossModePartialDataLoss DataLossMode = original.DataLossModePartialDataLoss
)
type DayOfWeek = original.DayOfWeek
const (
	Friday    DayOfWeek = original.Friday
	Monday    DayOfWeek = original.Monday
	Saturday  DayOfWeek = original.Saturday
	Sunday    DayOfWeek = original.Sunday
	Thursday  DayOfWeek = original.Thursday
	Tuesday   DayOfWeek = original.Tuesday
	Wednesday DayOfWeek = original.Wednesday
)
type DeactivationIntent = original.DeactivationIntent
const (
	Pause      DeactivationIntent = original.Pause
	RemoveData DeactivationIntent = original.RemoveData
	Restart    DeactivationIntent = original.Restart
)
type DeployedApplicationStatus = original.DeployedApplicationStatus
const (
	DeployedApplicationStatusActivating   DeployedApplicationStatus = original.DeployedApplicationStatusActivating
	DeployedApplicationStatusActive       DeployedApplicationStatus = original.DeployedApplicationStatusActive
	DeployedApplicationStatusDeactivating DeployedApplicationStatus = original.DeployedApplicationStatusDeactivating
	DeployedApplicationStatusDownloading  DeployedApplicationStatus = original.DeployedApplicationStatusDownloading
	DeployedApplicationStatusInvalid      DeployedApplicationStatus = original.DeployedApplicationStatusInvalid
	DeployedApplicationStatusUpgrading    DeployedApplicationStatus = original.DeployedApplicationStatusUpgrading
)
type DeploymentStatus = original.DeploymentStatus
const (
	DeploymentStatusActivating   DeploymentStatus = original.DeploymentStatusActivating
	DeploymentStatusActive       DeploymentStatus = original.DeploymentStatusActive
	DeploymentStatusDeactivating DeploymentStatus = original.DeploymentStatusDeactivating
	DeploymentStatusDownloading  DeploymentStatus = original.DeploymentStatusDownloading
	DeploymentStatusInvalid      DeploymentStatus = original.DeploymentStatusInvalid
	DeploymentStatusUpgrading    DeploymentStatus = original.DeploymentStatusUpgrading
)
type DiagnosticsSinkKind = original.DiagnosticsSinkKind
const (
	DiagnosticsSinkKindAzureInternalMonitoringPipeline DiagnosticsSinkKind = original.DiagnosticsSinkKindAzureInternalMonitoringPipeline
	DiagnosticsSinkKindInvalid                         DiagnosticsSinkKind = original.DiagnosticsSinkKindInvalid
)
type EntityKind = original.EntityKind
const (
	EntityKindApplication            EntityKind = original.EntityKindApplication
	EntityKindCluster                EntityKind = original.EntityKindCluster
	EntityKindDeployedApplication    EntityKind = original.EntityKindDeployedApplication
	EntityKindDeployedServicePackage EntityKind = original.EntityKindDeployedServicePackage
	EntityKindInvalid                EntityKind = original.EntityKindInvalid
	EntityKindNode                   EntityKind = original.EntityKindNode
	EntityKindPartition              EntityKind = original.EntityKindPartition
	EntityKindReplica                EntityKind = original.EntityKindReplica
	EntityKindService                EntityKind = original.EntityKindService
)
type EntityKindBasicBackupEntity = original.EntityKindBasicBackupEntity
const (
	EntityKindApplication1 EntityKindBasicBackupEntity = original.EntityKindApplication1
	EntityKindBackupEntity EntityKindBasicBackupEntity = original.EntityKindBackupEntity
	EntityKindPartition1   EntityKindBasicBackupEntity = original.EntityKindPartition1
	EntityKindService1     EntityKindBasicBackupEntity = original.EntityKindService1
)
type EntryPointStatus = original.EntryPointStatus
const (
	EntryPointStatusInvalid  EntryPointStatus = original.EntryPointStatusInvalid
	EntryPointStatusPending  EntryPointStatus = original.EntryPointStatusPending
	EntryPointStatusStarted  EntryPointStatus = original.EntryPointStatusStarted
	EntryPointStatusStarting EntryPointStatus = original.EntryPointStatusStarting
	EntryPointStatusStopped  EntryPointStatus = original.EntryPointStatusStopped
	EntryPointStatusStopping EntryPointStatus = original.EntryPointStatusStopping
)
// FabricErrorCodes is an alias for original.FabricErrorCodes. The const block
// below re-exports every error-code value from the aliased package verbatim;
// see that package for the meaning of each individual code.
type FabricErrorCodes = original.FabricErrorCodes
const (
EABORT FabricErrorCodes = original.EABORT
EFAIL FabricErrorCodes = original.EFAIL
EINVALIDARG FabricErrorCodes = original.EINVALIDARG
FABRICEAPPLICATIONALREADYEXISTS FabricErrorCodes = original.FABRICEAPPLICATIONALREADYEXISTS
FABRICEAPPLICATIONALREADYINTARGETVERSION FabricErrorCodes = original.FABRICEAPPLICATIONALREADYINTARGETVERSION
FABRICEAPPLICATIONNOTFOUND FabricErrorCodes = original.FABRICEAPPLICATIONNOTFOUND
FABRICEAPPLICATIONNOTUPGRADING FabricErrorCodes = original.FABRICEAPPLICATIONNOTUPGRADING
FABRICEAPPLICATIONTYPEALREADYEXISTS FabricErrorCodes = original.FABRICEAPPLICATIONTYPEALREADYEXISTS
FABRICEAPPLICATIONTYPEINUSE FabricErrorCodes = original.FABRICEAPPLICATIONTYPEINUSE
FABRICEAPPLICATIONTYPENOTFOUND FabricErrorCodes = original.FABRICEAPPLICATIONTYPENOTFOUND
FABRICEAPPLICATIONTYPEPROVISIONINPROGRESS FabricErrorCodes = original.FABRICEAPPLICATIONTYPEPROVISIONINPROGRESS
FABRICEAPPLICATIONUPGRADEINPROGRESS FabricErrorCodes = original.FABRICEAPPLICATIONUPGRADEINPROGRESS
FABRICEAPPLICATIONUPGRADEVALIDATIONERROR FabricErrorCodes = original.FABRICEAPPLICATIONUPGRADEVALIDATIONERROR
FABRICEBACKUPINPROGRESS FabricErrorCodes = original.FABRICEBACKUPINPROGRESS
FABRICEBACKUPISENABLED FabricErrorCodes = original.FABRICEBACKUPISENABLED
FABRICEBACKUPNOTENABLED FabricErrorCodes = original.FABRICEBACKUPNOTENABLED
FABRICEBACKUPPOLICYALREADYEXISTING FabricErrorCodes = original.FABRICEBACKUPPOLICYALREADYEXISTING
FABRICEBACKUPPOLICYNOTEXISTING FabricErrorCodes = original.FABRICEBACKUPPOLICYNOTEXISTING
FABRICECOMMUNICATIONERROR FabricErrorCodes = original.FABRICECOMMUNICATIONERROR
FABRICECONFIGURATIONPARAMETERNOTFOUND FabricErrorCodes = original.FABRICECONFIGURATIONPARAMETERNOTFOUND
FABRICECONFIGURATIONSECTIONNOTFOUND FabricErrorCodes = original.FABRICECONFIGURATIONSECTIONNOTFOUND
FABRICEDIRECTORYNOTFOUND FabricErrorCodes = original.FABRICEDIRECTORYNOTFOUND
FABRICEENUMERATIONCOMPLETED FabricErrorCodes = original.FABRICEENUMERATIONCOMPLETED
FABRICEFABRICALREADYINTARGETVERSION FabricErrorCodes = original.FABRICEFABRICALREADYINTARGETVERSION
FABRICEFABRICNOTUPGRADING FabricErrorCodes = original.FABRICEFABRICNOTUPGRADING
FABRICEFABRICUPGRADEINPROGRESS FabricErrorCodes = original.FABRICEFABRICUPGRADEINPROGRESS
FABRICEFABRICUPGRADEVALIDATIONERROR FabricErrorCodes = original.FABRICEFABRICUPGRADEVALIDATIONERROR
FABRICEFABRICVERSIONALREADYEXISTS FabricErrorCodes = original.FABRICEFABRICVERSIONALREADYEXISTS
FABRICEFABRICVERSIONINUSE FabricErrorCodes = original.FABRICEFABRICVERSIONINUSE
FABRICEFABRICVERSIONNOTFOUND FabricErrorCodes = original.FABRICEFABRICVERSIONNOTFOUND
FABRICEFAULTANALYSISSERVICENOTEXISTING FabricErrorCodes = original.FABRICEFAULTANALYSISSERVICENOTEXISTING
FABRICEFILENOTFOUND FabricErrorCodes = original.FABRICEFILENOTFOUND
FABRICEHEALTHENTITYNOTFOUND FabricErrorCodes = original.FABRICEHEALTHENTITYNOTFOUND
FABRICEHEALTHSTALEREPORT FabricErrorCodes = original.FABRICEHEALTHSTALEREPORT
FABRICEIMAGEBUILDERRESERVEDDIRECTORYERROR FabricErrorCodes = original.FABRICEIMAGEBUILDERRESERVEDDIRECTORYERROR
FABRICEIMAGEBUILDERVALIDATIONERROR FabricErrorCodes = original.FABRICEIMAGEBUILDERVALIDATIONERROR
FABRICEINSTANCEIDMISMATCH FabricErrorCodes = original.FABRICEINSTANCEIDMISMATCH
FABRICEINVALIDADDRESS FabricErrorCodes = original.FABRICEINVALIDADDRESS
FABRICEINVALIDATOMICGROUP FabricErrorCodes = original.FABRICEINVALIDATOMICGROUP
FABRICEINVALIDCONFIGURATION FabricErrorCodes = original.FABRICEINVALIDCONFIGURATION
FABRICEINVALIDFORSTATELESSSERVICES FabricErrorCodes = original.FABRICEINVALIDFORSTATELESSSERVICES
FABRICEINVALIDNAMEURI FabricErrorCodes = original.FABRICEINVALIDNAMEURI
FABRICEINVALIDPARTITIONKEY FabricErrorCodes = original.FABRICEINVALIDPARTITIONKEY
FABRICEINVALIDSERVICESCALINGPOLICY FabricErrorCodes = original.FABRICEINVALIDSERVICESCALINGPOLICY
FABRICEKEYNOTFOUND FabricErrorCodes = original.FABRICEKEYNOTFOUND
FABRICEKEYTOOLARGE FabricErrorCodes = original.FABRICEKEYTOOLARGE
FABRICENAMEALREADYEXISTS FabricErrorCodes = original.FABRICENAMEALREADYEXISTS
FABRICENAMEDOESNOTEXIST FabricErrorCodes = original.FABRICENAMEDOESNOTEXIST
FABRICENAMENOTEMPTY FabricErrorCodes = original.FABRICENAMENOTEMPTY
FABRICENODEHASNOTSTOPPEDYET FabricErrorCodes = original.FABRICENODEHASNOTSTOPPEDYET
FABRICENODEISUP FabricErrorCodes = original.FABRICENODEISUP
FABRICENODENOTFOUND FabricErrorCodes = original.FABRICENODENOTFOUND
FABRICENOTPRIMARY FabricErrorCodes = original.FABRICENOTPRIMARY
FABRICENOTREADY FabricErrorCodes = original.FABRICENOTREADY
FABRICENOWRITEQUORUM FabricErrorCodes = original.FABRICENOWRITEQUORUM
FABRICEOPERATIONNOTCOMPLETE FabricErrorCodes = original.FABRICEOPERATIONNOTCOMPLETE
FABRICEPARTITIONNOTFOUND FabricErrorCodes = original.FABRICEPARTITIONNOTFOUND
FABRICEPATHTOOLONG FabricErrorCodes = original.FABRICEPATHTOOLONG
FABRICEPROPERTYCHECKFAILED FabricErrorCodes = original.FABRICEPROPERTYCHECKFAILED
FABRICEPROPERTYDOESNOTEXIST FabricErrorCodes = original.FABRICEPROPERTYDOESNOTEXIST
FABRICERECONFIGURATIONPENDING FabricErrorCodes = original.FABRICERECONFIGURATIONPENDING
FABRICEREPLICADOESNOTEXIST FabricErrorCodes = original.FABRICEREPLICADOESNOTEXIST
FABRICERESTOREINPROGRESS FabricErrorCodes = original.FABRICERESTOREINPROGRESS
FABRICERESTORESOURCETARGETPARTITIONMISMATCH FabricErrorCodes = original.FABRICERESTORESOURCETARGETPARTITIONMISMATCH
FABRICESEQUENCENUMBERCHECKFAILED FabricErrorCodes = original.FABRICESEQUENCENUMBERCHECKFAILED
FABRICESERVICEAFFINITYCHAINNOTSUPPORTED FabricErrorCodes = original.FABRICESERVICEAFFINITYCHAINNOTSUPPORTED
FABRICESERVICEALREADYEXISTS FabricErrorCodes = original.FABRICESERVICEALREADYEXISTS
FABRICESERVICEDOESNOTEXIST FabricErrorCodes = original.FABRICESERVICEDOESNOTEXIST
FABRICESERVICEGROUPALREADYEXISTS FabricErrorCodes = original.FABRICESERVICEGROUPALREADYEXISTS
FABRICESERVICEGROUPDOESNOTEXIST FabricErrorCodes = original.FABRICESERVICEGROUPDOESNOTEXIST
FABRICESERVICEMANIFESTNOTFOUND FabricErrorCodes = original.FABRICESERVICEMANIFESTNOTFOUND
FABRICESERVICEMETADATAMISMATCH FabricErrorCodes = original.FABRICESERVICEMETADATAMISMATCH
FABRICESERVICEOFFLINE FabricErrorCodes = original.FABRICESERVICEOFFLINE
FABRICESERVICETYPEMISMATCH FabricErrorCodes = original.FABRICESERVICETYPEMISMATCH
FABRICESERVICETYPENOTFOUND FabricErrorCodes = original.FABRICESERVICETYPENOTFOUND
FABRICESERVICETYPETEMPLATENOTFOUND FabricErrorCodes = original.FABRICESERVICETYPETEMPLATENOTFOUND
FABRICESINGLEINSTANCEAPPLICATIONALREADYEXISTS FabricErrorCodes = original.FABRICESINGLEINSTANCEAPPLICATIONALREADYEXISTS
FABRICESINGLEINSTANCEAPPLICATIONNOTFOUND FabricErrorCodes = original.FABRICESINGLEINSTANCEAPPLICATIONNOTFOUND
FABRICETIMEOUT FabricErrorCodes = original.FABRICETIMEOUT
FABRICEVALUEEMPTY FabricErrorCodes = original.FABRICEVALUEEMPTY
FABRICEVALUETOOLARGE FabricErrorCodes = original.FABRICEVALUETOOLARGE
FABRICEVOLUMEALREADYEXISTS FabricErrorCodes = original.FABRICEVOLUMEALREADYEXISTS
FABRICEVOLUMENOTFOUND FabricErrorCodes = original.FABRICEVOLUMENOTFOUND
SerializationError FabricErrorCodes = original.SerializationError
)
// FabricEventKind is an alias for original.FabricEventKind. The const block
// below re-exports every event-kind value from the aliased package verbatim.
type FabricEventKind = original.FabricEventKind
const (
FabricEventKindApplicationContainerInstanceExited FabricEventKind = original.FabricEventKindApplicationContainerInstanceExited
FabricEventKindApplicationCreated FabricEventKind = original.FabricEventKindApplicationCreated
FabricEventKindApplicationDeleted FabricEventKind = original.FabricEventKindApplicationDeleted
FabricEventKindApplicationEvent FabricEventKind = original.FabricEventKindApplicationEvent
FabricEventKindApplicationHealthReportExpired FabricEventKind = original.FabricEventKindApplicationHealthReportExpired
FabricEventKindApplicationNewHealthReport FabricEventKind = original.FabricEventKindApplicationNewHealthReport
FabricEventKindApplicationProcessExited FabricEventKind = original.FabricEventKindApplicationProcessExited
FabricEventKindApplicationUpgradeCompleted FabricEventKind = original.FabricEventKindApplicationUpgradeCompleted
FabricEventKindApplicationUpgradeDomainCompleted FabricEventKind = original.FabricEventKindApplicationUpgradeDomainCompleted
FabricEventKindApplicationUpgradeRollbackCompleted FabricEventKind = original.FabricEventKindApplicationUpgradeRollbackCompleted
FabricEventKindApplicationUpgradeRollbackStarted FabricEventKind = original.FabricEventKindApplicationUpgradeRollbackStarted
FabricEventKindApplicationUpgradeStarted FabricEventKind = original.FabricEventKindApplicationUpgradeStarted
FabricEventKindChaosCodePackageRestartScheduled FabricEventKind = original.FabricEventKindChaosCodePackageRestartScheduled
FabricEventKindChaosNodeRestartScheduled FabricEventKind = original.FabricEventKindChaosNodeRestartScheduled
FabricEventKindChaosPartitionPrimaryMoveScheduled FabricEventKind = original.FabricEventKindChaosPartitionPrimaryMoveScheduled
FabricEventKindChaosPartitionSecondaryMoveScheduled FabricEventKind = original.FabricEventKindChaosPartitionSecondaryMoveScheduled
FabricEventKindChaosReplicaRemovalScheduled FabricEventKind = original.FabricEventKindChaosReplicaRemovalScheduled
FabricEventKindChaosReplicaRestartScheduled FabricEventKind = original.FabricEventKindChaosReplicaRestartScheduled
FabricEventKindChaosStarted FabricEventKind = original.FabricEventKindChaosStarted
FabricEventKindChaosStopped FabricEventKind = original.FabricEventKindChaosStopped
FabricEventKindClusterEvent FabricEventKind = original.FabricEventKindClusterEvent
FabricEventKindClusterHealthReportExpired FabricEventKind = original.FabricEventKindClusterHealthReportExpired
FabricEventKindClusterNewHealthReport FabricEventKind = original.FabricEventKindClusterNewHealthReport
FabricEventKindClusterUpgradeCompleted FabricEventKind = original.FabricEventKindClusterUpgradeCompleted
FabricEventKindClusterUpgradeDomainCompleted FabricEventKind = original.FabricEventKindClusterUpgradeDomainCompleted
FabricEventKindClusterUpgradeRollbackCompleted FabricEventKind = original.FabricEventKindClusterUpgradeRollbackCompleted
FabricEventKindClusterUpgradeRollbackStarted FabricEventKind = original.FabricEventKindClusterUpgradeRollbackStarted
FabricEventKindClusterUpgradeStarted FabricEventKind = original.FabricEventKindClusterUpgradeStarted
FabricEventKindContainerInstanceEvent FabricEventKind = original.FabricEventKindContainerInstanceEvent
FabricEventKindDeployedApplicationHealthReportExpired FabricEventKind = original.FabricEventKindDeployedApplicationHealthReportExpired
FabricEventKindDeployedApplicationNewHealthReport FabricEventKind = original.FabricEventKindDeployedApplicationNewHealthReport
FabricEventKindDeployedServicePackageHealthReportExpired FabricEventKind = original.FabricEventKindDeployedServicePackageHealthReportExpired
FabricEventKindDeployedServicePackageNewHealthReport FabricEventKind = original.FabricEventKindDeployedServicePackageNewHealthReport
FabricEventKindNodeAborted FabricEventKind = original.FabricEventKindNodeAborted
FabricEventKindNodeAddedToCluster FabricEventKind = original.FabricEventKindNodeAddedToCluster
FabricEventKindNodeClosed FabricEventKind = original.FabricEventKindNodeClosed
FabricEventKindNodeDeactivateCompleted FabricEventKind = original.FabricEventKindNodeDeactivateCompleted
FabricEventKindNodeDeactivateStarted FabricEventKind = original.FabricEventKindNodeDeactivateStarted
FabricEventKindNodeDown FabricEventKind = original.FabricEventKindNodeDown
FabricEventKindNodeEvent FabricEventKind = original.FabricEventKindNodeEvent
FabricEventKindNodeHealthReportExpired FabricEventKind = original.FabricEventKindNodeHealthReportExpired
FabricEventKindNodeNewHealthReport FabricEventKind = original.FabricEventKindNodeNewHealthReport
FabricEventKindNodeOpenFailed FabricEventKind = original.FabricEventKindNodeOpenFailed
FabricEventKindNodeOpenSucceeded FabricEventKind = original.FabricEventKindNodeOpenSucceeded
FabricEventKindNodeRemovedFromCluster FabricEventKind = original.FabricEventKindNodeRemovedFromCluster
FabricEventKindNodeUp FabricEventKind = original.FabricEventKindNodeUp
FabricEventKindPartitionAnalysisEvent FabricEventKind = original.FabricEventKindPartitionAnalysisEvent
FabricEventKindPartitionEvent FabricEventKind = original.FabricEventKindPartitionEvent
FabricEventKindPartitionHealthReportExpired FabricEventKind = original.FabricEventKindPartitionHealthReportExpired
FabricEventKindPartitionNewHealthReport FabricEventKind = original.FabricEventKindPartitionNewHealthReport
FabricEventKindPartitionPrimaryMoveAnalysis FabricEventKind = original.FabricEventKindPartitionPrimaryMoveAnalysis
FabricEventKindPartitionReconfigured FabricEventKind = original.FabricEventKindPartitionReconfigured
FabricEventKindReplicaEvent FabricEventKind = original.FabricEventKindReplicaEvent
FabricEventKindServiceCreated FabricEventKind = original.FabricEventKindServiceCreated
FabricEventKindServiceDeleted FabricEventKind = original.FabricEventKindServiceDeleted
FabricEventKindServiceEvent FabricEventKind = original.FabricEventKindServiceEvent
FabricEventKindServiceHealthReportExpired FabricEventKind = original.FabricEventKindServiceHealthReportExpired
FabricEventKindServiceNewHealthReport FabricEventKind = original.FabricEventKindServiceNewHealthReport
FabricEventKindStatefulReplicaHealthReportExpired FabricEventKind = original.FabricEventKindStatefulReplicaHealthReportExpired
FabricEventKindStatefulReplicaNewHealthReport FabricEventKind = original.FabricEventKindStatefulReplicaNewHealthReport
FabricEventKindStatelessReplicaHealthReportExpired FabricEventKind = original.FabricEventKindStatelessReplicaHealthReportExpired
FabricEventKindStatelessReplicaNewHealthReport FabricEventKind = original.FabricEventKindStatelessReplicaNewHealthReport
)
// FabricReplicaStatus is an alias for original.FabricReplicaStatus; the
// constants below re-export its values.
type FabricReplicaStatus = original.FabricReplicaStatus
const (
FabricReplicaStatusDown FabricReplicaStatus = original.FabricReplicaStatusDown
FabricReplicaStatusInvalid FabricReplicaStatus = original.FabricReplicaStatusInvalid
FabricReplicaStatusUp FabricReplicaStatus = original.FabricReplicaStatusUp
)
// FailureAction is an alias for original.FailureAction; the constants below
// re-export its values.
type FailureAction = original.FailureAction
const (
FailureActionInvalid FailureAction = original.FailureActionInvalid
FailureActionManual FailureAction = original.FailureActionManual
FailureActionRollback FailureAction = original.FailureActionRollback
)
// FailureReason is an alias for original.FailureReason; the constants below
// re-export its values (these are unprefixed in the aliased package).
type FailureReason = original.FailureReason
const (
HealthCheck FailureReason = original.HealthCheck
Interrupted FailureReason = original.Interrupted
None FailureReason = original.None
OverallUpgradeTimeout FailureReason = original.OverallUpgradeTimeout
UpgradeDomainTimeout FailureReason = original.UpgradeDomainTimeout
)
// HeaderMatchType is an alias for original.HeaderMatchType; Exact is its only
// value re-exported here.
type HeaderMatchType = original.HeaderMatchType
const (
Exact HeaderMatchType = original.Exact
)
// HealthEvaluationKind is an alias for original.HealthEvaluationKind; the
// constants below re-export its values.
type HealthEvaluationKind = original.HealthEvaluationKind
const (
HealthEvaluationKindApplication HealthEvaluationKind = original.HealthEvaluationKindApplication
HealthEvaluationKindApplications HealthEvaluationKind = original.HealthEvaluationKindApplications
HealthEvaluationKindApplicationTypeApplications HealthEvaluationKind = original.HealthEvaluationKindApplicationTypeApplications
HealthEvaluationKindDeltaNodesCheck HealthEvaluationKind = original.HealthEvaluationKindDeltaNodesCheck
HealthEvaluationKindDeployedApplication HealthEvaluationKind = original.HealthEvaluationKindDeployedApplication
HealthEvaluationKindDeployedApplications HealthEvaluationKind = original.HealthEvaluationKindDeployedApplications
HealthEvaluationKindDeployedServicePackage HealthEvaluationKind = original.HealthEvaluationKindDeployedServicePackage
HealthEvaluationKindDeployedServicePackages HealthEvaluationKind = original.HealthEvaluationKindDeployedServicePackages
HealthEvaluationKindEvent HealthEvaluationKind = original.HealthEvaluationKindEvent
HealthEvaluationKindInvalid HealthEvaluationKind = original.HealthEvaluationKindInvalid
HealthEvaluationKindNode HealthEvaluationKind = original.HealthEvaluationKindNode
HealthEvaluationKindNodes HealthEvaluationKind = original.HealthEvaluationKindNodes
HealthEvaluationKindPartition HealthEvaluationKind = original.HealthEvaluationKindPartition
HealthEvaluationKindPartitions HealthEvaluationKind = original.HealthEvaluationKindPartitions
HealthEvaluationKindReplica HealthEvaluationKind = original.HealthEvaluationKindReplica
HealthEvaluationKindReplicas HealthEvaluationKind = original.HealthEvaluationKindReplicas
HealthEvaluationKindService HealthEvaluationKind = original.HealthEvaluationKindService
HealthEvaluationKindServices HealthEvaluationKind = original.HealthEvaluationKindServices
HealthEvaluationKindSystemApplication HealthEvaluationKind = original.HealthEvaluationKindSystemApplication
HealthEvaluationKindUpgradeDomainDeltaNodesCheck HealthEvaluationKind = original.HealthEvaluationKindUpgradeDomainDeltaNodesCheck
HealthEvaluationKindUpgradeDomainDeployedApplications HealthEvaluationKind = original.HealthEvaluationKindUpgradeDomainDeployedApplications
HealthEvaluationKindUpgradeDomainNodes HealthEvaluationKind = original.HealthEvaluationKindUpgradeDomainNodes
)
// HealthState is an alias for original.HealthState; the constants below
// re-export its values.
type HealthState = original.HealthState
const (
HealthStateError HealthState = original.HealthStateError
HealthStateInvalid HealthState = original.HealthStateInvalid
HealthStateOk HealthState = original.HealthStateOk
HealthStateUnknown HealthState = original.HealthStateUnknown
HealthStateWarning HealthState = original.HealthStateWarning
)
// HostIsolationMode is an alias for original.HostIsolationMode; the constants
// below re-export its values.
type HostIsolationMode = original.HostIsolationMode
const (
HostIsolationModeHyperV HostIsolationMode = original.HostIsolationModeHyperV
HostIsolationModeNone HostIsolationMode = original.HostIsolationModeNone
HostIsolationModeProcess HostIsolationMode = original.HostIsolationModeProcess
)
// HostType is an alias for original.HostType; the constants below re-export
// its values.
type HostType = original.HostType
const (
HostTypeContainerHost HostType = original.HostTypeContainerHost
HostTypeExeHost HostType = original.HostTypeExeHost
HostTypeInvalid HostType = original.HostTypeInvalid
)
// ImpactLevel is an alias for original.ImpactLevel; the constants below
// re-export its values.
type ImpactLevel = original.ImpactLevel
const (
ImpactLevelInvalid ImpactLevel = original.ImpactLevelInvalid
ImpactLevelNone ImpactLevel = original.ImpactLevelNone
ImpactLevelRemoveData ImpactLevel = original.ImpactLevelRemoveData
ImpactLevelRemoveNode ImpactLevel = original.ImpactLevelRemoveNode
ImpactLevelRestart ImpactLevel = original.ImpactLevelRestart
)
// Kind is an alias for original.Kind; the constants below re-export its
// values. NOTE(review): unlike the HealthEvaluationKind block above, this
// list has no KindDeployedApplications-style "UpgradeDomainDeployedApplications"
// entry — presumably the aliased package defines exactly this set; confirm
// against the generator output rather than adding values by hand.
type Kind = original.Kind
const (
KindApplication Kind = original.KindApplication
KindApplications Kind = original.KindApplications
KindApplicationTypeApplications Kind = original.KindApplicationTypeApplications
KindDeltaNodesCheck Kind = original.KindDeltaNodesCheck
KindDeployedApplication Kind = original.KindDeployedApplication
KindDeployedApplications Kind = original.KindDeployedApplications
KindDeployedServicePackage Kind = original.KindDeployedServicePackage
KindDeployedServicePackages Kind = original.KindDeployedServicePackages
KindEvent Kind = original.KindEvent
KindHealthEvaluation Kind = original.KindHealthEvaluation
KindNode Kind = original.KindNode
KindNodes Kind = original.KindNodes
KindPartition Kind = original.KindPartition
KindPartitions Kind = original.KindPartitions
KindReplica Kind = original.KindReplica
KindReplicas Kind = original.KindReplicas
KindService Kind = original.KindService
KindServices Kind = original.KindServices
KindSystemApplication Kind = original.KindSystemApplication
KindUpgradeDomainDeltaNodesCheck Kind = original.KindUpgradeDomainDeltaNodesCheck
KindUpgradeDomainNodes Kind = original.KindUpgradeDomainNodes
)
// KindBasicApplicationScopedVolumeCreationParameters is an alias for the
// discriminator type of the same name in the aliased package; the constants
// below re-export its values.
type KindBasicApplicationScopedVolumeCreationParameters = original.KindBasicApplicationScopedVolumeCreationParameters
const (
KindApplicationScopedVolumeCreationParameters KindBasicApplicationScopedVolumeCreationParameters = original.KindApplicationScopedVolumeCreationParameters
KindServiceFabricVolumeDisk KindBasicApplicationScopedVolumeCreationParameters = original.KindServiceFabricVolumeDisk
)
// KindBasicAutoScalingMechanism is an alias for
// original.KindBasicAutoScalingMechanism; the constants below re-export its
// values.
type KindBasicAutoScalingMechanism = original.KindBasicAutoScalingMechanism
const (
KindAddRemoveReplica KindBasicAutoScalingMechanism = original.KindAddRemoveReplica
KindAutoScalingMechanism KindBasicAutoScalingMechanism = original.KindAutoScalingMechanism
)
// KindBasicAutoScalingMetric is an alias for
// original.KindBasicAutoScalingMetric; the constants below re-export its
// values.
type KindBasicAutoScalingMetric = original.KindBasicAutoScalingMetric
const (
KindAutoScalingMetric KindBasicAutoScalingMetric = original.KindAutoScalingMetric
KindResource KindBasicAutoScalingMetric = original.KindResource
)
// KindBasicAutoScalingTrigger is an alias for
// original.KindBasicAutoScalingTrigger; the constants below re-export its
// values.
type KindBasicAutoScalingTrigger = original.KindBasicAutoScalingTrigger
const (
KindAutoScalingTrigger KindBasicAutoScalingTrigger = original.KindAutoScalingTrigger
KindAverageLoad KindBasicAutoScalingTrigger = original.KindAverageLoad
)
// KindBasicBackupConfigurationInfo is an alias for
// original.KindBasicBackupConfigurationInfo; the constants below re-export
// its (fully prefixed) values.
type KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfo
const (
KindBasicBackupConfigurationInfoKindApplication KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindApplication
KindBasicBackupConfigurationInfoKindBackupConfigurationInfo KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindBackupConfigurationInfo
KindBasicBackupConfigurationInfoKindPartition KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindPartition
KindBasicBackupConfigurationInfoKindService KindBasicBackupConfigurationInfo = original.KindBasicBackupConfigurationInfoKindService
)
// KindBasicChaosEvent is an alias for original.KindBasicChaosEvent; the
// constants below re-export its values.
type KindBasicChaosEvent = original.KindBasicChaosEvent
const (
KindChaosEvent KindBasicChaosEvent = original.KindChaosEvent
KindExecutingFaults KindBasicChaosEvent = original.KindExecutingFaults
KindStarted KindBasicChaosEvent = original.KindStarted
KindStopped KindBasicChaosEvent = original.KindStopped
KindTestError KindBasicChaosEvent = original.KindTestError
KindValidationFailed KindBasicChaosEvent = original.KindValidationFailed
KindWaiting KindBasicChaosEvent = original.KindWaiting
)
// KindBasicDiagnosticsSinkProperties is an alias for
// original.KindBasicDiagnosticsSinkProperties; the constants below re-export
// its values.
type KindBasicDiagnosticsSinkProperties = original.KindBasicDiagnosticsSinkProperties
const (
KindAzureInternalMonitoringPipeline KindBasicDiagnosticsSinkProperties = original.KindAzureInternalMonitoringPipeline
KindDiagnosticsSinkProperties KindBasicDiagnosticsSinkProperties = original.KindDiagnosticsSinkProperties
)
// KindBasicFabricEvent is an alias for original.KindBasicFabricEvent. The
// const block below re-exports every value from the aliased package verbatim.
type KindBasicFabricEvent = original.KindBasicFabricEvent
const (
KindApplicationContainerInstanceExited KindBasicFabricEvent = original.KindApplicationContainerInstanceExited
KindApplicationCreated KindBasicFabricEvent = original.KindApplicationCreated
KindApplicationDeleted KindBasicFabricEvent = original.KindApplicationDeleted
KindApplicationEvent KindBasicFabricEvent = original.KindApplicationEvent
KindApplicationHealthReportExpired KindBasicFabricEvent = original.KindApplicationHealthReportExpired
KindApplicationNewHealthReport KindBasicFabricEvent = original.KindApplicationNewHealthReport
KindApplicationProcessExited KindBasicFabricEvent = original.KindApplicationProcessExited
KindApplicationUpgradeCompleted KindBasicFabricEvent = original.KindApplicationUpgradeCompleted
KindApplicationUpgradeDomainCompleted KindBasicFabricEvent = original.KindApplicationUpgradeDomainCompleted
KindApplicationUpgradeRollbackCompleted KindBasicFabricEvent = original.KindApplicationUpgradeRollbackCompleted
KindApplicationUpgradeRollbackStarted KindBasicFabricEvent = original.KindApplicationUpgradeRollbackStarted
KindApplicationUpgradeStarted KindBasicFabricEvent = original.KindApplicationUpgradeStarted
KindChaosCodePackageRestartScheduled KindBasicFabricEvent = original.KindChaosCodePackageRestartScheduled
KindChaosNodeRestartScheduled KindBasicFabricEvent = original.KindChaosNodeRestartScheduled
KindChaosPartitionPrimaryMoveScheduled KindBasicFabricEvent = original.KindChaosPartitionPrimaryMoveScheduled
KindChaosPartitionSecondaryMoveScheduled KindBasicFabricEvent = original.KindChaosPartitionSecondaryMoveScheduled
KindChaosReplicaRemovalScheduled KindBasicFabricEvent = original.KindChaosReplicaRemovalScheduled
KindChaosReplicaRestartScheduled KindBasicFabricEvent = original.KindChaosReplicaRestartScheduled
KindChaosStarted KindBasicFabricEvent = original.KindChaosStarted
KindChaosStopped KindBasicFabricEvent = original.KindChaosStopped
KindClusterEvent KindBasicFabricEvent = original.KindClusterEvent
KindClusterHealthReportExpired KindBasicFabricEvent = original.KindClusterHealthReportExpired
KindClusterNewHealthReport KindBasicFabricEvent = original.KindClusterNewHealthReport
KindClusterUpgradeCompleted KindBasicFabricEvent = original.KindClusterUpgradeCompleted
KindClusterUpgradeDomainCompleted KindBasicFabricEvent = original.KindClusterUpgradeDomainCompleted
KindClusterUpgradeRollbackCompleted KindBasicFabricEvent = original.KindClusterUpgradeRollbackCompleted
KindClusterUpgradeRollbackStarted KindBasicFabricEvent = original.KindClusterUpgradeRollbackStarted
KindClusterUpgradeStarted KindBasicFabricEvent = original.KindClusterUpgradeStarted
KindContainerInstanceEvent KindBasicFabricEvent = original.KindContainerInstanceEvent
KindDeployedApplicationHealthReportExpired KindBasicFabricEvent = original.KindDeployedApplicationHealthReportExpired
KindDeployedApplicationNewHealthReport KindBasicFabricEvent = original.KindDeployedApplicationNewHealthReport
KindDeployedServicePackageHealthReportExpired KindBasicFabricEvent = original.KindDeployedServicePackageHealthReportExpired
KindDeployedServicePackageNewHealthReport KindBasicFabricEvent = original.KindDeployedServicePackageNewHealthReport
KindFabricEvent KindBasicFabricEvent = original.KindFabricEvent
KindNodeAborted KindBasicFabricEvent = original.KindNodeAborted
KindNodeAddedToCluster KindBasicFabricEvent = original.KindNodeAddedToCluster
KindNodeClosed KindBasicFabricEvent = original.KindNodeClosed
KindNodeDeactivateCompleted KindBasicFabricEvent = original.KindNodeDeactivateCompleted
KindNodeDeactivateStarted KindBasicFabricEvent = original.KindNodeDeactivateStarted
KindNodeDown KindBasicFabricEvent = original.KindNodeDown
KindNodeEvent KindBasicFabricEvent = original.KindNodeEvent
KindNodeHealthReportExpired KindBasicFabricEvent = original.KindNodeHealthReportExpired
KindNodeNewHealthReport KindBasicFabricEvent = original.KindNodeNewHealthReport
KindNodeOpenFailed KindBasicFabricEvent = original.KindNodeOpenFailed
KindNodeOpenSucceeded KindBasicFabricEvent = original.KindNodeOpenSucceeded
KindNodeRemovedFromCluster KindBasicFabricEvent = original.KindNodeRemovedFromCluster
KindNodeUp KindBasicFabricEvent = original.KindNodeUp
KindPartitionAnalysisEvent KindBasicFabricEvent = original.KindPartitionAnalysisEvent
KindPartitionEvent KindBasicFabricEvent = original.KindPartitionEvent
KindPartitionHealthReportExpired KindBasicFabricEvent = original.KindPartitionHealthReportExpired
KindPartitionNewHealthReport KindBasicFabricEvent = original.KindPartitionNewHealthReport
KindPartitionPrimaryMoveAnalysis KindBasicFabricEvent = original.KindPartitionPrimaryMoveAnalysis
KindPartitionReconfigured KindBasicFabricEvent = original.KindPartitionReconfigured
KindReplicaEvent KindBasicFabricEvent = original.KindReplicaEvent
KindServiceCreated KindBasicFabricEvent = original.KindServiceCreated
KindServiceDeleted KindBasicFabricEvent = original.KindServiceDeleted
KindServiceEvent KindBasicFabricEvent = original.KindServiceEvent
KindServiceHealthReportExpired KindBasicFabricEvent = original.KindServiceHealthReportExpired
KindServiceNewHealthReport KindBasicFabricEvent = original.KindServiceNewHealthReport
KindStatefulReplicaHealthReportExpired KindBasicFabricEvent = original.KindStatefulReplicaHealthReportExpired
KindStatefulReplicaNewHealthReport KindBasicFabricEvent = original.KindStatefulReplicaNewHealthReport
KindStatelessReplicaHealthReportExpired KindBasicFabricEvent = original.KindStatelessReplicaHealthReportExpired
KindStatelessReplicaNewHealthReport KindBasicFabricEvent = original.KindStatelessReplicaNewHealthReport
)
// KindBasicNetworkResourcePropertiesBase is an alias for
// original.KindBasicNetworkResourcePropertiesBase; the constants below
// re-export its values.
type KindBasicNetworkResourcePropertiesBase = original.KindBasicNetworkResourcePropertiesBase
const (
KindLocal KindBasicNetworkResourcePropertiesBase = original.KindLocal
KindNetworkResourceProperties KindBasicNetworkResourcePropertiesBase = original.KindNetworkResourceProperties
KindNetworkResourcePropertiesBase KindBasicNetworkResourcePropertiesBase = original.KindNetworkResourcePropertiesBase
)
// KindBasicPropertyBatchInfo is an alias for
// original.KindBasicPropertyBatchInfo; the constants below re-export its
// values.
type KindBasicPropertyBatchInfo = original.KindBasicPropertyBatchInfo
const (
KindFailed KindBasicPropertyBatchInfo = original.KindFailed
KindPropertyBatchInfo KindBasicPropertyBatchInfo = original.KindPropertyBatchInfo
KindSuccessful KindBasicPropertyBatchInfo = original.KindSuccessful
)
// KindBasicPropertyBatchOperation is an alias for
// original.KindBasicPropertyBatchOperation; the constants below re-export its
// values.
type KindBasicPropertyBatchOperation = original.KindBasicPropertyBatchOperation
const (
KindCheckExists KindBasicPropertyBatchOperation = original.KindCheckExists
KindCheckSequence KindBasicPropertyBatchOperation = original.KindCheckSequence
KindCheckValue KindBasicPropertyBatchOperation = original.KindCheckValue
KindDelete KindBasicPropertyBatchOperation = original.KindDelete
KindGet KindBasicPropertyBatchOperation = original.KindGet
KindPropertyBatchOperation KindBasicPropertyBatchOperation = original.KindPropertyBatchOperation
KindPut KindBasicPropertyBatchOperation = original.KindPut
)
// KindBasicPropertyValue is an alias for original.KindBasicPropertyValue; the
// constants below re-export its values.
type KindBasicPropertyValue = original.KindBasicPropertyValue
const (
KindBinary KindBasicPropertyValue = original.KindBinary
KindDouble KindBasicPropertyValue = original.KindDouble
KindGUID KindBasicPropertyValue = original.KindGUID
KindInt64 KindBasicPropertyValue = original.KindInt64
KindPropertyValue KindBasicPropertyValue = original.KindPropertyValue
KindString KindBasicPropertyValue = original.KindString
)
// KindBasicProvisionApplicationTypeDescriptionBase is an alias for
// original.KindBasicProvisionApplicationTypeDescriptionBase; the constants
// below re-export its values.
type KindBasicProvisionApplicationTypeDescriptionBase = original.KindBasicProvisionApplicationTypeDescriptionBase
const (
KindExternalStore KindBasicProvisionApplicationTypeDescriptionBase = original.KindExternalStore
KindImageStorePath KindBasicProvisionApplicationTypeDescriptionBase = original.KindImageStorePath
KindProvisionApplicationTypeDescriptionBase KindBasicProvisionApplicationTypeDescriptionBase = original.KindProvisionApplicationTypeDescriptionBase
)
// KindBasicRepairImpactDescriptionBase is an alias for
// original.KindBasicRepairImpactDescriptionBase; the constants below
// re-export its (fully prefixed) values.
type KindBasicRepairImpactDescriptionBase = original.KindBasicRepairImpactDescriptionBase
const (
KindBasicRepairImpactDescriptionBaseKindNode KindBasicRepairImpactDescriptionBase = original.KindBasicRepairImpactDescriptionBaseKindNode
KindBasicRepairImpactDescriptionBaseKindRepairImpactDescriptionBase KindBasicRepairImpactDescriptionBase = original.KindBasicRepairImpactDescriptionBaseKindRepairImpactDescriptionBase
)
// KindBasicRepairTargetDescriptionBase is an alias for
// original.KindBasicRepairTargetDescriptionBase; the constants below
// re-export its (fully prefixed) values.
type KindBasicRepairTargetDescriptionBase = original.KindBasicRepairTargetDescriptionBase
const (
KindBasicRepairTargetDescriptionBaseKindNode KindBasicRepairTargetDescriptionBase = original.KindBasicRepairTargetDescriptionBaseKindNode
KindBasicRepairTargetDescriptionBaseKindRepairTargetDescriptionBase KindBasicRepairTargetDescriptionBase = original.KindBasicRepairTargetDescriptionBaseKindRepairTargetDescriptionBase
)
// KindBasicReplicaStatusBase is an alias for
// original.KindBasicReplicaStatusBase; the constants below re-export its
// values.
type KindBasicReplicaStatusBase = original.KindBasicReplicaStatusBase
const (
KindKeyValueStore KindBasicReplicaStatusBase = original.KindKeyValueStore
KindReplicaStatusBase KindBasicReplicaStatusBase = original.KindReplicaStatusBase
)
// KindBasicReplicatorStatus is an alias for
// original.KindBasicReplicatorStatus; the constants below re-export its
// values.
type KindBasicReplicatorStatus = original.KindBasicReplicatorStatus
const (
KindActiveSecondary KindBasicReplicatorStatus = original.KindActiveSecondary
KindIdleSecondary KindBasicReplicatorStatus = original.KindIdleSecondary
KindPrimary KindBasicReplicatorStatus = original.KindPrimary
KindReplicatorStatus KindBasicReplicatorStatus = original.KindReplicatorStatus
KindSecondaryReplicatorStatus KindBasicReplicatorStatus = original.KindSecondaryReplicatorStatus
)
// KindBasicSafetyCheck is an alias for original.KindBasicSafetyCheck; the
// constants below re-export its values.
type KindBasicSafetyCheck = original.KindBasicSafetyCheck
const (
KindEnsureAvailability KindBasicSafetyCheck = original.KindEnsureAvailability
KindEnsurePartitionQuorum KindBasicSafetyCheck = original.KindEnsurePartitionQuorum
KindEnsureSeedNodeQuorum KindBasicSafetyCheck = original.KindEnsureSeedNodeQuorum
KindPartitionSafetyCheck KindBasicSafetyCheck = original.KindPartitionSafetyCheck
KindSafetyCheck KindBasicSafetyCheck = original.KindSafetyCheck
KindWaitForInbuildReplica KindBasicSafetyCheck = original.KindWaitForInbuildReplica
KindWaitForPrimaryPlacement KindBasicSafetyCheck = original.KindWaitForPrimaryPlacement
KindWaitForPrimarySwap KindBasicSafetyCheck = original.KindWaitForPrimarySwap
KindWaitForReconfiguration KindBasicSafetyCheck = original.KindWaitForReconfiguration
)
// KindBasicScalingMechanismDescription aliases the same-named type in the original package.
type KindBasicScalingMechanismDescription = original.KindBasicScalingMechanismDescription

// Re-exported KindBasicScalingMechanismDescription values.
const (
	KindAddRemoveIncrementalNamedPartition KindBasicScalingMechanismDescription = original.KindAddRemoveIncrementalNamedPartition
	KindPartitionInstanceCount KindBasicScalingMechanismDescription = original.KindPartitionInstanceCount
	KindScalingMechanismDescription KindBasicScalingMechanismDescription = original.KindScalingMechanismDescription
)

// KindBasicScalingTriggerDescription aliases the same-named type in the original package.
type KindBasicScalingTriggerDescription = original.KindBasicScalingTriggerDescription

// Re-exported KindBasicScalingTriggerDescription values.
const (
	KindAveragePartitionLoad KindBasicScalingTriggerDescription = original.KindAveragePartitionLoad
	KindAverageServiceLoad KindBasicScalingTriggerDescription = original.KindAverageServiceLoad
	KindScalingTriggerDescription KindBasicScalingTriggerDescription = original.KindScalingTriggerDescription
)

// KindBasicSecretResourcePropertiesBase aliases the same-named type in the original package.
type KindBasicSecretResourcePropertiesBase = original.KindBasicSecretResourcePropertiesBase

// Re-exported KindBasicSecretResourcePropertiesBase values.
const (
	KindInlinedValue KindBasicSecretResourcePropertiesBase = original.KindInlinedValue
	KindSecretResourceProperties KindBasicSecretResourcePropertiesBase = original.KindSecretResourceProperties
	KindSecretResourcePropertiesBase KindBasicSecretResourcePropertiesBase = original.KindSecretResourcePropertiesBase
)

// KindBasicServiceTypeDescription aliases the same-named type in the original package.
type KindBasicServiceTypeDescription = original.KindBasicServiceTypeDescription

// Re-exported KindBasicServiceTypeDescription values.
const (
	KindServiceTypeDescription KindBasicServiceTypeDescription = original.KindServiceTypeDescription
	KindStateful KindBasicServiceTypeDescription = original.KindStateful
	KindStateless KindBasicServiceTypeDescription = original.KindStateless
)

// MoveCost aliases the same-named type in the original package.
type MoveCost = original.MoveCost

// Re-exported MoveCost values.
const (
	High MoveCost = original.High
	Low MoveCost = original.Low
	Medium MoveCost = original.Medium
	Zero MoveCost = original.Zero
)

// NetworkKind aliases the same-named type in the original package.
type NetworkKind = original.NetworkKind

// Re-exported NetworkKind values.
const (
	Local NetworkKind = original.Local
)
// NodeDeactivationIntent aliases the same-named type in the original package.
type NodeDeactivationIntent = original.NodeDeactivationIntent

// Re-exported NodeDeactivationIntent values.
const (
	NodeDeactivationIntentInvalid NodeDeactivationIntent = original.NodeDeactivationIntentInvalid
	NodeDeactivationIntentPause NodeDeactivationIntent = original.NodeDeactivationIntentPause
	NodeDeactivationIntentRemoveData NodeDeactivationIntent = original.NodeDeactivationIntentRemoveData
	NodeDeactivationIntentRemoveNode NodeDeactivationIntent = original.NodeDeactivationIntentRemoveNode
	NodeDeactivationIntentRestart NodeDeactivationIntent = original.NodeDeactivationIntentRestart
)

// NodeDeactivationStatus aliases the same-named type in the original package.
type NodeDeactivationStatus = original.NodeDeactivationStatus

// Re-exported NodeDeactivationStatus values.
const (
	NodeDeactivationStatusCompleted NodeDeactivationStatus = original.NodeDeactivationStatusCompleted
	NodeDeactivationStatusNone NodeDeactivationStatus = original.NodeDeactivationStatusNone
	NodeDeactivationStatusSafetyCheckComplete NodeDeactivationStatus = original.NodeDeactivationStatusSafetyCheckComplete
	NodeDeactivationStatusSafetyCheckInProgress NodeDeactivationStatus = original.NodeDeactivationStatusSafetyCheckInProgress
)

// NodeDeactivationTaskType aliases the same-named type in the original package.
type NodeDeactivationTaskType = original.NodeDeactivationTaskType

// Re-exported NodeDeactivationTaskType values.
const (
	NodeDeactivationTaskTypeClient NodeDeactivationTaskType = original.NodeDeactivationTaskTypeClient
	NodeDeactivationTaskTypeInfrastructure NodeDeactivationTaskType = original.NodeDeactivationTaskTypeInfrastructure
	NodeDeactivationTaskTypeInvalid NodeDeactivationTaskType = original.NodeDeactivationTaskTypeInvalid
	NodeDeactivationTaskTypeRepair NodeDeactivationTaskType = original.NodeDeactivationTaskTypeRepair
)

// NodeStatus aliases the same-named type in the original package.
type NodeStatus = original.NodeStatus

// Re-exported NodeStatus values.
const (
	NodeStatusDisabled NodeStatus = original.NodeStatusDisabled
	NodeStatusDisabling NodeStatus = original.NodeStatusDisabling
	NodeStatusDown NodeStatus = original.NodeStatusDown
	NodeStatusEnabling NodeStatus = original.NodeStatusEnabling
	NodeStatusInvalid NodeStatus = original.NodeStatusInvalid
	NodeStatusRemoved NodeStatus = original.NodeStatusRemoved
	NodeStatusUnknown NodeStatus = original.NodeStatusUnknown
	NodeStatusUp NodeStatus = original.NodeStatusUp
)

// NodeStatusFilter aliases the same-named type in the original package.
type NodeStatusFilter = original.NodeStatusFilter

// Re-exported NodeStatusFilter values.
const (
	All NodeStatusFilter = original.All
	Default NodeStatusFilter = original.Default
	Disabled NodeStatusFilter = original.Disabled
	Disabling NodeStatusFilter = original.Disabling
	Down NodeStatusFilter = original.Down
	Enabling NodeStatusFilter = original.Enabling
	Removed NodeStatusFilter = original.Removed
	Unknown NodeStatusFilter = original.Unknown
	Up NodeStatusFilter = original.Up
)
// NodeTransitionType aliases the same-named type in the original package.
type NodeTransitionType = original.NodeTransitionType

// Re-exported NodeTransitionType values.
const (
	NodeTransitionTypeInvalid NodeTransitionType = original.NodeTransitionTypeInvalid
	NodeTransitionTypeStart NodeTransitionType = original.NodeTransitionTypeStart
	NodeTransitionTypeStop NodeTransitionType = original.NodeTransitionTypeStop
)

// NodeUpgradePhase aliases the same-named type in the original package.
type NodeUpgradePhase = original.NodeUpgradePhase

// Re-exported NodeUpgradePhase values.
const (
	NodeUpgradePhaseInvalid NodeUpgradePhase = original.NodeUpgradePhaseInvalid
	NodeUpgradePhasePostUpgradeSafetyCheck NodeUpgradePhase = original.NodeUpgradePhasePostUpgradeSafetyCheck
	NodeUpgradePhasePreUpgradeSafetyCheck NodeUpgradePhase = original.NodeUpgradePhasePreUpgradeSafetyCheck
	NodeUpgradePhaseUpgrading NodeUpgradePhase = original.NodeUpgradePhaseUpgrading
)

// OperatingSystemType aliases the same-named type in the original package.
type OperatingSystemType = original.OperatingSystemType

// Re-exported OperatingSystemType values.
const (
	Linux OperatingSystemType = original.Linux
	Windows OperatingSystemType = original.Windows
)

// OperationState aliases the same-named type in the original package.
type OperationState = original.OperationState

// Re-exported OperationState values.
const (
	OperationStateCancelled OperationState = original.OperationStateCancelled
	OperationStateCompleted OperationState = original.OperationStateCompleted
	OperationStateFaulted OperationState = original.OperationStateFaulted
	OperationStateForceCancelled OperationState = original.OperationStateForceCancelled
	OperationStateInvalid OperationState = original.OperationStateInvalid
	OperationStateRollingBack OperationState = original.OperationStateRollingBack
	OperationStateRunning OperationState = original.OperationStateRunning
)

// OperationType aliases the same-named type in the original package.
type OperationType = original.OperationType

// Re-exported OperationType values.
const (
	OperationTypeInvalid OperationType = original.OperationTypeInvalid
	OperationTypeNodeTransition OperationType = original.OperationTypeNodeTransition
	OperationTypePartitionDataLoss OperationType = original.OperationTypePartitionDataLoss
	OperationTypePartitionQuorumLoss OperationType = original.OperationTypePartitionQuorumLoss
	OperationTypePartitionRestart OperationType = original.OperationTypePartitionRestart
)
// PackageSharingPolicyScope aliases the same-named type in the original package.
type PackageSharingPolicyScope = original.PackageSharingPolicyScope

// Re-exported PackageSharingPolicyScope values.
const (
	PackageSharingPolicyScopeAll PackageSharingPolicyScope = original.PackageSharingPolicyScopeAll
	PackageSharingPolicyScopeCode PackageSharingPolicyScope = original.PackageSharingPolicyScopeCode
	PackageSharingPolicyScopeConfig PackageSharingPolicyScope = original.PackageSharingPolicyScopeConfig
	PackageSharingPolicyScopeData PackageSharingPolicyScope = original.PackageSharingPolicyScopeData
	PackageSharingPolicyScopeNone PackageSharingPolicyScope = original.PackageSharingPolicyScopeNone
)

// PartitionAccessStatus aliases the same-named type in the original package.
type PartitionAccessStatus = original.PartitionAccessStatus

// Re-exported PartitionAccessStatus values.
const (
	PartitionAccessStatusGranted PartitionAccessStatus = original.PartitionAccessStatusGranted
	PartitionAccessStatusInvalid PartitionAccessStatus = original.PartitionAccessStatusInvalid
	PartitionAccessStatusNotPrimary PartitionAccessStatus = original.PartitionAccessStatusNotPrimary
	PartitionAccessStatusNoWriteQuorum PartitionAccessStatus = original.PartitionAccessStatusNoWriteQuorum
	PartitionAccessStatusReconfigurationPending PartitionAccessStatus = original.PartitionAccessStatusReconfigurationPending
)

// PartitionScheme aliases the same-named type in the original package.
type PartitionScheme = original.PartitionScheme

// Re-exported PartitionScheme values.
const (
	PartitionSchemeInvalid PartitionScheme = original.PartitionSchemeInvalid
	PartitionSchemeNamed PartitionScheme = original.PartitionSchemeNamed
	PartitionSchemeSingleton PartitionScheme = original.PartitionSchemeSingleton
	PartitionSchemeUniformInt64Range PartitionScheme = original.PartitionSchemeUniformInt64Range
)

// PartitionSchemeBasicPartitionSchemeDescription aliases the same-named type in the original package.
type PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeBasicPartitionSchemeDescription

// Re-exported PartitionSchemeBasicPartitionSchemeDescription values.
// The "1" suffixes disambiguate from the PartitionScheme constants above.
const (
	PartitionSchemeNamed1 PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeNamed1
	PartitionSchemePartitionSchemeDescription PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemePartitionSchemeDescription
	PartitionSchemeSingleton1 PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeSingleton1
	PartitionSchemeUniformInt64Range1 PartitionSchemeBasicPartitionSchemeDescription = original.PartitionSchemeUniformInt64Range1
)
// PropertyBatchInfoKind aliases the same-named type in the original package.
type PropertyBatchInfoKind = original.PropertyBatchInfoKind

// Re-exported PropertyBatchInfoKind values.
const (
	PropertyBatchInfoKindFailed PropertyBatchInfoKind = original.PropertyBatchInfoKindFailed
	PropertyBatchInfoKindInvalid PropertyBatchInfoKind = original.PropertyBatchInfoKindInvalid
	PropertyBatchInfoKindSuccessful PropertyBatchInfoKind = original.PropertyBatchInfoKindSuccessful
)

// PropertyBatchOperationKind aliases the same-named type in the original package.
type PropertyBatchOperationKind = original.PropertyBatchOperationKind

// Re-exported PropertyBatchOperationKind values.
const (
	PropertyBatchOperationKindCheckExists PropertyBatchOperationKind = original.PropertyBatchOperationKindCheckExists
	PropertyBatchOperationKindCheckSequence PropertyBatchOperationKind = original.PropertyBatchOperationKindCheckSequence
	PropertyBatchOperationKindCheckValue PropertyBatchOperationKind = original.PropertyBatchOperationKindCheckValue
	PropertyBatchOperationKindDelete PropertyBatchOperationKind = original.PropertyBatchOperationKindDelete
	PropertyBatchOperationKindGet PropertyBatchOperationKind = original.PropertyBatchOperationKindGet
	PropertyBatchOperationKindInvalid PropertyBatchOperationKind = original.PropertyBatchOperationKindInvalid
	PropertyBatchOperationKindPut PropertyBatchOperationKind = original.PropertyBatchOperationKindPut
)

// PropertyValueKind aliases the same-named type in the original package.
type PropertyValueKind = original.PropertyValueKind

// Re-exported PropertyValueKind values.
const (
	PropertyValueKindBinary PropertyValueKind = original.PropertyValueKindBinary
	PropertyValueKindDouble PropertyValueKind = original.PropertyValueKindDouble
	PropertyValueKindGUID PropertyValueKind = original.PropertyValueKindGUID
	PropertyValueKindInt64 PropertyValueKind = original.PropertyValueKindInt64
	PropertyValueKindInvalid PropertyValueKind = original.PropertyValueKindInvalid
	PropertyValueKindString PropertyValueKind = original.PropertyValueKindString
)

// ProvisionApplicationTypeKind aliases the same-named type in the original package.
type ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKind

// Re-exported ProvisionApplicationTypeKind values.
const (
	ProvisionApplicationTypeKindExternalStore ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKindExternalStore
	ProvisionApplicationTypeKindImageStorePath ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKindImageStorePath
	ProvisionApplicationTypeKindInvalid ProvisionApplicationTypeKind = original.ProvisionApplicationTypeKindInvalid
)
// QuorumLossMode aliases the same-named type in the original package.
type QuorumLossMode = original.QuorumLossMode

// Re-exported QuorumLossMode values.
const (
	QuorumLossModeAllReplicas QuorumLossMode = original.QuorumLossModeAllReplicas
	QuorumLossModeInvalid QuorumLossMode = original.QuorumLossModeInvalid
	QuorumLossModeQuorumReplicas QuorumLossMode = original.QuorumLossModeQuorumReplicas
)

// ReconfigurationPhase aliases the same-named type in the original package.
type ReconfigurationPhase = original.ReconfigurationPhase

// Re-exported ReconfigurationPhase values.
const (
	ReconfigurationPhaseAbortPhaseZero ReconfigurationPhase = original.ReconfigurationPhaseAbortPhaseZero
	ReconfigurationPhaseNone ReconfigurationPhase = original.ReconfigurationPhaseNone
	ReconfigurationPhasePhase0 ReconfigurationPhase = original.ReconfigurationPhasePhase0
	ReconfigurationPhasePhase1 ReconfigurationPhase = original.ReconfigurationPhasePhase1
	ReconfigurationPhasePhase2 ReconfigurationPhase = original.ReconfigurationPhasePhase2
	ReconfigurationPhasePhase3 ReconfigurationPhase = original.ReconfigurationPhasePhase3
	ReconfigurationPhasePhase4 ReconfigurationPhase = original.ReconfigurationPhasePhase4
	ReconfigurationPhaseUnknown ReconfigurationPhase = original.ReconfigurationPhaseUnknown
)

// ReconfigurationType aliases the same-named type in the original package.
type ReconfigurationType = original.ReconfigurationType

// Re-exported ReconfigurationType values.
const (
	ReconfigurationTypeFailover ReconfigurationType = original.ReconfigurationTypeFailover
	ReconfigurationTypeOther ReconfigurationType = original.ReconfigurationTypeOther
	ReconfigurationTypeSwapPrimary ReconfigurationType = original.ReconfigurationTypeSwapPrimary
	ReconfigurationTypeUnknown ReconfigurationType = original.ReconfigurationTypeUnknown
)

// RepairImpactKind aliases the same-named type in the original package.
type RepairImpactKind = original.RepairImpactKind

// Re-exported RepairImpactKind values.
const (
	RepairImpactKindInvalid RepairImpactKind = original.RepairImpactKindInvalid
	RepairImpactKindNode RepairImpactKind = original.RepairImpactKindNode
)

// RepairTargetKind aliases the same-named type in the original package.
type RepairTargetKind = original.RepairTargetKind

// Re-exported RepairTargetKind values.
const (
	RepairTargetKindInvalid RepairTargetKind = original.RepairTargetKindInvalid
	RepairTargetKindNode RepairTargetKind = original.RepairTargetKindNode
)

// RepairTaskHealthCheckState aliases the same-named type in the original package.
type RepairTaskHealthCheckState = original.RepairTaskHealthCheckState

// Re-exported RepairTaskHealthCheckState values.
const (
	InProgress RepairTaskHealthCheckState = original.InProgress
	NotStarted RepairTaskHealthCheckState = original.NotStarted
	Skipped RepairTaskHealthCheckState = original.Skipped
	Succeeded RepairTaskHealthCheckState = original.Succeeded
	TimedOut RepairTaskHealthCheckState = original.TimedOut
)
// ReplicaHealthReportServiceKind aliases the same-named type in the original package.
type ReplicaHealthReportServiceKind = original.ReplicaHealthReportServiceKind

// Re-exported ReplicaHealthReportServiceKind values.
const (
	Stateful ReplicaHealthReportServiceKind = original.Stateful
	Stateless ReplicaHealthReportServiceKind = original.Stateless
)

// ReplicaKind aliases the same-named type in the original package.
type ReplicaKind = original.ReplicaKind

// Re-exported ReplicaKind values.
const (
	ReplicaKindInvalid ReplicaKind = original.ReplicaKindInvalid
	ReplicaKindKeyValueStore ReplicaKind = original.ReplicaKindKeyValueStore
)

// ReplicaRole aliases the same-named type in the original package.
type ReplicaRole = original.ReplicaRole

// Re-exported ReplicaRole values.
const (
	ReplicaRoleActiveSecondary ReplicaRole = original.ReplicaRoleActiveSecondary
	ReplicaRoleIdleSecondary ReplicaRole = original.ReplicaRoleIdleSecondary
	ReplicaRoleNone ReplicaRole = original.ReplicaRoleNone
	ReplicaRolePrimary ReplicaRole = original.ReplicaRolePrimary
	ReplicaRoleUnknown ReplicaRole = original.ReplicaRoleUnknown
)

// ReplicaStatus aliases the same-named type in the original package.
type ReplicaStatus = original.ReplicaStatus

// Re-exported ReplicaStatus values.
const (
	ReplicaStatusDown ReplicaStatus = original.ReplicaStatusDown
	ReplicaStatusDropped ReplicaStatus = original.ReplicaStatusDropped
	ReplicaStatusInBuild ReplicaStatus = original.ReplicaStatusInBuild
	ReplicaStatusInvalid ReplicaStatus = original.ReplicaStatusInvalid
	ReplicaStatusReady ReplicaStatus = original.ReplicaStatusReady
	ReplicaStatusStandby ReplicaStatus = original.ReplicaStatusStandby
)

// ReplicatorOperationName aliases the same-named type in the original package.
type ReplicatorOperationName = original.ReplicatorOperationName

// Re-exported ReplicatorOperationName values.
const (
	ReplicatorOperationNameAbort ReplicatorOperationName = original.ReplicatorOperationNameAbort
	ReplicatorOperationNameBuild ReplicatorOperationName = original.ReplicatorOperationNameBuild
	ReplicatorOperationNameChangeRole ReplicatorOperationName = original.ReplicatorOperationNameChangeRole
	ReplicatorOperationNameClose ReplicatorOperationName = original.ReplicatorOperationNameClose
	ReplicatorOperationNameInvalid ReplicatorOperationName = original.ReplicatorOperationNameInvalid
	ReplicatorOperationNameNone ReplicatorOperationName = original.ReplicatorOperationNameNone
	ReplicatorOperationNameOnDataLoss ReplicatorOperationName = original.ReplicatorOperationNameOnDataLoss
	ReplicatorOperationNameOpen ReplicatorOperationName = original.ReplicatorOperationNameOpen
	ReplicatorOperationNameUpdateEpoch ReplicatorOperationName = original.ReplicatorOperationNameUpdateEpoch
	ReplicatorOperationNameWaitForCatchup ReplicatorOperationName = original.ReplicatorOperationNameWaitForCatchup
)
// ResourceStatus aliases the same-named type in the original package.
type ResourceStatus = original.ResourceStatus

// Re-exported ResourceStatus values.
const (
	ResourceStatusCreating ResourceStatus = original.ResourceStatusCreating
	ResourceStatusDeleting ResourceStatus = original.ResourceStatusDeleting
	ResourceStatusFailed ResourceStatus = original.ResourceStatusFailed
	ResourceStatusReady ResourceStatus = original.ResourceStatusReady
	ResourceStatusUnknown ResourceStatus = original.ResourceStatusUnknown
	ResourceStatusUpgrading ResourceStatus = original.ResourceStatusUpgrading
)

// RestartPartitionMode aliases the same-named type in the original package.
type RestartPartitionMode = original.RestartPartitionMode

// Re-exported RestartPartitionMode values.
const (
	RestartPartitionModeAllReplicasOrInstances RestartPartitionMode = original.RestartPartitionModeAllReplicasOrInstances
	RestartPartitionModeInvalid RestartPartitionMode = original.RestartPartitionModeInvalid
	RestartPartitionModeOnlyActiveSecondaries RestartPartitionMode = original.RestartPartitionModeOnlyActiveSecondaries
)

// RestoreState aliases the same-named type in the original package.
type RestoreState = original.RestoreState

// Re-exported RestoreState values.
const (
	RestoreStateAccepted RestoreState = original.RestoreStateAccepted
	RestoreStateFailure RestoreState = original.RestoreStateFailure
	RestoreStateInvalid RestoreState = original.RestoreStateInvalid
	RestoreStateRestoreInProgress RestoreState = original.RestoreStateRestoreInProgress
	RestoreStateSuccess RestoreState = original.RestoreStateSuccess
	RestoreStateTimeout RestoreState = original.RestoreStateTimeout
)

// ResultStatus aliases the same-named type in the original package.
type ResultStatus = original.ResultStatus

// Re-exported ResultStatus values.
const (
	ResultStatusCancelled ResultStatus = original.ResultStatusCancelled
	ResultStatusFailed ResultStatus = original.ResultStatusFailed
	ResultStatusInterrupted ResultStatus = original.ResultStatusInterrupted
	ResultStatusInvalid ResultStatus = original.ResultStatusInvalid
	ResultStatusPending ResultStatus = original.ResultStatusPending
	ResultStatusSucceeded ResultStatus = original.ResultStatusSucceeded
)

// RetentionPolicyType aliases the same-named type in the original package.
type RetentionPolicyType = original.RetentionPolicyType

// Re-exported RetentionPolicyType values.
const (
	RetentionPolicyTypeBasic RetentionPolicyType = original.RetentionPolicyTypeBasic
	RetentionPolicyTypeInvalid RetentionPolicyType = original.RetentionPolicyTypeInvalid
)

// RetentionPolicyTypeBasicBasicRetentionPolicyDescription aliases the same-named type in the original package.
type RetentionPolicyTypeBasicBasicRetentionPolicyDescription = original.RetentionPolicyTypeBasicBasicRetentionPolicyDescription

// Re-exported RetentionPolicyTypeBasicBasicRetentionPolicyDescription values.
const (
	RetentionPolicyTypeBasic1 RetentionPolicyTypeBasicBasicRetentionPolicyDescription = original.RetentionPolicyTypeBasic1
	RetentionPolicyTypeRetentionPolicyDescription RetentionPolicyTypeBasicBasicRetentionPolicyDescription = original.RetentionPolicyTypeRetentionPolicyDescription
)
// SafetyCheckKind aliases the same-named type in the original package.
type SafetyCheckKind = original.SafetyCheckKind

// Re-exported SafetyCheckKind values.
const (
	SafetyCheckKindEnsureAvailability SafetyCheckKind = original.SafetyCheckKindEnsureAvailability
	SafetyCheckKindEnsurePartitionQuorum SafetyCheckKind = original.SafetyCheckKindEnsurePartitionQuorum
	SafetyCheckKindEnsureSeedNodeQuorum SafetyCheckKind = original.SafetyCheckKindEnsureSeedNodeQuorum
	SafetyCheckKindInvalid SafetyCheckKind = original.SafetyCheckKindInvalid
	SafetyCheckKindWaitForInbuildReplica SafetyCheckKind = original.SafetyCheckKindWaitForInbuildReplica
	SafetyCheckKindWaitForPrimaryPlacement SafetyCheckKind = original.SafetyCheckKindWaitForPrimaryPlacement
	SafetyCheckKindWaitForPrimarySwap SafetyCheckKind = original.SafetyCheckKindWaitForPrimarySwap
	SafetyCheckKindWaitForReconfiguration SafetyCheckKind = original.SafetyCheckKindWaitForReconfiguration
)

// ScalingMechanismKind aliases the same-named type in the original package.
type ScalingMechanismKind = original.ScalingMechanismKind

// Re-exported ScalingMechanismKind values.
const (
	ScalingMechanismKindAddRemoveIncrementalNamedPartition ScalingMechanismKind = original.ScalingMechanismKindAddRemoveIncrementalNamedPartition
	ScalingMechanismKindInvalid ScalingMechanismKind = original.ScalingMechanismKindInvalid
	ScalingMechanismKindPartitionInstanceCount ScalingMechanismKind = original.ScalingMechanismKindPartitionInstanceCount
)

// ScalingTriggerKind aliases the same-named type in the original package.
type ScalingTriggerKind = original.ScalingTriggerKind

// Re-exported ScalingTriggerKind values.
const (
	ScalingTriggerKindAveragePartitionLoad ScalingTriggerKind = original.ScalingTriggerKindAveragePartitionLoad
	ScalingTriggerKindAverageServiceLoad ScalingTriggerKind = original.ScalingTriggerKindAverageServiceLoad
	ScalingTriggerKindInvalid ScalingTriggerKind = original.ScalingTriggerKindInvalid
)

// ScheduleKind aliases the same-named type in the original package.
type ScheduleKind = original.ScheduleKind

// Re-exported ScheduleKind values.
const (
	ScheduleKindBackupScheduleDescription ScheduleKind = original.ScheduleKindBackupScheduleDescription
	ScheduleKindFrequencyBased ScheduleKind = original.ScheduleKindFrequencyBased
	ScheduleKindTimeBased ScheduleKind = original.ScheduleKindTimeBased
)

// SecretKind aliases the same-named type in the original package.
type SecretKind = original.SecretKind

// Re-exported SecretKind values.
const (
	InlinedValue SecretKind = original.InlinedValue
)
// ServiceCorrelationScheme aliases the same-named type in the original package.
type ServiceCorrelationScheme = original.ServiceCorrelationScheme

// Re-exported ServiceCorrelationScheme values.
const (
	ServiceCorrelationSchemeAffinity ServiceCorrelationScheme = original.ServiceCorrelationSchemeAffinity
	ServiceCorrelationSchemeAlignedAffinity ServiceCorrelationScheme = original.ServiceCorrelationSchemeAlignedAffinity
	ServiceCorrelationSchemeInvalid ServiceCorrelationScheme = original.ServiceCorrelationSchemeInvalid
	ServiceCorrelationSchemeNonAlignedAffinity ServiceCorrelationScheme = original.ServiceCorrelationSchemeNonAlignedAffinity
)

// ServiceEndpointRole aliases the same-named type in the original package.
type ServiceEndpointRole = original.ServiceEndpointRole

// Re-exported ServiceEndpointRole values.
const (
	ServiceEndpointRoleInvalid ServiceEndpointRole = original.ServiceEndpointRoleInvalid
	ServiceEndpointRoleStatefulPrimary ServiceEndpointRole = original.ServiceEndpointRoleStatefulPrimary
	ServiceEndpointRoleStatefulSecondary ServiceEndpointRole = original.ServiceEndpointRoleStatefulSecondary
	ServiceEndpointRoleStateless ServiceEndpointRole = original.ServiceEndpointRoleStateless
)

// ServiceKind aliases the same-named type in the original package.
type ServiceKind = original.ServiceKind

// Re-exported ServiceKind values.
const (
	ServiceKindInvalid ServiceKind = original.ServiceKindInvalid
	ServiceKindStateful ServiceKind = original.ServiceKindStateful
	ServiceKindStateless ServiceKind = original.ServiceKindStateless
)

// ServiceKindBasicDeployedServiceReplicaDetailInfo aliases the same-named type in the original package.
type ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindBasicDeployedServiceReplicaDetailInfo

// Re-exported ServiceKindBasicDeployedServiceReplicaDetailInfo values.
const (
	ServiceKindDeployedServiceReplicaDetailInfo ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindDeployedServiceReplicaDetailInfo
	ServiceKindStateful1 ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindStateful1
	ServiceKindStateless1 ServiceKindBasicDeployedServiceReplicaDetailInfo = original.ServiceKindStateless1
)

// ServiceKindBasicDeployedServiceReplicaInfo aliases the same-named type in the original package.
type ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfo

// Re-exported ServiceKindBasicDeployedServiceReplicaInfo values.
const (
	ServiceKindBasicDeployedServiceReplicaInfoServiceKindDeployedServiceReplicaInfo ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfoServiceKindDeployedServiceReplicaInfo
	ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateful ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateful
	ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateless ServiceKindBasicDeployedServiceReplicaInfo = original.ServiceKindBasicDeployedServiceReplicaInfoServiceKindStateless
)

// ServiceKindBasicReplicaHealth aliases the same-named type in the original package.
type ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealth

// Re-exported ServiceKindBasicReplicaHealth values.
const (
	ServiceKindBasicReplicaHealthServiceKindReplicaHealth ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealthServiceKindReplicaHealth
	ServiceKindBasicReplicaHealthServiceKindStateful ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealthServiceKindStateful
	ServiceKindBasicReplicaHealthServiceKindStateless ServiceKindBasicReplicaHealth = original.ServiceKindBasicReplicaHealthServiceKindStateless
)

// ServiceKindBasicReplicaHealthState aliases the same-named type in the original package.
type ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthState

// Re-exported ServiceKindBasicReplicaHealthState values.
const (
	ServiceKindBasicReplicaHealthStateServiceKindReplicaHealthState ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthStateServiceKindReplicaHealthState
	ServiceKindBasicReplicaHealthStateServiceKindStateful ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthStateServiceKindStateful
	ServiceKindBasicReplicaHealthStateServiceKindStateless ServiceKindBasicReplicaHealthState = original.ServiceKindBasicReplicaHealthStateServiceKindStateless
)
// ServiceKindBasicReplicaInfo aliases the same-named type in the original package.
type ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfo

// Re-exported ServiceKindBasicReplicaInfo values.
const (
	ServiceKindBasicReplicaInfoServiceKindReplicaInfo ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfoServiceKindReplicaInfo
	ServiceKindBasicReplicaInfoServiceKindStateful ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfoServiceKindStateful
	ServiceKindBasicReplicaInfoServiceKindStateless ServiceKindBasicReplicaInfo = original.ServiceKindBasicReplicaInfoServiceKindStateless
)

// ServiceKindBasicServiceDescription aliases the same-named type in the original package.
type ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescription

// Re-exported ServiceKindBasicServiceDescription values.
const (
	ServiceKindBasicServiceDescriptionServiceKindServiceDescription ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescriptionServiceKindServiceDescription
	ServiceKindBasicServiceDescriptionServiceKindStateful ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescriptionServiceKindStateful
	ServiceKindBasicServiceDescriptionServiceKindStateless ServiceKindBasicServiceDescription = original.ServiceKindBasicServiceDescriptionServiceKindStateless
)

// ServiceKindBasicServiceInfo aliases the same-named type in the original package.
type ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfo

// Re-exported ServiceKindBasicServiceInfo values.
const (
	ServiceKindBasicServiceInfoServiceKindServiceInfo ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfoServiceKindServiceInfo
	ServiceKindBasicServiceInfoServiceKindStateful ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfoServiceKindStateful
	ServiceKindBasicServiceInfoServiceKindStateless ServiceKindBasicServiceInfo = original.ServiceKindBasicServiceInfoServiceKindStateless
)

// ServiceKindBasicServicePartitionInfo aliases the same-named type in the original package.
type ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfo

// Re-exported ServiceKindBasicServicePartitionInfo values.
const (
	ServiceKindBasicServicePartitionInfoServiceKindServicePartitionInfo ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfoServiceKindServicePartitionInfo
	ServiceKindBasicServicePartitionInfoServiceKindStateful ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfoServiceKindStateful
	ServiceKindBasicServicePartitionInfoServiceKindStateless ServiceKindBasicServicePartitionInfo = original.ServiceKindBasicServicePartitionInfoServiceKindStateless
)

// ServiceKindBasicServiceUpdateDescription aliases the same-named type in the original package.
type ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescription

// Re-exported ServiceKindBasicServiceUpdateDescription values.
const (
	ServiceKindBasicServiceUpdateDescriptionServiceKindServiceUpdateDescription ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescriptionServiceKindServiceUpdateDescription
	ServiceKindBasicServiceUpdateDescriptionServiceKindStateful ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescriptionServiceKindStateful
	ServiceKindBasicServiceUpdateDescriptionServiceKindStateless ServiceKindBasicServiceUpdateDescription = original.ServiceKindBasicServiceUpdateDescriptionServiceKindStateless
)

// ServiceLoadMetricWeight aliases the same-named type in the original package.
type ServiceLoadMetricWeight = original.ServiceLoadMetricWeight

// Re-exported ServiceLoadMetricWeight values.
const (
	ServiceLoadMetricWeightHigh ServiceLoadMetricWeight = original.ServiceLoadMetricWeightHigh
	ServiceLoadMetricWeightLow ServiceLoadMetricWeight = original.ServiceLoadMetricWeightLow
	ServiceLoadMetricWeightMedium ServiceLoadMetricWeight = original.ServiceLoadMetricWeightMedium
	ServiceLoadMetricWeightZero ServiceLoadMetricWeight = original.ServiceLoadMetricWeightZero
)

// ServiceOperationName aliases the same-named type in the original package.
type ServiceOperationName = original.ServiceOperationName

// Re-exported ServiceOperationName values.
const (
	ServiceOperationNameAbort ServiceOperationName = original.ServiceOperationNameAbort
	ServiceOperationNameChangeRole ServiceOperationName = original.ServiceOperationNameChangeRole
	ServiceOperationNameClose ServiceOperationName = original.ServiceOperationNameClose
	ServiceOperationNameNone ServiceOperationName = original.ServiceOperationNameNone
	ServiceOperationNameOpen ServiceOperationName = original.ServiceOperationNameOpen
	ServiceOperationNameUnknown ServiceOperationName = original.ServiceOperationNameUnknown
)

// ServicePackageActivationMode aliases the same-named type in the original package.
type ServicePackageActivationMode = original.ServicePackageActivationMode

// Re-exported ServicePackageActivationMode values.
const (
	ExclusiveProcess ServicePackageActivationMode = original.ExclusiveProcess
	SharedProcess ServicePackageActivationMode = original.SharedProcess
)
// ServicePartitionKind aliases the same-named type in the original package.
type ServicePartitionKind = original.ServicePartitionKind

// Re-exported ServicePartitionKind values.
const (
	ServicePartitionKindInt64Range ServicePartitionKind = original.ServicePartitionKindInt64Range
	ServicePartitionKindInvalid ServicePartitionKind = original.ServicePartitionKindInvalid
	ServicePartitionKindNamed ServicePartitionKind = original.ServicePartitionKindNamed
	ServicePartitionKindSingleton ServicePartitionKind = original.ServicePartitionKindSingleton
)

// ServicePartitionKindBasicPartitionInformation aliases the same-named type in the original package.
type ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindBasicPartitionInformation

// Re-exported ServicePartitionKindBasicPartitionInformation values.
// The "1" suffixes disambiguate from the ServicePartitionKind constants above.
const (
	ServicePartitionKindInt64Range1 ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindInt64Range1
	ServicePartitionKindNamed1 ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindNamed1
	ServicePartitionKindPartitionInformation ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindPartitionInformation
	ServicePartitionKindSingleton1 ServicePartitionKindBasicPartitionInformation = original.ServicePartitionKindSingleton1
)

// ServicePartitionStatus aliases the same-named type in the original package.
type ServicePartitionStatus = original.ServicePartitionStatus

// Re-exported ServicePartitionStatus values.
const (
	ServicePartitionStatusDeleting ServicePartitionStatus = original.ServicePartitionStatusDeleting
	ServicePartitionStatusInQuorumLoss ServicePartitionStatus = original.ServicePartitionStatusInQuorumLoss
	ServicePartitionStatusInvalid ServicePartitionStatus = original.ServicePartitionStatusInvalid
	ServicePartitionStatusNotReady ServicePartitionStatus = original.ServicePartitionStatusNotReady
	ServicePartitionStatusReady ServicePartitionStatus = original.ServicePartitionStatusReady
	ServicePartitionStatusReconfiguring ServicePartitionStatus = original.ServicePartitionStatusReconfiguring
)

// ServicePlacementPolicyType aliases the same-named type in the original package.
type ServicePlacementPolicyType = original.ServicePlacementPolicyType

// Re-exported ServicePlacementPolicyType values.
const (
	ServicePlacementPolicyTypeInvalid ServicePlacementPolicyType = original.ServicePlacementPolicyTypeInvalid
	ServicePlacementPolicyTypeInvalidDomain ServicePlacementPolicyType = original.ServicePlacementPolicyTypeInvalidDomain
	ServicePlacementPolicyTypeNonPartiallyPlaceService ServicePlacementPolicyType = original.ServicePlacementPolicyTypeNonPartiallyPlaceService
	ServicePlacementPolicyTypePreferPrimaryDomain ServicePlacementPolicyType = original.ServicePlacementPolicyTypePreferPrimaryDomain
	ServicePlacementPolicyTypeRequireDomain ServicePlacementPolicyType = original.ServicePlacementPolicyTypeRequireDomain
	ServicePlacementPolicyTypeRequireDomainDistribution ServicePlacementPolicyType = original.ServicePlacementPolicyTypeRequireDomainDistribution
)

// ServiceStatus aliases the same-named type in the original package.
type ServiceStatus = original.ServiceStatus

// Re-exported ServiceStatus values.
const (
	ServiceStatusActive ServiceStatus = original.ServiceStatusActive
	ServiceStatusCreating ServiceStatus = original.ServiceStatusCreating
	ServiceStatusDeleting ServiceStatus = original.ServiceStatusDeleting
	ServiceStatusFailed ServiceStatus = original.ServiceStatusFailed
	ServiceStatusUnknown ServiceStatus = original.ServiceStatusUnknown
	ServiceStatusUpgrading ServiceStatus = original.ServiceStatusUpgrading
)

// ServiceTypeRegistrationStatus aliases the same-named type in the original package.
type ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatus

// Re-exported ServiceTypeRegistrationStatus values.
const (
	ServiceTypeRegistrationStatusDisabled ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusDisabled
	ServiceTypeRegistrationStatusEnabled ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusEnabled
	ServiceTypeRegistrationStatusInvalid ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusInvalid
	ServiceTypeRegistrationStatusRegistered ServiceTypeRegistrationStatus = original.ServiceTypeRegistrationStatusRegistered
)
type SizeTypes = original.SizeTypes
const (
SizeTypesLarge SizeTypes = original.SizeTypesLarge
SizeTypesMedium SizeTypes = original.SizeTypesMedium
SizeTypesSmall SizeTypes = original.SizeTypesSmall
)
type State = original.State
const (
StateApproved State = original.StateApproved
StateClaimed State = original.StateClaimed
StateCompleted State = original.StateCompleted
StateCreated State = original.StateCreated
StateExecuting State = original.StateExecuting
StateInvalid State = original.StateInvalid
StatePreparing State = original.StatePreparing
StateRestoring State = original.StateRestoring
)
type StorageKind = original.StorageKind
const (
StorageKindAzureBlobStore StorageKind = original.StorageKindAzureBlobStore
StorageKindBackupStorageDescription StorageKind = original.StorageKindBackupStorageDescription
StorageKindFileShare StorageKind = original.StorageKindFileShare
)
type Type = original.Type
const (
TypeInvalidDomain Type = original.TypeInvalidDomain
TypeNonPartiallyPlaceService Type = original.TypeNonPartiallyPlaceService
TypePreferPrimaryDomain Type = original.TypePreferPrimaryDomain
TypeRequireDomain Type = original.TypeRequireDomain
TypeRequireDomainDistribution Type = original.TypeRequireDomainDistribution
TypeServicePlacementPolicyDescription Type = original.TypeServicePlacementPolicyDescription
)
// UpgradeDomainState re-exports the per-upgrade-domain progress enum and its
// values from the wrapped "original" package.
type UpgradeDomainState = original.UpgradeDomainState

const (
UpgradeDomainStateCompleted UpgradeDomainState = original.UpgradeDomainStateCompleted
UpgradeDomainStateInProgress UpgradeDomainState = original.UpgradeDomainStateInProgress
UpgradeDomainStateInvalid UpgradeDomainState = original.UpgradeDomainStateInvalid
UpgradeDomainStatePending UpgradeDomainState = original.UpgradeDomainStatePending
)

// UpgradeKind re-exports the upgrade-kind enum and its values from the
// "original" package.
type UpgradeKind = original.UpgradeKind

const (
UpgradeKindInvalid UpgradeKind = original.UpgradeKindInvalid
UpgradeKindRolling UpgradeKind = original.UpgradeKindRolling
)

// UpgradeMode re-exports the upgrade-monitoring-mode enum and its values
// from the "original" package.
type UpgradeMode = original.UpgradeMode

const (
UpgradeModeInvalid UpgradeMode = original.UpgradeModeInvalid
UpgradeModeMonitored UpgradeMode = original.UpgradeModeMonitored
UpgradeModeUnmonitoredAuto UpgradeMode = original.UpgradeModeUnmonitoredAuto
UpgradeModeUnmonitoredManual UpgradeMode = original.UpgradeModeUnmonitoredManual
)

// UpgradeSortOrder re-exports the upgrade-domain sort-order enum and its
// values from the "original" package.
type UpgradeSortOrder = original.UpgradeSortOrder

const (
UpgradeSortOrderDefault UpgradeSortOrder = original.UpgradeSortOrderDefault
UpgradeSortOrderInvalid UpgradeSortOrder = original.UpgradeSortOrderInvalid
UpgradeSortOrderLexicographical UpgradeSortOrder = original.UpgradeSortOrderLexicographical
UpgradeSortOrderNumeric UpgradeSortOrder = original.UpgradeSortOrderNumeric
UpgradeSortOrderReverseLexicographical UpgradeSortOrder = original.UpgradeSortOrderReverseLexicographical
UpgradeSortOrderReverseNumeric UpgradeSortOrder = original.UpgradeSortOrderReverseNumeric
)

// UpgradeState re-exports the overall upgrade-state enum and its values from
// the "original" package.
type UpgradeState = original.UpgradeState

const (
UpgradeStateFailed UpgradeState = original.UpgradeStateFailed
UpgradeStateInvalid UpgradeState = original.UpgradeStateInvalid
UpgradeStateRollingBackCompleted UpgradeState = original.UpgradeStateRollingBackCompleted
UpgradeStateRollingBackInProgress UpgradeState = original.UpgradeStateRollingBackInProgress
UpgradeStateRollingForwardCompleted UpgradeState = original.UpgradeStateRollingForwardCompleted
UpgradeStateRollingForwardInProgress UpgradeState = original.UpgradeStateRollingForwardInProgress
UpgradeStateRollingForwardPending UpgradeState = original.UpgradeStateRollingForwardPending
)

// UpgradeType re-exports the upgrade-type enum and its values from the
// "original" package.
type UpgradeType = original.UpgradeType

const (
UpgradeTypeInvalid UpgradeType = original.UpgradeTypeInvalid
UpgradeTypeRolling UpgradeType = original.UpgradeTypeRolling
UpgradeTypeRollingForceRestart UpgradeType = original.UpgradeTypeRollingForceRestart
)

// VolumeProvider re-exports the volume-provider enum from the "original"
// package; SFAzureFile is its only value visible here.
type VolumeProvider = original.VolumeProvider

const (
SFAzureFile VolumeProvider = original.SFAzureFile
)
// The declarations below are pure type aliases: each re-exports a model,
// event, description, or client type from the wrapped "original" package
// under this profile package's namespace. Because they are aliases (`=`),
// values of these types are interchangeable with the original package's
// types — no conversion is needed. This list appears to be auto-generated;
// do not edit entries by hand (TODO confirm against the generator tooling).
type AadMetadata = original.AadMetadata
type AadMetadataObject = original.AadMetadataObject
type AddRemoveIncrementalNamedPartitionScalingMechanism = original.AddRemoveIncrementalNamedPartitionScalingMechanism
type AddRemoveReplicaScalingMechanism = original.AddRemoveReplicaScalingMechanism
type AnalysisEventMetadata = original.AnalysisEventMetadata
type ApplicationBackupConfigurationInfo = original.ApplicationBackupConfigurationInfo
type ApplicationBackupEntity = original.ApplicationBackupEntity
type ApplicationCapacityDescription = original.ApplicationCapacityDescription
type ApplicationContainerInstanceExitedEvent = original.ApplicationContainerInstanceExitedEvent
type ApplicationCreatedEvent = original.ApplicationCreatedEvent
type ApplicationDeletedEvent = original.ApplicationDeletedEvent
type ApplicationDescription = original.ApplicationDescription
type ApplicationEvent = original.ApplicationEvent
type ApplicationHealth = original.ApplicationHealth
type ApplicationHealthEvaluation = original.ApplicationHealthEvaluation
type ApplicationHealthPolicies = original.ApplicationHealthPolicies
type ApplicationHealthPolicy = original.ApplicationHealthPolicy
type ApplicationHealthPolicyMapItem = original.ApplicationHealthPolicyMapItem
type ApplicationHealthReportExpiredEvent = original.ApplicationHealthReportExpiredEvent
type ApplicationHealthState = original.ApplicationHealthState
type ApplicationHealthStateChunk = original.ApplicationHealthStateChunk
type ApplicationHealthStateChunkList = original.ApplicationHealthStateChunkList
type ApplicationHealthStateFilter = original.ApplicationHealthStateFilter
type ApplicationInfo = original.ApplicationInfo
type ApplicationLoadInfo = original.ApplicationLoadInfo
type ApplicationMetricDescription = original.ApplicationMetricDescription
type ApplicationNameInfo = original.ApplicationNameInfo
type ApplicationNewHealthReportEvent = original.ApplicationNewHealthReportEvent
type ApplicationParameter = original.ApplicationParameter
type ApplicationProcessExitedEvent = original.ApplicationProcessExitedEvent
type ApplicationProperties = original.ApplicationProperties
type ApplicationResourceDescription = original.ApplicationResourceDescription
type ApplicationScopedVolume = original.ApplicationScopedVolume
type ApplicationScopedVolumeCreationParameters = original.ApplicationScopedVolumeCreationParameters
type ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk = original.ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk
type ApplicationTypeApplicationsHealthEvaluation = original.ApplicationTypeApplicationsHealthEvaluation
type ApplicationTypeHealthPolicyMapItem = original.ApplicationTypeHealthPolicyMapItem
type ApplicationTypeImageStorePath = original.ApplicationTypeImageStorePath
type ApplicationTypeInfo = original.ApplicationTypeInfo
type ApplicationTypeManifest = original.ApplicationTypeManifest
type ApplicationUpgradeCompletedEvent = original.ApplicationUpgradeCompletedEvent
type ApplicationUpgradeDescription = original.ApplicationUpgradeDescription
type ApplicationUpgradeDomainCompletedEvent = original.ApplicationUpgradeDomainCompletedEvent
type ApplicationUpgradeProgressInfo = original.ApplicationUpgradeProgressInfo
type ApplicationUpgradeRollbackCompletedEvent = original.ApplicationUpgradeRollbackCompletedEvent
type ApplicationUpgradeRollbackStartedEvent = original.ApplicationUpgradeRollbackStartedEvent
type ApplicationUpgradeStartedEvent = original.ApplicationUpgradeStartedEvent
type ApplicationUpgradeUpdateDescription = original.ApplicationUpgradeUpdateDescription
type ApplicationsHealthEvaluation = original.ApplicationsHealthEvaluation
type AutoScalingMechanism = original.AutoScalingMechanism
type AutoScalingMetric = original.AutoScalingMetric
type AutoScalingPolicy = original.AutoScalingPolicy
type AutoScalingResourceMetric = original.AutoScalingResourceMetric
type AutoScalingTrigger = original.AutoScalingTrigger
type AverageLoadScalingTrigger = original.AverageLoadScalingTrigger
type AveragePartitionLoadScalingTrigger = original.AveragePartitionLoadScalingTrigger
type AverageServiceLoadScalingTrigger = original.AverageServiceLoadScalingTrigger
type AzureBlobBackupStorageDescription = original.AzureBlobBackupStorageDescription
type AzureInternalMonitoringPipelineSinkDescription = original.AzureInternalMonitoringPipelineSinkDescription
type BackupConfigurationInfo = original.BackupConfigurationInfo
type BackupEntity = original.BackupEntity
type BackupInfo = original.BackupInfo
type BackupPartitionDescription = original.BackupPartitionDescription
type BackupPolicyDescription = original.BackupPolicyDescription
type BackupProgressInfo = original.BackupProgressInfo
type BackupScheduleDescription = original.BackupScheduleDescription
type BackupStorageDescription = original.BackupStorageDescription
type BackupSuspensionInfo = original.BackupSuspensionInfo
type BaseClient = original.BaseClient
// "Basic*" aliases are the original package's polymorphic-interface types
// (discriminated-union bases), re-exported alongside their concrete models.
type BasicApplicationEvent = original.BasicApplicationEvent
type BasicApplicationScopedVolumeCreationParameters = original.BasicApplicationScopedVolumeCreationParameters
type BasicAutoScalingMechanism = original.BasicAutoScalingMechanism
type BasicAutoScalingMetric = original.BasicAutoScalingMetric
type BasicAutoScalingTrigger = original.BasicAutoScalingTrigger
type BasicBackupConfigurationInfo = original.BasicBackupConfigurationInfo
type BasicBackupEntity = original.BasicBackupEntity
type BasicBackupScheduleDescription = original.BasicBackupScheduleDescription
type BasicBackupStorageDescription = original.BasicBackupStorageDescription
type BasicBasicRetentionPolicyDescription = original.BasicBasicRetentionPolicyDescription
type BasicChaosEvent = original.BasicChaosEvent
type BasicClusterEvent = original.BasicClusterEvent
type BasicDeployedServiceReplicaDetailInfo = original.BasicDeployedServiceReplicaDetailInfo
type BasicDeployedServiceReplicaInfo = original.BasicDeployedServiceReplicaInfo
type BasicDiagnosticsSinkProperties = original.BasicDiagnosticsSinkProperties
type BasicFabricEvent = original.BasicFabricEvent
type BasicHealthEvaluation = original.BasicHealthEvaluation
type BasicNetworkResourceProperties = original.BasicNetworkResourceProperties
type BasicNetworkResourcePropertiesBase = original.BasicNetworkResourcePropertiesBase
type BasicNodeEvent = original.BasicNodeEvent
type BasicPartitionAnalysisEvent = original.BasicPartitionAnalysisEvent
type BasicPartitionEvent = original.BasicPartitionEvent
type BasicPartitionInformation = original.BasicPartitionInformation
type BasicPartitionSafetyCheck = original.BasicPartitionSafetyCheck
type BasicPartitionSchemeDescription = original.BasicPartitionSchemeDescription
type BasicPropertyBatchInfo = original.BasicPropertyBatchInfo
type BasicPropertyBatchOperation = original.BasicPropertyBatchOperation
type BasicPropertyValue = original.BasicPropertyValue
type BasicProvisionApplicationTypeDescriptionBase = original.BasicProvisionApplicationTypeDescriptionBase
type BasicRepairImpactDescriptionBase = original.BasicRepairImpactDescriptionBase
type BasicRepairTargetDescriptionBase = original.BasicRepairTargetDescriptionBase
type BasicReplicaEvent = original.BasicReplicaEvent
type BasicReplicaHealth = original.BasicReplicaHealth
type BasicReplicaHealthState = original.BasicReplicaHealthState
type BasicReplicaInfo = original.BasicReplicaInfo
type BasicReplicaStatusBase = original.BasicReplicaStatusBase
type BasicReplicatorStatus = original.BasicReplicatorStatus
type BasicRetentionPolicyDescription = original.BasicRetentionPolicyDescription
type BasicSafetyCheck = original.BasicSafetyCheck
type BasicScalingMechanismDescription = original.BasicScalingMechanismDescription
type BasicScalingTriggerDescription = original.BasicScalingTriggerDescription
type BasicSecondaryReplicatorStatus = original.BasicSecondaryReplicatorStatus
type BasicSecretResourceProperties = original.BasicSecretResourceProperties
type BasicSecretResourcePropertiesBase = original.BasicSecretResourcePropertiesBase
type BasicServiceDescription = original.BasicServiceDescription
type BasicServiceEvent = original.BasicServiceEvent
type BasicServiceInfo = original.BasicServiceInfo
type BasicServicePartitionInfo = original.BasicServicePartitionInfo
type BasicServicePlacementPolicyDescription = original.BasicServicePlacementPolicyDescription
type BasicServiceTypeDescription = original.BasicServiceTypeDescription
type BasicServiceUpdateDescription = original.BasicServiceUpdateDescription
type BinaryPropertyValue = original.BinaryPropertyValue
type Chaos = original.Chaos
type ChaosCodePackageRestartScheduledEvent = original.ChaosCodePackageRestartScheduledEvent
type ChaosContext = original.ChaosContext
type ChaosEvent = original.ChaosEvent
type ChaosEventWrapper = original.ChaosEventWrapper
type ChaosEventsSegment = original.ChaosEventsSegment
type ChaosNodeRestartScheduledEvent = original.ChaosNodeRestartScheduledEvent
type ChaosParameters = original.ChaosParameters
type ChaosParametersDictionaryItem = original.ChaosParametersDictionaryItem
type ChaosPartitionPrimaryMoveScheduledEvent = original.ChaosPartitionPrimaryMoveScheduledEvent
type ChaosPartitionSecondaryMoveScheduledEvent = original.ChaosPartitionSecondaryMoveScheduledEvent
type ChaosReplicaRemovalScheduledEvent = original.ChaosReplicaRemovalScheduledEvent
type ChaosReplicaRestartScheduledEvent = original.ChaosReplicaRestartScheduledEvent
type ChaosSchedule = original.ChaosSchedule
type ChaosScheduleDescription = original.ChaosScheduleDescription
type ChaosScheduleJob = original.ChaosScheduleJob
type ChaosScheduleJobActiveDaysOfWeek = original.ChaosScheduleJobActiveDaysOfWeek
type ChaosStartedEvent = original.ChaosStartedEvent
type ChaosStoppedEvent = original.ChaosStoppedEvent
type ChaosTargetFilter = original.ChaosTargetFilter
type CheckExistsPropertyBatchOperation = original.CheckExistsPropertyBatchOperation
type CheckSequencePropertyBatchOperation = original.CheckSequencePropertyBatchOperation
type CheckValuePropertyBatchOperation = original.CheckValuePropertyBatchOperation
type ClusterConfiguration = original.ClusterConfiguration
type ClusterConfigurationUpgradeDescription = original.ClusterConfigurationUpgradeDescription
type ClusterConfigurationUpgradeStatusInfo = original.ClusterConfigurationUpgradeStatusInfo
type ClusterEvent = original.ClusterEvent
type ClusterHealth = original.ClusterHealth
type ClusterHealthChunk = original.ClusterHealthChunk
type ClusterHealthChunkQueryDescription = original.ClusterHealthChunkQueryDescription
type ClusterHealthPolicies = original.ClusterHealthPolicies
type ClusterHealthPolicy = original.ClusterHealthPolicy
type ClusterHealthReportExpiredEvent = original.ClusterHealthReportExpiredEvent
type ClusterLoadInfo = original.ClusterLoadInfo
type ClusterManifest = original.ClusterManifest
type ClusterNewHealthReportEvent = original.ClusterNewHealthReportEvent
type ClusterUpgradeCompletedEvent = original.ClusterUpgradeCompletedEvent
type ClusterUpgradeDescriptionObject = original.ClusterUpgradeDescriptionObject
type ClusterUpgradeDomainCompletedEvent = original.ClusterUpgradeDomainCompletedEvent
type ClusterUpgradeHealthPolicyObject = original.ClusterUpgradeHealthPolicyObject
type ClusterUpgradeProgressObject = original.ClusterUpgradeProgressObject
type ClusterUpgradeRollbackCompletedEvent = original.ClusterUpgradeRollbackCompletedEvent
type ClusterUpgradeRollbackStartedEvent = original.ClusterUpgradeRollbackStartedEvent
type ClusterUpgradeStartedEvent = original.ClusterUpgradeStartedEvent
type ClusterVersion = original.ClusterVersion
type CodePackageEntryPoint = original.CodePackageEntryPoint
type CodePackageEntryPointStatistics = original.CodePackageEntryPointStatistics
type ComposeDeploymentStatusInfo = original.ComposeDeploymentStatusInfo
type ComposeDeploymentUpgradeDescription = original.ComposeDeploymentUpgradeDescription
type ComposeDeploymentUpgradeProgressInfo = original.ComposeDeploymentUpgradeProgressInfo
type ContainerAPIRequestBody = original.ContainerAPIRequestBody
type ContainerAPIResponse = original.ContainerAPIResponse
type ContainerAPIResult = original.ContainerAPIResult
type ContainerCodePackageProperties = original.ContainerCodePackageProperties
type ContainerEvent = original.ContainerEvent
type ContainerInstanceEvent = original.ContainerInstanceEvent
type ContainerInstanceView = original.ContainerInstanceView
type ContainerLabel = original.ContainerLabel
type ContainerLogs = original.ContainerLogs
type ContainerState = original.ContainerState
type CreateComposeDeploymentDescription = original.CreateComposeDeploymentDescription
type CurrentUpgradeDomainProgressInfo = original.CurrentUpgradeDomainProgressInfo
type DeactivationIntentDescription = original.DeactivationIntentDescription
type DeletePropertyBatchOperation = original.DeletePropertyBatchOperation
type DeltaNodesCheckHealthEvaluation = original.DeltaNodesCheckHealthEvaluation
type DeployServicePackageToNodeDescription = original.DeployServicePackageToNodeDescription
type DeployedApplicationHealth = original.DeployedApplicationHealth
type DeployedApplicationHealthEvaluation = original.DeployedApplicationHealthEvaluation
type DeployedApplicationHealthReportExpiredEvent = original.DeployedApplicationHealthReportExpiredEvent
type DeployedApplicationHealthState = original.DeployedApplicationHealthState
type DeployedApplicationHealthStateChunk = original.DeployedApplicationHealthStateChunk
type DeployedApplicationHealthStateChunkList = original.DeployedApplicationHealthStateChunkList
type DeployedApplicationHealthStateFilter = original.DeployedApplicationHealthStateFilter
type DeployedApplicationInfo = original.DeployedApplicationInfo
type DeployedApplicationNewHealthReportEvent = original.DeployedApplicationNewHealthReportEvent
type DeployedApplicationsHealthEvaluation = original.DeployedApplicationsHealthEvaluation
type DeployedCodePackageInfo = original.DeployedCodePackageInfo
type DeployedServicePackageHealth = original.DeployedServicePackageHealth
type DeployedServicePackageHealthEvaluation = original.DeployedServicePackageHealthEvaluation
type DeployedServicePackageHealthReportExpiredEvent = original.DeployedServicePackageHealthReportExpiredEvent
type DeployedServicePackageHealthState = original.DeployedServicePackageHealthState
type DeployedServicePackageHealthStateChunk = original.DeployedServicePackageHealthStateChunk
type DeployedServicePackageHealthStateChunkList = original.DeployedServicePackageHealthStateChunkList
type DeployedServicePackageHealthStateFilter = original.DeployedServicePackageHealthStateFilter
type DeployedServicePackageInfo = original.DeployedServicePackageInfo
type DeployedServicePackageNewHealthReportEvent = original.DeployedServicePackageNewHealthReportEvent
type DeployedServicePackagesHealthEvaluation = original.DeployedServicePackagesHealthEvaluation
type DeployedServiceReplicaDetailInfo = original.DeployedServiceReplicaDetailInfo
type DeployedServiceReplicaDetailInfoModel = original.DeployedServiceReplicaDetailInfoModel
type DeployedServiceReplicaInfo = original.DeployedServiceReplicaInfo
type DeployedServiceTypeInfo = original.DeployedServiceTypeInfo
type DeployedStatefulServiceReplicaDetailInfo = original.DeployedStatefulServiceReplicaDetailInfo
type DeployedStatefulServiceReplicaInfo = original.DeployedStatefulServiceReplicaInfo
type DeployedStatelessServiceInstanceDetailInfo = original.DeployedStatelessServiceInstanceDetailInfo
type DeployedStatelessServiceInstanceInfo = original.DeployedStatelessServiceInstanceInfo
type DiagnosticsDescription = original.DiagnosticsDescription
type DiagnosticsRef = original.DiagnosticsRef
type DiagnosticsSinkProperties = original.DiagnosticsSinkProperties
type DisableBackupDescription = original.DisableBackupDescription
type DoublePropertyValue = original.DoublePropertyValue
type EnableBackupDescription = original.EnableBackupDescription
type EndpointProperties = original.EndpointProperties
type EndpointRef = original.EndpointRef
type EnsureAvailabilitySafetyCheck = original.EnsureAvailabilitySafetyCheck
type EnsurePartitionQuorumSafetyCheck = original.EnsurePartitionQuorumSafetyCheck
type EntityHealth = original.EntityHealth
type EntityHealthState = original.EntityHealthState
type EntityHealthStateChunk = original.EntityHealthStateChunk
type EntityHealthStateChunkList = original.EntityHealthStateChunkList
type EntityKindHealthStateCount = original.EntityKindHealthStateCount
type EnvironmentVariable = original.EnvironmentVariable
type Epoch = original.Epoch
type EventHealthEvaluation = original.EventHealthEvaluation
type ExecutingFaultsChaosEvent = original.ExecutingFaultsChaosEvent
type ExternalStoreProvisionApplicationTypeDescription = original.ExternalStoreProvisionApplicationTypeDescription
type FabricCodeVersionInfo = original.FabricCodeVersionInfo
type FabricConfigVersionInfo = original.FabricConfigVersionInfo
type FabricError = original.FabricError
type FabricErrorError = original.FabricErrorError
type FabricEvent = original.FabricEvent
type FailedPropertyBatchInfo = original.FailedPropertyBatchInfo
type FailedUpgradeDomainProgressObject = original.FailedUpgradeDomainProgressObject
type FailureUpgradeDomainProgressInfo = original.FailureUpgradeDomainProgressInfo
type FileInfo = original.FileInfo
type FileShareBackupStorageDescription = original.FileShareBackupStorageDescription
type FileVersion = original.FileVersion
type FolderInfo = original.FolderInfo
type FolderSizeInfo = original.FolderSizeInfo
type FrequencyBasedBackupScheduleDescription = original.FrequencyBasedBackupScheduleDescription
type GUIDPropertyValue = original.GUIDPropertyValue
type GatewayDestination = original.GatewayDestination
type GatewayProperties = original.GatewayProperties
type GatewayResourceDescription = original.GatewayResourceDescription
type GetBackupByStorageQueryDescription = original.GetBackupByStorageQueryDescription
type GetPropertyBatchOperation = original.GetPropertyBatchOperation
type HTTPConfig = original.HTTPConfig
type HTTPHostConfig = original.HTTPHostConfig
type HTTPRouteConfig = original.HTTPRouteConfig
type HTTPRouteMatchHeader = original.HTTPRouteMatchHeader
type HTTPRouteMatchPath = original.HTTPRouteMatchPath
type HTTPRouteMatchRule = original.HTTPRouteMatchRule
type HealthEvaluation = original.HealthEvaluation
type HealthEvaluationWrapper = original.HealthEvaluationWrapper
type HealthEvent = original.HealthEvent
type HealthInformation = original.HealthInformation
type HealthStateCount = original.HealthStateCount
type HealthStatistics = original.HealthStatistics
type IdentityDescription = original.IdentityDescription
type IdentityItemDescription = original.IdentityItemDescription
type ImageRegistryCredential = original.ImageRegistryCredential
type ImageStoreContent = original.ImageStoreContent
type ImageStoreCopyDescription = original.ImageStoreCopyDescription
type InlinedValueSecretResourceProperties = original.InlinedValueSecretResourceProperties
type Int64PropertyValue = original.Int64PropertyValue
type Int64RangePartitionInformation = original.Int64RangePartitionInformation
type InvokeDataLossResult = original.InvokeDataLossResult
type InvokeQuorumLossResult = original.InvokeQuorumLossResult
type KeyValueStoreReplicaStatus = original.KeyValueStoreReplicaStatus
type ListApplicationEvent = original.ListApplicationEvent
type ListClusterEvent = original.ListClusterEvent
type ListContainerInstanceEvent = original.ListContainerInstanceEvent
type ListDeployedCodePackageInfo = original.ListDeployedCodePackageInfo
type ListDeployedServicePackageInfo = original.ListDeployedServicePackageInfo
type ListDeployedServiceReplicaInfo = original.ListDeployedServiceReplicaInfo
type ListDeployedServiceTypeInfo = original.ListDeployedServiceTypeInfo
type ListFabricCodeVersionInfo = original.ListFabricCodeVersionInfo
type ListFabricConfigVersionInfo = original.ListFabricConfigVersionInfo
type ListFabricEvent = original.ListFabricEvent
type ListNodeEvent = original.ListNodeEvent
type ListOperationStatus = original.ListOperationStatus
type ListPartitionEvent = original.ListPartitionEvent
type ListRepairTask = original.ListRepairTask
type ListReplicaEvent = original.ListReplicaEvent
type ListServiceEvent = original.ListServiceEvent
type ListServiceTypeInfo = original.ListServiceTypeInfo
type LoadMetricInformation = original.LoadMetricInformation
type LoadMetricReport = original.LoadMetricReport
type LoadMetricReportInfo = original.LoadMetricReportInfo
type LocalNetworkResourceProperties = original.LocalNetworkResourceProperties
type ManagedApplicationIdentity = original.ManagedApplicationIdentity
type ManagedApplicationIdentityDescription = original.ManagedApplicationIdentityDescription
// Mesh*Client aliases re-export the original package's per-resource API
// client types.
type MeshApplicationClient = original.MeshApplicationClient
type MeshCodePackageClient = original.MeshCodePackageClient
type MeshGatewayClient = original.MeshGatewayClient
type MeshNetworkClient = original.MeshNetworkClient
type MeshSecretClient = original.MeshSecretClient
type MeshSecretValueClient = original.MeshSecretValueClient
type MeshServiceClient = original.MeshServiceClient
type MeshServiceReplicaClient = original.MeshServiceReplicaClient
type MeshVolumeClient = original.MeshVolumeClient
type MonitoringPolicyDescription = original.MonitoringPolicyDescription
type NameDescription = original.NameDescription
type NamedPartitionInformation = original.NamedPartitionInformation
type NamedPartitionSchemeDescription = original.NamedPartitionSchemeDescription
type NetworkRef = original.NetworkRef
type NetworkResourceDescription = original.NetworkResourceDescription
type NetworkResourceProperties = original.NetworkResourceProperties
type NetworkResourcePropertiesBase = original.NetworkResourcePropertiesBase
type NodeAbortedEvent = original.NodeAbortedEvent
type NodeAddedToClusterEvent = original.NodeAddedToClusterEvent
type NodeClosedEvent = original.NodeClosedEvent
type NodeDeactivateCompletedEvent = original.NodeDeactivateCompletedEvent
type NodeDeactivateStartedEvent = original.NodeDeactivateStartedEvent
type NodeDeactivationInfo = original.NodeDeactivationInfo
type NodeDeactivationTask = original.NodeDeactivationTask
type NodeDeactivationTaskID = original.NodeDeactivationTaskID
type NodeDownEvent = original.NodeDownEvent
type NodeEvent = original.NodeEvent
type NodeHealth = original.NodeHealth
type NodeHealthEvaluation = original.NodeHealthEvaluation
type NodeHealthReportExpiredEvent = original.NodeHealthReportExpiredEvent
type NodeHealthState = original.NodeHealthState
type NodeHealthStateChunk = original.NodeHealthStateChunk
type NodeHealthStateChunkList = original.NodeHealthStateChunkList
type NodeHealthStateFilter = original.NodeHealthStateFilter
type NodeID = original.NodeID
type NodeImpact = original.NodeImpact
type NodeInfo = original.NodeInfo
type NodeLoadInfo = original.NodeLoadInfo
type NodeLoadMetricInformation = original.NodeLoadMetricInformation
type NodeNewHealthReportEvent = original.NodeNewHealthReportEvent
type NodeOpenFailedEvent = original.NodeOpenFailedEvent
type NodeOpenSucceededEvent = original.NodeOpenSucceededEvent
type NodeRemovedFromClusterEvent = original.NodeRemovedFromClusterEvent
type NodeRepairImpactDescription = original.NodeRepairImpactDescription
type NodeRepairTargetDescription = original.NodeRepairTargetDescription
type NodeResult = original.NodeResult
type NodeTransitionProgress = original.NodeTransitionProgress
type NodeTransitionResult = original.NodeTransitionResult
type NodeUpEvent = original.NodeUpEvent
type NodeUpgradeProgressInfo = original.NodeUpgradeProgressInfo
type NodesHealthEvaluation = original.NodesHealthEvaluation
type OperationStatus = original.OperationStatus
type PackageSharingPolicyInfo = original.PackageSharingPolicyInfo
type PagedApplicationInfoList = original.PagedApplicationInfoList
type PagedApplicationResourceDescriptionList = original.PagedApplicationResourceDescriptionList
type PagedApplicationTypeInfoList = original.PagedApplicationTypeInfoList
type PagedBackupConfigurationInfoList = original.PagedBackupConfigurationInfoList
type PagedBackupEntityList = original.PagedBackupEntityList
type PagedBackupInfoList = original.PagedBackupInfoList
type PagedBackupPolicyDescriptionList = original.PagedBackupPolicyDescriptionList
type PagedComposeDeploymentStatusInfoList = original.PagedComposeDeploymentStatusInfoList
type PagedDeployedApplicationInfoList = original.PagedDeployedApplicationInfoList
type PagedGatewayResourceDescriptionList = original.PagedGatewayResourceDescriptionList
type PagedNetworkResourceDescriptionList = original.PagedNetworkResourceDescriptionList
type PagedNodeInfoList = original.PagedNodeInfoList
type PagedPropertyInfoList = original.PagedPropertyInfoList
type PagedReplicaInfoList = original.PagedReplicaInfoList
type PagedSecretResourceDescriptionList = original.PagedSecretResourceDescriptionList
type PagedSecretValueResourceDescriptionList = original.PagedSecretValueResourceDescriptionList
type PagedServiceInfoList = original.PagedServiceInfoList
type PagedServicePartitionInfoList = original.PagedServicePartitionInfoList
type PagedServiceReplicaDescriptionList = original.PagedServiceReplicaDescriptionList
type PagedServiceResourceDescriptionList = original.PagedServiceResourceDescriptionList
type PagedSubNameInfoList = original.PagedSubNameInfoList
type PagedVolumeResourceDescriptionList = original.PagedVolumeResourceDescriptionList
type PartitionAnalysisEvent = original.PartitionAnalysisEvent
type PartitionBackupConfigurationInfo = original.PartitionBackupConfigurationInfo
type PartitionBackupEntity = original.PartitionBackupEntity
type PartitionDataLossProgress = original.PartitionDataLossProgress
type PartitionEvent = original.PartitionEvent
type PartitionHealth = original.PartitionHealth
type PartitionHealthEvaluation = original.PartitionHealthEvaluation
type PartitionHealthReportExpiredEvent = original.PartitionHealthReportExpiredEvent
type PartitionHealthState = original.PartitionHealthState
type PartitionHealthStateChunk = original.PartitionHealthStateChunk
type PartitionHealthStateChunkList = original.PartitionHealthStateChunkList
type PartitionHealthStateFilter = original.PartitionHealthStateFilter
type PartitionInformation = original.PartitionInformation
type PartitionInstanceCountScaleMechanism = original.PartitionInstanceCountScaleMechanism
type PartitionLoadInformation = original.PartitionLoadInformation
type PartitionNewHealthReportEvent = original.PartitionNewHealthReportEvent
type PartitionPrimaryMoveAnalysisEvent = original.PartitionPrimaryMoveAnalysisEvent
type PartitionQuorumLossProgress = original.PartitionQuorumLossProgress
type PartitionReconfiguredEvent = original.PartitionReconfiguredEvent
type PartitionRestartProgress = original.PartitionRestartProgress
type PartitionSafetyCheck = original.PartitionSafetyCheck
type PartitionSchemeDescription = original.PartitionSchemeDescription
type PartitionsHealthEvaluation = original.PartitionsHealthEvaluation
type PrimaryReplicatorStatus = original.PrimaryReplicatorStatus
type PropertyBatchDescriptionList = original.PropertyBatchDescriptionList
type PropertyBatchInfo = original.PropertyBatchInfo
type PropertyBatchInfoModel = original.PropertyBatchInfoModel
type PropertyBatchOperation = original.PropertyBatchOperation
type PropertyDescription = original.PropertyDescription
type PropertyInfo = original.PropertyInfo
type PropertyMetadata = original.PropertyMetadata
type PropertyValue = original.PropertyValue
type ProvisionApplicationTypeDescription = original.ProvisionApplicationTypeDescription
type ProvisionApplicationTypeDescriptionBase = original.ProvisionApplicationTypeDescriptionBase
type ProvisionFabricDescription = original.ProvisionFabricDescription
type PutPropertyBatchOperation = original.PutPropertyBatchOperation
type ReconfigurationInformation = original.ReconfigurationInformation
type RegistryCredential = original.RegistryCredential
type ReliableCollectionsRef = original.ReliableCollectionsRef
type RemoteReplicatorAcknowledgementDetail = original.RemoteReplicatorAcknowledgementDetail
type RemoteReplicatorAcknowledgementStatus = original.RemoteReplicatorAcknowledgementStatus
type RemoteReplicatorStatus = original.RemoteReplicatorStatus
type RepairImpactDescriptionBase = original.RepairImpactDescriptionBase
type RepairTargetDescriptionBase = original.RepairTargetDescriptionBase
type RepairTask = original.RepairTask
type RepairTaskApproveDescription = original.RepairTaskApproveDescription
type RepairTaskCancelDescription = original.RepairTaskCancelDescription
type RepairTaskDeleteDescription = original.RepairTaskDeleteDescription
type RepairTaskHistory = original.RepairTaskHistory
type RepairTaskUpdateHealthPolicyDescription = original.RepairTaskUpdateHealthPolicyDescription
type RepairTaskUpdateInfo = original.RepairTaskUpdateInfo
type ReplicaEvent = original.ReplicaEvent
type ReplicaHealth = original.ReplicaHealth
type ReplicaHealthEvaluation = original.ReplicaHealthEvaluation
type ReplicaHealthModel = original.ReplicaHealthModel
type ReplicaHealthState = original.ReplicaHealthState
type ReplicaHealthStateChunk = original.ReplicaHealthStateChunk
type ReplicaHealthStateChunkList = original.ReplicaHealthStateChunkList
type ReplicaHealthStateFilter = original.ReplicaHealthStateFilter
type ReplicaInfo = original.ReplicaInfo
type ReplicaInfoModel = original.ReplicaInfoModel
type ReplicaStatusBase = original.ReplicaStatusBase
type ReplicasHealthEvaluation = original.ReplicasHealthEvaluation
type ReplicatorQueueStatus = original.ReplicatorQueueStatus
type ReplicatorStatus = original.ReplicatorStatus
type ResolvedServiceEndpoint = original.ResolvedServiceEndpoint
type ResolvedServicePartition = original.ResolvedServicePartition
type ResourceLimits = original.ResourceLimits
type ResourceRequests = original.ResourceRequests
type ResourceRequirements = original.ResourceRequirements
type RestartDeployedCodePackageDescription = original.RestartDeployedCodePackageDescription
type RestartNodeDescription = original.RestartNodeDescription
type RestartPartitionResult = original.RestartPartitionResult
type RestorePartitionDescription = original.RestorePartitionDescription
type RestoreProgressInfo = original.RestoreProgressInfo
type ResumeApplicationUpgradeDescription = original.ResumeApplicationUpgradeDescription
type ResumeClusterUpgradeDescription = original.ResumeClusterUpgradeDescription
type RetentionPolicyDescription = original.RetentionPolicyDescription
type RollingUpgradeUpdateDescription = original.RollingUpgradeUpdateDescription
type SafetyCheck = original.SafetyCheck
type SafetyCheckWrapper = original.SafetyCheckWrapper
type ScalingMechanismDescription = original.ScalingMechanismDescription
type ScalingPolicyDescription = original.ScalingPolicyDescription
type ScalingTriggerDescription = original.ScalingTriggerDescription
type SecondaryActiveReplicatorStatus = original.SecondaryActiveReplicatorStatus
type SecondaryIdleReplicatorStatus = original.SecondaryIdleReplicatorStatus
type SecondaryReplicatorStatus = original.SecondaryReplicatorStatus
type SecretResourceDescription = original.SecretResourceDescription
type SecretResourceProperties = original.SecretResourceProperties
type SecretResourcePropertiesBase = original.SecretResourcePropertiesBase
type SecretValue = original.SecretValue
type SecretValueProperties = original.SecretValueProperties
type SecretValueResourceDescription = original.SecretValueResourceDescription
type SecretValueResourceProperties = original.SecretValueResourceProperties
type SeedNodeSafetyCheck = original.SeedNodeSafetyCheck
type SelectedPartition = original.SelectedPartition
type ServiceBackupConfigurationInfo = original.ServiceBackupConfigurationInfo
type ServiceBackupEntity = original.ServiceBackupEntity
type ServiceCorrelationDescription = original.ServiceCorrelationDescription
type ServiceCreatedEvent = original.ServiceCreatedEvent
type ServiceDeletedEvent = original.ServiceDeletedEvent
type ServiceDescription = original.ServiceDescription
type ServiceDescriptionModel = original.ServiceDescriptionModel
type ServiceEvent = original.ServiceEvent
type ServiceFromTemplateDescription = original.ServiceFromTemplateDescription
type ServiceHealth = original.ServiceHealth
type ServiceHealthEvaluation = original.ServiceHealthEvaluation
type ServiceHealthReportExpiredEvent = original.ServiceHealthReportExpiredEvent
type ServiceHealthState = original.ServiceHealthState
type ServiceHealthStateChunk = original.ServiceHealthStateChunk
type ServiceHealthStateChunkList = original.ServiceHealthStateChunkList
type ServiceHealthStateFilter = original.ServiceHealthStateFilter
type ServiceIdentity = original.ServiceIdentity
type ServiceInfo = original.ServiceInfo
type ServiceInfoModel = original.ServiceInfoModel
type ServiceLoadMetricDescription = original.ServiceLoadMetricDescription
type ServiceNameInfo = original.ServiceNameInfo
type ServiceNewHealthReportEvent = original.ServiceNewHealthReportEvent
type ServicePartitionInfo = original.ServicePartitionInfo
type ServicePartitionInfoModel = original.ServicePartitionInfoModel
type ServicePlacementInvalidDomainPolicyDescription = original.ServicePlacementInvalidDomainPolicyDescription
type ServicePlacementNonPartiallyPlaceServicePolicyDescription = original.ServicePlacementNonPartiallyPlaceServicePolicyDescription
type ServicePlacementPolicyDescription = original.ServicePlacementPolicyDescription
type ServicePlacementPreferPrimaryDomainPolicyDescription = original.ServicePlacementPreferPrimaryDomainPolicyDescription
type ServicePlacementRequireDomainDistributionPolicyDescription = original.ServicePlacementRequireDomainDistributionPolicyDescription
type ServicePlacementRequiredDomainPolicyDescription = original.ServicePlacementRequiredDomainPolicyDescription
type ServiceProperties = original.ServiceProperties
type ServiceReplicaDescription = original.ServiceReplicaDescription
type ServiceReplicaProperties = original.ServiceReplicaProperties
type ServiceResourceDescription = original.ServiceResourceDescription
type ServiceResourceProperties = original.ServiceResourceProperties
type ServiceTypeDescription = original.ServiceTypeDescription
type ServiceTypeExtensionDescription = original.ServiceTypeExtensionDescription
type ServiceTypeHealthPolicy = original.ServiceTypeHealthPolicy
type ServiceTypeHealthPolicyMapItem = original.ServiceTypeHealthPolicyMapItem
type ServiceTypeInfo = original.ServiceTypeInfo
type ServiceTypeManifest = original.ServiceTypeManifest
type ServiceUpdateDescription = original.ServiceUpdateDescription
type ServicesHealthEvaluation = original.ServicesHealthEvaluation
type Setting = original.Setting
type SingletonPartitionInformation = original.SingletonPartitionInformation
type SingletonPartitionSchemeDescription = original.SingletonPartitionSchemeDescription
type StartClusterUpgradeDescription = original.StartClusterUpgradeDescription
type StartedChaosEvent = original.StartedChaosEvent
type StatefulReplicaHealthReportExpiredEvent = original.StatefulReplicaHealthReportExpiredEvent
type StatefulReplicaNewHealthReportEvent = original.StatefulReplicaNewHealthReportEvent
type StatefulServiceDescription = original.StatefulServiceDescription
type StatefulServiceInfo = original.StatefulServiceInfo
type StatefulServicePartitionInfo = original.StatefulServicePartitionInfo
type StatefulServiceReplicaHealth = original.StatefulServiceReplicaHealth
type StatefulServiceReplicaHealthState = original.StatefulServiceReplicaHealthState
type StatefulServiceReplicaInfo = original.StatefulServiceReplicaInfo
type StatefulServiceTypeDescription = original.StatefulServiceTypeDescription
type StatefulServiceUpdateDescription = original.StatefulServiceUpdateDescription
type StatelessReplicaHealthReportExpiredEvent = original.StatelessReplicaHealthReportExpiredEvent
type StatelessReplicaNewHealthReportEvent = original.StatelessReplicaNewHealthReportEvent
type StatelessServiceDescription = original.StatelessServiceDescription
type StatelessServiceInfo = original.StatelessServiceInfo
type StatelessServiceInstanceHealth = original.StatelessServiceInstanceHealth
type StatelessServiceInstanceHealthState = original.StatelessServiceInstanceHealthState
type StatelessServiceInstanceInfo = original.StatelessServiceInstanceInfo
type StatelessServicePartitionInfo = original.StatelessServicePartitionInfo
type StatelessServiceTypeDescription = original.StatelessServiceTypeDescription
type StatelessServiceUpdateDescription = original.StatelessServiceUpdateDescription
type StoppedChaosEvent = original.StoppedChaosEvent
type String = original.String
type StringPropertyValue = original.StringPropertyValue
type SuccessfulPropertyBatchInfo = original.SuccessfulPropertyBatchInfo
type SystemApplicationHealthEvaluation = original.SystemApplicationHealthEvaluation
type TCPConfig = original.TCPConfig
type TestErrorChaosEvent = original.TestErrorChaosEvent
type TimeBasedBackupScheduleDescription = original.TimeBasedBackupScheduleDescription
type TimeOfDay = original.TimeOfDay
type TimeRange = original.TimeRange
type UniformInt64RangePartitionSchemeDescription = original.UniformInt64RangePartitionSchemeDescription
type UnplacedReplicaInformation = original.UnplacedReplicaInformation
type UnprovisionApplicationTypeDescriptionInfo = original.UnprovisionApplicationTypeDescriptionInfo
type UnprovisionFabricDescription = original.UnprovisionFabricDescription
type UpdateClusterUpgradeDescription = original.UpdateClusterUpgradeDescription
type UpgradeDomainDeltaNodesCheckHealthEvaluation = original.UpgradeDomainDeltaNodesCheckHealthEvaluation
type UpgradeDomainInfo = original.UpgradeDomainInfo
type UpgradeDomainNodesHealthEvaluation = original.UpgradeDomainNodesHealthEvaluation
type UpgradeOrchestrationServiceState = original.UpgradeOrchestrationServiceState
type UpgradeOrchestrationServiceStateSummary = original.UpgradeOrchestrationServiceStateSummary
type UploadChunkRange = original.UploadChunkRange
type UploadSession = original.UploadSession
type UploadSessionInfo = original.UploadSessionInfo
type ValidationFailedChaosEvent = original.ValidationFailedChaosEvent
type VolumeProperties = original.VolumeProperties
type VolumeProviderParametersAzureFile = original.VolumeProviderParametersAzureFile
type VolumeReference = original.VolumeReference
type VolumeResourceDescription = original.VolumeResourceDescription
type WaitForInbuildReplicaSafetyCheck = original.WaitForInbuildReplicaSafetyCheck
type WaitForPrimaryPlacementSafetyCheck = original.WaitForPrimaryPlacementSafetyCheck
type WaitForPrimarySwapSafetyCheck = original.WaitForPrimarySwapSafetyCheck
type WaitForReconfigurationSafetyCheck = original.WaitForReconfigurationSafetyCheck
type WaitingChaosEvent = original.WaitingChaosEvent
// Client constructors. Each forwards directly to the stable ("original")
// package so the preview profile offers the same construction API.
// The *WithBaseURI variants target a non-default Service Fabric endpoint.
// NOTE(review): generated forwarding code — do not hand-edit.

// New creates a BaseClient using the package's default endpoint.
func New() BaseClient {
return original.New()
}
func NewMeshApplicationClient() MeshApplicationClient {
return original.NewMeshApplicationClient()
}
func NewMeshApplicationClientWithBaseURI(baseURI string) MeshApplicationClient {
return original.NewMeshApplicationClientWithBaseURI(baseURI)
}
func NewMeshCodePackageClient() MeshCodePackageClient {
return original.NewMeshCodePackageClient()
}
func NewMeshCodePackageClientWithBaseURI(baseURI string) MeshCodePackageClient {
return original.NewMeshCodePackageClientWithBaseURI(baseURI)
}
func NewMeshGatewayClient() MeshGatewayClient {
return original.NewMeshGatewayClient()
}
func NewMeshGatewayClientWithBaseURI(baseURI string) MeshGatewayClient {
return original.NewMeshGatewayClientWithBaseURI(baseURI)
}
func NewMeshNetworkClient() MeshNetworkClient {
return original.NewMeshNetworkClient()
}
func NewMeshNetworkClientWithBaseURI(baseURI string) MeshNetworkClient {
return original.NewMeshNetworkClientWithBaseURI(baseURI)
}
func NewMeshSecretClient() MeshSecretClient {
return original.NewMeshSecretClient()
}
func NewMeshSecretClientWithBaseURI(baseURI string) MeshSecretClient {
return original.NewMeshSecretClientWithBaseURI(baseURI)
}
func NewMeshSecretValueClient() MeshSecretValueClient {
return original.NewMeshSecretValueClient()
}
func NewMeshSecretValueClientWithBaseURI(baseURI string) MeshSecretValueClient {
return original.NewMeshSecretValueClientWithBaseURI(baseURI)
}
func NewMeshServiceClient() MeshServiceClient {
return original.NewMeshServiceClient()
}
func NewMeshServiceClientWithBaseURI(baseURI string) MeshServiceClient {
return original.NewMeshServiceClientWithBaseURI(baseURI)
}
func NewMeshServiceReplicaClient() MeshServiceReplicaClient {
return original.NewMeshServiceReplicaClient()
}
func NewMeshServiceReplicaClientWithBaseURI(baseURI string) MeshServiceReplicaClient {
return original.NewMeshServiceReplicaClientWithBaseURI(baseURI)
}
func NewMeshVolumeClient() MeshVolumeClient {
return original.NewMeshVolumeClient()
}
func NewMeshVolumeClientWithBaseURI(baseURI string) MeshVolumeClient {
return original.NewMeshVolumeClientWithBaseURI(baseURI)
}
// NewWithBaseURI creates a BaseClient that targets the given base URI.
func NewWithBaseURI(baseURI string) BaseClient {
return original.NewWithBaseURI(baseURI)
}
// Possible*Values helpers. Each returns the set of defined constants for the
// corresponding enum-like string type, forwarded unchanged from the stable
// ("original") package.
// NOTE(review): generated forwarding code — do not hand-edit.
func PossibleApplicationDefinitionKindValues() []ApplicationDefinitionKind {
return original.PossibleApplicationDefinitionKindValues()
}
func PossibleApplicationPackageCleanupPolicyValues() []ApplicationPackageCleanupPolicy {
return original.PossibleApplicationPackageCleanupPolicyValues()
}
func PossibleApplicationScopedVolumeKindValues() []ApplicationScopedVolumeKind {
return original.PossibleApplicationScopedVolumeKindValues()
}
func PossibleApplicationStatusValues() []ApplicationStatus {
return original.PossibleApplicationStatusValues()
}
func PossibleApplicationTypeDefinitionKindValues() []ApplicationTypeDefinitionKind {
return original.PossibleApplicationTypeDefinitionKindValues()
}
func PossibleApplicationTypeStatusValues() []ApplicationTypeStatus {
return original.PossibleApplicationTypeStatusValues()
}
func PossibleAutoScalingMechanismKindValues() []AutoScalingMechanismKind {
return original.PossibleAutoScalingMechanismKindValues()
}
func PossibleAutoScalingMetricKindValues() []AutoScalingMetricKind {
return original.PossibleAutoScalingMetricKindValues()
}
func PossibleAutoScalingResourceMetricNameValues() []AutoScalingResourceMetricName {
return original.PossibleAutoScalingResourceMetricNameValues()
}
func PossibleAutoScalingTriggerKindValues() []AutoScalingTriggerKind {
return original.PossibleAutoScalingTriggerKindValues()
}
func PossibleBackupEntityKindValues() []BackupEntityKind {
return original.PossibleBackupEntityKindValues()
}
func PossibleBackupPolicyScopeValues() []BackupPolicyScope {
return original.PossibleBackupPolicyScopeValues()
}
func PossibleBackupScheduleFrequencyTypeValues() []BackupScheduleFrequencyType {
return original.PossibleBackupScheduleFrequencyTypeValues()
}
func PossibleBackupScheduleKindValues() []BackupScheduleKind {
return original.PossibleBackupScheduleKindValues()
}
func PossibleBackupStateValues() []BackupState {
return original.PossibleBackupStateValues()
}
func PossibleBackupStorageKindValues() []BackupStorageKind {
return original.PossibleBackupStorageKindValues()
}
func PossibleBackupSuspensionScopeValues() []BackupSuspensionScope {
return original.PossibleBackupSuspensionScopeValues()
}
func PossibleBackupTypeValues() []BackupType {
return original.PossibleBackupTypeValues()
}
func PossibleChaosEventKindValues() []ChaosEventKind {
return original.PossibleChaosEventKindValues()
}
func PossibleChaosScheduleStatusValues() []ChaosScheduleStatus {
return original.PossibleChaosScheduleStatusValues()
}
func PossibleChaosStatusValues() []ChaosStatus {
return original.PossibleChaosStatusValues()
}
func PossibleComposeDeploymentStatusValues() []ComposeDeploymentStatus {
return original.PossibleComposeDeploymentStatusValues()
}
func PossibleComposeDeploymentUpgradeStateValues() []ComposeDeploymentUpgradeState {
return original.PossibleComposeDeploymentUpgradeStateValues()
}
func PossibleCreateFabricDumpValues() []CreateFabricDump {
return original.PossibleCreateFabricDumpValues()
}
func PossibleDataLossModeValues() []DataLossMode {
return original.PossibleDataLossModeValues()
}
func PossibleDayOfWeekValues() []DayOfWeek {
return original.PossibleDayOfWeekValues()
}
func PossibleDeactivationIntentValues() []DeactivationIntent {
return original.PossibleDeactivationIntentValues()
}
func PossibleDeployedApplicationStatusValues() []DeployedApplicationStatus {
return original.PossibleDeployedApplicationStatusValues()
}
func PossibleDeploymentStatusValues() []DeploymentStatus {
return original.PossibleDeploymentStatusValues()
}
func PossibleDiagnosticsSinkKindValues() []DiagnosticsSinkKind {
return original.PossibleDiagnosticsSinkKindValues()
}
func PossibleEntityKindBasicBackupEntityValues() []EntityKindBasicBackupEntity {
return original.PossibleEntityKindBasicBackupEntityValues()
}
func PossibleEntityKindValues() []EntityKind {
return original.PossibleEntityKindValues()
}
func PossibleEntryPointStatusValues() []EntryPointStatus {
return original.PossibleEntryPointStatusValues()
}
func PossibleFabricErrorCodesValues() []FabricErrorCodes {
return original.PossibleFabricErrorCodesValues()
}
func PossibleFabricEventKindValues() []FabricEventKind {
return original.PossibleFabricEventKindValues()
}
func PossibleFabricReplicaStatusValues() []FabricReplicaStatus {
return original.PossibleFabricReplicaStatusValues()
}
func PossibleFailureActionValues() []FailureAction {
return original.PossibleFailureActionValues()
}
func PossibleFailureReasonValues() []FailureReason {
return original.PossibleFailureReasonValues()
}
func PossibleHeaderMatchTypeValues() []HeaderMatchType {
return original.PossibleHeaderMatchTypeValues()
}
func PossibleHealthEvaluationKindValues() []HealthEvaluationKind {
return original.PossibleHealthEvaluationKindValues()
}
func PossibleHealthStateValues() []HealthState {
return original.PossibleHealthStateValues()
}
func PossibleHostIsolationModeValues() []HostIsolationMode {
return original.PossibleHostIsolationModeValues()
}
func PossibleHostTypeValues() []HostType {
return original.PossibleHostTypeValues()
}
func PossibleImpactLevelValues() []ImpactLevel {
return original.PossibleImpactLevelValues()
}
func PossibleKindBasicApplicationScopedVolumeCreationParametersValues() []KindBasicApplicationScopedVolumeCreationParameters {
return original.PossibleKindBasicApplicationScopedVolumeCreationParametersValues()
}
func PossibleKindBasicAutoScalingMechanismValues() []KindBasicAutoScalingMechanism {
return original.PossibleKindBasicAutoScalingMechanismValues()
}
func PossibleKindBasicAutoScalingMetricValues() []KindBasicAutoScalingMetric {
return original.PossibleKindBasicAutoScalingMetricValues()
}
func PossibleKindBasicAutoScalingTriggerValues() []KindBasicAutoScalingTrigger {
return original.PossibleKindBasicAutoScalingTriggerValues()
}
func PossibleKindBasicBackupConfigurationInfoValues() []KindBasicBackupConfigurationInfo {
return original.PossibleKindBasicBackupConfigurationInfoValues()
}
func PossibleKindBasicChaosEventValues() []KindBasicChaosEvent {
return original.PossibleKindBasicChaosEventValues()
}
func PossibleKindBasicDiagnosticsSinkPropertiesValues() []KindBasicDiagnosticsSinkProperties {
return original.PossibleKindBasicDiagnosticsSinkPropertiesValues()
}
func PossibleKindBasicFabricEventValues() []KindBasicFabricEvent {
return original.PossibleKindBasicFabricEventValues()
}
func PossibleKindBasicNetworkResourcePropertiesBaseValues() []KindBasicNetworkResourcePropertiesBase {
return original.PossibleKindBasicNetworkResourcePropertiesBaseValues()
}
func PossibleKindBasicPropertyBatchInfoValues() []KindBasicPropertyBatchInfo {
return original.PossibleKindBasicPropertyBatchInfoValues()
}
func PossibleKindBasicPropertyBatchOperationValues() []KindBasicPropertyBatchOperation {
return original.PossibleKindBasicPropertyBatchOperationValues()
}
func PossibleKindBasicPropertyValueValues() []KindBasicPropertyValue {
return original.PossibleKindBasicPropertyValueValues()
}
func PossibleKindBasicProvisionApplicationTypeDescriptionBaseValues() []KindBasicProvisionApplicationTypeDescriptionBase {
return original.PossibleKindBasicProvisionApplicationTypeDescriptionBaseValues()
}
func PossibleKindBasicRepairImpactDescriptionBaseValues() []KindBasicRepairImpactDescriptionBase {
return original.PossibleKindBasicRepairImpactDescriptionBaseValues()
}
func PossibleKindBasicRepairTargetDescriptionBaseValues() []KindBasicRepairTargetDescriptionBase {
return original.PossibleKindBasicRepairTargetDescriptionBaseValues()
}
func PossibleKindBasicReplicaStatusBaseValues() []KindBasicReplicaStatusBase {
return original.PossibleKindBasicReplicaStatusBaseValues()
}
func PossibleKindBasicReplicatorStatusValues() []KindBasicReplicatorStatus {
return original.PossibleKindBasicReplicatorStatusValues()
}
func PossibleKindBasicSafetyCheckValues() []KindBasicSafetyCheck {
return original.PossibleKindBasicSafetyCheckValues()
}
func PossibleKindBasicScalingMechanismDescriptionValues() []KindBasicScalingMechanismDescription {
return original.PossibleKindBasicScalingMechanismDescriptionValues()
}
func PossibleKindBasicScalingTriggerDescriptionValues() []KindBasicScalingTriggerDescription {
return original.PossibleKindBasicScalingTriggerDescriptionValues()
}
func PossibleKindBasicSecretResourcePropertiesBaseValues() []KindBasicSecretResourcePropertiesBase {
return original.PossibleKindBasicSecretResourcePropertiesBaseValues()
}
func PossibleKindBasicServiceTypeDescriptionValues() []KindBasicServiceTypeDescription {
return original.PossibleKindBasicServiceTypeDescriptionValues()
}
func PossibleKindValues() []Kind {
return original.PossibleKindValues()
}
func PossibleMoveCostValues() []MoveCost {
return original.PossibleMoveCostValues()
}
func PossibleNetworkKindValues() []NetworkKind {
return original.PossibleNetworkKindValues()
}
func PossibleNodeDeactivationIntentValues() []NodeDeactivationIntent {
return original.PossibleNodeDeactivationIntentValues()
}
func PossibleNodeDeactivationStatusValues() []NodeDeactivationStatus {
return original.PossibleNodeDeactivationStatusValues()
}
func PossibleNodeDeactivationTaskTypeValues() []NodeDeactivationTaskType {
return original.PossibleNodeDeactivationTaskTypeValues()
}
func PossibleNodeStatusFilterValues() []NodeStatusFilter {
return original.PossibleNodeStatusFilterValues()
}
func PossibleNodeStatusValues() []NodeStatus {
return original.PossibleNodeStatusValues()
}
func PossibleNodeTransitionTypeValues() []NodeTransitionType {
return original.PossibleNodeTransitionTypeValues()
}
func PossibleNodeUpgradePhaseValues() []NodeUpgradePhase {
return original.PossibleNodeUpgradePhaseValues()
}
func PossibleOperatingSystemTypeValues() []OperatingSystemType {
return original.PossibleOperatingSystemTypeValues()
}
func PossibleOperationStateValues() []OperationState {
return original.PossibleOperationStateValues()
}
func PossibleOperationTypeValues() []OperationType {
return original.PossibleOperationTypeValues()
}
func PossiblePackageSharingPolicyScopeValues() []PackageSharingPolicyScope {
return original.PossiblePackageSharingPolicyScopeValues()
}
func PossiblePartitionAccessStatusValues() []PartitionAccessStatus {
return original.PossiblePartitionAccessStatusValues()
}
func PossiblePartitionSchemeBasicPartitionSchemeDescriptionValues() []PartitionSchemeBasicPartitionSchemeDescription {
return original.PossiblePartitionSchemeBasicPartitionSchemeDescriptionValues()
}
func PossiblePartitionSchemeValues() []PartitionScheme {
return original.PossiblePartitionSchemeValues()
}
func PossiblePropertyBatchInfoKindValues() []PropertyBatchInfoKind {
return original.PossiblePropertyBatchInfoKindValues()
}
func PossiblePropertyBatchOperationKindValues() []PropertyBatchOperationKind {
return original.PossiblePropertyBatchOperationKindValues()
}
func PossiblePropertyValueKindValues() []PropertyValueKind {
return original.PossiblePropertyValueKindValues()
}
func PossibleProvisionApplicationTypeKindValues() []ProvisionApplicationTypeKind {
return original.PossibleProvisionApplicationTypeKindValues()
}
func PossibleQuorumLossModeValues() []QuorumLossMode {
return original.PossibleQuorumLossModeValues()
}
func PossibleReconfigurationPhaseValues() []ReconfigurationPhase {
return original.PossibleReconfigurationPhaseValues()
}
func PossibleReconfigurationTypeValues() []ReconfigurationType {
return original.PossibleReconfigurationTypeValues()
}
func PossibleRepairImpactKindValues() []RepairImpactKind {
return original.PossibleRepairImpactKindValues()
}
func PossibleRepairTargetKindValues() []RepairTargetKind {
return original.PossibleRepairTargetKindValues()
}
func PossibleRepairTaskHealthCheckStateValues() []RepairTaskHealthCheckState {
return original.PossibleRepairTaskHealthCheckStateValues()
}
func PossibleReplicaHealthReportServiceKindValues() []ReplicaHealthReportServiceKind {
return original.PossibleReplicaHealthReportServiceKindValues()
}
func PossibleReplicaKindValues() []ReplicaKind {
return original.PossibleReplicaKindValues()
}
func PossibleReplicaRoleValues() []ReplicaRole {
return original.PossibleReplicaRoleValues()
}
func PossibleReplicaStatusValues() []ReplicaStatus {
return original.PossibleReplicaStatusValues()
}
func PossibleReplicatorOperationNameValues() []ReplicatorOperationName {
return original.PossibleReplicatorOperationNameValues()
}
func PossibleResourceStatusValues() []ResourceStatus {
return original.PossibleResourceStatusValues()
}
func PossibleRestartPartitionModeValues() []RestartPartitionMode {
return original.PossibleRestartPartitionModeValues()
}
func PossibleRestoreStateValues() []RestoreState {
return original.PossibleRestoreStateValues()
}
func PossibleResultStatusValues() []ResultStatus {
return original.PossibleResultStatusValues()
}
func PossibleRetentionPolicyTypeBasicBasicRetentionPolicyDescriptionValues() []RetentionPolicyTypeBasicBasicRetentionPolicyDescription {
return original.PossibleRetentionPolicyTypeBasicBasicRetentionPolicyDescriptionValues()
}
func PossibleRetentionPolicyTypeValues() []RetentionPolicyType {
return original.PossibleRetentionPolicyTypeValues()
}
func PossibleSafetyCheckKindValues() []SafetyCheckKind {
return original.PossibleSafetyCheckKindValues()
}
func PossibleScalingMechanismKindValues() []ScalingMechanismKind {
return original.PossibleScalingMechanismKindValues()
}
func PossibleScalingTriggerKindValues() []ScalingTriggerKind {
return original.PossibleScalingTriggerKindValues()
}
func PossibleScheduleKindValues() []ScheduleKind {
return original.PossibleScheduleKindValues()
}
func PossibleSecretKindValues() []SecretKind {
return original.PossibleSecretKindValues()
}
func PossibleServiceCorrelationSchemeValues() []ServiceCorrelationScheme {
return original.PossibleServiceCorrelationSchemeValues()
}
func PossibleServiceEndpointRoleValues() []ServiceEndpointRole {
return original.PossibleServiceEndpointRoleValues()
}
func PossibleServiceKindBasicDeployedServiceReplicaDetailInfoValues() []ServiceKindBasicDeployedServiceReplicaDetailInfo {
return original.PossibleServiceKindBasicDeployedServiceReplicaDetailInfoValues()
}
func PossibleServiceKindBasicDeployedServiceReplicaInfoValues() []ServiceKindBasicDeployedServiceReplicaInfo {
return original.PossibleServiceKindBasicDeployedServiceReplicaInfoValues()
}
func PossibleServiceKindBasicReplicaHealthStateValues() []ServiceKindBasicReplicaHealthState {
return original.PossibleServiceKindBasicReplicaHealthStateValues()
}
func PossibleServiceKindBasicReplicaHealthValues() []ServiceKindBasicReplicaHealth {
return original.PossibleServiceKindBasicReplicaHealthValues()
}
func PossibleServiceKindBasicReplicaInfoValues() []ServiceKindBasicReplicaInfo {
return original.PossibleServiceKindBasicReplicaInfoValues()
}
func PossibleServiceKindBasicServiceDescriptionValues() []ServiceKindBasicServiceDescription {
return original.PossibleServiceKindBasicServiceDescriptionValues()
}
func PossibleServiceKindBasicServiceInfoValues() []ServiceKindBasicServiceInfo {
return original.PossibleServiceKindBasicServiceInfoValues()
}
func PossibleServiceKindBasicServicePartitionInfoValues() []ServiceKindBasicServicePartitionInfo {
return original.PossibleServiceKindBasicServicePartitionInfoValues()
}
func PossibleServiceKindBasicServiceUpdateDescriptionValues() []ServiceKindBasicServiceUpdateDescription {
return original.PossibleServiceKindBasicServiceUpdateDescriptionValues()
}
func PossibleServiceKindValues() []ServiceKind {
return original.PossibleServiceKindValues()
}
func PossibleServiceLoadMetricWeightValues() []ServiceLoadMetricWeight {
return original.PossibleServiceLoadMetricWeightValues()
}
func PossibleServiceOperationNameValues() []ServiceOperationName {
return original.PossibleServiceOperationNameValues()
}
func PossibleServicePackageActivationModeValues() []ServicePackageActivationMode {
return original.PossibleServicePackageActivationModeValues()
}
func PossibleServicePartitionKindBasicPartitionInformationValues() []ServicePartitionKindBasicPartitionInformation {
return original.PossibleServicePartitionKindBasicPartitionInformationValues()
}
func PossibleServicePartitionKindValues() []ServicePartitionKind {
return original.PossibleServicePartitionKindValues()
}
func PossibleServicePartitionStatusValues() []ServicePartitionStatus {
return original.PossibleServicePartitionStatusValues()
}
func PossibleServicePlacementPolicyTypeValues() []ServicePlacementPolicyType {
return original.PossibleServicePlacementPolicyTypeValues()
}
func PossibleServiceStatusValues() []ServiceStatus {
return original.PossibleServiceStatusValues()
}
func PossibleServiceTypeRegistrationStatusValues() []ServiceTypeRegistrationStatus {
return original.PossibleServiceTypeRegistrationStatusValues()
}
func PossibleSizeTypesValues() []SizeTypes {
return original.PossibleSizeTypesValues()
}
func PossibleStateValues() []State {
return original.PossibleStateValues()
}
func PossibleStorageKindValues() []StorageKind {
return original.PossibleStorageKindValues()
}
func PossibleTypeValues() []Type {
return original.PossibleTypeValues()
}
func PossibleUpgradeDomainStateValues() []UpgradeDomainState {
return original.PossibleUpgradeDomainStateValues()
}
func PossibleUpgradeKindValues() []UpgradeKind {
return original.PossibleUpgradeKindValues()
}
func PossibleUpgradeModeValues() []UpgradeMode {
return original.PossibleUpgradeModeValues()
}
func PossibleUpgradeSortOrderValues() []UpgradeSortOrder {
return original.PossibleUpgradeSortOrderValues()
}
func PossibleUpgradeStateValues() []UpgradeState {
return original.PossibleUpgradeStateValues()
}
func PossibleUpgradeTypeValues() []UpgradeType {
return original.PossibleUpgradeTypeValues()
}
func PossibleVolumeProviderValues() []VolumeProvider {
return original.PossibleVolumeProviderValues()
}
// UserAgent returns the stable package's user-agent string with the
// " profiles/preview" suffix appended, so requests made through this
// profile are distinguishable in telemetry.
func UserAgent() string {
return original.UserAgent() + " profiles/preview"
}

// Version forwards the SDK version string from the stable package.
func Version() string {
return original.Version()
}
| pweil-/origin | vendor/github.com/Azure/azure-sdk-for-go/profiles/preview/servicefabric/servicefabric/models.go | GO | apache-2.0 | 148,564 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.metrics;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Total;
import org.junit.Test;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import java.lang.management.ManagementFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Verifies that {@link JmxReporter} registers and unregisters MBeans on the
 * platform MBean server as metrics are added to and removed from a
 * {@link Metrics} registry, and that metric tag values containing characters
 * that are special to {@link ObjectName} are quoted/escaped correctly.
 */
public class JmxReporterTest {

    @Test
    public void testJmxRegistration() throws Exception {
        Metrics metrics = new Metrics();
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        try {
            JmxReporter reporter = new JmxReporter();
            metrics.addReporter(reporter);
            // No MBean may exist before any metric is registered.
            assertFalse(server.isRegistered(new ObjectName(":type=grp1")));
            Sensor sensor = metrics.sensor("kafka.requests");
            // Adding metrics in two different groups should create one MBean per group.
            sensor.add(metrics.metricName("pack.bean1.avg", "grp1"), new Avg());
            sensor.add(metrics.metricName("pack.bean2.total", "grp2"), new Total());
            assertTrue(server.isRegistered(new ObjectName(":type=grp1")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=grp1"), "pack.bean1.avg"));
            assertTrue(server.isRegistered(new ObjectName(":type=grp2")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=grp2"), "pack.bean2.total"));
            MetricName metricName = metrics.metricName("pack.bean1.avg", "grp1");
            String mBeanName = JmxReporter.getMBeanName("", metricName);
            assertTrue(reporter.containsMbean(mBeanName));
            // Removing the only metric in grp1 must unregister its MBean but leave grp2 intact.
            metrics.removeMetric(metricName);
            assertFalse(reporter.containsMbean(mBeanName));
            assertFalse(server.isRegistered(new ObjectName(":type=grp1")));
            assertTrue(server.isRegistered(new ObjectName(":type=grp2")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=grp2"), "pack.bean2.total"));
            metricName = metrics.metricName("pack.bean2.total", "grp2");
            metrics.removeMetric(metricName);
            // NOTE(review): mBeanName still refers to the grp1 bean here, so this
            // assertion repeats the earlier check rather than verifying the grp2
            // bean was dropped from the reporter — confirm whether grp2's MBean
            // name was meant to be recomputed.
            assertFalse(reporter.containsMbean(mBeanName));
            assertFalse(server.isRegistered(new ObjectName(":type=grp1")));
            assertFalse(server.isRegistered(new ObjectName(":type=grp2")));
        } finally {
            // Closing the registry also unregisters any remaining MBeans.
            metrics.close();
        }
    }

    @Test
    public void testJmxRegistrationSanitization() throws Exception {
        Metrics metrics = new Metrics();
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        try {
            metrics.addReporter(new JmxReporter());
            Sensor sensor = metrics.sensor("kafka.requests");
            // Tag values containing ObjectName special characters (* + ? : %).
            sensor.add(metrics.metricName("name", "group", "desc", "id", "foo*"), new Total());
            sensor.add(metrics.metricName("name", "group", "desc", "id", "foo+"), new Total());
            sensor.add(metrics.metricName("name", "group", "desc", "id", "foo?"), new Total());
            sensor.add(metrics.metricName("name", "group", "desc", "id", "foo:"), new Total());
            sensor.add(metrics.metricName("name", "group", "desc", "id", "foo%"), new Total());
            // '*' and '?' must be quoted AND backslash-escaped; '+' and ':' only quoted;
            // '%' needs no quoting at all.
            assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo\\*\"")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo\\*\""), "name"));
            assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo+\"")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo+\""), "name"));
            assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo\\?\"")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo\\?\""), "name"));
            assertTrue(server.isRegistered(new ObjectName(":type=group,id=\"foo:\"")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=\"foo:\""), "name"));
            assertTrue(server.isRegistered(new ObjectName(":type=group,id=foo%")));
            assertEquals(0.0, server.getAttribute(new ObjectName(":type=group,id=foo%"), "name"));
            metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo*"));
            metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo+"));
            metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo?"));
            metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo:"));
            metrics.removeMetric(metrics.metricName("name", "group", "desc", "id", "foo%"));
            assertFalse(server.isRegistered(new ObjectName(":type=group,id=\"foo\\*\"")));
            // NOTE(review): this line checks the unquoted form (id=foo+) while the
            // registration check above used the quoted form — presumably both
            // resolve to unregistered here, but confirm the intended pattern.
            assertFalse(server.isRegistered(new ObjectName(":type=group,id=foo+")));
            assertFalse(server.isRegistered(new ObjectName(":type=group,id=\"foo\\?\"")));
            assertFalse(server.isRegistered(new ObjectName(":type=group,id=\"foo:\"")));
            assertFalse(server.isRegistered(new ObjectName(":type=group,id=foo%")));
        } finally {
            metrics.close();
        }
    }
}
| ollie314/kafka | clients/src/test/java/org/apache/kafka/common/metrics/JmxReporterTest.java | Java | apache-2.0 | 5,954 |
/**
* Copyright 2017 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
LayoutRectDef,
layoutRectFromDomRect,
layoutRectLtwh,
} from '../../src/layout-rect';
import {
centerFrameUnderVsyncMutate,
collapseFrameUnderVsyncMutate,
expandFrameUnderVsyncMutate,
} from '../../src/full-overlay-frame-helper';
import {restrictedVsync, timer} from './util';
const CENTER_TRANSITION_TIME_MS = 500;
const CENTER_TRANSITION_END_WAIT_TIME_MS = 200;
/**
* Places the child frame in full overlay mode.
* @param {!Window} win Host window.
* @param {!HTMLIFrameElement} iframe
* @param {function(!LayoutRectDef, !LayoutRectDef)} onFinish
* @private
*/
const expandFrameImpl = function(win, iframe, onFinish) {
  restrictedVsync(win, {
    measure(state) {
      // Capture viewport size and the frame's current position in one
      // measure phase so the mutate phase works from a consistent snapshot.
      state.viewportSize = {
        width: win./*OK*/innerWidth,
        height: win./*OK*/innerHeight,
      };
      state.rect = iframe./*OK*/getBoundingClientRect();
    },
    mutate(state) {
      const collapsedRect = layoutRectFromDomRect(state.rect);
      const expandedRect = layoutRectLtwh(
          0, 0, state.viewportSize.width, state.viewportSize.height);
      // Phase 1: animate the frame to the viewport center.
      centerFrameUnderVsyncMutate(iframe, state.rect, state.viewportSize,
          CENTER_TRANSITION_TIME_MS);
      // Phase 2: once the centering transition (plus a settle delay) has
      // elapsed, expand to full overlay and report both rects to the caller.
      timer(() => {
        restrictedVsync(win, {
          mutate() {
            expandFrameUnderVsyncMutate(iframe);
            onFinish(collapsedRect, expandedRect);
          },
        });
      }, CENTER_TRANSITION_TIME_MS + CENTER_TRANSITION_END_WAIT_TIME_MS);
    },
  }, {});
};
/**
* Resets the frame from full overlay mode.
* @param {!Window} win Host window.
* @param {!HTMLIFrameElement} iframe
* @param {function()} onFinish
* @param {function(!LayoutRectDef)} onMeasure
* @private
*/
const collapseFrameImpl = function(win, iframe, onFinish, onMeasure) {
  restrictedVsync(win, {
    mutate() {
      // Restore the frame's original (non-overlay) styles, then notify.
      collapseFrameUnderVsyncMutate(iframe);
      onFinish();
      // remeasure so client knows about updated dimensions
      restrictedVsync(win, {
        measure() {
          onMeasure(
              layoutRectFromDomRect(iframe./*OK*/getBoundingClientRect()));
        },
      });
    },
  });
};
/**
 * Places the child frame in full overlay mode.
 * Reassignable binding so tests can swap in a stub implementation.
 * @param {!Window} win Host window.
 * @param {!HTMLIFrameElement} iframe
 * @param {function(!LayoutRectDef, !LayoutRectDef)} onFinish
 */
export let expandFrame = expandFrameImpl;

/**
 * Replaces the expandFrame implementation (tests only).
 * @param {!Function} implFn
 * @visibleForTesting
 */
export function stubExpandFrameForTesting(implFn) {
  expandFrame = implFn;
}

/**
 * Restores the real expandFrame implementation (tests only).
 * @visibleForTesting
 */
export function resetExpandFrameForTesting() {
  expandFrame = expandFrameImpl;
}

/**
 * Places the child frame in full overlay mode.
 * Reassignable binding so tests can swap in a stub implementation.
 * @param {!Window} win Host window.
 * @param {!HTMLIFrameElement} iframe
 * @param {function()} onFinish
 * @param {function(!LayoutRectDef)} onMeasure
 */
export let collapseFrame = collapseFrameImpl;

/**
 * Replaces the collapseFrame implementation (tests only).
 * @param {!Function} implFn
 * @visibleForTesting
 */
export function stubCollapseFrameForTesting(implFn) {
  collapseFrame = implFn;
}

/**
 * Restores the real collapseFrame implementation (tests only).
 * @visibleForTesting
 */
export function resetCollapseFrameForTesting() {
  collapseFrame = collapseFrameImpl;
}
| donttrustthisbot/amphtml | ads/inabox/frame-overlay-helper.js | JavaScript | apache-2.0 | 3,755 |
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.core;
import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest;
import org.springframework.core.style.ToStringCreator;
/**
* Description of an execution of a task including resource to be executed and how it was configured via Spring Cloud
* Data Flow
*
* @author Mark Pollack
* @author Michael Minella
* @since 2.3
*/
public class TaskManifest {

	private AppDeploymentRequest taskDeploymentRequest;

	private String platformName;

	/**
	 * Returns the name of the platform on which the associated task execution ran.
	 *
	 * @return the platform name
	 */
	public String getPlatformName() {
		return platformName;
	}

	/**
	 * Records the name of the platform on which the associated task execution ran.
	 *
	 * @param platformName the platform name
	 */
	public void setPlatformName(String platformName) {
		this.platformName = platformName;
	}

	/**
	 * Returns the {@code AppDeploymentRequest} describing the task that was executed.
	 *
	 * @return the deployment request for the task
	 */
	public AppDeploymentRequest getTaskDeploymentRequest() {
		return taskDeploymentRequest;
	}

	/**
	 * Records the {@code AppDeploymentRequest} describing the task that was executed.
	 *
	 * @param taskDeploymentRequest the deployment request for the task
	 */
	public void setTaskDeploymentRequest(AppDeploymentRequest taskDeploymentRequest) {
		this.taskDeploymentRequest = taskDeploymentRequest;
	}

	@Override
	public String toString() {
		// Render the two fields in a stable order for logs/diagnostics.
		ToStringCreator creator = new ToStringCreator(this);
		creator.append("taskDeploymentRequest", this.taskDeploymentRequest);
		creator.append("platformName", this.platformName);
		return creator.toString();
	}
}
| mminella/spring-cloud-data | spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/TaskManifest.java | Java | apache-2.0 | 2,150 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.store;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.plugins.Plugin;
import java.util.Collections;
import java.util.EnumSet;
import java.util.IdentityHashMap;
import java.util.Map;
/**
 * An {@link IndexStore} used in tests that wraps directories in
 * {@link MockFSDirectoryService} and, via its {@link Listener}, runs a Lucene
 * check-index on shards when they are closed from a healthy state.
 */
public class MockFSIndexStore extends IndexStore {

    // When true (the default), closed shards get a CheckIndex pass.
    public static final Setting<Boolean> INDEX_CHECK_INDEX_ON_CLOSE_SETTING =
        Setting.boolSetting("index.store.mock.check_index_on_close", true, Property.IndexScope, Property.NodeScope);

    /** Plugin that forces indices to use the "mock" store type and registers its settings. */
    public static class TestPlugin extends Plugin {
        @Override
        public String name() {
            return "mock-index-store";
        }
        @Override
        public String description() {
            return "a mock index store for testing";
        }
        @Override
        public Settings additionalSettings() {
            // Default every index to the mock store unless overridden.
            return Settings.builder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mock").build();
        }

        // Registers all settings the mock store and directory service consult.
        public void onModule(SettingsModule module) {
            module.registerSetting(INDEX_CHECK_INDEX_ON_CLOSE_SETTING);
            module.registerSetting(MockFSDirectoryService.CRASH_INDEX_SETTING);
            module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING);
            module.registerSetting(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING);
            module.registerSetting(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING);
            module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING);
        }

        @Override
        public void onIndexModule(IndexModule indexModule) {
            Settings indexSettings = indexModule.getSettings();
            // Only hook in when the index actually uses the mock store type.
            if ("mock".equals(indexSettings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey()))) {
                if (INDEX_CHECK_INDEX_ON_CLOSE_SETTING.get(indexSettings)) {
                    indexModule.addIndexEventListener(new Listener());
                }
                indexModule.addIndexStore("mock", MockFSIndexStore::new);
            }
        }
    }

    MockFSIndexStore(IndexSettings indexSettings,
                     IndexStoreConfig config) {
        super(indexSettings, config);
    }

    public DirectoryService newDirectoryService(ShardPath path) {
        return new MockFSDirectoryService(indexSettings, this, path);
    }

    // Shard states from which a close should trigger a check-index pass.
    private static final EnumSet<IndexShardState> validCheckIndexStates = EnumSet.of(
            IndexShardState.STARTED, IndexShardState.RELOCATED, IndexShardState.POST_RECOVERY
    );

    /**
     * Tracks shards that transitioned to CLOSED from a valid state and runs
     * CheckIndex on them once they are fully closed.
     */
    private static final class Listener implements IndexEventListener {

        // Identity map: marks shards eligible for a post-close check-index.
        private final Map<IndexShard, Boolean> shardSet = Collections.synchronizedMap(new IdentityHashMap<>());
        @Override
        public void afterIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {
            if (indexShard != null) {
                Boolean remove = shardSet.remove(indexShard);
                if (remove == Boolean.TRUE) {
                    ESLogger logger = Loggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
                    MockFSDirectoryService.checkIndex(logger, indexShard.store(), indexShard.shardId());
                }
            }
        }

        @Override
        public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) {
            // Shared-filesystem shards are skipped: their files may still be in use.
            if (currentState == IndexShardState.CLOSED && validCheckIndexStates.contains(previousState) && indexShard.indexSettings().isOnSharedFilesystem() == false) {
                    shardSet.put(indexShard, Boolean.TRUE);
            }
        }

    }
}
| mmaracic/elasticsearch | test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java | Java | apache-2.0 | 5,321 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.resources;
import org.kie.workbench.common.stunner.svg.annotation.SVGSource;
import org.kie.workbench.common.stunner.svg.annotation.SVGViewFactory;
import org.kie.workbench.common.stunner.svg.client.shape.view.SVGShapeViewResource;
import static org.kie.workbench.common.dmn.client.resources.DMNSVGViewFactory.PATH_CSS;
@SVGViewFactory(cssPath = PATH_CSS)
public interface DMNSVGViewFactory {

    // Stylesheet shared by all DMN shape SVGs.
    String PATH_CSS = "images/shapes/dmn-shapes.css";

    // Canvas/palette SVG resource paths for each DMN node type. The
    // "*_PALETTE" variants are referenced by palette definitions elsewhere.
    String DIAGRAM = "images/shapes/diagram.svg";
    String BUSINESS_KNOWLEDGE_MODEL = "images/shapes/business-knowledge-model.svg";
    String BUSINESS_KNOWLEDGE_MODEL_PALETTE = "images/shapes/business-knowledge-model-palette.svg";
    String DECISION = "images/shapes/decision.svg";
    String DECISION_PALETTE = "images/shapes/decision-palette.svg";
    String INPUT_DATA = "images/shapes/input-data.svg";
    String INPUT_DATA_PALETTE = "images/shapes/input-data-palette.svg";
    String KNOWLEDGE_SOURCE = "images/shapes/knowledge-source.svg";
    String KNOWLEDGE_SOURCE_PALETTE = "images/shapes/knowledge-source-palette.svg";
    String TEXT_ANNOTATION = "images/shapes/text-annotation.svg";
    String TEXT_ANNOTATION_PALETTE = "images/shapes/text-annotation-palette.svg";

    // One factory method per canvas shape; the SVG processor generates the
    // implementations from the annotated resource paths.
    @SVGSource(DIAGRAM)
    SVGShapeViewResource diagram();

    @SVGSource(BUSINESS_KNOWLEDGE_MODEL)
    SVGShapeViewResource businessKnowledgeModel();

    @SVGSource(DECISION)
    SVGShapeViewResource decision();

    @SVGSource(INPUT_DATA)
    SVGShapeViewResource inputData();

    @SVGSource(KNOWLEDGE_SOURCE)
    SVGShapeViewResource knowledgeSource();

    @SVGSource(TEXT_ANNOTATION)
    SVGShapeViewResource textAnnotation();
}
| jhrcek/kie-wb-common | kie-wb-common-dmn/kie-wb-common-dmn-client/src/main/java/org/kie/workbench/common/dmn/client/resources/DMNSVGViewFactory.java | Java | apache-2.0 | 2,348 |
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package utils
import (
"os"
jww "github.com/spf13/jwalterweatherman"
)
// CheckErr logs the error if it is non-nil. With no extra messages the error
// itself is logged at CRITICAL level; otherwise each message is logged at
// ERROR level followed by the error. A nil error is a no-op.
func CheckErr(err error, s ...string) {
	if err == nil {
		return
	}
	if len(s) == 0 {
		jww.CRITICAL.Println(err)
		return
	}
	for _, message := range s {
		jww.ERROR.Println(message)
	}
	jww.ERROR.Println(err)
}
// StopOnErr logs the error (or the supplied messages) at CRITICAL level and
// terminates the process. A nil error is a no-op. Empty message strings are
// skipped so we never print a blank CRITICAL line.
func StopOnErr(err error, s ...string) {
	if err == nil {
		return
	}
	if len(s) == 0 {
		// Printing an empty string results in a error with
		// no message, no bueno.
		if msg := err.Error(); msg != "" {
			jww.CRITICAL.Println(msg)
		}
	} else {
		for _, message := range s {
			if message != "" {
				jww.CRITICAL.Println(message)
			}
		}
	}
	os.Exit(-1)
}
| coderzh/hugo | utils/utils.go | GO | apache-2.0 | 1,295 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Linq;
using System.Reflection.Metadata;
using System.Reflection.PortableExecutable;
namespace Microsoft.CodeAnalysis.Rebuild
{
    public static class Extensions
    {
        /// <summary>
        /// Consumes one byte from the reader and verifies it is a NUL
        /// terminator, throwing <see cref="InvalidDataException"/> otherwise.
        /// Advances the reader by exactly one byte.
        /// </summary>
        internal static void SkipNullTerminator(ref this BlobReader blobReader)
        {
            var b = blobReader.ReadByte();
            if (b != '\0')
            {
                throw new InvalidDataException(string.Format(RebuildResources.Encountered_unexpected_byte_0_when_expecting_a_null_terminator, b));
            }
        }

        /// <summary>
        /// Returns a metadata reader over the PE file's embedded portable PDB,
        /// or null when the debug directory has no embedded-PDB entry.
        /// </summary>
        public static MetadataReader? GetEmbeddedPdbMetadataReader(this PEReader peReader)
        {
            // SingleOrDefault: at most one embedded portable PDB entry is expected;
            // the default(DebugDirectoryEntry) sentinel has Type == Unknown.
            var entry = peReader.ReadDebugDirectory().SingleOrDefault(x => x.Type == DebugDirectoryEntryType.EmbeddedPortablePdb);
            if (entry.Type == DebugDirectoryEntryType.Unknown)
            {
                return null;
            }
            var provider = peReader.ReadEmbeddedPortablePdbDebugDirectoryData(entry);
            return provider.GetMetadataReader();
        }
    }
}
| physhi/roslyn | src/Compilers/Core/Rebuild/Extensions.cs | C# | apache-2.0 | 1,272 |
package water.deploy;
import java.io.*;
import java.util.*;
import java.util.Map.Entry;
import java.util.jar.*;
import javassist.*;
import water.*;
import water.api.DocGen;
import water.util.Utils;
/**
 * H2O request handler that is meant to load user-supplied jars from the KV
 * store, rename their classes with a unique suffix (so the same job class can
 * be launched repeatedly), distribute the rewritten jar to all nodes, and
 * start the requested Job.
 *
 * NOTE(review): the jar-loading loop below throws {@code H2O.unimpl()} on its
 * first iteration, so everything after it in {@link #serve()} is currently
 * unreachable dead code (the original KV-to-file logic is commented out).
 */
public class LaunchJar extends Request2 {
  static final int API_WEAVER = 1;
  static public DocGen.FieldDoc[] DOC_FIELDS;

  @API(help = "Jars keys", required = true, filter = Default.class)
  public String jars;

  @API(help = "Class to instantiate and launch", required = true, filter = Default.class)
  public String job_class;

  @Override protected Response serve() {
    final Job job;
    try {
      // Move jars from KV store to tmp files
      ClassPool pool = new ClassPool(true);
      ArrayList<JarEntry> entries = new ArrayList<JarEntry>();
      String[] splits = jars.split(",");
      for( int i = 0; i < splits.length; i++ ) {
        Key key = Key.make(splits[i]);
        // Unimplemented: aborts here on the first jar key (see class note).
        throw H2O.unimpl();
        //ValueArray va = UKV.get(key);
        //File file = File.createTempFile("h2o", ".jar");
        //Utils.writeFileAndClose(file, va.openStream());
        //DKV.remove(key);
        //pool.appendClassPath(file.getPath());
        //
        //JarFile jar = new JarFile(file);
        //Enumeration e = jar.entries();
        //while( e.hasMoreElements() ) {
        //  JarEntry entry = (JarEntry) e.nextElement();
        //  entries.add(entry);
        //}
        //jar.close();
      }
      // Append UID to class names so allow multiple invocations
      String uid = Key.rand();
      ClassMap renames = new ClassMap();
      for( JarEntry entry : entries ) {
        if( entry.getName().endsWith(".class") ) {
          String n = Utils.className(entry.getName());
          String u;
          // Insert the UID before any inner-class suffix ("Outer$Inner").
          int index = n.indexOf("$");
          if( index < 0 )
            index = n.length();
          u = n.substring(0, index) + uid + n.substring(index);
          renames.put(n, u);
        }
      }
      ArrayList<CtClass> updated = new ArrayList();
      for( Entry<String, String> entry : ((Map<String, String>) renames).entrySet() ) {
        CtClass c = pool.get(entry.getKey().replace('/', '.'));
        c.replaceClassName(renames);
        updated.add(c);
      }
      // Create jar file and register it on each node
      HashSet<String> packages = new HashSet();
      ByteArrayOutputStream mem = new ByteArrayOutputStream();
      JarOutputStream jar = new JarOutputStream(mem);
      DataOutputStream bc = new DataOutputStream(jar);
      for( CtClass c : updated ) {
        jar.putNextEntry(new JarEntry(c.getName().replace('.', '/') + ".class"));
        c.toBytecode(bc);
        bc.flush();
        String p = c.getPackageName();
        if( p == null )
          throw new IllegalArgumentException("Package is null for class " + c);
        packages.add(p);
      }
      jar.close();
      // Packages must be woven before nodes can deserialize the new classes.
      weavePackages(packages.toArray(new String[0]));
      AddJar task = new AddJar();
      task._data = mem.toByteArray();
      task.invokeOnAllNodes();
      // Start job
      Class c = Class.forName(job_class + uid);
      job = (Job) c.newInstance();
      job.fork();
    } catch( Exception ex ) {
      throw new RuntimeException(ex);
    }
    return Response.done(this);
  }

  /** Registers the given package names with the weaver on every node. */
  public static void weavePackages(String... names) {
    WeavePackages task = new WeavePackages();
    task._names = names;
    task.invokeOnAllNodes();
  }

  // Distributed task: weave each named package on the local node.
  static class WeavePackages extends DRemoteTask {
    String[] _names;
    @Override public void lcompute() {
      for( String name : _names )
        Boot.weavePackage(name);
      tryComplete();
    }
    @Override public void reduce(DRemoteTask drt) {
    }
  }

  // Distributed task: write the jar bytes to a temp file and add it to the
  // local node's classpath.
  static class AddJar extends DRemoteTask {
    byte[] _data;
    @Override public void lcompute() {
      try {
        File file = File.createTempFile("h2o", ".jar");
        Utils.writeFileAndClose(file, new ByteArrayInputStream(_data));
        Boot._init.addExternalJars(file);
        tryComplete();
      } catch( Exception ex ) {
        throw new RuntimeException(ex);
      }
    }
    @Override public void reduce(DRemoteTask drt) {
    }
  }
}
| rowhit/h2o-2 | src/main/java/water/deploy/LaunchJar.java | Java | apache-2.0 | 4,106 |
binomial_fit.coef() | madmax983/h2o-3 | h2o-docs/src/booklets/v2_2015/source/GLM_Vignette_code_examples/glm_model_output_20.py | Python | apache-2.0 | 19 |
#!/usr/bin/env python
import os
import sys
from macholib.MachOStandalone import MachOStandalone
from macholib.util import strip_files
def standaloneApp(path):
    """Make the .app bundle at *path* standalone and strip its binaries.

    Raises SystemExit when *path* is not a directory containing a
    ``Contents`` entry (i.e. does not look like an app bundle).
    """
    # Bug fix: the original guard was
    #     not os.path.isdir(path) and os.path.exists(...)
    # which only fired for a non-directory whose 'Contents' child exists --
    # effectively never -- so invalid paths slipped through. The intended
    # check is "path is a directory AND has a Contents entry"; negate the
    # whole conjunction.
    if not (os.path.isdir(path) and
            os.path.exists(os.path.join(path, 'Contents'))):
        raise SystemExit('%s: %s does not look like an app bundle'
                         % (sys.argv[0], path))
    files = MachOStandalone(path).run()
    strip_files(files)
def main():
    """Command-line entry point: make each given app bundle standalone."""
    # Deprecation notice kept verbatim from upstream.
    print("WARNING: 'macho_standalone' is deprecated, use 'python -mmacholib dump' instead")
    if not sys.argv[1:]:
        raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
    for fn in sys.argv[1:]:
        standaloneApp(fn)
main()
| timeyyy/PyUpdater | pyupdater/vendor/PyInstaller/lib/macholib/macho_standalone.py | Python | bsd-2-clause | 718 |
/**
* Sticky Notes
*
* An open source lightweight pastebin application
*
* @package StickyNotes
* @author Sayak Banerjee
* @copyright (c) 2014 Sayak Banerjee <mail@sayakbanerjee.com>. All rights reserved.
* @license http://www.opensource.org/licenses/bsd-license.php
* @link http://sayakbanerjee.com/sticky-notes
* @since Version 1.0
* @filesource
*/
/**
* Stores the current URL
*
* @var string
*/
var currentUrl = $(location).attr('href');
/**
* Timer container
*
* @var array
*/
var timers = new Array();
/**
* Instance counter
*
* @var int
*/
var instance = 0;
/**
* This is the main entry point of the script
*
* @return void
*/
function initMain()
{
	// Initialize a new instance (must run first: it cancels timers
	// belonging to the previous page instance)
	initInstance();

	// Initialize AJAX components
	initAjaxComponents();

	// Initialize AJAX navigation
	initAjaxNavigation();

	// Initialize addons (also re-run after every AJAX content swap)
	initAddons();
}
/**
* This initializes all JS addons
*
* @return void
*/
function initAddons()
{
	// Initialize code wrapping
	initWrapToggle();

	// Initialize the code editor
	initEditor();

	// Initialize tab persistence
	initTabPersistence();

	// Initialize line reference
	initLineReference();

	// Initialize bootstrap components
	initBootstrap();
}
/**
 * Begins a fresh library instance: cancels every timer registered by the
 * previous instance, then bumps the instance counter and allocates an
 * empty timer container for the new one.
 *
 * @return void
 */
function initInstance()
{
	var previous = timers[instance];

	// Cancel any interval timers owned by the outgoing instance
	if (previous !== undefined)
	{
		for (var idx = 0; idx < previous.length; idx++)
		{
			clearInterval(previous[idx]);
		}
	}

	// Advance to the new instance with a fresh timer list
	instance++;
	timers[instance] = new Array();
}
/**
 * Schedules a timed operation.
 *
 * 'once' fires the callback a single time after the interval; 'repeat'
 * starts an interval timer and records its id against the current
 * instance so initInstance() can cancel it later.
 *
 * @param operation 'once' or 'repeat'
 * @param callback
 * @param interval  milliseconds
 * @return void
 */
function initTimer(operation, callback, interval)
{
	if (operation === 'once')
	{
		setTimeout(callback, interval);
	}
	else if (operation === 'repeat')
	{
		timers[instance].push(setInterval(callback, interval));
	}
}
/**
* Scans for and processes AJAX components
*
* Each AJAX component can have 4 parameters:
* - realtime : Indicates if the component involves realtime data
* - onload : The AJAX request will be triggered automatically
* - component : The utility component to request
* - extra : Any extra data that will be sent to the server
*
* @return void
*/
function initAjaxComponents()
{
	var count = 1;

	// Setup AJAX requests
	$('[data-toggle="ajax"]').each(function()
	{
		// Assign each component a unique id so the response callback can
		// re-locate the element even after the DOM has been replaced
		var id = 'stickynotes-' + count++;

		var onload = $(this).attr('data-onload') === 'true';
		var realtime = $(this).attr('data-realtime') === 'true';
		var component = $(this).attr('data-component');
		var extra = $(this).attr('data-extra');

		// Set the id of this element
		$(this).attr('data-id', id);

		// AJAX URL and component must be defined
		// (ajaxUrl is a page-level global set by the server-side template)
		if (ajaxUrl !== undefined && component !== undefined)
		{
			var getUrl = ajaxUrl + '/' + component + (extra !== undefined ? '/' + extra : '');

			var callback = function(e)
			{
				// Add the loading icon
				$(this).html('<span class="glyphicon glyphicon-refresh"></span>');

				// Send the AJAX request; 'key' is a cache-buster
				$.ajax({
					url: getUrl,
					data: { key: Math.random(), ajax: 1 },
					context: $('[data-id="' + id + '"]'),
					success: function(response)
					{
						// Dump the HTML in the element
						$(this).html(response);

						// If response is link, set it as href as well
						if (response.indexOf('http') === 0)
						{
							$(this).attr('href', response);
							$(this).removeAttr('data-toggle');
							$(this).off('click');
						}

						// Load addons again
						initAddons();
					}
				});

				// e is undefined when invoked by a timer rather than a click
				if (e !== undefined)
				{
					e.preventDefault();
				}
			};

			// Execute the AJAX callback: onload components fire immediately
			// (and poll every 5s when realtime); others fire on click
			if (onload)
			{
				if (realtime)
				{
					initTimer('repeat', callback, 5000);
				}

				initTimer('once', callback, 0);
			}
			else
			{
				$(this).off('click').on('click', callback);
			}
		}
	});
}
/**
* Enabled AJAX navigation across the site
*
* @return void
*/
function initAjaxNavigation()
{
	// Only enable when the server turned it on and the browser supports CORS
	if (ajaxNav !== undefined && ajaxNav && $.support.cors)
	{
		// AJAX callback - shared by link clicks (GET) and form submits (POST)
		var callback = function(e)
		{
			var navMethod = $(this).prop('tagName') == 'A' ? 'GET' : 'POST';
			var seek = $(this).attr('data-seek');

			// Set up data based on method
			switch (navMethod)
			{
				case 'GET':
					navUrl = $(this).attr('href');
					payload = 'ajax=1';
					break;

				case 'POST':
					navUrl = $(this).attr('action');
					payload = $(this).serialize() + '&ajax=1';
					break;
			}

			// Send an AJAX request for all but anchor links; skip if a
			// navigation is already in flight (loader visible)
			if (navUrl !== undefined && !$('.loader').is(':visible'))
			{
				$('.loader').show();

				$.ajax({
					url: navUrl,
					method: navMethod,
					context: $('body'),
					data: payload,
					success: function(response, status, info)
					{
						// A partial (no doctype) means the server returned
						// just a page section; otherwise it is a full page
						var isPageSection = response.indexOf('<!DOCTYPE html>') == -1;
						var isHtmlContent = info.getResponseHeader('Content-Type').indexOf('text/html') != -1;

						// Change the page URL (server reports the canonical
						// URL in a custom response header)
						currentUrl = info.getResponseHeader('StickyNotes-Url');

						window.history.pushState({ html: response }, null, currentUrl);

						// Handle the response
						if (isPageSection && isHtmlContent)
						{
							$(this).html(response);
						}
						else if (isHtmlContent)
						{
							// Full page: extract and swap only the <body>
							dom = $(document.createElement('html'));

							dom[0].innerHTML = response;

							$(this).html(dom.find('body').html());
						}
						else
						{
							// Non-HTML response: fall back to hard navigation
							window.location = navUrl;
						}

						// Seek to top of the page
						$.scrollTo(0, 200);

						// Load JS triggers again
						initMain();
					},
					error: function()
					{
						// On any AJAX failure, fall back to hard navigation
						window.location = navUrl;
					}
				});

				e.preventDefault();
			}
		};

		// Execute callback on all links, excluding some
		// (admin pages, attachments, in-page anchors, mailto, inline onclick)
		$('body').find('a' +
			':not([href*="/admin"])' +
			':not([href*="/attachment"])' +
			':not([href*="#"])' +
			':not([href*="mailto:"])' +
			':not([onclick])'
		).off('click').on('click', callback);

		// Execute callback on all designated forms
		$('body').find('form[data-navigate="ajax"]').off('submit').on('submit', callback);

		// URL change monitor: polls for back/forward navigation since this
		// code does not listen to popstate directly
		initTimer('repeat', function()
		{
			var href = $(location).attr('href');

			// Trim the trailing slash from currentUrl
			if (currentUrl.substr(-1) == '/')
			{
				currentUrl = currentUrl.substr(0, currentUrl.length - 1);
			}

			// Trim the trailing slash from href
			if (href.substr(-1) == '/')
			{
				href = href.substr(0, href.length - 1);
			}

			// Reload page if URL changed
			if (currentUrl != href && href.indexOf('#') == -1)
			{
				currentUrl = href;

				// Load the selected page
				$('.loader').show();

				$.get(href, function(response)
				{
					dom = $(document.createElement('html'));

					dom[0].innerHTML = response;

					$('body').html(dom.find('body').html());
				});
			}
		}, 300);
	}
}
/**
 * Wires up the control that toggles soft wrapping of displayed code.
 *
 * Flips the white-space style on the code container between 'nowrap'
 * (wrapping off) and 'inherit' (wrapping on) each time it is clicked.
 *
 * @return void
 */
function initWrapToggle()
{
	$('[data-toggle="wrap"]').off('click').on('click', function(e)
	{
		// Anything other than 'nowrap' counts as currently wrapped
		var currentStyle = $('.pre div').css('white-space');

		$('.pre div').css('white-space', currentStyle != 'nowrap' ? 'nowrap' : 'inherit');

		e.preventDefault();
	});
}
/**
* Activates the paste editor
*
* @return void
*/
function initEditor()
{
	// Insert tab in the code box (keyCode 9 = Tab); without this the key
	// would move focus out of the textarea
	$('[name="data"]').off('keydown').on('keydown', function (e)
	{
		if (e.keyCode == 9)
		{
			var myValue = "\t";

			// Splice a tab at the caret, replacing any selection, and
			// restore caret position and scroll offset afterwards
			var startPos = this.selectionStart;
			var endPos = this.selectionEnd;
			var scrollTop = this.scrollTop;

			this.value = this.value.substring(0, startPos) + myValue + this.value.substring(endPos,this.value.length);
			this.focus();
			this.selectionStart = startPos + myValue.length;
			this.selectionEnd = startPos + myValue.length;
			this.scrollTop = scrollTop;

			e.preventDefault();
		}
	});

	// Tick the private checkbox if password is entered
	$('[name="password"]').off('keyup').on('keyup', function()
	{
		$('[name="private"]').attr('checked', $(this).val().length > 0);
	});
}
/**
* Activates some bootstrap components
*
* @return void
*/
function initBootstrap()
{
	// Activate tooltips (Bootstrap tooltips are opt-in per element)
	$('[data-toggle="tooltip"]').tooltip();
}
/**
* Saves the tab state on all pages
*
* @return void
*/
function initTabPersistence()
{
	// Restore the previous tab state from the cookie, if one was saved
	$('.nav-tabs').each(function()
	{
		var id = $(this).attr('id');
		var index = $.cookie('stickynotes_tabstate');

		if (index !== undefined)
		{
			$('.nav-tabs > li:eq(' + index + ') a').tab('show');
		}
	});

	// Save the current tab state whenever a tab is shown
	$('.nav-tabs > li > a').on('shown.bs.tab', function (e)
	{
		var id = $(this).parents('.nav-tabs').attr('id');
		var index = $(this).parents('li').index();

		$.cookie('stickynotes_tabstate', index);
	})

	// Clear tab state when navigated to a different page
	// NOTE(review): presumably $.cookie(name, null) deletes the cookie with
	// this jquery-cookie version — newer versions store the string "null";
	// confirm against the bundled plugin.
	if ($('.nav-tabs').length == 0)
	{
		$.cookie('stickynotes_tabstate', null);
	}
}
/**
* Highlights lines upon clicking them on the #show page
*
* @return void
*/
/**
 * Highlights lines upon clicking them on the #show page.
 *
 * Assigns 'line-N' IDs to each rendered code line, scrolls to and
 * highlights the line named in the URL hash (if any), and updates the
 * hash/highlight when a line is clicked.
 *
 * @return void
 */
function initLineReference()
{
    if ($('section#show').length != 0)
    {
        var line = 1;
        // First, we allocate unique IDs to all lines
        $('.pre li').each(function()
        {
            $(this).attr('id', 'line-' + line++);
        });
        // Next, navigate to an ID if the user requested it.
        // BUGFIX: also check that the hash actually matches an element;
        // otherwise $(anchor).offset() is undefined and .top throws.
        var anchor = window.location.hash;
        if (anchor.length > 0 && $(anchor).length > 0)
        {
            var top = $(anchor).offset().top;
            // Scroll to the anchor
            $.scrollTo(top, 200);
            // Highlight the anchor
            $(anchor).addClass('highlight');
        }
        // Click to change anchor (ignored while text is being selected)
        $('.pre li').off('mouseup').on('mouseup', function()
        {
            if (window.getSelection() == '')
            {
                var id = $(this).attr('id');
                var top = $(this).offset().top;
                // Scroll to the anchor
                $.scrollTo(top, 200, function() {
                    window.location.hash = '#' + id;
                });
                // Highlight the anchor
                $('.pre li').removeClass('highlight');
                $(this).addClass('highlight');
            }
        });
    }
}
/**
* Draws a Google chart in a container
*
* @return void
*/
/**
 * Draws a Google chart in a container.
 *
 * Expects the page to define the globals `chartData` (a
 * google.visualization.DataTable) and `chartContainer` (a DOM node).
 * Re-invokes itself on window resize so the chart stays responsive.
 *
 * @return void
 */
function initAreaChart()
{
    // BUGFIX: use typeof guards. `chartData !== undefined` throws a
    // ReferenceError on pages where the global was never declared.
    if (typeof chartData !== 'undefined' && typeof chartContainer !== 'undefined')
    {
        // Create an instance of line chart
        var chart = new google.visualization.AreaChart(chartContainer);
        // Define chart options
        var options = {
            colors: [ '#428bca', '#d9534f' ],
            areaOpacity: 0.1,
            lineWidth: 4,
            pointSize: 8,
            hAxis: {
                textStyle: {
                    color: '#666'
                },
                gridlines: {
                    color: 'transparent'
                },
                baselineColor: '#eeeeee',
                format:'MMM d'
            },
            vAxis: {
                textStyle: {
                    color: '#666'
                },
                gridlines: {
                    color: '#eee'
                }
            },
            chartArea: {
                left: 50,
                top: 10,
                width: '100%',
                height: 210
            },
            legend: {
                position: 'bottom'
            }
        };
        // Draw the line chart
        chart.draw(chartData, options);
    }
    // Redraw chart on window resize
    $(window).off('resize').on('resize', initAreaChart);
}
/**
 * Invoke the entry point on DOM ready.
 *
 * Passing a function to jQuery registers it as a document-ready
 * callback; initMain (defined earlier in this file) bootstraps all of
 * the init* helpers above.
 */
$(initMain);
| solitaryr/sticky-notes | public/assets/pbr/js/stickynotes.js | JavaScript | bsd-2-clause | 10,960 |
/*******************************************************************************
* Copyright (c) 2013, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
package org.jbox2d.serialization;
import java.io.IOException;
import java.io.OutputStream;
/**
* Container for holding serialization results. Use
* {@link #getValue()} to get the implementation-specific
* result.
* @author Daniel Murphy
*
*/
public interface SerializationResult {
/**
* The implementation-specific serialization
* result.
* @return serialization result
*/
public Object getValue();
/**
* Writes the result to the given output stream.
* @param argOutputStream
* @throws IOException
*/
public void writeTo(OutputStream argOutputStream) throws IOException;
}
| Latertater/jbox2d | jbox2d-serialization/src/main/java/org/jbox2d/serialization/SerializationResult.java | Java | bsd-2-clause | 2,130 |
# Homebrew Cask definition for JRiver Media Center.
# NOTE(review): the download URL is plain http — confirm whether
# files.jriver.com serves https before switching.
cask 'media-center' do
  version '23.00.20'
  sha256 '70042295e59a0114900ca475cb2ab46d8c8793c58dbb429542ce4129614e5f25'

  # version.no_dots turns '23.00.20' into '230020' for the file name;
  # version.major ('23') selects the release channel directory.
  url "http://files.jriver.com/mediacenter/channels/v#{version.major}/stable/MediaCenter#{version.no_dots}.dmg"
  name 'JRiver Media Center'
  homepage 'https://www.jriver.com/'

  app "Media Center #{version.major}.app"

  # `zap` removes every trace on `brew cask zap`: caches/saved state are
  # deleted outright, user data is moved to the Trash.
  zap delete: [
                "~/Library/Caches/com.jriver.MediaCenter#{version.major}",
                "~/Library/Saved Application State/com.jriver.MediaCenter#{version.major}.savedState",
              ],
      trash:  [
                '~/Library/Application Support/J River/',
                '~/Documents/JRiver/',
                "~/Library/Preferences/com.jriver.MediaCenter#{version.major}.plist",
              ]
end
| klane/homebrew-cask | Casks/media-center.rb | Ruby | bsd-2-clause | 773 |
<?php
/**
 * Translation helper component for date/time values.
 *
 * Intentionally empty: all behaviour is inherited from Kwc_Abstract;
 * this subclass exists so the framework can resolve the component by
 * its class name.
 */
class Kwc_Trl_DateHelper_DateTime_Component extends Kwc_Abstract
{
}
| kaufmo/koala-framework | tests/Kwc/Trl/DateHelper/DateTime/Component.php | PHP | bsd-2-clause | 75 |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdint.h>
#include "base/test/task_environment.h"
#include "media/base/svc_scalability_mode.h"
#include "media/base/video_codecs.h"
#include "media/video/mock_gpu_video_accelerator_factories.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/renderer/platform/peerconnection/rtc_video_encoder.h"
#include "third_party/blink/renderer/platform/peerconnection/rtc_video_encoder_factory.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
#include "third_party/webrtc/api/video_codecs/video_encoder_factory.h"
using ::testing::Return;
namespace blink {
namespace {
// Expected QueryCodecSupport() results used throughout the tests:
// {is_supported, is_power_efficient}.
constexpr webrtc::VideoEncoderFactory::CodecSupport kSupportedPowerEfficient = {
    true, true};
constexpr webrtc::VideoEncoderFactory::CodecSupport kUnsupported = {false,
                                                                    false};
// Capabilities advertised by the mocked hardware encoder below.
constexpr gfx::Size kMaxResolution = {1920, 1080};
constexpr uint32_t kMaxFramerateNumerator = 30;
constexpr uint32_t kMaxFramerateDenominator = 1;
const std::vector<media::SVCScalabilityMode> kScalabilityModes = {
    media::SVCScalabilityMode::kL1T2, media::SVCScalabilityMode::kL1T3};
// Returns true when both CodecSupport values report the same
// |is_supported| and |is_power_efficient| flags.
bool Equals(webrtc::VideoEncoderFactory::CodecSupport a,
            webrtc::VideoEncoderFactory::CodecSupport b) {
  if (a.is_supported != b.is_supported)
    return false;
  return a.is_power_efficient == b.is_power_efficient;
}
// Mock GPU factories that advertise hardware encode support for VP8 and
// VP9 profile 0 (with L1T2/L1T3 temporal scalability) and nothing else.
class MockGpuVideoEncodeAcceleratorFactories
    : public media::MockGpuVideoAcceleratorFactories {
 public:
  MockGpuVideoEncodeAcceleratorFactories()
      : MockGpuVideoAcceleratorFactories(nullptr) {}

  // Returns the fixed profile list the tests assert against; the
  // constants above define the resolution/framerate/SVC limits.
  absl::optional<media::VideoEncodeAccelerator::SupportedProfiles>
  GetVideoEncodeAcceleratorSupportedProfiles() override {
    media::VideoEncodeAccelerator::SupportedProfiles profiles = {
        {media::VP8PROFILE_ANY, kMaxResolution, kMaxFramerateNumerator,
         kMaxFramerateDenominator, kScalabilityModes},
        {media::VP9PROFILE_PROFILE0, kMaxResolution, kMaxFramerateNumerator,
         kMaxFramerateDenominator, kScalabilityModes}};
    return profiles;
  }
};
} // anonymous namespace
// Shorthand aliases for readability in the tests below.
// Modernized from `typedef` to `using` per current C++/Chromium style.
using Sdp = webrtc::SdpVideoFormat;
using Params = webrtc::SdpVideoFormat::Parameters;
// Fixture wiring an RTCVideoEncoderFactory to the mock GPU factories so
// QueryCodecSupport() answers come from the fixed profile list above.
class RTCVideoEncoderFactoryTest : public ::testing::Test {
 public:
  RTCVideoEncoderFactoryTest() : encoder_factory_(&mock_gpu_factories_) {}

 protected:
  // Provides the message-loop environment required by Chromium code.
  base::test::TaskEnvironment task_environment_;
  MockGpuVideoEncodeAcceleratorFactories mock_gpu_factories_;
  RTCVideoEncoderFactory encoder_factory_;
};
// Without a scalability mode, only the codecs in the mocked profile
// list (VP8, VP9 profile 0) are reported as supported/power-efficient.
TEST_F(RTCVideoEncoderFactoryTest, QueryCodecSupportNoSvc) {
  EXPECT_CALL(mock_gpu_factories_, IsEncoderSupportKnown())
      .WillRepeatedly(Return(true));
  // VP8, H264, and VP9 profile 0 are supported.
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(
                         Sdp("VP8"), /*scalability_mode=*/absl::nullopt),
                     kSupportedPowerEfficient));
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(
                         Sdp("VP9"), /*scalability_mode=*/absl::nullopt),
                     kSupportedPowerEfficient));

  // H264, VP9 profile 2 and AV1 are unsupported.
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(
                         Sdp("H264", Params{{"level-asymmetry-allowed", "1"},
                                            {"packetization-mode", "1"},
                                            {"profile-level-id", "42001f"}}),
                         /*scalability_mode=*/absl::nullopt),
                     kUnsupported));
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(
                         Sdp("VP9", Params{{"profile-id", "2"}}),
                         /*scalability_mode=*/absl::nullopt),
                     kUnsupported));
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(
                         Sdp("AV1"), /*scalability_mode=*/absl::nullopt),
                     kUnsupported));
}
// With a scalability mode, support additionally requires the mode to be
// in the profile's advertised list (L1T2/L1T3 only).
TEST_F(RTCVideoEncoderFactoryTest, QueryCodecSupportSvc) {
  EXPECT_CALL(mock_gpu_factories_, IsEncoderSupportKnown())
      .WillRepeatedly(Return(true));
  // Test supported modes.
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(Sdp("VP8"), "L1T2"),
                     kSupportedPowerEfficient));
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(Sdp("VP9"), "L1T3"),
                     kSupportedPowerEfficient));

  // Test unsupported modes (wrong codec, or spatial/L3 modes not
  // advertised by the mock).
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(Sdp("AV1"), "L2T1"),
                     kUnsupported));
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(Sdp("H264"), "L1T2"),
                     kUnsupported));
  EXPECT_TRUE(Equals(encoder_factory_.QueryCodecSupport(Sdp("VP8"), "L3T3"),
                     kUnsupported));
}
} // namespace blink
| nwjs/chromium.src | third_party/blink/renderer/platform/peerconnection/rtc_video_encoder_factory_test.cc | C++ | bsd-3-clause | 4,887 |
# -*- coding: utf-8 -*-
# Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
from __future__ import unicode_literals
import copy
import os
import re
import shutil
import tempfile
import threading
import time
import unittest
import warnings
from django.conf import settings
from django.core import management, signals
from django.core.cache import (
DEFAULT_CACHE_ALIAS, CacheKeyWarning, cache, caches,
)
from django.core.cache.utils import make_template_fragment_key
from django.db import connection, connections, transaction
from django.http import HttpRequest, HttpResponse, StreamingHttpResponse
from django.middleware.cache import (
CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware,
)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import engines
from django.template.context_processors import csrf
from django.template.response import TemplateResponse
from django.test import (
RequestFactory, TestCase, TransactionTestCase, override_settings,
)
from django.test.signals import setting_changed
from django.utils import six, timezone, translation
from django.utils.cache import (
get_cache_key, learn_cache_key, patch_cache_control,
patch_response_headers, patch_vary_headers,
)
from django.utils.encoding import force_text
from django.views.decorators.cache import cache_page
from .models import Poll, expensive_calculation
try: # Use the same idiom as in cache backends
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
# functions/classes for complex data type tests
def f():
    """Module-level function cached by the complex-data-type tests."""
    answer = 42
    return answer
class C:
    """Minimal class with one method, cached by the data-type tests."""
    def m(n):
        # ``n`` plays the role of ``self``; the return value is arbitrary.
        return 24
class Unpickable(object):
    """Object whose pickling always fails, for backend error-path tests."""
    def __getstate__(self):
        # Refuse serialization so cache.set() of this object raises.
        raise pickle.PickleError()
@override_settings(CACHES={
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
})
class DummyCacheTests(TestCase):
    """Tests for the dummy backend, which silently discards every write."""
    # The Dummy cache backend doesn't really behave like a test backend,
    # so it has its own test case.

    def test_simple(self):
        "Dummy cache backend ignores cache set calls"
        cache.set("key", "value")
        self.assertIsNone(cache.get("key"))

    def test_add(self):
        "Add doesn't do anything in dummy cache backend"
        cache.add("addkey1", "value")
        result = cache.add("addkey1", "newvalue")
        # add() reports success even though nothing is stored.
        self.assertTrue(result)
        self.assertIsNone(cache.get("addkey1"))

    def test_non_existent(self):
        "Non-existent keys aren't found in the dummy cache backend"
        self.assertIsNone(cache.get("does_not_exist"))
        self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")

    def test_get_many(self):
        "get_many returns nothing for the dummy cache backend"
        cache.set('a', 'a')
        cache.set('b', 'b')
        cache.set('c', 'c')
        cache.set('d', 'd')
        self.assertEqual(cache.get_many(['a', 'c', 'd']), {})
        self.assertEqual(cache.get_many(['a', 'b', 'e']), {})

    def test_delete(self):
        "Cache deletion is transparently ignored on the dummy cache backend"
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        self.assertIsNone(cache.get("key1"))
        cache.delete("key1")
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))

    def test_has_key(self):
        "The has_key method doesn't ever return True for the dummy cache backend"
        cache.set("hello1", "goodbye1")
        self.assertFalse(cache.has_key("hello1"))
        self.assertFalse(cache.has_key("goodbye1"))

    def test_in(self):
        "The in operator doesn't ever return True for the dummy cache backend"
        cache.set("hello2", "goodbye2")
        self.assertNotIn("hello2", cache)
        self.assertNotIn("goodbye2", cache)

    def test_incr(self):
        "Dummy cache values can't be incremented"
        cache.set('answer', 42)
        self.assertRaises(ValueError, cache.incr, 'answer')
        self.assertRaises(ValueError, cache.incr, 'does_not_exist')

    def test_decr(self):
        "Dummy cache values can't be decremented"
        cache.set('answer', 42)
        self.assertRaises(ValueError, cache.decr, 'answer')
        self.assertRaises(ValueError, cache.decr, 'does_not_exist')

    def test_data_types(self):
        "All data types are ignored equally by the dummy cache"
        stuff = {
            'string': 'this is a string',
            'int': 42,
            'list': [1, 2, 3, 4],
            'tuple': (1, 2, 3, 4),
            'dict': {'A': 1, 'B': 2},
            'function': f,
            'class': C,
        }
        cache.set("stuff", stuff)
        self.assertIsNone(cache.get("stuff"))

    def test_expiration(self):
        "Expiration has no effect on the dummy cache"
        cache.set('expire1', 'very quickly', 1)
        cache.set('expire2', 'very quickly', 1)
        cache.set('expire3', 'very quickly', 1)

        time.sleep(2)
        self.assertIsNone(cache.get("expire1"))

        cache.add("expire2", "newvalue")
        self.assertIsNone(cache.get("expire2"))
        self.assertFalse(cache.has_key("expire3"))

    def test_unicode(self):
        "Unicode values are ignored by the dummy cache"
        stuff = {
            'ascii': 'ascii_value',
            'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
            'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
            'ascii2': {'x': 1}
        }
        for (key, value) in stuff.items():
            cache.set(key, value)
            self.assertIsNone(cache.get(key))

    def test_set_many(self):
        "set_many does nothing for the dummy cache backend"
        cache.set_many({'a': 1, 'b': 2})
        cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1')

    def test_delete_many(self):
        "delete_many does nothing for the dummy cache backend"
        cache.delete_many(['a', 'b'])

    def test_clear(self):
        "clear does nothing for the dummy cache backend"
        cache.clear()

    def test_incr_version(self):
        "Dummy cache versions can't be incremented"
        cache.set('answer', 42)
        self.assertRaises(ValueError, cache.incr_version, 'answer')
        self.assertRaises(ValueError, cache.incr_version, 'does_not_exist')

    def test_decr_version(self):
        "Dummy cache versions can't be decremented"
        cache.set('answer', 42)
        self.assertRaises(ValueError, cache.decr_version, 'answer')
        self.assertRaises(ValueError, cache.decr_version, 'does_not_exist')
def custom_key_func(key, key_prefix, version):
    "A customized cache key function"
    return 'CUSTOM-%s-%s-%s' % (key_prefix, version, key)
# Per-alias overrides shared by every backend's test configuration; see
# caches_setting_for_tests() below for how these are merged.
_caches_setting_base = {
    'default': {},
    # PID in the prefix keeps concurrently running test suites apart.
    'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())},
    'v2': {'VERSION': 2},
    'custom_key': {'KEY_FUNCTION': custom_key_func},
    'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'},
    'cull': {'OPTIONS': {'MAX_ENTRIES': 30}},
    'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}},
}
def caches_setting_for_tests(base=None, **params):
    # `base` is used to pull in the memcached config from the original
    # settings, `params` are test-specific overrides and
    # `_caches_setting_base` is the base config for the tests.
    # Resulting search order: params -> _caches_setting_base -> base.
    setting = {}
    for alias in _caches_setting_base:
        merged = dict(base or {})
        merged.update(_caches_setting_base[alias])
        merged.update(params)
        setting[alias] = merged
    return setting
class BaseCacheTests(object):
# A common set of tests to apply to all cache backends
    def setUp(self):
        # Fresh RequestFactory per test for building fake requests.
        self.factory = RequestFactory()
    def tearDown(self):
        # Leave no keys behind for the next test.
        cache.clear()
    def test_simple(self):
        """Basic set/get round-trip."""
        # Simple cache set/get works
        cache.set("key", "value")
        self.assertEqual(cache.get("key"), "value")
    def test_add(self):
        """add() stores only when the key is absent and reports the outcome."""
        # A key can be added to a cache
        cache.add("addkey1", "value")
        result = cache.add("addkey1", "newvalue")
        self.assertFalse(result)
        self.assertEqual(cache.get("addkey1"), "value")
    def test_prefix(self):
        """KEY_PREFIX isolates aliases sharing one physical backend."""
        # Test for same cache key conflicts between shared backend
        cache.set('somekey', 'value')

        # should not be set in the prefixed cache
        self.assertFalse(caches['prefix'].has_key('somekey'))

        caches['prefix'].set('somekey', 'value2')

        self.assertEqual(cache.get('somekey'), 'value')
        self.assertEqual(caches['prefix'].get('somekey'), 'value2')
    def test_non_existent(self):
        """Missing keys yield None, or the supplied default."""
        # Non-existent cache keys return as None/default
        # get with non-existent keys
        self.assertIsNone(cache.get("does_not_exist"))
        self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
    def test_get_many(self):
        """get_many() returns only the keys that exist."""
        # Multiple cache keys can be returned using get_many
        cache.set('a', 'a')
        cache.set('b', 'b')
        cache.set('c', 'c')
        cache.set('d', 'd')
        self.assertDictEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'})
        self.assertDictEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'})
    def test_delete(self):
        """delete() removes exactly the named key."""
        # Cache keys can be deleted
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        self.assertEqual(cache.get("key1"), "spam")
        cache.delete("key1")
        self.assertIsNone(cache.get("key1"))
        self.assertEqual(cache.get("key2"), "eggs")
    def test_has_key(self):
        """has_key() reflects presence, including non-expiring entries."""
        # The cache can be inspected for cache keys
        cache.set("hello1", "goodbye1")
        self.assertTrue(cache.has_key("hello1"))
        self.assertFalse(cache.has_key("goodbye1"))
        cache.set("no_expiry", "here", None)
        self.assertTrue(cache.has_key("no_expiry"))
    def test_in(self):
        """``in`` membership mirrors has_key()."""
        # The in operator can be used to inspect cache contents
        cache.set("hello2", "goodbye2")
        self.assertIn("hello2", cache)
        self.assertNotIn("goodbye2", cache)
    def test_incr(self):
        """incr() adds a (possibly negative) delta; missing keys raise."""
        # Cache values can be incremented
        cache.set('answer', 41)
        self.assertEqual(cache.incr('answer'), 42)
        self.assertEqual(cache.get('answer'), 42)
        self.assertEqual(cache.incr('answer', 10), 52)
        self.assertEqual(cache.get('answer'), 52)
        self.assertEqual(cache.incr('answer', -10), 42)
        self.assertRaises(ValueError, cache.incr, 'does_not_exist')
    def test_decr(self):
        """decr() subtracts a (possibly negative) delta; missing keys raise."""
        # Cache values can be decremented
        cache.set('answer', 43)
        self.assertEqual(cache.decr('answer'), 42)
        self.assertEqual(cache.get('answer'), 42)
        self.assertEqual(cache.decr('answer', 10), 32)
        self.assertEqual(cache.get('answer'), 32)
        self.assertEqual(cache.decr('answer', -10), 42)
        self.assertRaises(ValueError, cache.decr, 'does_not_exist')
    def test_close(self):
        """Every backend exposes a callable close()."""
        self.assertTrue(hasattr(cache, 'close'))
        cache.close()
    def test_data_types(self):
        """Picklable values of many shapes round-trip unchanged."""
        # Many different data types can be cached
        stuff = {
            'string': 'this is a string',
            'int': 42,
            'list': [1, 2, 3, 4],
            'tuple': (1, 2, 3, 4),
            'dict': {'A': 1, 'B': 2},
            'function': f,
            'class': C,
        }
        cache.set("stuff", stuff)
        self.assertEqual(cache.get("stuff"), stuff)
    def test_cache_read_for_model_instance(self):
        """Reading a cached model must not re-run callable field defaults."""
        # Don't want fields with callable as default to be called on cache read
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        my_poll = Poll.objects.create(question="Well?")
        self.assertEqual(Poll.objects.count(), 1)
        pub_date = my_poll.pub_date
        cache.set('question', my_poll)
        cached_poll = cache.get('question')
        self.assertEqual(cached_poll.pub_date, pub_date)
        # We only want the default expensive calculation run once
        self.assertEqual(expensive_calculation.num_runs, 1)
    def test_cache_write_for_model_instance_with_deferred(self):
        """Caching a deferred queryset must not re-run callable defaults."""
        # Don't want fields with callable as default to be called on cache write
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        Poll.objects.create(question="What?")
        self.assertEqual(expensive_calculation.num_runs, 1)
        defer_qs = Poll.objects.all().defer('question')
        self.assertEqual(defer_qs.count(), 1)
        self.assertEqual(expensive_calculation.num_runs, 1)
        cache.set('deferred_queryset', defer_qs)
        # cache set should not re-evaluate default functions
        self.assertEqual(expensive_calculation.num_runs, 1)
    def test_cache_read_for_model_instance_with_deferred(self):
        """Reading a cached deferred queryset must not re-run defaults."""
        # Don't want fields with callable as default to be called on cache read
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        Poll.objects.create(question="What?")
        self.assertEqual(expensive_calculation.num_runs, 1)
        defer_qs = Poll.objects.all().defer('question')
        self.assertEqual(defer_qs.count(), 1)
        cache.set('deferred_queryset', defer_qs)
        self.assertEqual(expensive_calculation.num_runs, 1)
        runs_before_cache_read = expensive_calculation.num_runs
        cache.get('deferred_queryset')
        # We only want the default expensive calculation run on creation and set
        self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
    def test_expiration(self):
        """Expired keys vanish and can be re-added."""
        # Cache values can be set to expire
        cache.set('expire1', 'very quickly', 1)
        cache.set('expire2', 'very quickly', 1)
        cache.set('expire3', 'very quickly', 1)

        time.sleep(2)
        self.assertIsNone(cache.get("expire1"))

        # add() succeeds now because the old entry has expired
        cache.add("expire2", "newvalue")
        self.assertEqual(cache.get("expire2"), "newvalue")
        self.assertFalse(cache.has_key("expire3"))
    def test_unicode(self):
        """Non-ASCII keys and values survive set/add/set_many."""
        # Unicode values can be cached
        stuff = {
            'ascii': 'ascii_value',
            'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
            'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
            'ascii2': {'x': 1}
        }
        # Test `set`
        for (key, value) in stuff.items():
            cache.set(key, value)
            self.assertEqual(cache.get(key), value)

        # Test `add`
        for (key, value) in stuff.items():
            cache.delete(key)
            cache.add(key, value)
            self.assertEqual(cache.get(key), value)

        # Test `set_many`
        for (key, value) in stuff.items():
            cache.delete(key)
        cache.set_many(stuff)
        for (key, value) in stuff.items():
            self.assertEqual(cache.get(key), value)
    def test_binary_string(self):
        """Raw binary payloads (zlib output) round-trip via set/add/set_many."""
        # Binary strings should be cacheable
        from zlib import compress, decompress
        value = 'value_to_be_compressed'
        compressed_value = compress(value.encode())

        # Test set
        cache.set('binary1', compressed_value)
        compressed_result = cache.get('binary1')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result).decode())

        # Test add
        cache.add('binary1-add', compressed_value)
        compressed_result = cache.get('binary1-add')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result).decode())

        # Test set_many
        cache.set_many({'binary1-set_many': compressed_value})
        compressed_result = cache.get('binary1-set_many')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result).decode())
    def test_set_many(self):
        """set_many() stores every pair in the mapping."""
        # Multiple keys can be set using set_many
        cache.set_many({"key1": "spam", "key2": "eggs"})
        self.assertEqual(cache.get("key1"), "spam")
        self.assertEqual(cache.get("key2"), "eggs")
    def test_set_many_expiration(self):
        """set_many() honours its timeout argument for every key."""
        # set_many takes a second ``timeout`` parameter
        cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
        time.sleep(2)
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))
    def test_delete_many(self):
        """delete_many() removes only the listed keys."""
        # Multiple keys can be deleted using delete_many
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        cache.set("key3", "ham")
        cache.delete_many(["key1", "key2"])
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))
        self.assertEqual(cache.get("key3"), "ham")
    def test_clear(self):
        """clear() removes every key."""
        # The cache can be emptied using clear
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        cache.clear()
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))
    def test_long_timeout(self):
        '''
        Using a timeout greater than 30 days makes memcached think
        it is an absolute expiration timestamp instead of a relative
        offset. Test that we honour this convention. Refs #12399.
        '''
        # 30 days + 1 second crosses memcached's relative/absolute cutoff
        cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1)  # 30 days + 1 second
        self.assertEqual(cache.get('key1'), 'eggs')

        cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1)
        self.assertEqual(cache.get('key2'), 'ham')

        cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1)
        self.assertEqual(cache.get('key3'), 'sausage')
        self.assertEqual(cache.get('key4'), 'lobster bisque')
    def test_forever_timeout(self):
        '''
        Passing in None into timeout results in a value that is cached forever
        '''
        cache.set('key1', 'eggs', None)
        self.assertEqual(cache.get('key1'), 'eggs')

        cache.add('key2', 'ham', None)
        self.assertEqual(cache.get('key2'), 'ham')
        # add() on an existing never-expiring key still fails
        added = cache.add('key1', 'new eggs', None)
        self.assertEqual(added, False)
        self.assertEqual(cache.get('key1'), 'eggs')

        cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None)
        self.assertEqual(cache.get('key3'), 'sausage')
        self.assertEqual(cache.get('key4'), 'lobster bisque')
    def test_zero_timeout(self):
        '''
        Passing in zero into timeout results in a value that is not cached
        '''
        cache.set('key1', 'eggs', 0)
        self.assertIsNone(cache.get('key1'))

        cache.add('key2', 'ham', 0)
        self.assertIsNone(cache.get('key2'))

        cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0)
        self.assertIsNone(cache.get('key3'))
        self.assertIsNone(cache.get('key4'))
    def test_float_timeout(self):
        """A float timeout is accepted without error."""
        # Make sure a timeout given as a float doesn't crash anything.
        cache.set("key1", "spam", 100.2)
        self.assertEqual(cache.get("key1"), "spam")
    def _perform_cull_test(self, cull_cache, initial_count, final_count):
        """Fill ``cull_cache`` past MAX_ENTRIES and assert how many survive."""
        # Create initial cache key entries. This will overflow the cache,
        # causing a cull.
        for i in range(1, initial_count):
            cull_cache.set('cull%d' % i, 'value', 1000)
        count = 0
        # Count how many keys are left in the cache.
        for i in range(1, initial_count):
            if cull_cache.has_key('cull%d' % i):
                count = count + 1
        self.assertEqual(count, final_count)
    def test_cull(self):
        # Default CULL_FREQUENCY halves the cache when it overflows.
        self._perform_cull_test(caches['cull'], 50, 29)
    def test_zero_cull(self):
        # CULL_FREQUENCY=0 empties the cache when it overflows.
        self._perform_cull_test(caches['zero_cull'], 50, 19)
def test_invalid_keys(self):
"""
All the builtin backends (except memcached, see below) should warn on
keys that would be refused by memcached. This encourages portable
caching code without making it too difficult to use production backends
with more liberal key rules. Refs #6447.
"""
# mimic custom ``make_key`` method being defined since the default will
# never show the below warnings
def func(key, *args):
return key
old_func = cache.key_func
cache.key_func = func
try:
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
# memcached does not allow whitespace or control characters in keys
cache.set('key with spaces', 'value')
self.assertEqual(len(w), 2)
self.assertIsInstance(w[0].message, CacheKeyWarning)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
# memcached limits key length to 250
cache.set('a' * 251, 'value')
self.assertEqual(len(w), 1)
self.assertIsInstance(w[0].message, CacheKeyWarning)
finally:
cache.key_func = old_func
    def test_cache_versioning_get_set(self):
        """set()/get() respect both explicit and alias-default VERSIONs."""
        # set, using default version = 1
        cache.set('answer1', 42)
        self.assertEqual(cache.get('answer1'), 42)
        self.assertEqual(cache.get('answer1', version=1), 42)
        self.assertIsNone(cache.get('answer1', version=2))

        self.assertIsNone(caches['v2'].get('answer1'))
        self.assertEqual(caches['v2'].get('answer1', version=1), 42)
        self.assertIsNone(caches['v2'].get('answer1', version=2))

        # set, default version = 1, but manually override version = 2
        cache.set('answer2', 42, version=2)
        self.assertIsNone(cache.get('answer2'))
        self.assertIsNone(cache.get('answer2', version=1))
        self.assertEqual(cache.get('answer2', version=2), 42)

        self.assertEqual(caches['v2'].get('answer2'), 42)
        self.assertIsNone(caches['v2'].get('answer2', version=1))
        self.assertEqual(caches['v2'].get('answer2', version=2), 42)

        # v2 set, using default version = 2
        caches['v2'].set('answer3', 42)
        self.assertIsNone(cache.get('answer3'))
        self.assertIsNone(cache.get('answer3', version=1))
        self.assertEqual(cache.get('answer3', version=2), 42)

        self.assertEqual(caches['v2'].get('answer3'), 42)
        self.assertIsNone(caches['v2'].get('answer3', version=1))
        self.assertEqual(caches['v2'].get('answer3', version=2), 42)

        # v2 set, default version = 2, but manually override version = 1
        caches['v2'].set('answer4', 42, version=1)
        self.assertEqual(cache.get('answer4'), 42)
        self.assertEqual(cache.get('answer4', version=1), 42)
        self.assertIsNone(cache.get('answer4', version=2))

        self.assertIsNone(caches['v2'].get('answer4'))
        self.assertEqual(caches['v2'].get('answer4', version=1), 42)
        self.assertIsNone(caches['v2'].get('answer4', version=2))
    def test_cache_versioning_add(self):
        """add() only refuses when the key exists at the *same* version."""
        # add, default version = 1, but manually override version = 2
        cache.add('answer1', 42, version=2)
        self.assertIsNone(cache.get('answer1', version=1))
        self.assertEqual(cache.get('answer1', version=2), 42)

        cache.add('answer1', 37, version=2)
        self.assertIsNone(cache.get('answer1', version=1))
        self.assertEqual(cache.get('answer1', version=2), 42)

        cache.add('answer1', 37, version=1)
        self.assertEqual(cache.get('answer1', version=1), 37)
        self.assertEqual(cache.get('answer1', version=2), 42)

        # v2 add, using default version = 2
        caches['v2'].add('answer2', 42)
        self.assertIsNone(cache.get('answer2', version=1))
        self.assertEqual(cache.get('answer2', version=2), 42)

        caches['v2'].add('answer2', 37)
        self.assertIsNone(cache.get('answer2', version=1))
        self.assertEqual(cache.get('answer2', version=2), 42)

        caches['v2'].add('answer2', 37, version=1)
        self.assertEqual(cache.get('answer2', version=1), 37)
        self.assertEqual(cache.get('answer2', version=2), 42)

        # v2 add, default version = 2, but manually override version = 1
        caches['v2'].add('answer3', 42, version=1)
        self.assertEqual(cache.get('answer3', version=1), 42)
        self.assertIsNone(cache.get('answer3', version=2))

        caches['v2'].add('answer3', 37, version=1)
        self.assertEqual(cache.get('answer3', version=1), 42)
        self.assertIsNone(cache.get('answer3', version=2))

        caches['v2'].add('answer3', 37)
        self.assertEqual(cache.get('answer3', version=1), 42)
        self.assertEqual(cache.get('answer3', version=2), 37)
    def test_cache_versioning_has_key(self):
        """has_key() only sees the key at its stored version."""
        cache.set('answer1', 42)

        # has_key
        self.assertTrue(cache.has_key('answer1'))
        self.assertTrue(cache.has_key('answer1', version=1))
        self.assertFalse(cache.has_key('answer1', version=2))

        self.assertFalse(caches['v2'].has_key('answer1'))
        self.assertTrue(caches['v2'].has_key('answer1', version=1))
        self.assertFalse(caches['v2'].has_key('answer1', version=2))
    def test_cache_versioning_delete(self):
        """delete() removes the key only at the requested/default version."""
        cache.set('answer1', 37, version=1)
        cache.set('answer1', 42, version=2)
        cache.delete('answer1')
        self.assertIsNone(cache.get('answer1', version=1))
        self.assertEqual(cache.get('answer1', version=2), 42)

        cache.set('answer2', 37, version=1)
        cache.set('answer2', 42, version=2)
        cache.delete('answer2', version=2)
        self.assertEqual(cache.get('answer2', version=1), 37)
        self.assertIsNone(cache.get('answer2', version=2))

        cache.set('answer3', 37, version=1)
        cache.set('answer3', 42, version=2)
        caches['v2'].delete('answer3')
        self.assertEqual(cache.get('answer3', version=1), 37)
        self.assertIsNone(cache.get('answer3', version=2))

        cache.set('answer4', 37, version=1)
        cache.set('answer4', 42, version=2)
        caches['v2'].delete('answer4', version=1)
        self.assertIsNone(cache.get('answer4', version=1))
        self.assertEqual(cache.get('answer4', version=2), 42)
def test_cache_versioning_incr_decr(self):
    """incr()/decr() only touch the entry at the effective version."""
    # Each scenario: store 37@v1 and 42@v2, incr through the given backend,
    # check which version changed, then decr and check everything restored.
    scenarios = [
        # (key, backend, incr/decr kwargs, v1 after incr, v2 after incr)
        ('answer1', cache, {}, 38, 42),
        ('answer2', cache, {'version': 2}, 37, 43),
        ('answer3', caches['v2'], {}, 37, 43),
        ('answer4', caches['v2'], {'version': 1}, 38, 42),
    ]
    for key, backend, kwargs, v1_after, v2_after in scenarios:
        cache.set(key, 37, version=1)
        cache.set(key, 42, version=2)
        backend.incr(key, **kwargs)
        self.assertEqual(cache.get(key, version=1), v1_after)
        self.assertEqual(cache.get(key, version=2), v2_after)
        backend.decr(key, **kwargs)
        self.assertEqual(cache.get(key, version=1), 37)
        self.assertEqual(cache.get(key, version=2), 42)
def test_cache_versioning_get_set_many(self):
# set, using default version = 1
cache.set_many({'ford1': 37, 'arthur1': 42})
self.assertDictEqual(cache.get_many(['ford1', 'arthur1']),
{'ford1': 37, 'arthur1': 42})
self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=1),
{'ford1': 37, 'arthur1': 42})
self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=2), {})
self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1']), {})
self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1),
{'ford1': 37, 'arthur1': 42})
self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {})
# set, default version = 1, but manually override version = 2
cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
self.assertDictEqual(cache.get_many(['ford2', 'arthur2']), {})
self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=1), {})
self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=2),
{'ford2': 37, 'arthur2': 42})
self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2']),
{'ford2': 37, 'arthur2': 42})
self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {})
self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2),
{'ford2': 37, 'arthur2': 42})
# v2 set, using default version = 2
caches['v2'].set_many({'ford3': 37, 'arthur3': 42})
self.assertDictEqual(cache.get_many(['ford3', 'arthur3']), {})
self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=1), {})
self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=2),
{'ford3': 37, 'arthur3': 42})
self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3']),
{'ford3': 37, 'arthur3': 42})
self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {})
self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2),
{'ford3': 37, 'arthur3': 42})
# v2 set, default version = 2, but manually override version = 1
caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1)
self.assertDictEqual(cache.get_many(['ford4', 'arthur4']),
{'ford4': 37, 'arthur4': 42})
self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=1),
{'ford4': 37, 'arthur4': 42})
self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=2), {})
self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4']), {})
self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1),
{'ford4': 37, 'arthur4': 42})
self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {})
def test_incr_version(self):
    """incr_version() moves a value one version up and returns the new version."""
    cache.set('answer', 42, version=2)
    self.assertIsNone(cache.get('answer'))
    for version, expected in [(1, None), (2, 42), (3, None)]:
        self.assertEqual(cache.get('answer', version=version), expected)

    self.assertEqual(cache.incr_version('answer', version=2), 3)
    self.assertIsNone(cache.get('answer'))
    for version, expected in [(1, None), (2, None), (3, 42)]:
        self.assertEqual(cache.get('answer', version=version), expected)

    # Same dance through a backend whose default VERSION is 2.
    v2_cache = caches['v2']
    v2_cache.set('answer2', 42)
    self.assertEqual(v2_cache.get('answer2'), 42)
    for version, expected in [(1, None), (2, 42), (3, None)]:
        self.assertEqual(v2_cache.get('answer2', version=version), expected)

    self.assertEqual(v2_cache.incr_version('answer2'), 3)
    self.assertIsNone(v2_cache.get('answer2'))
    for version, expected in [(1, None), (2, None), (3, 42)]:
        self.assertEqual(v2_cache.get('answer2', version=version), expected)

    # Unknown keys cannot be re-versioned.
    with self.assertRaises(ValueError):
        cache.incr_version('does_not_exist')
def test_decr_version(self):
    """decr_version() moves a value one version down and returns the new version."""
    cache.set('answer', 42, version=2)
    self.assertIsNone(cache.get('answer'))
    self.assertIsNone(cache.get('answer', version=1))
    self.assertEqual(cache.get('answer', version=2), 42)

    self.assertEqual(cache.decr_version('answer', version=2), 1)
    for kwargs, expected in [({}, 42), ({'version': 1}, 42), ({'version': 2}, None)]:
        self.assertEqual(cache.get('answer', **kwargs), expected)

    # Same dance through a backend whose default VERSION is 2.
    v2_cache = caches['v2']
    v2_cache.set('answer2', 42)
    self.assertEqual(v2_cache.get('answer2'), 42)
    self.assertIsNone(v2_cache.get('answer2', version=1))
    self.assertEqual(v2_cache.get('answer2', version=2), 42)

    self.assertEqual(v2_cache.decr_version('answer2'), 1)
    for kwargs, expected in [({}, None), ({'version': 1}, 42), ({'version': 2}, None)]:
        self.assertEqual(v2_cache.get('answer2', **kwargs), expected)

    # Unknown keys cannot be re-versioned.
    with self.assertRaises(ValueError):
        cache.decr_version('does_not_exist', version=2)
def test_custom_key_func(self):
    """Caches with different key functions can't see each other's entries."""
    custom1, custom2 = caches['custom_key'], caches['custom_key2']

    cache.set('answer1', 42)
    self.assertEqual(cache.get('answer1'), 42)
    # A default-key entry is invisible to both custom-key caches.
    self.assertIsNone(custom1.get('answer1'))
    self.assertIsNone(custom2.get('answer1'))

    custom1.set('answer2', 42)
    # The two custom caches share a key function, so they see each other's
    # entries — the default cache still can't.
    self.assertIsNone(cache.get('answer2'))
    self.assertEqual(custom1.get('answer2'), 42)
    self.assertEqual(custom2.get('answer2'), 42)
def test_cache_write_unpickable_object(self):
    """
    A response carrying cookies round-trips through the cache middleware
    pair (update then fetch) with content and cookies intact, twice over.
    """
    update_middleware = UpdateCacheMiddleware()
    update_middleware.cache = cache
    fetch_middleware = FetchFromCacheMiddleware()
    fetch_middleware.cache = cache

    request = self.factory.get('/cache/test')
    request._cache_update_cache = True
    # Fix: use the fetch_middleware configured above. The original built a
    # brand-new FetchFromCacheMiddleware() here, silently ignoring the
    # instance whose .cache was just assigned.
    get_cache_data = fetch_middleware.process_request(request)
    self.assertIsNone(get_cache_data)

    response = HttpResponse()
    content = 'Testing cookie serialization.'
    response.content = content
    response.set_cookie('foo', 'bar')

    update_middleware.process_response(request, response)

    get_cache_data = fetch_middleware.process_request(request)
    self.assertIsNotNone(get_cache_data)
    self.assertEqual(get_cache_data.content, content.encode('utf-8'))
    self.assertEqual(get_cache_data.cookies, response.cookies)

    # Re-caching the cached response must also preserve content and cookies.
    update_middleware.process_response(request, get_cache_data)
    get_cache_data = fetch_middleware.process_request(request)
    self.assertIsNotNone(get_cache_data)
    self.assertEqual(get_cache_data.content, content.encode('utf-8'))
    self.assertEqual(get_cache_data.cookies, response.cookies)
def test_add_fail_on_pickleerror(self):
    """add() propagates PickleError for unpicklable values.

    See https://code.djangoproject.com/ticket/21200
    """
    with self.assertRaises(pickle.PickleError):
        cache.add('unpickable', Unpickable())
def test_set_fail_on_pickleerror(self):
    """set() propagates PickleError for unpicklable values.

    See https://code.djangoproject.com/ticket/21200
    """
    with self.assertRaises(pickle.PickleError):
        cache.set('unpickable', Unpickable())
def test_get_or_set(self):
    """get_or_set() stores the default on a miss and returns it thereafter."""
    key = 'projector'
    self.assertIsNone(cache.get(key))
    self.assertEqual(cache.get_or_set(key, 42), 42)
    self.assertEqual(cache.get(key), 42)
def test_get_or_set_callable(self):
    """A callable default is invoked to produce the stored value."""
    self.assertEqual(cache.get_or_set('mykey', lambda: 'value'), 'value')
def test_get_or_set_version(self):
    """get_or_set() requires a default on a miss and respects version."""
    missing_value_msg = 'You need to specify a value.'
    cache.get_or_set('brian', 1979, version=2)
    # A miss with no default raises, regardless of the version requested.
    with self.assertRaisesMessage(ValueError, missing_value_msg):
        cache.get_or_set('brian')
    with self.assertRaisesMessage(ValueError, missing_value_msg):
        cache.get_or_set('brian', version=1)
    self.assertIsNone(cache.get('brian', version=1))
    self.assertEqual(cache.get_or_set('brian', 42, version=1), 42)
    # Existing value at version 2 wins over the provided default.
    self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
    self.assertIsNone(cache.get('brian', version=3))
@override_settings(CACHES=caches_setting_for_tests(
    BACKEND='django.core.cache.backends.db.DatabaseCache',
    # Spaces are used in the table name to ensure quoting/escaping is working
    LOCATION='test cache table'
))
class DBCacheTests(BaseCacheTests, TransactionTestCase):
    # Runs the shared BaseCacheTests suite against the database cache backend,
    # creating and dropping a real cache table around every test.

    available_apps = ['cache']

    def setUp(self):
        # The super() call needs to happen first for the settings override.
        super(DBCacheTests, self).setUp()
        self.create_table()

    def tearDown(self):
        # The super() call needs to happen first because it uses the database.
        super(DBCacheTests, self).tearDown()
        self.drop_table()

    def create_table(self):
        # Build the cache table via the management command (quiet, no prompts).
        management.call_command('createcachetable', verbosity=0, interactive=False)

    def drop_table(self):
        with connection.cursor() as cursor:
            # quote_name() handles the spaces embedded in the table name.
            table_name = connection.ops.quote_name('test cache table')
            cursor.execute('DROP TABLE %s' % table_name)

    def test_zero_cull(self):
        # Exercises the 'zero_cull' configuration; expected survivor count is
        # 18 of 50 — see _perform_cull_test (defined elsewhere in this file)
        # for the exact semantics of these arguments.
        self._perform_cull_test(caches['zero_cull'], 50, 18)

    def test_second_call_doesnt_crash(self):
        # Re-running createcachetable against existing tables reports, once
        # per configured cache, rather than erroring out.
        out = six.StringIO()
        management.call_command('createcachetable', stdout=out)
        self.assertEqual(out.getvalue(),
            "Cache table 'test cache table' already exists.\n" * len(settings.CACHES))

    @override_settings(CACHES=caches_setting_for_tests(
        BACKEND='django.core.cache.backends.db.DatabaseCache',
        # Use another table name to avoid the 'table already exists' message.
        LOCATION='createcachetable_dry_run_mode'
    ))
    def test_createcachetable_dry_run_mode(self):
        # --dry-run prints the SQL instead of executing it.
        out = six.StringIO()
        management.call_command('createcachetable', dry_run=True, stdout=out)
        output = out.getvalue()
        self.assertTrue(output.startswith("CREATE TABLE"))

    def test_createcachetable_with_table_argument(self):
        """
        Delete and recreate cache table with legacy behavior (explicitly
        specifying the table name).
        """
        self.drop_table()
        out = six.StringIO()
        management.call_command(
            'createcachetable',
            'test cache table',
            verbosity=2,
            stdout=out,
        )
        self.assertEqual(out.getvalue(),
            "Cache table 'test cache table' created.\n")

    def test_clear_commits_transaction(self):
        # Ensure the database transaction is committed (#19896)
        cache.set("key1", "spam")
        cache.clear()
        transaction.rollback()
        self.assertIsNone(cache.get("key1"))
@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
    # Re-runs the entire DB cache suite with USE_TZ=True to catch
    # timezone-aware datetime handling regressions in the backend.
    pass
class DBCacheRouter(object):
    """A router that puts the cache table on the 'other' database."""

    @staticmethod
    def _cache_db(app_label):
        # django_cache models belong on 'other'; no opinion for anything else.
        return 'other' if app_label == 'django_cache' else None

    def db_for_read(self, model, **hints):
        return self._cache_db(model._meta.app_label)

    def db_for_write(self, model, **hints):
        return self._cache_db(model._meta.app_label)

    def allow_migrate(self, db, app_label, **hints):
        # Only migrate cache models, and only onto 'other'.
        if app_label != 'django_cache':
            return None
        return db == 'other'
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
            'LOCATION': 'my_cache_table',
        },
    },
)
class CreateCacheTableForDBCacheTests(TestCase):
    # createcachetable must consult DATABASE_ROUTERS when deciding where
    # (and whether) to create the cache table.
    multi_db = True

    @override_settings(DATABASE_ROUTERS=[DBCacheRouter()])
    def test_createcachetable_observes_database_router(self):
        # cache table should not be created on 'default'
        with self.assertNumQueries(0, using='default'):
            management.call_command('createcachetable',
                                    database='default',
                                    verbosity=0, interactive=False)
        # cache table should be created on 'other'
        # Queries:
        #   1: check table doesn't already exist
        #   2: create savepoint (if transactional DDL is supported)
        #   3: create the table
        #   4: create the index
        #   5: release savepoint (if transactional DDL is supported)
        num = 5 if connections['other'].features.can_rollback_ddl else 3
        with self.assertNumQueries(num, using='other'):
            management.call_command('createcachetable',
                                    database='other',
                                    verbosity=0, interactive=False)
class PicklingSideEffect(object):
    # Test helper: records on the live instance whether the cache's write
    # lock was held at the moment the object was pickled. Used by
    # LocMemCacheTests.test_locking_on_pickle to prove pickling happens
    # outside the lock.

    def __init__(self, cache):
        self.cache = cache
        self.locked = False

    def __getstate__(self):
        # Flag if pickling occurs while the cache's write lock is active;
        # deliberately pickle no state at all (empty dict).
        if self.cache._lock.active_writers:
            self.locked = True
        return {}
@override_settings(CACHES=caches_setting_for_tests(
    BACKEND='django.core.cache.backends.locmem.LocMemCache',
))
class LocMemCacheTests(BaseCacheTests, TestCase):
    # Runs the shared cache test suite against the local-memory backend.

    def setUp(self):
        super(LocMemCacheTests, self).setUp()

        # LocMem requires a hack to make the other caches
        # share a data store with the 'normal' cache.
        caches['prefix']._cache = cache._cache
        caches['prefix']._expire_info = cache._expire_info

        caches['v2']._cache = cache._cache
        caches['v2']._expire_info = cache._expire_info

        caches['custom_key']._cache = cache._cache
        caches['custom_key']._expire_info = cache._expire_info

        caches['custom_key2']._cache = cache._cache
        caches['custom_key2']._expire_info = cache._expire_info

    @override_settings(CACHES={
        'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
        'other': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'other'
        },
    })
    def test_multiple_caches(self):
        "Check that multiple locmem caches are isolated"
        cache.set('value', 42)
        self.assertEqual(caches['default'].get('value'), 42)
        # Different LOCATION -> separate in-process store.
        self.assertIsNone(caches['other'].get('value'))

    def test_locking_on_pickle(self):
        """#20613/#18541 -- Ensures pickling is done outside of the lock."""
        bad_obj = PicklingSideEffect(cache)
        cache.set('set', bad_obj)
        self.assertFalse(bad_obj.locked, "Cache was locked during pickling")

        cache.add('add', bad_obj)
        self.assertFalse(bad_obj.locked, "Cache was locked during pickling")

    def test_incr_decr_timeout(self):
        """incr/decr does not modify expiry time (matches memcached behavior)"""
        key = 'value'
        _key = cache.make_key(key)
        cache.set(key, 1, timeout=cache.default_timeout * 10)
        # Snapshot the expiry recorded by the backend, then verify incr/decr
        # leave it untouched (reads LocMemCache internals directly).
        expire = cache._expire_info[_key]
        cache.incr(key)
        self.assertEqual(expire, cache._expire_info[_key])
        cache.decr(key)
        self.assertEqual(expire, cache._expire_info[_key])
# memcached backend isn't guaranteed to be available.
# To check the memcached backend, the test settings file will
# need to contain at least one cache backend setting that points at
# your memcache server.
memcached_params = {}
for _cache_params in settings.CACHES.values():
    if _cache_params['BACKEND'].startswith('django.core.cache.backends.memcached.'):
        memcached_params = _cache_params  # last matching backend wins

# Variants of the discovered memcached settings used by individual tests below.
memcached_never_expiring_params = memcached_params.copy()
memcached_never_expiring_params['TIMEOUT'] = None
memcached_far_future_params = memcached_params.copy()
memcached_far_future_params['TIMEOUT'] = 31536000  # 60*60*24*365, 1 year
@unittest.skipUnless(memcached_params, "memcached not available")
@override_settings(CACHES=caches_setting_for_tests(base=memcached_params))
class MemcachedCacheTests(BaseCacheTests, TestCase):
    # Runs the shared suite against a live memcached server; skipped entirely
    # when no memcached backend is configured in the test settings.

    def test_invalid_keys(self):
        """
        On memcached, we don't introduce a duplicate key validation
        step (for speed reasons), we just let the memcached API
        library raise its own exception on bad keys. Refs #6447.

        In order to be memcached-API-library agnostic, we only assert
        that a generic exception of some kind is raised.
        """
        # memcached does not allow whitespace or control characters in keys
        self.assertRaises(Exception, cache.set, 'key with spaces', 'value')
        # memcached limits key length to 250
        self.assertRaises(Exception, cache.set, 'a' * 251, 'value')

    # Explicitly display a skipped test if no configured cache uses MemcachedCache
    @unittest.skipUnless(
        memcached_params.get('BACKEND') == 'django.core.cache.backends.memcached.MemcachedCache',
        "cache with python-memcached library not available")
    def test_memcached_uses_highest_pickle_version(self):
        # Regression test for #19810
        for cache_key, cache_config in settings.CACHES.items():
            if cache_config['BACKEND'] == 'django.core.cache.backends.memcached.MemcachedCache':
                self.assertEqual(caches[cache_key]._cache.pickleProtocol,
                                 pickle.HIGHEST_PROTOCOL)

    @override_settings(CACHES=caches_setting_for_tests(base=memcached_never_expiring_params))
    def test_default_never_expiring_timeout(self):
        # Regression test for #22845
        cache.set('infinite_foo', 'bar')
        self.assertEqual(cache.get('infinite_foo'), 'bar')

    @override_settings(CACHES=caches_setting_for_tests(base=memcached_far_future_params))
    def test_default_far_future_timeout(self):
        # Regression test for #22845
        cache.set('future_foo', 'bar')
        self.assertEqual(cache.get('future_foo'), 'bar')

    def test_cull(self):
        # culling isn't implemented, memcached deals with it.
        pass

    def test_zero_cull(self):
        # culling isn't implemented, memcached deals with it.
        pass

    def test_memcached_deletes_key_on_failed_set(self):
        # By default memcached allows objects up to 1MB. For the cache_db session
        # backend to always use the current session, memcached needs to delete
        # the old key if it fails to set.
        # pylibmc doesn't seem to have SERVER_MAX_VALUE_LENGTH as far as I can
        # tell from a quick check of its source code. This is falling back to
        # the default value exposed by python-memcached on my system.
        max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576)

        cache.set('small_value', 'a')
        self.assertEqual(cache.get('small_value'), 'a')

        large_value = 'a' * (max_value_length + 1)
        cache.set('small_value', large_value)
        # small_value should be deleted, or set if configured to accept larger values
        value = cache.get('small_value')
        self.assertTrue(value is None or value == large_value)
@override_settings(CACHES=caches_setting_for_tests(
    BACKEND='django.core.cache.backends.filebased.FileBasedCache',
))
class FileBasedCacheTests(BaseCacheTests, TestCase):
    """
    Specific test cases for the file-based cache.
    """
    def setUp(self):
        super(FileBasedCacheTests, self).setUp()
        self.dirname = tempfile.mkdtemp()
        # Caches location cannot be modified through override_settings / modify_settings,
        # hence settings are manipulated directly here and the setting_changed signal
        # is triggered manually.
        for cache_params in settings.CACHES.values():
            cache_params.update({'LOCATION': self.dirname})
        setting_changed.send(self.__class__, setting='CACHES', enter=False)

    def tearDown(self):
        super(FileBasedCacheTests, self).tearDown()
        # Call parent first, as cache.clear() may recreate cache base directory
        shutil.rmtree(self.dirname)

    def test_ignores_non_cache_files(self):
        fname = os.path.join(self.dirname, 'not-a-cache-file')
        with open(fname, 'w'):
            os.utime(fname, None)
        cache.clear()
        self.assertTrue(os.path.exists(fname),
                        'Expected cache.clear to ignore non cache files')
        os.remove(fname)

    def test_clear_does_not_remove_cache_dir(self):
        cache.clear()
        self.assertTrue(os.path.exists(self.dirname),
                        'Expected cache.clear to keep the cache dir')

    def test_creates_cache_dir_if_nonexistent(self):
        os.rmdir(self.dirname)
        cache.set('foo', 'bar')
        # Bug fix: the original evaluated os.path.exists(self.dirname) as a
        # bare expression without asserting on it, so this test could never
        # fail. Wrap it in an assertion.
        self.assertTrue(os.path.exists(self.dirname),
                        'Expected cache.set to recreate the cache dir')
@override_settings(CACHES={
    'default': {
        'BACKEND': 'cache.liberal_backend.CacheClass',
    },
})
class CustomCacheKeyValidationTests(TestCase):
    """
    A custom cache backend may mix in its own ``validate_key`` method,
    overriding the default key validation of the builtin backend it
    inherits from. Refs #6447.
    """
    def test_custom_key_validation(self):
        # Longer than 250 characters and containing spaces — the default
        # validation would flag this key; the liberal backend accepts it.
        oversized_key = 'some key with spaces' * 15
        stored_value = 'a value'
        cache.set(oversized_key, stored_value)
        self.assertEqual(cache.get(oversized_key), stored_value)
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'cache.closeable_cache.CacheClass',
        }
    }
)
class CacheClosingTests(TestCase):
    # The request_finished signal must close cache backends that support it
    # (the closeable_cache test backend exposes a `closed` flag).

    def test_close(self):
        self.assertFalse(cache.closed)
        # Simulate the end of a request; the cache framework listens for this.
        signals.request_finished.send(self.__class__)
        self.assertTrue(cache.closed)
# Settings fixtures for DefaultNonExpiringCacheKeyTests below.
DEFAULT_MEMORY_CACHES_SETTINGS = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
    }
}
# Same settings, but with TIMEOUT explicitly set to None ("never expire").
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None
class DefaultNonExpiringCacheKeyTests(TestCase):
    """Tests that verify that settings having Cache arguments with a TIMEOUT
    set to `None` will create Caches that will set non-expiring keys.

    This fixes ticket #22085.
    """
    def setUp(self):
        # The 5 minute (300 seconds) default expiration time for keys is
        # defined in the implementation of the initializer method of the
        # BaseCache type.
        self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout

    def tearDown(self):
        # del is a statement, not a function — drop the redundant parentheses.
        del self.DEFAULT_TIMEOUT

    def test_default_expiration_time_for_keys_is_5_minutes(self):
        """The default expiration time of a cache key is 5 minutes.

        This value is defined inside the __init__() method of the
        :class:`django.core.cache.backends.base.BaseCache` type.
        """
        self.assertEqual(300, self.DEFAULT_TIMEOUT)

    def test_caches_with_unset_timeout_has_correct_default_timeout(self):
        """Caches that have the TIMEOUT parameter undefined in the default
        settings will use the default 5 minute timeout.
        """
        cache = caches[DEFAULT_CACHE_ALIAS]
        self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout)

    @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
    def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self):
        """Memory caches that have the TIMEOUT parameter set to `None` in the
        default settings with have `None` as the default timeout.

        This means "no timeout".
        """
        cache = caches[DEFAULT_CACHE_ALIAS]
        self.assertIsNone(cache.default_timeout)
        self.assertIsNone(cache.get_backend_timeout())

    @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS)
    def test_caches_with_unset_timeout_set_expiring_key(self):
        """Memory caches that have the TIMEOUT parameter unset will set cache
        keys having the default 5 minute timeout.
        """
        key = "my-key"
        value = "my-value"
        cache = caches[DEFAULT_CACHE_ALIAS]
        cache.set(key, value)
        cache_key = cache.make_key(key)
        self.assertIsNotNone(cache._expire_info[cache_key])

    @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
    def test_caches_set_with_timeout_as_none_set_non_expiring_key(self):
        """Memory caches that have the TIMEOUT parameter set to `None` will set
        a non expiring key by default.

        Bug fix: this method was previously named ``text_caches_...`` instead
        of ``test_caches_...``, so the test runner never discovered or ran it.
        """
        key = "another-key"
        value = "another-value"
        cache = caches[DEFAULT_CACHE_ALIAS]
        cache.set(key, value)
        cache_key = cache.make_key(key)
        self.assertIsNone(cache._expire_info[cache_key])
@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHE_MIDDLEWARE_SECONDS=1,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    USE_I18N=False,
)
class CacheUtils(TestCase):
    """TestCase for django.utils.cache functions."""

    def setUp(self):
        self.host = 'www.example.com'
        self.path = '/cache/test/'
        self.factory = RequestFactory(HTTP_HOST=self.host)

    def tearDown(self):
        cache.clear()

    def _get_request_cache(self, method='GET', query_string=None, update_cache=None):
        # NOTE(review): self._get_request is defined elsewhere (not visible in
        # this class) — presumably a helper building a request for host/path.
        request = self._get_request(self.host, self.path,
                                    method, query_string=query_string)
        # Falls back to True when update_cache is falsy; equivalent to
        # `update_cache or True` — kept verbatim.
        request._cache_update_cache = True if not update_cache else update_cache
        return request

    def _set_cache(self, request, msg):
        # Push a response with the given body through the update middleware.
        response = HttpResponse()
        response.content = msg
        return UpdateCacheMiddleware().process_response(request, response)

    def test_patch_vary_headers(self):
        headers = (
            # Initial vary, new headers, resulting vary.
            (None, ('Accept-Encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
            ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
        )
        for initial_vary, newheaders, resulting_vary in headers:
            response = HttpResponse()
            if initial_vary is not None:
                response['Vary'] = initial_vary
            patch_vary_headers(response, newheaders)
            self.assertEqual(response['Vary'], resulting_vary)

    def test_get_cache_key(self):
        request = self.factory.get(self.path)
        response = HttpResponse()
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)

        # The expected key embeds the md5 of the URL and of the (empty) header
        # list — tightly coupled to the key format of django.utils.cache.
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )
        # Verify that a specified key_prefix is taken into account.
        key_prefix = 'localprefix'
        learn_cache_key(request, response, key_prefix=key_prefix)
        self.assertEqual(
            get_cache_key(request, key_prefix=key_prefix),
            'views.decorators.cache.cache_page.localprefix.GET.'
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_get_cache_key_with_query(self):
        request = self.factory.get(self.path, {'test': 1})
        response = HttpResponse()
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)
        # Verify that the querystring is taken into account.
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_cache_key_varies_by_url(self):
        """
        get_cache_key keys differ by fully-qualified URL instead of path
        """
        request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com')
        learn_cache_key(request1, HttpResponse())
        request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com')
        learn_cache_key(request2, HttpResponse())
        self.assertNotEqual(get_cache_key(request1), get_cache_key(request2))

    def test_learn_cache_key(self):
        request = self.factory.head(self.path)
        response = HttpResponse()
        response['Vary'] = 'Pony'
        # Make sure that the Vary header is added to the key hash
        learn_cache_key(request, response)
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_patch_cache_control(self):
        tests = (
            # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
            (None, {'private': True}, {'private'}),
            # Test whether private/public attributes are mutually exclusive
            ('private', {'private': True}, {'private'}),
            ('private', {'public': True}, {'public'}),
            ('public', {'public': True}, {'public'}),
            ('public', {'private': True}, {'private'}),
            ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
            ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}),
            ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
        )

        cc_delim_re = re.compile(r'\s*,\s*')

        for initial_cc, newheaders, expected_cc in tests:
            response = HttpResponse()
            if initial_cc is not None:
                response['Cache-Control'] = initial_cc
            patch_cache_control(response, **newheaders)
            parts = set(cc_delim_re.split(response['Cache-Control']))
            self.assertEqual(parts, expected_cc)
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'KEY_PREFIX': 'cacheprefix',
        },
    },
)
class PrefixedCacheUtils(CacheUtils):
    # Re-runs the whole CacheUtils suite with a KEY_PREFIX configured, to
    # confirm prefixed cache keys behave identically.
    pass
@override_settings(
    CACHE_MIDDLEWARE_SECONDS=60,
    CACHE_MIDDLEWARE_KEY_PREFIX='test',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
)
class CacheHEADTest(TestCase):
    """HEAD requests interact correctly with the cache middleware."""

    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    def _set_cache(self, request, msg):
        # Run a response carrying ``msg`` through the update middleware so
        # it lands in the cache.
        resp = HttpResponse()
        resp.content = msg
        return UpdateCacheMiddleware().process_response(request, resp)

    def test_head_caches_correctly(self):
        test_content = 'test content'

        request = self.factory.head(self.path)
        request._cache_update_cache = True
        self._set_cache(request, test_content)

        # A second HEAD for the same path is served from the cache.
        request = self.factory.head(self.path)
        request._cache_update_cache = True
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(get_cache_data.content, test_content.encode())

    def test_head_with_cached_get(self):
        test_content = 'test content'

        request = self.factory.get(self.path)
        request._cache_update_cache = True
        self._set_cache(request, test_content)

        # A HEAD request hits the entry cached for the earlier GET.
        request = self.factory.head(self.path)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(get_cache_data.content, test_content.encode())
@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    LANGUAGES=[
        ('en', 'English'),
        ('es', 'Spanish'),
    ],
)
class CacheI18nTest(TestCase):
    # Cache keys must vary on the active language and/or time zone whenever
    # i18n, l10n, or time zone support is enabled.

    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
    def test_cache_key_i18n_translation(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    def check_accept_language_vary(self, accept_language, vary, reference_key):
        # Helper: given an Accept-Language header and a Vary response header,
        # both the learned and the computed cache key must equal reference_key.
        request = self.factory.get(self.path)
        request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language
        request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
        response = HttpResponse()
        response['Vary'] = vary
        key = learn_cache_key(request, response)
        key2 = get_cache_key(request)
        self.assertEqual(key, reference_key)
        self.assertEqual(key2, reference_key)

    @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
    def test_cache_key_i18n_translation_accept_language(self):
        lang = translation.get_language()
        self.assertEqual(lang, 'en')
        request = self.factory.get(self.path)
        request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
        response = HttpResponse()
        response['Vary'] = 'accept-encoding'
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
        # Neither the Accept-Language value variants nor the ordering of the
        # headers listed in Vary may change the resulting key.
        self.check_accept_language_vary(
            'en-us',
            'cookie, accept-language, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'en-US',
            'cookie, accept-encoding, accept-language',
            key
        )
        self.check_accept_language_vary(
            'en-US,en;q=0.8',
            'accept-encoding, accept-language, cookie',
            key
        )
        self.check_accept_language_vary(
            'en-US,en;q=0.8,ko;q=0.6',
            'accept-language, cookie, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ',
            'accept-encoding, cookie, accept-language',
            key
        )
        self.check_accept_language_vary(
            'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4',
            'accept-language, accept-encoding, cookie',
            key
        )
        self.check_accept_language_vary(
            'ko;q=1.0,en;q=0.5',
            'cookie, accept-language, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'ko, en',
            'cookie, accept-encoding, accept-language',
            key
        )
        self.check_accept_language_vary(
            'ko-KR, en-US',
            'accept-encoding, accept-language, cookie',
            key
        )

    @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False)
    def test_cache_key_i18n_formatting(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when formatting is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_i18n_timezone(self):
        request = self.factory.get(self.path)
        # This is tightly coupled to the implementation,
        # but it's the most straightforward way to test the key.
        tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False)
    def test_cache_key_no_i18n(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
        self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_with_non_ascii_tzname(self):
        # Regression test for #17476
        class CustomTzName(timezone.UTC):
            name = ''

            def tzname(self, dt):
                return self.name

        request = self.factory.get(self.path)
        response = HttpResponse()
        with timezone.override(CustomTzName()):
            # Non-ASCII characters are stripped from the key in both the
            # bytestring and the unicode case.
            CustomTzName.name = 'Hora estándar de Argentina'.encode('UTF-8')  # UTF-8 string
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                          "Cache keys should include the time zone name when time zones are active")

            CustomTzName.name = 'Hora estándar de Argentina'  # unicode
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                          "Cache keys should include the time zone name when time zones are active")
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_ETAGS=True,
USE_I18N=True,
)
def test_middleware(self):
def set_cache(request, lang, msg):
translation.activate(lang)
response = HttpResponse()
response.content = msg
return UpdateCacheMiddleware().process_response(request, response)
# cache with non empty request.GET
request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# first access, cache must return None
self.assertIsNone(get_cache_data)
response = HttpResponse()
content = 'Check for cache with QUERY_STRING'
response.content = content
UpdateCacheMiddleware().process_response(request, response)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# cache must return content
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
# different QUERY_STRING, cache must be empty
request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
# i18n tests
en_message = "Hello world!"
es_message = "Hola mundo!"
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'en', en_message)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# Check that we can recover the cache
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, en_message.encode())
# Check that we use etags
self.assertTrue(get_cache_data.has_header('ETag'))
# Check that we can disable etags
with self.settings(USE_ETAGS=False):
request._cache_update_cache = True
set_cache(request, 'en', en_message)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertFalse(get_cache_data.has_header('ETag'))
# change the session language and set content
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'es', es_message)
# change again the language
translation.activate('en')
# retrieve the content from cache
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data.content, en_message.encode())
# change again the language
translation.activate('es')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data.content, es_message.encode())
# reset the language
translation.deactivate()
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_ETAGS=True,
)
def test_middleware_doesnt_cache_streaming_response(self):
request = self.factory.get(self.path)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
# This test passes on Python < 3.3 even without the corresponding code
# in UpdateCacheMiddleware, because pickling a StreamingHttpResponse
# fails (http://bugs.python.org/issue14288). LocMemCache silently
# swallows the exception and doesn't store the response in cache.
content = ['Check for cache with streaming content.']
response = StreamingHttpResponse(content)
UpdateCacheMiddleware().process_response(request, response)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix'
},
},
)
class PrefixedCacheI18nTest(CacheI18nTest):
pass
def hello_world_view(request, value):
return HttpResponse('Hello World %s' % value)
def csrf_view(request):
return HttpResponse(csrf(request)['csrf_token'])
@override_settings(
CACHE_MIDDLEWARE_ALIAS='other',
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
CACHE_MIDDLEWARE_SECONDS=30,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other',
'TIMEOUT': '1',
},
},
)
class CacheMiddlewareTest(TestCase):
def setUp(self):
super(CacheMiddlewareTest, self).setUp()
self.factory = RequestFactory()
self.default_cache = caches['default']
self.other_cache = caches['other']
def tearDown(self):
self.default_cache.clear()
self.other_cache.clear()
super(CacheMiddlewareTest, self).tearDown()
def test_constructor(self):
"""
Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as
Middleware vs. usage of CacheMiddleware as view decorator and setting attributes
appropriately.
"""
# If no arguments are passed in construction, it's being used as middleware.
middleware = CacheMiddleware()
# Now test object attributes against values defined in setUp above
self.assertEqual(middleware.cache_timeout, 30)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
# If arguments are being passed in construction, it's being used as a decorator.
# First, test with "defaults":
as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None)
self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30
self.assertEqual(as_view_decorator.key_prefix, '')
self.assertEqual(as_view_decorator.cache_alias, 'default') # Value of DEFAULT_CACHE_ALIAS from django.core.cache
# Next, test with custom values:
as_view_decorator_with_custom = CacheMiddleware(cache_timeout=60, cache_alias='other', key_prefix='foo')
self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')
def test_middleware(self):
middleware = CacheMiddleware()
prefix_middleware = CacheMiddleware(key_prefix='prefix1')
timeout_middleware = CacheMiddleware(cache_timeout=1)
request = self.factory.get('/view/')
# Put the request through the request middleware
result = middleware.process_request(request)
self.assertIsNone(result)
response = hello_world_view(request, '1')
# Now put the response through the response middleware
response = middleware.process_response(request, response)
# Repeating the request should result in a cache hit
result = middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
# The same request through a different middleware won't hit
result = prefix_middleware.process_request(request)
self.assertIsNone(result)
# The same request with a timeout _will_ hit
result = timeout_middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
def test_view_decorator(self):
# decorate the same view with different cache decorators
default_view = cache_page(3)(hello_world_view)
default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view)
explicit_default_view = cache_page(3, cache='default')(hello_world_view)
explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view)
other_view = cache_page(1, cache='other')(hello_world_view)
other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view)
request = self.factory.get('/view/')
# Request the view once
response = default_view(request, '1')
self.assertEqual(response.content, b'Hello World 1')
# Request again -- hit the cache
response = default_view(request, '2')
self.assertEqual(response.content, b'Hello World 1')
# Requesting the same view with the explicit cache should yield the same result
response = explicit_default_view(request, '3')
self.assertEqual(response.content, b'Hello World 1')
# Requesting with a prefix will hit a different cache key
response = explicit_default_with_prefix_view(request, '4')
self.assertEqual(response.content, b'Hello World 4')
# Hitting the same view again gives a cache hit
response = explicit_default_with_prefix_view(request, '5')
self.assertEqual(response.content, b'Hello World 4')
# And going back to the implicit cache will hit the same cache
response = default_with_prefix_view(request, '6')
self.assertEqual(response.content, b'Hello World 4')
# Requesting from an alternate cache won't hit cache
response = other_view(request, '7')
self.assertEqual(response.content, b'Hello World 7')
# But a repeated hit will hit cache
response = other_view(request, '8')
self.assertEqual(response.content, b'Hello World 7')
# And prefixing the alternate cache yields yet another cache entry
response = other_with_prefix_view(request, '9')
self.assertEqual(response.content, b'Hello World 9')
# But if we wait a couple of seconds...
time.sleep(2)
# ... the default cache will still hit
caches['default']
response = default_view(request, '11')
self.assertEqual(response.content, b'Hello World 1')
# ... the default cache with a prefix will still hit
response = default_with_prefix_view(request, '12')
self.assertEqual(response.content, b'Hello World 4')
# ... the explicit default cache will still hit
response = explicit_default_view(request, '13')
self.assertEqual(response.content, b'Hello World 1')
# ... the explicit default cache with a prefix will still hit
response = explicit_default_with_prefix_view(request, '14')
self.assertEqual(response.content, b'Hello World 4')
# .. but a rapidly expiring cache won't hit
response = other_view(request, '15')
self.assertEqual(response.content, b'Hello World 15')
# .. even if it has a prefix
response = other_with_prefix_view(request, '16')
self.assertEqual(response.content, b'Hello World 16')
def test_sensitive_cookie_not_cached(self):
"""
Django must prevent caching of responses that set a user-specific (and
maybe security sensitive) cookie in response to a cookie-less request.
"""
csrf_middleware = CsrfViewMiddleware()
cache_middleware = CacheMiddleware()
request = self.factory.get('/view/')
self.assertIsNone(cache_middleware.process_request(request))
csrf_middleware.process_view(request, csrf_view, (), {})
response = csrf_view(request)
response = csrf_middleware.process_response(request, response)
response = cache_middleware.process_response(request, response)
# Inserting a CSRF cookie in a cookie-less request prevented caching.
self.assertIsNone(cache_middleware.process_request(request))
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class TestWithTemplateResponse(TestCase):
"""
Tests various headers w/ TemplateResponse.
Most are probably redundant since they manipulate the same object
anyway but the Etag header is 'special' because it relies on the
content being complete (which is not necessarily always the case
with a TemplateResponse)
"""
def setUp(self):
self.path = '/cache/test/'
self.factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
if initial_vary is not None:
response['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
key_prefix = 'localprefix'
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
# Verify that a specified key_prefix is taken into account.
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
'views.decorators.cache.cache_page.localprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {'test': 1})
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# Verify that the querystring is taken into account.
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e'
)
@override_settings(USE_ETAGS=False)
def test_without_etag(self):
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
self.assertFalse(response.has_header('ETag'))
patch_response_headers(response)
self.assertFalse(response.has_header('ETag'))
response = response.render()
self.assertFalse(response.has_header('ETag'))
@override_settings(USE_ETAGS=True)
def test_with_etag(self):
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
self.assertFalse(response.has_header('ETag'))
patch_response_headers(response)
self.assertFalse(response.has_header('ETag'))
response = response.render()
self.assertTrue(response.has_header('ETag'))
class TestMakeTemplateFragmentKey(TestCase):
def test_without_vary_on(self):
key = make_template_fragment_key('a.fragment')
self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e')
def test_with_one_vary_on(self):
key = make_template_fragment_key('foo', ['abc'])
self.assertEqual(key,
'template.cache.foo.900150983cd24fb0d6963f7d28e17f72')
def test_with_many_vary_on(self):
key = make_template_fragment_key('bar', ['abc', 'def'])
self.assertEqual(key,
'template.cache.bar.4b35f12ab03cec09beec4c21b2d2fa88')
def test_proper_escaping(self):
key = make_template_fragment_key('spam', ['abc:def%'])
self.assertEqual(key,
'template.cache.spam.f27688177baec990cdf3fbd9d9c3f469')
class CacheHandlerTest(TestCase):
def test_same_instance(self):
"""
Attempting to retrieve the same alias should yield the same instance.
"""
cache1 = caches['default']
cache2 = caches['default']
self.assertIs(cache1, cache2)
def test_per_thread(self):
"""
Requesting the same alias from separate threads should yield separate
instances.
"""
c = []
def runner():
c.append(caches['default'])
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertIsNot(c[0], c[1])
| weiawe/django | tests/cache/tests.py | Python | bsd-3-clause | 85,300 |
#include <pcl/features/rops_estimation.h>
#include <pcl/io/pcd_io.h>
int main (int argc, char** argv)
{
if (argc != 4)
return (-1);
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ> ());
if (pcl::io::loadPCDFile (argv[1], *cloud) == -1)
return (-1);
pcl::PointIndicesPtr indices (new pcl::PointIndices);
std::ifstream indices_file;
indices_file.open (argv[2], std::ifstream::in);
for (std::string line; std::getline (indices_file, line);)
{
std::istringstream in (line);
unsigned int index = 0;
in >> index;
indices->indices.push_back (index - 1);
}
indices_file.close ();
std::vector <pcl::Vertices> triangles;
std::ifstream triangles_file;
triangles_file.open (argv[3], std::ifstream::in);
for (std::string line; std::getline (triangles_file, line);)
{
pcl::Vertices triangle;
std::istringstream in (line);
unsigned int vertex = 0;
in >> vertex;
triangle.vertices.push_back (vertex - 1);
in >> vertex;
triangle.vertices.push_back (vertex - 1);
in >> vertex;
triangle.vertices.push_back (vertex - 1);
triangles.push_back (triangle);
}
float support_radius = 0.0285f;
unsigned int number_of_partition_bins = 5;
unsigned int number_of_rotations = 3;
pcl::search::KdTree<pcl::PointXYZ>::Ptr search_method (new pcl::search::KdTree<pcl::PointXYZ>);
search_method->setInputCloud (cloud);
pcl::ROPSEstimation <pcl::PointXYZ, pcl::Histogram <135> > feature_estimator;
feature_estimator.setSearchMethod (search_method);
feature_estimator.setSearchSurface (cloud);
feature_estimator.setInputCloud (cloud);
feature_estimator.setIndices (indices);
feature_estimator.setTriangles (triangles);
feature_estimator.setRadiusSearch (support_radius);
feature_estimator.setNumberOfPartitionBins (number_of_partition_bins);
feature_estimator.setNumberOfRotations (number_of_rotations);
feature_estimator.setSupportRadius (support_radius);
pcl::PointCloud<pcl::Histogram <135> >::Ptr histograms (new pcl::PointCloud <pcl::Histogram <135> > ());
feature_estimator.compute (*histograms);
return (0);
}
| drmateo/pcl | doc/tutorials/content/sources/rops_feature/rops_feature.cpp | C++ | bsd-3-clause | 2,148 |
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) University College London (UCL).
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#include "QmitkUiLoader.h"
#include "QmitkDataStorageComboBoxWithSelectNone.h"
#include "mitkNodePredicateDataType.h"
#include "mitkNodePredicateOr.h"
#include "mitkImage.h"
//-----------------------------------------------------------------------------
QmitkUiLoader::QmitkUiLoader(const mitk::DataStorage* dataStorage, QObject *parent)
: ctkCmdLineModuleQtUiLoader(parent)
, m_DataStorage(dataStorage)
{
}
//-----------------------------------------------------------------------------
QmitkUiLoader::~QmitkUiLoader()
{
}
//-----------------------------------------------------------------------------
QStringList QmitkUiLoader::availableWidgets () const
{
QStringList availableWidgets = ctkCmdLineModuleQtUiLoader::availableWidgets();
availableWidgets << "QmitkDataStorageComboBoxWithSelectNone";
return availableWidgets;
}
//-----------------------------------------------------------------------------
QWidget* QmitkUiLoader::createWidget(const QString& className, QWidget* parent, const QString& name)
{
QWidget* widget = nullptr;
if (className == "QmitkDataStorageComboBoxWithSelectNone")
{
auto comboBox = new QmitkDataStorageComboBoxWithSelectNone(parent);
comboBox->setObjectName(name);
comboBox->SetAutoSelectNewItems(false);
comboBox->SetPredicate(mitk::TNodePredicateDataType< mitk::Image >::New());
comboBox->SetDataStorage(const_cast<mitk::DataStorage*>(m_DataStorage));
comboBox->setCurrentIndex(0);
widget = comboBox;
}
else
{
widget = ctkCmdLineModuleQtUiLoader::createWidget(className, parent, name);
}
return widget;
}
| NifTK/MITK | Plugins/org.mitk.gui.qt.cmdlinemodules/src/internal/QmitkUiLoader.cpp | C++ | bsd-3-clause | 2,060 |
description("This tests that page scaling does not affect mouse event pageX and pageY coordinates.");
var html = document.documentElement;
var div = document.createElement("div");
div.style.width = "100px";
div.style.height = "100px";
div.style.backgroundColor = "blue";
var eventLog = "";
function appendEventLog() {
var msg = event.type + "(" + event.pageX + ", " + event.pageY + ")";
if (window.eventSender) {
eventLog += msg;
} else {
debug(msg);
}
}
function clearEventLog() {
eventLog = "";
}
div.addEventListener("click", appendEventLog, false);
document.body.insertBefore(div, document.body.firstChild);
function sendEvents(button) {
if (!window.eventSender) {
debug("This test requires DumpRenderTree. Click on the blue rect with the left mouse button to log the mouse coordinates.")
return;
}
eventSender.mouseDown(button);
eventSender.mouseUp(button);
}
function testEvents(button, description, expectedString) {
sendEvents(button);
debug(description);
shouldBeEqualToString("eventLog", expectedString);
debug("");
clearEventLog();
}
if (window.eventSender) {
eventSender.mouseMoveTo(10, 10);
// We are clicking in the same position on screen. As we scale or transform the page,
// we expect the pageX and pageY event coordinates to change because different
// parts of the document are under the mouse.
testEvents(0, "Unscaled", "click(10, 10)");
window.eventSender.setPageScaleFactorLimits(0.5, 0.5);
window.eventSender.setPageScaleFactor(0.5, 0, 0);
testEvents(0, "setPageScale(0.5)", "click(20, 20)");
}
| modulexcite/blink | LayoutTests/fast/events/script-tests/page-scaled-mouse-click.js | JavaScript | bsd-3-clause | 1,651 |
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails react-core
*/
'use strict';
var React;
describe('ReactES6Class', function() {
var container;
var Inner;
var attachedListener = null;
var renderedName = null;
beforeEach(function() {
React = require('React');
container = document.createElement('div');
attachedListener = null;
renderedName = null;
Inner = class extends React.Component {
getName() {
return this.props.name;
}
render() {
attachedListener = this.props.onClick;
renderedName = this.props.name;
return <div className={this.props.name} />;
}
};
});
function test(element, expectedTag, expectedClassName) {
var instance = React.render(element, container);
expect(container.firstChild).not.toBeNull();
expect(container.firstChild.tagName).toBe(expectedTag);
expect(container.firstChild.className).toBe(expectedClassName);
return instance;
}
it('preserves the name of the class for use in error messages', function() {
class Foo extends React.Component { }
expect(Foo.name).toBe('Foo');
});
it('throws if no render function is defined', function() {
class Foo extends React.Component { }
expect(() => React.render(<Foo />, container)).toThrow();
});
it('renders a simple stateless component with prop', function() {
class Foo {
render() {
return <Inner name={this.props.bar} />;
}
}
test(<Foo bar="foo" />, 'DIV', 'foo');
test(<Foo bar="bar" />, 'DIV', 'bar');
});
it('renders based on state using initial values in this.props', function() {
class Foo extends React.Component {
constructor(props) {
super(props);
this.state = {bar: this.props.initialValue};
}
render() {
return <span className={this.state.bar} />;
}
}
test(<Foo initialValue="foo" />, 'SPAN', 'foo');
});
it('renders based on state using props in the constructor', function() {
class Foo extends React.Component {
constructor(props) {
this.state = {bar: props.initialValue};
}
changeState() {
this.setState({bar: 'bar'});
}
render() {
if (this.state.bar === 'foo') {
return <div className="foo" />;
}
return <span className={this.state.bar} />;
}
}
var instance = test(<Foo initialValue="foo" />, 'DIV', 'foo');
instance.changeState();
test(<Foo />, 'SPAN', 'bar');
});
it('renders based on context in the constructor', function() {
class Foo extends React.Component {
constructor(props, context) {
super(props, context);
this.state = {tag: context.tag, className: this.context.className};
}
render() {
var Tag = this.state.tag;
return <Tag className={this.state.className} />;
}
}
Foo.contextTypes = {
tag: React.PropTypes.string,
className: React.PropTypes.string
};
class Outer extends React.Component {
getChildContext() {
return {tag: 'span', className: 'foo'};
}
render() {
return <Foo />;
}
}
Outer.childContextTypes = {
tag: React.PropTypes.string,
className: React.PropTypes.string
};
test(<Outer />, 'SPAN', 'foo');
});
it('renders only once when setting state in componentWillMount', function() {
var renderCount = 0;
class Foo extends React.Component {
constructor(props) {
this.state = {bar: props.initialValue};
}
componentWillMount() {
this.setState({bar: 'bar'});
}
render() {
renderCount++;
return <span className={this.state.bar} />;
}
}
test(<Foo initialValue="foo" />, 'SPAN', 'bar');
expect(renderCount).toBe(1);
});
it('should throw with non-object in the initial state property', function() {
[['an array'], 'a string', 1234].forEach(function(state) {
class Foo {
constructor() {
this.state = state;
}
render() {
return <span />;
}
}
expect(() => test(<Foo />, 'span', '')).toThrow(
'Invariant Violation: Foo.state: ' +
'must be set to an object or null'
);
});
});
it('should render with null in the initial state property', function() {
class Foo extends React.Component {
constructor() {
this.state = null;
}
render() {
return <span />;
}
}
test(<Foo />, 'SPAN', '');
});
it('setState through an event handler', function() {
class Foo extends React.Component {
constructor(props) {
this.state = {bar: props.initialValue};
}
handleClick() {
this.setState({bar: 'bar'});
}
render() {
return (
<Inner
name={this.state.bar}
onClick={this.handleClick.bind(this)}
/>
);
}
}
test(<Foo initialValue="foo" />, 'DIV', 'foo');
attachedListener();
expect(renderedName).toBe('bar');
});
it('should not implicitly bind event handlers', function() {
class Foo extends React.Component {
constructor(props) {
this.state = {bar: props.initialValue};
}
handleClick() {
this.setState({bar: 'bar'});
}
render() {
return (
<Inner
name={this.state.bar}
onClick={this.handleClick}
/>
);
}
}
test(<Foo initialValue="foo" />, 'DIV', 'foo');
expect(attachedListener).toThrow();
});
it('renders using forceUpdate even when there is no state', function() {
class Foo extends React.Component {
constructor(props) {
this.mutativeValue = props.initialValue;
}
handleClick() {
this.mutativeValue = 'bar';
this.forceUpdate();
}
render() {
return (
<Inner
name={this.mutativeValue}
onClick={this.handleClick.bind(this)}
/>
);
}
}
test(<Foo initialValue="foo" />, 'DIV', 'foo');
attachedListener();
expect(renderedName).toBe('bar');
});
it('will call all the normal life cycle methods', function() {
var lifeCycles = [];
class Foo {
constructor() {
this.state = {};
}
componentWillMount() {
lifeCycles.push('will-mount');
}
componentDidMount() {
lifeCycles.push('did-mount');
}
componentWillReceiveProps(nextProps) {
lifeCycles.push('receive-props', nextProps);
}
shouldComponentUpdate(nextProps, nextState) {
lifeCycles.push('should-update', nextProps, nextState);
return true;
}
componentWillUpdate(nextProps, nextState) {
lifeCycles.push('will-update', nextProps, nextState);
}
componentDidUpdate(prevProps, prevState) {
lifeCycles.push('did-update', prevProps, prevState);
}
componentWillUnmount() {
lifeCycles.push('will-unmount');
}
render() {
return <span className={this.props.value} />;
}
}
test(<Foo value="foo" />, 'SPAN', 'foo');
expect(lifeCycles).toEqual([
'will-mount',
'did-mount'
]);
lifeCycles = []; // reset
test(<Foo value="bar" />, 'SPAN', 'bar');
expect(lifeCycles).toEqual([
'receive-props', {value: 'bar'},
'should-update', {value: 'bar'}, {},
'will-update', {value: 'bar'}, {},
'did-update', {value: 'foo'}, {}
]);
lifeCycles = []; // reset
React.unmountComponentAtNode(container);
expect(lifeCycles).toEqual([
'will-unmount'
]);
});
it('warns when classic properties are defined on the instance, ' +
'but does not invoke them.', function() {
spyOn(console, 'error');
var getInitialStateWasCalled = false;
class Foo extends React.Component {
constructor() {
this.contextTypes = {};
this.propTypes = {};
}
getInitialState() {
getInitialStateWasCalled = true;
return {};
}
render() {
return <span className="foo" />;
}
}
test(<Foo />, 'SPAN', 'foo');
expect(getInitialStateWasCalled).toBe(false);
expect(console.error.calls.length).toBe(3);
expect(console.error.calls[0].args[0]).toContain(
'getInitialState was defined on Foo, a plain JavaScript class.'
);
expect(console.error.calls[1].args[0]).toContain(
'propTypes was defined as an instance property on Foo.'
);
expect(console.error.calls[2].args[0]).toContain(
'contextTypes was defined as an instance property on Foo.'
);
});
it('should warn when mispelling shouldComponentUpdate', function() {
spyOn(console, 'error');
class NamedComponent {
componentShouldUpdate() {
return false;
}
render() {
return <span className="foo" />;
}
}
test(<NamedComponent />, 'SPAN', 'foo');
expect(console.error.calls.length).toBe(1);
expect(console.error.calls[0].args[0]).toBe(
'Warning: ' +
'NamedComponent has a method called componentShouldUpdate(). Did you ' +
'mean shouldComponentUpdate()? The name is phrased as a question ' +
'because the function is expected to return a value.'
);
});
it('should throw AND warn when trying to access classic APIs', function() {
spyOn(console, 'error');
var instance = test(<Inner name="foo" />, 'DIV', 'foo');
expect(() => instance.getDOMNode()).toThrow();
expect(() => instance.replaceState({})).toThrow();
expect(() => instance.isMounted()).toThrow();
expect(() => instance.setProps({name: 'bar'})).toThrow();
expect(() => instance.replaceProps({name: 'bar'})).toThrow();
expect(console.error.calls.length).toBe(5);
expect(console.error.calls[0].args[0]).toContain(
'getDOMNode(...) is deprecated in plain JavaScript React classes'
);
expect(console.error.calls[1].args[0]).toContain(
'replaceState(...) is deprecated in plain JavaScript React classes'
);
expect(console.error.calls[2].args[0]).toContain(
'isMounted(...) is deprecated in plain JavaScript React classes'
);
expect(console.error.calls[3].args[0]).toContain(
'setProps(...) is deprecated in plain JavaScript React classes'
);
expect(console.error.calls[4].args[0]).toContain(
'replaceProps(...) is deprecated in plain JavaScript React classes'
);
});
  it('supports this.context passed via getChildContext', function() {
    // Consumer: reads `bar` from context and renders it as a class name.
    class Bar {
      render() {
        return <div className={this.context.bar} />;
      }
    }
    Bar.contextTypes = {bar: React.PropTypes.string};
    // Provider: supplies `bar` to descendants through getChildContext().
    class Foo {
      getChildContext() {
        return {bar: 'bar-through-context'};
      }
      render() {
        return <Bar />;
      }
    }
    Foo.childContextTypes = {bar: React.PropTypes.string};
    // If context flowed correctly, Bar's div carries the provided value.
    test(<Foo />, 'DIV', 'bar-through-context');
  });
it('supports classic refs', function() {
class Foo {
render() {
return <Inner name="foo" ref="inner" />;
}
}
var instance = test(<Foo />, 'DIV', 'foo');
expect(instance.refs.inner.getName()).toBe('foo');
});
it('supports drilling through to the DOM using findDOMNode', function() {
var instance = test(<Inner name="foo" />, 'DIV', 'foo');
var node = React.findDOMNode(instance);
expect(node).toBe(container.firstChild);
});
});
| AlexJeng/react | src/modern/class/__tests__/ReactES6Class-test.js | JavaScript | bsd-3-clause | 11,780 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/shell.h"
#include <algorithm>
#include <string>
#include "ash/accelerators/accelerator_controller.h"
#include "ash/accelerators/accelerator_delegate.h"
#include "ash/accelerators/focus_manager_factory.h"
#include "ash/accelerators/nested_accelerator_delegate.h"
#include "ash/ash_switches.h"
#include "ash/autoclick/autoclick_controller.h"
#include "ash/desktop_background/desktop_background_controller.h"
#include "ash/desktop_background/desktop_background_view.h"
#include "ash/desktop_background/user_wallpaper_delegate.h"
#include "ash/display/cursor_window_controller.h"
#include "ash/display/display_controller.h"
#include "ash/display/display_manager.h"
#include "ash/display/event_transformation_handler.h"
#include "ash/display/mouse_cursor_event_filter.h"
#include "ash/display/screen_position_controller.h"
#include "ash/drag_drop/drag_drop_controller.h"
#include "ash/first_run/first_run_helper_impl.h"
#include "ash/focus_cycler.h"
#include "ash/frame/custom_frame_view_ash.h"
#include "ash/gpu_support.h"
#include "ash/high_contrast/high_contrast_controller.h"
#include "ash/host/ash_window_tree_host_init_params.h"
#include "ash/keyboard_uma_event_filter.h"
#include "ash/magnifier/magnification_controller.h"
#include "ash/magnifier/partial_magnification_controller.h"
#include "ash/media_delegate.h"
#include "ash/new_window_delegate.h"
#include "ash/root_window_controller.h"
#include "ash/session/session_state_delegate.h"
#include "ash/shelf/app_list_shelf_item_delegate.h"
#include "ash/shelf/shelf_delegate.h"
#include "ash/shelf/shelf_item_delegate.h"
#include "ash/shelf/shelf_item_delegate_manager.h"
#include "ash/shelf/shelf_layout_manager.h"
#include "ash/shelf/shelf_model.h"
#include "ash/shelf/shelf_widget.h"
#include "ash/shelf/shelf_window_watcher.h"
#include "ash/shell_delegate.h"
#include "ash/shell_factory.h"
#include "ash/shell_init_params.h"
#include "ash/shell_window_ids.h"
#include "ash/system/locale/locale_notification_controller.h"
#include "ash/system/status_area_widget.h"
#include "ash/system/tray/system_tray_delegate.h"
#include "ash/system/tray/system_tray_notifier.h"
#include "ash/wm/app_list_controller.h"
#include "ash/wm/ash_focus_rules.h"
#include "ash/wm/ash_native_cursor_manager.h"
#include "ash/wm/coordinate_conversion.h"
#include "ash/wm/event_client_impl.h"
#include "ash/wm/lock_state_controller.h"
#include "ash/wm/maximize_mode/maximize_mode_controller.h"
#include "ash/wm/maximize_mode/maximize_mode_window_manager.h"
#include "ash/wm/mru_window_tracker.h"
#include "ash/wm/overlay_event_filter.h"
#include "ash/wm/overview/window_selector_controller.h"
#include "ash/wm/power_button_controller.h"
#include "ash/wm/resize_shadow_controller.h"
#include "ash/wm/root_window_layout_manager.h"
#include "ash/wm/screen_dimmer.h"
#include "ash/wm/system_gesture_event_filter.h"
#include "ash/wm/system_modal_container_event_filter.h"
#include "ash/wm/system_modal_container_layout_manager.h"
#include "ash/wm/toplevel_window_event_handler.h"
#include "ash/wm/video_detector.h"
#include "ash/wm/window_animations.h"
#include "ash/wm/window_cycle_controller.h"
#include "ash/wm/window_positioner.h"
#include "ash/wm/window_properties.h"
#include "ash/wm/window_util.h"
#include "ash/wm/workspace_controller.h"
#include "base/bind.h"
#include "base/debug/trace_event.h"
#include "ui/aura/client/aura_constants.h"
#include "ui/aura/env.h"
#include "ui/aura/layout_manager.h"
#include "ui/aura/window.h"
#include "ui/aura/window_event_dispatcher.h"
#include "ui/base/ui_base_switches.h"
#include "ui/base/user_activity/user_activity_detector.h"
#include "ui/compositor/layer.h"
#include "ui/compositor/layer_animator.h"
#include "ui/events/event_target_iterator.h"
#include "ui/gfx/display.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/image/image_skia.h"
#include "ui/gfx/screen.h"
#include "ui/keyboard/keyboard.h"
#include "ui/keyboard/keyboard_controller.h"
#include "ui/keyboard/keyboard_switches.h"
#include "ui/keyboard/keyboard_util.h"
#include "ui/message_center/message_center.h"
#include "ui/views/corewm/tooltip_aura.h"
#include "ui/views/corewm/tooltip_controller.h"
#include "ui/views/focus/focus_manager_factory.h"
#include "ui/views/widget/native_widget_aura.h"
#include "ui/views/widget/widget.h"
#include "ui/wm/core/accelerator_filter.h"
#include "ui/wm/core/compound_event_filter.h"
#include "ui/wm/core/focus_controller.h"
#include "ui/wm/core/input_method_event_filter.h"
#include "ui/wm/core/nested_accelerator_controller.h"
#include "ui/wm/core/shadow_controller.h"
#include "ui/wm/core/visibility_controller.h"
#include "ui/wm/core/window_modality_controller.h"
#if defined(OS_CHROMEOS)
#if defined(USE_X11)
#include "ui/gfx/x/x11_types.h"
#endif // defined(USE_X11)
#include "ash/accelerators/magnifier_key_scroller.h"
#include "ash/accelerators/spoken_feedback_toggler.h"
#include "ash/ash_constants.h"
#include "ash/content/display/screen_orientation_delegate_chromeos.h"
#include "ash/display/display_change_observer_chromeos.h"
#include "ash/display/display_configurator_animation.h"
#include "ash/display/display_error_observer_chromeos.h"
#include "ash/display/projecting_observer_chromeos.h"
#include "ash/display/resolution_notification_controller.h"
#include "ash/sticky_keys/sticky_keys_controller.h"
#include "ash/system/chromeos/bluetooth/bluetooth_notification_controller.h"
#include "ash/system/chromeos/brightness/brightness_controller_chromeos.h"
#include "ash/system/chromeos/power/power_event_observer.h"
#include "ash/system/chromeos/power/power_status.h"
#include "ash/system/chromeos/power/video_activity_notifier.h"
#include "ash/system/chromeos/session/last_window_closed_logout_reminder.h"
#include "ash/system/chromeos/session/logout_confirmation_controller.h"
#include "ash/touch/touch_transformer_controller.h"
#include "ash/virtual_keyboard_controller.h"
#include "base/bind_helpers.h"
#include "base/sys_info.h"
#include "chromeos/accelerometer/accelerometer_reader.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "ui/chromeos/user_activity_power_manager_notifier.h"
#include "ui/display/chromeos/display_configurator.h"
#endif // defined(OS_CHROMEOS)
namespace ash {
namespace {
using aura::Window;
using views::Widget;
// A Corewm VisibilityController subclass that calls the Ash animation routine
// so we can pick up our extended animations. See ash/wm/window_animations.h.
class AshVisibilityController : public ::wm::VisibilityController {
 public:
  AshVisibilityController() {}
  ~AshVisibilityController() override {}
 private:
  // Overridden from ::wm::VisibilityController:
  // Routes child-window show/hide through the Ash-specific animation helper
  // instead of the default wm animation.
  bool CallAnimateOnChildWindowVisibilityChanged(aura::Window* window,
                                                 bool visible) override {
    return AnimateOnChildWindowVisibilityChanged(window, visible);
  }
  DISALLOW_COPY_AND_ASSIGN(AshVisibilityController);
};
// Extracts the subset of ShellInitParams needed to construct the primary
// AshWindowTreeHost. On non-Windows builds the result carries no fields.
AshWindowTreeHostInitParams ShellInitParamsToAshWindowTreeHostInitParams(
    const ShellInitParams& shell_init_params) {
  AshWindowTreeHostInitParams ash_init_params;
#if defined(OS_WIN)
  // Forward the remote (metro viewer) HWND to the window tree host.
  ash_init_params.remote_hwnd = shell_init_params.remote_hwnd;
#endif
  return ash_init_params;
}
} // namespace
// static
// Singleton instance; set by CreateInstance(), cleared by DeleteInstance().
Shell* Shell::instance_ = NULL;
// static
bool Shell::initially_hide_cursor_ = false;
////////////////////////////////////////////////////////////////////////////////
// Shell, public:
// static
// Creates the singleton Shell and runs its two-phase initialization.
// CHECK-fails if an instance already exists.
Shell* Shell::CreateInstance(const ShellInitParams& init_params) {
  CHECK(!instance_);
  instance_ = new Shell(init_params.delegate);
  instance_->Init(init_params);
  return instance_;
}
// static
// Returns the singleton; CHECK-fails if CreateInstance() was never called.
Shell* Shell::GetInstance() {
  CHECK(instance_);
  return instance_;
}
// static
bool Shell::HasInstance() {
  return !!instance_;
}
// static
// Destroys the singleton (no-op if none) and clears the global pointer.
void Shell::DeleteInstance() {
  delete instance_;
  instance_ = NULL;
}
// static
RootWindowController* Shell::GetPrimaryRootWindowController() {
  CHECK(HasInstance());
  return GetRootWindowController(GetPrimaryRootWindow());
}
// static
Shell::RootWindowControllerList Shell::GetAllRootWindowControllers() {
  CHECK(HasInstance());
  return Shell::GetInstance()->display_controller()->
      GetAllRootWindowControllers();
}
// static
aura::Window* Shell::GetPrimaryRootWindow() {
  CHECK(HasInstance());
  return GetInstance()->display_controller()->GetPrimaryRootWindow();
}
// static
// Returns the root window new windows should be placed on. A scoped
// override (|scoped_target_root_window_|) takes precedence over the
// activation-tracked |target_root_window_|.
aura::Window* Shell::GetTargetRootWindow() {
  CHECK(HasInstance());
  Shell* shell = GetInstance();
  if (shell->scoped_target_root_window_)
    return shell->scoped_target_root_window_;
  return shell->target_root_window_;
}
// static
// Returns the alternate (Ash) screen rather than the native screen.
gfx::Screen* Shell::GetScreen() {
  return gfx::Screen::GetScreenByType(gfx::SCREEN_TYPE_ALTERNATE);
}
// static
aura::Window::Windows Shell::GetAllRootWindows() {
  CHECK(HasInstance());
  return Shell::GetInstance()->display_controller()->
      GetAllRootWindows();
}
// static
// Looks up the container child of |root_window| with |container_id|.
aura::Window* Shell::GetContainer(aura::Window* root_window,
                                  int container_id) {
  return root_window->GetChildById(container_id);
}
// static
const aura::Window* Shell::GetContainer(const aura::Window* root_window,
                                        int container_id) {
  return root_window->GetChildById(container_id);
}
// static
std::vector<aura::Window*> Shell::GetContainersFromAllRootWindows(
int container_id,
aura::Window* priority_root) {
std::vector<aura::Window*> containers;
aura::Window::Windows root_windows = GetAllRootWindows();
for (aura::Window::Windows::const_iterator it = root_windows.begin();
it != root_windows.end(); ++it) {
aura::Window* container = (*it)->GetChildById(container_id);
if (container) {
if (priority_root && priority_root->Contains(container))
containers.insert(containers.begin(), container);
else
containers.push_back(container);
}
}
return containers;
}
// Shows the desktop context menu at |location_in_screen|, routed to the
// root window under that point. Suppressed before login and on the lock
// screen.
void Shell::ShowContextMenu(const gfx::Point& location_in_screen,
                            ui::MenuSourceType source_type) {
  // No context menus if there is no session with an active user.
  if (!session_state_delegate_->NumberOfLoggedInUsers())
    return;
  // No context menus when screen is locked.
  if (session_state_delegate_->IsScreenLocked())
    return;
  aura::Window* root =
      wm::GetRootWindowMatching(gfx::Rect(location_in_screen, gfx::Size()));
  GetRootWindowController(root)
      ->ShowContextMenu(location_in_screen, source_type);
}
// Shows the app list on |window|'s root; lazily creates the controller.
void Shell::ShowAppList(aura::Window* window) {
  // If the context window is not given, show it on the target root window.
  if (!window)
    window = GetTargetRootWindow();
  if (!app_list_controller_)
    app_list_controller_.reset(new AppListController);
  app_list_controller_->Show(window);
}
void Shell::DismissAppList() {
  if (!app_list_controller_)
    return;
  app_list_controller_->Dismiss();
}
// Dismisses the app list when visible, otherwise shows it on |window|.
void Shell::ToggleAppList(aura::Window* window) {
  if (app_list_controller_ && app_list_controller_->IsVisible()) {
    DismissAppList();
    return;
  }
  ShowAppList(window);
}
bool Shell::GetAppListTargetVisibility() const {
  return app_list_controller_.get() &&
      app_list_controller_->GetTargetVisibility();
}
// Returns NULL when the app list has never been shown.
aura::Window* Shell::GetAppListWindow() {
  return app_list_controller_.get() ? app_list_controller_->GetWindow() : NULL;
}
app_list::AppListView* Shell::GetAppListView() {
  return app_list_controller_.get() ? app_list_controller_->GetView() : NULL;
}
bool Shell::IsSystemModalWindowOpen() const {
if (simulate_modal_window_open_for_testing_)
return true;
const std::vector<aura::Window*> containers = GetContainersFromAllRootWindows(
kShellWindowId_SystemModalContainer, NULL);
for (std::vector<aura::Window*>::const_iterator cit = containers.begin();
cit != containers.end(); ++cit) {
for (aura::Window::Windows::const_iterator wit = (*cit)->children().begin();
wit != (*cit)->children().end(); ++wit) {
if ((*wit)->GetProperty(aura::client::kModalKey) ==
ui::MODAL_TYPE_SYSTEM && (*wit)->TargetVisibility()) {
return true;
}
}
}
return false;
}
views::NonClientFrameView* Shell::CreateDefaultNonClientFrameView(
    views::Widget* widget) {
  // Use translucent-style window frames for dialogs.
  return new CustomFrameViewAsh(widget);
}
// Moves focus to the next/previous widget in the focus cycle.
void Shell::RotateFocus(Direction direction) {
  focus_cycler_->RotateFocus(direction == FORWARD ? FocusCycler::FORWARD
                                                  : FocusCycler::BACKWARD);
}
// Updates the work-area insets for the display nearest |contains| and
// notifies observers only when the work area actually changed.
void Shell::SetDisplayWorkAreaInsets(Window* contains,
                                     const gfx::Insets& insets) {
  if (!display_controller_->UpdateWorkAreaOfDisplayNearestWindow(
          contains, insets)) {
    return;
  }
  FOR_EACH_OBSERVER(ShellObserver, observers_,
                    OnDisplayWorkAreaInsetsChanged());
}
void Shell::OnLoginStateChanged(user::LoginStatus status) {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnLoginStateChanged(status));
}
// Once the user profile is ready, the shelf and virtual keyboard can exist.
void Shell::OnLoginUserProfilePrepared() {
  CreateShelf();
  CreateKeyboard();
}
void Shell::UpdateAfterLoginStatusChange(user::LoginStatus status) {
  RootWindowControllerList controllers = GetAllRootWindowControllers();
  for (RootWindowControllerList::iterator iter = controllers.begin();
       iter != controllers.end(); ++iter)
    (*iter)->UpdateAfterLoginStatusChange(status);
}
void Shell::OnAppTerminating() {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnAppTerminating());
}
void Shell::OnLockStateChanged(bool locked) {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnLockStateChanged(locked));
#ifndef NDEBUG
  // Make sure that there is no system modal in Lock layer when unlocked.
  if (!locked) {
    std::vector<aura::Window*> containers = GetContainersFromAllRootWindows(
        kShellWindowId_LockSystemModalContainer, GetPrimaryRootWindow());
    for (std::vector<aura::Window*>::const_iterator iter = containers.begin();
         iter != containers.end(); ++iter) {
      DCHECK_EQ(0u, (*iter)->children().size());
    }
  }
#endif
}
// Forwards casting start/stop to the projecting observer (Chrome OS only;
// a no-op elsewhere or before the observer exists).
void Shell::OnCastingSessionStartedOrStopped(bool started) {
#if defined(OS_CHROMEOS)
  if (projecting_observer_)
    projecting_observer_->OnCastingSessionStartedOrStopped(started);
#endif
}
void Shell::OnOverviewModeStarting() {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnOverviewModeStarting());
}
void Shell::OnOverviewModeEnding() {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnOverviewModeEnding());
}
void Shell::OnMaximizeModeStarted() {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnMaximizeModeStarted());
}
void Shell::OnMaximizeModeEnded() {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnMaximizeModeEnded());
}
void Shell::OnRootWindowAdded(aura::Window* root_window) {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnRootWindowAdded(root_window));
}
// Creates the shelf on every root window (one per display).
void Shell::CreateShelf() {
  RootWindowControllerList controllers = GetAllRootWindowControllers();
  for (RootWindowControllerList::iterator iter = controllers.begin();
       iter != controllers.end(); ++iter)
    (*iter)->shelf()->CreateShelf();
}
void Shell::OnShelfCreatedForRootWindow(aura::Window* root_window) {
  FOR_EACH_OBSERVER(ShellObserver,
                    observers_,
                    OnShelfCreatedForRootWindow(root_window));
}
void Shell::CreateKeyboard() {
  // TODO(bshe): Primary root window controller may not be the controller to
  // attach virtual keyboard. See http://crbug.com/303429
  InitKeyboard();
  GetPrimaryRootWindowController()->
      ActivateKeyboard(keyboard::KeyboardController::GetInstance());
}
// Detaches the virtual keyboard from every root window, then destroys the
// global KeyboardController instance.
void Shell::DeactivateKeyboard() {
  if (keyboard::KeyboardController::GetInstance()) {
    RootWindowControllerList controllers = GetAllRootWindowControllers();
    for (RootWindowControllerList::iterator iter = controllers.begin();
         iter != controllers.end(); ++iter) {
      (*iter)->DeactivateKeyboard(keyboard::KeyboardController::GetInstance());
    }
  }
  keyboard::KeyboardController::ResetInstance(NULL);
}
void Shell::ShowShelf() {
  RootWindowControllerList controllers = GetAllRootWindowControllers();
  for (RootWindowControllerList::iterator iter = controllers.begin();
       iter != controllers.end(); ++iter)
    (*iter)->ShowShelf();
}
void Shell::AddShellObserver(ShellObserver* observer) {
  observers_.AddObserver(observer);
}
void Shell::RemoveShellObserver(ShellObserver* observer) {
  observers_.RemoveObserver(observer);
}
#if defined(OS_CHROMEOS)
// Display settings must not be persisted while maximize mode temporarily
// overrides the configuration, or while a resolution-change notification
// may still time out and revert.
bool Shell::ShouldSaveDisplaySettings() {
  return !((maximize_mode_controller_->IsMaximizeModeWindowManagerEnabled() &&
            maximize_mode_controller_->
                ignore_display_configuration_updates()) ||
           resolution_notification_controller_->DoesNotificationTimeout());
}
#endif
// Re-evaluates shelf visibility on every root window that has a shelf.
void Shell::UpdateShelfVisibility() {
  RootWindowControllerList controllers = GetAllRootWindowControllers();
  for (RootWindowControllerList::iterator iter = controllers.begin();
       iter != controllers.end(); ++iter)
    if ((*iter)->shelf())
      (*iter)->UpdateShelfVisibility();
}
void Shell::SetShelfAutoHideBehavior(ShelfAutoHideBehavior behavior,
                                     aura::Window* root_window) {
  ash::ShelfLayoutManager::ForShelf(root_window)->SetAutoHideBehavior(behavior);
}
ShelfAutoHideBehavior Shell::GetShelfAutoHideBehavior(
    aura::Window* root_window) const {
  return ash::ShelfLayoutManager::ForShelf(root_window)->auto_hide_behavior();
}
// Observers are only notified when the alignment actually changed.
void Shell::SetShelfAlignment(ShelfAlignment alignment,
                              aura::Window* root_window) {
  if (ash::ShelfLayoutManager::ForShelf(root_window)->SetAlignment(alignment)) {
    FOR_EACH_OBSERVER(
        ShellObserver, observers_, OnShelfAlignmentChanged(root_window));
  }
}
ShelfAlignment Shell::GetShelfAlignment(const aura::Window* root_window) {
  return GetRootWindowController(root_window)
      ->GetShelfLayoutManager()
      ->GetAlignment();
}
// Dims or undims every display's screen (e.g. for the lock screen).
void Shell::SetDimming(bool should_dim) {
  RootWindowControllerList controllers = GetAllRootWindowControllers();
  for (RootWindowControllerList::iterator iter = controllers.begin();
       iter != controllers.end(); ++iter)
    (*iter)->screen_dimmer()->SetDimming(should_dim);
}
void Shell::NotifyFullscreenStateChange(bool is_fullscreen,
                                        aura::Window* root_window) {
  FOR_EACH_OBSERVER(ShellObserver, observers_, OnFullscreenStateChanged(
      is_fullscreen, root_window));
}
// Installs the system-modal event filter (once) and shows the modal
// background on every display's modal container for |window|.
void Shell::CreateModalBackground(aura::Window* window) {
  if (!modality_filter_) {
    modality_filter_.reset(new SystemModalContainerEventFilter(this));
    AddPreTargetHandler(modality_filter_.get());
  }
  RootWindowControllerList controllers = GetAllRootWindowControllers();
  for (RootWindowControllerList::iterator iter = controllers.begin();
       iter != controllers.end(); ++iter)
    (*iter)->GetSystemModalLayoutManager(window)->CreateModalBackground();
}
// Called when a modal window goes away: activates the next modal window
// if one exists; otherwise removes the modal filter and backgrounds.
void Shell::OnModalWindowRemoved(aura::Window* removed) {
  RootWindowControllerList controllers = GetAllRootWindowControllers();
  bool activated = false;
  for (RootWindowControllerList::iterator iter = controllers.begin();
       iter != controllers.end() && !activated; ++iter) {
    activated = (*iter)->GetSystemModalLayoutManager(removed)->
        ActivateNextModalWindow();
  }
  if (!activated) {
    RemovePreTargetHandler(modality_filter_.get());
    modality_filter_.reset();
    for (RootWindowControllerList::iterator iter = controllers.begin();
         iter != controllers.end(); ++iter)
      (*iter)->GetSystemModalLayoutManager(removed)->DestroyModalBackground();
  }
}
WebNotificationTray* Shell::GetWebNotificationTray() {
  return GetPrimaryRootWindowController()->shelf()->
      status_area_widget()->web_notification_tray();
}
bool Shell::HasPrimaryStatusArea() {
  ShelfWidget* shelf = GetPrimaryRootWindowController()->shelf();
  return shelf && shelf->status_area_widget();
}
SystemTray* Shell::GetPrimarySystemTray() {
  return GetPrimaryRootWindowController()->GetSystemTray();
}
// Lazily constructs the shelf delegate and its supporting objects (model,
// item-delegate manager, app-list item delegate, window watcher) on first
// use; the construction order below is required.
ShelfDelegate* Shell::GetShelfDelegate() {
  if (!shelf_delegate_) {
    shelf_model_.reset(new ShelfModel);
    // Creates ShelfItemDelegateManager before ShelfDelegate.
    shelf_item_delegate_manager_.reset(
        new ShelfItemDelegateManager(shelf_model_.get()));
    shelf_delegate_.reset(delegate_->CreateShelfDelegate(shelf_model_.get()));
    scoped_ptr<ShelfItemDelegate> controller(new AppListShelfItemDelegate);
    // Finding the shelf model's location of the app list and setting its
    // ShelfItemDelegate.
    int app_list_index = shelf_model_->GetItemIndexForType(TYPE_APP_LIST);
    DCHECK_GE(app_list_index, 0);
    ShelfID app_list_id = shelf_model_->items()[app_list_index].id;
    DCHECK(app_list_id);
    shelf_item_delegate_manager_->SetShelfItemDelegate(app_list_id,
                                                       controller.Pass());
    shelf_window_watcher_.reset(new ShelfWindowWatcher(
        shelf_model_.get(), shelf_item_delegate_manager_.get()));
  }
  return shelf_delegate_.get();
}
// Toggles the touch HUD projection overlay; no-op when unchanged.
void Shell::SetTouchHudProjectionEnabled(bool enabled) {
  if (is_touch_hud_projection_enabled_ == enabled)
    return;
  is_touch_hud_projection_enabled_ = enabled;
  FOR_EACH_OBSERVER(ShellObserver, observers_,
                    OnTouchHudProjectionToggled(enabled));
}
#if defined(OS_CHROMEOS)
// Caller takes ownership of the returned helper.
ash::FirstRunHelper* Shell::CreateFirstRunHelper() {
  return new ash::FirstRunHelperImpl;
}
// Switches between the software-composited cursor window and the native
// hardware cursor; the two are mutually exclusive.
void Shell::SetCursorCompositingEnabled(bool enabled) {
  display_controller_->cursor_window_controller()->SetCursorCompositingEnabled(
      enabled);
  native_cursor_manager_->SetNativeCursorEnabled(!enabled);
}
#endif  // defined(OS_CHROMEOS)
void Shell::DoInitialWorkspaceAnimation() {
  return GetPrimaryRootWindowController()->workspace_controller()->
      DoInitialAnimation();
}
////////////////////////////////////////////////////////////////////////////////
// Shell, private:
// Constructs core state only; most subsystems are created later in Init().
Shell::Shell(ShellDelegate* delegate)
    : target_root_window_(NULL),
      scoped_target_root_window_(NULL),
      delegate_(delegate),
      window_positioner_(new WindowPositioner),
      activation_client_(NULL),
#if defined(OS_CHROMEOS)
      accelerometer_reader_(new chromeos::AccelerometerReader()),
      display_configurator_(new ui::DisplayConfigurator()),
#endif  // defined(OS_CHROMEOS)
      native_cursor_manager_(new AshNativeCursorManager),
      cursor_manager_(
          scoped_ptr<::wm::NativeCursorManager>(native_cursor_manager_)),
      simulate_modal_window_open_for_testing_(false),
      is_touch_hud_projection_enabled_(false) {
  DCHECK(delegate_.get());
  gpu_support_.reset(delegate_->CreateGPUSupport());
  display_manager_.reset(new DisplayManager);
  display_controller_.reset(new DisplayController);
  user_metrics_recorder_.reset(new UserMetricsRecorder);
#if defined(OS_CHROMEOS)
  PowerStatus::Initialize();
#endif
}
// Tears down all subsystems. The destruction order below is deliberate and
// mirrors (in reverse) the creation order in Init(); reordering lines here
// can cause use-after-free during shutdown.
Shell::~Shell() {
  TRACE_EVENT0("shutdown", "ash::Shell::Destructor");
  delegate_->PreShutdown();
  views::FocusManagerFactory::Install(NULL);
  // Remove the focus from any window. This will prevent overhead and side
  // effects (e.g. crashes) from changing focus during shutdown.
  // See bug crbug.com/134502.
  aura::client::GetFocusClient(GetPrimaryRootWindow())->FocusWindow(NULL);
  // Please keep in same order as in Init() because it's easy to miss one.
  if (window_modality_controller_)
    window_modality_controller_.reset();
#if defined(OS_CHROMEOS)
  RemovePreTargetHandler(magnifier_key_scroll_handler_.get());
  magnifier_key_scroll_handler_.reset();
  RemovePreTargetHandler(speech_feedback_handler_.get());
  speech_feedback_handler_.reset();
#endif
  RemovePreTargetHandler(user_activity_detector_.get());
  RemovePreTargetHandler(overlay_filter_.get());
  RemovePreTargetHandler(input_method_filter_.get());
  RemovePreTargetHandler(accelerator_filter_.get());
  RemovePreTargetHandler(event_transformation_handler_.get());
  RemovePreTargetHandler(toplevel_window_event_handler_.get());
  RemovePostTargetHandler(toplevel_window_event_handler_.get());
  RemovePreTargetHandler(system_gesture_filter_.get());
  RemovePreTargetHandler(keyboard_metrics_filter_.get());
  RemovePreTargetHandler(mouse_cursor_filter_.get());
  // TooltipController is deleted with the Shell so removing its references.
  RemovePreTargetHandler(tooltip_controller_.get());
  // Destroy the virtual keyboard controller before the maximize mode
  // controller since the latter's destructor triggers events that the former
  // is listening to but no longer cares about.
#if defined(OS_CHROMEOS)
  virtual_keyboard_controller_.reset();
#endif
  // Destroy maximize mode controller early on since it has some observers
  // which need to be removed.
  maximize_mode_controller_->Shutdown();
  maximize_mode_controller_.reset();
  // AppList needs to be released before shelf layout manager, which is
  // destroyed with shelf container in the loop below. However, app list
  // container is now on top of shelf container and released after it.
  // TODO(xiyuan): Move it back when app list container is no longer needed.
  app_list_controller_.reset();
#if defined(OS_CHROMEOS)
  // Destroy the LastWindowClosedLogoutReminder before the
  // LogoutConfirmationController.
  last_window_closed_logout_reminder_.reset();
  // Destroy the LogoutConfirmationController before the SystemTrayDelegate.
  logout_confirmation_controller_.reset();
#endif
  // Destroy SystemTrayDelegate before destroying the status area(s).
  system_tray_delegate_->Shutdown();
  system_tray_delegate_.reset();
  locale_notification_controller_.reset();
  // Drag-and-drop must be canceled prior to close all windows.
  drag_drop_controller_.reset();
  // Controllers who have WindowObserver added must be deleted
  // before |display_controller_| is deleted.
#if defined(OS_CHROMEOS)
  // VideoActivityNotifier must be deleted before |video_detector_| is
  // deleted because it's observing video activity through
  // VideoDetectorObserver interface.
  video_activity_notifier_.reset();
#endif  // defined(OS_CHROMEOS)
  video_detector_.reset();
  high_contrast_controller_.reset();
  shadow_controller_.reset();
  resize_shadow_controller_.reset();
  window_cycle_controller_.reset();
  window_selector_controller_.reset();
  mru_window_tracker_.reset();
  // |shelf_window_watcher_| has a weak pointer to |shelf_model_|
  // and has window observers.
  shelf_window_watcher_.reset();
  // Destroy all child windows including widgets.
  display_controller_->CloseChildWindows();
  display_controller_->CloseMirroringDisplay();
  // Chrome implementation of shelf delegate depends on FocusClient,
  // so must be deleted before |focus_client_|.
  shelf_delegate_.reset();
  focus_client_.reset();
  // Destroy SystemTrayNotifier after destroying SystemTray as TrayItems
  // needs to remove observers from it.
  system_tray_notifier_.reset();
  // These need a valid Shell instance to clean up properly, so explicitly
  // delete them before invalidating the instance.
  // Alphabetical. TODO(oshima): sort.
  magnification_controller_.reset();
  partial_magnification_controller_.reset();
  tooltip_controller_.reset();
  event_client_.reset();
  nested_accelerator_controller_.reset();
  toplevel_window_event_handler_.reset();
  visibility_controller_.reset();
  // |shelf_item_delegate_manager_| observes |shelf_model_|. It must be
  // destroyed before |shelf_model_| is destroyed.
  shelf_item_delegate_manager_.reset();
  shelf_model_.reset();
  power_button_controller_.reset();
  lock_state_controller_.reset();
#if defined(OS_CHROMEOS)
  resolution_notification_controller_.reset();
#endif
  desktop_background_controller_.reset();
  mouse_cursor_filter_.reset();
#if defined(OS_CHROMEOS)
  touch_transformer_controller_.reset();
#endif  // defined(OS_CHROMEOS)
  // This also deletes all RootWindows. Note that we invoke Shutdown() on
  // DisplayController before resetting |display_controller_|, since
  // destruction of its owned RootWindowControllers relies on the value.
  display_manager_->CreateScreenForShutdown();
  display_controller_->Shutdown();
  display_controller_.reset();
  screen_position_controller_.reset();
  accessibility_delegate_.reset();
  new_window_delegate_.reset();
  media_delegate_.reset();
  keyboard::KeyboardController::ResetInstance(NULL);
#if defined(OS_CHROMEOS)
  if (display_change_observer_)
    display_configurator_->RemoveObserver(display_change_observer_.get());
  if (display_configurator_animation_)
    display_configurator_->RemoveObserver(
        display_configurator_animation_.get());
  if (display_error_observer_)
    display_configurator_->RemoveObserver(display_error_observer_.get());
  if (projecting_observer_)
    display_configurator_->RemoveObserver(projecting_observer_.get());
  display_change_observer_.reset();
  PowerStatus::Shutdown();
  // Ensure that DBusThreadManager outlives this Shell.
  DCHECK(chromeos::DBusThreadManager::IsInitialized());
#endif
  DCHECK(instance_ == this);
  instance_ = NULL;
}
void Shell::Init(const ShellInitParams& init_params) {
delegate_->PreInit();
bool display_initialized = display_manager_->InitFromCommandLine();
#if defined(OS_CHROMEOS)
display_configurator_->Init(!gpu_support_->IsPanelFittingDisabled());
display_configurator_animation_.reset(new DisplayConfiguratorAnimation());
display_configurator_->AddObserver(display_configurator_animation_.get());
// The DBusThreadManager must outlive this Shell. See the DCHECK in ~Shell.
chromeos::DBusThreadManager* dbus_thread_manager =
chromeos::DBusThreadManager::Get();
projecting_observer_.reset(
new ProjectingObserver(dbus_thread_manager->GetPowerManagerClient()));
display_configurator_->AddObserver(projecting_observer_.get());
if (!display_initialized && base::SysInfo::IsRunningOnChromeOS()) {
display_change_observer_.reset(new DisplayChangeObserver);
// Register |display_change_observer_| first so that the rest of
// observer gets invoked after the root windows are configured.
display_configurator_->AddObserver(display_change_observer_.get());
display_error_observer_.reset(new DisplayErrorObserver());
display_configurator_->AddObserver(display_error_observer_.get());
display_configurator_->set_state_controller(display_change_observer_.get());
display_configurator_->set_mirroring_controller(display_manager_.get());
display_configurator_->ForceInitialConfigure(
delegate_->IsFirstRunAfterBoot() ? kChromeOsBootColor : 0);
display_initialized = true;
}
#endif // defined(OS_CHROMEOS)
if (!display_initialized)
display_manager_->InitDefaultDisplay();
display_manager_->RefreshFontParams();
// Install the custom factory first so that views::FocusManagers for Tray,
// Shelf, and WallPaper could be created by the factory.
views::FocusManagerFactory::Install(new AshFocusManagerFactory);
aura::Env::CreateInstance(true);
aura::Env::GetInstance()->set_context_factory(init_params.context_factory);
// The WindowModalityController needs to be at the front of the input event
// pretarget handler list to ensure that it processes input events when modal
// windows are active.
window_modality_controller_.reset(
new ::wm::WindowModalityController(this));
env_filter_.reset(new ::wm::CompoundEventFilter);
AddPreTargetHandler(env_filter_.get());
::wm::FocusController* focus_controller =
new ::wm::FocusController(new wm::AshFocusRules);
focus_client_.reset(focus_controller);
activation_client_ = focus_controller;
activation_client_->AddObserver(this);
focus_cycler_.reset(new FocusCycler());
screen_position_controller_.reset(new ScreenPositionController);
display_controller_->Start();
display_controller_->CreatePrimaryHost(
ShellInitParamsToAshWindowTreeHostInitParams(init_params));
aura::Window* root_window = display_controller_->GetPrimaryRootWindow();
target_root_window_ = root_window;
#if defined(OS_CHROMEOS)
resolution_notification_controller_.reset(
new ResolutionNotificationController);
#endif
cursor_manager_.SetDisplay(GetScreen()->GetPrimaryDisplay());
nested_accelerator_controller_.reset(
new ::wm::NestedAcceleratorController(new NestedAcceleratorDelegate));
accelerator_controller_.reset(new AcceleratorController);
maximize_mode_controller_.reset(new MaximizeModeController());
#if defined(OS_CHROMEOS)
magnifier_key_scroll_handler_ = MagnifierKeyScroller::CreateHandler();
AddPreTargetHandler(magnifier_key_scroll_handler_.get());
speech_feedback_handler_ = SpokenFeedbackToggler::CreateHandler();
AddPreTargetHandler(speech_feedback_handler_.get());
#endif
// The order in which event filters are added is significant.
// ui::UserActivityDetector passes events to observers, so let them get
// rewritten first.
user_activity_detector_.reset(new ui::UserActivityDetector);
AddPreTargetHandler(user_activity_detector_.get());
overlay_filter_.reset(new OverlayEventFilter);
AddPreTargetHandler(overlay_filter_.get());
AddShellObserver(overlay_filter_.get());
input_method_filter_.reset(new ::wm::InputMethodEventFilter(
root_window->GetHost()->GetAcceleratedWidget()));
AddPreTargetHandler(input_method_filter_.get());
accelerator_filter_.reset(new ::wm::AcceleratorFilter(
scoped_ptr< ::wm::AcceleratorDelegate>(new AcceleratorDelegate).Pass(),
accelerator_controller_->accelerator_history()));
AddPreTargetHandler(accelerator_filter_.get());
event_transformation_handler_.reset(new EventTransformationHandler);
AddPreTargetHandler(event_transformation_handler_.get());
toplevel_window_event_handler_.reset(new ToplevelWindowEventHandler);
system_gesture_filter_.reset(new SystemGestureEventFilter);
AddPreTargetHandler(system_gesture_filter_.get());
keyboard_metrics_filter_.reset(new KeyboardUMAEventFilter);
AddPreTargetHandler(keyboard_metrics_filter_.get());
// The keyboard system must be initialized before the RootWindowController is
// created.
#if defined(OS_CHROMEOS)
keyboard::InitializeKeyboard();
#endif
#if defined(OS_CHROMEOS)
sticky_keys_controller_.reset(new StickyKeysController);
#endif
lock_state_controller_.reset(new LockStateController);
power_button_controller_.reset(new PowerButtonController(
lock_state_controller_.get()));
#if defined(OS_CHROMEOS)
// Pass the initial display state to PowerButtonController.
power_button_controller_->OnDisplayModeChanged(
display_configurator_->cached_displays());
#endif
AddShellObserver(lock_state_controller_.get());
drag_drop_controller_.reset(new DragDropController);
mouse_cursor_filter_.reset(new MouseCursorEventFilter());
PrependPreTargetHandler(mouse_cursor_filter_.get());
// Create Controllers that may need root window.
// TODO(oshima): Move as many controllers before creating
// RootWindowController as possible.
visibility_controller_.reset(new AshVisibilityController);
magnification_controller_.reset(
MagnificationController::CreateInstance());
mru_window_tracker_.reset(new MruWindowTracker(activation_client_));
partial_magnification_controller_.reset(
new PartialMagnificationController());
autoclick_controller_.reset(AutoclickController::CreateInstance());
high_contrast_controller_.reset(new HighContrastController);
video_detector_.reset(new VideoDetector);
window_selector_controller_.reset(new WindowSelectorController());
window_cycle_controller_.reset(new WindowCycleController());
tooltip_controller_.reset(
new views::corewm::TooltipController(
scoped_ptr<views::corewm::Tooltip>(
new views::corewm::TooltipAura(gfx::SCREEN_TYPE_ALTERNATE))));
AddPreTargetHandler(tooltip_controller_.get());
event_client_.reset(new EventClientImpl);
// This controller needs to be set before SetupManagedWindowMode.
desktop_background_controller_.reset(new DesktopBackgroundController());
user_wallpaper_delegate_.reset(delegate_->CreateUserWallpaperDelegate());
session_state_delegate_.reset(delegate_->CreateSessionStateDelegate());
accessibility_delegate_.reset(delegate_->CreateAccessibilityDelegate());
new_window_delegate_.reset(delegate_->CreateNewWindowDelegate());
media_delegate_.reset(delegate_->CreateMediaDelegate());
resize_shadow_controller_.reset(new ResizeShadowController());
shadow_controller_.reset(
new ::wm::ShadowController(activation_client_));
// Create system_tray_notifier_ before the delegate.
system_tray_notifier_.reset(new ash::SystemTrayNotifier());
// Initialize system_tray_delegate_ before initializing StatusAreaWidget.
system_tray_delegate_.reset(delegate()->CreateSystemTrayDelegate());
DCHECK(system_tray_delegate_.get());
locale_notification_controller_.reset(new LocaleNotificationController);
// Initialize system_tray_delegate_ after StatusAreaWidget is created.
system_tray_delegate_->Initialize();
#if defined(OS_CHROMEOS)
// Create the LogoutConfirmationController after the SystemTrayDelegate.
logout_confirmation_controller_.reset(new LogoutConfirmationController(
base::Bind(&SystemTrayDelegate::SignOut,
base::Unretained(system_tray_delegate_.get()))));
// Create TouchTransformerController before DisplayController::InitDisplays()
// since TouchTransformerController listens on
// DisplayController::Observer::OnDisplaysInitialized().
touch_transformer_controller_.reset(new TouchTransformerController());
#endif // defined(OS_CHROMEOS)
display_controller_->InitDisplays();
#if defined(OS_CHROMEOS)
// Needs to be created after InitDisplays() since it may cause the virtual
// keyboard to be deployed.
virtual_keyboard_controller_.reset(new VirtualKeyboardController);
#endif // defined(OS_CHROMEOS)
// It needs to be created after RootWindowController has been created
// (which calls OnWindowResized has been called, otherwise the
// widget will not paint when restoring after a browser crash. Also it needs
// to be created after InitSecondaryDisplays() to initialize the wallpapers in
// the correct size.
user_wallpaper_delegate_->InitializeWallpaper();
if (initially_hide_cursor_)
cursor_manager_.HideCursor();
cursor_manager_.SetCursor(ui::kCursorPointer);
#if defined(OS_CHROMEOS)
// Set accelerator controller delegates.
accelerator_controller_->SetBrightnessControlDelegate(
scoped_ptr<ash::BrightnessControlDelegate>(
new ash::system::BrightnessControllerChromeos).Pass());
power_event_observer_.reset(new PowerEventObserver());
user_activity_notifier_.reset(
new ui::UserActivityPowerManagerNotifier(user_activity_detector_.get()));
video_activity_notifier_.reset(
new VideoActivityNotifier(video_detector_.get()));
bluetooth_notification_controller_.reset(new BluetoothNotificationController);
last_window_closed_logout_reminder_.reset(new LastWindowClosedLogoutReminder);
screen_orientation_delegate_.reset(new ScreenOrientationDelegate());
#endif
// The compositor thread and main message loop have to be running in
// order to create mirror window. Run it after the main message loop
// is started.
display_manager_->CreateMirrorWindowAsyncIfAny();
}
void Shell::InitKeyboard() {
if (keyboard::IsKeyboardEnabled()) {
if (keyboard::KeyboardController::GetInstance()) {
RootWindowControllerList controllers = GetAllRootWindowControllers();
for (RootWindowControllerList::iterator iter = controllers.begin();
iter != controllers.end(); ++iter) {
(*iter)->DeactivateKeyboard(
keyboard::KeyboardController::GetInstance());
}
}
keyboard::KeyboardControllerProxy* proxy =
delegate_->CreateKeyboardControllerProxy();
keyboard::KeyboardController::ResetInstance(
new keyboard::KeyboardController(proxy));
}
}
// Wires up the aura client interfaces and event handlers that every root
// window needs. Called per root window; assumes the shared controllers
// below were already created (DCHECKed for the ones dereferenced here).
void Shell::InitRootWindow(aura::Window* root_window) {
  DCHECK(activation_client_);
  DCHECK(visibility_controller_.get());
  DCHECK(drag_drop_controller_.get());

  aura::client::SetFocusClient(root_window, focus_client_.get());
  input_method_filter_->SetInputMethodPropertyInRootWindow(root_window);
  aura::client::SetActivationClient(root_window, activation_client_);
  // activation_client_ is installed as a ::wm::FocusController during
  // Shell::Init(), so this downcast recovers the event-handler interface.
  ::wm::FocusController* focus_controller =
      static_cast< ::wm::FocusController*>(activation_client_);
  root_window->AddPreTargetHandler(focus_controller);
  aura::client::SetVisibilityClient(root_window, visibility_controller_.get());
  aura::client::SetDragDropClient(root_window, drag_drop_controller_.get());
  aura::client::SetScreenPositionClient(root_window,
                                        screen_position_controller_.get());
  aura::client::SetCursorClient(root_window, &cursor_manager_);
  aura::client::SetTooltipClient(root_window, tooltip_controller_.get());
  aura::client::SetEventClient(root_window, event_client_.get());

  aura::client::SetWindowMoveClient(root_window,
                                    toplevel_window_event_handler_.get());
  // The toplevel handler participates both before and after targeting so it
  // can observe events on either side of dispatch.
  root_window->AddPreTargetHandler(toplevel_window_event_handler_.get());
  root_window->AddPostTargetHandler(toplevel_window_event_handler_.get());

  // NOTE(review): nested_accelerator_controller_ looks like it is created
  // unconditionally in Init(); this null check presumably guards shutdown
  // or test paths -- confirm before removing.
  if (nested_accelerator_controller_) {
    aura::client::SetDispatcherClient(root_window,
                                      nested_accelerator_controller_.get());
  }
}
// Returns true if |window| may receive input events: either a system-modal
// container on some root window admits it, or it belongs to the virtual
// keyboard (which must stay interactive even under a modal dialog).
bool Shell::CanWindowReceiveEvents(aura::Window* window) {
  RootWindowControllerList root_controllers = GetAllRootWindowControllers();
  for (auto* root_controller : root_controllers) {
    SystemModalContainerLayoutManager* modal_layout_manager =
        root_controller->GetSystemModalLayoutManager(window);
    if (modal_layout_manager &&
        modal_layout_manager->CanWindowReceiveEvents(window)) {
      return true;
    }
    // Allow events to fall through to the virtual keyboard even if
    // displaying a system modal dialog.
    if (root_controller->IsVirtualKeyboardWindow(window))
      return true;
  }
  return false;
}
////////////////////////////////////////////////////////////////////////////////
// Shell, ui::EventTarget overrides:
// Shell is the root of the event-target hierarchy and accepts every event.
bool Shell::CanAcceptEvent(const ui::Event& event) {
  return true;
}
// The aura environment is Shell's parent in the event-target hierarchy.
ui::EventTarget* Shell::GetParentTarget() {
  return aura::Env::GetInstance();
}
// Shell exposes no child targets to iterate over; returns a null iterator.
scoped_ptr<ui::EventTargetIterator> Shell::GetChildIterator() const {
  return scoped_ptr<ui::EventTargetIterator>();
}
// Shell is never asked to target events itself; reaching this is a bug.
ui::EventTargeter* Shell::GetEventTargeter() {
  NOTREACHED();
  return NULL;
}
// No per-event processing happens at the Shell level; intentionally empty.
void Shell::OnEvent(ui::Event* event) {
}
////////////////////////////////////////////////////////////////////////////////
// Shell, aura::client::ActivationChangeObserver implementation:
// Tracks the root window of the most recently activated window. When
// activation is lost with no successor (|gained_active| is null), the
// previous target root window is intentionally left in place.
void Shell::OnWindowActivated(aura::Window* gained_active,
                              aura::Window* lost_active) {
  if (!gained_active)
    return;
  target_root_window_ = gained_active->GetRootWindow();
}
} // namespace ash
| mohamed--abdel-maksoud/chromium.src | ash/shell.cc | C++ | bsd-3-clause | 43,179 |
/* Main.java -- a standalone viewer for Java applets
Copyright (C) 2003, 2004, 2006 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package gnu.classpath.tools.appletviewer;
import gnu.classpath.tools.getopt.ClasspathToolParser;
import gnu.classpath.tools.getopt.Option;
import gnu.classpath.tools.getopt.OptionException;
import gnu.classpath.tools.getopt.OptionGroup;
import gnu.classpath.tools.getopt.Parser;
import java.applet.Applet;
import java.awt.Dimension;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.ResourceBundle;
/**
 * Entry point of the standalone applet viewer.
 *
 * <p>The viewer runs in one of three modes: as a standalone viewer for a
 * list of URLs containing applet tags, as a standalone viewer for a single
 * applet described entirely by command line options ({@code --code},
 * {@code --codebase}, ...), or as a browser plugin talking to the browser
 * over a pair of named pipes ({@code --plugin}).
 */
class Main
{
  /**
   * The localized strings are kept in a separate file.
   */
  public static final ResourceBundle messages = ResourceBundle.getBundle
    ("gnu.classpath.tools.appletviewer.MessagesBundle");

  /**
   * Returns a class loader for the given codebase and archive list.
   *
   * <p>A fresh loader is created on every call: more than one applet may
   * have to be loaded with different archive lists, so loaders must not be
   * shared.  (A previous revision also stored each loader in a static
   * HashMap that was never read; that write-only cache pinned every
   * AppletClassLoader in memory and has been removed.)
   *
   * @param codebase the applet's codebase URL
   * @param archives the archives listed in the applet tag
   * @return a class loader for the given codebase and archives
   */
  private static ClassLoader getClassLoader(URL codebase, ArrayList archives)
  {
    return new AppletClassLoader(codebase, archives);
  }

  // Values collected from the applet-tag command line options.
  private static String code = null;
  private static String codebase = null;
  private static String archive = null;
  private static List parameters = new ArrayList();
  private static Dimension dimensions = new Dimension(-1, -1);

  // Named pipes used to communicate with the browser in --plugin mode.
  private static String pipeInName = null;
  private static String pipeOutName = null;
  private static boolean pluginMode = false;

  private static Parser parser = null;

  /**
   * Instantiates the applet described by the given tag.
   *
   * <p>Any failure while loading or instantiating the applet class is
   * printed to stderr and an {@link ErrorApplet} placeholder is returned,
   * so callers always receive a non-null applet.
   *
   * @param tag the parsed applet tag
   * @return the instantiated applet, or an error placeholder on failure
   */
  static Applet createApplet(AppletTag tag)
  {
    Applet applet = null;

    try
      {
        ClassLoader loader = getClassLoader(tag.prependCodeBase(""),
                                            tag.getArchives());
        String code = tag.getCode();

        // The code attribute may name the class file itself; strip the
        // ".class" suffix and turn the slash-separated path into a
        // fully-qualified class name.
        if (code.endsWith(".class"))
          code = code.substring(0, code.length() - 6).replace('/', '.');

        Class c = loader.loadClass(code);
        applet = (Applet) c.newInstance();
      }
    catch (Exception e)
      {
        e.printStackTrace();
      }

    if (applet == null)
      applet = new ErrorApplet("Error loading applet");

    return applet;
  }

  /** Whether --verbose was given; enables raw-argument echoing. */
  protected static boolean verbose;

  /**
   * The main method starting the applet viewer.
   *
   * @param args the arguments given on the command line.
   *
   * @exception IOException if an error occurs.
   */
  public static void main(String[] args) throws IOException
  {
    parser = new ClasspathToolParser("appletviewer", true);
    parser.setHeader("usage: appletviewer [OPTION] -code CODE | URL...");

    // Options mirroring the attributes of an HTML applet tag.
    OptionGroup attributeGroup = new OptionGroup("Applet tag options");

    attributeGroup.add(new Option("code", Main.messages.getString
                                  ("gcjwebplugin.code_description"),
                                  "CODE")
      {
        public void parsed(String argument) throws OptionException
        {
          code = argument;
        }
      });
    attributeGroup.add(new Option("codebase", Main.messages.getString
                                  ("gcjwebplugin.codebase_description"),
                                  "CODEBASE")
      {
        public void parsed(String argument) throws OptionException
        {
          codebase = argument;
        }
      });
    attributeGroup.add(new Option("archive", Main.messages.getString
                                  ("gcjwebplugin.archive_description"),
                                  "ARCHIVE")
      {
        public void parsed(String argument) throws OptionException
        {
          archive = argument;
        }
      });
    attributeGroup.add(new Option("width", Main.messages.getString
                                  ("gcjwebplugin.width_description"),
                                  "WIDTH")
      {
        public void parsed(String argument) throws OptionException
        {
          dimensions.width = Integer.parseInt(argument);
        }
      });
    attributeGroup.add(new Option("height", Main.messages.getString
                                  ("gcjwebplugin.height_description"),
                                  "HEIGHT")
      {
        public void parsed(String argument) throws OptionException
        {
          dimensions.height = Integer.parseInt(argument);
        }
      });
    attributeGroup.add(new Option("param", Main.messages.getString
                                  ("gcjwebplugin.param_description"),
                                  "NAME,VALUE")
      {
        public void parsed(String argument) throws OptionException
        {
          parameters.add(argument);
        }
      });

    OptionGroup pluginGroup = new OptionGroup("Plugin option");
    pluginGroup.add(new Option("plugin", Main.messages.getString
                               ("gcjwebplugin.plugin_description"),
                               "INPUT,OUTPUT")
      {
        public void parsed(String argument) throws OptionException
        {
          // The argument is "INPUT,OUTPUT": the pipe names, separated by
          // the first comma.
          pluginMode = true;
          int comma = argument.indexOf(',');
          pipeInName = argument.substring(0, comma);
          pipeOutName = argument.substring(comma + 1);
        }
      });

    OptionGroup debuggingGroup = new OptionGroup("Debugging option");
    debuggingGroup.add(new Option("verbose", Main.messages.getString
                                  ("gcjwebplugin.verbose_description"),
                                  (String) null)
      {
        public void parsed(String argument) throws OptionException
        {
          verbose = true;
        }
      });

    // Options accepted for compatibility with Sun's appletviewer but not
    // (fully) implemented here.
    OptionGroup compatibilityGroup = new OptionGroup("Compatibility options");
    compatibilityGroup.add(new Option("debug", Main.messages.getString
                                      ("gcjwebplugin.debug_description"),
                                      (String) null)
      {
        public void parsed(String argument) throws OptionException
        {
          // Currently ignored.
        }
      });
    compatibilityGroup.add(new Option("encoding", Main.messages.getString
                                      ("gcjwebplugin.encoding_description"),
                                      "CHARSET")
      {
        public void parsed(String argument) throws OptionException
        {
          // FIXME: We should probably be using
          // java.nio.charset.CharsetDecoder to handle the encoding. What
          // is the status of Classpath's implementation?
        }
      });

    parser.add(attributeGroup);
    parser.add(pluginGroup);
    parser.add(debuggingGroup);
    parser.add(compatibilityGroup);

    String[] urls = parser.parse(args);

    // Print arguments.
    printArguments(args);

    args = urls;

    // Fill in dimensions not given on the command line: height defaults to
    // 200 pixels, width to 1.6 times the height.
    if (dimensions.height < 0)
      dimensions.height = 200;

    if (dimensions.width < 0)
      dimensions.width = (int) (1.6 * dimensions.height);

    //System.setSecurityManager(new AppletSecurityManager(pluginMode));

    if (pluginMode)
      {
        // Plugin mode: talk to the browser over the named pipes given with
        // --plugin.
        InputStream in;
        OutputStream out;

        in = new FileInputStream(pipeInName);
        out = new FileOutputStream(pipeOutName);

        PluginAppletViewer.start(in, out);
      }
    else
      {
        if (code == null)
          {
            // The --code option wasn't given and there are no URL
            // arguments so we have nothing to work with.
            if (args.length == 0)
              {
                System.err.println(Main.messages.getString("gcjwebplugin.no_input_files"));
                System.exit(1);
              }
            // Create a standalone appletviewer from a list of URLs.
            new StandaloneAppletViewer(args);
          }
        else
          {
            // Create a standalone appletviewer from the --code
            // option.
            new StandaloneAppletViewer(code, codebase, archive, parameters, dimensions);
          }
      }
  }

  /**
   * Echoes the raw command line arguments to stdout when --verbose is set.
   *
   * @param args the raw arguments as passed to main
   */
  static void printArguments(String[] args)
  {
    if (verbose)
      {
        System.out.println("raw arguments:");

        for (int i = 0; i < args.length; i++)
          System.out.println(" " + args[i]);
      }
  }
}
| shaotuanchen/sunflower_exp | tools/source/gcc-4.2.4/libjava/classpath/tools/gnu/classpath/tools/appletviewer/Main.java | Java | bsd-3-clause | 9,974 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdint.h>
#include <stdio.h>
#include "base/at_exit.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/process/launch.h"
#include "base/strings/string_number_conversions.h"
#include "base/test/test_timeouts.h"
#include "net/test/python_utils.h"
#include "sync/test/local_sync_test_server.h"
// Writes the command line usage summary to stdout.
static void PrintUsage() {
  fputs("run_sync_testserver [--port=<port>] [--xmpp-port=<xmpp_port>]\n",
        stdout);
}
// Launches the chromiumsync_test.py or xmppserver_test.py scripts, which
// test the sync HTTP and XMPP server functionality respectively. Returns
// false if any setup step (python path, script path, python command, or
// process launch) fails.
static bool RunSyncTest(
    const base::FilePath::StringType& sync_test_script_name) {
  scoped_ptr<syncer::LocalSyncTestServer> server(
      new syncer::LocalSyncTestServer());
  if (!server->SetPythonPath()) {
    LOG(ERROR) << "Error trying to set python path. Exiting.";
    return false;
  }

  base::FilePath script_path;
  if (!server->GetTestScriptPath(sync_test_script_name, &script_path)) {
    LOG(ERROR) << "Error trying to get path for test script "
               << sync_test_script_name;
    return false;
  }

  base::CommandLine python_cmd(base::CommandLine::NO_PROGRAM);
  if (!GetPythonCommand(&python_cmd)) {
    LOG(ERROR) << "Could not get python runtime command.";
    return false;
  }
  python_cmd.AppendArgPath(script_path);

  if (!base::LaunchProcess(python_cmd, base::LaunchOptions()).IsValid()) {
    LOG(ERROR) << "Failed to launch test script " << sync_test_script_name;
    return false;
  }
  return true;
}
// Gets a port value from the switch with name |switch_name| and writes it to
// |port|. Returns true if a valid port (0-65535) was provided and false
// otherwise, matching the documented contract (the previous implementation
// returned true even when the switch was absent, and silently truncated
// out-of-range values through the uint16_t cast). |*port| is always set --
// to 0 on failure -- so callers that ignore the return value still get a
// sane "pick any port" default.
static bool GetPortFromSwitch(const std::string& switch_name, uint16_t* port) {
  DCHECK(port != NULL) << "|port| is NULL";
  *port = 0;
  base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
  if (!command_line->HasSwitch(switch_name)) {
    // No switch given: no port was provided.
    return false;
  }
  std::string port_str = command_line->GetSwitchValueASCII(switch_name);
  int port_int = 0;
  if (!base::StringToInt(port_str, &port_int)) {
    // The switch value is not a number.
    return false;
  }
  if (port_int < 0 || port_int > 65535) {
    // Outside the valid TCP/UDP port range; do not truncate silently.
    return false;
  }
  *port = static_cast<uint16_t>(port_int);
  return true;
}
// Starts a local python sync test server (or runs its python self-tests),
// then services its message loop until interrupted. Initialization order
// matters: the AtExitManager and message loop must exist before any
// base/test machinery below runs.
int main(int argc, const char* argv[]) {
  base::AtExitManager at_exit_manager;
  base::MessageLoopForIO message_loop;

  // Process command line
  base::CommandLine::Init(argc, argv);
  base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();

  // Log to every destination, including sync_testserver.log, so failures
  // can be diagnosed after the run.
  logging::LoggingSettings settings;
  settings.logging_dest = logging::LOG_TO_ALL;
  settings.log_file = FILE_PATH_LITERAL("sync_testserver.log");
  if (!logging::InitLogging(settings)) {
    printf("Error: could not initialize logging. Exiting.\n");
    return -1;
  }

  TestTimeouts::Initialize();

  if (command_line->HasSwitch("help")) {
    PrintUsage();
    return 0;
  }

  // --sync-test / --xmpp-test run the python self-test scripts and exit
  // instead of starting a server.
  if (command_line->HasSwitch("sync-test")) {
    return RunSyncTest(FILE_PATH_LITERAL("chromiumsync_test.py")) ? 0 : -1;
  }

  if (command_line->HasSwitch("xmpp-test")) {
    return RunSyncTest(FILE_PATH_LITERAL("xmppserver_test.py")) ? 0 : -1;
  }

  // Ports stay 0 ("pick any free port") when the switches are absent.
  uint16_t port = 0;
  GetPortFromSwitch("port", &port);

  uint16_t xmpp_port = 0;
  GetPortFromSwitch("xmpp-port", &xmpp_port);

  scoped_ptr<syncer::LocalSyncTestServer> test_server(
      new syncer::LocalSyncTestServer(port, xmpp_port));
  if (!test_server->Start()) {
    printf("Error: failed to start python sync test server. Exiting.\n");
    return -1;
  }

  printf("Python sync test server running at %s (type ctrl+c to exit)\n",
         test_server->host_port_pair().ToString().c_str());

  // Serve until the process is interrupted.
  message_loop.Run();
  return 0;
}
| js0701/chromium-crosswalk | sync/tools/testserver/run_sync_testserver.cc | C++ | bsd-3-clause | 3,925 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/plugin_loader_posix.h"
#include <stddef.h>
#include <stdint.h>
#include "base/at_exit.h"
#include "base/bind.h"
#include "base/files/file_path.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop.h"
#include "base/strings/utf_string_conversions.h"
#include "content/browser/browser_thread_impl.h"
#include "content/common/plugin_list.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using base::ASCIIToUTF16;
namespace content {
// PluginLoaderPosix subclass that mocks out process launching and exposes
// the loader's protected state and IPC handlers to the tests below.
class MockPluginLoaderPosix : public PluginLoaderPosix {
 public:
  MOCK_METHOD0(LoadPluginsInternal, void(void));

  // Number of queued GetPlugins() callbacks not yet run.
  size_t number_of_pending_callbacks() {
    return callbacks_.size();
  }

  // The mutable list of plugin paths scheduled for loading.
  std::vector<base::FilePath>* canonical_list() {
    return &canonical_list_;
  }

  // Index into canonical_list() of the next plugin to be loaded.
  size_t next_load_index() {
    return next_load_index_;
  }

  // Plugins successfully loaded so far.
  const std::vector<WebPluginInfo>& loaded_plugins() {
    return loaded_plugins_;
  }

  // Plugins registered as internal; used as a fallback when a disk load
  // fails (see the InternalPlugin test).
  std::vector<WebPluginInfo>* internal_plugins() {
    return &internal_plugins_;
  }

  // Invokes the real (non-mocked) implementation, for tests that need the
  // full load path to run.
  void RealLoadPluginsInternal() {
    PluginLoaderPosix::LoadPluginsInternal();
  }

  bool LaunchUtilityProcess() override {
    // This method always does nothing and returns false. The actual
    // implementation of this method launches another process, which is not
    // very unit_test friendly.
    return false;
  }

  // Public wrappers for the protected IPC notification handlers.
  void TestOnPluginLoaded(uint32_t index, const WebPluginInfo& plugin) {
    OnPluginLoaded(index, plugin);
  }

  void TestOnPluginLoadFailed(uint32_t index, const base::FilePath& path) {
    OnPluginLoadFailed(index, path);
  }

 protected:
  // RefCounted; destroyed via Release().
  virtual ~MockPluginLoaderPosix() {}
};
// GetPlugins() callback used by the tests: bumps |run_count| so each test
// can count how many queued requests have been answered.
void VerifyCallback(int* run_count, const std::vector<WebPluginInfo>&) {
  *run_count += 1;
}
// Fixture providing three fake plugins, the browser threads the loader
// expects, and a MockPluginLoaderPosix instance. Member declaration order
// matters: the message loop must outlive the thread wrappers built on it.
class PluginLoaderPosixTest : public testing::Test {
 public:
  PluginLoaderPosixTest()
      : plugin1_(ASCIIToUTF16("plugin1"), base::FilePath("/tmp/one.plugin"),
                 ASCIIToUTF16("1.0"), base::string16()),
        plugin2_(ASCIIToUTF16("plugin2"), base::FilePath("/tmp/two.plugin"),
                 ASCIIToUTF16("2.0"), base::string16()),
        plugin3_(ASCIIToUTF16("plugin3"), base::FilePath("/tmp/three.plugin"),
                 ASCIIToUTF16("3.0"), base::string16()),
        file_thread_(BrowserThread::FILE, &message_loop_),
        io_thread_(BrowserThread::IO, &message_loop_),
        plugin_loader_(new MockPluginLoaderPosix) {
  }

  void SetUp() override { PluginServiceImpl::GetInstance()->Init(); }

  base::MessageLoop* message_loop() { return &message_loop_; }
  MockPluginLoaderPosix* plugin_loader() { return plugin_loader_.get(); }

  // Seeds the loader's canonical list with the three fixture plugins.
  void AddThreePlugins() {
    plugin_loader_->canonical_list()->clear();
    plugin_loader_->canonical_list()->push_back(plugin1_.path);
    plugin_loader_->canonical_list()->push_back(plugin2_.path);
    plugin_loader_->canonical_list()->push_back(plugin3_.path);
  }

  // Data used for testing.
  WebPluginInfo plugin1_;
  WebPluginInfo plugin2_;
  WebPluginInfo plugin3_;

 private:
  // Destroys PluginService and PluginList.
  base::ShadowingAtExitManager at_exit_manager_;
  // Both "threads" below share this single test message loop.
  base::MessageLoopForIO message_loop_;
  BrowserThreadImpl file_thread_;
  BrowserThreadImpl io_thread_;
  scoped_refptr<MockPluginLoaderPosix> plugin_loader_;
};
// Multiple GetPlugins() requests issued while a load is pending share one
// internal load pass, and all queued callbacks run when it completes.
TEST_F(PluginLoaderPosixTest, QueueRequests) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);
  plugin_loader()->GetPlugins(callback);

  // Both requests are served by a single load.
  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  message_loop()->RunUntilIdle();

  // Nothing is delivered until the load finishes.
  EXPECT_EQ(0, did_callback);

  plugin_loader()->canonical_list()->clear();
  plugin_loader()->canonical_list()->push_back(plugin1_.path);
  plugin_loader()->TestOnPluginLoaded(0, plugin1_);
  message_loop()->RunUntilIdle();

  // Completing the single-plugin list answers both queued callbacks.
  EXPECT_EQ(2, did_callback);
}
// If the plugin list is invalidated between two requests, the second
// request triggers its own load and is only fulfilled by that later pass.
TEST_F(PluginLoaderPosixTest, QueueRequestsAndInvalidate) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);

  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  message_loop()->RunUntilIdle();

  EXPECT_EQ(0, did_callback);

  ::testing::Mock::VerifyAndClearExpectations(plugin_loader());

  // Invalidate the plugin list, then queue up another request.
  PluginList::Singleton()->RefreshPlugins();
  plugin_loader()->GetPlugins(callback);

  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);

  plugin_loader()->canonical_list()->clear();
  plugin_loader()->canonical_list()->push_back(plugin1_.path);
  plugin_loader()->TestOnPluginLoaded(0, plugin1_);
  message_loop()->RunUntilIdle();

  // Only the first request should have been fulfilled.
  EXPECT_EQ(1, did_callback);

  plugin_loader()->canonical_list()->clear();
  plugin_loader()->canonical_list()->push_back(plugin1_.path);
  plugin_loader()->TestOnPluginLoaded(0, plugin1_);
  message_loop()->RunUntilIdle();

  EXPECT_EQ(2, did_callback);
}
// Walks one request through three successful loads, checking the load
// index and accumulated plugin list after each step; the callback only
// fires once the whole canonical list has been processed.
TEST_F(PluginLoaderPosixTest, ThreeSuccessfulLoads) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);

  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  message_loop()->RunUntilIdle();

  AddThreePlugins();

  EXPECT_EQ(0u, plugin_loader()->next_load_index());

  const std::vector<WebPluginInfo>& plugins(plugin_loader()->loaded_plugins());

  plugin_loader()->TestOnPluginLoaded(0, plugin1_);
  EXPECT_EQ(1u, plugin_loader()->next_load_index());
  EXPECT_EQ(1u, plugins.size());
  EXPECT_EQ(plugin1_.name, plugins[0].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  plugin_loader()->TestOnPluginLoaded(1, plugin2_);
  EXPECT_EQ(2u, plugin_loader()->next_load_index());
  EXPECT_EQ(2u, plugins.size());
  EXPECT_EQ(plugin2_.name, plugins[1].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  // Third and final plugin: the callback now runs.
  plugin_loader()->TestOnPluginLoaded(2, plugin3_);
  EXPECT_EQ(3u, plugins.size());
  EXPECT_EQ(plugin3_.name, plugins[2].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(1, did_callback);
}
// Same as ThreeSuccessfulLoads, but the utility process crashes after all
// plugins have loaded. The crash accounts for the second expected
// LoadPluginsInternal call and must not re-run the (already-fired)
// callback.
TEST_F(PluginLoaderPosixTest, ThreeSuccessfulLoadsThenCrash) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);

  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(2);
  message_loop()->RunUntilIdle();

  AddThreePlugins();

  EXPECT_EQ(0u, plugin_loader()->next_load_index());

  const std::vector<WebPluginInfo>& plugins(plugin_loader()->loaded_plugins());

  plugin_loader()->TestOnPluginLoaded(0, plugin1_);
  EXPECT_EQ(1u, plugin_loader()->next_load_index());
  EXPECT_EQ(1u, plugins.size());
  EXPECT_EQ(plugin1_.name, plugins[0].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  plugin_loader()->TestOnPluginLoaded(1, plugin2_);
  EXPECT_EQ(2u, plugin_loader()->next_load_index());
  EXPECT_EQ(2u, plugins.size());
  EXPECT_EQ(plugin2_.name, plugins[1].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  plugin_loader()->TestOnPluginLoaded(2, plugin3_);
  EXPECT_EQ(3u, plugins.size());
  EXPECT_EQ(plugin3_.name, plugins[2].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(1, did_callback);

  // Crash after completion; did_callback stays at 1.
  plugin_loader()->OnProcessCrashed(42);
}
// Failed loads are skipped: the loader advances past them and the callback
// ultimately reports only the plugins that loaded successfully.
TEST_F(PluginLoaderPosixTest, TwoFailures) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);

  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  message_loop()->RunUntilIdle();

  AddThreePlugins();

  EXPECT_EQ(0u, plugin_loader()->next_load_index());

  const std::vector<WebPluginInfo>& plugins(plugin_loader()->loaded_plugins());

  // First plugin fails: index advances, nothing recorded.
  plugin_loader()->TestOnPluginLoadFailed(0, plugin1_.path);
  EXPECT_EQ(1u, plugin_loader()->next_load_index());
  EXPECT_EQ(0u, plugins.size());

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  plugin_loader()->TestOnPluginLoaded(1, plugin2_);
  EXPECT_EQ(2u, plugin_loader()->next_load_index());
  EXPECT_EQ(1u, plugins.size());
  EXPECT_EQ(plugin2_.name, plugins[0].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  // Last plugin fails too: only plugin2_ remains in the result.
  plugin_loader()->TestOnPluginLoadFailed(2, plugin3_.path);
  EXPECT_EQ(1u, plugins.size());

  message_loop()->RunUntilIdle();
  EXPECT_EQ(1, did_callback);
}
// If the utility process crashes mid-load, the loader restarts with a
// canonical list that drops both the plugins already loaded and the one
// that was in flight when the crash happened.
TEST_F(PluginLoaderPosixTest, CrashedProcess) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);

  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  message_loop()->RunUntilIdle();

  AddThreePlugins();

  EXPECT_EQ(0u, plugin_loader()->next_load_index());

  const std::vector<WebPluginInfo>& plugins(plugin_loader()->loaded_plugins());

  plugin_loader()->TestOnPluginLoaded(0, plugin1_);
  EXPECT_EQ(1u, plugin_loader()->next_load_index());
  EXPECT_EQ(1u, plugins.size());
  EXPECT_EQ(plugin1_.name, plugins[0].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  // Crash while plugin2_ was being loaded: the retry list contains only
  // plugin3_ (plugin1_ already loaded, plugin2_ dropped).
  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  plugin_loader()->OnProcessCrashed(42);
  EXPECT_EQ(1u, plugin_loader()->canonical_list()->size());
  EXPECT_EQ(0u, plugin_loader()->next_load_index());
  EXPECT_EQ(plugin3_.path.value(),
            plugin_loader()->canonical_list()->at(0).value());
}
// A load failure for a plugin registered in internal_plugins() falls back
// to the registered WebPluginInfo instead of being dropped, and the entry
// is consumed from the internal list.
TEST_F(PluginLoaderPosixTest, InternalPlugin) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);

  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  message_loop()->RunUntilIdle();

  // Register plugin2_ as an internal plugin.
  plugin2_.path = base::FilePath("/internal/plugin.plugin");

  AddThreePlugins();

  plugin_loader()->internal_plugins()->clear();
  plugin_loader()->internal_plugins()->push_back(plugin2_);

  EXPECT_EQ(0u, plugin_loader()->next_load_index());

  const std::vector<WebPluginInfo>& plugins(plugin_loader()->loaded_plugins());

  plugin_loader()->TestOnPluginLoaded(0, plugin1_);
  EXPECT_EQ(1u, plugin_loader()->next_load_index());
  EXPECT_EQ(1u, plugins.size());
  EXPECT_EQ(plugin1_.name, plugins[0].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  // Internal plugins can fail to load if they're built-in with manual
  // entrypoint functions.
  plugin_loader()->TestOnPluginLoadFailed(1, plugin2_.path);
  EXPECT_EQ(2u, plugin_loader()->next_load_index());
  EXPECT_EQ(2u, plugins.size());
  EXPECT_EQ(plugin2_.name, plugins[1].name);
  EXPECT_EQ(0u, plugin_loader()->internal_plugins()->size());

  message_loop()->RunUntilIdle();
  EXPECT_EQ(0, did_callback);

  plugin_loader()->TestOnPluginLoaded(2, plugin3_);
  EXPECT_EQ(3u, plugins.size());
  EXPECT_EQ(plugin3_.name, plugins[2].name);

  message_loop()->RunUntilIdle();
  EXPECT_EQ(1, did_callback);
}
// If every load attempt ends in a crash, the loader eventually gives up
// and runs the callback with an empty plugin list.
TEST_F(PluginLoaderPosixTest, AllCrashed) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));

  plugin_loader()->GetPlugins(callback);

  // Spin the loop so that the canonical list of plugins can be set.
  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(1);
  message_loop()->RunUntilIdle();

  AddThreePlugins();

  EXPECT_EQ(0u, plugin_loader()->next_load_index());

  // Mock the first two calls like normal.
  testing::Expectation first =
      EXPECT_CALL(*plugin_loader(), LoadPluginsInternal()).Times(2);
  // On the last call, go through the default impl.
  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal())
      .After(first)
      .WillOnce(
          testing::Invoke(plugin_loader(),
                          &MockPluginLoaderPosix::RealLoadPluginsInternal));

  // One crash per plugin in the canonical list.
  plugin_loader()->OnProcessCrashed(42);
  plugin_loader()->OnProcessCrashed(42);
  plugin_loader()->OnProcessCrashed(42);
  message_loop()->RunUntilIdle();

  EXPECT_EQ(1, did_callback);

  EXPECT_EQ(0u, plugin_loader()->loaded_plugins().size());
}
// Exercises the path where the real LoadPluginsInternal implementation runs
// (which cannot launch a utility process in this unit-test environment):
// the callback must still be invoked once with an empty plugin list rather
// than hanging forever.
TEST_F(PluginLoaderPosixTest, PluginLaunchFailed) {
  int did_callback = 0;
  PluginService::GetPluginsCallback callback =
      base::Bind(&VerifyCallback, base::Unretained(&did_callback));
  EXPECT_CALL(*plugin_loader(), LoadPluginsInternal())
      .WillOnce(testing::Invoke(
          plugin_loader(), &MockPluginLoaderPosix::RealLoadPluginsInternal));
  plugin_loader()->GetPlugins(callback);
  message_loop()->RunUntilIdle();
  EXPECT_EQ(1, did_callback);
  EXPECT_EQ(0u, plugin_loader()->loaded_plugins().size());
  // TODO(erikchen): This is a genuine leak that should be fixed.
  // https://code.google.com/p/chromium/issues/detail?id=431906
  testing::Mock::AllowLeak(plugin_loader());
}
} // namespace content
| js0701/chromium-crosswalk | content/browser/plugin_loader_posix_unittest.cc | C++ | bsd-3-clause | 13,212 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/audio/scoped_loop_observer.h"
#include "base/bind.h"
#include "base/synchronization/waitable_event.h"
namespace media {
// Registers |this| as a destruction observer of the thread/loop backing
// |loop| as soon as the observer is created.
ScopedLoopObserver::ScopedLoopObserver(
    const scoped_refptr<base::MessageLoopProxy>& loop)
    : loop_(loop) {
  ObserveLoopDestruction(true, NULL);
}
// Symmetrically removes the destruction observer added in the constructor.
ScopedLoopObserver::~ScopedLoopObserver() {
  ObserveLoopDestruction(false, NULL);
}
// Adds or removes |this| as a DestructionObserver of the message loop that
// |loop_| runs on.  Registration must happen on the loop's own thread, so a
// call from another thread posts a re-entrant call over to that thread and
// blocks on a local event until the posted call signals |done|.
void ScopedLoopObserver::ObserveLoopDestruction(bool enable,
                                                base::WaitableEvent* done) {
  // Note: |done| may be NULL.
  if (loop_->BelongsToCurrentThread()) {
    MessageLoop* loop = MessageLoop::current();
    if (enable) {
      loop->AddDestructionObserver(this);
    } else {
      loop->RemoveDestructionObserver(this);
    }
  } else {
    // Hop to the loop's thread and wait for the re-entrant call to finish.
    base::WaitableEvent event(false, false);
    if (loop_->PostTask(FROM_HERE,
                        base::Bind(&ScopedLoopObserver::ObserveLoopDestruction,
                                   base::Unretained(this), enable, &event))) {
      event.Wait();
    } else {
      // The message loop's thread has already terminated, so no need to wait.
    }
  }
  if (done)
    done->Signal();
}
} // namespace media.
| timopulkkinen/BubbleFish | media/audio/scoped_loop_observer.cc | C++ | bsd-3-clause | 1,340 |
<?php
/**
* PHP configuration based AclInterface implementation
*
* PHP 5
*
* CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
* Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
*
* Licensed under The MIT License
* Redistributions of files must retain the above copyright notice.
*
* @copyright Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
* @link http://cakephp.org CakePHP(tm) Project
* @package Cake.Controller.Component.Acl
* @since CakePHP(tm) v 2.1
* @license MIT License (http://www.opensource.org/licenses/mit-license.php)
*/
/**
* PhpAcl implements an access control system using a plain PHP configuration file.
* An example file can be found in app/Config/acl.php
*
* @package Cake.Controller.Component.Acl
*/
class PhpAcl extends Object implements AclInterface {
	const DENY = false;
	const ALLOW = true;
/**
 * Runtime options.
 *
 * - policy: default result of check() when no rule matches. A deny policy needs explicit allow rules,
 *   an allow policy needs explicit deny rules.
 * - config: absolute path to config file that contains the acl rules (@see app/Config/acl.php)
 *
 * @var array
 */
	public $options = array();
/**
 * Aro Object
 *
 * @var PhpAro
 */
	public $Aro = null;
/**
 * Aco Object
 *
 * @var PhpAco
 */
	public $Aco = null;
/**
 * Constructor
 *
 * Sets the default policy (deny) and the default config file location.
 */
	public function __construct() {
		$this->options = array(
			'policy' => self::DENY,
			'config' => APP . 'Config' . DS . 'acl.php',
		);
	}
/**
 * Initialize method
 *
 * Merges adapter settings from the owning component, reads the ACL rules from
 * the configured PHP file, builds the internal ARO/ACO structures and exposes
 * them on the component.
 *
 * @param AclComponent $Component Component instance
 * @return void
 */
	public function initialize(Component $Component) {
		if (!empty($Component->settings['adapter'])) {
			$this->options = array_merge($this->options, $Component->settings['adapter']);
		}
		App::uses('PhpReader', 'Configure');
		// Read basename($config) relative to its own directory.
		$Reader = new PhpReader(dirname($this->options['config']) . DS);
		$config = $Reader->read(basename($this->options['config']));
		$this->build($config);
		$Component->Aco = $this->Aco;
		$Component->Aro = $this->Aro;
	}
/**
 * build and setup internal ACL representation
 *
 * @param array $config configuration array, see docs
 * @return void
 * @throws AclException When required keys are missing.
 */
	public function build(array $config) {
		if (empty($config['roles'])) {
			throw new AclException(__d('cake_dev','"roles" section not found in configuration.'));
		}
		if (empty($config['rules']['allow']) && empty($config['rules']['deny'])) {
			throw new AclException(__d('cake_dev','Neither "allow" nor "deny" rules were provided in configuration.'));
		}
		$rules['allow'] = !empty($config['rules']['allow']) ? $config['rules']['allow'] : array();
		$rules['deny'] = !empty($config['rules']['deny']) ? $config['rules']['deny'] : array();
		$roles = !empty($config['roles']) ? $config['roles'] : array();
		$map = !empty($config['map']) ? $config['map'] : array();
		$alias = !empty($config['alias']) ? $config['alias'] : array();
		$this->Aro = new PhpAro($roles, $map, $alias);
		$this->Aco = new PhpAco($rules);
	}
/**
 * Add an allow rule for an ARO on an ACO at runtime. The rule only affects the
 * in-memory rule set; the configuration file is not written back.
 *
 * @param string $aro ARO The requesting object identifier.
 * @param string $aco ACO The controlled object identifier.
 * @param string $action Action (defaults to *)
 * @return boolean Success (note: PhpAco::access() has no return statement, so
 *   the effective return value is null)
 */
	public function allow($aro, $aco, $action = "*") {
		return $this->Aco->access($this->Aro->resolve($aro), $aco, $action, 'allow');
	}
/**
 * Add a deny rule for an ARO on an ACO at runtime (in-memory only, see allow()).
 *
 * @param string $aro ARO The requesting object identifier.
 * @param string $aco ACO The controlled object identifier.
 * @param string $action Action (defaults to *)
 * @return boolean Success (note: PhpAco::access() has no return statement, so
 *   the effective return value is null)
 */
	public function deny($aro, $aco, $action = "*") {
		return $this->Aco->access($this->Aro->resolve($aro), $aco, $action, 'deny');
	}
/**
 * No op method, inheritance cannot be granted at runtime with PhpAcl.
 *
 * @param string $aro ARO The requesting object identifier.
 * @param string $aco ACO The controlled object identifier.
 * @param string $action Action (defaults to *)
 * @return boolean Success (always false)
 */
	public function inherit($aro, $aco, $action = "*") {
		return false;
	}
/**
 * Main ACL check function. Checks to see if the ARO (access request object) has access to the
 * ACO (access control object).
 *
 * @param string $aro ARO
 * @param string $aco ACO
 * @param string $action Action
 * @return boolean true if access is granted, false otherwise
 */
	public function check($aro, $aco, $action = "*") {
		// Start from the configured default policy.
		$allow = $this->options['policy'];
		$prioritizedAros = $this->Aro->roles($aro);
		if ($action && $action != "*") {
			$aco .= '/' . $action;
		}
		$path = $this->Aco->path($aco);
		if (empty($path)) {
			return $allow;
		}
		// Walk the ACO path from root to leaf: on each level, matching allow
		// rules are OR-ed in and matching deny rules AND-ed out, so rules on
		// deeper levels can override rules made further up the path.
		foreach ($path as $depth => $node) {
			foreach ($prioritizedAros as $aros) {
				if (!empty($node['allow'])) {
					$allow = $allow || count(array_intersect($node['allow'], $aros)) > 0;
				}
				if (!empty($node['deny'])) {
					$allow = $allow && count(array_intersect($node['deny'], $aros)) == 0;
				}
			}
		}
		return $allow;
	}
}
/**
* Access Control Object
*
*/
class PhpAco {
/**
 * Holds the internal ACO tree. Each node is keyed by its path segment and
 * holds a 'children' array plus optional 'allow'/'deny' lists of AROs.
 *
 * @var array
 */
	protected $_tree = array();
/**
 * Maps modifiers in ACO path segments to their PCRE equivalent ('*' matches
 * any segment).
 *
 * @var array
 */
	public static $modifiers = array(
		'*' => '.*',
	);
/**
 * Constructor
 *
 * @param array $rules Array with optional 'allow' and 'deny' keys, each
 *   mapping ACO paths to ARO identifiers (comma separated string or array).
 */
	public function __construct(array $rules = array()) {
		foreach (array('allow', 'deny') as $type) {
			if (empty($rules[$type])) {
				$rules[$type] = array();
			}
		}
		$this->build($rules['allow'], $rules['deny']);
	}
/**
 * return path to the requested ACO with allow and deny rules attached on each level
 *
 * @param string|array $aco ACO path (e.g. 'controllers/users/view') or an already resolved path array
 * @return array One entry per path level, each holding the merged 'allow'/'deny' rules of all matching nodes
 */
	public function path($aco) {
		$aco = $this->resolve($aco);
		$path = array();
		$stack = array(array($this->_tree, 0));
		while (!empty($stack)) {
			list($root, $level) = array_pop($stack);
			if (empty($path[$level])) {
				$path[$level] = array();
			}
			foreach ($root as $node => $elements) {
				$pattern = '/^' . str_replace(array_keys(self::$modifiers), array_values(self::$modifiers), $node) . '$/';
				if ($node == $aco[$level] || preg_match($pattern, $aco[$level])) {
					// merge allow/denies with $path of current level
					foreach (array('allow', 'deny') as $policy) {
						if (!empty($elements[$policy])) {
							if (empty($path[$level][$policy])) {
								$path[$level][$policy] = array();
							}
							$path[$level][$policy] = array_merge($path[$level][$policy], $elements[$policy]);
						}
					}
					// traverse into matching children while path segments remain
					if (!empty($elements['children']) && isset($aco[$level + 1])) {
						array_push($stack, array($elements['children'], $level + 1));
					}
				}
			}
		}
		return $path;
	}
/**
 * allow/deny ARO access to the given ACO
 *
 * @param string|array $aro One or more ARO identifiers the rule applies to
 * @param string|array $aco ACO path the rule is attached to
 * @param string $action Unused; actions are expected to be encoded as the last path segment
 * @param string $type Rule type, either 'allow' or 'deny'
 * @return void
 */
	public function access($aro, $aco, $action, $type = 'deny') {
		$aco = $this->resolve($aco);
		$depth = count($aco);
		$root = $this->_tree;
		$tree = &$root;
		foreach ($aco as $i => $node) {
			if (!isset($tree[$node])) {
				$tree[$node] = array(
					'children' => array(),
				);
			}
			if ($i < $depth - 1) {
				$tree = &$tree[$node]['children'];
			} else {
				// leaf segment: attach the ARO(s) to this node's allow/deny list
				if (empty($tree[$node][$type])) {
					$tree[$node][$type] = array();
				}
				$tree[$node][$type] = array_merge(is_array($aro) ? $aro : array($aro), $tree[$node][$type]);
			}
		}
		$this->_tree = &$root;
	}
/**
 * resolve given ACO string to a path
 *
 * @param string|array $aco ACO string or already split path array
 * @return array path of lowercased segments
 */
	public function resolve($aco) {
		if (is_array($aco)) {
			return array_map('strtolower', $aco);
		}
		// strip multiple occurrences of '/'
		$aco = preg_replace('#/+#', '/', $aco);
		// make case insensitive
		$aco = ltrim(strtolower($aco), '/');
		return array_filter(array_map('trim', explode('/', $aco)));
	}
/**
 * build a tree representation from the given allow/deny informations for ACO paths
 *
 * @param array $allow ACO allow rules, mapping paths to ARO identifiers
 * @param array $deny ACO deny rules in the same format
 * @return void
 */
	public function build(array $allow, array $deny = array()) {
		$this->_tree = array();
		foreach ($allow as $dotPath => $aros) {
			if (is_string($aros)) {
				$aros = array_map('trim', explode(',', $aros));
			}
			$this->access($aros, $dotPath, null, 'allow');
		}
		foreach ($deny as $dotPath => $aros) {
			if (is_string($aros)) {
				$aros = array_map('trim', explode(',', $aros));
			}
			$this->access($aros, $dotPath, null, 'deny');
		}
	}
}
/**
* Access Request Object
*
*/
class PhpAro {
/**
 * role to resolve to when a provided ARO is not listed in
 * the internal tree
 *
 * @var string
 */
	const DEFAULT_ROLE = 'Role/default';
/**
 * map external identifiers. E.g. if
 *
 * array('User' => array('username' => 'jeff', 'role' => 'editor'))
 *
 * is passed as an ARO to one of the methods of AclComponent, PhpAcl
 * will check if it can be resolved to an User or a Role defined in the
 * configuration file.
 *
 * @var array
 * @see app/Config/acl.php
 */
	public $map = array(
		'User' => 'User/username',
		'Role' => 'User/role',
	);
/**
 * aliases to map
 *
 * @var array
 */
	public $aliases = array();
/**
 * internal ARO representation: maps each role name to the list of roles
 * that directly inherit from it (its children in the inheritance tree)
 *
 * @var array
 */
	protected $_tree = array();
/**
 * Constructor
 *
 * @param array $aro role definitions (role => inherited roles, string or array)
 * @param array $map external identifier mapping; replaces the default $map when non-empty
 * @param array $aliases alias definitions (e.g. 'Role/1' => 'Role/admin')
 */
	public function __construct(array $aro = array(), array $map = array(), array $aliases = array()) {
		if (!empty($map)) {
			$this->map = $map;
		}
		$this->aliases = $aliases;
		$this->build($aro);
	}
/**
 * From the perspective of the given ARO, walk down the tree and
 * collect all inherited AROs levelwise such that AROs from different
 * branches with equal distance to the requested ARO will be collected at the same
 * index. The resulting array will contain a prioritized list of (list of) roles ordered from
 * the most distant AROs to the requested one itself.
 *
 * @param mixed $aro An ARO identifier
 * @return array prioritized AROs
 */
	public function roles($aro) {
		$aros = array();
		$aro = $this->resolve($aro);
		$stack = array(array($aro, 0));
		while (!empty($stack)) {
			list($element, $depth) = array_pop($stack);
			$aros[$depth][] = $element;
			// every role that $element directly inherits from sits one level further out
			foreach ($this->_tree as $node => $children) {
				if (in_array($element, $children)) {
					array_push($stack, array($node, $depth + 1));
				}
			}
		}
		// reverse so the most distant (least specific) roles come first
		return array_reverse($aros);
	}
/**
 * resolve an ARO identifier to an internal ARO string using
 * the internal mapping information.
 *
 * @param mixed $aro ARO identifier (User.jeff, array('User' => ...), etc)
 * @return string internal aro string (e.g. User/jeff, Role/default)
 */
	public function resolve($aro) {
		foreach ($this->map as $aroGroup => $map) {
			list ($model, $field) = explode('/', $map, 2);
			$mapped = '';
			if (is_array($aro)) {
				if (isset($aro['model']) && isset($aro['foreign_key']) && $aro['model'] == $aroGroup) {
					$mapped = $aroGroup . '/' . $aro['foreign_key'];
				} elseif (isset($aro[$model][$field])) {
					$mapped = $aroGroup . '/' . $aro[$model][$field];
				} elseif (isset($aro[$field])) {
					$mapped = $aroGroup . '/' . $aro[$field];
				}
			} elseif (is_string($aro)) {
				$aro = ltrim($aro, '/');
				if (strpos($aro, '/') === false) {
					$mapped = $aroGroup . '/' . $aro;
				} else {
					list($aroModel, $aroValue) = explode('/', $aro, 2);
					$aroModel = Inflector::camelize($aroModel);
					if ($aroModel == $model || $aroModel == $aroGroup) {
						$mapped = $aroGroup . '/' . $aroValue;
					}
				}
			}
			// only accept the mapping if the role actually exists in the tree
			if (isset($this->_tree[$mapped])) {
				return $mapped;
			}
			// is there a matching alias defined (e.g. Role/1 => Role/admin)?
			if (!empty($this->aliases[$mapped])) {
				return $this->aliases[$mapped];
			}
		}
		// unknown AROs fall back to the default role
		return self::DEFAULT_ROLE;
	}
/**
 * adds a new ARO to the tree
 *
 * @param array $aro one or more ARO records (role => inherited roles)
 * @return void
 */
	public function addRole(array $aro) {
		foreach ($aro as $role => $inheritedRoles) {
			if (!isset($this->_tree[$role])) {
				$this->_tree[$role] = array();
			}
			if (!empty($inheritedRoles)) {
				if (is_string($inheritedRoles)) {
					$inheritedRoles = array_map('trim', explode(',', $inheritedRoles));
				}
				foreach ($inheritedRoles as $dependency) {
					// detect cycles (a cycle would break the roles() traversal),
					// warn and skip the offending dependency
					$roles = $this->roles($dependency);
					if (in_array($role, Set::flatten($roles))) {
						$path = '';
						foreach ($roles as $roleDependencies) {
							$path .= implode('|', (array)$roleDependencies) . ' -> ';
						}
						trigger_error(__d('cake_dev', 'cycle detected when inheriting %s from %s. Path: %s', $role, $dependency, $path . $role));
						continue;
					}
					if (!isset($this->_tree[$dependency])) {
						$this->_tree[$dependency] = array();
					}
					$this->_tree[$dependency][] = $role;
				}
			}
		}
	}
/**
 * adds one or more aliases to the internal map. Overwrites existing entries.
 *
 * @param array $alias alias from => to (e.g. Role/13 -> Role/editor)
 * @return void
 */
	public function addAlias(array $alias) {
		$this->aliases = array_merge($this->aliases, $alias);
	}
/**
 * build an ARO tree structure for internal processing
 *
 * @param array $aros array of AROs as key and their inherited AROs as values
 * @return void
 */
	public function build(array $aros) {
		$this->_tree = array();
		$this->addRole($aros);
	}
}
| jcu-eresearch/Edgar | webapplication/lib/Cake/Controller/Component/Acl/PhpAcl.php | PHP | bsd-3-clause | 13,365 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "skia/ext/fontmgr_default.h"
#include "third_party/skia/include/core/SkFontMgr.h"
namespace {
SkDEBUGCODE(bool g_factory_called;)
// This is a purposefully leaky pointer that has ownership of the FontMgr.
SkFontMgr* g_fontmgr_override = nullptr;
} // namespace
namespace skia {
// Installs |fontmgr| as the process-wide default returned by
// SkFontMgr::Factory().  In debug builds this asserts that Factory() has not
// run yet (|g_factory_called|).  Any previous override is unreffed; the new
// one is intentionally leaked (see the comment on |g_fontmgr_override|).
void OverrideDefaultSkFontMgr(sk_sp<SkFontMgr> fontmgr) {
  SkASSERT(!g_factory_called);
  SkSafeUnref(g_fontmgr_override);
  g_fontmgr_override = fontmgr.release();
}
} // namespace skia
SK_API sk_sp<SkFontMgr> SkFontMgr::Factory() {
SkDEBUGCODE(g_factory_called = true;);
return g_fontmgr_override ? sk_ref_sp(g_fontmgr_override)
: skia::CreateDefaultSkFontMgr();
} | nwjs/chromium.src | skia/ext/fontmgr_default.cc | C++ | bsd-3-clause | 873 |
// Copyright © 2010-2015 The CefSharp Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
using System.IO;
namespace CefSharp
{
    //TODO: Eval naming for this interface, not happy with this name
    public interface IResourceHandler
    {
        /// <summary>
        /// Processes request asynchronously.
        /// </summary>
        /// <param name="request">The request object.</param>
        /// <param name="callback">The callback used to Continue or Cancel the request (async).</param>
        /// <returns>true if the request is handled, false otherwise.</returns>
        bool ProcessRequestAsync(IRequest request, ICallback callback);

        /// <summary>
        /// Obtain the response data for a request previously accepted by
        /// <see cref="ProcessRequestAsync"/>.
        /// </summary>
        /// <param name="response">The response object on which status and headers can be set.</param>
        /// <param name="responseLength">Length of the response stream (by CEF convention -1 when unknown — confirm against CefSharp docs).</param>
        /// <param name="redirectUrl">When set to a non-empty value, the request is redirected to that URL instead.</param>
        /// <returns>The stream containing the response body, or null when there is no body.</returns>
        Stream GetResponse(IResponse response, out long responseLength, out string redirectUrl);
    }
| joshvera/CefSharp | CefSharp/IResourceHandler.cs | C# | bsd-3-clause | 844 |
from __future__ import absolute_import
from ..model import Model
from ..core.properties import (Any, Dict, String)
class ImageSource(Model):
    """ A base class for all image source types. """

    # Presumably the positional-argument order used when constructing the
    # model from args — TODO confirm against bokeh's Model machinery.
    _args = ('url', 'extra_url_vars')

    url = String(default="", help="""
    tile service url (example: http://c.tile.openstreetmap.org/{Z}/{X}/{Y}.png)
    """)

    extra_url_vars = Dict(String, Any, help="""
    A dictionary that maps url variable template keys to values.
    These variables are useful for parts of tile urls which do not change from tile to tile (e.g. server host name, or layer name).
    """)
| phobson/bokeh | bokeh/models/images.py | Python | bsd-3-clause | 617 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chromecast.shell;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.accessibility.AccessibilityNodeProvider;
import android.widget.FrameLayout;
import androidx.annotation.Nullable;
import org.chromium.base.Log;
import org.chromium.chromecast.base.CastSwitches;
/**
* View for displaying a WebContents in CastShell.
*
* <p>Intended to be used with {@link android.app.Presentation}.
*
* <p>
* Typically, this class is controlled by CastContentWindowAndroid through
* CastWebContentsSurfaceHelper. If the CastContentWindowAndroid is destroyed,
* CastWebContentsView should be removed from the activity holding it.
* Similarily, if the view is removed from a activity or the activity holding
* it is destroyed, CastContentWindowAndroid should be notified by intent.
*/
public class CastWebContentsView extends FrameLayout {
    private static final String TAG = "CastWebContentV";

    /** Owns the WebContents surface lifecycle; created lazily in onStart(). */
    private CastWebContentsSurfaceHelper mSurfaceHelper;

    public CastWebContentsView(Context context) {
        super(context);
        initView();
    }

    /**
     * Inflates the web-contents layout and stacks a transparent view above it
     * so accessibility highlight rectangles can be drawn over the content.
     */
    private void initView() {
        FrameLayout.LayoutParams fill = new FrameLayout.LayoutParams(
                FrameLayout.LayoutParams.MATCH_PARENT, FrameLayout.LayoutParams.MATCH_PARENT);
        View contents = LayoutInflater.from(getContext())
                                .inflate(R.layout.cast_web_contents_activity, null);
        addView(contents, fill);
        // Transparent overlay for accessibility highlight rectangles.
        addView(new View(getContext()), fill);
    }

    /**
     * Begins hosting the web contents described by {@code startArgumentsBundle}.
     * A second call while a surface helper already exists is a no-op.
     */
    public void onStart(Bundle startArgumentsBundle) {
        Log.d(TAG, "onStart");
        if (mSurfaceHelper != null) {
            return;
        }
        mSurfaceHelper = new CastWebContentsSurfaceHelper(
                CastWebContentsScopes.onLayoutView(getContext(),
                        findViewById(R.id.web_contents_container),
                        CastSwitches.getSwitchValueColor(
                                CastSwitches.CAST_APP_BACKGROUND_COLOR, Color.BLACK),
                        this::getHostWindowToken),
                (Uri uri) -> sendIntentSync(CastWebContentsIntentUtils.onWebContentStopped(uri)));
        CastWebContentsSurfaceHelper.StartParams params =
                CastWebContentsSurfaceHelper.StartParams.fromBundle(startArgumentsBundle);
        if (params != null) {
            mSurfaceHelper.onNewStartParams(params);
        }
    }

    public void onResume() {
        Log.d(TAG, "onResume");
    }

    public void onPause() {
        Log.d(TAG, "onPause");
    }

    public void onStop() {
        Log.d(TAG, "onStop");
        if (mSurfaceHelper != null) {
            mSurfaceHelper.onDestroy();
        }
    }

    @Nullable
    protected IBinder getHostWindowToken() {
        return getWindowToken();
    }

    /** Broadcasts the intent synchronously on the local broadcast manager. */
    private void sendIntentSync(Intent in) {
        CastWebContentsIntentUtils.getLocalBroadcastManager().sendBroadcastSync(in);
    }

    @Override
    public void setAccessibilityDelegate(AccessibilityDelegate delegate) {
        View content = getContentView();
        if (content == null) {
            Log.w(TAG, "Content view is null!");
            return;
        }
        content.setAccessibilityDelegate(delegate);
    }

    @Override
    public boolean onHoverEvent(MotionEvent event) {
        View content = getContentView();
        if (content == null) {
            Log.w(TAG, "Content view is null!");
            return false;
        }
        return content.onHoverEvent(event);
    }

    public AccessibilityNodeProvider getWebContentsAccessibilityNodeProvider() {
        View content = getContentView();
        if (content == null) {
            Log.w(TAG, "Content view is null! Returns a null AccessibilityNodeProvider.");
            return null;
        }
        return content.getAccessibilityNodeProvider();
    }

    /** Returns the child tagged as the content view, or null when absent. */
    private View getContentView() {
        return findViewWithTag(CastWebContentsScopes.VIEW_TAG_CONTENT_VIEW);
    }
}
| ric2b/Vivaldi-browser | chromium/chromecast/browser/android/apk/src/org/chromium/chromecast/shell/CastWebContentsView.java | Java | bsd-3-clause | 4,548 |
require 'test/unit'
require 'rails/version' # For getting the rails version constants
require 'active_support/vendor' # For loading I18n
require 'mocha'
require 'net/http'
require File.dirname(__FILE__) + '/../lib/recaptcha'
class RecaptchaVerifyTest < Test::Unit::TestCase
  # Builds a fake controller plus the exact URI and form data that
  # verify_recaptcha is expected to POST to the reCAPTCHA verify endpoint.
  def setup
    ENV['RECAPTCHA_PRIVATE_KEY'] = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
    @controller = TestController.new
    @controller.request = stub(:remote_ip => "1.1.1.1")
    @controller.params = {:recaptcha_challenge_field => "challenge", :recaptcha_response_field => "response"}
    @expected_post_data = {}
    @expected_post_data["privatekey"] = ENV['RECAPTCHA_PRIVATE_KEY']
    @expected_post_data["remoteip"] = @controller.request.remote_ip
    @expected_post_data["challenge"] = "challenge"
    @expected_post_data["response"] = "response"
    @expected_uri = URI.parse("http://#{Recaptcha::RECAPTCHA_VERIFY_SERVER}/verify")
  end
  # Without a private key the request cannot be built at all.
  def test_should_raise_exception_without_private_key
    assert_raise Recaptcha::RecaptchaError do
      ENV['RECAPTCHA_PRIVATE_KEY'] = nil
      @controller.verify_recaptcha
    end
  end
  # A "false" body means verification failed; the reason lands in the session.
  def test_should_return_false_when_key_is_invalid
    expect_http_post(response_with_body("false\ninvalid-site-private-key"))
    assert !@controller.verify_recaptcha
    assert_equal "invalid-site-private-key", @controller.session[:recaptcha_error]
  end
  # A "true" body means success and clears any stale error from the session.
  def test_returns_true_on_success
    @controller.session[:recaptcha_error] = "previous error that should be cleared"
    expect_http_post(response_with_body("true\n"))
    assert @controller.verify_recaptcha
    assert_nil @controller.session[:recaptcha_error]
  end
  # When a :model is passed, a failure adds a validation error on :base.
  def test_errors_should_be_added_to_model
    expect_http_post(response_with_body("false\nbad-news"))
    errors = mock
    errors.expects(:add).with(:base, "Captcha response is incorrect, please try again.")
    model = mock(:valid? => false, :errors => errors)
    assert !@controller.verify_recaptcha(:model => model)
    assert_equal "bad-news", @controller.session[:recaptcha_error]
  end
  # An explicitly supplied :private_key overrides the environment variable.
  def test_returns_true_on_success_with_optional_key
    @controller.session[:recaptcha_error] = "previous error that should be cleared"
    # reset private key
    @expected_post_data["privatekey"] = 'ADIFFERENTPRIVATEKEYXXXXXXXXXXXXXX'
    expect_http_post(response_with_body("true\n"))
    assert @controller.verify_recaptcha(:private_key => 'ADIFFERENTPRIVATEKEYXXXXXXXXXXXXXX')
    assert_nil @controller.session[:recaptcha_error]
  end
  # A network timeout is reported via a dedicated error token, not raised.
  def test_timeout
    expect_http_post(Timeout::Error, :exception => true)
    assert !@controller.verify_recaptcha()
    assert_equal "recaptcha-not-reachable", @controller.session[:recaptcha_error]
  end
  private
  # Minimal stand-in for an ActionController including the Verify mixin.
  class TestController
    include Recaptcha::Verify
    attr_accessor :request, :params, :session
    def initialize
      @session = {}
    end
  end
  # Stubs Net::HTTP.post_form to return +response+ (or raise it when
  # :exception is set), verifying the expected URI and form data.
  def expect_http_post(response, options = {})
    unless options[:exception]
      Net::HTTP.expects(:post_form).with(@expected_uri, @expected_post_data).returns(response)
    else
      Net::HTTP.expects(:post_form).raises response
    end
  end
  # A stubbed HTTP response exposing only +body+.
  def response_with_body(body)
    stub(:body => body)
  end
end
| augustf/wtgsite | vendor/plugins/recaptcha/test/verify_recaptcha_test.rb | Ruby | mit | 3,213 |
<?php
namespace DoctrineBundle\Tests\DependencyInjection\Fixtures\Bundles\XmlBundle\Entity;
class Test
{
} | boutell/SillyCMS | src/vendor/symfony/src/Symfony/Bundle/DoctrineBundle/Tests/DependencyInjection/Fixtures/Bundles/XmlBundle/Entity/Test.php | PHP | mit | 108 |
// Copyright 2011 Daniel James.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <boost/core/lightweight_test.hpp>
#include <boost/limits.hpp>
#include <boost/static_assert.hpp>
#include <boost/type_traits/is_same.hpp>
#include <boost/unordered/detail/implementation.hpp>
// Boilerplate
//
// ALLOCATOR_METHODS(name): expands to the boilerplate a test allocator needs
// (rebind, converting constructor, address/allocate/deallocate,
// construct/destroy, max_size, equality) for allocators whose pointer type
// is plain T*.  Comments must stay outside the macro bodies, since a
// trailing // comment would swallow the line-continuation backslash.
#define ALLOCATOR_METHODS(name)                                                \
  template <typename U> struct rebind                                          \
  {                                                                            \
    typedef name<U> other;                                                     \
  };                                                                           \
                                                                               \
  name() {}                                                                    \
  template <typename Y> name(name<Y> const&) {}                                \
  T* address(T& r) { return &r; }                                              \
  T const* address(T const& r) { return &r; }                                  \
  T* allocate(std::size_t n)                                                   \
  {                                                                            \
    return static_cast<T*>(::operator new(n * sizeof(T)));                     \
  }                                                                            \
  T* allocate(std::size_t n, void const*)                                      \
  {                                                                            \
    return static_cast<T*>(::operator new(n * sizeof(T)));                     \
  }                                                                            \
  void deallocate(T* p, std::size_t) { ::operator delete((void*)p); }          \
  void construct(T* p, T const& t) { new (p) T(t); }                           \
  void destroy(T* p) { p->~T(); }                                              \
  std::size_t max_size() const                                                 \
  {                                                                            \
    return (std::numeric_limits<std::size_t>::max)();                          \
  }                                                                            \
  bool operator==(name<T> const&) const { return true; }                       \
  bool operator!=(name<T> const&) const { return false; }                      \
  /**/
// ALLOCATOR_METHODS_TYPEDEFS(name): same boilerplate, but expressed via the
// allocator's own pointer/const_pointer/size_type typedefs (used by
// allocator3 below, whose pointer is a fancy-pointer wrapper).  Note the
// comparison operators here are non-const, unlike ALLOCATOR_METHODS —
// presumably intentional to exercise a slightly unusual allocator; confirm.
#define ALLOCATOR_METHODS_TYPEDEFS(name)                                       \
  template <typename U> struct rebind                                          \
  {                                                                            \
    typedef name<U> other;                                                     \
  };                                                                           \
                                                                               \
  name() {}                                                                    \
  template <typename Y> name(name<Y> const&) {}                                \
  pointer address(T& r) { return &r; }                                         \
  const_pointer address(T const& r) { return &r; }                             \
  pointer allocate(std::size_t n)                                              \
  {                                                                            \
    return pointer(::operator new(n * sizeof(T)));                             \
  }                                                                            \
  pointer allocate(std::size_t n, void const*)                                 \
  {                                                                            \
    return pointer(::operator new(n * sizeof(T)));                             \
  }                                                                            \
  void deallocate(pointer p, std::size_t) { ::operator delete((void*)p); }     \
  void construct(T* p, T const& t) { new (p) T(t); }                           \
  void destroy(T* p) { p->~T(); }                                              \
  size_type max_size() const                                                   \
  {                                                                            \
    return (std::numeric_limits<size_type>::max)();                            \
  }                                                                            \
  bool operator==(name<T> const&) { return true; }                             \
  bool operator!=(name<T> const&) { return false; }                            \
  /**/
// Tag types standing in for true_type/false_type in the allocators'
// propagate_on_* / is_always_equal typedefs below.
struct yes_type
{
  enum
  {
    value = true
  };
};
struct no_type
{
  enum
  {
    value = false
  };
};
// For tracking calls...
// |selected| counts how often an allocator's own
// select_on_container_copy_construction() hook ran since the last reset().
static int selected;
void reset() { selected = 0; }
// Invokes allocator_traits<Allocator>::select_on_container_copy_construction
// on a default-constructed allocator and returns how many times the
// allocator's own hook was called (0 means the traits' fallback was used).
template <typename Allocator> int call_select()
{
  typedef boost::unordered::detail::allocator_traits<Allocator> traits;
  Allocator a;
  reset();
  BOOST_TEST(traits::select_on_container_copy_construction(a) == a);
  return selected;
}
// Empty allocator test
// Minimal allocator providing only value_type plus the boilerplate methods;
// every trait must be synthesised by allocator_traits' defaults.
template <typename T> struct empty_allocator
{
  typedef T value_type;
  ALLOCATOR_METHODS(empty_allocator)
};
// Checks the defaults: synthesised size/difference/pointer types, all
// propagate_on_* false, is_always_equal true (the allocator is empty), and
// the fallback select_on_container_copy_construction (hook count 0).
void test_empty_allocator()
{
  typedef empty_allocator<int> allocator;
  typedef boost::unordered::detail::allocator_traits<allocator> traits;
#if BOOST_UNORDERED_USE_ALLOCATOR_TRAITS == 1
  BOOST_STATIC_ASSERT((boost::is_same<traits::size_type,
    std::make_unsigned<std::ptrdiff_t>::type>::value));
#else
  BOOST_STATIC_ASSERT((boost::is_same<traits::size_type, std::size_t>::value));
#endif
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::difference_type, std::ptrdiff_t>::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::pointer, int*>::value));
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::const_pointer, int const*>::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::value_type, int>::value));
  BOOST_TEST(!traits::propagate_on_container_copy_assignment::value);
  BOOST_TEST(!traits::propagate_on_container_move_assignment::value);
  BOOST_TEST(!traits::propagate_on_container_swap::value);
  BOOST_TEST(traits::is_always_equal::value);
  BOOST_TEST(call_select<allocator>() == 0);
}
// allocator 1
// Allocator that opts in to everything: all propagate_on_* and
// is_always_equal are yes_type, and it supplies its own
// select_on_container_copy_construction hook, which bumps |selected|.
template <typename T> struct allocator1
{
  typedef T value_type;
  ALLOCATOR_METHODS(allocator1)
  typedef yes_type propagate_on_container_copy_assignment;
  typedef yes_type propagate_on_container_move_assignment;
  typedef yes_type propagate_on_container_swap;
  typedef yes_type is_always_equal;
  allocator1<T> select_on_container_copy_construction() const
  {
    ++selected;
    return allocator1<T>();
  }
};
// Checks that every opt-in is reported by allocator_traits and that the
// custom select_on_container_copy_construction hook runs exactly once.
void test_allocator1()
{
  typedef allocator1<int> allocator;
  typedef boost::unordered::detail::allocator_traits<allocator> traits;
#if BOOST_UNORDERED_USE_ALLOCATOR_TRAITS == 1
  BOOST_STATIC_ASSERT((boost::is_same<traits::size_type,
    std::make_unsigned<std::ptrdiff_t>::type>::value));
#else
  BOOST_STATIC_ASSERT((boost::is_same<traits::size_type, std::size_t>::value));
#endif
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::difference_type, std::ptrdiff_t>::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::pointer, int*>::value));
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::const_pointer, int const*>::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::value_type, int>::value));
  BOOST_TEST(traits::propagate_on_container_copy_assignment::value);
  BOOST_TEST(traits::propagate_on_container_move_assignment::value);
  BOOST_TEST(traits::propagate_on_container_swap::value);
  BOOST_TEST(traits::is_always_equal::value);
  BOOST_TEST(call_select<allocator>() == 1);
}
// allocator 2
// Here select_on_container_copy_construction lives in a CRTP base class, to
// check that allocator_traits still finds it through inheritance.
template <typename Alloc> struct allocator2_base
{
  Alloc select_on_container_copy_construction() const
  {
    ++selected;
    return Alloc();
  }
};
// Allocator supplying its own pointer/size typedefs and explicit no_type
// markers for all propagate_on_* traits and is_always_equal.
template <typename T> struct allocator2 : allocator2_base<allocator2<T> >
{
  typedef T value_type;
  typedef T* pointer;
  typedef T const* const_pointer;
  typedef std::size_t size_type;
  ALLOCATOR_METHODS(allocator2)
  typedef no_type propagate_on_container_copy_assignment;
  typedef no_type propagate_on_container_move_assignment;
  typedef no_type propagate_on_container_swap;
  typedef no_type is_always_equal;
};
// Checks that the explicit typedefs are honoured, every no_type marker is
// reported as false, and the inherited hook still runs exactly once.
void test_allocator2()
{
  typedef allocator2<int> allocator;
  typedef boost::unordered::detail::allocator_traits<allocator> traits;
  BOOST_STATIC_ASSERT((boost::is_same<traits::size_type, std::size_t>::value));
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::difference_type, std::ptrdiff_t>::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::pointer, int*>::value));
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::const_pointer, int const*>::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::value_type, int>::value));
  BOOST_TEST(!traits::propagate_on_container_copy_assignment::value);
  BOOST_TEST(!traits::propagate_on_container_move_assignment::value);
  BOOST_TEST(!traits::propagate_on_container_swap::value);
  BOOST_TEST(!traits::is_always_equal::value);
  BOOST_TEST(call_select<allocator>() == 1);
}
// allocator 3
// Minimal fancy-pointer wrapper used as allocator3's pointer type.  The
// void / const void specializations presumably exist so rebinding the
// pointer type to (const) void works — confirm against allocator_traits.
template <typename T> struct ptr
{
  T* value_;
  ptr(void* v) : value_((T*)v) {}
  T& operator*() const { return *value_; }
};
template <> struct ptr<void>
{
  void* value_;
  ptr(void* v) : value_(v) {}
};
template <> struct ptr<const void>
{
  void const* value_;
  ptr(void const* v) : value_(v) {}
};
// Allocator with a fancy pointer, a narrow size_type, a mix of explicit
// propagate settings (swap left unspecified) and a custom hook.
template <typename T> struct allocator3
{
  typedef T value_type;
  typedef ptr<T> pointer;
  typedef ptr<T const> const_pointer;
  typedef unsigned short size_type;
  int x; // Just to make it non-empty, so that is_always_equal is false.
  ALLOCATOR_METHODS_TYPEDEFS(allocator3)
  typedef yes_type propagate_on_container_copy_assignment;
  typedef no_type propagate_on_container_move_assignment;
  allocator3<T> select_on_container_copy_construction() const
  {
    ++selected;
    return allocator3<T>();
  }
};
// Checks that the allocator's own typedefs win over the defaults, that the
// unspecified propagate_on_container_swap defaults to false, that the
// non-empty allocator is not always-equal, and that the hook runs once.
void test_allocator3()
{
  typedef allocator3<int> allocator;
  typedef boost::unordered::detail::allocator_traits<allocator> traits;
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::size_type, unsigned short>::value));
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::difference_type, std::ptrdiff_t>::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::pointer, ptr<int> >::value));
  BOOST_STATIC_ASSERT(
    (boost::is_same<traits::const_pointer, ptr<int const> >::value));
  BOOST_STATIC_ASSERT((boost::is_same<traits::value_type, int>::value));
  BOOST_TEST(traits::propagate_on_container_copy_assignment::value);
  BOOST_TEST(!traits::propagate_on_container_move_assignment::value);
  BOOST_TEST(!traits::propagate_on_container_swap::value);
  BOOST_TEST(!traits::is_always_equal::value);
  BOOST_TEST(call_select<allocator>() == 1);
}
int main()
{
test_empty_allocator();
test_allocator1();
test_allocator2();
test_allocator3();
return boost::report_errors();
}
| davehorton/drachtio-server | deps/boost_1_77_0/libs/unordered/test/unordered/allocator_traits.cpp | C++ | mit | 11,069 |
<?php
class CM_Paging_StreamSubscribe_User extends CM_Paging_StreamSubscribe_Abstract {
/**
* @param CM_Model_User $user
*/
public function __construct(CM_Model_User $user) {
$source = new CM_PagingSource_Sql('`id`', 'cm_stream_subscribe', '`userId` = ' . $user->getId());
parent::__construct($source);
}
}
| alexispeter/CM | library/CM/Paging/StreamSubscribe/User.php | PHP | mit | 347 |
require 'fastlane_core'
require 'credentials_manager'
module Snapshot
class Options
def self.available_options
output_directory = (File.directory?("fastlane") ? "fastlane/screenshots" : "screenshots")
@options ||= [
FastlaneCore::ConfigItem.new(key: :workspace,
short_option: "-w",
env_name: "SNAPSHOT_WORKSPACE",
optional: true,
description: "Path the workspace file",
verify_block: proc do |value|
v = File.expand_path(value.to_s)
UI.user_error!("Workspace file not found at path '#{v}'") unless File.exist?(v)
UI.user_error!("Workspace file invalid") unless File.directory?(v)
UI.user_error!("Workspace file is not a workspace, must end with .xcworkspace") unless v.include?(".xcworkspace")
end),
FastlaneCore::ConfigItem.new(key: :project,
short_option: "-p",
optional: true,
env_name: "SNAPSHOT_PROJECT",
description: "Path the project file",
verify_block: proc do |value|
v = File.expand_path(value.to_s)
UI.user_error!("Project file not found at path '#{v}'") unless File.exist?(v)
UI.user_error!("Project file invalid") unless File.directory?(v)
UI.user_error!("Project file is not a project file, must end with .xcodeproj") unless v.include?(".xcodeproj")
end),
FastlaneCore::ConfigItem.new(key: :xcargs,
short_option: "-X",
env_name: "SNAPSHOT_XCARGS",
description: "Pass additional arguments to xcodebuild for the test phase. Be sure to quote the setting names and values e.g. OTHER_LDFLAGS=\"-ObjC -lstdc++\"",
optional: true,
type: :shell_string),
FastlaneCore::ConfigItem.new(key: :devices,
description: "A list of devices you want to take the screenshots from",
short_option: "-d",
type: Array,
optional: true,
verify_block: proc do |value|
available = FastlaneCore::DeviceManager.simulators
value.each do |current|
device = current.strip
unless available.any? { |d| d.name.strip == device } || device == "Mac"
UI.user_error!("Device '#{device}' not in list of available simulators '#{available.join(', ')}'")
end
end
end),
FastlaneCore::ConfigItem.new(key: :languages,
description: "A list of languages which should be used",
short_option: "-g",
type: Array,
default_value: ['en-US']),
FastlaneCore::ConfigItem.new(key: :launch_arguments,
env_name: 'SNAPSHOT_LAUNCH_ARGUMENTS',
description: "A list of launch arguments which should be used",
short_option: "-m",
type: Array,
default_value: ['']),
FastlaneCore::ConfigItem.new(key: :output_directory,
short_option: "-o",
env_name: "SNAPSHOT_OUTPUT_DIRECTORY",
description: "The directory where to store the screenshots",
default_value: output_directory),
FastlaneCore::ConfigItem.new(key: :output_simulator_logs,
env_name: "SNAPSHOT_OUTPUT_SIMULATOR_LOGS",
description: "If the logs generated by the app (e.g. using NSLog, perror, etc.) in the Simulator should be written to the output_directory",
type: TrueClass,
default_value: false,
optional: true),
FastlaneCore::ConfigItem.new(key: :ios_version,
description: "By default, the latest version should be used automatically. If you want to change it, do it here",
short_option: "-i",
optional: true),
FastlaneCore::ConfigItem.new(key: :skip_open_summary,
env_name: 'SNAPSHOT_SKIP_OPEN_SUMMARY',
description: "Don't open the HTML summary after running _snapshot_",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :skip_helper_version_check,
env_name: 'SNAPSHOT_SKIP_SKIP_HELPER_VERSION_CHECK',
description: "Do not check for most recent SnapshotHelper code",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :clear_previous_screenshots,
env_name: 'SNAPSHOT_CLEAR_PREVIOUS_SCREENSHOTS',
description: "Enabling this option will automatically clear previously generated screenshots before running snapshot",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :reinstall_app,
env_name: 'SNAPSHOT_REINSTALL_APP',
description: "Enabling this option will automatically uninstall the application before running it",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :erase_simulator,
env_name: 'SNAPSHOT_ERASE_SIMULATOR',
description: "Enabling this option will automatically erase the simulator before running the application",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :localize_simulator,
env_name: 'SNAPSHOT_LOCALIZE_SIMULATOR',
description: "Enabling this option will configure the Simulator's system language",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :app_identifier,
env_name: 'SNAPSHOT_APP_IDENTIFIER',
short_option: "-a",
optional: true,
description: "The bundle identifier of the app to uninstall (only needed when enabling reinstall_app)",
default_value: ENV["SNAPSHOT_APP_IDENTITIFER"] || CredentialsManager::AppfileConfig.try_fetch_value(:app_identifier)),
FastlaneCore::ConfigItem.new(key: :add_photos,
env_name: 'SNAPSHOT_PHOTOS',
short_option: "-j",
description: "A list of photos that should be added to the simulator before running the application",
type: Array,
optional: true),
FastlaneCore::ConfigItem.new(key: :add_videos,
env_name: 'SNAPSHOT_VIDEOS',
short_option: "-u",
description: "A list of videos that should be added to the simulator before running the application",
type: Array,
optional: true),
# Everything around building
FastlaneCore::ConfigItem.new(key: :buildlog_path,
short_option: "-l",
env_name: "SNAPSHOT_BUILDLOG_PATH",
description: "The directory where to store the build log",
default_value: "#{FastlaneCore::Helper.buildlog_path}/snapshot"),
FastlaneCore::ConfigItem.new(key: :clean,
short_option: "-c",
env_name: "SNAPSHOT_CLEAN",
description: "Should the project be cleaned before building it?",
is_string: false,
default_value: false),
FastlaneCore::ConfigItem.new(key: :configuration,
short_option: "-q",
env_name: "SNAPSHOT_CONFIGURATION",
description: "The configuration to use when building the app. Defaults to 'Release'",
optional: true),
FastlaneCore::ConfigItem.new(key: :xcpretty_args,
short_option: "-x",
env_name: "SNAPSHOT_XCPRETTY_ARGS",
description: "Additional xcpretty arguments",
is_string: true,
optional: true),
FastlaneCore::ConfigItem.new(key: :sdk,
short_option: "-k",
env_name: "SNAPSHOT_SDK",
description: "The SDK that should be used for building the application",
optional: true),
FastlaneCore::ConfigItem.new(key: :scheme,
short_option: "-s",
env_name: 'SNAPSHOT_SCHEME',
description: "The scheme you want to use, this must be the scheme for the UI Tests",
optional: true), # optional true because we offer a picker to the user
FastlaneCore::ConfigItem.new(key: :number_of_retries,
short_option: "-n",
env_name: 'SNAPSHOT_NUMBER_OF_RETRIES',
description: "The number of times a test can fail before snapshot should stop retrying",
type: Integer,
default_value: 1),
FastlaneCore::ConfigItem.new(key: :stop_after_first_error,
env_name: 'SNAPSHOT_BREAK_ON_FIRST_ERROR',
description: "Should snapshot stop immediately after the tests completely failed on one device?",
default_value: false,
is_string: false),
FastlaneCore::ConfigItem.new(key: :derived_data_path,
short_option: "-f",
env_name: "SNAPSHOT_DERIVED_DATA_PATH",
description: "The directory where build products and other derived data will go",
optional: true),
FastlaneCore::ConfigItem.new(key: :test_target_name,
env_name: "SNAPSHOT_TEST_TARGET_NAME",
description: "The name of the target you want to test (if you desire to override the Target Application from Xcode)",
optional: true),
FastlaneCore::ConfigItem.new(key: :namespace_log_files,
env_name: "SNAPSHOT_NAMESPACE_LOG_FILES",
description: "Separate the log files per device and per language",
optional: true,
is_string: false)
]
end
end
end
| NicholasFFox/fastlane | snapshot/lib/snapshot/options.rb | Ruby | mit | 13,252 |
//---------------------------------------------------------------------
// <copyright file="JsonLightUtils.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------
namespace Microsoft.Test.OData.TDD.Tests.Common.JsonLight
{
using System.Collections.Generic;
using Microsoft.OData.Core;
using Microsoft.OData.Core.JsonLight;
public static class JsonLightUtils
{
/// <summary>The default streaming Json Light media type.</summary>
internal static readonly ODataMediaType JsonLightStreamingMediaType = new ODataMediaType(
MimeConstants.MimeApplicationType,
MimeConstants.MimeJsonSubType,
new[]{
new KeyValuePair<string, string>(MimeConstants.MimeMetadataParameterName, MimeConstants.MimeMetadataParameterValueMinimal),
new KeyValuePair<string, string>(MimeConstants.MimeStreamingParameterName, MimeConstants.MimeParameterValueTrue),
new KeyValuePair<string, string>(MimeConstants.MimeIeee754CompatibleParameterName, MimeConstants.MimeParameterValueFalse)
});
/// <summary>
/// Gets the name of the property annotation property.
/// </summary>
/// <param name="propertyName">The name of the property to annotate.</param>
/// <param name="annotationName">The name of the annotation.</param>
/// <returns>The property name for the annotation property.</returns>
public static string GetPropertyAnnotationName(string propertyName, string annotationName)
{
return propertyName + JsonLightConstants.ODataPropertyAnnotationSeparatorChar + annotationName;
}
}
} | hotchandanisagar/odata.net | test/FunctionalTests/Tests/DataOData/Tests/OData.TDD.Tests/Common/JsonLight/JsonLightUtils.cs | C# | mit | 1,894 |
require "test_helper"
class MaintainingRepoSubscriptionsTest < ActionDispatch::IntegrationTest
fixtures :repos
def triage_the_sandbox
login_via_github
visit "/"
click_link "issue_triage_sandbox"
click_button "I Want to Triage: bemurphy/issue_triage_sandbox"
end
test "subscribing to a repo" do
assert_difference 'ActionMailer::Base.deliveries.size', +1 do
triage_the_sandbox
assert page.has_content?("issue_triage_sandbox")
end
assert_equal IssueAssignment.last.delivered, true
end
test "send an issue! button" do
triage_the_sandbox
assert_difference 'ActionMailer::Base.deliveries.size', +1 do
click_link "issue_triage_sandbox"
click_link "Send new issue!"
assert page.has_content?("You will receive an email with your new issue shortly")
end
assert_equal IssueAssignment.last.delivered, true
end
test "listing subscribers" do
triage_the_sandbox
click_link 'issue_triage_sandbox'
click_link 'Subscribers'
assert page.has_content?("@mockstar")
end
test "list only favorite languages" do
login_via_github
visit "/"
assert !page.has_content?("javascript")
end
end
| colinrubbert/codetriage | test/integration/maintaining_repo_subscriptions_test.rb | Ruby | mit | 1,192 |
// Generated by CoffeeScript 1.3.3
(function() {
define(["smog/server", "smog/notify", "templates/connect"], function(server, notify, templ) {
return {
show: function() {
$('#content').html(templ());
$('#connect-modal').modal({
backdrop: false
});
return $('#connect-button').click(function() {
var host;
host = $('#host').val();
return server.connect(host, function(err, okay) {
if (err != null) {
if (typeof err === 'object' && Object.keys(err).length === 0) {
err = "Server unavailable";
}
return notify.error("Connection error: " + (err.err || err));
} else {
$('#connect-modal').modal('hide');
return window.location.hash = '#/home';
}
});
});
}
};
});
}).call(this);
| wearefractal/smog | public/js/routes/index.js | JavaScript | mit | 911 |
//---------------------------------------------------------------------
// <copyright file="StreamReferenceValueReaderJsonLightTests.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------
namespace Microsoft.Test.Taupo.OData.Reader.Tests.JsonLight
{
#region Namespaces
using System.Collections.Generic;
using System.Linq;
using Microsoft.Test.Taupo.Astoria.Contracts.OData;
using Microsoft.Test.Taupo.Astoria.OData;
using Microsoft.Test.Taupo.Common;
using Microsoft.Test.Taupo.Contracts.EntityModel;
using Microsoft.Test.Taupo.Execution;
using Microsoft.Test.Taupo.OData.Common;
using Microsoft.Test.Taupo.OData.Contracts;
using Microsoft.Test.Taupo.OData.Contracts.Json;
using Microsoft.Test.Taupo.OData.JsonLight;
using Microsoft.Test.Taupo.OData.Reader.Tests;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Microsoft.OData.Edm;
using Microsoft.OData.Edm.Library;
using TestModels = Microsoft.Test.OData.Utils.Metadata.TestModels;
#endregion Namespaces
/// <summary>
/// Tests reading of various complex value JSON Light payloads.
/// </summary>
[TestClass, TestCase]
public class StreamReferenceValueReaderJsonLightTests : ODataReaderTestCase
{
[InjectDependency]
public IPayloadGenerator PayloadGenerator { get; set; }
private PayloadReaderTestDescriptor.Settings settings;
[InjectDependency]
public PayloadReaderTestDescriptor.Settings Settings
{
get { return this.settings; }
set { this.settings = value; this.settings.ExpectedResultSettings.ObjectModelToPayloadElementConverter = new JsonLightObjectModelToPayloadElementConverter(); }
}
private sealed class StreamPropertyTestCase
{
public string DebugDescription { get; set; }
public string Json { get; set; }
public EntityInstance ExpectedEntity { get; set; }
public ExpectedException ExpectedException { get; set; }
public bool OnlyResponse { get; set; }
public IEdmTypeReference OwningEntityType { get; set; }
}
[TestMethod, TestCategory("Reader.Json"), Variation(Description = "Verifies correct reading of stream properties (stream reference values) with fully specified metadata.")]
public void StreamPropertyTest()
{
IEdmModel model = TestModels.BuildTestModel();
var testCases = new[]
{
new StreamPropertyTestCase
{
DebugDescription = "Just edit link",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/test/Cities(1)/Skyline", "http://odata.org/streamproperty/editlink", null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\""
},
new StreamPropertyTestCase
{
DebugDescription = "Just read link",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/streamproperty/readlink", "http://odata.org/test/Cities(1)/Skyline", null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaReadLinkAnnotationName) + "\":\"http://odata.org/streamproperty/readlink\""
},
new StreamPropertyTestCase
{
DebugDescription = "Just content type",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/test/Cities(1)/Skyline", "http://odata.org/test/Cities(1)/Skyline", "streamproperty:contenttype", null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaContentTypeAnnotationName) + "\":\"streamproperty:contenttype\""
},
new StreamPropertyTestCase
{
DebugDescription = "Just ETag",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/test/Cities(1)/Skyline", "http://odata.org/test/Cities(1)/Skyline", null, "streamproperty:etag"),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaETagAnnotationName) + "\":\"streamproperty:etag\""
},
new StreamPropertyTestCase
{
DebugDescription = "Everything",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/streamproperty/readlink", "http://odata.org/streamproperty/editlink", "streamproperty:contenttype", "streamproperty:etag"),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaReadLinkAnnotationName) + "\":\"http://odata.org/streamproperty/readlink\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaContentTypeAnnotationName) + "\":\"streamproperty:contenttype\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaETagAnnotationName) + "\":\"streamproperty:etag\""
},
new StreamPropertyTestCase
{
DebugDescription = "Just custom annotation - should report empty stream property",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/test/Cities(1)/Skyline", "http://odata.org/test/Cities(1)/Skyline", null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", "custom.value") + "\":\"value\""
},
new StreamPropertyTestCase
{
DebugDescription = "Everything with custom annotation - custom annotations should be ignored",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/streamproperty/readlink", "http://odata.org/streamproperty/editlink", "streamproperty:contenttype", "streamproperty:etag"),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", "custom.value") + "\":\"value\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaReadLinkAnnotationName) + "\":\"http://odata.org/streamproperty/readlink\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaContentTypeAnnotationName) + "\":\"streamproperty:contenttype\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaETagAnnotationName) + "\":\"streamproperty:etag\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", "custom.type") + "\":42"
},
new StreamPropertyTestCase
{
DebugDescription = "With odata.type annotation - should fail",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataTypeAnnotationName) + "\":\"Edm.Stream\"",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightEntryAndFeedDeserializer_UnexpectedStreamPropertyAnnotation", "Skyline", JsonLightConstants.ODataTypeAnnotationName)
},
new StreamPropertyTestCase
{
DebugDescription = "Everything with navigation link URL annotation - should fail",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", "http://odata.org/streamproperty/readlink", "http://odata.org/streamproperty/editlink", "streamproperty:contenttype", "streamproperty:etag"),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaReadLinkAnnotationName) + "\":\"http://odata.org/streamproperty/readlink\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataNavigationLinkUrlAnnotationName) + "\":\"http://odata.org/streamproperty/navlink\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaContentTypeAnnotationName) + "\":\"streamproperty:contenttype\"," +
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaETagAnnotationName) + "\":\"streamproperty:etag\"",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightEntryAndFeedDeserializer_UnexpectedStreamPropertyAnnotation", "Skyline", JsonLightConstants.ODataNavigationLinkUrlAnnotationName)
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid edit link - wrong primitive",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":42",
ExpectedException = ODataExpectedExceptions.ODataException("JsonReaderExtensions_CannotReadPropertyValueAsString", "42", JsonLightConstants.ODataMediaEditLinkAnnotationName),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid edit link - null",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":null",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightReaderUtils_AnnotationWithNullValue", JsonLightConstants.ODataMediaEditLinkAnnotationName),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid read link - wrong primitive",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaReadLinkAnnotationName) + "\":true",
ExpectedException = ODataExpectedExceptions.ODataException("JsonReaderExtensions_CannotReadPropertyValueAsString", "True", JsonLightConstants.ODataMediaReadLinkAnnotationName),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid read link - null",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaReadLinkAnnotationName) + "\":null",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightReaderUtils_AnnotationWithNullValue", JsonLightConstants.ODataMediaReadLinkAnnotationName),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid ETag - non primitive",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaETagAnnotationName) + "\":[]",
ExpectedException = ODataExpectedExceptions.ODataException("JsonReaderExtensions_UnexpectedNodeDetected", "PrimitiveValue", "StartArray"),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid ETag - null",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaETagAnnotationName) + "\":null",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightReaderUtils_AnnotationWithNullValue", JsonLightConstants.ODataMediaETagAnnotationName),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid content type - non primitive",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaContentTypeAnnotationName) + "\":{}",
ExpectedException = ODataExpectedExceptions.ODataException("JsonReaderExtensions_UnexpectedNodeDetected", "PrimitiveValue", "StartObject"),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid content type - null",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaContentTypeAnnotationName) + "\":null",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightReaderUtils_AnnotationWithNullValue", JsonLightConstants.ODataMediaContentTypeAnnotationName),
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Open stream property",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("OpenSkyline", null, "http://odata.org/streamproperty/editlink", null, null),
OwningEntityType = model.FindDeclaredType("TestModel.CityOpenType").ToTypeReference(),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("OpenSkyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\"",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightEntryAndFeedDeserializer_OpenPropertyWithoutValue", "OpenSkyline"),
OnlyResponse = true
},
new StreamPropertyTestCase
{
DebugDescription = "Undeclared stream property",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("NewSkyline", null, "http://odata.org/streamproperty/editlink", null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("NewSkyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\"",
ExpectedException = ODataExpectedExceptions.ODataException("ValidationUtils_PropertyDoesNotExistOnType", "NewSkyline", "TestModel.CityType"),
OnlyResponse = true
},
new StreamPropertyTestCase
{
DebugDescription = "Stream property declared with non-stream type",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Name", null, "http://odata.org/streamproperty/editlink", null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Name", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\"",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightEntryAndFeedDeserializer_PropertyWithoutValueWithWrongType", "Name", "Edm.String"),
OnlyResponse = true
},
new StreamPropertyTestCase
{
DebugDescription = "Stream property with value",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, "http://odata.org/streamproperty/editlink", null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"http://odata.org/streamproperty/editlink\"," +
"\"Skyline\":\"value\"",
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightEntryAndFeedDeserializer_StreamPropertyWithValue", "Skyline"),
OnlyResponse = true
},
};
this.RunStreamPropertyTest(model, testCases);
}
[TestMethod, TestCategory("Reader.Json"), Variation(Description = "Verifies correct reading of stream properties (stream reference values) with fully specified metadata.")]
public void StreamPropertyTestWithRelativeLinkUris()
{
IEdmModel model = TestModels.BuildTestModel();
var testCases = new[]
{
new StreamPropertyTestCase
{
DebugDescription = "Invalid edit link - non-URL",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaEditLinkAnnotationName) + "\":\"xxx yyy zzz\"",
ExpectedException = null,
OnlyResponse = true,
},
new StreamPropertyTestCase
{
DebugDescription = "Invalid read link - non-URL",
ExpectedEntity = PayloadBuilder.Entity().StreamProperty("Skyline", null, null, null, null),
Json =
"\"" + JsonLightUtils.GetPropertyAnnotationName("Skyline", JsonLightConstants.ODataMediaReadLinkAnnotationName) + "\":\"xxx yyy zzz\"",
ExpectedException = null,
OnlyResponse = true,
},
};
this.RunStreamPropertyTest(model, testCases);
}
private void RunStreamPropertyTest(IEdmModel model, IEnumerable<StreamPropertyTestCase> testCases)
{
var cityType = model.FindDeclaredType("TestModel.CityType").ToTypeReference();
var cities = model.EntityContainer.FindEntitySet("Cities");
IEnumerable<PayloadReaderTestDescriptor> testDescriptors = testCases.Select(testCase =>
{
IEdmTypeReference entityType = testCase.OwningEntityType ?? cityType;
EntityInstance entity = PayloadBuilder.Entity(entityType.FullName()).PrimitiveProperty("Id", 1)
.JsonRepresentation(
"{" +
"\"" + JsonLightConstants.ODataPropertyAnnotationSeparator + JsonLightConstants.ODataContextAnnotationName + "\":\"http://odata.org/test/$metadata#TestModel.DefaultContainer.Cities/" + entityType.FullName() + "()/$entity\"," +
"\"" + JsonLightConstants.ODataPropertyAnnotationSeparator + JsonLightConstants.ODataTypeAnnotationName + "\":\"" + entityType.FullName() + "\"," +
"\"Id\": 1," +
testCase.Json +
"}")
.ExpectedEntityType(entityType, cities);
foreach (NamedStreamInstance streamProperty in testCase.ExpectedEntity.Properties.OfType<NamedStreamInstance>())
{
entity.Add(streamProperty.DeepCopy());
}
return new PayloadReaderTestDescriptor(this.Settings)
{
DebugDescription = testCase.DebugDescription,
PayloadEdmModel = model,
PayloadElement = entity,
ExpectedException = testCase.ExpectedException,
SkipTestConfiguration = tc => testCase.OnlyResponse ? tc.IsRequest : false
};
});
this.CombinatorialEngineProvider.RunCombinations(
testDescriptors,
this.ReaderTestConfigurationProvider.JsonLightFormatConfigurations,
(testDescriptor, testConfiguration) =>
{
if (testConfiguration.IsRequest)
{
testDescriptor = new PayloadReaderTestDescriptor(testDescriptor)
{
ExpectedException = ODataExpectedExceptions.ODataException("ODataJsonLightEntryAndFeedDeserializer_StreamPropertyInRequest")
};
}
// These descriptors are already tailored specifically for Json Light and
// do not require normalization.
testDescriptor.TestDescriptorNormalizers.Clear();
var testConfigClone = new ReaderTestConfiguration(testConfiguration);
testConfigClone.MessageReaderSettings.BaseUri = null;
testDescriptor.RunTest(testConfigClone);
});
}
}
}
| hotchandanisagar/odata.net | test/FunctionalTests/Tests/DataOData/Tests/OData.Reader.Tests/JsonLight/StreamReferenceValueReaderJsonLightTests.cs | C# | mit | 23,505 |
package cmd
import (
"bytes"
"fmt"
"github.com/smira/aptly/aptly"
"github.com/smira/aptly/deb"
"github.com/smira/aptly/query"
"github.com/smira/aptly/utils"
"github.com/smira/commander"
"github.com/smira/flag"
"os"
"path/filepath"
"text/template"
)
// aptlyRepoInclude implements `aptly repo include`: it scans the arguments for
// .changes files (or directories containing them), verifies and parses each one,
// and imports the referenced packages into the local repository selected by the
// -repo template. Successfully processed files are removed unless
// -no-remove-files is set. Per-file failures are collected and reported at the
// end rather than aborting the whole run.
func aptlyRepoInclude(cmd *commander.Command, args []string) error {
	var err error

	// At least one file or directory argument is required.
	if len(args) < 1 {
		cmd.Usage()
		return commander.ErrCommandError
	}

	// Build the GPG verifier from -keyring flags; fall back to the default
	// GPG verifier when none was configured.
	verifier, err := getVerifier(context.Flags())
	if err != nil {
		return fmt.Errorf("unable to initialize GPG verifier: %s", err)
	}
	if verifier == nil {
		verifier = &utils.GpgVerifier{}
	}

	// Snapshot all command-line flags up front.
	forceReplace := context.Flags().Lookup("force-replace").Value.Get().(bool)
	acceptUnsigned := context.Flags().Lookup("accept-unsigned").Value.Get().(bool)
	ignoreSignatures := context.Flags().Lookup("ignore-signatures").Value.Get().(bool)
	noRemoveFiles := context.Flags().Lookup("no-remove-files").Value.Get().(bool)

	// -repo is a text/template evaluated against each .changes file's stanza,
	// e.g. the default "{{.Distribution}}".
	repoTemplate, err := template.New("repo").Parse(context.Flags().Lookup("repo").Value.Get().(string))
	if err != nil {
		return fmt.Errorf("error parsing -repo template: %s", err)
	}

	// Optional global uploaders restrictions (-uploaders-file); each rule's
	// condition is pre-compiled into a package query.
	uploaders := (*deb.Uploaders)(nil)
	uploadersFile := context.Flags().Lookup("uploaders-file").Value.Get().(string)
	if uploadersFile != "" {
		uploaders, err = deb.NewUploadersFromFile(uploadersFile)
		if err != nil {
			return err
		}
		for i := range uploaders.Rules {
			uploaders.Rules[i].CompiledCondition, err = query.Parse(uploaders.Rules[i].Condition)
			if err != nil {
				return fmt.Errorf("error parsing query %s: %s", uploaders.Rules[i].Condition, err)
			}
		}
	}

	reporter := &aptly.ConsoleResultReporter{Progress: context.Progress()}

	// changesFiles: .changes files found in args; failedFiles accumulates every
	// file that could not be processed; processedFiles those imported OK.
	var changesFiles, failedFiles, processedFiles []string
	changesFiles, failedFiles = deb.CollectChangesFiles(args, reporter)

	for _, path := range changesFiles {
		var changes *deb.Changes

		// Parse the .changes file itself.
		changes, err = deb.NewChanges(path)
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", path, err)
			continue
		}

		// Verify the signature (subject to -accept-unsigned /
		// -ignore-signatures) and parse the control stanza.
		err = changes.VerifyAndParse(acceptUnsigned, ignoreSignatures, verifier)
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
			changes.Cleanup()
			continue
		}

		// Copy referenced files into a temporary directory for import.
		err = changes.Prepare()
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
			changes.Cleanup()
			continue
		}

		// Resolve the destination repo name by applying the -repo template to
		// this file's stanza.
		repoName := &bytes.Buffer{}
		err = repoTemplate.Execute(repoName, changes.Stanza)
		if err != nil {
			return fmt.Errorf("error applying template to repo: %s", err)
		}

		context.Progress().Printf("Loading repository %s for changes file %s...\n", repoName.String(), changes.ChangesName)

		repo, err := context.CollectionFactory().LocalRepoCollection().ByName(repoName.String())
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
			changes.Cleanup()
			continue
		}

		// Per-repo uploaders config, when present, overrides the global one;
		// its conditions are compiled lazily here.
		currentUploaders := uploaders
		if repo.Uploaders != nil {
			currentUploaders = repo.Uploaders
			for i := range currentUploaders.Rules {
				currentUploaders.Rules[i].CompiledCondition, err = query.Parse(currentUploaders.Rules[i].Condition)
				if err != nil {
					return fmt.Errorf("error parsing query %s: %s", currentUploaders.Rules[i].Condition, err)
				}
			}
		}

		// Enforce uploader restrictions against the signature keys and fields
		// of this .changes file.
		if currentUploaders != nil {
			if err = currentUploaders.IsAllowed(changes); err != nil {
				failedFiles = append(failedFiles, path)
				reporter.Warning("changes file skipped due to uploaders config: %s, keys %#v: %s",
					changes.ChangesName, changes.SignatureKeys, err)
				changes.Cleanup()
				continue
			}
		}

		err = context.CollectionFactory().LocalRepoCollection().LoadComplete(repo)
		if err != nil {
			return fmt.Errorf("unable to load repo: %s", err)
		}

		// Materialize the repo's current package list so new packages can be
		// merged into it.
		list, err := deb.NewPackageListFromRefList(repo.RefList(), context.CollectionFactory().PackageCollection(), context.Progress())
		if err != nil {
			return fmt.Errorf("unable to load packages: %s", err)
		}

		packageFiles, _ := deb.CollectPackageFiles([]string{changes.TempDir}, reporter)

		// Restrict the import to exactly the packages listed in the .changes
		// file (guards against stray files in the temp dir).
		var restriction deb.PackageQuery
		restriction, err = changes.PackageQuery()
		if err != nil {
			failedFiles = append(failedFiles, path)
			reporter.Warning("unable to process file %s: %s", changes.ChangesName, err)
			changes.Cleanup()
			continue
		}

		var processedFiles2, failedFiles2 []string
		processedFiles2, failedFiles2, err = deb.ImportPackageFiles(list, packageFiles, forceReplace, verifier, context.PackagePool(),
			context.CollectionFactory().PackageCollection(), reporter, restriction)
		if err != nil {
			return fmt.Errorf("unable to import package files: %s", err)
		}

		// Persist the updated package list back to the repo.
		repo.UpdateRefList(deb.NewPackageRefListFromPackageList(list))

		err = context.CollectionFactory().LocalRepoCollection().Update(repo)
		if err != nil {
			return fmt.Errorf("unable to save: %s", err)
		}

		err = changes.Cleanup()
		if err != nil {
			return err
		}

		// Map temp-dir results back to paths relative to where the .changes
		// file was found, so removal below targets the original files.
		for _, file := range failedFiles2 {
			failedFiles = append(failedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
		}
		for _, file := range processedFiles2 {
			processedFiles = append(processedFiles, filepath.Join(changes.BasePath, filepath.Base(file)))
		}
		processedFiles = append(processedFiles, path)
	}

	// Default behaviour: delete everything that was imported successfully.
	if !noRemoveFiles {
		processedFiles = utils.StrSliceDeduplicate(processedFiles)

		for _, file := range processedFiles {
			err := os.Remove(file)
			if err != nil {
				return fmt.Errorf("unable to remove file: %s", err)
			}
		}
	}

	// Any per-file failure makes the whole command fail, after listing them.
	if len(failedFiles) > 0 {
		context.Progress().ColoredPrintf("@y[!]@| @!Some files were skipped due to errors:@|")
		for _, file := range failedFiles {
			context.Progress().ColoredPrintf("  %s", file)
		}

		return fmt.Errorf("some files failed to be added")
	}

	return err
}
// makeCmdRepoInclude constructs the `aptly repo include` subcommand definition,
// wiring its run handler, usage text, and the flags it understands.
func makeCmdRepoInclude() *commander.Command {
	command := &commander.Command{
		Run:       aptlyRepoInclude,
		UsageLine: "include <file.changes>|<directory> ...",
		Short:     "add packages to local repositories based on .changes files",
		Long: `
Command include looks for .changes files in list of arguments or specified directories. Each
.changes file is verified, parsed, referenced files are put into separate temporary directory
and added into local repository. Successfully imported files are removed by default.

Additionally uploads could be restricted with <uploaders.json> file. Rules in this file control
uploads based on GPG key ID of .changes file signature and queries on .changes file fields.

Example:

   $ aptly repo include -repo=foo-release incoming/
`,
		Flag: *flag.NewFlagSet("aptly-repo-include", flag.ExitOnError),
	}

	// Register command-specific flags on the embedded flag set.
	flags := &command.Flag
	flags.Bool("no-remove-files", false, "don't remove files that have been imported successfully into repository")
	flags.Bool("force-replace", false, "when adding package that conflicts with existing package, remove existing package")
	flags.String("repo", "{{.Distribution}}", "which repo should files go to, defaults to Distribution field of .changes file")
	flags.Var(&keyRingsFlag{}, "keyring", "gpg keyring to use when verifying Release file (could be specified multiple times)")
	flags.Bool("ignore-signatures", false, "disable verification of .changes file signature")
	flags.Bool("accept-unsigned", false, "accept unsigned .changes files")
	flags.String("uploaders-file", "", "path to uploaders.json file")

	return command
}
| bsundsrud/aptly | cmd/repo_include.go | GO | mit | 7,511 |
/**
* Creates a new instance of Emitter.
* @class
* @returns {Object} Returns a new instance of Emitter.
* @example
* // Creates a new instance of Emitter.
* var Emitter = require('emitter');
*
* var emitter = new Emitter();
*/
class Emitter {
  /**
   * Adds a listener to the collection for the specified event.
   * @memberof! Emitter.prototype
   * @function
   * @param {String} event - The event name.
   * @param {Function} listener - A listener function to add.
   * @returns {Object} Returns an instance of Emitter.
   * @example
   * // Add an event listener to "foo" event.
   * emitter.on('foo', listener);
   */
  on(event, listener) {
    // Lazily create the event map and the per-event listener array.
    this._eventCollection = this._eventCollection || {};
    this._eventCollection[event] = this._eventCollection[event] || [];

    // Append the listener to the collection of the given event.
    this._eventCollection[event].push(listener);

    return this;
  }

  /**
   * Adds a listener to the collection for the specified event that will be called only once.
   * @memberof! Emitter.prototype
   * @function
   * @param {String} event - The event name.
   * @param {Function} listener - A listener function to add.
   * @returns {Object} Returns an instance of Emitter.
   * @example
   * // Will add an event handler to "foo" event once.
   * emitter.once('foo', listener);
   */
  once(event, listener) {
    const self = this;

    // Wrapper that detaches itself before delegating, so the original
    // listener fires at most once.
    function fn() {
      self.off(event, fn);
      listener.apply(this, arguments);
    }

    // Keep a back-reference so off(event, listener) can match the wrapper
    // by the original listener.
    fn.listener = listener;

    this.on(event, fn);

    return this;
  }

  /**
   * Removes every registration of a listener from the collection for the
   * specified event (including once() wrappers created for it).
   * @memberof! Emitter.prototype
   * @function
   * @param {String} event - The event name.
   * @param {Function} listener - A listener function to remove.
   * @returns {Object} Returns an instance of Emitter.
   * @example
   * // Remove a given listener.
   * emitter.off('foo', listener);
   */
  off(event, listener) {
    let listeners;

    // Nothing registered for this event — no-op.
    if (!this._eventCollection || !(listeners = this._eventCollection[event])) {
      return this;
    }

    // BUG FIX: the previous implementation spliced inside forEach, which
    // skips the element following each removal, so adjacent duplicate
    // registrations of the same listener survived. Filtering removes every
    // match safely.
    const remaining = listeners.filter(
      fn => fn !== listener && fn.listener !== listener
    );

    // Drop the event entry entirely when no listeners remain.
    if (remaining.length === 0) {
      delete this._eventCollection[event];
    } else {
      this._eventCollection[event] = remaining;
    }

    return this;
  }

  /**
   * Execute each item in the listener collection in order with the specified data.
   * @memberof! Emitter.prototype
   * @function
   * @param {String} event - The name of the event you want to emit.
   * @param {...Object} data - Data to pass to the listeners.
   * @returns {Object} Returns an instance of Emitter.
   * @example
   * // Emits the "foo" event with 'param1' and 'param2' as arguments.
   * emitter.emit('foo', 'param1', 'param2');
   */
  emit(event, ...args) {
    let listeners;

    // Nothing registered for this event — no-op.
    if (!this._eventCollection || !(listeners = this._eventCollection[event])) {
      return this;
    }

    // Clone so listeners added/removed during emission don't affect this pass.
    listeners = listeners.slice(0);

    listeners.forEach(fn => fn.apply(this, args));

    return this;
  }
}
/**
* Exports Emitter
*/
export default Emitter;
| sinfin/folio | vendor/assets/bower_components/emitter-es6/src/index.js | JavaScript | mit | 3,401 |